Plugins: Migrate Elasticsearch to backend plugin SDK (#36132)

* Migrate Elasticsearch to backend plugin SDK

* Fix linting

* Move away from Convey!

* Rebase commit

* Small logger fix

* Fixes according to reviewer's comments

* Fixes according to reviewer's comments

* Fixes according to reviewer's comments

* More cleanup

* Move things around - small refactoring

* Fix typo

* Update calculator - add tests

* Fixes according to reviewer's comments
Author: Dimitris Sotirakis
Date: 2021-07-15 17:45:59 +03:00
Committed by: GitHub
Parent: 75947da527
Commit: 0df1b33d71
18 changed files with 1345 additions and 779 deletions

pkg/tsdb/calculator.go (new file, 222 lines added)

@@ -0,0 +1,222 @@
package tsdb
import (
"fmt"
"regexp"
"strings"
"time"
"github.com/grafana/grafana-plugin-sdk-go/backend"
"github.com/grafana/grafana/pkg/tsdb/interval"
)
var (
defaultRes int64 = 1500
defaultMinInterval = time.Millisecond * 1
year = time.Hour * 24 * 365
day = time.Hour * 24
)
type Interval struct {
Text string
Value time.Duration
}
type intervalCalculator struct {
minInterval time.Duration
}
type Calculator interface {
Calculate(timerange backend.TimeRange, minInterval time.Duration) Interval
}
type CalculatorOptions struct {
MinInterval time.Duration
}
func NewCalculator(opts ...CalculatorOptions) *intervalCalculator {
calc := &intervalCalculator{}
for _, o := range opts {
if o.MinInterval == 0 {
calc.minInterval = defaultMinInterval
} else {
calc.minInterval = o.MinInterval
}
}
return calc
}
func (i *Interval) Milliseconds() int64 {
return i.Value.Nanoseconds() / int64(time.Millisecond)
}
func (ic *intervalCalculator) Calculate(timerange backend.TimeRange, minInterval time.Duration) Interval {
to := timerange.To.UnixNano()
from := timerange.From.UnixNano()
intrvl := time.Duration((to - from) / defaultRes)
if intrvl < minInterval {
return Interval{Text: interval.FormatDuration(minInterval), Value: minInterval}
}
rounded := roundInterval(intrvl)
return Interval{Text: interval.FormatDuration(rounded), Value: rounded}
}
// GetIntervalFrom returns the minimum interval.
// dsInterval is the string representation of data source min interval, if configured.
// queryInterval is the string representation of query interval (min interval), e.g. "10ms" or "10s".
// queryIntervalMS is a pre-calculated numeric representation of the query interval in milliseconds.
func GetIntervalFrom(dsInterval, queryInterval string, queryIntervalMS int64, defaultInterval time.Duration) (time.Duration, error) {
if queryInterval == "" {
if queryIntervalMS != 0 {
return time.Duration(queryIntervalMS) * time.Millisecond, nil
}
}
interval := queryInterval
if queryInterval == "" && dsInterval != "" {
interval = dsInterval
}
if interval == "" {
return defaultInterval, nil
}
interval = strings.Replace(strings.Replace(interval, "<", "", 1), ">", "", 1)
isPureNum, err := regexp.MatchString(`^\d+$`, interval)
if err != nil {
return time.Duration(0), err
}
if isPureNum {
interval += "s"
}
parsedInterval, err := time.ParseDuration(interval)
if err != nil {
return time.Duration(0), err
}
return parsedInterval, nil
}
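// Example (illustrative; values arbitrary). The query interval wins over the
// data source interval, bare numbers are parsed as seconds, and the default
// applies only when both are empty:
//
//	d, _ := GetIntervalFrom("1m", "30s", 0, 5*time.Second) // 30s
//	d, _ = GetIntervalFrom("1m", "", 0, 5*time.Second)     // 1m
//	d, _ = GetIntervalFrom("", "45", 0, 5*time.Second)     // 45s
//	d, _ = GetIntervalFrom("", "", 0, 5*time.Second)       // 5s (default)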
// FormatDuration converts a duration into the kbn format, e.g. 1m, 2h or 3d
func FormatDuration(inter time.Duration) string {
if inter >= year {
return fmt.Sprintf("%dy", inter/year)
}
if inter >= day {
return fmt.Sprintf("%dd", inter/day)
}
if inter >= time.Hour {
return fmt.Sprintf("%dh", inter/time.Hour)
}
if inter >= time.Minute {
return fmt.Sprintf("%dm", inter/time.Minute)
}
if inter >= time.Second {
return fmt.Sprintf("%ds", inter/time.Second)
}
if inter >= time.Millisecond {
return fmt.Sprintf("%dms", inter/time.Millisecond)
}
return "1ms"
}
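// For example (illustrative): FormatDuration(90*time.Second) == "1m",
// FormatDuration(36*time.Hour) == "1d"; anything below one millisecond
// falls through to "1ms".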
//nolint: gocyclo
func roundInterval(interval time.Duration) time.Duration {
switch {
// 0.015s
case interval <= 15*time.Millisecond:
return time.Millisecond * 10 // 0.01s
// 0.035s
case interval <= 35*time.Millisecond:
return time.Millisecond * 20 // 0.02s
// 0.075s
case interval <= 75*time.Millisecond:
return time.Millisecond * 50 // 0.05s
// 0.15s
case interval <= 150*time.Millisecond:
return time.Millisecond * 100 // 0.1s
// 0.35s
case interval <= 350*time.Millisecond:
return time.Millisecond * 200 // 0.2s
// 0.75s
case interval <= 750*time.Millisecond:
return time.Millisecond * 500 // 0.5s
// 1.5s
case interval <= 1500*time.Millisecond:
return time.Millisecond * 1000 // 1s
// 3.5s
case interval <= 3500*time.Millisecond:
return time.Millisecond * 2000 // 2s
// 7.5s
case interval <= 7500*time.Millisecond:
return time.Millisecond * 5000 // 5s
// 12.5s
case interval <= 12500*time.Millisecond:
return time.Millisecond * 10000 // 10s
// 17.5s
case interval <= 17500*time.Millisecond:
return time.Millisecond * 15000 // 15s
// 25s
case interval <= 25000*time.Millisecond:
return time.Millisecond * 20000 // 20s
// 45s
case interval <= 45000*time.Millisecond:
return time.Millisecond * 30000 // 30s
// 1.5m
case interval <= 90000*time.Millisecond:
return time.Millisecond * 60000 // 1m
// 3.5m
case interval <= 210000*time.Millisecond:
return time.Millisecond * 120000 // 2m
// 7.5m
case interval <= 450000*time.Millisecond:
return time.Millisecond * 300000 // 5m
// 12.5m
case interval <= 750000*time.Millisecond:
return time.Millisecond * 600000 // 10m
// 17.5m
case interval <= 1050000*time.Millisecond:
return time.Millisecond * 900000 // 15m
// 25m
case interval <= 1500000*time.Millisecond:
return time.Millisecond * 1200000 // 20m
// 45m
case interval <= 2700000*time.Millisecond:
return time.Millisecond * 1800000 // 30m
// 1.5h
case interval <= 5400000*time.Millisecond:
return time.Millisecond * 3600000 // 1h
// 2.5h
case interval <= 9000000*time.Millisecond:
return time.Millisecond * 7200000 // 2h
// 4.5h
case interval <= 16200000*time.Millisecond:
return time.Millisecond * 10800000 // 3h
// 9h
case interval <= 32400000*time.Millisecond:
return time.Millisecond * 21600000 // 6h
// 24h
case interval <= 86400000*time.Millisecond:
return time.Millisecond * 43200000 // 12h
// 48h
case interval <= 172800000*time.Millisecond:
return time.Millisecond * 86400000 // 24h
// 1w
case interval <= 604800000*time.Millisecond:
return time.Millisecond * 86400000 // 24h
// 3w
case interval <= 1814400000*time.Millisecond:
return time.Millisecond * 604800000 // 1w
// 6w
case interval < 3628800000*time.Millisecond:
return time.Millisecond * 2592000000 // 30d
default:
return time.Millisecond * 31536000000 // 1y
}
}
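
A minimal sketch of the new calculator in use (the five-minute range is an arbitrary example; the result follows from the default resolution of 1500 points):

	now := time.Now()
	calc := tsdb.NewCalculator(tsdb.CalculatorOptions{})
	tr := backend.TimeRange{From: now.Add(-5 * time.Minute), To: now}
	iv := calc.Calculate(tr, time.Millisecond)
	// iv.Text == "200ms", iv.Milliseconds() == 200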


@@ -0,0 +1,98 @@
package tsdb
import (
"testing"
"time"
"github.com/grafana/grafana-plugin-sdk-go/backend"
"github.com/grafana/grafana/pkg/models"
"github.com/stretchr/testify/assert"
)
func TestIntervalCalculator_Calculate(t *testing.T) {
calculator := NewCalculator(CalculatorOptions{})
timeNow := time.Now()
testCases := []struct {
name string
timeRange backend.TimeRange
expected string
}{
{"from 5m to now", backend.TimeRange{From: timeNow, To: timeNow.Add(5 * time.Minute)}, "200ms"},
{"from 15m to now", backend.TimeRange{From: timeNow, To: timeNow.Add(15 * time.Minute)}, "500ms"},
{"from 30m to now", backend.TimeRange{From: timeNow, To: timeNow.Add(30 * time.Minute)}, "1s"},
{"from 1h to now", backend.TimeRange{From: timeNow, To: timeNow.Add(60 * time.Minute)}, "2s"},
}
for _, tc := range testCases {
t.Run(tc.name, func(t *testing.T) {
interval := calculator.Calculate(tc.timeRange, time.Millisecond*1)
assert.Equal(t, tc.expected, interval.Text)
})
}
}
func TestRoundInterval(t *testing.T) {
testCases := []struct {
name string
interval time.Duration
expected time.Duration
}{
{"30ms", time.Millisecond * 30, time.Millisecond * 20},
{"45ms", time.Millisecond * 45, time.Millisecond * 50},
}
for _, tc := range testCases {
t.Run(tc.name, func(t *testing.T) {
assert.Equal(t, tc.expected, roundInterval(tc.interval))
})
}
}
func TestFormatDuration(t *testing.T) {
testCases := []struct {
name string
duration time.Duration
expected string
}{
{"61s", time.Second * 61, "1m"},
{"30ms", time.Millisecond * 30, "30ms"},
{"23h", time.Hour * 23, "23h"},
{"24h", time.Hour * 24, "1d"},
{"367d", time.Hour * 24 * 367, "1y"},
}
for _, tc := range testCases {
t.Run(tc.name, func(t *testing.T) {
assert.Equal(t, tc.expected, FormatDuration(tc.duration))
})
}
}
func TestGetIntervalFrom(t *testing.T) {
testCases := []struct {
name string
dsInfo *models.DataSource
queryInterval string
queryIntervalMs int64
defaultInterval time.Duration
expected time.Duration
}{
{"45s", nil, "45s", 0, time.Second * 15, time.Second * 45},
{"45", nil, "45", 0, time.Second * 15, time.Second * 45},
{"2m", nil, "2m", 0, time.Second * 15, time.Minute * 2},
{"intervalMs", nil, "", 45000, time.Second * 15, time.Second * 45},
{"intervalMs sub-seconds", nil, "", 45200, time.Second * 15, time.Millisecond * 45200},
{"defaultInterval when interval empty", nil, "", 0, time.Second * 15, time.Second * 15},
{"defaultInterval when intervalMs 0", nil, "", 0, time.Second * 15, time.Second * 15},
}
for _, tc := range testCases {
t.Run(tc.name, func(t *testing.T) {
actual, err := GetIntervalFrom(tc.queryInterval, "", tc.queryIntervalMs, tc.defaultInterval)
assert.Nil(t, err)
assert.Equal(t, tc.expected, actual)
})
}
}


@@ -14,24 +14,38 @@ import (
"time"
"github.com/Masterminds/semver"
"github.com/grafana/grafana-plugin-sdk-go/backend"
sdkhttpclient "github.com/grafana/grafana-plugin-sdk-go/backend/httpclient"
"github.com/grafana/grafana/pkg/components/simplejson"
"github.com/grafana/grafana/pkg/infra/httpclient"
"github.com/grafana/grafana/pkg/infra/log"
"github.com/grafana/grafana/pkg/tsdb/interval"
"github.com/grafana/grafana/pkg/tsdb"
"github.com/grafana/grafana/pkg/models"
"github.com/grafana/grafana/pkg/plugins"
"golang.org/x/net/context/ctxhttp"
)
type DatasourceInfo struct {
ID int64
HTTPClientOpts sdkhttpclient.Options
URL string
Database string
ESVersion *semver.Version
TimeField string
Interval string
TimeInterval string
MaxConcurrentShardRequests int64
IncludeFrozen bool
XPack bool
}
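// Illustrative only (field values assumed): a DatasourceInfo is now assembled
// from plugin settings instead of being read from models.DataSource, e.g.
//
//	ver, _ := semver.NewVersion("7.0.0")
//	ds := &DatasourceInfo{URL: "http://localhost:9200", ESVersion: ver,
//		TimeField: "@timestamp", Interval: "Daily"}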
const loggerName = "tsdb.elasticsearch.client"
var (
clientLog = log.New(loggerName)
)
var newDatasourceHttpClient = func(httpClientProvider httpclient.Provider, ds *models.DataSource) (*http.Client, error) {
return ds.GetHTTPClient(httpClientProvider)
var newDatasourceHttpClient = func(httpClientProvider httpclient.Provider, ds *DatasourceInfo) (*http.Client, error) {
return httpClientProvider.New(ds.HTTPClientOpts)
}
// Client represents a client which can interact with the Elasticsearch API
@@ -44,51 +58,9 @@ type Client interface {
EnableDebug()
}
func coerceVersion(v *simplejson.Json) (*semver.Version, error) {
versionString, err := v.String()
if err != nil {
versionNumber, err := v.Int()
if err != nil {
return nil, err
}
// Legacy version numbers (before Grafana 8)
// valid values were 2,5,56,60,70
switch versionNumber {
case 2:
return semver.NewVersion("2.0.0")
case 5:
return semver.NewVersion("5.0.0")
case 56:
return semver.NewVersion("5.6.0")
case 60:
return semver.NewVersion("6.0.0")
case 70:
return semver.NewVersion("7.0.0")
default:
return nil, fmt.Errorf("elasticsearch version=%d is not supported", versionNumber)
}
}
return semver.NewVersion(versionString)
}
// NewClient creates a new elasticsearch client
var NewClient = func(ctx context.Context, httpClientProvider httpclient.Provider, ds *models.DataSource, timeRange plugins.DataTimeRange) (Client, error) {
version, err := coerceVersion(ds.JsonData.Get("esVersion"))
if err != nil {
return nil, fmt.Errorf("elasticsearch version is required, err=%v", err)
}
timeField, err := ds.JsonData.Get("timeField").String()
if err != nil {
return nil, fmt.Errorf("elasticsearch time field name is required, err=%v", err)
}
indexInterval := ds.JsonData.Get("interval").MustString()
ip, err := newIndexPattern(indexInterval, ds.Database)
var NewClient = func(ctx context.Context, httpClientProvider httpclient.Provider, ds *DatasourceInfo, timeRange backend.TimeRange) (Client, error) {
ip, err := newIndexPattern(ds.Interval, ds.Database)
if err != nil {
return nil, err
}
@@ -98,14 +70,14 @@ var NewClient = func(ctx context.Context, httpClientProvider httpclient.Provider
return nil, err
}
clientLog.Info("Creating new client", "version", version.String(), "timeField", timeField, "indices", strings.Join(indices, ", "))
clientLog.Info("Creating new client", "version", ds.ESVersion, "timeField", ds.TimeField, "indices", strings.Join(indices, ", "))
return &baseClientImpl{
ctx: ctx,
httpClientProvider: httpClientProvider,
ds: ds,
version: version,
timeField: timeField,
version: ds.ESVersion,
timeField: ds.TimeField,
indices: indices,
timeRange: timeRange,
}, nil
@@ -114,11 +86,11 @@ var NewClient = func(ctx context.Context, httpClientProvider httpclient.Provider
type baseClientImpl struct {
ctx context.Context
httpClientProvider httpclient.Provider
ds *models.DataSource
ds *DatasourceInfo
version *semver.Version
timeField string
indices []string
timeRange plugins.DataTimeRange
timeRange backend.TimeRange
debugEnabled bool
}
@@ -131,19 +103,14 @@ func (c *baseClientImpl) GetTimeField() string {
}
func (c *baseClientImpl) GetMinInterval(queryInterval string) (time.Duration, error) {
return interval.GetIntervalFrom(c.ds, simplejson.NewFromAny(map[string]interface{}{
"interval": queryInterval,
}), 5*time.Second)
}
func (c *baseClientImpl) getSettings() *simplejson.Json {
return c.ds.JsonData
timeInterval := c.ds.TimeInterval
return tsdb.GetIntervalFrom(queryInterval, timeInterval, 0, 5*time.Second)
}
type multiRequest struct {
header map[string]interface{}
body interface{}
interval interval.Interval
interval tsdb.Interval
}
func (c *baseClientImpl) executeBatchRequest(uriPath, uriQuery string, requests []*multiRequest) (*response, error) {
@@ -185,7 +152,7 @@ func (c *baseClientImpl) encodeBatchRequests(requests []*multiRequest) ([]byte,
}
func (c *baseClientImpl) executeRequest(method, uriPath, uriQuery string, body []byte) (*response, error) {
u, err := url.Parse(c.ds.Url)
u, err := url.Parse(c.ds.URL)
if err != nil {
return nil, err
}
@@ -322,7 +289,10 @@ func (c *baseClientImpl) createMultiSearchRequests(searchRequests []*SearchReque
allowedVersionRange, _ := semver.NewConstraint(">=5.6.0, <7.0.0")
if allowedVersionRange.Check(c.version) {
maxConcurrentShardRequests := c.getSettings().Get("maxConcurrentShardRequests").MustInt(256)
maxConcurrentShardRequests := c.ds.MaxConcurrentShardRequests
if maxConcurrentShardRequests == 0 {
maxConcurrentShardRequests = 256
}
mr.header["max_concurrent_shard_requests"] = maxConcurrentShardRequests
}
}
@@ -337,16 +307,16 @@ func (c *baseClientImpl) getMultiSearchQueryParameters() string {
var qs []string
if c.version.Major() >= 7 {
maxConcurrentShardRequests := c.getSettings().Get("maxConcurrentShardRequests").MustInt(5)
maxConcurrentShardRequests := c.ds.MaxConcurrentShardRequests
if maxConcurrentShardRequests == 0 {
maxConcurrentShardRequests = 5
}
qs = append(qs, fmt.Sprintf("max_concurrent_shard_requests=%d", maxConcurrentShardRequests))
}
// Querying frozen indices was added in 6.6 with xpack
includeFrozen := c.getSettings().Get("includeFrozen").MustBool(false)
xpack := c.getSettings().Get("xpack").MustBool(false)
allowedFrozenIndicesVersionRange, _ := semver.NewConstraint(">=6.6.0")
if (allowedFrozenIndicesVersionRange.Check(c.version)) && includeFrozen && xpack {
if (allowedFrozenIndicesVersionRange.Check(c.version)) && c.ds.IncludeFrozen && c.ds.XPack {
qs = append(qs, "ignore_throttled=false")
}


@@ -3,158 +3,111 @@ package es
import (
"bytes"
"context"
"fmt"
"io/ioutil"
"net/http"
"net/http/httptest"
"testing"
"time"
"github.com/Masterminds/semver"
"github.com/grafana/grafana-plugin-sdk-go/backend"
"github.com/grafana/grafana/pkg/components/simplejson"
"github.com/grafana/grafana/pkg/infra/httpclient"
"github.com/grafana/grafana/pkg/models"
"github.com/grafana/grafana/pkg/plugins"
"github.com/grafana/grafana/pkg/tsdb/interval"
"github.com/grafana/grafana/pkg/tsdb"
"github.com/stretchr/testify/assert"
"github.com/stretchr/testify/require"
)
func TestNewClient(t *testing.T) {
t.Run("When no version set should return error", func(t *testing.T) {
ds := &models.DataSource{
JsonData: simplejson.NewFromAny(make(map[string]interface{})),
}
_, err := NewClient(context.Background(), httpclient.NewProvider(), ds, plugins.DataTimeRange{})
require.Error(t, err)
})
t.Run("When no time field name set should return error", func(t *testing.T) {
ds := &models.DataSource{
JsonData: simplejson.NewFromAny(map[string]interface{}{
"esVersion": 5,
}),
}
_, err := NewClient(context.Background(), httpclient.NewProvider(), ds, plugins.DataTimeRange{})
require.Error(t, err)
})
t.Run("When using legacy version numbers", func(t *testing.T) {
t.Run("When unsupported version set should return error", func(t *testing.T) {
ds := &models.DataSource{
JsonData: simplejson.NewFromAny(map[string]interface{}{
"esVersion": 6,
"timeField": "@timestamp",
}),
}
_, err := NewClient(context.Background(), httpclient.NewProvider(), ds, plugins.DataTimeRange{})
require.Error(t, err)
})
t.Run("When version 2 should return v2 client", func(t *testing.T) {
ds := &models.DataSource{
JsonData: simplejson.NewFromAny(map[string]interface{}{
"esVersion": 2,
"timeField": "@timestamp",
}),
version, err := semver.NewVersion("2.0.0")
require.NoError(t, err)
ds := &DatasourceInfo{
ESVersion: version,
TimeField: "@timestamp",
}
c, err := NewClient(context.Background(), httpclient.NewProvider(), ds, plugins.DataTimeRange{})
c, err := NewClient(context.Background(), httpclient.NewProvider(), ds, backend.TimeRange{})
require.NoError(t, err)
assert.Equal(t, "2.0.0", c.GetVersion().String())
})
t.Run("When version 5 should return v5 client", func(t *testing.T) {
ds := &models.DataSource{
JsonData: simplejson.NewFromAny(map[string]interface{}{
"esVersion": 5,
"timeField": "@timestamp",
}),
version, err := semver.NewVersion("5.0.0")
require.NoError(t, err)
ds := &DatasourceInfo{
ESVersion: version,
TimeField: "@timestamp",
}
c, err := NewClient(context.Background(), httpclient.NewProvider(), ds, plugins.DataTimeRange{})
c, err := NewClient(context.Background(), httpclient.NewProvider(), ds, backend.TimeRange{})
require.NoError(t, err)
assert.Equal(t, "5.0.0", c.GetVersion().String())
})
t.Run("When version 56 should return v5.6 client", func(t *testing.T) {
ds := &models.DataSource{
JsonData: simplejson.NewFromAny(map[string]interface{}{
"esVersion": 56,
"timeField": "@timestamp",
}),
version, err := semver.NewVersion("5.6.0")
require.NoError(t, err)
ds := &DatasourceInfo{
ESVersion: version,
TimeField: "@timestamp",
}
c, err := NewClient(context.Background(), httpclient.NewProvider(), ds, plugins.DataTimeRange{})
c, err := NewClient(context.Background(), httpclient.NewProvider(), ds, backend.TimeRange{})
require.NoError(t, err)
assert.Equal(t, "5.6.0", c.GetVersion().String())
})
t.Run("When version 60 should return v6.0 client", func(t *testing.T) {
ds := &models.DataSource{
JsonData: simplejson.NewFromAny(map[string]interface{}{
"esVersion": 60,
"timeField": "@timestamp",
}),
version, err := semver.NewVersion("6.0.0")
require.NoError(t, err)
ds := &DatasourceInfo{
ESVersion: version,
TimeField: "@timestamp",
}
c, err := NewClient(context.Background(), httpclient.NewProvider(), ds, plugins.DataTimeRange{})
c, err := NewClient(context.Background(), httpclient.NewProvider(), ds, backend.TimeRange{})
require.NoError(t, err)
assert.Equal(t, "6.0.0", c.GetVersion().String())
})
t.Run("When version 70 should return v7.0 client", func(t *testing.T) {
ds := &models.DataSource{
JsonData: simplejson.NewFromAny(map[string]interface{}{
"esVersion": 70,
"timeField": "@timestamp",
}),
version, err := semver.NewVersion("7.0.0")
require.NoError(t, err)
ds := &DatasourceInfo{
ESVersion: version,
TimeField: "@timestamp",
}
c, err := NewClient(context.Background(), httpclient.NewProvider(), ds, plugins.DataTimeRange{})
c, err := NewClient(context.Background(), httpclient.NewProvider(), ds, backend.TimeRange{})
require.NoError(t, err)
assert.Equal(t, "7.0.0", c.GetVersion().String())
})
})
t.Run("When version is a valid semver string should create a client", func(t *testing.T) {
version := "7.2.4"
ds := &models.DataSource{
JsonData: simplejson.NewFromAny(map[string]interface{}{
"esVersion": version,
"timeField": "@timestamp",
}),
}
c, err := NewClient(context.Background(), httpclient.NewProvider(), ds, plugins.DataTimeRange{})
version, err := semver.NewVersion("7.2.4")
require.NoError(t, err)
assert.Equal(t, version, c.GetVersion().String())
})
t.Run("When version is NOT a valid semver string should return error", func(t *testing.T) {
version := "7.NOT_VALID.4"
ds := &models.DataSource{
JsonData: simplejson.NewFromAny(map[string]interface{}{
"esVersion": version,
"timeField": "@timestamp",
}),
ds := &DatasourceInfo{
ESVersion: version,
TimeField: "@timestamp",
}
_, err := NewClient(context.Background(), httpclient.NewProvider(), ds, plugins.DataTimeRange{})
require.Error(t, err)
c, err := NewClient(context.Background(), httpclient.NewProvider(), ds, backend.TimeRange{})
require.NoError(t, err)
assert.Equal(t, version.String(), c.GetVersion().String())
})
}
func TestClient_ExecuteMultisearch(t *testing.T) {
httpClientScenario(t, "Given a fake http client and a v2.x client with response", &models.DataSource{
Database: "[metrics-]YYYY.MM.DD",
JsonData: simplejson.NewFromAny(map[string]interface{}{
"esVersion": 2,
"timeField": "@timestamp",
"interval": "Daily",
}),
version, err := semver.NewVersion("2.0.0")
require.NoError(t, err)
httpClientScenario(t, "Given a fake http client and a v2.x client with response", &DatasourceInfo{
Database: "[metrics-]YYYY.MM.DD",
ESVersion: version,
TimeField: "@timestamp",
Interval: "Daily",
}, func(sc *scenarioContext) {
sc.responseBody = `{
"responses": [
@@ -198,14 +151,14 @@ func TestClient_ExecuteMultisearch(t *testing.T) {
require.Len(t, res.Responses, 1)
})
httpClientScenario(t, "Given a fake http client and a v5.x client with response", &models.DataSource{
Database: "[metrics-]YYYY.MM.DD",
JsonData: simplejson.NewFromAny(map[string]interface{}{
"esVersion": 5,
"maxConcurrentShardRequests": 100,
"timeField": "@timestamp",
"interval": "Daily",
}),
version, err = semver.NewVersion("5.0.0")
require.NoError(t, err)
httpClientScenario(t, "Given a fake http client and a v5.x client with response", &DatasourceInfo{
Database: "[metrics-]YYYY.MM.DD",
ESVersion: version,
TimeField: "@timestamp",
Interval: "Daily",
MaxConcurrentShardRequests: 100,
}, func(sc *scenarioContext) {
sc.responseBody = `{
"responses": [
@@ -250,16 +203,16 @@ func TestClient_ExecuteMultisearch(t *testing.T) {
require.Len(t, res.Responses, 1)
})
httpClientScenario(t, "Given a fake http client and a v5.6 client with response", &models.DataSource{
Database: "[metrics-]YYYY.MM.DD",
JsonData: simplejson.NewFromAny(map[string]interface{}{
"esVersion": "5.6.0",
"maxConcurrentShardRequests": 100,
"timeField": "@timestamp",
"interval": "Daily",
"includeFrozen": true,
"xpack": true,
}),
version, err = semver.NewVersion("5.6.0")
require.NoError(t, err)
httpClientScenario(t, "Given a fake http client and a v5.6 client with response", &DatasourceInfo{
Database: "[metrics-]YYYY.MM.DD",
ESVersion: version,
TimeField: "@timestamp",
Interval: "Daily",
MaxConcurrentShardRequests: 100,
IncludeFrozen: true,
XPack: true,
}, func(sc *scenarioContext) {
sc.responseBody = `{
"responses": [
@@ -305,16 +258,16 @@ func TestClient_ExecuteMultisearch(t *testing.T) {
require.Len(t, res.Responses, 1)
})
httpClientScenario(t, "Given a fake http client and a v7.0 client with response", &models.DataSource{
Database: "[metrics-]YYYY.MM.DD",
JsonData: simplejson.NewFromAny(map[string]interface{}{
"esVersion": "7.0.0",
"maxConcurrentShardRequests": 6,
"timeField": "@timestamp",
"interval": "Daily",
"includeFrozen": true,
"xpack": true,
}),
version, err = semver.NewVersion("7.0.0")
require.NoError(t, err)
httpClientScenario(t, "Given a fake http client and a v7.0 client with response", &DatasourceInfo{
Database: "[metrics-]YYYY.MM.DD",
ESVersion: version,
TimeField: "@timestamp",
Interval: "Daily",
MaxConcurrentShardRequests: 6,
IncludeFrozen: true,
XPack: true,
}, func(sc *scenarioContext) {
sc.responseBody = `{
"responses": [
@@ -366,7 +319,7 @@ func createMultisearchForTest(t *testing.T, c Client) (*MultiSearchRequest, erro
t.Helper()
msb := c.MultiSearch()
s := msb.Search(interval.Interval{Value: 15 * time.Second, Text: "15s"})
s := msb.Search(tsdb.Interval{Value: 15 * time.Second, Text: "15s"})
s.Agg().DateHistogram("2", "@timestamp", func(a *DateHistogramAgg, ab AggBuilder) {
a.Interval = "$__interval"
@@ -387,7 +340,7 @@ type scenarioContext struct {
type scenarioFunc func(*scenarioContext)
func httpClientScenario(t *testing.T, desc string, ds *models.DataSource, fn scenarioFunc) {
func httpClientScenario(t *testing.T, desc string, ds *DatasourceInfo, fn scenarioFunc) {
t.Helper()
t.Run(desc, func(t *testing.T) {
@@ -407,13 +360,14 @@ func httpClientScenario(t *testing.T, desc string, ds *models.DataSource, fn sce
require.NoError(t, err)
rw.WriteHeader(sc.responseStatus)
}))
ds.Url = ts.URL
ds.URL = ts.URL
from := time.Date(2018, 5, 15, 17, 50, 0, 0, time.UTC)
to := time.Date(2018, 5, 15, 17, 55, 0, 0, time.UTC)
fromStr := fmt.Sprintf("%d", from.UnixNano()/int64(time.Millisecond))
toStr := fmt.Sprintf("%d", to.UnixNano()/int64(time.Millisecond))
timeRange := plugins.NewDataTimeRange(fromStr, toStr)
timeRange := backend.TimeRange{
From: from,
To: to,
}
c, err := NewClient(context.Background(), httpclient.NewProvider(), ds, timeRange)
require.NoError(t, err)
@@ -422,7 +376,7 @@ func httpClientScenario(t *testing.T, desc string, ds *models.DataSource, fn sce
currentNewDatasourceHTTPClient := newDatasourceHttpClient
newDatasourceHttpClient = func(httpClientProvider httpclient.Provider, ds *models.DataSource) (*http.Client, error) {
newDatasourceHttpClient = func(httpClientProvider httpclient.Provider, ds *DatasourceInfo) (*http.Client, error) {
return ts.Client(), nil
}


@@ -6,7 +6,7 @@ import (
"strings"
"time"
"github.com/grafana/grafana/pkg/plugins"
"github.com/grafana/grafana-plugin-sdk-go/backend"
)
const (
@@ -19,7 +19,7 @@ const (
)
type indexPattern interface {
GetIndices(timeRange plugins.DataTimeRange) ([]string, error)
GetIndices(timeRange backend.TimeRange) ([]string, error)
}
var newIndexPattern = func(interval string, pattern string) (indexPattern, error) {
@@ -34,7 +34,7 @@ type staticIndexPattern struct {
indexName string
}
func (ip *staticIndexPattern) GetIndices(timeRange plugins.DataTimeRange) ([]string, error) {
func (ip *staticIndexPattern) GetIndices(timeRange backend.TimeRange) ([]string, error) {
return []string{ip.indexName}, nil
}
@@ -73,9 +73,9 @@ func newDynamicIndexPattern(interval, pattern string) (*dynamicIndexPattern, err
}, nil
}
func (ip *dynamicIndexPattern) GetIndices(timeRange plugins.DataTimeRange) ([]string, error) {
from := timeRange.GetFromAsTimeUTC()
to := timeRange.GetToAsTimeUTC()
func (ip *dynamicIndexPattern) GetIndices(timeRange backend.TimeRange) ([]string, error) {
from := timeRange.From.UTC()
to := timeRange.To.UTC()
intervals := ip.intervalGenerator.Generate(from, to)
indices := make([]string, 0)
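A hedged usage sketch of the new signature (the pattern and time range mirror the tests below):

	ip, _ := newIndexPattern(intervalDaily, "[data-]YYYY.MM.DD")
	indices, _ := ip.GetIndices(backend.TimeRange{
		From: time.Date(2018, 5, 15, 17, 50, 0, 0, time.UTC),
		To:   time.Date(2018, 5, 15, 17, 55, 0, 0, time.UTC),
	})
	// indices == []string{"data-2018.05.15"}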


@@ -5,283 +5,292 @@ import (
"testing"
"time"
"github.com/grafana/grafana/pkg/plugins"
. "github.com/smartystreets/goconvey/convey"
"github.com/grafana/grafana-plugin-sdk-go/backend"
"github.com/stretchr/testify/require"
)
func TestIndexPattern(t *testing.T) {
Convey("Static index patterns", t, func() {
indexPatternScenario(noInterval, "data-*", plugins.DataTimeRange{}, func(indices []string) {
So(indices, ShouldHaveLength, 1)
So(indices[0], ShouldEqual, "data-*")
t.Run("Static index patterns", func(t *testing.T) {
indexPatternScenario(t, noInterval, "data-*", backend.TimeRange{}, func(indices []string) {
require.Len(t, indices, 1)
require.Equal(t, indices[0], "data-*")
})
indexPatternScenario(noInterval, "es-index-name", plugins.DataTimeRange{}, func(indices []string) {
So(indices, ShouldHaveLength, 1)
So(indices[0], ShouldEqual, "es-index-name")
indexPatternScenario(t, noInterval, "es-index-name", backend.TimeRange{}, func(indices []string) {
require.Len(t, indices, 1)
require.Equal(t, indices[0], "es-index-name")
})
})
Convey("Dynamic index patterns", t, func() {
from := fmt.Sprintf("%d", time.Date(2018, 5, 15, 17, 50, 0, 0, time.UTC).UnixNano()/int64(time.Millisecond))
to := fmt.Sprintf("%d", time.Date(2018, 5, 15, 17, 55, 0, 0, time.UTC).UnixNano()/int64(time.Millisecond))
t.Run("Dynamic index patterns", func(t *testing.T) {
from := time.Date(2018, 5, 15, 17, 50, 0, 0, time.UTC)
to := time.Date(2018, 5, 15, 17, 55, 0, 0, time.UTC)
timeRange := backend.TimeRange{
From: from,
To: to,
}
indexPatternScenario(intervalHourly, "[data-]YYYY.MM.DD.HH", plugins.NewDataTimeRange(from, to), func(indices []string) {
So(indices, ShouldHaveLength, 1)
So(indices[0], ShouldEqual, "data-2018.05.15.17")
indexPatternScenario(t, intervalHourly, "[data-]YYYY.MM.DD.HH", timeRange, func(indices []string) {
require.Len(t, indices, 1)
require.Equal(t, indices[0], "data-2018.05.15.17")
})
indexPatternScenario(intervalHourly, "YYYY.MM.DD.HH[-data]", plugins.NewDataTimeRange(from, to), func(indices []string) {
So(indices, ShouldHaveLength, 1)
So(indices[0], ShouldEqual, "2018.05.15.17-data")
indexPatternScenario(t, intervalHourly, "YYYY.MM.DD.HH[-data]", timeRange, func(indices []string) {
require.Len(t, indices, 1)
require.Equal(t, indices[0], "2018.05.15.17-data")
})
indexPatternScenario(intervalDaily, "[data-]YYYY.MM.DD", plugins.NewDataTimeRange(from, to), func(indices []string) {
So(indices, ShouldHaveLength, 1)
So(indices[0], ShouldEqual, "data-2018.05.15")
indexPatternScenario(t, intervalDaily, "[data-]YYYY.MM.DD", timeRange, func(indices []string) {
require.Len(t, indices, 1)
require.Equal(t, indices[0], "data-2018.05.15")
})
indexPatternScenario(intervalDaily, "YYYY.MM.DD[-data]", plugins.NewDataTimeRange(from, to), func(indices []string) {
So(indices, ShouldHaveLength, 1)
So(indices[0], ShouldEqual, "2018.05.15-data")
indexPatternScenario(t, intervalDaily, "YYYY.MM.DD[-data]", timeRange, func(indices []string) {
require.Len(t, indices, 1)
require.Equal(t, indices[0], "2018.05.15-data")
})
indexPatternScenario(intervalWeekly, "[data-]GGGG.WW", plugins.NewDataTimeRange(from, to), func(indices []string) {
So(indices, ShouldHaveLength, 1)
So(indices[0], ShouldEqual, "data-2018.20")
indexPatternScenario(t, intervalWeekly, "[data-]GGGG.WW", timeRange, func(indices []string) {
require.Len(t, indices, 1)
require.Equal(t, indices[0], "data-2018.20")
})
indexPatternScenario(intervalWeekly, "GGGG.WW[-data]", plugins.NewDataTimeRange(from, to), func(indices []string) {
So(indices, ShouldHaveLength, 1)
So(indices[0], ShouldEqual, "2018.20-data")
indexPatternScenario(t, intervalWeekly, "GGGG.WW[-data]", timeRange, func(indices []string) {
require.Len(t, indices, 1)
require.Equal(t, indices[0], "2018.20-data")
})
indexPatternScenario(intervalMonthly, "[data-]YYYY.MM", plugins.NewDataTimeRange(from, to), func(indices []string) {
So(indices, ShouldHaveLength, 1)
So(indices[0], ShouldEqual, "data-2018.05")
indexPatternScenario(t, intervalMonthly, "[data-]YYYY.MM", timeRange, func(indices []string) {
require.Len(t, indices, 1)
require.Equal(t, indices[0], "data-2018.05")
})
indexPatternScenario(intervalMonthly, "YYYY.MM[-data]", plugins.NewDataTimeRange(from, to), func(indices []string) {
So(indices, ShouldHaveLength, 1)
So(indices[0], ShouldEqual, "2018.05-data")
indexPatternScenario(t, intervalMonthly, "YYYY.MM[-data]", timeRange, func(indices []string) {
require.Len(t, indices, 1)
require.Equal(t, indices[0], "2018.05-data")
})
indexPatternScenario(intervalYearly, "[data-]YYYY", plugins.NewDataTimeRange(from, to), func(indices []string) {
So(indices, ShouldHaveLength, 1)
So(indices[0], ShouldEqual, "data-2018")
indexPatternScenario(t, intervalYearly, "[data-]YYYY", timeRange, func(indices []string) {
require.Len(t, indices, 1)
require.Equal(t, indices[0], "data-2018")
})
indexPatternScenario(intervalYearly, "YYYY[-data]", plugins.NewDataTimeRange(from, to), func(indices []string) {
So(indices, ShouldHaveLength, 1)
So(indices[0], ShouldEqual, "2018-data")
indexPatternScenario(t, intervalYearly, "YYYY[-data]", timeRange, func(indices []string) {
require.Len(t, indices, 1)
require.Equal(t, indices[0], "2018-data")
})
indexPatternScenario(intervalDaily, "YYYY[-data-]MM.DD", plugins.NewDataTimeRange(from, to), func(indices []string) {
So(indices, ShouldHaveLength, 1)
So(indices[0], ShouldEqual, "2018-data-05.15")
indexPatternScenario(t, intervalDaily, "YYYY[-data-]MM.DD", timeRange, func(indices []string) {
require.Len(t, indices, 1)
require.Equal(t, indices[0], "2018-data-05.15")
})
indexPatternScenario(intervalDaily, "[data-]YYYY[-moredata-]MM.DD", plugins.NewDataTimeRange(from, to), func(indices []string) {
So(indices, ShouldHaveLength, 1)
So(indices[0], ShouldEqual, "data-2018-moredata-05.15")
indexPatternScenario(t, intervalDaily, "[data-]YYYY[-moredata-]MM.DD", timeRange, func(indices []string) {
require.Len(t, indices, 1)
require.Equal(t, indices[0], "data-2018-moredata-05.15")
})
Convey("Should return 01 week", func() {
from = fmt.Sprintf("%d", time.Date(2018, 1, 15, 17, 50, 0, 0, time.UTC).UnixNano()/int64(time.Millisecond))
to = fmt.Sprintf("%d", time.Date(2018, 1, 15, 17, 55, 0, 0, time.UTC).UnixNano()/int64(time.Millisecond))
indexPatternScenario(intervalWeekly, "[data-]GGGG.WW", plugins.NewDataTimeRange(from, to), func(indices []string) {
So(indices, ShouldHaveLength, 1)
So(indices[0], ShouldEqual, "data-2018.03")
t.Run("Should return 01 week", func(t *testing.T) {
from = time.Date(2018, 1, 15, 17, 50, 0, 0, time.UTC)
to = time.Date(2018, 1, 15, 17, 55, 0, 0, time.UTC)
timeRange := backend.TimeRange{
From: from,
To: to,
}
indexPatternScenario(t, intervalWeekly, "[data-]GGGG.WW", timeRange, func(indices []string) {
require.Len(t, indices, 1)
require.Equal(t, indices[0], "data-2018.03")
})
})
})
Convey("Hourly interval", t, func() {
Convey("Should return 1 interval", func() {
t.Run("Hourly interval", func(t *testing.T) {
t.Run("Should return 1 interval", func(t *testing.T) {
from := time.Date(2018, 1, 1, 23, 1, 1, 0, time.UTC)
to := time.Date(2018, 1, 1, 23, 6, 0, 0, time.UTC)
intervals := (&hourlyInterval{}).Generate(from, to)
So(intervals, ShouldHaveLength, 1)
So(intervals[0], ShouldEqual, time.Date(2018, 1, 1, 23, 0, 0, 0, time.UTC))
require.Len(t, intervals, 1)
require.Equal(t, intervals[0], time.Date(2018, 1, 1, 23, 0, 0, 0, time.UTC))
})
Convey("Should return 2 intervals", func() {
t.Run("Should return 2 intervals", func(t *testing.T) {
from := time.Date(2018, 1, 1, 23, 1, 1, 0, time.UTC)
to := time.Date(2018, 1, 2, 0, 6, 0, 0, time.UTC)
intervals := (&hourlyInterval{}).Generate(from, to)
So(intervals, ShouldHaveLength, 2)
So(intervals[0], ShouldEqual, time.Date(2018, 1, 1, 23, 0, 0, 0, time.UTC))
So(intervals[1], ShouldEqual, time.Date(2018, 1, 2, 0, 0, 0, 0, time.UTC))
require.Len(t, intervals, 2)
require.Equal(t, intervals[0], time.Date(2018, 1, 1, 23, 0, 0, 0, time.UTC))
require.Equal(t, intervals[1], time.Date(2018, 1, 2, 0, 0, 0, 0, time.UTC))
})
Convey("Should return 10 intervals", func() {
t.Run("Should return 10 intervals", func(t *testing.T) {
from := time.Date(2018, 1, 1, 23, 1, 1, 0, time.UTC)
to := time.Date(2018, 1, 2, 8, 6, 0, 0, time.UTC)
intervals := (&hourlyInterval{}).Generate(from, to)
So(intervals, ShouldHaveLength, 10)
So(intervals[0], ShouldEqual, time.Date(2018, 1, 1, 23, 0, 0, 0, time.UTC))
So(intervals[4], ShouldEqual, time.Date(2018, 1, 2, 3, 0, 0, 0, time.UTC))
So(intervals[9], ShouldEqual, time.Date(2018, 1, 2, 8, 0, 0, 0, time.UTC))
require.Len(t, intervals, 10)
require.Equal(t, intervals[0], time.Date(2018, 1, 1, 23, 0, 0, 0, time.UTC))
require.Equal(t, intervals[4], time.Date(2018, 1, 2, 3, 0, 0, 0, time.UTC))
require.Equal(t, intervals[9], time.Date(2018, 1, 2, 8, 0, 0, 0, time.UTC))
})
})
Convey("Daily interval", t, func() {
Convey("Should return 1 day", func() {
t.Run("Daily interval", func(t *testing.T) {
t.Run("Should return 1 day", func(t *testing.T) {
from := time.Date(2018, 1, 1, 23, 1, 1, 0, time.UTC)
to := time.Date(2018, 1, 1, 23, 6, 0, 0, time.UTC)
intervals := (&dailyInterval{}).Generate(from, to)
So(intervals, ShouldHaveLength, 1)
So(intervals[0], ShouldEqual, time.Date(2018, 1, 1, 0, 0, 0, 0, time.UTC))
require.Len(t, intervals, 1)
require.Equal(t, intervals[0], time.Date(2018, 1, 1, 0, 0, 0, 0, time.UTC))
})
Convey("Should return 2 days", func() {
t.Run("Should return 2 days", func(t *testing.T) {
from := time.Date(2018, 1, 1, 23, 1, 1, 0, time.UTC)
to := time.Date(2018, 1, 2, 0, 6, 0, 0, time.UTC)
intervals := (&dailyInterval{}).Generate(from, to)
So(intervals, ShouldHaveLength, 2)
So(intervals[0], ShouldEqual, time.Date(2018, 1, 1, 0, 0, 0, 0, time.UTC))
So(intervals[1], ShouldEqual, time.Date(2018, 1, 2, 0, 0, 0, 0, time.UTC))
require.Len(t, intervals, 2)
require.Equal(t, intervals[0], time.Date(2018, 1, 1, 0, 0, 0, 0, time.UTC))
require.Equal(t, intervals[1], time.Date(2018, 1, 2, 0, 0, 0, 0, time.UTC))
})
Convey("Should return 32 days", func() {
t.Run("Should return 32 days", func(t *testing.T) {
from := time.Date(2018, 1, 1, 23, 1, 1, 0, time.UTC)
to := time.Date(2018, 2, 1, 8, 6, 0, 0, time.UTC)
intervals := (&dailyInterval{}).Generate(from, to)
So(intervals, ShouldHaveLength, 32)
So(intervals[0], ShouldEqual, time.Date(2018, 1, 1, 0, 0, 0, 0, time.UTC))
So(intervals[30], ShouldEqual, time.Date(2018, 1, 31, 0, 0, 0, 0, time.UTC))
So(intervals[31], ShouldEqual, time.Date(2018, 2, 1, 0, 0, 0, 0, time.UTC))
require.Len(t, intervals, 32)
require.Equal(t, intervals[0], time.Date(2018, 1, 1, 0, 0, 0, 0, time.UTC))
require.Equal(t, intervals[30], time.Date(2018, 1, 31, 0, 0, 0, 0, time.UTC))
require.Equal(t, intervals[31], time.Date(2018, 2, 1, 0, 0, 0, 0, time.UTC))
})
})
Convey("Weekly interval", t, func() {
Convey("Should return 1 week (1)", func() {
t.Run("Weekly interval", func(t *testing.T) {
t.Run("Should return 1 week (1)", func(t *testing.T) {
from := time.Date(2018, 1, 1, 23, 1, 1, 0, time.UTC)
to := time.Date(2018, 1, 1, 23, 6, 0, 0, time.UTC)
intervals := (&weeklyInterval{}).Generate(from, to)
So(intervals, ShouldHaveLength, 1)
So(intervals[0], ShouldEqual, time.Date(2018, 1, 1, 0, 0, 0, 0, time.UTC))
require.Len(t, intervals, 1)
require.Equal(t, intervals[0], time.Date(2018, 1, 1, 0, 0, 0, 0, time.UTC))
})
Convey("Should return 1 week (2)", func() {
t.Run("Should return 1 week (2)", func(t *testing.T) {
from := time.Date(2017, 1, 1, 23, 1, 1, 0, time.UTC)
to := time.Date(2017, 1, 1, 23, 6, 0, 0, time.UTC)
intervals := (&weeklyInterval{}).Generate(from, to)
So(intervals, ShouldHaveLength, 1)
So(intervals[0], ShouldEqual, time.Date(2016, 12, 26, 0, 0, 0, 0, time.UTC))
require.Len(t, intervals, 1)
require.Equal(t, intervals[0], time.Date(2016, 12, 26, 0, 0, 0, 0, time.UTC))
})
Convey("Should return 2 weeks (1)", func() {
t.Run("Should return 2 weeks (1)", func(t *testing.T) {
from := time.Date(2018, 1, 1, 23, 1, 1, 0, time.UTC)
to := time.Date(2018, 1, 10, 23, 6, 0, 0, time.UTC)
intervals := (&weeklyInterval{}).Generate(from, to)
So(intervals, ShouldHaveLength, 2)
So(intervals[0], ShouldEqual, time.Date(2018, 1, 1, 0, 0, 0, 0, time.UTC))
So(intervals[1], ShouldEqual, time.Date(2018, 1, 8, 0, 0, 0, 0, time.UTC))
require.Len(t, intervals, 2)
require.Equal(t, intervals[0], time.Date(2018, 1, 1, 0, 0, 0, 0, time.UTC))
require.Equal(t, intervals[1], time.Date(2018, 1, 8, 0, 0, 0, 0, time.UTC))
})
Convey("Should return 2 weeks (2)", func() {
t.Run("Should return 2 weeks (2)", func(t *testing.T) {
from := time.Date(2017, 1, 1, 23, 1, 1, 0, time.UTC)
to := time.Date(2017, 1, 8, 23, 6, 0, 0, time.UTC)
intervals := (&weeklyInterval{}).Generate(from, to)
So(intervals, ShouldHaveLength, 2)
So(intervals[0], ShouldEqual, time.Date(2016, 12, 26, 0, 0, 0, 0, time.UTC))
So(intervals[1], ShouldEqual, time.Date(2017, 1, 2, 0, 0, 0, 0, time.UTC))
require.Len(t, intervals, 2)
require.Equal(t, intervals[0], time.Date(2016, 12, 26, 0, 0, 0, 0, time.UTC))
require.Equal(t, intervals[1], time.Date(2017, 1, 2, 0, 0, 0, 0, time.UTC))
})
Convey("Should return 3 weeks (1)", func() {
t.Run("Should return 3 weeks (1)", func(t *testing.T) {
from := time.Date(2018, 1, 1, 23, 1, 1, 0, time.UTC)
to := time.Date(2018, 1, 21, 23, 6, 0, 0, time.UTC)
intervals := (&weeklyInterval{}).Generate(from, to)
So(intervals, ShouldHaveLength, 3)
So(intervals[0], ShouldEqual, time.Date(2018, 1, 1, 0, 0, 0, 0, time.UTC))
So(intervals[1], ShouldEqual, time.Date(2018, 1, 8, 0, 0, 0, 0, time.UTC))
So(intervals[2], ShouldEqual, time.Date(2018, 1, 15, 0, 0, 0, 0, time.UTC))
require.Len(t, intervals, 3)
require.Equal(t, intervals[0], time.Date(2018, 1, 1, 0, 0, 0, 0, time.UTC))
require.Equal(t, intervals[1], time.Date(2018, 1, 8, 0, 0, 0, 0, time.UTC))
require.Equal(t, intervals[2], time.Date(2018, 1, 15, 0, 0, 0, 0, time.UTC))
})
Convey("Should return 3 weeks (2)", func() {
t.Run("Should return 3 weeks (2)", func(t *testing.T) {
from := time.Date(2017, 1, 1, 23, 1, 1, 0, time.UTC)
to := time.Date(2017, 1, 9, 23, 6, 0, 0, time.UTC)
intervals := (&weeklyInterval{}).Generate(from, to)
So(intervals, ShouldHaveLength, 3)
So(intervals[0], ShouldEqual, time.Date(2016, 12, 26, 0, 0, 0, 0, time.UTC))
So(intervals[1], ShouldEqual, time.Date(2017, 1, 2, 0, 0, 0, 0, time.UTC))
So(intervals[2], ShouldEqual, time.Date(2017, 1, 9, 0, 0, 0, 0, time.UTC))
require.Len(t, intervals, 3)
require.Equal(t, intervals[0], time.Date(2016, 12, 26, 0, 0, 0, 0, time.UTC))
require.Equal(t, intervals[1], time.Date(2017, 1, 2, 0, 0, 0, 0, time.UTC))
require.Equal(t, intervals[2], time.Date(2017, 1, 9, 0, 0, 0, 0, time.UTC))
})
})
Convey("Monthly interval", t, func() {
Convey("Should return 1 month", func() {
t.Run("Monthly interval", func(t *testing.T) {
t.Run("Should return 1 month", func(t *testing.T) {
from := time.Date(2018, 1, 1, 23, 1, 1, 0, time.UTC)
to := time.Date(2018, 1, 1, 23, 6, 0, 0, time.UTC)
intervals := (&monthlyInterval{}).Generate(from, to)
So(intervals, ShouldHaveLength, 1)
So(intervals[0], ShouldEqual, time.Date(2018, 1, 1, 0, 0, 0, 0, time.UTC))
require.Len(t, intervals, 1)
require.Equal(t, intervals[0], time.Date(2018, 1, 1, 0, 0, 0, 0, time.UTC))
})
Convey("Should return 2 months", func() {
t.Run("Should return 2 months", func(t *testing.T) {
from := time.Date(2018, 1, 1, 23, 1, 1, 0, time.UTC)
to := time.Date(2018, 2, 2, 0, 6, 0, 0, time.UTC)
intervals := (&monthlyInterval{}).Generate(from, to)
So(intervals, ShouldHaveLength, 2)
So(intervals[0], ShouldEqual, time.Date(2018, 1, 1, 0, 0, 0, 0, time.UTC))
So(intervals[1], ShouldEqual, time.Date(2018, 2, 1, 0, 0, 0, 0, time.UTC))
require.Len(t, intervals, 2)
require.Equal(t, intervals[0], time.Date(2018, 1, 1, 0, 0, 0, 0, time.UTC))
require.Equal(t, intervals[1], time.Date(2018, 2, 1, 0, 0, 0, 0, time.UTC))
})
Convey("Should return 14 months", func() {
t.Run("Should return 14 months", func(t *testing.T) {
from := time.Date(2017, 1, 1, 23, 1, 1, 0, time.UTC)
to := time.Date(2018, 2, 1, 8, 6, 0, 0, time.UTC)
intervals := (&monthlyInterval{}).Generate(from, to)
So(intervals, ShouldHaveLength, 14)
So(intervals[0], ShouldEqual, time.Date(2017, 1, 1, 0, 0, 0, 0, time.UTC))
So(intervals[13], ShouldEqual, time.Date(2018, 2, 1, 0, 0, 0, 0, time.UTC))
require.Len(t, intervals, 14)
require.Equal(t, intervals[0], time.Date(2017, 1, 1, 0, 0, 0, 0, time.UTC))
require.Equal(t, intervals[13], time.Date(2018, 2, 1, 0, 0, 0, 0, time.UTC))
})
})
Convey("Yearly interval", t, func() {
Convey("Should return 1 year (hour diff)", func() {
t.Run("Yearly interval", func(t *testing.T) {
t.Run("Should return 1 year (hour diff)", func(t *testing.T) {
from := time.Date(2018, 2, 1, 23, 1, 1, 0, time.UTC)
to := time.Date(2018, 2, 1, 23, 6, 0, 0, time.UTC)
intervals := (&yearlyInterval{}).Generate(from, to)
So(intervals, ShouldHaveLength, 1)
So(intervals[0], ShouldEqual, time.Date(2018, 1, 1, 0, 0, 0, 0, time.UTC))
require.Len(t, intervals, 1)
require.Equal(t, intervals[0], time.Date(2018, 1, 1, 0, 0, 0, 0, time.UTC))
})
Convey("Should return 1 year (month diff)", func() {
t.Run("Should return 1 year (month diff)", func(t *testing.T) {
from := time.Date(2018, 2, 1, 23, 1, 1, 0, time.UTC)
to := time.Date(2018, 12, 31, 23, 59, 59, 0, time.UTC)
intervals := (&yearlyInterval{}).Generate(from, to)
So(intervals, ShouldHaveLength, 1)
So(intervals[0], ShouldEqual, time.Date(2018, 1, 1, 0, 0, 0, 0, time.UTC))
require.Len(t, intervals, 1)
require.Equal(t, intervals[0], time.Date(2018, 1, 1, 0, 0, 0, 0, time.UTC))
})
Convey("Should return 2 years", func() {
t.Run("Should return 2 years", func(t *testing.T) {
from := time.Date(2018, 2, 1, 23, 1, 1, 0, time.UTC)
to := time.Date(2019, 1, 1, 23, 59, 59, 0, time.UTC)
intervals := (&yearlyInterval{}).Generate(from, to)
So(intervals, ShouldHaveLength, 2)
So(intervals[0], ShouldEqual, time.Date(2018, 1, 1, 0, 0, 0, 0, time.UTC))
So(intervals[1], ShouldEqual, time.Date(2019, 1, 1, 0, 0, 0, 0, time.UTC))
require.Len(t, intervals, 2)
require.Equal(t, intervals[0], time.Date(2018, 1, 1, 0, 0, 0, 0, time.UTC))
require.Equal(t, intervals[1], time.Date(2019, 1, 1, 0, 0, 0, 0, time.UTC))
})
Convey("Should return 5 years", func() {
t.Run("Should return 5 years", func(t *testing.T) {
from := time.Date(2014, 1, 1, 23, 1, 1, 0, time.UTC)
to := time.Date(2018, 11, 1, 23, 59, 59, 0, time.UTC)
intervals := (&yearlyInterval{}).Generate(from, to)
So(intervals, ShouldHaveLength, 5)
So(intervals[0], ShouldEqual, time.Date(2014, 1, 1, 0, 0, 0, 0, time.UTC))
So(intervals[4], ShouldEqual, time.Date(2018, 1, 1, 0, 0, 0, 0, time.UTC))
require.Len(t, intervals, 5)
require.Equal(t, intervals[0], time.Date(2014, 1, 1, 0, 0, 0, 0, time.UTC))
require.Equal(t, intervals[4], time.Date(2018, 1, 1, 0, 0, 0, 0, time.UTC))
})
})
}
func indexPatternScenario(interval string, pattern string, timeRange plugins.DataTimeRange, fn func(indices []string)) {
Convey(fmt.Sprintf("Index pattern (interval=%s, index=%s", interval, pattern), func() {
func indexPatternScenario(t *testing.T, interval string, pattern string, timeRange backend.TimeRange, fn func(indices []string)) {
testName := fmt.Sprintf("Index pattern (interval=%s, index=%s)", interval, pattern)
t.Run(testName, func(t *testing.T) {
ip, err := newIndexPattern(interval, pattern)
So(err, ShouldBeNil)
So(ip, ShouldNotBeNil)
require.NoError(t, err)
require.NotNil(t, ip)
indices, err := ip.GetIndices(timeRange)
So(err, ShouldBeNil)
require.NoError(t, err)
fn(indices)
})
}


@@ -5,7 +5,7 @@ import (
"net/http"
"github.com/grafana/grafana/pkg/components/simplejson"
"github.com/grafana/grafana/pkg/tsdb/interval"
"github.com/grafana/grafana/pkg/tsdb"
)
type response struct {
@@ -32,7 +32,7 @@ type SearchDebugInfo struct {
// SearchRequest represents a search request
type SearchRequest struct {
Index string
Interval interval.Interval
Interval tsdb.Interval
Size int
Sort map[string]interface{}
Query *Query


@@ -4,13 +4,13 @@ import (
"strings"
"github.com/Masterminds/semver"
"github.com/grafana/grafana/pkg/tsdb/interval"
"github.com/grafana/grafana/pkg/tsdb"
)
// SearchRequestBuilder represents a builder which can build a search request
type SearchRequestBuilder struct {
version *semver.Version
interval interval.Interval
interval tsdb.Interval
index string
size int
sort map[string]interface{}
@@ -20,7 +20,7 @@ type SearchRequestBuilder struct {
}
// NewSearchRequestBuilder creates a new search request builder
func NewSearchRequestBuilder(version *semver.Version, interval interval.Interval) *SearchRequestBuilder {
func NewSearchRequestBuilder(version *semver.Version, interval tsdb.Interval) *SearchRequestBuilder {
builder := &SearchRequestBuilder{
version: version,
interval: interval,
@@ -129,7 +129,7 @@ func NewMultiSearchRequestBuilder(version *semver.Version) *MultiSearchRequestBu
}
// Search initiates and returns a new search request builder
func (m *MultiSearchRequestBuilder) Search(interval interval.Interval) *SearchRequestBuilder {
func (m *MultiSearchRequestBuilder) Search(interval tsdb.Interval) *SearchRequestBuilder {
b := NewSearchRequestBuilder(m.version, interval)
m.requestBuilders = append(m.requestBuilders, b)
return b
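Builder usage after the type swap, sketched (the version value is arbitrary):

	version5, _ := semver.NewVersion("5.0.0")
	b := NewSearchRequestBuilder(version5, tsdb.Interval{Value: 15 * time.Second, Text: "15s"})
	sr, err := b.Build()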


@@ -7,8 +7,7 @@ import (
"github.com/Masterminds/semver"
"github.com/grafana/grafana/pkg/components/simplejson"
"github.com/grafana/grafana/pkg/tsdb/interval"
"github.com/grafana/grafana/pkg/tsdb"
. "github.com/smartystreets/goconvey/convey"
)
@@ -17,7 +16,7 @@ func TestSearchRequest(t *testing.T) {
timeField := "@timestamp"
Convey("Given new search request builder for es version 5", func() {
version5, _ := semver.NewVersion("5.0.0")
b := NewSearchRequestBuilder(version5, interval.Interval{Value: 15 * time.Second, Text: "15s"})
b := NewSearchRequestBuilder(version5, tsdb.Interval{Value: 15 * time.Second, Text: "15s"})
Convey("When building search request", func() {
sr, err := b.Build()
@@ -393,7 +392,7 @@ func TestSearchRequest(t *testing.T) {
Convey("Given new search request builder for es version 2", func() {
version2, _ := semver.NewVersion("2.0.0")
b := NewSearchRequestBuilder(version2, interval.Interval{Value: 15 * time.Second, Text: "15s"})
b := NewSearchRequestBuilder(version2, tsdb.Interval{Value: 15 * time.Second, Text: "15s"})
Convey("When adding doc value field", func() {
b.AddDocValueField(timeField)
@@ -453,7 +452,7 @@ func TestMultiSearchRequest(t *testing.T) {
b := NewMultiSearchRequestBuilder(version2)
Convey("When adding one search request", func() {
b.Search(interval.Interval{Value: 15 * time.Second, Text: "15s"})
b.Search(tsdb.Interval{Value: 15 * time.Second, Text: "15s"})
Convey("When building search request should contain one search request", func() {
mr, err := b.Build()
@@ -463,8 +462,8 @@ func TestMultiSearchRequest(t *testing.T) {
})
Convey("When adding two search requests", func() {
b.Search(interval.Interval{Value: 15 * time.Second, Text: "15s"})
b.Search(interval.Interval{Value: 15 * time.Second, Text: "15s"})
b.Search(tsdb.Interval{Value: 15 * time.Second, Text: "15s"})
b.Search(tsdb.Interval{Value: 15 * time.Second, Text: "15s"})
Convey("When building search request should contain two search requests", func() {
mr, err := b.Build()


@@ -2,50 +2,185 @@ package elasticsearch
import (
"context"
"encoding/json"
"errors"
"fmt"
"github.com/Masterminds/semver"
"github.com/grafana/grafana-plugin-sdk-go/backend"
"github.com/grafana/grafana-plugin-sdk-go/backend/datasource"
"github.com/grafana/grafana-plugin-sdk-go/backend/instancemgmt"
"github.com/grafana/grafana/pkg/infra/httpclient"
"github.com/grafana/grafana/pkg/models"
"github.com/grafana/grafana/pkg/plugins"
"github.com/grafana/grafana/pkg/infra/log"
"github.com/grafana/grafana/pkg/plugins/backendplugin"
"github.com/grafana/grafana/pkg/plugins/backendplugin/coreplugin"
"github.com/grafana/grafana/pkg/registry"
"github.com/grafana/grafana/pkg/tsdb"
es "github.com/grafana/grafana/pkg/tsdb/elasticsearch/client"
"github.com/grafana/grafana/pkg/tsdb/interval"
)
// Executor represents a handler for elasticsearch datasource requests
type Executor struct {
httpClientProvider httpclient.Provider
intervalCalculator interval.Calculator
var eslog = log.New("tsdb.elasticsearch")
func init() {
registry.Register(&registry.Descriptor{
Name: "ElasticSearchService",
InitPriority: registry.Low,
Instance: &Service{},
})
}
// New creates a new Executor func.
// nolint:staticcheck // plugins.DataPlugin deprecated
func New(httpClientProvider httpclient.Provider) func(*models.DataSource) (plugins.DataPlugin, error) {
// nolint:staticcheck // plugins.DataPlugin deprecated
return func(dsInfo *models.DataSource) (plugins.DataPlugin, error) {
return &Executor{
httpClientProvider: httpClientProvider,
intervalCalculator: interval.NewCalculator(),
}, nil
type Service struct {
BackendPluginManager backendplugin.Manager `inject:""`
HTTPClientProvider httpclient.Provider `inject:""`
intervalCalculator tsdb.Calculator
im instancemgmt.InstanceManager
}
func (s *Service) Init() error {
eslog.Debug("initializing")
im := datasource.NewInstanceManager(newInstanceSettings())
factory := coreplugin.New(backend.ServeOpts{
QueryDataHandler: newService(im, s.HTTPClientProvider),
})
if err := s.BackendPluginManager.Register("elasticsearch", factory); err != nil {
eslog.Error("Failed to register plugin", "error", err)
}
return nil
}
// newService creates a new Service.
func newService(im instancemgmt.InstanceManager, httpClientProvider httpclient.Provider) *Service {
return &Service{
im: im,
HTTPClientProvider: httpClientProvider,
intervalCalculator: tsdb.NewCalculator(),
}
}
// Query handles an elasticsearch datasource request
//nolint: staticcheck // plugins.DataResponse deprecated
func (e *Executor) DataQuery(ctx context.Context, dsInfo *models.DataSource,
tsdbQuery plugins.DataQuery) (plugins.DataResponse, error) {
if len(tsdbQuery.Queries) == 0 {
return plugins.DataResponse{}, fmt.Errorf("query contains no queries")
func (s *Service) QueryData(ctx context.Context, req *backend.QueryDataRequest) (*backend.QueryDataResponse, error) {
if len(req.Queries) == 0 {
return &backend.QueryDataResponse{}, fmt.Errorf("query contains no queries")
}
client, err := es.NewClient(ctx, e.httpClientProvider, dsInfo, *tsdbQuery.TimeRange)
dsInfo, err := s.getDSInfo(req.PluginContext)
if err != nil {
return plugins.DataResponse{}, err
return &backend.QueryDataResponse{}, err
}
if tsdbQuery.Debug {
client.EnableDebug()
client, err := es.NewClient(ctx, s.HTTPClientProvider, dsInfo, req.Queries[0].TimeRange)
if err != nil {
return &backend.QueryDataResponse{}, err
}
query := newTimeSeriesQuery(client, tsdbQuery, e.intervalCalculator)
query := newTimeSeriesQuery(client, req.Queries, s.intervalCalculator)
return query.execute()
}
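// Illustrative call shape (pCtx and tr are assumed inputs): the SDK delivers
// queries in batches, settings are resolved per plugin context through the
// instance manager, and every query in the request shares one ES client built
// from the first query's time range.
//
//	resp, err := s.QueryData(ctx, &backend.QueryDataRequest{
//		PluginContext: pCtx,
//		Queries:       []backend.DataQuery{{RefID: "A", TimeRange: tr}},
//	})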
func newInstanceSettings() datasource.InstanceFactoryFunc {
return func(settings backend.DataSourceInstanceSettings) (instancemgmt.Instance, error) {
jsonData := map[string]interface{}{}
err := json.Unmarshal(settings.JSONData, &jsonData)
if err != nil {
return nil, fmt.Errorf("error reading settings: %w", err)
}
httpCliOpts, err := settings.HTTPClientOptions()
if err != nil {
return nil, fmt.Errorf("error getting http options: %w", err)
}
version, err := coerceVersion(jsonData["esVersion"])
if err != nil {
return nil, fmt.Errorf("elasticsearch version is required, err=%v", err)
}
timeField, ok := jsonData["timeField"].(string)
if !ok {
return nil, errors.New("timeField cannot be cast to string")
}
if timeField == "" {
return nil, errors.New("elasticsearch time field name is required")
}
interval, ok := jsonData["interval"].(string)
if !ok {
interval = ""
}
timeInterval, ok := jsonData["timeInterval"].(string)
if !ok {
timeInterval = ""
}
maxConcurrentShardRequests, ok := jsonData["maxConcurrentShardRequests"].(float64)
if !ok {
maxConcurrentShardRequests = 256
}
includeFrozen, ok := jsonData["includeFrozen"].(bool)
if !ok {
includeFrozen = false
}
xpack, ok := jsonData["xpack"].(bool)
if !ok {
xpack = false
}
model := es.DatasourceInfo{
ID: settings.ID,
URL: settings.URL,
HTTPClientOpts: httpCliOpts,
Database: settings.Database,
MaxConcurrentShardRequests: int64(maxConcurrentShardRequests),
ESVersion: version,
TimeField: timeField,
Interval: interval,
TimeInterval: timeInterval,
IncludeFrozen: includeFrozen,
XPack: xpack,
}
return model, nil
}
}
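// For illustration (field values assumed), the JSONData parsed above looks like:
//
//	{
//	  "esVersion": "7.0.0", "timeField": "@timestamp", "interval": "Daily",
//	  "timeInterval": "10s", "maxConcurrentShardRequests": 5,
//	  "includeFrozen": false, "xpack": false
//	}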
func (s *Service) getDSInfo(pluginCtx backend.PluginContext) (*es.DatasourceInfo, error) {
i, err := s.im.Get(pluginCtx)
if err != nil {
return nil, err
}
instance := i.(es.DatasourceInfo)
return &instance, nil
}
func coerceVersion(v interface{}) (*semver.Version, error) {
versionString, ok := v.(string)
if ok {
return semver.NewVersion(versionString)
}
versionNumber, ok := v.(float64)
if !ok {
return nil, fmt.Errorf("elasticsearch version %v, cannot be cast to int", v)
}
// Legacy version numbers (before Grafana 8)
// valid values were 2,5,56,60,70
switch int64(versionNumber) {
case 2:
return semver.NewVersion("2.0.0")
case 5:
return semver.NewVersion("5.0.0")
case 56:
return semver.NewVersion("5.6.0")
case 60:
return semver.NewVersion("6.0.0")
case 70:
return semver.NewVersion("7.0.0")
default:
return nil, fmt.Errorf("elasticsearch version=%d is not supported", int64(versionNumber))
}
}
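// Examples (illustrative): coerceVersion(float64(56)) yields "5.6.0" via the
// legacy mapping above, while coerceVersion("7.2.4") is parsed as plain semver.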


@@ -0,0 +1,191 @@
package elasticsearch
import (
"encoding/json"
"testing"
"github.com/grafana/grafana-plugin-sdk-go/backend"
"github.com/stretchr/testify/require"
)
type datasourceInfo struct {
ESVersion interface{} `json:"esVersion"`
TimeField interface{} `json:"timeField"`
MaxConcurrentShardRequests int64 `json:"maxConcurrentShardRequests"`
Interval string `json:"interval"`
TimeInterval string `json:"timeInterval"`
}
func TestCoerceVersion(t *testing.T) {
t.Run("version is string", func(t *testing.T) {
ver := "7.0.0"
smvr, err := coerceVersion(ver)
require.NoError(t, err)
require.NotNil(t, smvr)
require.Equal(t, "7.0.0", smvr.String())
})
t.Run("version is int", func(t *testing.T) {
testCases := []struct {
intVersion float64
stringVersion string
}{
{intVersion: 2, stringVersion: "2.0.0"},
{intVersion: 5, stringVersion: "5.0.0"},
{intVersion: 56, stringVersion: "5.6.0"},
{intVersion: 60, stringVersion: "6.0.0"},
{intVersion: 70, stringVersion: "7.0.0"},
}
for _, tc := range testCases {
smvr, err := coerceVersion(tc.intVersion)
require.NoError(t, err)
require.Equal(t, tc.stringVersion, smvr.String())
}
smvr, err := coerceVersion(12345)
require.Error(t, err)
require.Nil(t, smvr)
})
}
func TestNewInstanceSettings(t *testing.T) {
t.Run("fields exist", func(t *testing.T) {
dsInfo := datasourceInfo{
ESVersion: "7.0.0",
TimeField: "@timestamp",
MaxConcurrentShardRequests: 5,
}
settingsJSON, err := json.Marshal(dsInfo)
require.NoError(t, err)
dsSettings := backend.DataSourceInstanceSettings{
JSONData: json.RawMessage(settingsJSON),
}
_, err = newInstanceSettings()(dsSettings)
require.NoError(t, err)
})
t.Run("esVersion", func(t *testing.T) {
t.Run("correct version", func(t *testing.T) {
dsInfo := datasourceInfo{
ESVersion: 5,
TimeField: "@timestamp",
MaxConcurrentShardRequests: 5,
Interval: "Daily",
TimeInterval: "TimeInterval",
}
settingsJSON, err := json.Marshal(dsInfo)
require.NoError(t, err)
dsSettings := backend.DataSourceInstanceSettings{
JSONData: json.RawMessage(settingsJSON),
}
_, err = newInstanceSettings()(dsSettings)
require.NoError(t, err)
})
t.Run("faulty version int", func(t *testing.T) {
dsInfo := datasourceInfo{
ESVersion: 1234,
TimeField: "@timestamp",
MaxConcurrentShardRequests: 5,
Interval: "Daily",
TimeInterval: "TimeInterval",
}
settingsJSON, err := json.Marshal(dsInfo)
require.NoError(t, err)
dsSettings := backend.DataSourceInstanceSettings{
JSONData: json.RawMessage(settingsJSON),
}
_, err = newInstanceSettings()(dsSettings)
require.EqualError(t, err, "elasticsearch version is required, err=elasticsearch version=1234 is not supported")
})
t.Run("faulty version string", func(t *testing.T) {
dsInfo := datasourceInfo{
ESVersion: "NOT_VALID",
TimeField: "@timestamp",
MaxConcurrentShardRequests: 5,
Interval: "Daily",
TimeInterval: "TimeInterval",
}
settingsJSON, err := json.Marshal(dsInfo)
require.NoError(t, err)
dsSettings := backend.DataSourceInstanceSettings{
JSONData: json.RawMessage(settingsJSON),
}
_, err = newInstanceSettings()(dsSettings)
require.EqualError(t, err, "elasticsearch version is required, err=Invalid Semantic Version")
})
t.Run("no version", func(t *testing.T) {
dsInfo := datasourceInfo{
TimeField: "@timestamp",
MaxConcurrentShardRequests: 5,
Interval: "Daily",
TimeInterval: "TimeInterval",
}
settingsJSON, err := json.Marshal(dsInfo)
require.NoError(t, err)
dsSettings := backend.DataSourceInstanceSettings{
JSONData: json.RawMessage(settingsJSON),
}
_, err = newInstanceSettings()(dsSettings)
require.EqualError(t, err, "elasticsearch version is required, err=elasticsearch version <nil>, cannot be cast to int")
})
})
t.Run("timeField", func(t *testing.T) {
t.Run("is nil", func(t *testing.T) {
dsInfo := datasourceInfo{
ESVersion: 2,
MaxConcurrentShardRequests: 5,
Interval: "Daily",
TimeInterval: "TimeInterval",
}
settingsJSON, err := json.Marshal(dsInfo)
require.NoError(t, err)
dsSettings := backend.DataSourceInstanceSettings{
JSONData: json.RawMessage(settingsJSON),
}
_, err = newInstanceSettings()(dsSettings)
require.EqualError(t, err, "timeField cannot be cast to string")
})
t.Run("is empty", func(t *testing.T) {
dsInfo := datasourceInfo{
ESVersion: 2,
MaxConcurrentShardRequests: 5,
Interval: "Daily",
TimeField: "",
TimeInterval: "TimeInterval",
}
settingsJSON, err := json.Marshal(dsInfo)
require.NoError(t, err)
dsSettings := backend.DataSourceInstanceSettings{
JSONData: json.RawMessage(settingsJSON),
}
_, err = newInstanceSettings()(dsSettings)
require.EqualError(t, err, "elasticsearch time field name is required")
})
})
}

View File

@@ -12,6 +12,7 @@ type Query struct {
Metrics []*MetricAgg `json:"metrics"`
Alias string `json:"alias"`
Interval string
IntervalMs int64
RefID string
}
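The new IntervalMs field presumably mirrors the SDK's per-query interval in milliseconds; a sketch of how it could be populated from a backend.DataQuery (an assumption, the actual assignment happens in the query parser):

    func intervalMsSketch() Query {
        q := backend.DataQuery{RefID: "A", Interval: 10 * time.Second}
        return Query{
            RefID:      q.RefID,
            IntervalMs: q.Interval.Milliseconds(), // 10000
        }
    }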

View File

@@ -8,9 +8,9 @@ import (
"strings"
"time"
"github.com/grafana/grafana-plugin-sdk-go/backend"
"github.com/grafana/grafana-plugin-sdk-go/data"
"github.com/grafana/grafana/pkg/components/simplejson"
es "github.com/grafana/grafana/pkg/tsdb/elasticsearch/client"
)
@@ -43,12 +43,12 @@ var newResponseParser = func(responses []*es.SearchResponse, targets []*Query, d
}
func (rp *responseParser) getTimeSeries() (*backend.QueryDataResponse, error) {
result := backend.QueryDataResponse{
Responses: backend.Responses{},
}
if rp.Responses == nil {
return &result, nil
}
for i, res := range rp.Responses {
@@ -60,32 +60,42 @@ func (rp *responseParser) getTimeSeries() (plugins.DataResponse, error) {
}
if res.Error != nil {
errResult := getErrorFromElasticResponse(res)
result.Responses[target.RefID] = backend.DataResponse{
Error: errors.New(errResult),
Frames: data.Frames{
&data.Frame{
Meta: &data.FrameMeta{
Custom: debugInfo,
},
},
},
}
continue
}
queryRes := backend.DataResponse{}
props := make(map[string]string)
err := rp.processBuckets(res.Aggregations, target, &queryRes, props, 0)
if err != nil {
return &backend.QueryDataResponse{}, err
}
rp.nameFields(queryRes, target)
rp.trimDatapoints(queryRes, target)
for _, frame := range queryRes.Frames {
frame.Meta = &data.FrameMeta{
Custom: debugInfo,
}
}
result.Responses[target.RefID] = queryRes
}
return &result, nil
}
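Downstream, the SDK response is keyed by query RefID; a consumer sketch (names illustrative):

    func consumeTimeSeriesSketch(rp *responseParser) error {
        resp, err := rp.getTimeSeries()
        if err != nil {
            return err
        }
        // Responses is a map from RefID to backend.DataResponse.
        if dr, ok := resp.Responses["A"]; ok && dr.Error == nil {
            for _, frame := range dr.Frames {
                _ = frame.Meta.Custom // the debugInfo attached above
            }
        }
        return nil
    }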
func (rp *responseParser) processBuckets(aggs map[string]interface{}, target *Query,
queryResult *backend.DataResponse, props map[string]string, depth int) error {
var err error
maxDepth := len(target.BucketAggs) - 1
@@ -163,7 +173,7 @@ func (rp *responseParser) processBuckets(aggs map[string]interface{}, target *Qu
}
// nolint:gocyclo
func (rp *responseParser) processMetrics(esAgg *simplejson.Json, target *Query, query *backend.DataResponse,
props map[string]string) error {
frames := data.Frames{}
esAggBuckets := esAgg.Get("buckets").MustArray()
@@ -347,20 +357,17 @@ func (rp *responseParser) processMetrics(esAgg *simplejson.Json, target *Query,
data.NewField("value", tags, values).SetConfig(&data.FieldConfig{DisplayNameFromDS: rp.getMetricName(tags["metric"]) + " " + metric.Field})))
}
}
if query.Frames != nil {
oldFrames := query.Frames
frames = append(oldFrames, frames...)
}
query.Frames = frames
return nil
}
func (rp *responseParser) processAggregationDocs(esAgg *simplejson.Json, aggDef *BucketAgg, target *Query,
queryResult *backend.DataResponse, props map[string]string) error {
propKeys := make([]string, 0)
for k := range props {
propKeys = append(propKeys, k)
@@ -369,7 +376,7 @@ func (rp *responseParser) processAggregationDocs(esAgg *simplejson.Json, aggDef
frames := data.Frames{}
var fields []*data.Field
if queryResult.Frames == nil {
for _, propKey := range propKeys {
fields = append(fields, data.NewField(propKey, nil, []*string{}))
}
@@ -495,7 +502,7 @@ func (rp *responseParser) processAggregationDocs(esAgg *simplejson.Json, aggDef
Fields: dataFields,
}}
}
queryResult.Frames = frames
return nil
}
@@ -512,7 +519,7 @@ func extractDataField(name string, v interface{}) *data.Field {
func (rp *responseParser) trimDatapoints(queryResult backend.DataResponse, target *Query) {
var histogram *BucketAgg
for _, bucketAgg := range target.BucketAggs {
if bucketAgg.Type == dateHistType {
@@ -530,10 +537,7 @@ func (rp *responseParser) trimDatapoints(queryResult plugins.DataQueryResult, ta
return
}
frames := queryResult.Frames
for _, frame := range frames {
for _, field := range frame.Fields {
@@ -549,12 +553,9 @@ func (rp *responseParser) trimDatapoints(queryResult plugins.DataQueryResult, ta
}
func (rp *responseParser) nameFields(queryResult backend.DataResponse, target *Query) {
set := make(map[string]struct{})
frames := queryResult.Frames
for _, v := range frames {
for _, vv := range v.Fields {
if metricType, exists := vv.Labels["metric"]; exists {
@@ -706,20 +707,20 @@ func findAgg(target *Query, aggID string) (*BucketAgg, error) {
}
func getErrorFromElasticResponse(response *es.SearchResponse) string {
var errorString string
json := simplejson.NewFromAny(response.Error)
reason := json.Get("reason").MustString()
rootCauseReason := json.Get("root_cause").GetIndex(0).Get("reason").MustString()
switch {
case rootCauseReason != "":
errorString = rootCauseReason
case reason != "":
errorString = reason
default:
errorString = "Unknown elasticsearch error response"
}
return errorString
}
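A sketch of the precedence: root_cause wins over the top-level reason (this assumes SearchResponse.Error holds the decoded JSON error object and fmt is imported):

    func errorPrecedenceSketch() {
        resp := &es.SearchResponse{
            Error: map[string]interface{}{
                "reason": "all shards failed",
                "root_cause": []interface{}{
                    map[string]interface{}{"reason": "parsing_exception on field @timestamp"},
                },
            },
        }
        // root_cause[0].reason takes precedence over the top-level reason.
        fmt.Println(getErrorFromElasticResponse(resp))
    }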

View File

@@ -6,8 +6,7 @@ import (
"testing"
"time"
"github.com/grafana/grafana-plugin-sdk-go/backend"
"github.com/grafana/grafana/pkg/components/simplejson"
es "github.com/grafana/grafana/pkg/tsdb/elasticsearch/client"
"github.com/stretchr/testify/assert"
"github.com/stretchr/testify/require"
@@ -47,12 +46,11 @@ func TestResponseParser(t *testing.T) {
require.NoError(t, err)
result, err := rp.getTimeSeries()
require.NoError(t, err)
require.Len(t, result.Responses, 1)
queryRes := result.Responses["A"]
require.NotNil(t, queryRes)
dataframes := queryRes.Frames
require.Len(t, dataframes, 1)
frame := dataframes[0]
@@ -99,11 +97,11 @@ func TestResponseParser(t *testing.T) {
require.NoError(t, err)
result, err := rp.getTimeSeries()
require.NoError(t, err)
require.Len(t, result.Responses, 1)
queryRes := result.Responses["A"]
require.NotNil(t, queryRes)
dataframes := queryRes.Frames
require.NoError(t, err)
require.Len(t, dataframes, 2)
@@ -168,9 +166,9 @@ func TestResponseParser(t *testing.T) {
result, err := rp.getTimeSeries()
require.NoError(t, err)
queryRes := result.Responses["A"]
require.NotNil(t, queryRes)
dataframes := queryRes.Frames
require.NoError(t, err)
require.Len(t, dataframes, 2)
@@ -238,11 +236,11 @@ func TestResponseParser(t *testing.T) {
require.NoError(t, err)
result, err := rp.getTimeSeries()
require.NoError(t, err)
require.Len(t, result.Responses, 1)
queryRes := result.Responses["A"]
require.NotNil(t, queryRes)
dataframes := queryRes.Frames
require.NoError(t, err)
require.Len(t, dataframes, 4)
@@ -313,11 +311,11 @@ func TestResponseParser(t *testing.T) {
require.NoError(t, err)
result, err := rp.getTimeSeries()
require.NoError(t, err)
require.Len(t, result.Responses, 1)
queryRes := result.Responses["A"]
require.NotNil(t, queryRes)
dataframes := queryRes.Frames
require.NoError(t, err)
require.Len(t, dataframes, 2)
@@ -397,11 +395,11 @@ func TestResponseParser(t *testing.T) {
require.NoError(t, err)
result, err := rp.getTimeSeries()
require.NoError(t, err)
require.Len(t, result.Responses, 1)
queryRes := result.Responses["A"]
require.NotNil(t, queryRes)
dataframes := queryRes.Frames
require.NoError(t, err)
require.Len(t, dataframes, 6)
@@ -503,11 +501,11 @@ func TestResponseParser(t *testing.T) {
require.NoError(t, err)
result, err := rp.getTimeSeries()
require.NoError(t, err)
require.Len(t, result.Responses, 1)
queryRes := result.Responses["A"]
require.NotNil(t, queryRes)
dataframes := queryRes.Frames
require.NoError(t, err)
require.Len(t, dataframes, 3)
@@ -559,11 +557,11 @@ func TestResponseParser(t *testing.T) {
require.NoError(t, err)
result, err := rp.getTimeSeries()
require.NoError(t, err)
require.Len(t, result.Responses, 1)
queryRes := result.Responses["A"]
require.NotNil(t, queryRes)
dataframes := queryRes.Frames
require.NoError(t, err)
require.Len(t, dataframes, 1)
})
@@ -611,11 +609,11 @@ func TestResponseParser(t *testing.T) {
require.NoError(t, err)
result, err := rp.getTimeSeries()
require.NoError(t, err)
require.Len(t, result.Responses, 1)
queryRes := result.Responses["A"]
require.NotNil(t, queryRes)
dataframes := queryRes.Frames
require.NoError(t, err)
require.Len(t, dataframes, 2)
@@ -682,11 +680,11 @@ func TestResponseParser(t *testing.T) {
require.NoError(t, err)
result, err := rp.getTimeSeries()
require.NoError(t, err)
require.Len(t, result.Responses, 1)
queryRes := result.Responses["A"]
require.NotNil(t, queryRes)
dataframes := queryRes.Frames
require.NoError(t, err)
require.Len(t, dataframes, 2)
@@ -741,11 +739,11 @@ func TestResponseParser(t *testing.T) {
require.NoError(t, err)
result, err := rp.getTimeSeries()
require.NoError(t, err)
require.Len(t, result.Responses, 1)
queryRes := result.Responses["A"]
require.NotNil(t, queryRes)
dataframes := queryRes.Frames
require.NoError(t, err)
require.Len(t, dataframes, 1)
@@ -790,11 +788,11 @@ func TestResponseParser(t *testing.T) {
require.NoError(t, err)
result, err := rp.getTimeSeries()
require.NoError(t, err)
require.Len(t, result.Responses, 1)
queryRes := result.Responses["A"]
require.NotNil(t, queryRes)
dataframes := queryRes.Frames
require.NoError(t, err)
require.Len(t, dataframes, 1)
@@ -857,11 +855,11 @@ func TestResponseParser(t *testing.T) {
require.NoError(t, err)
result, err := rp.getTimeSeries()
require.NoError(t, err)
require.Len(t, result.Responses, 1)
queryRes := result.Responses["A"]
require.NotNil(t, queryRes)
dataframes := queryRes.Frames
require.NoError(t, err)
require.Len(t, dataframes, 3)
@@ -947,11 +945,11 @@ func TestResponseParser(t *testing.T) {
require.NoError(t, err)
result, err := rp.getTimeSeries()
require.NoError(t, err)
require.Len(t, result.Responses, 1)
queryRes := result.Responses["A"]
require.NotNil(t, queryRes)
dataframes := queryRes.Frames
require.NoError(t, err)
require.Len(t, dataframes, 1)
@@ -1021,11 +1019,11 @@ func TestResponseParser(t *testing.T) {
assert.Nil(t, err)
result, err := rp.getTimeSeries()
assert.Nil(t, err)
assert.Len(t, result.Responses, 1)
queryRes := result.Responses["A"]
assert.NotNil(t, queryRes)
dataframes := queryRes.Frames
assert.NoError(t, err)
assert.Len(t, dataframes, 2)
@@ -1066,23 +1064,19 @@ func TestResponseParser(t *testing.T) {
func newResponseParserForTest(tsdbQueries map[string]string, responseBody string) (*responseParser, error) {
from := time.Date(2018, 5, 15, 17, 50, 0, 0, time.UTC)
to := time.Date(2018, 5, 15, 17, 55, 0, 0, time.UTC)
timeRange := backend.TimeRange{
From: from,
To: to,
}
tsdbQuery := backend.QueryDataRequest{
Queries: []backend.DataQuery{},
}
for refID, tsdbQueryBody := range tsdbQueries {
tsdbQuery.Queries = append(tsdbQuery.Queries, backend.DataQuery{
TimeRange: timeRange,
RefID: refID,
JSON: json.RawMessage(tsdbQueryBody),
})
}
@@ -1093,7 +1087,7 @@ func newResponseParserForTest(tsdbQueries map[string]string, responseBody string
}
tsQueryParser := newTimeSeriesQueryParser()
queries, err := tsQueryParser.parse(tsdbQuery.Queries)
if err != nil {
return nil, err
}

View File

@@ -4,58 +4,59 @@ import (
"fmt"
"regexp"
"strconv"
"time"
"github.com/Masterminds/semver"
"github.com/grafana/grafana-plugin-sdk-go/backend"
"github.com/grafana/grafana/pkg/components/simplejson"
"github.com/grafana/grafana/pkg/tsdb"
es "github.com/grafana/grafana/pkg/tsdb/elasticsearch/client"
)
type timeSeriesQuery struct {
client es.Client
dataQueries []backend.DataQuery
intervalCalculator tsdb.Calculator
}
var newTimeSeriesQuery = func(client es.Client, dataQuery []backend.DataQuery,
intervalCalculator tsdb.Calculator) *timeSeriesQuery {
return &timeSeriesQuery{
client: client,
dataQueries: dataQuery,
intervalCalculator: intervalCalculator,
}
}
func (e *timeSeriesQuery) execute() (*backend.QueryDataResponse, error) {
tsQueryParser := newTimeSeriesQueryParser()
queries, err := tsQueryParser.parse(e.dataQueries)
if err != nil {
return &backend.QueryDataResponse{}, err
}
ms := e.client.MultiSearch()
from := fmt.Sprintf("%d", e.dataQueries[0].TimeRange.From.UnixNano()/int64(time.Millisecond))
to := fmt.Sprintf("%d", e.dataQueries[0].TimeRange.To.UnixNano()/int64(time.Millisecond))
result := backend.QueryDataResponse{
Responses: backend.Responses{},
}
for _, q := range queries {
if err := e.processQuery(q, ms, from, to, result); err != nil {
return &backend.QueryDataResponse{}, err
}
}
req, err := ms.Build()
if err != nil {
return &backend.QueryDataResponse{}, err
}
res, err := e.client.ExecuteMultisearch(req)
if err != nil {
return &backend.QueryDataResponse{}, err
}
rp := newResponseParser(res.Responses, queries, res.DebugInfo)
@@ -64,14 +65,14 @@ func (e *timeSeriesQuery) execute() (plugins.DataResponse, error) {
func (e *timeSeriesQuery) processQuery(q *Query, ms *es.MultiSearchRequestBuilder, from, to string,
result backend.QueryDataResponse) error {
minInterval, err := e.client.GetMinInterval(q.Interval)
if err != nil {
return err
}
intrvl := e.intervalCalculator.Calculate(e.dataQueries[0].TimeRange, minInterval)
b := ms.Search(intrvl)
b.Size(0)
filters := b.Query().Bool().Filter()
filters.AddDateRangeFilter(e.client.GetTimeField(), to, from, es.DateFormatEpochMS)
@@ -82,10 +83,8 @@ func (e *timeSeriesQuery) processQuery(q *Query, ms *es.MultiSearchRequestBuilde
if len(q.BucketAggs) == 0 {
if len(q.Metrics) == 0 || q.Metrics[0].Type != "raw_document" {
result.Responses[q.RefID] = backend.DataResponse{
Error: fmt.Errorf("invalid query, missing metrics and aggregations"),
}
return nil
}
@@ -377,10 +376,13 @@ func newTimeSeriesQueryParser() *timeSeriesQueryParser {
return &timeSeriesQueryParser{}
}
func (p *timeSeriesQueryParser) parse(tsdbQuery []backend.DataQuery) ([]*Query, error) {
queries := make([]*Query, 0)
for _, q := range tsdbQuery {
model, err := simplejson.NewJson(q.JSON)
if err != nil {
return nil, err
}
timeField, err := model.Get("timeField").String()
if err != nil {
return nil, err
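For reference, a sketch of the per-query JSON model this parser consumes via simplejson; the field names match the tests in this PR:

    func parseSketch() ([]*Query, error) {
        q := backend.DataQuery{
            RefID: "A",
            JSON: json.RawMessage(`{
                "timeField": "@timestamp",
                "bucketAggs": [{ "type": "date_histogram", "field": "@timestamp", "id": "2" }],
                "metrics":    [{ "type": "count", "id": "1" }]
            }`),
        }
        // Yields one *Query with TimeField "@timestamp".
        return newTimeSeriesQueryParser().parse([]backend.DataQuery{q})
    }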

View File

@@ -1,18 +1,17 @@
package elasticsearch
import (
"encoding/json"
"fmt"
"testing"
"time"
"github.com/Masterminds/semver"
"github.com/grafana/grafana-plugin-sdk-go/backend"
"github.com/grafana/grafana/pkg/tsdb"
es "github.com/grafana/grafana/pkg/tsdb/elasticsearch/client"
"github.com/stretchr/testify/assert"
"github.com/stretchr/testify/require"
)
func TestExecuteTimeSeriesQuery(t *testing.T) {
@@ -21,44 +20,44 @@ func TestExecuteTimeSeriesQuery(t *testing.T) {
fromStr := fmt.Sprintf("%d", from.UnixNano()/int64(time.Millisecond))
toStr := fmt.Sprintf("%d", to.UnixNano()/int64(time.Millisecond))
t.Run("Test execute time series query", func(t *testing.T) {
t.Run("With defaults on es 2", func(t *testing.T) {
c := newFakeClient("2.0.0")
_, err := executeTsdbQuery(c, `{
"timeField": "@timestamp",
"bucketAggs": [{ "type": "date_histogram", "field": "@timestamp", "id": "2" }],
"metrics": [{"type": "count", "id": "0" }]
}`, from, to, 15*time.Second)
require.NoError(t, err)
sr := c.multisearchRequests[0].Requests[0]
rangeFilter := sr.Query.Bool.Filters[0].(*es.RangeFilter)
require.Equal(t, rangeFilter.Key, c.timeField)
require.Equal(t, rangeFilter.Lte, toStr)
require.Equal(t, rangeFilter.Gte, fromStr)
require.Equal(t, rangeFilter.Format, es.DateFormatEpochMS)
require.Equal(t, sr.Aggs[0].Key, "2")
dateHistogramAgg := sr.Aggs[0].Aggregation.Aggregation.(*es.DateHistogramAgg)
require.Equal(t, dateHistogramAgg.Field, "@timestamp")
require.Equal(t, dateHistogramAgg.ExtendedBounds.Min, fromStr)
require.Equal(t, dateHistogramAgg.ExtendedBounds.Max, toStr)
})
t.Run("With defaults on es 5", func(t *testing.T) {
c := newFakeClient("5.0.0")
_, err := executeTsdbQuery(c, `{
"timeField": "@timestamp",
"bucketAggs": [{ "type": "date_histogram", "field": "@timestamp", "id": "2" }],
"metrics": [{"type": "count", "id": "0" }]
}`, from, to, 15*time.Second)
require.NoError(t, err)
sr := c.multisearchRequests[0].Requests[0]
require.Equal(t, sr.Query.Bool.Filters[0].(*es.RangeFilter).Key, c.timeField)
require.Equal(t, sr.Aggs[0].Key, "2")
require.Equal(t, sr.Aggs[0].Aggregation.Aggregation.(*es.DateHistogramAgg).ExtendedBounds.Min, fromStr)
require.Equal(t, sr.Aggs[0].Aggregation.Aggregation.(*es.DateHistogramAgg).ExtendedBounds.Max, toStr)
})
t.Run("With multiple bucket aggs", func(t *testing.T) {
c := newFakeClient("5.0.0")
_, err := executeTsdbQuery(c, `{
"timeField": "@timestamp",
@@ -68,19 +67,19 @@ func TestExecuteTimeSeriesQuery(t *testing.T) {
],
"metrics": [{"type": "count", "id": "1" }]
}`, from, to, 15*time.Second)
require.NoError(t, err)
sr := c.multisearchRequests[0].Requests[0]
firstLevel := sr.Aggs[0]
require.Equal(t, firstLevel.Key, "2")
termsAgg := firstLevel.Aggregation.Aggregation.(*es.TermsAggregation)
require.Equal(t, termsAgg.Field, "@host")
require.Equal(t, termsAgg.Size, 500)
secondLevel := firstLevel.Aggregation.Aggs[0]
require.Equal(t, secondLevel.Key, "3")
require.Equal(t, secondLevel.Aggregation.Aggregation.(*es.DateHistogramAgg).Field, "@timestamp")
})
t.Run("With select field", func(t *testing.T) {
c := newFakeClient("5.0.0")
_, err := executeTsdbQuery(c, `{
"timeField": "@timestamp",
@@ -89,18 +88,18 @@ func TestExecuteTimeSeriesQuery(t *testing.T) {
],
"metrics": [{"type": "avg", "field": "@value", "id": "1" }]
}`, from, to, 15*time.Second)
require.NoError(t, err)
sr := c.multisearchRequests[0].Requests[0]
firstLevel := sr.Aggs[0]
require.Equal(t, firstLevel.Key, "2")
require.Equal(t, firstLevel.Aggregation.Aggregation.(*es.DateHistogramAgg).Field, "@timestamp")
secondLevel := firstLevel.Aggregation.Aggs[0]
require.Equal(t, secondLevel.Key, "1")
require.Equal(t, secondLevel.Aggregation.Type, "avg")
require.Equal(t, secondLevel.Aggregation.Aggregation.(*es.MetricAggregation).Field, "@value")
})
t.Run("With term agg and order by metric agg", func(t *testing.T) {
c := newFakeClient("5.0.0")
_, err := executeTsdbQuery(c, `{
"timeField": "@timestamp",
@@ -118,19 +117,19 @@ func TestExecuteTimeSeriesQuery(t *testing.T) {
{"type": "avg", "field": "@value", "id": "5" }
]
}`, from, to, 15*time.Second)
require.NoError(t, err)
sr := c.multisearchRequests[0].Requests[0]
avgAggOrderBy := sr.Aggs[0].Aggregation.Aggs[0]
require.Equal(t, avgAggOrderBy.Key, "5")
require.Equal(t, avgAggOrderBy.Aggregation.Type, "avg")
avgAgg := sr.Aggs[0].Aggregation.Aggs[1].Aggregation.Aggs[0]
require.Equal(t, avgAgg.Key, "5")
require.Equal(t, avgAgg.Aggregation.Type, "avg")
})
t.Run("With term agg and order by count metric agg", func(t *testing.T) {
c := newFakeClient("5.0.0")
_, err := executeTsdbQuery(c, `{
"timeField": "@timestamp",
@@ -147,14 +146,14 @@ func TestExecuteTimeSeriesQuery(t *testing.T) {
{"type": "count", "id": "1" }
]
}`, from, to, 15*time.Second)
require.NoError(t, err)
sr := c.multisearchRequests[0].Requests[0]
termsAgg := sr.Aggs[0].Aggregation.Aggregation.(*es.TermsAggregation)
require.Equal(t, termsAgg.Order["_count"], "asc")
})
t.Run("With term agg and order by percentiles agg", func(t *testing.T) {
c := newFakeClient("5.0.0")
_, err := executeTsdbQuery(c, `{
"timeField": "@timestamp",
@@ -171,15 +170,15 @@ func TestExecuteTimeSeriesQuery(t *testing.T) {
{"type": "percentiles", "field": "@value", "id": "1", "settings": { "percents": ["95","99"] } }
]
}`, from, to, 15*time.Second)
require.NoError(t, err)
sr := c.multisearchRequests[0].Requests[0]
orderByAgg := sr.Aggs[0].Aggregation.Aggs[0]
require.Equal(t, orderByAgg.Key, "1")
require.Equal(t, orderByAgg.Aggregation.Type, "percentiles")
})
t.Run("With term agg and order by extended stats agg", func(t *testing.T) {
c := newFakeClient("5.0.0")
_, err := executeTsdbQuery(c, `{
"timeField": "@timestamp",
@@ -196,15 +195,15 @@ func TestExecuteTimeSeriesQuery(t *testing.T) {
{"type": "extended_stats", "field": "@value", "id": "1", "meta": { "std_deviation": true } }
]
}`, from, to, 15*time.Second)
require.NoError(t, err)
sr := c.multisearchRequests[0].Requests[0]
orderByAgg := sr.Aggs[0].Aggregation.Aggs[0]
require.Equal(t, orderByAgg.Key, "1")
require.Equal(t, orderByAgg.Aggregation.Type, "extended_stats")
})
t.Run("With term agg and order by term", func(t *testing.T) {
c := newFakeClient("5.0.0")
_, err := executeTsdbQuery(c, `{
"timeField": "@timestamp",
@@ -222,16 +221,16 @@ func TestExecuteTimeSeriesQuery(t *testing.T) {
{"type": "avg", "field": "@value", "id": "5" }
]
}`, from, to, 15*time.Second)
require.NoError(t, err)
sr := c.multisearchRequests[0].Requests[0]
firstLevel := sr.Aggs[0]
require.Equal(t, firstLevel.Key, "2")
termsAgg := firstLevel.Aggregation.Aggregation.(*es.TermsAggregation)
require.Equal(t, termsAgg.Order["_term"], "asc")
})
t.Run("With term agg and order by term with es6.x", func(t *testing.T) {
c := newFakeClient("6.0.0")
_, err := executeTsdbQuery(c, `{
"timeField": "@timestamp",
@@ -249,16 +248,16 @@ func TestExecuteTimeSeriesQuery(t *testing.T) {
{"type": "avg", "field": "@value", "id": "5" }
]
}`, from, to, 15*time.Second)
require.NoError(t, err)
sr := c.multisearchRequests[0].Requests[0]
firstLevel := sr.Aggs[0]
require.Equal(t, firstLevel.Key, "2")
termsAgg := firstLevel.Aggregation.Aggregation.(*es.TermsAggregation)
require.Equal(t, termsAgg.Order["_key"], "asc")
})
t.Run("With metric percentiles", func(t *testing.T) {
c := newFakeClient("5.0.0")
_, err := executeTsdbQuery(c, `{
"timeField": "@timestamp",
@@ -276,22 +275,22 @@ func TestExecuteTimeSeriesQuery(t *testing.T) {
}
]
}`, from, to, 15*time.Second)
require.NoError(t, err)
sr := c.multisearchRequests[0].Requests[0]
percentilesAgg := sr.Aggs[0].Aggregation.Aggs[0]
require.Equal(t, percentilesAgg.Key, "1")
require.Equal(t, percentilesAgg.Aggregation.Type, "percentiles")
metricAgg := percentilesAgg.Aggregation.Aggregation.(*es.MetricAggregation)
percents := metricAgg.Settings["percents"].([]interface{})
require.Len(t, percents, 4)
require.Equal(t, percents[0], "1")
require.Equal(t, percents[1], "2")
require.Equal(t, percents[2], "3")
require.Equal(t, percents[3], "4")
})
t.Run("With filters aggs on es 2", func(t *testing.T) {
c := newFakeClient("2.0.0")
_, err := executeTsdbQuery(c, `{
"timeField": "@timestamp",
@@ -307,22 +306,22 @@ func TestExecuteTimeSeriesQuery(t *testing.T) {
],
"metrics": [{"type": "count", "id": "1" }]
}`, from, to, 15*time.Second)
require.NoError(t, err)
sr := c.multisearchRequests[0].Requests[0]
filtersAgg := sr.Aggs[0]
require.Equal(t, filtersAgg.Key, "2")
require.Equal(t, filtersAgg.Aggregation.Type, "filters")
fAgg := filtersAgg.Aggregation.Aggregation.(*es.FiltersAggregation)
require.Equal(t, fAgg.Filters["@metric:cpu"].(*es.QueryStringFilter).Query, "@metric:cpu")
require.Equal(t, fAgg.Filters["@metric:logins.count"].(*es.QueryStringFilter).Query, "@metric:logins.count")
dateHistogramAgg := sr.Aggs[0].Aggregation.Aggs[0]
require.Equal(t, dateHistogramAgg.Key, "4")
require.Equal(t, dateHistogramAgg.Aggregation.Aggregation.(*es.DateHistogramAgg).Field, "@timestamp")
})
t.Run("With filters aggs on es 5", func(t *testing.T) {
c := newFakeClient("5.0.0")
_, err := executeTsdbQuery(c, `{
"timeField": "@timestamp",
@@ -338,48 +337,48 @@ func TestExecuteTimeSeriesQuery(t *testing.T) {
],
"metrics": [{"type": "count", "id": "1" }]
}`, from, to, 15*time.Second)
require.NoError(t, err)
sr := c.multisearchRequests[0].Requests[0]
filtersAgg := sr.Aggs[0]
require.Equal(t, filtersAgg.Key, "2")
require.Equal(t, filtersAgg.Aggregation.Type, "filters")
fAgg := filtersAgg.Aggregation.Aggregation.(*es.FiltersAggregation)
require.Equal(t, fAgg.Filters["@metric:cpu"].(*es.QueryStringFilter).Query, "@metric:cpu")
require.Equal(t, fAgg.Filters["@metric:logins.count"].(*es.QueryStringFilter).Query, "@metric:logins.count")
dateHistogramAgg := sr.Aggs[0].Aggregation.Aggs[0]
require.Equal(t, dateHistogramAgg.Key, "4")
require.Equal(t, dateHistogramAgg.Aggregation.Aggregation.(*es.DateHistogramAgg).Field, "@timestamp")
})
t.Run("With raw document metric", func(t *testing.T) {
c := newFakeClient("5.0.0")
_, err := executeTsdbQuery(c, `{
"timeField": "@timestamp",
"bucketAggs": [],
"metrics": [{ "id": "1", "type": "raw_document", "settings": {} }]
}`, from, to, 15*time.Second)
require.NoError(t, err)
sr := c.multisearchRequests[0].Requests[0]
require.Equal(t, sr.Size, 500)
})
t.Run("With raw document metric size set", func(t *testing.T) {
c := newFakeClient("5.0.0")
_, err := executeTsdbQuery(c, `{
"timeField": "@timestamp",
"bucketAggs": [],
"metrics": [{ "id": "1", "type": "raw_document", "settings": { "size": 1337 } }]
}`, from, to, 15*time.Second)
require.NoError(t, err)
sr := c.multisearchRequests[0].Requests[0]
require.Equal(t, sr.Size, 1337)
})
t.Run("With date histogram agg", func(t *testing.T) {
c := newFakeClient("5.0.0")
_, err := executeTsdbQuery(c, `{
"timeField": "@timestamp",
@@ -393,19 +392,19 @@ func TestExecuteTimeSeriesQuery(t *testing.T) {
],
"metrics": [{"type": "count", "id": "1" }]
}`, from, to, 15*time.Second)
require.NoError(t, err)
sr := c.multisearchRequests[0].Requests[0]
firstLevel := sr.Aggs[0]
require.Equal(t, firstLevel.Key, "2")
require.Equal(t, firstLevel.Aggregation.Type, "date_histogram")
hAgg := firstLevel.Aggregation.Aggregation.(*es.DateHistogramAgg)
require.Equal(t, hAgg.Field, "@timestamp")
require.Equal(t, hAgg.Interval, "$__interval")
require.Equal(t, hAgg.MinDocCount, 2)
})
t.Run("With histogram agg", func(t *testing.T) {
c := newFakeClient("5.0.0")
_, err := executeTsdbQuery(c, `{
"timeField": "@timestamp",
@@ -419,20 +418,20 @@ func TestExecuteTimeSeriesQuery(t *testing.T) {
],
"metrics": [{"type": "count", "id": "1" }]
}`, from, to, 15*time.Second)
require.NoError(t, err)
sr := c.multisearchRequests[0].Requests[0]
firstLevel := sr.Aggs[0]
require.Equal(t, firstLevel.Key, "3")
require.Equal(t, firstLevel.Aggregation.Type, "histogram")
hAgg := firstLevel.Aggregation.Aggregation.(*es.HistogramAgg)
require.Equal(t, hAgg.Field, "bytes")
require.Equal(t, hAgg.Interval, 10)
require.Equal(t, hAgg.MinDocCount, 2)
require.Equal(t, *hAgg.Missing, 5)
})
t.Run("With geo hash grid agg", func(t *testing.T) {
c := newFakeClient("5.0.0")
_, err := executeTsdbQuery(c, `{
"timeField": "@timestamp",
@@ -446,18 +445,18 @@ func TestExecuteTimeSeriesQuery(t *testing.T) {
],
"metrics": [{"type": "count", "id": "1" }]
}`, from, to, 15*time.Second)
require.NoError(t, err)
sr := c.multisearchRequests[0].Requests[0]
firstLevel := sr.Aggs[0]
require.Equal(t, firstLevel.Key, "3")
require.Equal(t, firstLevel.Aggregation.Type, "geohash_grid")
ghGridAgg := firstLevel.Aggregation.Aggregation.(*es.GeoHashGridAggregation)
require.Equal(t, ghGridAgg.Field, "@location")
require.Equal(t, ghGridAgg.Precision, 3)
})
t.Run("With moving average", func(t *testing.T) {
c := newFakeClient("5.0.0")
_, err := executeTsdbQuery(c, `{
"timeField": "@timestamp",
@@ -474,28 +473,28 @@ func TestExecuteTimeSeriesQuery(t *testing.T) {
}
]
}`, from, to, 15*time.Second)
require.NoError(t, err)
sr := c.multisearchRequests[0].Requests[0]
firstLevel := sr.Aggs[0]
require.Equal(t, firstLevel.Key, "4")
require.Equal(t, firstLevel.Aggregation.Type, "date_histogram")
require.Len(t, firstLevel.Aggregation.Aggs, 2)
sumAgg := firstLevel.Aggregation.Aggs[0]
require.Equal(t, sumAgg.Key, "3")
require.Equal(t, sumAgg.Aggregation.Type, "sum")
mAgg := sumAgg.Aggregation.Aggregation.(*es.MetricAggregation)
require.Equal(t, mAgg.Field, "@value")
movingAvgAgg := firstLevel.Aggregation.Aggs[1]
require.Equal(t, movingAvgAgg.Key, "2")
require.Equal(t, movingAvgAgg.Aggregation.Type, "moving_avg")
pl := movingAvgAgg.Aggregation.Aggregation.(*es.PipelineAggregation)
require.Equal(t, pl.BucketPath, "3")
})
t.Run("With moving average doc count", func(t *testing.T) {
c := newFakeClient("5.0.0")
_, err := executeTsdbQuery(c, `{
"timeField": "@timestamp",
@@ -512,22 +511,22 @@ func TestExecuteTimeSeriesQuery(t *testing.T) {
}
]
}`, from, to, 15*time.Second)
require.NoError(t, err)
sr := c.multisearchRequests[0].Requests[0]
firstLevel := sr.Aggs[0]
require.Equal(t, firstLevel.Key, "4")
require.Equal(t, firstLevel.Aggregation.Type, "date_histogram")
require.Len(t, firstLevel.Aggregation.Aggs, 1)
movingAvgAgg := firstLevel.Aggregation.Aggs[0]
require.Equal(t, movingAvgAgg.Key, "2")
require.Equal(t, movingAvgAgg.Aggregation.Type, "moving_avg")
pl := movingAvgAgg.Aggregation.Aggregation.(*es.PipelineAggregation)
require.Equal(t, pl.BucketPath, "_count")
})
t.Run("With broken moving average", func(t *testing.T) {
c := newFakeClient("5.0.0")
_, err := executeTsdbQuery(c, `{
"timeField": "@timestamp",
@@ -548,22 +547,22 @@ func TestExecuteTimeSeriesQuery(t *testing.T) {
}
]
}`, from, to, 15*time.Second)
require.NoError(t, err)
sr := c.multisearchRequests[0].Requests[0]
firstLevel := sr.Aggs[0]
require.Equal(t, firstLevel.Key, "5")
require.Equal(t, firstLevel.Aggregation.Type, "date_histogram")
require.Len(t, firstLevel.Aggregation.Aggs, 2)
movingAvgAgg := firstLevel.Aggregation.Aggs[1]
require.Equal(t, movingAvgAgg.Key, "2")
plAgg := movingAvgAgg.Aggregation.Aggregation.(*es.PipelineAggregation)
require.Equal(t, plAgg.BucketPath, "3")
})
t.Run("With cumulative sum", func(t *testing.T) {
c := newFakeClient("5.0.0")
_, err := executeTsdbQuery(c, `{
"timeField": "@timestamp",
@@ -580,28 +579,28 @@ func TestExecuteTimeSeriesQuery(t *testing.T) {
}
]
}`, from, to, 15*time.Second)
require.NoError(t, err)
sr := c.multisearchRequests[0].Requests[0]
firstLevel := sr.Aggs[0]
require.Equal(t, firstLevel.Key, "4")
require.Equal(t, firstLevel.Aggregation.Type, "date_histogram")
require.Len(t, firstLevel.Aggregation.Aggs, 2)
sumAgg := firstLevel.Aggregation.Aggs[0]
require.Equal(t, sumAgg.Key, "3")
require.Equal(t, sumAgg.Aggregation.Type, "sum")
mAgg := sumAgg.Aggregation.Aggregation.(*es.MetricAggregation)
require.Equal(t, mAgg.Field, "@value")
cumulativeSumAgg := firstLevel.Aggregation.Aggs[1]
require.Equal(t, cumulativeSumAgg.Key, "2")
require.Equal(t, cumulativeSumAgg.Aggregation.Type, "cumulative_sum")
pl := cumulativeSumAgg.Aggregation.Aggregation.(*es.PipelineAggregation)
require.Equal(t, pl.BucketPath, "3")
})
t.Run("With cumulative sum doc count", func(t *testing.T) {
c := newFakeClient("5.0.0")
_, err := executeTsdbQuery(c, `{
"timeField": "@timestamp",
@@ -618,22 +617,22 @@ func TestExecuteTimeSeriesQuery(t *testing.T) {
}
]
}`, from, to, 15*time.Second)
require.NoError(t, err)
sr := c.multisearchRequests[0].Requests[0]
firstLevel := sr.Aggs[0]
require.Equal(t, firstLevel.Key, "4")
require.Equal(t, firstLevel.Aggregation.Type, "date_histogram")
require.Len(t, firstLevel.Aggregation.Aggs, 1)
cumulativeSumAgg := firstLevel.Aggregation.Aggs[0]
require.Equal(t, cumulativeSumAgg.Key, "2")
require.Equal(t, cumulativeSumAgg.Aggregation.Type, "cumulative_sum")
pl := cumulativeSumAgg.Aggregation.Aggregation.(*es.PipelineAggregation)
require.Equal(t, pl.BucketPath, "_count")
})
t.Run("With broken cumulative sum", func(t *testing.T) {
c := newFakeClient("5.0.0")
_, err := executeTsdbQuery(c, `{
"timeField": "@timestamp",
@@ -654,22 +653,22 @@ func TestExecuteTimeSeriesQuery(t *testing.T) {
}
]
}`, from, to, 15*time.Second)
require.NoError(t, err)
sr := c.multisearchRequests[0].Requests[0]
firstLevel := sr.Aggs[0]
require.Equal(t, firstLevel.Key, "5")
require.Equal(t, firstLevel.Aggregation.Type, "date_histogram")
require.Len(t, firstLevel.Aggregation.Aggs, 2)
cumulativeSumAgg := firstLevel.Aggregation.Aggs[1]
require.Equal(t, cumulativeSumAgg.Key, "2")
plAgg := cumulativeSumAgg.Aggregation.Aggregation.(*es.PipelineAggregation)
require.Equal(t, plAgg.BucketPath, "3")
})
t.Run("With derivative", func(t *testing.T) {
c := newFakeClient("5.0.0")
_, err := executeTsdbQuery(c, `{
"timeField": "@timestamp",
@@ -685,20 +684,20 @@ func TestExecuteTimeSeriesQuery(t *testing.T) {
}
]
}`, from, to, 15*time.Second)
require.NoError(t, err)
sr := c.multisearchRequests[0].Requests[0]
firstLevel := sr.Aggs[0]
require.Equal(t, firstLevel.Key, "4")
require.Equal(t, firstLevel.Aggregation.Type, "date_histogram")
derivativeAgg := firstLevel.Aggregation.Aggs[1]
require.Equal(t, derivativeAgg.Key, "2")
plAgg := derivativeAgg.Aggregation.Aggregation.(*es.PipelineAggregation)
require.Equal(t, plAgg.BucketPath, "3")
})
t.Run("With derivative doc count", func(t *testing.T) {
c := newFakeClient("5.0.0")
_, err := executeTsdbQuery(c, `{
"timeField": "@timestamp",
@@ -714,20 +713,20 @@ func TestExecuteTimeSeriesQuery(t *testing.T) {
}
]
}`, from, to, 15*time.Second)
require.NoError(t, err)
sr := c.multisearchRequests[0].Requests[0]
firstLevel := sr.Aggs[0]
require.Equal(t, firstLevel.Key, "4")
require.Equal(t, firstLevel.Aggregation.Type, "date_histogram")
derivativeAgg := firstLevel.Aggregation.Aggs[0]
require.Equal(t, derivativeAgg.Key, "2")
plAgg := derivativeAgg.Aggregation.Aggregation.(*es.PipelineAggregation)
require.Equal(t, plAgg.BucketPath, "_count")
})
t.Run("With serial_diff", func(t *testing.T) {
c := newFakeClient("5.0.0")
_, err := executeTsdbQuery(c, `{
"timeField": "@timestamp",
@@ -743,20 +742,20 @@ func TestExecuteTimeSeriesQuery(t *testing.T) {
}
]
}`, from, to, 15*time.Second)
require.NoError(t, err)
sr := c.multisearchRequests[0].Requests[0]
firstLevel := sr.Aggs[0]
require.Equal(t, firstLevel.Key, "4")
require.Equal(t, firstLevel.Aggregation.Type, "date_histogram")
serialDiffAgg := firstLevel.Aggregation.Aggs[1]
require.Equal(t, serialDiffAgg.Key, "2")
plAgg := serialDiffAgg.Aggregation.Aggregation.(*es.PipelineAggregation)
require.Equal(t, plAgg.BucketPath, "3")
})
t.Run("With serial_diff doc count", func(t *testing.T) {
c := newFakeClient("5.0.0")
_, err := executeTsdbQuery(c, `{
"timeField": "@timestamp",
@@ -772,20 +771,20 @@ func TestExecuteTimeSeriesQuery(t *testing.T) {
}
]
}`, from, to, 15*time.Second)
require.NoError(t, err)
sr := c.multisearchRequests[0].Requests[0]
firstLevel := sr.Aggs[0]
require.Equal(t, firstLevel.Key, "4")
require.Equal(t, firstLevel.Aggregation.Type, "date_histogram")
serialDiffAgg := firstLevel.Aggregation.Aggs[0]
require.Equal(t, serialDiffAgg.Key, "2")
plAgg := serialDiffAgg.Aggregation.Aggregation.(*es.PipelineAggregation)
require.Equal(t, plAgg.BucketPath, "_count")
})
t.Run("With bucket_script", func(t *testing.T) {
c := newFakeClient("5.0.0")
_, err := executeTsdbQuery(c, `{
"timeField": "@timestamp",
@@ -806,23 +805,23 @@ func TestExecuteTimeSeriesQuery(t *testing.T) {
}
]
}`, from, to, 15*time.Second)
require.NoError(t, err)
sr := c.multisearchRequests[0].Requests[0]
firstLevel := sr.Aggs[0]
require.Equal(t, firstLevel.Key, "4")
require.Equal(t, firstLevel.Aggregation.Type, "date_histogram")
bucketScriptAgg := firstLevel.Aggregation.Aggs[2]
require.Equal(t, bucketScriptAgg.Key, "2")
plAgg := bucketScriptAgg.Aggregation.Aggregation.(*es.PipelineAggregation)
require.Equal(t, plAgg.BucketPath.(map[string]interface{}), map[string]interface{}{
"var1": "3",
"var2": "5",
})
})
t.Run("With bucket_script doc count", func(t *testing.T) {
c := newFakeClient("5.0.0")
_, err := executeTsdbQuery(c, `{
"timeField": "@timestamp",
@@ -841,17 +840,17 @@ func TestExecuteTimeSeriesQuery(t *testing.T) {
}
]
}`, from, to, 15*time.Second)
require.NoError(t, err)
sr := c.multisearchRequests[0].Requests[0]
firstLevel := sr.Aggs[0]
require.Equal(t, firstLevel.Key, "4")
require.Equal(t, firstLevel.Aggregation.Type, "date_histogram")
bucketScriptAgg := firstLevel.Aggregation.Aggs[0]
require.Equal(t, bucketScriptAgg.Key, "2")
plAgg := bucketScriptAgg.Aggregation.Aggregation.(*es.PipelineAggregation)
require.Equal(t, plAgg.BucketPath.(map[string]interface{}), map[string]interface{}{
"var1": "_count",
})
})
@@ -940,12 +939,12 @@ func TestSettingsCasting(t *testing.T) {
_, err := executeTsdbQuery(c, `{
"timeField": "@timestamp",
"bucketAggs": [
{
"type": "date_histogram",
"field": "@timestamp",
"id": "2",
"settings": {
"min_doc_count": "1"
}
}
],
@@ -975,7 +974,7 @@ func TestSettingsCasting(t *testing.T) {
_, err := executeTsdbQuery(c, `{
"timeField": "@timestamp",
"bucketAggs": [
{
"type": "date_histogram",
"field": "@timestamp",
"id": "2",
@@ -1059,7 +1058,7 @@ func TestSettingsCasting(t *testing.T) {
"script": "my_script"
}
},
{
"id": "3",
"type": "avg",
"settings": {
@@ -1126,15 +1125,11 @@ func (c *fakeClient) MultiSearch() *es.MultiSearchRequestBuilder {
return c.builder
}
func newDataQuery(body string) (backend.QueryDataRequest, error) {
return backend.QueryDataRequest{
Queries: []backend.DataQuery{
{
JSON: json.RawMessage(body),
},
},
}, nil
@@ -1142,31 +1137,28 @@ func newDataQuery(body string) (plugins.DataQuery, error) {
func executeTsdbQuery(c es.Client, body string, from, to time.Time, minInterval time.Duration) (
*backend.QueryDataResponse, error) {
timeRange := backend.TimeRange{
From: from,
To: to,
}
dataRequest := backend.QueryDataRequest{
Queries: []backend.DataQuery{
{
JSON: json.RawMessage(body),
TimeRange: timeRange,
},
},
}
query := newTimeSeriesQuery(c, dataRequest.Queries, tsdb.NewCalculator(tsdb.CalculatorOptions{MinInterval: minInterval}))
return query.execute()
}
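The helper pins the calculator floor via MinInterval; a usage sketch (assuming the calculator rounds a narrow range up to that floor, and fmt is imported):

    func calculatorSketch() {
        from := time.Date(2018, 5, 15, 17, 50, 0, 0, time.UTC)
        calc := tsdb.NewCalculator(tsdb.CalculatorOptions{MinInterval: 15 * time.Second})
        iv := calc.Calculate(backend.TimeRange{From: from, To: from.Add(5 * time.Minute)}, 15*time.Second)
        fmt.Println(iv.Text) // 15s: 5m/1500 is roughly 200ms, so the 15s floor wins
    }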
func TestTimeSeriesQueryParser(t *testing.T) {
t.Run("Test time series query parser", func(t *testing.T) {
p := newTimeSeriesQueryParser()
t.Run("Should be able to parse query", func(t *testing.T) {
body := `{
"timeField": "@timestamp",
"query": "@metric:cpu",
@@ -1217,49 +1209,49 @@ func TestTimeSeriesQueryParser(t *testing.T) {
}
]
}`
dataQuery, err := newDataQuery(body)
require.NoError(t, err)
queries, err := p.parse(dataQuery.Queries)
require.NoError(t, err)
require.Len(t, queries, 1)
q := queries[0]
require.Equal(t, q.TimeField, "@timestamp")
require.Equal(t, q.RawQuery, "@metric:cpu")
require.Equal(t, q.Alias, "{{@hostname}} {{metric}}")
require.Equal(t, q.Interval, "10m")
- So(q.Metrics, ShouldHaveLength, 2)
- So(q.Metrics[0].Field, ShouldEqual, "@value")
- So(q.Metrics[0].ID, ShouldEqual, "1")
- So(q.Metrics[0].Type, ShouldEqual, "percentiles")
- So(q.Metrics[0].Hide, ShouldBeFalse)
- So(q.Metrics[0].PipelineAggregate, ShouldEqual, "")
- So(q.Metrics[0].Settings.Get("percents").MustStringArray()[0], ShouldEqual, "90")
+ require.Len(t, q.Metrics, 2)
+ require.Equal(t, q.Metrics[0].Field, "@value")
+ require.Equal(t, q.Metrics[0].ID, "1")
+ require.Equal(t, q.Metrics[0].Type, "percentiles")
+ require.False(t, q.Metrics[0].Hide)
+ require.Equal(t, q.Metrics[0].PipelineAggregate, "")
+ require.Equal(t, q.Metrics[0].Settings.Get("percents").MustStringArray()[0], "90")
- So(q.Metrics[1].Field, ShouldEqual, "select field")
- So(q.Metrics[1].ID, ShouldEqual, "4")
- So(q.Metrics[1].Type, ShouldEqual, "count")
- So(q.Metrics[1].Hide, ShouldBeFalse)
- So(q.Metrics[1].PipelineAggregate, ShouldEqual, "")
- So(q.Metrics[1].Settings.MustMap(), ShouldBeEmpty)
+ require.Equal(t, q.Metrics[1].Field, "select field")
+ require.Equal(t, q.Metrics[1].ID, "4")
+ require.Equal(t, q.Metrics[1].Type, "count")
+ require.False(t, q.Metrics[1].Hide)
+ require.Equal(t, q.Metrics[1].PipelineAggregate, "")
+ require.Empty(t, q.Metrics[1].Settings.MustMap())
- So(q.BucketAggs, ShouldHaveLength, 2)
- So(q.BucketAggs[0].Field, ShouldEqual, "@hostname")
- So(q.BucketAggs[0].ID, ShouldEqual, "3")
- So(q.BucketAggs[0].Type, ShouldEqual, "terms")
- So(q.BucketAggs[0].Settings.Get("min_doc_count").MustInt64(), ShouldEqual, 1)
- So(q.BucketAggs[0].Settings.Get("order").MustString(), ShouldEqual, "desc")
- So(q.BucketAggs[0].Settings.Get("orderBy").MustString(), ShouldEqual, "_term")
- So(q.BucketAggs[0].Settings.Get("size").MustString(), ShouldEqual, "10")
+ require.Len(t, q.BucketAggs, 2)
+ require.Equal(t, q.BucketAggs[0].Field, "@hostname")
+ require.Equal(t, q.BucketAggs[0].ID, "3")
+ require.Equal(t, q.BucketAggs[0].Type, "terms")
+ require.Equal(t, q.BucketAggs[0].Settings.Get("min_doc_count").MustInt(), 1)
+ require.Equal(t, q.BucketAggs[0].Settings.Get("order").MustString(), "desc")
+ require.Equal(t, q.BucketAggs[0].Settings.Get("orderBy").MustString(), "_term")
+ require.Equal(t, q.BucketAggs[0].Settings.Get("size").MustString(), "10")
- So(q.BucketAggs[1].Field, ShouldEqual, "@timestamp")
- So(q.BucketAggs[1].ID, ShouldEqual, "2")
- So(q.BucketAggs[1].Type, ShouldEqual, "date_histogram")
- So(q.BucketAggs[1].Settings.Get("interval").MustString(), ShouldEqual, "5m")
- So(q.BucketAggs[1].Settings.Get("min_doc_count").MustInt64(), ShouldEqual, 0)
- So(q.BucketAggs[1].Settings.Get("trimEdges").MustInt64(), ShouldEqual, 0)
+ require.Equal(t, q.BucketAggs[1].Field, "@timestamp")
+ require.Equal(t, q.BucketAggs[1].ID, "2")
+ require.Equal(t, q.BucketAggs[1].Type, "date_histogram")
+ require.Equal(t, q.BucketAggs[1].Settings.Get("interval").MustString(), "5m")
+ require.Equal(t, q.BucketAggs[1].Settings.Get("min_doc_count").MustInt(), 0)
+ require.Equal(t, q.BucketAggs[1].Settings.Get("trimEdges").MustInt(), 0)
})
})
}
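The translation above is mechanical: each Convey block becomes a t.Run subtest and each So assertion becomes a require call. One caveat worth noting: testify's require.Equal signature is (t, expected, actual), and the converted calls pass the actual value first; equality is symmetric so the tests still pass, but failure messages will label the two values in reverse. A self-contained toy example of the pattern (not part of this commit):

package elasticsearch_test

import (
	"testing"

	"github.com/stretchr/testify/require"
)

func TestConveyToTestifyPattern(t *testing.T) {
	queries := []string{"q1"}
	// Convey("...", t, func() { ... })      ->  t.Run("...", func(t *testing.T) { ... })
	t.Run("Should keep assertions equivalent", func(t *testing.T) {
		// So(queries, ShouldHaveLength, 1)  ->  require.Len(t, queries, 1)
		require.Len(t, queries, 1)
		// So(x, ShouldEqual, y)             ->  require.Equal(t, expected, actual)
		require.Equal(t, "q1", queries[0])
	})
}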

View File

@@ -13,7 +13,6 @@ import (
"github.com/grafana/grafana/pkg/setting"
"github.com/grafana/grafana/pkg/tsdb/azuremonitor"
"github.com/grafana/grafana/pkg/tsdb/cloudmonitoring"
"github.com/grafana/grafana/pkg/tsdb/elasticsearch"
"github.com/grafana/grafana/pkg/tsdb/influxdb"
"github.com/grafana/grafana/pkg/tsdb/loki"
"github.com/grafana/grafana/pkg/tsdb/mssql"
@@ -61,7 +60,6 @@ func (s *Service) Init() error {
s.registry["mssql"] = mssql.NewExecutor
s.registry["postgres"] = s.PostgresService.NewExecutor
s.registry["mysql"] = mysql.New(s.HTTPClientProvider)
s.registry["elasticsearch"] = elasticsearch.New(s.HTTPClientProvider)
s.registry["stackdriver"] = s.CloudMonitoringService.NewExecutor
s.registry["loki"] = loki.New(s.HTTPClientProvider)
s.registry["tempo"] = tempo.New(s.HTTPClientProvider)

View File

@@ -1,7 +1,7 @@
🌟 This was machine generated. Do not edit. 🌟
Frame[0]
Name:
Dimensions: 5 Fields by 10 Rows
+-------------------------------+--------------------+---------------------+-----------------+-----------------+
| Name: time | Name: lat | Name: lng | Name: heading | Name: altitude |