Chore: Refactor OpenTSDB using backend SDK (#35815)

* Refactor OpenTSDB using backend SDK

* Adjust tests to the SDK refactor

* Remove openTSDB from service

* Rename OpenTSDB service to Service, use AuthPassword from DecryptedSecureJson

* Devenv: Add opentsdb v2.3 data source and dashboard

* Letting the HTTP client provider set basic auth,
renaming,
casting the datasource directly to a pointer

* Update pkg/tsdb/opentsdb/opentsdb.go

Co-authored-by: Marcus Efraimsson <marcus.efraimsson@gmail.com>

* Update pkg/tsdb/opentsdb/opentsdb.go

Co-authored-by: Marcus Efraimsson <marcus.efraimsson@gmail.com>

* Format struct

Co-authored-by: Marcus Efraimsson <marcus.efraimsson@gmail.com>
This commit is contained in:
idafurjes 2021-06-30 15:58:44 +02:00 committed by GitHub
parent a690d0f803
commit 7231eba6a5
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23
5 changed files with 475 additions and 156 deletions

View File

@@ -84,6 +84,14 @@ datasources:
tsdbResolution: 1 tsdbResolution: 1
tsdbVersion: 1 tsdbVersion: 1
- name: gdev-opentsdb-v2.3
type: opentsdb
access: proxy
url: http://localhost:4242
jsonData:
tsdbResolution: 1
tsdbVersion: 3
- name: gdev-elasticsearch-v2-metrics - name: gdev-elasticsearch-v2-metrics
type: elasticsearch type: elasticsearch
access: proxy access: proxy

View File

@@ -0,0 +1,258 @@
{
"annotations": {
"list": [
{
"builtIn": 1,
"datasource": "-- Grafana --",
"enable": true,
"hide": true,
"iconColor": "rgba(0, 211, 255, 1)",
"name": "Annotations & Alerts",
"type": "dashboard"
}
]
},
"editable": true,
"gnetId": null,
"graphTooltip": 0,
"id": 3151,
"links": [],
"panels": [
{
"aliasColors": {},
"bars": false,
"dashLength": 10,
"dashes": false,
"datasource": "gdev-opentsdb-v2.3",
"fieldConfig": {
"defaults": {
"links": []
},
"overrides": []
},
"fill": 1,
"fillGradient": 0,
"gridPos": {
"h": 9,
"w": 12,
"x": 0,
"y": 0
},
"hiddenSeries": false,
"id": 2,
"legend": {
"avg": false,
"current": false,
"max": false,
"min": false,
"show": true,
"total": false,
"values": false
},
"lines": true,
"linewidth": 1,
"nullPointMode": "null",
"options": {
"alertThreshold": true
},
"percentage": false,
"pluginVersion": "8.1.0-pre",
"pointradius": 2,
"points": false,
"renderer": "flot",
"seriesOverrides": [],
"spaceLength": 10,
"stack": false,
"steppedLine": false,
"targets": [
{
"aggregator": "sum",
"alias": "$tag_hostname",
"currentFilterGroupBy": false,
"currentFilterKey": "",
"currentFilterType": "literal_or",
"currentFilterValue": "",
"disableDownsampling": false,
"downsampleAggregator": "avg",
"downsampleFillPolicy": "none",
"explicitTags": false,
"filters": [
{
"filter": "*",
"groupBy": true,
"tagk": "hostname",
"type": "wildcard"
}
],
"metric": "cpu",
"refId": "A",
"shouldComputeRate": false
}
],
"thresholds": [],
"timeFrom": null,
"timeRegions": [],
"timeShift": null,
"title": "CPU per host",
"tooltip": {
"shared": true,
"sort": 0,
"value_type": "individual"
},
"type": "graph",
"xaxis": {
"buckets": null,
"mode": "time",
"name": null,
"show": true,
"values": []
},
"yaxes": [
{
"format": "short",
"label": null,
"logBase": 1,
"max": null,
"min": null,
"show": true
},
{
"format": "short",
"label": null,
"logBase": 1,
"max": null,
"min": null,
"show": true
}
],
"yaxis": {
"align": false,
"alignLevel": null
}
},
{
"aliasColors": {},
"bars": false,
"dashLength": 10,
"dashes": false,
"datasource": "gdev-opentsdb-v2.3",
"fieldConfig": {
"defaults": {
"links": []
},
"overrides": []
},
"fill": 1,
"fillGradient": 0,
"gridPos": {
"h": 9,
"w": 12,
"x": 12,
"y": 0
},
"hiddenSeries": false,
"id": 4,
"legend": {
"avg": false,
"current": false,
"max": false,
"min": false,
"show": true,
"total": false,
"values": false
},
"lines": true,
"linewidth": 1,
"nullPointMode": "null",
"options": {
"alertThreshold": true
},
"percentage": false,
"pluginVersion": "8.1.0-pre",
"pointradius": 2,
"points": false,
"renderer": "flot",
"seriesOverrides": [],
"spaceLength": 10,
"stack": false,
"steppedLine": false,
"targets": [
{
"aggregator": "sum",
"alias": "$tag_hostname",
"currentFilterGroupBy": false,
"currentFilterKey": "",
"currentFilterType": "literal_or",
"currentFilterValue": "",
"downsampleAggregator": "avg",
"downsampleFillPolicy": "none",
"filters": [
{
"filter": "*",
"groupBy": true,
"tagk": "hostname",
"type": "wildcard"
}
],
"metric": "logins.count",
"refId": "A"
}
],
"thresholds": [],
"timeFrom": null,
"timeRegions": [],
"timeShift": null,
"title": "Login Count per host",
"tooltip": {
"shared": true,
"sort": 0,
"value_type": "individual"
},
"type": "graph",
"xaxis": {
"buckets": null,
"mode": "time",
"name": null,
"show": true,
"values": []
},
"yaxes": [
{
"format": "short",
"label": null,
"logBase": 1,
"max": null,
"min": null,
"show": true
},
{
"format": "short",
"label": null,
"logBase": 1,
"max": null,
"min": null,
"show": true
}
],
"yaxis": {
"align": false,
"alignLevel": null
}
}
],
"schemaVersion": 30,
"style": "dark",
"tags": [],
"templating": {
"list": []
},
"time": {
"from": "now-1h",
"to": "now"
},
"timepicker": {},
"timezone": "",
"title": "Datasource tests - OpenTSDB v2.3",
"uid": "rZRUGik7k",
"version": 3
}

View File

@@ -3,37 +3,81 @@ package opentsdb
import ( import (
"context" "context"
"fmt" "fmt"
"io/ioutil"
"net/http"
"net/url"
"path" "path"
"strconv" "strconv"
"strings" "strings"
"time" "time"
"golang.org/x/net/context/ctxhttp"
"encoding/json" "encoding/json"
"io/ioutil"
"net/http"
"net/url"
"github.com/grafana/grafana-plugin-sdk-go/backend"
"github.com/grafana/grafana-plugin-sdk-go/backend/datasource"
"github.com/grafana/grafana-plugin-sdk-go/backend/instancemgmt"
"github.com/grafana/grafana-plugin-sdk-go/data" "github.com/grafana/grafana-plugin-sdk-go/data"
"github.com/grafana/grafana/pkg/components/simplejson"
"github.com/grafana/grafana/pkg/infra/httpclient" "github.com/grafana/grafana/pkg/infra/httpclient"
"github.com/grafana/grafana/pkg/infra/log" "github.com/grafana/grafana/pkg/infra/log"
"github.com/grafana/grafana/pkg/models" "github.com/grafana/grafana/pkg/plugins/backendplugin"
"github.com/grafana/grafana/pkg/plugins" "github.com/grafana/grafana/pkg/plugins/backendplugin/coreplugin"
"github.com/grafana/grafana/pkg/registry"
"github.com/grafana/grafana/pkg/setting" "github.com/grafana/grafana/pkg/setting"
"golang.org/x/net/context/ctxhttp"
) )
type OpenTsdbExecutor struct { type Service struct {
httpClientProvider httpclient.Provider HTTPClientProvider httpclient.Provider `inject:""`
Cfg *setting.Cfg `inject:""`
BackendPluginManager backendplugin.Manager `inject:""`
im instancemgmt.InstanceManager
} }
//nolint: staticcheck // plugins.DataPlugin deprecated type datasourceInfo struct {
func New(httpClientProvider httpclient.Provider) func(*models.DataSource) (plugins.DataPlugin, error) { HTTPClient *http.Client
//nolint: staticcheck // plugins.DataPlugin deprecated URL string
return func(*models.DataSource) (plugins.DataPlugin, error) { }
return &OpenTsdbExecutor{
httpClientProvider: httpClientProvider, type DsAccess string
}, nil
func init() {
registry.Register(&registry.Descriptor{Instance: &Service{}})
}
func (s *Service) Init() error {
s.im = datasource.NewInstanceManager(newInstanceSettings(s.HTTPClientProvider))
factory := coreplugin.New(backend.ServeOpts{
QueryDataHandler: s,
})
if err := s.BackendPluginManager.Register("opentsdb", factory); err != nil {
plog.Error("Failed to register plugin", "error", err)
}
return nil
}
func newInstanceSettings(httpClientProvider httpclient.Provider) datasource.InstanceFactoryFunc {
return func(settings backend.DataSourceInstanceSettings) (instancemgmt.Instance, error) {
opts, err := settings.HTTPClientOptions()
if err != nil {
return nil, err
}
client, err := httpClientProvider.New(opts)
if err != nil {
return nil, err
}
model := &datasourceInfo{
HTTPClient: client,
URL: settings.URL,
}
return model, nil
} }
} }
@@ -41,16 +85,16 @@
plog = log.New("tsdb.opentsdb") plog = log.New("tsdb.opentsdb")
) )
// nolint:staticcheck // plugins.DataQueryResult deprecated func (s *Service) QueryData(ctx context.Context, req *backend.QueryDataRequest) (*backend.QueryDataResponse, error) {
func (e *OpenTsdbExecutor) DataQuery(ctx context.Context, dsInfo *models.DataSource,
queryContext plugins.DataQuery) (plugins.DataResponse, error) {
var tsdbQuery OpenTsdbQuery var tsdbQuery OpenTsdbQuery
tsdbQuery.Start = queryContext.TimeRange.GetFromAsMsEpoch() q := req.Queries[0]
tsdbQuery.End = queryContext.TimeRange.GetToAsMsEpoch()
for _, query := range queryContext.Queries { tsdbQuery.Start = q.TimeRange.From.UnixNano() / int64(time.Millisecond)
metric := e.buildMetric(query) tsdbQuery.End = q.TimeRange.To.UnixNano() / int64(time.Millisecond)
for _, query := range req.Queries {
metric := s.buildMetric(query)
tsdbQuery.Queries = append(tsdbQuery.Queries, metric) tsdbQuery.Queries = append(tsdbQuery.Queries, metric)
} }
@@ -59,33 +103,31 @@ func (e *OpenTsdbExecutor) DataQuery(ctx context.Context, dsInfo *models.DataSou
plog.Debug("OpenTsdb request", "params", tsdbQuery) plog.Debug("OpenTsdb request", "params", tsdbQuery)
} }
req, err := e.createRequest(dsInfo, tsdbQuery) dsInfo, err := s.getDSInfo(req.PluginContext)
if err != nil { if err != nil {
return plugins.DataResponse{}, err return nil, err
} }
httpClient, err := dsInfo.GetHTTPClient(e.httpClientProvider) request, err := s.createRequest(dsInfo, tsdbQuery)
if err != nil { if err != nil {
return plugins.DataResponse{}, err return &backend.QueryDataResponse{}, err
} }
res, err := ctxhttp.Do(ctx, httpClient, req) res, err := ctxhttp.Do(ctx, dsInfo.HTTPClient, request)
if err != nil { if err != nil {
return plugins.DataResponse{}, err return &backend.QueryDataResponse{}, err
} }
queryResult, err := e.parseResponse(tsdbQuery, res) result, err := s.parseResponse(res)
if err != nil { if err != nil {
return plugins.DataResponse{}, err return &backend.QueryDataResponse{}, err
} }
return plugins.DataResponse{ return result, nil
Results: queryResult,
}, nil
} }
func (e *OpenTsdbExecutor) createRequest(dsInfo *models.DataSource, data OpenTsdbQuery) (*http.Request, error) { func (s *Service) createRequest(dsInfo *datasourceInfo, data OpenTsdbQuery) (*http.Request, error) {
u, err := url.Parse(dsInfo.Url) u, err := url.Parse(dsInfo.URL)
if err != nil { if err != nil {
return nil, err return nil, err
} }
@@ -104,17 +146,11 @@ func (e *OpenTsdbExecutor) createRequest(dsInfo *models.DataSource, data OpenTsd
} }
req.Header.Set("Content-Type", "application/json") req.Header.Set("Content-Type", "application/json")
if dsInfo.BasicAuth {
req.SetBasicAuth(dsInfo.BasicAuthUser, dsInfo.DecryptedBasicAuthPassword())
}
return req, nil return req, nil
} }
// nolint:staticcheck // plugins.DataQueryResult deprecated func (s *Service) parseResponse(res *http.Response) (*backend.QueryDataResponse, error) {
func (e *OpenTsdbExecutor) parseResponse(query OpenTsdbQuery, res *http.Response) (map[string]plugins.DataQueryResult, error) { resp := backend.NewQueryDataResponse()
queryResults := make(map[string]plugins.DataQueryResult)
queryRes := plugins.DataQueryResult{}
body, err := ioutil.ReadAll(res.Body) body, err := ioutil.ReadAll(res.Body)
if err != nil { if err != nil {
@@ -157,45 +193,51 @@ func (e *OpenTsdbExecutor) parseResponse(query OpenTsdbQuery, res *http.Response
data.NewField("time", nil, timeVector), data.NewField("time", nil, timeVector),
data.NewField("value", nil, values))) data.NewField("value", nil, values)))
} }
queryRes.Dataframes = plugins.NewDecodedDataFrames(frames) result := resp.Responses["A"]
queryResults["A"] = queryRes result.Frames = frames
return queryResults, nil resp.Responses["A"] = result
return resp, nil
} }
func (e *OpenTsdbExecutor) buildMetric(query plugins.DataSubQuery) map[string]interface{} { func (s *Service) buildMetric(query backend.DataQuery) map[string]interface{} {
metric := make(map[string]interface{}) metric := make(map[string]interface{})
model, err := simplejson.NewJson(query.JSON)
if err != nil {
return nil
}
// Setting metric and aggregator // Setting metric and aggregator
metric["metric"] = query.Model.Get("metric").MustString() metric["metric"] = model.Get("metric").MustString()
metric["aggregator"] = query.Model.Get("aggregator").MustString() metric["aggregator"] = model.Get("aggregator").MustString()
// Setting downsampling options // Setting downsampling options
disableDownsampling := query.Model.Get("disableDownsampling").MustBool() disableDownsampling := model.Get("disableDownsampling").MustBool()
if !disableDownsampling { if !disableDownsampling {
downsampleInterval := query.Model.Get("downsampleInterval").MustString() downsampleInterval := model.Get("downsampleInterval").MustString()
if downsampleInterval == "" { if downsampleInterval == "" {
downsampleInterval = "1m" // default value for blank downsampleInterval = "1m" // default value for blank
} }
downsample := downsampleInterval + "-" + query.Model.Get("downsampleAggregator").MustString() downsample := downsampleInterval + "-" + model.Get("downsampleAggregator").MustString()
if query.Model.Get("downsampleFillPolicy").MustString() != "none" { if model.Get("downsampleFillPolicy").MustString() != "none" {
metric["downsample"] = downsample + "-" + query.Model.Get("downsampleFillPolicy").MustString() metric["downsample"] = downsample + "-" + model.Get("downsampleFillPolicy").MustString()
} else { } else {
metric["downsample"] = downsample metric["downsample"] = downsample
} }
} }
// Setting rate options // Setting rate options
if query.Model.Get("shouldComputeRate").MustBool() { if model.Get("shouldComputeRate").MustBool() {
metric["rate"] = true metric["rate"] = true
rateOptions := make(map[string]interface{}) rateOptions := make(map[string]interface{})
rateOptions["counter"] = query.Model.Get("isCounter").MustBool() rateOptions["counter"] = model.Get("isCounter").MustBool()
counterMax, counterMaxCheck := query.Model.CheckGet("counterMax") counterMax, counterMaxCheck := model.CheckGet("counterMax")
if counterMaxCheck { if counterMaxCheck {
rateOptions["counterMax"] = counterMax.MustFloat64() rateOptions["counterMax"] = counterMax.MustFloat64()
} }
resetValue, resetValueCheck := query.Model.CheckGet("counterResetValue") resetValue, resetValueCheck := model.CheckGet("counterResetValue")
if resetValueCheck { if resetValueCheck {
rateOptions["resetValue"] = resetValue.MustFloat64() rateOptions["resetValue"] = resetValue.MustFloat64()
} }
@@ -208,16 +250,30 @@ func (e *OpenTsdbExecutor) buildMetric(query plugins.DataSubQuery) map[string]in
} }
// Setting tags // Setting tags
tags, tagsCheck := query.Model.CheckGet("tags") tags, tagsCheck := model.CheckGet("tags")
if tagsCheck && len(tags.MustMap()) > 0 { if tagsCheck && len(tags.MustMap()) > 0 {
metric["tags"] = tags.MustMap() metric["tags"] = tags.MustMap()
} }
// Setting filters // Setting filters
filters, filtersCheck := query.Model.CheckGet("filters") filters, filtersCheck := model.CheckGet("filters")
if filtersCheck && len(filters.MustArray()) > 0 { if filtersCheck && len(filters.MustArray()) > 0 {
metric["filters"] = filters.MustArray() metric["filters"] = filters.MustArray()
} }
return metric return metric
} }
func (s *Service) getDSInfo(pluginCtx backend.PluginContext) (*datasourceInfo, error) {
i, err := s.im.Get(pluginCtx)
if err != nil {
return nil, err
}
instance, ok := i.(*datasourceInfo)
if !ok {
return nil, fmt.Errorf("failed to cast datasource info")
}
return instance, nil
}

View File

@@ -8,19 +8,17 @@ import (
"time" "time"
"github.com/google/go-cmp/cmp" "github.com/google/go-cmp/cmp"
"github.com/grafana/grafana-plugin-sdk-go/backend"
"github.com/grafana/grafana-plugin-sdk-go/data" "github.com/grafana/grafana-plugin-sdk-go/data"
"github.com/grafana/grafana/pkg/components/simplejson"
"github.com/grafana/grafana/pkg/models"
"github.com/grafana/grafana/pkg/plugins"
"github.com/stretchr/testify/assert" "github.com/stretchr/testify/assert"
"github.com/stretchr/testify/require" "github.com/stretchr/testify/require"
) )
func TestOpenTsdbExecutor(t *testing.T) { func TestOpenTsdbExecutor(t *testing.T) {
exec := &OpenTsdbExecutor{} service := &Service{}
t.Run("create request", func(t *testing.T) { t.Run("create request", func(t *testing.T) {
req, err := exec.createRequest(&models.DataSource{}, OpenTsdbQuery{}) req, err := service.createRequest(&datasourceInfo{}, OpenTsdbQuery{})
require.NoError(t, err) require.NoError(t, err)
assert.Equal(t, "POST", req.Method) assert.Equal(t, "POST", req.Method)
@@ -34,10 +32,8 @@ func TestOpenTsdbExecutor(t *testing.T) {
t.Run("Parse response should handle invalid JSON", func(t *testing.T) { t.Run("Parse response should handle invalid JSON", func(t *testing.T) {
response := `{ invalid }` response := `{ invalid }`
query := OpenTsdbQuery{} result, err := service.parseResponse(&http.Response{Body: ioutil.NopCloser(strings.NewReader(response))})
require.Nil(t, result)
result, err := exec.parseResponse(query, &http.Response{Body: ioutil.NopCloser(strings.NewReader(response))})
require.Nil(t, result["A"].Dataframes)
require.Error(t, err) require.Error(t, err)
}) })
@@ -60,37 +56,33 @@ func TestOpenTsdbExecutor(t *testing.T) {
50}), 50}),
) )
query := OpenTsdbQuery{}
resp := http.Response{Body: ioutil.NopCloser(strings.NewReader(response))} resp := http.Response{Body: ioutil.NopCloser(strings.NewReader(response))}
resp.StatusCode = 200 resp.StatusCode = 200
result, err := exec.parseResponse(query, &resp) result, err := service.parseResponse(&resp)
require.NoError(t, err) require.NoError(t, err)
decoded, err := result["A"].Dataframes.Decoded() frame := result.Responses["A"]
require.NoError(t, err)
require.Len(t, decoded, 1)
frame := decoded[0] if diff := cmp.Diff(testFrame, frame.Frames[0], data.FrameTestCompareOptions()...); diff != "" {
if diff := cmp.Diff(testFrame, frame, data.FrameTestCompareOptions()...); diff != "" {
t.Errorf("Result mismatch (-want +got):\n%s", diff) t.Errorf("Result mismatch (-want +got):\n%s", diff)
} }
}) })
t.Run("Build metric with downsampling enabled", func(t *testing.T) { t.Run("Build metric with downsampling enabled", func(t *testing.T) {
query := plugins.DataSubQuery{ query := backend.DataQuery{
Model: simplejson.New(), JSON: []byte(`
{
"metric": "cpu.average.percent",
"aggregator": "avg",
"disableDownsampling": false,
"downsampleInterval": "",
"downsampleAggregator": "avg",
"downsampleFillPolicy": "none"
}`,
),
} }
query.Model.Set("metric", "cpu.average.percent") metric := service.buildMetric(query)
query.Model.Set("aggregator", "avg")
query.Model.Set("disableDownsampling", false)
query.Model.Set("downsampleInterval", "")
query.Model.Set("downsampleAggregator", "avg")
query.Model.Set("downsampleFillPolicy", "none")
metric := exec.buildMetric(query)
require.Len(t, metric, 3) require.Len(t, metric, 3)
require.Equal(t, "cpu.average.percent", metric["metric"]) require.Equal(t, "cpu.average.percent", metric["metric"])
@@ -99,18 +91,20 @@ func TestOpenTsdbExecutor(t *testing.T) {
}) })
t.Run("Build metric with downsampling disabled", func(t *testing.T) { t.Run("Build metric with downsampling disabled", func(t *testing.T) {
query := plugins.DataSubQuery{ query := backend.DataQuery{
Model: simplejson.New(), JSON: []byte(`
{
"metric": "cpu.average.percent",
"aggregator": "avg",
"disableDownsampling": true,
"downsampleInterval": "",
"downsampleAggregator": "avg",
"downsampleFillPolicy": "none"
}`,
),
} }
query.Model.Set("metric", "cpu.average.percent") metric := service.buildMetric(query)
query.Model.Set("aggregator", "avg")
query.Model.Set("disableDownsampling", true)
query.Model.Set("downsampleInterval", "")
query.Model.Set("downsampleAggregator", "avg")
query.Model.Set("downsampleFillPolicy", "none")
metric := exec.buildMetric(query)
require.Len(t, metric, 2) require.Len(t, metric, 2)
require.Equal(t, "cpu.average.percent", metric["metric"]) require.Equal(t, "cpu.average.percent", metric["metric"])
@@ -118,18 +112,20 @@ func TestOpenTsdbExecutor(t *testing.T) {
}) })
t.Run("Build metric with downsampling enabled with params", func(t *testing.T) { t.Run("Build metric with downsampling enabled with params", func(t *testing.T) {
query := plugins.DataSubQuery{ query := backend.DataQuery{
Model: simplejson.New(), JSON: []byte(`
{
"metric": "cpu.average.percent",
"aggregator": "avg",
"disableDownsampling": false,
"downsampleInterval": "5m",
"downsampleAggregator": "sum",
"downsampleFillPolicy": "null"
}`,
),
} }
query.Model.Set("metric", "cpu.average.percent") metric := service.buildMetric(query)
query.Model.Set("aggregator", "avg")
query.Model.Set("disableDownsampling", false)
query.Model.Set("downsampleInterval", "5m")
query.Model.Set("downsampleAggregator", "sum")
query.Model.Set("downsampleFillPolicy", "null")
metric := exec.buildMetric(query)
require.Len(t, metric, 3) require.Len(t, metric, 3)
require.Equal(t, "cpu.average.percent", metric["metric"]) require.Equal(t, "cpu.average.percent", metric["metric"])
@@ -138,23 +134,24 @@ func TestOpenTsdbExecutor(t *testing.T) {
}) })
t.Run("Build metric with tags with downsampling disabled", func(t *testing.T) { t.Run("Build metric with tags with downsampling disabled", func(t *testing.T) {
query := plugins.DataSubQuery{ query := backend.DataQuery{
Model: simplejson.New(), JSON: []byte(`
{
"metric": "cpu.average.percent",
"aggregator": "avg",
"disableDownsampling": true,
"downsampleInterval": "5m",
"downsampleAggregator": "sum",
"downsampleFillPolicy": "null",
"tags": {
"env": "prod",
"app": "grafana"
}
}`,
),
} }
query.Model.Set("metric", "cpu.average.percent") metric := service.buildMetric(query)
query.Model.Set("aggregator", "avg")
query.Model.Set("disableDownsampling", true)
query.Model.Set("downsampleInterval", "5m")
query.Model.Set("downsampleAggregator", "sum")
query.Model.Set("downsampleFillPolicy", "null")
tags := simplejson.New()
tags.Set("env", "prod")
tags.Set("app", "grafana")
query.Model.Set("tags", tags.MustMap())
metric := exec.buildMetric(query)
require.Len(t, metric, 3) require.Len(t, metric, 3)
require.Equal(t, "cpu.average.percent", metric["metric"]) require.Equal(t, "cpu.average.percent", metric["metric"])
@@ -169,22 +166,23 @@ func TestOpenTsdbExecutor(t *testing.T) {
}) })
t.Run("Build metric with rate enabled but counter disabled", func(t *testing.T) { t.Run("Build metric with rate enabled but counter disabled", func(t *testing.T) {
query := plugins.DataSubQuery{ query := backend.DataQuery{
Model: simplejson.New(), JSON: []byte(`
{
"metric": "cpu.average.percent",
"aggregator": "avg",
"disableDownsampling": true,
"shouldComputeRate": true,
"isCounter": false,
"tags": {
"env": "prod",
"app": "grafana"
}
}`,
),
} }
query.Model.Set("metric", "cpu.average.percent") metric := service.buildMetric(query)
query.Model.Set("aggregator", "avg")
query.Model.Set("disableDownsampling", true)
query.Model.Set("shouldComputeRate", true)
query.Model.Set("isCounter", false)
tags := simplejson.New()
tags.Set("env", "prod")
tags.Set("app", "grafana")
query.Model.Set("tags", tags.MustMap())
metric := exec.buildMetric(query)
require.Len(t, metric, 5) require.Len(t, metric, 5)
require.Equal(t, "cpu.average.percent", metric["metric"]) require.Equal(t, "cpu.average.percent", metric["metric"])
@@ -201,24 +199,25 @@ func TestOpenTsdbExecutor(t *testing.T) {
}) })
t.Run("Build metric with rate and counter enabled", func(t *testing.T) { t.Run("Build metric with rate and counter enabled", func(t *testing.T) {
query := plugins.DataSubQuery{ query := backend.DataQuery{
Model: simplejson.New(), JSON: []byte(`
{
"metric": "cpu.average.percent",
"aggregator": "avg",
"disableDownsampling": true,
"shouldComputeRate": true,
"isCounter": true,
"counterMax": 45,
"counterResetValue": 60,
"tags": {
"env": "prod",
"app": "grafana"
}
}`,
),
} }
query.Model.Set("metric", "cpu.average.percent") metric := service.buildMetric(query)
query.Model.Set("aggregator", "avg")
query.Model.Set("disableDownsampling", true)
query.Model.Set("shouldComputeRate", true)
query.Model.Set("isCounter", true)
query.Model.Set("counterMax", 45)
query.Model.Set("counterResetValue", 60)
tags := simplejson.New()
tags.Set("env", "prod")
tags.Set("app", "grafana")
query.Model.Set("tags", tags.MustMap())
metric := exec.buildMetric(query)
require.Len(t, metric, 5) require.Len(t, metric, 5)
require.Equal(t, "cpu.average.percent", metric["metric"]) require.Equal(t, "cpu.average.percent", metric["metric"])

View File

@@ -18,7 +18,6 @@ import (
"github.com/grafana/grafana/pkg/tsdb/loki" "github.com/grafana/grafana/pkg/tsdb/loki"
"github.com/grafana/grafana/pkg/tsdb/mssql" "github.com/grafana/grafana/pkg/tsdb/mssql"
"github.com/grafana/grafana/pkg/tsdb/mysql" "github.com/grafana/grafana/pkg/tsdb/mysql"
"github.com/grafana/grafana/pkg/tsdb/opentsdb"
"github.com/grafana/grafana/pkg/tsdb/postgres" "github.com/grafana/grafana/pkg/tsdb/postgres"
"github.com/grafana/grafana/pkg/tsdb/prometheus" "github.com/grafana/grafana/pkg/tsdb/prometheus"
"github.com/grafana/grafana/pkg/tsdb/tempo" "github.com/grafana/grafana/pkg/tsdb/tempo"
@@ -57,7 +56,6 @@ type Service struct {
// Init initialises the service. // Init initialises the service.
func (s *Service) Init() error { func (s *Service) Init() error {
s.registry["graphite"] = graphite.New(s.HTTPClientProvider) s.registry["graphite"] = graphite.New(s.HTTPClientProvider)
s.registry["opentsdb"] = opentsdb.New(s.HTTPClientProvider)
s.registry["prometheus"] = prometheus.New(s.HTTPClientProvider) s.registry["prometheus"] = prometheus.New(s.HTTPClientProvider)
s.registry["influxdb"] = influxdb.New(s.HTTPClientProvider) s.registry["influxdb"] = influxdb.New(s.HTTPClientProvider)
s.registry["mssql"] = mssql.NewExecutor s.registry["mssql"] = mssql.NewExecutor