Prometheus: Migrate to use SDK contracts (#37358)

* Use SDK contracts (test needs fixing)

* Fix tests

* Add customQueryParametersMiddleware

* Fix merge conflicts
Dimitris Sotirakis 2021-08-09 12:11:19 +03:00 committed by GitHub
parent 2d33ddf37f
commit 27c71a1f09
4 changed files with 1097 additions and 1000 deletions
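For orientation before the diffs: this change moves the Prometheus data source off the deprecated core plugins.DataPlugin/DataQuery contract and onto the plugin SDK's backend contracts. The piece the new Service has to satisfy is the SDK's QueryDataHandler; paraphrased here from grafana-plugin-sdk-go/backend for context only, not part of the commit:

    // QueryDataHandler, as implemented by Service.QueryData below: one request
    // carrying all queries in, one response per query RefID out.
    type QueryDataHandler interface {
        QueryData(ctx context.Context, req *backend.QueryDataRequest) (*backend.QueryDataResponse, error)
    }

The diff registers a factory built with coreplugin.New(backend.ServeOpts{QueryDataHandler: ...}), so Grafana routes Prometheus queries through this handler instead of the old tsdb executor registry.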

File diff suppressed because it is too large

View File

@@ -2,18 +2,25 @@ package prometheus

 import (
     "context"
+    "encoding/json"
     "errors"
     "fmt"
     "regexp"
     "strings"
     "time"

+    "github.com/grafana/grafana-plugin-sdk-go/backend"
+    "github.com/grafana/grafana-plugin-sdk-go/backend/datasource"
+    sdkhttpclient "github.com/grafana/grafana-plugin-sdk-go/backend/httpclient"
+    "github.com/grafana/grafana-plugin-sdk-go/backend/instancemgmt"
     "github.com/grafana/grafana-plugin-sdk-go/data"
+    "github.com/grafana/grafana/pkg/components/simplejson"
     "github.com/grafana/grafana/pkg/infra/httpclient"
     "github.com/grafana/grafana/pkg/infra/log"
-    "github.com/grafana/grafana/pkg/models"
-    "github.com/grafana/grafana/pkg/plugins"
-    "github.com/grafana/grafana/pkg/tsdb/interval"
+    "github.com/grafana/grafana/pkg/plugins/backendplugin"
+    "github.com/grafana/grafana/pkg/plugins/backendplugin/coreplugin"
+    "github.com/grafana/grafana/pkg/registry"
+    "github.com/grafana/grafana/pkg/tsdb"
     "github.com/opentracing/opentracing-go"
     "github.com/prometheus/client_golang/api"
     apiv1 "github.com/prometheus/client_golang/api/prometheus/v1"
@@ -21,55 +28,104 @@ import (
 )

 var (
-    plog log.Logger
-    legendFormat *regexp.Regexp = regexp.MustCompile(`\{\{\s*(.+?)\s*\}\}`)
-    safeRes int64 = 11000
+    plog         = log.New("tsdb.prometheus")
+    legendFormat = regexp.MustCompile(`\{\{\s*(.+?)\s*\}\}`)
+    safeRes      = 11000
 )

+type DatasourceInfo struct {
+    ID             int64
+    HTTPClientOpts sdkhttpclient.Options
+    URL            string
+    HTTPMethod     string
+}
+
 func init() {
-    plog = log.New("tsdb.prometheus")
+    registry.Register(&registry.Descriptor{
+        Name:         "PrometheusService",
+        InitPriority: registry.Low,
+        Instance:     &Service{},
+    })
 }

-type PrometheusExecutor struct {
-    client apiv1.API
-
-    intervalCalculator interval.Calculator
+type Service struct {
+    BackendPluginManager backendplugin.Manager `inject:""`
+    HTTPClientProvider   httpclient.Provider   `inject:""`
+    intervalCalculator   tsdb.Calculator
+    im                   instancemgmt.InstanceManager
 }

-//nolint: staticcheck // plugins.DataPlugin deprecated
-func New(provider httpclient.Provider) func(*models.DataSource) (plugins.DataPlugin, error) {
-    return func(dsInfo *models.DataSource) (plugins.DataPlugin, error) {
-        transport, err := dsInfo.GetHTTPTransport(provider, customQueryParametersMiddleware(plog))
-        if err != nil {
-            return nil, err
-        }
-
-        cfg := api.Config{
-            Address:      dsInfo.Url,
-            RoundTripper: transport,
-        }
-
-        client, err := api.NewClient(cfg)
-        if err != nil {
-            return nil, err
-        }
-
-        return &PrometheusExecutor{
-            intervalCalculator: interval.NewCalculator(interval.CalculatorOptions{MinInterval: time.Second * 1}),
-            client:             apiv1.NewAPI(client),
-        }, nil
+func (s *Service) Init() error {
+    plog.Debug("initializing")
+    im := datasource.NewInstanceManager(newInstanceSettings())
+    factory := coreplugin.New(backend.ServeOpts{
+        QueryDataHandler: newService(im, s.HTTPClientProvider),
+    })
+    if err := s.BackendPluginManager.Register("prometheus", factory); err != nil {
+        plog.Error("Failed to register plugin", "error", err)
+    }
+    return nil
+}
+
+func newInstanceSettings() datasource.InstanceFactoryFunc {
+    return func(settings backend.DataSourceInstanceSettings) (instancemgmt.Instance, error) {
+        jsonData := map[string]interface{}{}
+        err := json.Unmarshal(settings.JSONData, &jsonData)
+        if err != nil {
+            return nil, fmt.Errorf("error reading settings: %w", err)
+        }
+
+        httpCliOpts, err := settings.HTTPClientOptions()
+        if err != nil {
+            return nil, fmt.Errorf("error getting http options: %w", err)
+        }
+
+        httpMethod, ok := jsonData["httpMethod"].(string)
+        if !ok {
+            return nil, errors.New("no http method provided")
+        }
+
+        mdl := DatasourceInfo{
+            ID:             settings.ID,
+            URL:            settings.URL,
+            HTTPClientOpts: httpCliOpts,
+            HTTPMethod:     httpMethod,
+        }
+        return mdl, nil
     }
 }

+// newService creates a new executor func.
+func newService(im instancemgmt.InstanceManager, httpClientProvider httpclient.Provider) *Service {
+    return &Service{
+        im:                 im,
+        HTTPClientProvider: httpClientProvider,
+        intervalCalculator: tsdb.NewCalculator(),
+    }
+}
+
 //nolint: staticcheck // plugins.DataResponse deprecated
-func (e *PrometheusExecutor) DataQuery(ctx context.Context, dsInfo *models.DataSource,
-    tsdbQuery plugins.DataQuery) (plugins.DataResponse, error) {
-    result := plugins.DataResponse{
-        Results: map[string]plugins.DataQueryResult{},
-    }
-
-    queries, err := e.parseQuery(dsInfo, tsdbQuery)
-    if err != nil {
-        return result, err
+func (s *Service) QueryData(ctx context.Context, req *backend.QueryDataRequest) (*backend.QueryDataResponse, error) {
+    if len(req.Queries) == 0 {
+        return &backend.QueryDataResponse{}, fmt.Errorf("query contains no queries")
+    }
+
+    dsInfo, err := s.getDSInfo(req.PluginContext)
+    if err != nil {
+        return nil, err
+    }
+
+    client, err := getClient(dsInfo, s)
+    if err != nil {
+        return nil, err
+    }
+
+    result := backend.QueryDataResponse{
+        Responses: backend.Responses{},
+    }
+
+    queries, err := s.parseQuery(req.Queries)
+    if err != nil {
+        return &result, err
     }

     for _, query := range queries {
@@ -87,20 +143,60 @@ func (e *PrometheusExecutor) DataQuery(ctx context.Context, dsInfo *models.DataSource,
         span.SetTag("stop_unixnano", query.End.UnixNano())
         defer span.Finish()

-        value, _, err := e.client.QueryRange(ctx, query.Expr, timeRange)
+        value, _, err := client.QueryRange(ctx, query.Expr, timeRange)
         if err != nil {
-            return result, err
+            return &result, err
         }

-        queryResult, err := parseResponse(value, query)
+        frame, err := parseResponse(value, query)
         if err != nil {
-            return result, err
+            return &result, err
         }
-        result.Results[query.RefId] = queryResult
+
+        result.Responses[query.RefId] = backend.DataResponse{
+            Frames: frame,
+        }
     }

-    return result, nil
+    return &result, nil
+}
+
+func getClient(dsInfo *DatasourceInfo, s *Service) (apiv1.API, error) {
+    opts := &sdkhttpclient.Options{
+        Timeouts: dsInfo.HTTPClientOpts.Timeouts,
+        TLS:      dsInfo.HTTPClientOpts.TLS,
+    }
+
+    customMiddlewares := customQueryParametersMiddleware(plog)
+    opts.Middlewares = []sdkhttpclient.Middleware{customMiddlewares}
+
+    roundTripper, err := s.HTTPClientProvider.GetTransport(*opts)
+    if err != nil {
+        return nil, err
+    }
+
+    cfg := api.Config{
+        Address:      dsInfo.URL,
+        RoundTripper: roundTripper,
+    }
+
+    client, err := api.NewClient(cfg)
+    if err != nil {
+        return nil, err
+    }
+
+    return apiv1.NewAPI(client), nil
+}
+
+func (s *Service) getDSInfo(pluginCtx backend.PluginContext) (*DatasourceInfo, error) {
+    i, err := s.im.Get(pluginCtx)
+    if err != nil {
+        return nil, err
+    }
+
+    instance := i.(DatasourceInfo)
+
+    return &instance, nil
 }

 func formatLegend(metric model.Metric, query *PrometheusQuery) string {
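getClient wires customQueryParametersMiddleware into the SDK HTTP client options, but the middleware itself is added in a file that is not shown in this diff. A rough, hypothetical sketch of the idea follows; the function name, the signature, and the way the parameter string is obtained are illustrative only, while the real implementation derives it from the data source configuration:

    package prometheus

    import (
        "net/http"
        "net/url"

        sdkhttpclient "github.com/grafana/grafana-plugin-sdk-go/backend/httpclient"
        "github.com/grafana/grafana/pkg/infra/log"
    )

    // customQueryParametersSketch appends a pre-configured query string such as
    // "custom=par/am&second=f oo" to every outgoing Prometheus request.
    func customQueryParametersSketch(logger log.Logger, customParams string) sdkhttpclient.Middleware {
        return sdkhttpclient.MiddlewareFunc(func(opts sdkhttpclient.Options, next http.RoundTripper) http.RoundTripper {
            return sdkhttpclient.RoundTripperFunc(func(req *http.Request) (*http.Response, error) {
                if customParams == "" {
                    return next.RoundTrip(req)
                }
                params, err := url.ParseQuery(customParams)
                if err != nil {
                    logger.Error("Failed to parse custom query parameters", "error", err)
                    return next.RoundTrip(req)
                }
                // A production round tripper should clone the request before mutating it.
                q := req.URL.Query()
                for key, values := range params {
                    for _, value := range values {
                        q.Add(key, value)
                    }
                }
                req.URL.RawQuery = q.Encode()
                return next.RoundTrip(req)
            })
        })
    }

With the settings used by the old test, this would turn .../api/v1/query_range into .../api/v1/query_range?custom=par%2Fam&second=f+oo, which is the behaviour the test removed further down used to assert.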
@@ -121,49 +217,47 @@ func formatLegend(metric model.Metric, query *PrometheusQuery) string {
     return string(result)
 }

-func (e *PrometheusExecutor) parseQuery(dsInfo *models.DataSource, query plugins.DataQuery) (
+func (s *Service) parseQuery(queries []backend.DataQuery) (
     []*PrometheusQuery, error) {
     var intervalMode string
     var adjustedInterval time.Duration

     qs := []*PrometheusQuery{}
-    for _, queryModel := range query.Queries {
-        expr, err := queryModel.Model.Get("expr").String()
+    for _, queryModel := range queries {
+        jsonModel, err := simplejson.NewJson(queryModel.JSON)
+        if err != nil {
+            return nil, err
+        }
+
+        expr, err := jsonModel.Get("expr").String()
         if err != nil {
             return nil, err
         }

-        format := queryModel.Model.Get("legendFormat").MustString("")
+        format := jsonModel.Get("legendFormat").MustString("")

-        start, err := query.TimeRange.ParseFrom()
-        if err != nil {
-            return nil, err
-        }
-
-        end, err := query.TimeRange.ParseTo()
-        if err != nil {
-            return nil, err
-        }
-
-        hasQueryInterval := queryModel.Model.Get("interval").MustString("") != ""
+        start := queryModel.TimeRange.From
+        end := queryModel.TimeRange.To
+        queryInterval := jsonModel.Get("interval").MustString("")
+
+        dsInterval, err := tsdb.GetIntervalFrom(queryInterval, "", 0, 15*time.Second)
+        hasQueryInterval := queryInterval != ""
+
         // Only use stepMode if we have interval in query, otherwise use "min"
         if hasQueryInterval {
-            intervalMode = queryModel.Model.Get("stepMode").MustString("min")
+            intervalMode = jsonModel.Get("stepMode").MustString("min")
         } else {
             intervalMode = "min"
         }

         // Calculate interval value from query or data source settings or use default value
-        intervalValue, err := interval.GetIntervalFrom(dsInfo, queryModel.Model, time.Second*15)
         if err != nil {
             return nil, err
         }

-        calculatedInterval, err := e.intervalCalculator.Calculate(*query.TimeRange, intervalValue, intervalMode)
+        calculatedInterval, err := s.intervalCalculator.Calculate(queries[0].TimeRange, dsInterval, tsdb.IntervalMode(intervalMode))
         if err != nil {
             return nil, err
         }

-        safeInterval := e.intervalCalculator.CalculateSafeInterval(*query.TimeRange, safeRes)
+        safeInterval := s.intervalCalculator.CalculateSafeInterval(queries[0].TimeRange, int64(safeRes))

         if calculatedInterval.Value > safeInterval.Value {
             adjustedInterval = calculatedInterval.Value

@@ -171,7 +265,7 @@ func (e *PrometheusExecutor) parseQuery(dsInfo *models.DataSource, query plugins.DataQuery) (
             adjustedInterval = safeInterval.Value
         }

-        intervalFactor := queryModel.Model.Get("intervalFactor").MustInt64(1)
+        intervalFactor := jsonModel.Get("intervalFactor").MustInt64(1)
         step := time.Duration(int64(adjustedInterval) * intervalFactor)

         qs = append(qs, &PrometheusQuery{

@@ -187,14 +281,12 @@ func (e *PrometheusExecutor) parseQuery(dsInfo *models.DataSource, query plugins.DataQuery) (
     return qs, nil
 }

-//nolint: staticcheck // plugins.DataQueryResult deprecated
-func parseResponse(value model.Value, query *PrometheusQuery) (plugins.DataQueryResult, error) {
-    var queryRes plugins.DataQueryResult
+func parseResponse(value model.Value, query *PrometheusQuery) (data.Frames, error) {
     frames := data.Frames{}

     matrix, ok := value.(model.Matrix)
     if !ok {
-        return queryRes, fmt.Errorf("unsupported result format: %q", value.Type().String())
+        return frames, fmt.Errorf("unsupported result format: %q", value.Type().String())
     }

     for _, v := range matrix {

@@ -215,9 +307,8 @@ func parseResponse(value model.Value, query *PrometheusQuery) (plugins.DataQueryResult, error) {
             data.NewField("time", nil, timeVector),
             data.NewField("value", tags, values).SetConfig(&data.FieldConfig{DisplayNameFromDS: name})))
     }

-    queryRes.Dataframes = plugins.NewDecodedDataFrames(frames)
-    return queryRes, nil
+    return frames, nil
 }

 // IsAPIError returns whether err is or wraps a Prometheus error.
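End to end, the new flow is: Grafana resolves the data source instance through the instance manager, QueryData parses each backend.DataQuery, runs it against the Prometheus client built by getClient, and keys the resulting frames by RefID. An illustrative caller-side sketch follows; ctx, s, and pluginCtx are assumed to be wired up elsewhere, and pluginCtx must resolve to a DatasourceInfo produced by newInstanceSettings:

    // Illustrative only: the request/response shape of the new contract.
    func exampleQueryData(ctx context.Context, s *Service, pluginCtx backend.PluginContext) (data.Frames, error) {
        req := &backend.QueryDataRequest{
            PluginContext: pluginCtx,
            Queries: []backend.DataQuery{{
                RefID:     "A",
                JSON:      []byte(`{"expr": "go_goroutines", "legendFormat": "{{ job }}", "intervalFactor": 1}`),
                TimeRange: backend.TimeRange{From: time.Now().Add(-12 * time.Hour), To: time.Now()},
            }},
        }

        resp, err := s.QueryData(ctx, req)
        if err != nil {
            return nil, err
        }
        // Responses are keyed by RefID; each carries the frames built by parseResponse.
        return resp.Responses["A"].Frames, nil
    }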

View File

@@ -1,40 +1,22 @@
 package prometheus

 import (
-    "context"
-    "net/http"
     "testing"
     "time"

-    sdkhttpclient "github.com/grafana/grafana-plugin-sdk-go/backend/httpclient"
-    "github.com/grafana/grafana/pkg/components/simplejson"
-    "github.com/grafana/grafana/pkg/infra/httpclient"
-    "github.com/grafana/grafana/pkg/models"
-    "github.com/grafana/grafana/pkg/plugins"
+    "github.com/grafana/grafana-plugin-sdk-go/backend"
+    "github.com/grafana/grafana-plugin-sdk-go/data"
+    "github.com/grafana/grafana/pkg/tsdb"
     p "github.com/prometheus/common/model"
     "github.com/stretchr/testify/require"
 )

+var now = time.Now()
+
 func TestPrometheus(t *testing.T) {
-    json, _ := simplejson.NewJson([]byte(`
-        { "customQueryParameters": "custom=par/am&second=f oo"}
-    `))
-    dsInfo := &models.DataSource{
-        JsonData: json,
-    }
-
-    var capturedRequest *http.Request
-    mw := sdkhttpclient.MiddlewareFunc(func(opts sdkhttpclient.Options, next http.RoundTripper) http.RoundTripper {
-        return sdkhttpclient.RoundTripperFunc(func(req *http.Request) (*http.Response, error) {
-            capturedRequest = req
-            return &http.Response{StatusCode: http.StatusOK}, nil
-        })
-    })
-    provider := httpclient.NewProvider(sdkhttpclient.ProviderOptions{
-        Middlewares: []sdkhttpclient.Middleware{mw},
-    })
-    plug, err := New(provider)(dsInfo)
-    require.NoError(t, err)
-    executor := plug.(*PrometheusExecutor)
+    service := Service{
+        intervalCalculator: tsdb.NewCalculator(),
+    }

     t.Run("converting metric name", func(t *testing.T) {
         metric := map[p.LabelName]p.LabelValue{
@@ -69,9 +51,12 @@ func TestPrometheus(t *testing.T) {
             "format": "time_series",
             "refId": "A"
         }`)
-        timerange := plugins.NewDataTimeRange("12h", "now")
-        query.TimeRange = &timerange
-        models, err := executor.parseQuery(dsInfo, query)
+        timeRange := backend.TimeRange{
+            From: now,
+            To:   now.Add(12 * time.Hour),
+        }
+        query.TimeRange = timeRange
+        models, err := service.parseQuery([]backend.DataQuery{query})
         require.NoError(t, err)
         require.Equal(t, time.Second*30, models[0].Step)
     })

@@ -84,9 +69,12 @@ func TestPrometheus(t *testing.T) {
             "stepMode": "exact",
             "interval": "7s"
         }`)
-        timerange := plugins.NewDataTimeRange("12h", "now")
-        query.TimeRange = &timerange
-        models, err := executor.parseQuery(dsInfo, query)
+        timeRange := backend.TimeRange{
+            From: now,
+            To:   now.Add(12 * time.Hour),
+        }
+        query.TimeRange = timeRange
+        models, err := service.parseQuery([]backend.DataQuery{query})
         require.NoError(t, err)
         require.Equal(t, time.Second*7, models[0].Step)
     })

@@ -99,9 +87,12 @@ func TestPrometheus(t *testing.T) {
             "stepMode": "max",
             "interval": "6s"
         }`)
-        timerange := plugins.NewDataTimeRange("12h", "now")
-        query.TimeRange = &timerange
-        models, err := executor.parseQuery(dsInfo, query)
+        timeRange := backend.TimeRange{
+            From: now,
+            To:   now.Add(12 * time.Hour),
+        }
+        query.TimeRange = timeRange
+        models, err := service.parseQuery([]backend.DataQuery{query})
         require.NoError(t, err)
         require.Equal(t, time.Second*6, models[0].Step)
     })

@@ -114,9 +105,12 @@ func TestPrometheus(t *testing.T) {
             "stepMode": "max",
             "interval": "100s"
         }`)
-        timerange := plugins.NewDataTimeRange("12h", "now")
-        query.TimeRange = &timerange
-        models, err := executor.parseQuery(dsInfo, query)
+        timeRange := backend.TimeRange{
+            From: now,
+            To:   now.Add(12 * time.Hour),
+        }
+        query.TimeRange = timeRange
+        models, err := service.parseQuery([]backend.DataQuery{query})
         require.NoError(t, err)
         require.Equal(t, time.Second*30, models[0].Step)
     })

@@ -129,9 +123,12 @@ func TestPrometheus(t *testing.T) {
             "stepMode": "max",
             "interval": "2s"
         }`)
-        timerange := plugins.NewDataTimeRange("12h", "now")
-        query.TimeRange = &timerange
-        models, err := executor.parseQuery(dsInfo, query)
+        timeRange := backend.TimeRange{
+            From: now,
+            To:   now.Add(12 * time.Hour),
+        }
+        query.TimeRange = timeRange
+        models, err := service.parseQuery([]backend.DataQuery{query})
         require.NoError(t, err)
         require.Equal(t, time.Second*5, models[0].Step)
     })
@@ -143,69 +140,59 @@ func TestPrometheus(t *testing.T) {
             "intervalFactor": 1,
             "refId": "A"
         }`)
-        models, err := executor.parseQuery(dsInfo, query)
+        models, err := service.parseQuery([]backend.DataQuery{query})
         require.NoError(t, err)
         require.Equal(t, time.Minute*2, models[0].Step)

-        timeRange := plugins.NewDataTimeRange("1h", "now")
-        query.TimeRange = &timeRange
-        models, err = executor.parseQuery(dsInfo, query)
+        timeRange := backend.TimeRange{
+            From: now,
+            To:   now.Add(1 * time.Hour),
+        }
+        query.TimeRange = timeRange
+        models, err = service.parseQuery([]backend.DataQuery{query})
         require.NoError(t, err)
         require.Equal(t, time.Second*15, models[0].Step)
     })

     t.Run("parsing query model with high intervalFactor", func(t *testing.T) {
-        models, err := executor.parseQuery(dsInfo, queryContext(`{
+        models, err := service.parseQuery([]backend.DataQuery{queryContext(`{
             "expr": "go_goroutines",
             "format": "time_series",
             "intervalFactor": 10,
             "refId": "A"
-        }`))
+        }`)})
         require.NoError(t, err)
         require.Equal(t, time.Minute*20, models[0].Step)
     })

     t.Run("parsing query model with low intervalFactor", func(t *testing.T) {
-        models, err := executor.parseQuery(dsInfo, queryContext(`{
+        models, err := service.parseQuery([]backend.DataQuery{queryContext(`{
             "expr": "go_goroutines",
             "format": "time_series",
             "intervalFactor": 1,
             "refId": "A"
-        }`))
+        }`)})
         require.NoError(t, err)
         require.Equal(t, time.Minute*2, models[0].Step)
     })
-
-    t.Run("runs query with custom params", func(t *testing.T) {
-        query := queryContext(`{
-            "expr": "go_goroutines",
-            "format": "time_series",
-            "intervalFactor": 1,
-            "refId": "A"
-        }`)
-        _, _ = executor.DataQuery(context.Background(), dsInfo, query)
-        require.NotNil(t, capturedRequest)
-        require.Equal(t, "custom=par%2Fam&second=f+oo", capturedRequest.URL.RawQuery)
-    })
 }

-func queryContext(json string) plugins.DataQuery {
-    jsonModel, _ := simplejson.NewJson([]byte(json))
-    queryModels := []plugins.DataSubQuery{
-        {Model: jsonModel},
+func queryContext(json string) backend.DataQuery {
+    timeRange := backend.TimeRange{
+        From: now,
+        To:   now.Add(48 * time.Hour),
     }

-    timeRange := plugins.NewDataTimeRange("48h", "now")
-    return plugins.DataQuery{
-        TimeRange: &timeRange,
-        Queries:   queryModels,
+    return backend.DataQuery{
+        TimeRange: timeRange,
+        RefID:     "A",
+        JSON:      []byte(json),
     }
 }

 func TestParseResponse(t *testing.T) {
     t.Run("value is not of type matrix", func(t *testing.T) {
         //nolint: staticcheck // plugins.DataQueryResult deprecated
-        queryRes := plugins.DataQueryResult{}
+        queryRes := data.Frames{}
         value := p.Vector{}
         res, err := parseResponse(value, nil)
@@ -233,19 +220,18 @@ func TestParseResponse(t *testing.T) {
         res, err := parseResponse(value, query)
         require.NoError(t, err)

-        decoded, _ := res.Dataframes.Decoded()
-        require.Len(t, decoded, 1)
-        require.Equal(t, decoded[0].Name, "legend Application")
-        require.Len(t, decoded[0].Fields, 2)
-        require.Len(t, decoded[0].Fields[0].Labels, 0)
-        require.Equal(t, decoded[0].Fields[0].Name, "time")
-        require.Len(t, decoded[0].Fields[1].Labels, 2)
-        require.Equal(t, decoded[0].Fields[1].Labels.String(), "app=Application, tag2=tag2")
-        require.Equal(t, decoded[0].Fields[1].Name, "value")
-        require.Equal(t, decoded[0].Fields[1].Config.DisplayNameFromDS, "legend Application")
+        require.Len(t, res, 1)
+        require.Equal(t, res[0].Name, "legend Application")
+        require.Len(t, res[0].Fields, 2)
+        require.Len(t, res[0].Fields[0].Labels, 0)
+        require.Equal(t, res[0].Fields[0].Name, "time")
+        require.Len(t, res[0].Fields[1].Labels, 2)
+        require.Equal(t, res[0].Fields[1].Labels.String(), "app=Application, tag2=tag2")
+        require.Equal(t, res[0].Fields[1].Name, "value")
+        require.Equal(t, res[0].Fields[1].Config.DisplayNameFromDS, "legend Application")

         // Ensure the timestamps are UTC zoned
-        testValue := decoded[0].Fields[0].At(0)
+        testValue := res[0].Fields[0].At(0)
         require.Equal(t, "UTC", testValue.(time.Time).Location().String())
     })
 }
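The old "runs query with custom params" test disappears from this file because the executor no longer builds its own transport; any replacement has to go through newInstanceSettings and getClient instead. A hypothetical sketch of a test that could sit alongside the ones above, exercising the new instance factory directly (not part of this commit; it relies only on the imports this file already has):

    func TestInstanceSettingsSketch(t *testing.T) {
        factory := newInstanceSettings()

        // Settings without an httpMethod should surface the factory's error.
        _, err := factory(backend.DataSourceInstanceSettings{JSONData: []byte(`{}`)})
        require.Error(t, err)

        // A well-formed settings payload yields the package's DatasourceInfo.
        instance, err := factory(backend.DataSourceInstanceSettings{
            ID:       1,
            URL:      "http://localhost:9090",
            JSONData: []byte(`{"httpMethod": "GET", "customQueryParameters": "custom=par/am&second=f oo"}`),
        })
        require.NoError(t, err)

        dsInfo := instance.(DatasourceInfo)
        require.Equal(t, "GET", dsInfo.HTTPMethod)
        require.Equal(t, "http://localhost:9090", dsInfo.URL)
    }

Re-asserting the custom query parameter behaviour itself would additionally need a capturing middleware on the httpclient provider, as the removed test had, plus whatever configuration path the new customQueryParametersMiddleware expects, which is outside this diff.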

View File

@@ -16,7 +16,6 @@ import (
     "github.com/grafana/grafana/pkg/tsdb/mssql"
     "github.com/grafana/grafana/pkg/tsdb/mysql"
     "github.com/grafana/grafana/pkg/tsdb/postgres"
-    "github.com/grafana/grafana/pkg/tsdb/prometheus"
 )

 // NewService returns a new Service.

@@ -52,7 +51,6 @@ type Service struct {
 // Init initialises the service.
 func (s *Service) Init() error {
-    s.registry["prometheus"] = prometheus.New(s.HTTPClientProvider)
     s.registry["mssql"] = mssql.NewExecutor
     s.registry["postgres"] = s.PostgresService.NewExecutor
     s.registry["mysql"] = mysql.New(s.HTTPClientProvider)