Chore: Refactor usage of legacy data contracts (#41218)

Refactors usage of legacy data contracts, moving them to the
pkg/tsdb/legacydata package.
Refactors pkg/expr into a proper service/dependency that can be provided
through wire, removing some unneeded dependencies on SSE (server-side expressions) in ngalert and other places.
Refactors pkg/expr to use backend.QueryDataHandler instead of the legacy
legacydata.RequestHandler.
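
For context, the two contracts involved look roughly like this (a sketch pieced together from the diff below; RequestHandler is the legacy interface moved from pkg/plugins into pkg/tsdb/legacydata, and QueryDataHandler is the plugin SDK interface that pkg/expr now targets):

// Legacy contract, now in pkg/tsdb/legacydata (formerly plugins.DataRequestHandler);
// models.DataSource, DataQuery and DataResponse come from the Grafana codebase.
type RequestHandler interface {
	// HandleRequest handles a data request.
	HandleRequest(context.Context, *models.DataSource, DataQuery) (DataResponse, error)
}

// SDK contract from github.com/grafana/grafana-plugin-sdk-go/backend;
// pkg/expr now executes queries through this instead of the legacy handler.
type QueryDataHandler interface {
	QueryData(ctx context.Context, req *QueryDataRequest) (*QueryDataResponse, error)
}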
Marcus Efraimsson 2021-11-10 11:52:16 +01:00 committed by GitHub
parent d6ed5d295e
commit baab021fec
54 changed files with 732 additions and 951 deletions

View File

@ -17,6 +17,7 @@ import (
httpstatic "github.com/grafana/grafana/pkg/api/static"
"github.com/grafana/grafana/pkg/bus"
"github.com/grafana/grafana/pkg/components/simplejson"
"github.com/grafana/grafana/pkg/expr"
"github.com/grafana/grafana/pkg/infra/localcache"
"github.com/grafana/grafana/pkg/infra/log"
"github.com/grafana/grafana/pkg/infra/metrics"
@ -54,7 +55,7 @@ import (
"github.com/grafana/grafana/pkg/services/sqlstore"
"github.com/grafana/grafana/pkg/services/updatechecker"
"github.com/grafana/grafana/pkg/setting"
"github.com/grafana/grafana/pkg/tsdb"
"github.com/grafana/grafana/pkg/tsdb/legacydata"
"github.com/grafana/grafana/pkg/util/errutil"
"github.com/grafana/grafana/pkg/web"
"github.com/prometheus/client_golang/prometheus"
@ -97,7 +98,7 @@ type HTTPServer struct {
LivePushGateway *pushhttp.Gateway
ContextHandler *contexthandler.ContextHandler
SQLStore *sqlstore.SQLStore
DataService *tsdb.Service
legacyDataRequestHandler legacydata.RequestHandler
AlertEngine *alerting.AlertEngine
LoadSchemaService *schemaloader.SchemaLoaderService
AlertNG *ngalert.AlertNG
@ -115,6 +116,7 @@ type HTTPServer struct {
internalMetricsSvc *metrics.InternalMetricsService
updateChecker *updatechecker.Service
searchUsersService searchusers.Service
expressionService *expr.Service
}
type ServerOptions struct {
@ -124,7 +126,7 @@ type ServerOptions struct {
func ProvideHTTPServer(opts ServerOptions, cfg *setting.Cfg, routeRegister routing.RouteRegister, bus bus.Bus,
renderService rendering.Service, licensing models.Licensing, hooksService *hooks.HooksService,
cacheService *localcache.CacheService, sqlStore *sqlstore.SQLStore,
dataService *tsdb.Service, alertEngine *alerting.AlertEngine,
legacyDataRequestHandler legacydata.RequestHandler, alertEngine *alerting.AlertEngine,
pluginRequestValidator models.PluginRequestValidator, pluginStaticRouteResolver plugins.StaticRouteResolver,
pluginDashboardManager plugins.PluginDashboardManager, pluginStore plugins.Store, pluginClient plugins.Client,
pluginErrorResolver plugins.ErrorResolver, settingsProvider setting.Provider,
@ -141,7 +143,7 @@ func ProvideHTTPServer(opts ServerOptions, cfg *setting.Cfg, routeRegister routi
internalMetricsSvc *metrics.InternalMetricsService, quotaService *quota.QuotaService,
socialService social.Service, oauthTokenService oauthtoken.OAuthTokenService,
encryptionService encryption.Service, updateChecker *updatechecker.Service, searchUsersService searchusers.Service,
dataSourcesService *datasources.Service, secretsService secrets.Service) (*HTTPServer, error) {
dataSourcesService *datasources.Service, secretsService secrets.Service, expressionService *expr.Service) (*HTTPServer, error) {
web.Env = cfg.Env
m := web.New()
@ -154,7 +156,7 @@ func ProvideHTTPServer(opts ServerOptions, cfg *setting.Cfg, routeRegister routi
HooksService: hooksService,
CacheService: cacheService,
SQLStore: sqlStore,
DataService: dataService,
legacyDataRequestHandler: legacyDataRequestHandler,
AlertEngine: alertEngine,
PluginRequestValidator: pluginRequestValidator,
pluginClient: pluginClient,
@ -195,6 +197,7 @@ func ProvideHTTPServer(opts ServerOptions, cfg *setting.Cfg, routeRegister routi
SecretsService: secretsService,
DataSourcesService: dataSourcesService,
searchUsersService: searchUsersService,
expressionService: expressionService,
}
if hs.Listener != nil {
hs.log.Debug("Using provided listener")

View File

@ -7,16 +7,15 @@ import (
"net/http"
"time"
"github.com/grafana/grafana/pkg/components/simplejson"
"github.com/grafana/grafana/pkg/tsdb/grafanads"
"github.com/grafana/grafana-plugin-sdk-go/backend"
"github.com/grafana/grafana/pkg/api/dtos"
"github.com/grafana/grafana/pkg/api/response"
"github.com/grafana/grafana/pkg/components/simplejson"
"github.com/grafana/grafana/pkg/expr"
"github.com/grafana/grafana/pkg/models"
"github.com/grafana/grafana/pkg/plugins"
"github.com/grafana/grafana/pkg/plugins/adapters"
"github.com/grafana/grafana/pkg/tsdb/grafanads"
"github.com/grafana/grafana/pkg/tsdb/legacydata"
)
// QueryMetricsV2 returns query metrics.
@ -26,12 +25,12 @@ func (hs *HTTPServer) QueryMetricsV2(c *models.ReqContext, reqDTO dtos.MetricReq
return response.Error(http.StatusBadRequest, "No queries found in query", nil)
}
timeRange := plugins.NewDataTimeRange(reqDTO.From, reqDTO.To)
request := plugins.DataQuery{
timeRange := legacydata.NewDataTimeRange(reqDTO.From, reqDTO.To)
request := legacydata.DataQuery{
TimeRange: &timeRange,
Debug: reqDTO.Debug,
User: c.SignedInUser,
Queries: make([]plugins.DataSubQuery, 0, len(reqDTO.Queries)),
Queries: make([]legacydata.DataSubQuery, 0, len(reqDTO.Queries)),
}
// Parse the queries
@ -53,7 +52,7 @@ func (hs *HTTPServer) QueryMetricsV2(c *models.ReqContext, reqDTO dtos.MetricReq
hs.log.Debug("Processing metrics query", "query", query)
request.Queries = append(request.Queries, plugins.DataSubQuery{
request.Queries = append(request.Queries, legacydata.DataSubQuery{
RefID: query.Get("refId").MustString("A"),
MaxDataPoints: query.Get("maxDataPoints").MustInt64(100),
IntervalMS: query.Get("intervalMs").MustInt64(1000),
@ -64,11 +63,7 @@ func (hs *HTTPServer) QueryMetricsV2(c *models.ReqContext, reqDTO dtos.MetricReq
}
if hasExpression {
exprService := expr.Service{
Cfg: hs.Cfg,
DataService: hs.DataService,
}
qdr, err := exprService.WrapTransformData(c.Req.Context(), request)
qdr, err := hs.expressionService.WrapTransformData(c.Req.Context(), request)
if err != nil {
return response.Error(500, "expression request error", err)
}
@ -187,15 +182,15 @@ func (hs *HTTPServer) QueryMetrics(c *models.ReqContext, reqDto dtos.MetricReque
return response.Error(http.StatusForbidden, "Access denied", err)
}
timeRange := plugins.NewDataTimeRange(reqDto.From, reqDto.To)
request := plugins.DataQuery{
timeRange := legacydata.NewDataTimeRange(reqDto.From, reqDto.To)
request := legacydata.DataQuery{
TimeRange: &timeRange,
Debug: reqDto.Debug,
User: c.SignedInUser,
}
for _, query := range reqDto.Queries {
request.Queries = append(request.Queries, plugins.DataSubQuery{
request.Queries = append(request.Queries, legacydata.DataSubQuery{
RefID: query.Get("refId").MustString("A"),
MaxDataPoints: query.Get("maxDataPoints").MustInt64(100),
IntervalMS: query.Get("intervalMs").MustInt64(1000),
@ -204,7 +199,7 @@ func (hs *HTTPServer) QueryMetrics(c *models.ReqContext, reqDto dtos.MetricReque
})
}
resp, err := hs.DataService.HandleRequest(c.Req.Context(), ds, request)
resp, err := hs.legacyDataRequestHandler.HandleRequest(c.Req.Context(), ds, request)
if err != nil {
return response.Error(http.StatusInternalServerError, "Metric request error", err)
}
@ -221,9 +216,7 @@ func (hs *HTTPServer) QueryMetrics(c *models.ReqContext, reqDto dtos.MetricReque
return response.JSON(statusCode, &resp)
}
// nolint:staticcheck // plugins.DataQueryResponse deprecated
func (hs *HTTPServer) createRequest(ctx context.Context, ds *models.DataSource,
query plugins.DataQuery) (*backend.QueryDataRequest, error) {
func (hs *HTTPServer) createRequest(ctx context.Context, ds *models.DataSource, query legacydata.DataQuery) (*backend.QueryDataRequest, error) {
instanceSettings, err := adapters.ModelToInstanceSettings(ds, hs.decryptSecureJsonDataFn())
if err != nil {
return nil, err

View File

@ -7,6 +7,7 @@ import (
"github.com/grafana/grafana/pkg/components/simplejson"
"github.com/grafana/grafana/pkg/models"
"github.com/grafana/grafana/pkg/plugins"
"github.com/grafana/grafana/pkg/services/encryption"
"github.com/grafana/grafana/pkg/setting"
)
@ -35,15 +36,24 @@ func IsDataSource(uid string) bool {
// Service is service representation for expression handling.
type Service struct {
Cfg *setting.Cfg
DataService plugins.DataRequestHandler
cfg *setting.Cfg
dataService backend.QueryDataHandler
encryptionService encryption.Service
}
func ProvideService(cfg *setting.Cfg, pluginClient plugins.Client, encryptionService encryption.Service) *Service {
return &Service{
cfg: cfg,
dataService: pluginClient,
encryptionService: encryptionService,
}
}
func (s *Service) isDisabled() bool {
if s.Cfg == nil {
if s.cfg == nil {
return true
}
return !s.Cfg.ExpressionsEnabled
return !s.cfg.ExpressionsEnabled
}
// BuildPipeline builds a pipeline from a request.

View File

@ -11,12 +11,13 @@ import (
"github.com/grafana/grafana-plugin-sdk-go/backend"
"github.com/grafana/grafana-plugin-sdk-go/data"
"github.com/grafana/grafana/pkg/bus"
"github.com/grafana/grafana/pkg/components/simplejson"
"github.com/grafana/grafana/pkg/models"
"github.com/grafana/grafana/pkg/plugins"
"github.com/grafana/grafana/pkg/services/encryption/ossencryption"
"github.com/grafana/grafana/pkg/setting"
"github.com/stretchr/testify/require"
)
// nolint:staticcheck // plugins.DataPlugin deprecated
func TestService(t *testing.T) {
dsDF := data.NewFrame("test",
data.NewField("time", nil, []time.Time{time.Unix(1, 0)}),
@ -25,9 +26,13 @@ func TestService(t *testing.T) {
me := &mockEndpoint{
Frames: []*data.Frame{dsDF},
}
s := Service{DataService: me}
bus.AddHandler("test", func(query *models.GetDataSourceQuery) error {
query.Result = &models.DataSource{Id: 1, OrgId: 1, Type: "test"}
s := Service{
cfg: setting.NewCfg(),
dataService: me,
encryptionService: ossencryption.ProvideService(),
}
bus.AddHandlerCtx("test", func(_ context.Context, query *models.GetDataSourceQuery) error {
query.Result = &models.DataSource{Id: 1, OrgId: 1, Type: "test", JsonData: simplejson.New()}
return nil
})
@ -88,18 +93,10 @@ type mockEndpoint struct {
Frames data.Frames
}
// nolint:staticcheck // plugins.DataQueryResponse deprecated
func (me *mockEndpoint) DataQuery(ctx context.Context, ds *models.DataSource, query plugins.DataQuery) (plugins.DataResponse, error) {
return plugins.DataResponse{
Results: map[string]plugins.DataQueryResult{
"A": {
Dataframes: plugins.NewDecodedDataFrames(me.Frames),
},
},
}, nil
}
// nolint:staticcheck // plugins.DataQueryResponse deprecated
func (me *mockEndpoint) HandleRequest(ctx context.Context, ds *models.DataSource, query plugins.DataQuery) (plugins.DataResponse, error) {
return me.DataQuery(ctx, ds, query)
func (me *mockEndpoint) QueryData(ctx context.Context, req *backend.QueryDataRequest) (*backend.QueryDataResponse, error) {
resp := backend.NewQueryDataResponse()
resp.Responses["A"] = backend.DataResponse{
Frames: me.Frames,
}
return resp, nil
}
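
A hypothetical caller-side sketch (not part of this commit) showing how any component holding a backend.QueryDataHandler can now exercise the mock exactly as it would a real plugin client; the frames land in resp.Responses keyed by ref ID:

var handler backend.QueryDataHandler = &mockEndpoint{Frames: data.Frames{dsDF}}
resp, err := handler.QueryData(context.Background(), &backend.QueryDataRequest{})
if err == nil {
	frames := resp.Responses["A"].Frames // the frames the mock returned for ref ID "A"
	_ = frames
}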

View File

@ -3,14 +3,14 @@ package expr
import (
"encoding/json"
"fmt"
"strconv"
"time"
"github.com/grafana/grafana-plugin-sdk-go/backend"
"github.com/grafana/grafana/pkg/bus"
"github.com/grafana/grafana/pkg/components/simplejson"
"github.com/grafana/grafana/pkg/models"
"github.com/grafana/grafana/pkg/plugins"
"github.com/grafana/grafana/pkg/plugins/adapters"
"github.com/grafana/grafana/pkg/tsdb/legacydata"
"github.com/grafana/grafana/pkg/util/errutil"
"github.com/prometheus/client_golang/prometheus"
"golang.org/x/net/context"
)
@ -33,7 +33,7 @@ func init() {
}
// WrapTransformData creates and executes transform requests
func (s *Service) WrapTransformData(ctx context.Context, query plugins.DataQuery) (*backend.QueryDataResponse, error) {
func (s *Service) WrapTransformData(ctx context.Context, query legacydata.DataQuery) (*backend.QueryDataResponse, error) {
req := Request{
OrgId: query.User.OrgId,
Queries: []Query{},
@ -206,38 +206,23 @@ func (s *Service) queryData(ctx context.Context, req *backend.QueryDataRequest)
return nil, fmt.Errorf("could not find datasource: %w", err)
}
// Convert plugin-model (datasource) queries to tsdb queries
queries := make([]plugins.DataSubQuery, len(req.Queries))
for i, query := range req.Queries {
sj, err := simplejson.NewJson(query.JSON)
if err != nil {
return nil, err
}
queries[i] = plugins.DataSubQuery{
RefID: query.RefID,
IntervalMS: query.Interval.Milliseconds(),
MaxDataPoints: query.MaxDataPoints,
QueryType: query.QueryType,
DataSource: getDsInfo.Result,
Model: sj,
}
}
// For now take Time Range from first query.
timeRange := plugins.NewDataTimeRange(strconv.FormatInt(req.Queries[0].TimeRange.From.Unix()*1000, 10),
strconv.FormatInt(req.Queries[0].TimeRange.To.Unix()*1000, 10))
tQ := plugins.DataQuery{
TimeRange: &timeRange,
Queries: queries,
Headers: req.Headers,
}
// Execute the converted queries
tsdbRes, err := s.DataService.HandleRequest(ctx, getDsInfo.Result, tQ)
dsInstanceSettings, err := adapters.ModelToInstanceSettings(getDsInfo.Result, s.decryptSecureJsonDataFn(ctx))
if err != nil {
return nil, err
return nil, errutil.Wrap("failed to convert datasource instance settings", err)
}
return tsdbRes.ToBackendDataResponse()
req.PluginContext.DataSourceInstanceSettings = dsInstanceSettings
req.PluginContext.PluginID = getDsInfo.Result.Type
return s.dataService.QueryData(ctx, req)
}
func (s *Service) decryptSecureJsonDataFn(ctx context.Context) func(map[string][]byte) map[string]string {
return func(m map[string][]byte) map[string]string {
decryptedJsonData, err := s.encryptionService.DecryptJsonData(ctx, m, s.cfg.SecretKey)
if err != nil {
logger.Error("Failed to decrypt secure json data", "error", err)
}
return decryptedJsonData
}
}
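
One detail worth noting in this hunk: adapters.ModelToInstanceSettings expects a plain func(map[string][]byte) map[string]string, so decryptSecureJsonDataFn closes over the request context once and returns a function of exactly that shape. A minimal sketch of the same pattern with hypothetical names (Decrypter and decryptFn are illustrative, not Grafana APIs):

// decryptFn adapts a context-aware decrypter to the context-free
// signature the settings converter expects; ctx is captured by the closure.
func decryptFn(ctx context.Context, dec Decrypter, secretKey string) func(map[string][]byte) map[string]string {
	return func(m map[string][]byte) map[string]string {
		decrypted, err := dec.DecryptJsonData(ctx, m, secretKey)
		if err != nil {
			logger.Error("Failed to decrypt secure json data", "error", err)
		}
		return decrypted
	}
}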

View File

@ -10,12 +10,6 @@ import (
"github.com/grafana/grafana/pkg/plugins/backendplugin"
)
// DataRequestHandler is a data request handler interface.
type DataRequestHandler interface {
// HandleRequest handles a data request.
HandleRequest(context.Context, *models.DataSource, DataQuery) (DataResponse, error)
}
// Store is the storage for plugins.
type Store interface {
// Plugin finds a plugin by its ID.

View File

@ -9,6 +9,7 @@ import (
"github.com/grafana/grafana/pkg/api"
"github.com/grafana/grafana/pkg/api/routing"
"github.com/grafana/grafana/pkg/bus"
"github.com/grafana/grafana/pkg/expr"
"github.com/grafana/grafana/pkg/infra/httpclient"
"github.com/grafana/grafana/pkg/infra/httpclient/httpclientprovider"
"github.com/grafana/grafana/pkg/infra/kvstore"
@ -57,7 +58,6 @@ import (
"github.com/grafana/grafana/pkg/services/sqlstore"
"github.com/grafana/grafana/pkg/services/updatechecker"
"github.com/grafana/grafana/pkg/setting"
"github.com/grafana/grafana/pkg/tsdb"
"github.com/grafana/grafana/pkg/tsdb/azuremonitor"
"github.com/grafana/grafana/pkg/tsdb/cloudmonitoring"
"github.com/grafana/grafana/pkg/tsdb/cloudwatch"
@ -65,6 +65,8 @@ import (
"github.com/grafana/grafana/pkg/tsdb/grafanads"
"github.com/grafana/grafana/pkg/tsdb/graphite"
"github.com/grafana/grafana/pkg/tsdb/influxdb"
"github.com/grafana/grafana/pkg/tsdb/legacydata"
legacydataservice "github.com/grafana/grafana/pkg/tsdb/legacydata/service"
"github.com/grafana/grafana/pkg/tsdb/loki"
"github.com/grafana/grafana/pkg/tsdb/mssql"
"github.com/grafana/grafana/pkg/tsdb/mysql"
@ -76,8 +78,8 @@ import (
)
var wireBasicSet = wire.NewSet(
tsdb.NewService,
wire.Bind(new(plugins.DataRequestHandler), new(*tsdb.Service)),
legacydataservice.ProvideService,
wire.Bind(new(legacydata.RequestHandler), new(*legacydataservice.Service)),
alerting.ProvideAlertEngine,
wire.Bind(new(alerting.UsageStatsQuerier), new(*alerting.AlertEngine)),
setting.NewCfgFromArgs,
@ -164,6 +166,7 @@ var wireBasicSet = wire.NewSet(
datasources.ProvideService,
pluginsettings.ProvideService,
alerting.ProvideService,
expr.ProvideService,
)
var wireSet = wire.NewSet(

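The wire.Bind calls above are what let consumers depend on the interface while wire supplies the concrete service. A minimal standalone sketch of the pattern with toy names (not Grafana code):

package main

import "github.com/google/wire"

// RequestHandler stands in for legacydata.RequestHandler.
type RequestHandler interface{ Handle() }

// Service stands in for *legacydataservice.Service.
type Service struct{}

func (s *Service) Handle() {}

func ProvideService() *Service { return &Service{} }

// Set instructs wire to construct *Service and inject it wherever
// a RequestHandler is requested.
var Set = wire.NewSet(
	ProvideService,
	wire.Bind(new(RequestHandler), new(*Service)),
)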
View File

@ -6,8 +6,8 @@ import (
"strings"
"time"
"github.com/grafana/grafana/pkg/plugins"
"github.com/grafana/grafana/pkg/tsdb/interval"
"github.com/grafana/grafana/pkg/tsdb/legacydata"
"github.com/grafana/grafana/pkg/tsdb/legacydata/interval"
"github.com/grafana/grafana/pkg/tsdb/prometheus"
gocontext "context"
@ -47,8 +47,8 @@ type AlertQuery struct {
}
// Eval evaluates the `QueryCondition`.
func (c *QueryCondition) Eval(context *alerting.EvalContext, requestHandler plugins.DataRequestHandler) (*alerting.ConditionResult, error) {
timeRange := plugins.NewDataTimeRange(c.Query.From, c.Query.To)
func (c *QueryCondition) Eval(context *alerting.EvalContext, requestHandler legacydata.RequestHandler) (*alerting.ConditionResult, error) {
timeRange := legacydata.NewDataTimeRange(c.Query.From, c.Query.To)
seriesList, err := c.executeQuery(context, timeRange, requestHandler)
if err != nil {
@ -109,7 +109,7 @@ func (c *QueryCondition) Eval(context *alerting.EvalContext, requestHandler plug
}, nil
}
func calculateInterval(timeRange plugins.DataTimeRange, model *simplejson.Json, dsInfo *models.DataSource) (time.Duration, error) {
func calculateInterval(timeRange legacydata.DataTimeRange, model *simplejson.Json, dsInfo *models.DataSource) (time.Duration, error) {
// if there is no min-interval specified in the datasource or in the dashboard-panel,
// the value of 1ms is used (this is how it is done in the dashboard-interval-calculation too,
// see https://github.com/grafana/grafana/blob/9a0040c0aeaae8357c650cec2ee644a571dddf3d/packages/grafana-data/src/datetime/rangeutil.ts#L264)
@ -133,8 +133,8 @@ func calculateInterval(timeRange plugins.DataTimeRange, model *simplejson.Json,
return interval.Value, nil
}
func (c *QueryCondition) executeQuery(context *alerting.EvalContext, timeRange plugins.DataTimeRange,
requestHandler plugins.DataRequestHandler) (plugins.DataTimeSeriesSlice, error) {
func (c *QueryCondition) executeQuery(context *alerting.EvalContext, timeRange legacydata.DataTimeRange,
requestHandler legacydata.RequestHandler) (legacydata.DataTimeSeriesSlice, error) {
getDsInfo := &models.GetDataSourceQuery{
Id: c.Query.DatasourceID,
OrgId: context.Rule.OrgID,
@ -153,7 +153,7 @@ func (c *QueryCondition) executeQuery(context *alerting.EvalContext, timeRange p
if err != nil {
return nil, fmt.Errorf("interval calculation failed: %w", err)
}
result := make(plugins.DataTimeSeriesSlice, 0)
result := make(legacydata.DataTimeSeriesSlice, 0)
if context.IsDebug {
data := simplejson.New()
@ -247,18 +247,18 @@ func (c *QueryCondition) executeQuery(context *alerting.EvalContext, timeRange p
return result, nil
}
func (c *QueryCondition) getRequestForAlertRule(datasource *models.DataSource, timeRange plugins.DataTimeRange,
debug bool) (plugins.DataQuery, error) {
func (c *QueryCondition) getRequestForAlertRule(datasource *models.DataSource, timeRange legacydata.DataTimeRange,
debug bool) (legacydata.DataQuery, error) {
queryModel := c.Query.Model
calculatedInterval, err := calculateInterval(timeRange, queryModel, datasource)
if err != nil {
return plugins.DataQuery{}, err
return legacydata.DataQuery{}, err
}
req := plugins.DataQuery{
req := legacydata.DataQuery{
TimeRange: &timeRange,
Queries: []plugins.DataSubQuery{
Queries: []legacydata.DataSubQuery{
{
RefID: "A",
Model: queryModel,
@ -340,21 +340,21 @@ func validateToValue(to string) error {
// FrameToSeriesSlice converts a frame that is a valid time series as per data.TimeSeriesSchema()
// to a DataTimeSeriesSlice.
func FrameToSeriesSlice(frame *data.Frame) (plugins.DataTimeSeriesSlice, error) {
func FrameToSeriesSlice(frame *data.Frame) (legacydata.DataTimeSeriesSlice, error) {
tsSchema := frame.TimeSeriesSchema()
if tsSchema.Type == data.TimeSeriesTypeNot {
// If no fields, or only a time field, create an empty plugins.DataTimeSeriesSlice with a single
// time series in order to trigger "no data" in alerting.
if frame.Rows() == 0 || (len(frame.Fields) == 1 && frame.Fields[0].Type().Time()) {
return plugins.DataTimeSeriesSlice{{
return legacydata.DataTimeSeriesSlice{{
Name: frame.Name,
Points: make(plugins.DataTimeSeriesPoints, 0),
Points: make(legacydata.DataTimeSeriesPoints, 0),
}}, nil
}
return nil, fmt.Errorf("input frame is not recognized as a time series")
}
seriesCount := len(tsSchema.ValueIndices)
seriesSlice := make(plugins.DataTimeSeriesSlice, 0, seriesCount)
seriesSlice := make(legacydata.DataTimeSeriesSlice, 0, seriesCount)
timeField := frame.Fields[tsSchema.TimeIndex]
timeNullFloatSlice := make([]null.Float, timeField.Len())
@ -368,8 +368,8 @@ func FrameToSeriesSlice(frame *data.Frame) (plugins.DataTimeSeriesSlice, error)
for _, fieldIdx := range tsSchema.ValueIndices { // create a TimeSeries for each value Field
field := frame.Fields[fieldIdx]
ts := plugins.DataTimeSeries{
Points: make(plugins.DataTimeSeriesPoints, field.Len()),
ts := legacydata.DataTimeSeries{
Points: make(legacydata.DataTimeSeriesPoints, field.Len()),
}
if len(field.Labels) > 0 {
@ -395,7 +395,7 @@ func FrameToSeriesSlice(frame *data.Frame) (plugins.DataTimeSeriesSlice, error)
return nil, errutil.Wrapf(err,
"failed to convert frame to DataTimeSeriesSlice, can not convert value %v to float", field.At(rowIdx))
}
ts.Points[rowIdx] = plugins.DataTimePoint{
ts.Points[rowIdx] = legacydata.DataTimePoint{
null.FloatFrom(val),
timeNullFloatSlice[rowIdx],
}

View File

@ -4,14 +4,13 @@ import (
"context"
"testing"
"github.com/grafana/grafana/pkg/services/validations"
"github.com/grafana/grafana/pkg/tsdb/interval"
"github.com/grafana/grafana/pkg/bus"
"github.com/grafana/grafana/pkg/components/simplejson"
"github.com/grafana/grafana/pkg/models"
"github.com/grafana/grafana/pkg/plugins"
"github.com/grafana/grafana/pkg/services/alerting"
"github.com/grafana/grafana/pkg/services/validations"
"github.com/grafana/grafana/pkg/tsdb/intervalv2"
"github.com/grafana/grafana/pkg/tsdb/legacydata"
"github.com/stretchr/testify/require"
)
@ -27,11 +26,11 @@ func TestQueryInterval(t *testing.T) {
timeRange := "5m"
verifier := func(query plugins.DataSubQuery) {
verifier := func(query legacydata.DataSubQuery) {
// 5minutes timerange = 300000milliseconds; default-resolution is 1500pixels,
// so we should have 300000/1500 = 200milliseconds here
require.Equal(t, int64(200), query.IntervalMS)
require.Equal(t, interval.DefaultRes, query.MaxDataPoints)
require.Equal(t, intervalv2.DefaultRes, query.MaxDataPoints)
}
applyScenario(t, timeRange, dataSourceJson, queryModel, verifier)
@ -45,9 +44,9 @@ func TestQueryInterval(t *testing.T) {
timeRange := "5m"
verifier := func(query plugins.DataSubQuery) {
verifier := func(query legacydata.DataSubQuery) {
require.Equal(t, int64(123000), query.IntervalMS)
require.Equal(t, interval.DefaultRes, query.MaxDataPoints)
require.Equal(t, intervalv2.DefaultRes, query.MaxDataPoints)
}
applyScenario(t, timeRange, dataSourceJson, queryModel, verifier)
@ -64,9 +63,9 @@ func TestQueryInterval(t *testing.T) {
timeRange := "5m"
verifier := func(query plugins.DataSubQuery) {
verifier := func(query legacydata.DataSubQuery) {
require.Equal(t, int64(71000), query.IntervalMS)
require.Equal(t, interval.DefaultRes, query.MaxDataPoints)
require.Equal(t, intervalv2.DefaultRes, query.MaxDataPoints)
}
applyScenario(t, timeRange, dataSourceJson, queryModel, verifier)
@ -83,11 +82,11 @@ func TestQueryInterval(t *testing.T) {
timeRange := "5m"
verifier := func(query plugins.DataSubQuery) {
verifier := func(query legacydata.DataSubQuery) {
// when both panel-min-interval and datasource-min-interval exists,
// panel-min-interval is used
require.Equal(t, int64(19000), query.IntervalMS)
require.Equal(t, interval.DefaultRes, query.MaxDataPoints)
require.Equal(t, intervalv2.DefaultRes, query.MaxDataPoints)
}
applyScenario(t, timeRange, dataSourceJson, queryModel, verifier)
@ -102,11 +101,11 @@ func TestQueryInterval(t *testing.T) {
timeRange := "1s"
verifier := func(query plugins.DataSubQuery) {
verifier := func(query legacydata.DataSubQuery) {
// no min-interval exists, the default-min-interval will be used,
// and for such a short time-range this will cause the value to be 1millisecond.
require.Equal(t, int64(1), query.IntervalMS)
require.Equal(t, interval.DefaultRes, query.MaxDataPoints)
require.Equal(t, intervalv2.DefaultRes, query.MaxDataPoints)
}
applyScenario(t, timeRange, dataSourceJson, queryModel, verifier)
@ -119,24 +118,24 @@ type queryIntervalTestContext struct {
condition *QueryCondition
}
type queryIntervalVerifier func(query plugins.DataSubQuery)
type queryIntervalVerifier func(query legacydata.DataSubQuery)
type fakeIntervalTestReqHandler struct {
//nolint: staticcheck // plugins.DataResponse deprecated
response plugins.DataResponse
//nolint: staticcheck // legacydata.DataResponse deprecated
response legacydata.DataResponse
verifier queryIntervalVerifier
}
//nolint: staticcheck // plugins.DataResponse deprecated
func (rh fakeIntervalTestReqHandler) HandleRequest(ctx context.Context, dsInfo *models.DataSource, query plugins.DataQuery) (
plugins.DataResponse, error) {
//nolint: staticcheck // legacydata.DataResponse deprecated
func (rh fakeIntervalTestReqHandler) HandleRequest(ctx context.Context, dsInfo *models.DataSource, query legacydata.DataQuery) (
legacydata.DataResponse, error) {
q := query.Queries[0]
rh.verifier(q)
return rh.response, nil
}
//nolint: staticcheck // plugins.DataResponse deprecated
func applyScenario(t *testing.T, timeRange string, dataSourceJsonData *simplejson.Json, queryModel string, verifier func(query plugins.DataSubQuery)) {
//nolint: staticcheck // legacydata.DataResponse deprecated
func applyScenario(t *testing.T, timeRange string, dataSourceJsonData *simplejson.Json, queryModel string, verifier func(query legacydata.DataSubQuery)) {
t.Run("desc", func(t *testing.T) {
bus.AddHandlerCtx("test", func(ctx context.Context, query *models.GetDataSourceQuery) error {
query.Result = &models.DataSource{Id: 1, Type: "graphite", JsonData: dataSourceJsonData}
@ -167,11 +166,11 @@ func applyScenario(t *testing.T, timeRange string, dataSourceJsonData *simplejso
ctx.condition = condition
qr := plugins.DataQueryResult{}
qr := legacydata.DataQueryResult{}
reqHandler := fakeIntervalTestReqHandler{
response: plugins.DataResponse{
Results: map[string]plugins.DataQueryResult{
response: legacydata.DataResponse{
Results: map[string]legacydata.DataQueryResult{
"A": qr,
},
},

View File

@ -7,6 +7,7 @@ import (
"time"
"github.com/grafana/grafana/pkg/services/validations"
"github.com/grafana/grafana/pkg/tsdb/legacydata"
"github.com/google/go-cmp/cmp"
"github.com/google/go-cmp/cmp/cmpopts"
@ -15,18 +16,17 @@ import (
"github.com/grafana/grafana/pkg/components/null"
"github.com/grafana/grafana/pkg/components/simplejson"
"github.com/grafana/grafana/pkg/models"
"github.com/grafana/grafana/pkg/plugins"
"github.com/grafana/grafana/pkg/services/alerting"
"github.com/stretchr/testify/require"
"github.com/xorcare/pointer"
)
func newTimeSeriesPointsFromArgs(values ...float64) plugins.DataTimeSeriesPoints {
points := make(plugins.DataTimeSeriesPoints, 0)
func newTimeSeriesPointsFromArgs(values ...float64) legacydata.DataTimeSeriesPoints {
points := make(legacydata.DataTimeSeriesPoints, 0)
for i := 0; i < len(values); i += 2 {
points = append(points, plugins.DataTimePoint{null.FloatFrom(values[i]), null.FloatFrom(values[i+1])})
points = append(points, legacydata.DataTimePoint{null.FloatFrom(values[i]), null.FloatFrom(values[i+1])})
}
return points
@ -74,7 +74,7 @@ func TestQueryCondition(t *testing.T) {
t.Run("should fire when avg is above 100", func(t *testing.T) {
ctx := setup()
points := newTimeSeriesPointsFromArgs(120, 0)
ctx.series = plugins.DataTimeSeriesSlice{plugins.DataTimeSeries{Name: "test1", Points: points}}
ctx.series = legacydata.DataTimeSeriesSlice{legacydata.DataTimeSeries{Name: "test1", Points: points}}
cr, err := ctx.exec(t)
require.Nil(t, err)
@ -96,7 +96,7 @@ func TestQueryCondition(t *testing.T) {
t.Run("Should not fire when avg is below 100", func(t *testing.T) {
ctx := setup()
points := newTimeSeriesPointsFromArgs(90, 0)
ctx.series = plugins.DataTimeSeriesSlice{plugins.DataTimeSeries{Name: "test1", Points: points}}
ctx.series = legacydata.DataTimeSeriesSlice{legacydata.DataTimeSeries{Name: "test1", Points: points}}
cr, err := ctx.exec(t)
require.Nil(t, err)
@ -117,9 +117,9 @@ func TestQueryCondition(t *testing.T) {
t.Run("Should fire if only first series matches", func(t *testing.T) {
ctx := setup()
ctx.series = plugins.DataTimeSeriesSlice{
plugins.DataTimeSeries{Name: "test1", Points: newTimeSeriesPointsFromArgs(120, 0)},
plugins.DataTimeSeries{Name: "test2", Points: newTimeSeriesPointsFromArgs(0, 0)},
ctx.series = legacydata.DataTimeSeriesSlice{
legacydata.DataTimeSeries{Name: "test1", Points: newTimeSeriesPointsFromArgs(120, 0)},
legacydata.DataTimeSeries{Name: "test2", Points: newTimeSeriesPointsFromArgs(0, 0)},
}
cr, err := ctx.exec(t)
@ -130,7 +130,7 @@ func TestQueryCondition(t *testing.T) {
t.Run("No series", func(t *testing.T) {
ctx := setup()
t.Run("Should set NoDataFound when condition is gt", func(t *testing.T) {
ctx.series = plugins.DataTimeSeriesSlice{}
ctx.series = legacydata.DataTimeSeriesSlice{}
cr, err := ctx.exec(t)
require.Nil(t, err)
@ -140,7 +140,7 @@ func TestQueryCondition(t *testing.T) {
t.Run("Should be firing when condition is no_value", func(t *testing.T) {
ctx.evaluator = `{"type": "no_value", "params": []}`
ctx.series = plugins.DataTimeSeriesSlice{}
ctx.series = legacydata.DataTimeSeriesSlice{}
cr, err := ctx.exec(t)
require.Nil(t, err)
@ -152,8 +152,8 @@ func TestQueryCondition(t *testing.T) {
ctx := setup()
t.Run("Should set Firing if eval match", func(t *testing.T) {
ctx.evaluator = `{"type": "no_value", "params": []}`
ctx.series = plugins.DataTimeSeriesSlice{
plugins.DataTimeSeries{Name: "test1", Points: newTimeSeriesPointsFromArgs()},
ctx.series = legacydata.DataTimeSeriesSlice{
legacydata.DataTimeSeries{Name: "test1", Points: newTimeSeriesPointsFromArgs()},
}
cr, err := ctx.exec(t)
@ -162,9 +162,9 @@ func TestQueryCondition(t *testing.T) {
})
t.Run("Should set NoDataFound both series are empty", func(t *testing.T) {
ctx.series = plugins.DataTimeSeriesSlice{
plugins.DataTimeSeries{Name: "test1", Points: newTimeSeriesPointsFromArgs()},
plugins.DataTimeSeries{Name: "test2", Points: newTimeSeriesPointsFromArgs()},
ctx.series = legacydata.DataTimeSeriesSlice{
legacydata.DataTimeSeries{Name: "test1", Points: newTimeSeriesPointsFromArgs()},
legacydata.DataTimeSeries{Name: "test2", Points: newTimeSeriesPointsFromArgs()},
}
cr, err := ctx.exec(t)
@ -173,9 +173,9 @@ func TestQueryCondition(t *testing.T) {
})
t.Run("Should set NoDataFound both series contains null", func(t *testing.T) {
ctx.series = plugins.DataTimeSeriesSlice{
plugins.DataTimeSeries{Name: "test1", Points: plugins.DataTimeSeriesPoints{plugins.DataTimePoint{null.FloatFromPtr(nil), null.FloatFrom(0)}}},
plugins.DataTimeSeries{Name: "test2", Points: plugins.DataTimeSeriesPoints{plugins.DataTimePoint{null.FloatFromPtr(nil), null.FloatFrom(0)}}},
ctx.series = legacydata.DataTimeSeriesSlice{
legacydata.DataTimeSeries{Name: "test1", Points: legacydata.DataTimeSeriesPoints{legacydata.DataTimePoint{null.FloatFromPtr(nil), null.FloatFrom(0)}}},
legacydata.DataTimeSeries{Name: "test2", Points: legacydata.DataTimeSeriesPoints{legacydata.DataTimePoint{null.FloatFromPtr(nil), null.FloatFrom(0)}}},
}
cr, err := ctx.exec(t)
@ -184,9 +184,9 @@ func TestQueryCondition(t *testing.T) {
})
t.Run("Should not set NoDataFound if one series is empty", func(t *testing.T) {
ctx.series = plugins.DataTimeSeriesSlice{
plugins.DataTimeSeries{Name: "test1", Points: newTimeSeriesPointsFromArgs()},
plugins.DataTimeSeries{Name: "test2", Points: newTimeSeriesPointsFromArgs(120, 0)},
ctx.series = legacydata.DataTimeSeriesSlice{
legacydata.DataTimeSeries{Name: "test1", Points: newTimeSeriesPointsFromArgs()},
legacydata.DataTimeSeries{Name: "test2", Points: newTimeSeriesPointsFromArgs(120, 0)},
}
cr, err := ctx.exec(t)
@ -199,13 +199,13 @@ func TestQueryCondition(t *testing.T) {
type queryConditionTestContext struct {
reducer string
evaluator string
series plugins.DataTimeSeriesSlice
series legacydata.DataTimeSeriesSlice
frame *data.Frame
result *alerting.EvalContext
condition *QueryCondition
}
//nolint: staticcheck // plugins.DataPlugin deprecated
//nolint: staticcheck // legacydata.DataPlugin deprecated
func (ctx *queryConditionTestContext) exec(t *testing.T) (*alerting.ConditionResult, error) {
jsonModel, err := simplejson.NewJson([]byte(`{
"type": "query",
@ -224,18 +224,18 @@ func (ctx *queryConditionTestContext) exec(t *testing.T) (*alerting.ConditionRes
ctx.condition = condition
qr := plugins.DataQueryResult{
qr := legacydata.DataQueryResult{
Series: ctx.series,
}
if ctx.frame != nil {
qr = plugins.DataQueryResult{
Dataframes: plugins.NewDecodedDataFrames(data.Frames{ctx.frame}),
qr = legacydata.DataQueryResult{
Dataframes: legacydata.NewDecodedDataFrames(data.Frames{ctx.frame}),
}
}
reqHandler := fakeReqHandler{
response: plugins.DataResponse{
Results: map[string]plugins.DataQueryResult{
response: legacydata.DataResponse{
Results: map[string]legacydata.DataQueryResult{
"A": qr,
},
},
@ -245,13 +245,13 @@ func (ctx *queryConditionTestContext) exec(t *testing.T) (*alerting.ConditionRes
}
type fakeReqHandler struct {
//nolint: staticcheck // plugins.DataPlugin deprecated
response plugins.DataResponse
//nolint: staticcheck // legacydata.DataPlugin deprecated
response legacydata.DataResponse
}
//nolint: staticcheck // plugins.DataPlugin deprecated
func (rh fakeReqHandler) HandleRequest(context.Context, *models.DataSource, plugins.DataQuery) (
plugins.DataResponse, error) {
//nolint: staticcheck // legacydata.DataPlugin deprecated
func (rh fakeReqHandler) HandleRequest(context.Context, *models.DataSource, legacydata.DataQuery) (
legacydata.DataResponse, error) {
return rh.response, nil
}
@ -259,7 +259,7 @@ func TestFrameToSeriesSlice(t *testing.T) {
tests := []struct {
name string
frame *data.Frame
seriesSlice plugins.DataTimeSeriesSlice
seriesSlice legacydata.DataTimeSeriesSlice
Err require.ErrorAssertionFunc
}{
{
@ -278,21 +278,21 @@ func TestFrameToSeriesSlice(t *testing.T) {
4.0,
})),
seriesSlice: plugins.DataTimeSeriesSlice{
plugins.DataTimeSeries{
seriesSlice: legacydata.DataTimeSeriesSlice{
legacydata.DataTimeSeries{
Name: "Values Int64s {Animal Factor=cat}",
Tags: map[string]string{"Animal Factor": "cat"},
Points: plugins.DataTimeSeriesPoints{
plugins.DataTimePoint{null.FloatFrom(math.NaN()), null.FloatFrom(1577934240000)},
plugins.DataTimePoint{null.FloatFrom(3), null.FloatFrom(1577934270000)},
Points: legacydata.DataTimeSeriesPoints{
legacydata.DataTimePoint{null.FloatFrom(math.NaN()), null.FloatFrom(1577934240000)},
legacydata.DataTimePoint{null.FloatFrom(3), null.FloatFrom(1577934270000)},
},
},
plugins.DataTimeSeries{
legacydata.DataTimeSeries{
Name: "Values Floats {Animal Factor=sloth}",
Tags: map[string]string{"Animal Factor": "sloth"},
Points: plugins.DataTimeSeriesPoints{
plugins.DataTimePoint{null.FloatFrom(2), null.FloatFrom(1577934240000)},
plugins.DataTimePoint{null.FloatFrom(4), null.FloatFrom(1577934270000)},
Points: legacydata.DataTimeSeriesPoints{
legacydata.DataTimePoint{null.FloatFrom(2), null.FloatFrom(1577934240000)},
legacydata.DataTimePoint{null.FloatFrom(4), null.FloatFrom(1577934270000)},
},
},
},
@ -305,16 +305,16 @@ func TestFrameToSeriesSlice(t *testing.T) {
data.NewField(`Values Int64s`, data.Labels{"Animal Factor": "cat"}, []*int64{}),
data.NewField(`Values Floats`, data.Labels{"Animal Factor": "sloth"}, []float64{})),
seriesSlice: plugins.DataTimeSeriesSlice{
plugins.DataTimeSeries{
seriesSlice: legacydata.DataTimeSeriesSlice{
legacydata.DataTimeSeries{
Name: "Values Int64s {Animal Factor=cat}",
Tags: map[string]string{"Animal Factor": "cat"},
Points: plugins.DataTimeSeriesPoints{},
Points: legacydata.DataTimeSeriesPoints{},
},
plugins.DataTimeSeries{
legacydata.DataTimeSeries{
Name: "Values Floats {Animal Factor=sloth}",
Tags: map[string]string{"Animal Factor": "sloth"},
Points: plugins.DataTimeSeriesPoints{},
Points: legacydata.DataTimeSeriesPoints{},
},
},
Err: require.NoError,
@ -325,10 +325,10 @@ func TestFrameToSeriesSlice(t *testing.T) {
data.NewField("Time", data.Labels{}, []time.Time{}),
data.NewField(`Values`, data.Labels{}, []float64{})),
seriesSlice: plugins.DataTimeSeriesSlice{
plugins.DataTimeSeries{
seriesSlice: legacydata.DataTimeSeriesSlice{
legacydata.DataTimeSeries{
Name: "Values",
Points: plugins.DataTimeSeriesPoints{},
Points: legacydata.DataTimeSeriesPoints{},
},
},
Err: require.NoError,
@ -341,10 +341,10 @@ func TestFrameToSeriesSlice(t *testing.T) {
DisplayNameFromDS: "sloth",
})),
seriesSlice: plugins.DataTimeSeriesSlice{
plugins.DataTimeSeries{
seriesSlice: legacydata.DataTimeSeriesSlice{
legacydata.DataTimeSeries{
Name: "sloth",
Points: plugins.DataTimeSeriesPoints{},
Points: legacydata.DataTimeSeriesPoints{},
Tags: map[string]string{"Rating": "10"},
},
},
@ -359,10 +359,10 @@ func TestFrameToSeriesSlice(t *testing.T) {
DisplayNameFromDS: "sloth #2",
})),
seriesSlice: plugins.DataTimeSeriesSlice{
plugins.DataTimeSeries{
seriesSlice: legacydata.DataTimeSeriesSlice{
legacydata.DataTimeSeries{
Name: "sloth #1",
Points: plugins.DataTimeSeriesPoints{},
Points: legacydata.DataTimeSeriesPoints{},
},
},
Err: require.NoError,

View File

@ -6,7 +6,7 @@ import (
"sort"
"github.com/grafana/grafana/pkg/components/null"
"github.com/grafana/grafana/pkg/plugins"
"github.com/grafana/grafana/pkg/tsdb/legacydata"
)
// queryReducer reduces a timeseries to a nullable float
@ -18,8 +18,7 @@ type queryReducer struct {
}
//nolint: gocyclo
//nolint: staticcheck // plugins.DataTimeSeries deprecated
func (s *queryReducer) Reduce(series plugins.DataTimeSeries) null.Float {
func (s *queryReducer) Reduce(series legacydata.DataTimeSeries) null.Float {
if len(series.Points) == 0 {
return null.FloatFromPtr(nil)
}
@ -127,8 +126,7 @@ func newSimpleReducer(t string) *queryReducer {
return &queryReducer{Type: t}
}
//nolint: staticcheck // plugins.* deprecated
func calculateDiff(series plugins.DataTimeSeries, allNull bool, value float64, fn func(float64, float64) float64) (bool, float64) {
func calculateDiff(series legacydata.DataTimeSeries, allNull bool, value float64, fn func(float64, float64) float64) (bool, float64) {
var (
points = series.Points
first float64

View File

@ -5,7 +5,7 @@ import (
"testing"
"github.com/grafana/grafana/pkg/components/null"
"github.com/grafana/grafana/pkg/plugins"
"github.com/grafana/grafana/pkg/tsdb/legacydata"
"github.com/stretchr/testify/require"
)
@ -52,16 +52,16 @@ func TestSimpleReducer(t *testing.T) {
t.Run("median should ignore null values", func(t *testing.T) {
reducer := newSimpleReducer("median")
series := plugins.DataTimeSeries{
series := legacydata.DataTimeSeries{
Name: "test time series",
}
series.Points = append(series.Points, plugins.DataTimePoint{null.FloatFromPtr(nil), null.FloatFrom(1)})
series.Points = append(series.Points, plugins.DataTimePoint{null.FloatFromPtr(nil), null.FloatFrom(2)})
series.Points = append(series.Points, plugins.DataTimePoint{null.FloatFromPtr(nil), null.FloatFrom(3)})
series.Points = append(series.Points, plugins.DataTimePoint{null.FloatFrom(float64(1)), null.FloatFrom(4)})
series.Points = append(series.Points, plugins.DataTimePoint{null.FloatFrom(float64(2)), null.FloatFrom(5)})
series.Points = append(series.Points, plugins.DataTimePoint{null.FloatFrom(float64(3)), null.FloatFrom(6)})
series.Points = append(series.Points, legacydata.DataTimePoint{null.FloatFromPtr(nil), null.FloatFrom(1)})
series.Points = append(series.Points, legacydata.DataTimePoint{null.FloatFromPtr(nil), null.FloatFrom(2)})
series.Points = append(series.Points, legacydata.DataTimePoint{null.FloatFromPtr(nil), null.FloatFrom(3)})
series.Points = append(series.Points, legacydata.DataTimePoint{null.FloatFrom(float64(1)), null.FloatFrom(4)})
series.Points = append(series.Points, legacydata.DataTimePoint{null.FloatFrom(float64(2)), null.FloatFrom(5)})
series.Points = append(series.Points, legacydata.DataTimePoint{null.FloatFrom(float64(3)), null.FloatFrom(6)})
result := reducer.Reduce(series)
require.Equal(t, true, result.Valid)
@ -75,25 +75,25 @@ func TestSimpleReducer(t *testing.T) {
t.Run("avg with only nulls", func(t *testing.T) {
reducer := newSimpleReducer("avg")
series := plugins.DataTimeSeries{
series := legacydata.DataTimeSeries{
Name: "test time series",
}
series.Points = append(series.Points, plugins.DataTimePoint{null.FloatFromPtr(nil), null.FloatFrom(1)})
series.Points = append(series.Points, legacydata.DataTimePoint{null.FloatFromPtr(nil), null.FloatFrom(1)})
require.Equal(t, false, reducer.Reduce(series).Valid)
})
t.Run("count_non_null", func(t *testing.T) {
t.Run("with null values and real values", func(t *testing.T) {
reducer := newSimpleReducer("count_non_null")
series := plugins.DataTimeSeries{
series := legacydata.DataTimeSeries{
Name: "test time series",
}
series.Points = append(series.Points, plugins.DataTimePoint{null.FloatFromPtr(nil), null.FloatFrom(1)})
series.Points = append(series.Points, plugins.DataTimePoint{null.FloatFromPtr(nil), null.FloatFrom(2)})
series.Points = append(series.Points, plugins.DataTimePoint{null.FloatFrom(3), null.FloatFrom(3)})
series.Points = append(series.Points, plugins.DataTimePoint{null.FloatFrom(3), null.FloatFrom(4)})
series.Points = append(series.Points, legacydata.DataTimePoint{null.FloatFromPtr(nil), null.FloatFrom(1)})
series.Points = append(series.Points, legacydata.DataTimePoint{null.FloatFromPtr(nil), null.FloatFrom(2)})
series.Points = append(series.Points, legacydata.DataTimePoint{null.FloatFrom(3), null.FloatFrom(3)})
series.Points = append(series.Points, legacydata.DataTimePoint{null.FloatFrom(3), null.FloatFrom(4)})
require.Equal(t, true, reducer.Reduce(series).Valid)
require.Equal(t, 2.0, reducer.Reduce(series).Float64)
@ -101,12 +101,12 @@ func TestSimpleReducer(t *testing.T) {
t.Run("with null values", func(t *testing.T) {
reducer := newSimpleReducer("count_non_null")
series := plugins.DataTimeSeries{
series := legacydata.DataTimeSeries{
Name: "test time series",
}
series.Points = append(series.Points, plugins.DataTimePoint{null.FloatFromPtr(nil), null.FloatFrom(1)})
series.Points = append(series.Points, plugins.DataTimePoint{null.FloatFromPtr(nil), null.FloatFrom(2)})
series.Points = append(series.Points, legacydata.DataTimePoint{null.FloatFromPtr(nil), null.FloatFrom(1)})
series.Points = append(series.Points, legacydata.DataTimePoint{null.FloatFromPtr(nil), null.FloatFrom(2)})
require.Equal(t, false, reducer.Reduce(series).Valid)
})
@ -114,14 +114,14 @@ func TestSimpleReducer(t *testing.T) {
t.Run("avg of number values and null values should ignore nulls", func(t *testing.T) {
reducer := newSimpleReducer("avg")
series := plugins.DataTimeSeries{
series := legacydata.DataTimeSeries{
Name: "test time series",
}
series.Points = append(series.Points, plugins.DataTimePoint{null.FloatFrom(3), null.FloatFrom(1)})
series.Points = append(series.Points, plugins.DataTimePoint{null.FloatFromPtr(nil), null.FloatFrom(2)})
series.Points = append(series.Points, plugins.DataTimePoint{null.FloatFromPtr(nil), null.FloatFrom(3)})
series.Points = append(series.Points, plugins.DataTimePoint{null.FloatFrom(3), null.FloatFrom(4)})
series.Points = append(series.Points, legacydata.DataTimePoint{null.FloatFrom(3), null.FloatFrom(1)})
series.Points = append(series.Points, legacydata.DataTimePoint{null.FloatFromPtr(nil), null.FloatFrom(2)})
series.Points = append(series.Points, legacydata.DataTimePoint{null.FloatFromPtr(nil), null.FloatFrom(3)})
series.Points = append(series.Points, legacydata.DataTimePoint{null.FloatFrom(3), null.FloatFrom(4)})
require.Equal(t, float64(3), reducer.Reduce(series).Float64)
})
@ -179,12 +179,12 @@ func TestSimpleReducer(t *testing.T) {
t.Run("diff with only nulls", func(t *testing.T) {
reducer := newSimpleReducer("diff")
series := plugins.DataTimeSeries{
series := legacydata.DataTimeSeries{
Name: "test time series",
}
series.Points = append(series.Points, plugins.DataTimePoint{null.FloatFromPtr(nil), null.FloatFrom(1)})
series.Points = append(series.Points, plugins.DataTimePoint{null.FloatFromPtr(nil), null.FloatFrom(2)})
series.Points = append(series.Points, legacydata.DataTimePoint{null.FloatFromPtr(nil), null.FloatFrom(1)})
series.Points = append(series.Points, legacydata.DataTimePoint{null.FloatFromPtr(nil), null.FloatFrom(2)})
require.Equal(t, false, reducer.Reduce(series).Valid)
})
@ -242,12 +242,12 @@ func TestSimpleReducer(t *testing.T) {
t.Run("diff_abs with only nulls", func(t *testing.T) {
reducer := newSimpleReducer("diff_abs")
series := plugins.DataTimeSeries{
series := legacydata.DataTimeSeries{
Name: "test time series",
}
series.Points = append(series.Points, plugins.DataTimePoint{null.FloatFromPtr(nil), null.FloatFrom(1)})
series.Points = append(series.Points, plugins.DataTimePoint{null.FloatFromPtr(nil), null.FloatFrom(2)})
series.Points = append(series.Points, legacydata.DataTimePoint{null.FloatFromPtr(nil), null.FloatFrom(1)})
series.Points = append(series.Points, legacydata.DataTimePoint{null.FloatFromPtr(nil), null.FloatFrom(2)})
require.Equal(t, false, reducer.Reduce(series).Valid)
})
@ -305,12 +305,12 @@ func TestSimpleReducer(t *testing.T) {
t.Run("percent_diff with only nulls", func(t *testing.T) {
reducer := newSimpleReducer("percent_diff")
series := plugins.DataTimeSeries{
series := legacydata.DataTimeSeries{
Name: "test time series",
}
series.Points = append(series.Points, plugins.DataTimePoint{null.FloatFromPtr(nil), null.FloatFrom(1)})
series.Points = append(series.Points, plugins.DataTimePoint{null.FloatFromPtr(nil), null.FloatFrom(2)})
series.Points = append(series.Points, legacydata.DataTimePoint{null.FloatFromPtr(nil), null.FloatFrom(1)})
series.Points = append(series.Points, legacydata.DataTimePoint{null.FloatFromPtr(nil), null.FloatFrom(2)})
require.Equal(t, false, reducer.Reduce(series).Valid)
})
@ -368,12 +368,12 @@ func TestSimpleReducer(t *testing.T) {
t.Run("percent_diff_abs with only nulls", func(t *testing.T) {
reducer := newSimpleReducer("percent_diff_abs")
series := plugins.DataTimeSeries{
series := legacydata.DataTimeSeries{
Name: "test time series",
}
series.Points = append(series.Points, plugins.DataTimePoint{null.FloatFromPtr(nil), null.FloatFrom(1)})
series.Points = append(series.Points, plugins.DataTimePoint{null.FloatFromPtr(nil), null.FloatFrom(2)})
series.Points = append(series.Points, legacydata.DataTimePoint{null.FloatFromPtr(nil), null.FloatFrom(1)})
series.Points = append(series.Points, legacydata.DataTimePoint{null.FloatFromPtr(nil), null.FloatFrom(2)})
require.Equal(t, false, reducer.Reduce(series).Valid)
})
@ -396,12 +396,12 @@ func TestSimpleReducer(t *testing.T) {
func testReducer(reducerType string, datapoints ...float64) float64 {
reducer := newSimpleReducer(reducerType)
series := plugins.DataTimeSeries{
series := legacydata.DataTimeSeries{
Name: "test time series",
}
for idx := range datapoints {
series.Points = append(series.Points, plugins.DataTimePoint{null.FloatFrom(datapoints[idx]), null.FloatFrom(1234134)})
series.Points = append(series.Points, legacydata.DataTimePoint{null.FloatFrom(datapoints[idx]), null.FloatFrom(1234134)})
}
return reducer.Reduce(series).Float64

View File

@ -11,10 +11,10 @@ import (
"github.com/grafana/grafana/pkg/infra/log"
"github.com/grafana/grafana/pkg/infra/usagestats"
"github.com/grafana/grafana/pkg/models"
"github.com/grafana/grafana/pkg/plugins"
"github.com/grafana/grafana/pkg/services/encryption"
"github.com/grafana/grafana/pkg/services/rendering"
"github.com/grafana/grafana/pkg/setting"
"github.com/grafana/grafana/pkg/tsdb/legacydata"
"github.com/opentracing/opentracing-go"
"github.com/opentracing/opentracing-go/ext"
tlog "github.com/opentracing/opentracing-go/log"
@ -28,7 +28,7 @@ type AlertEngine struct {
RenderService rendering.Service
Bus bus.Bus
RequestValidator models.PluginRequestValidator
DataService plugins.DataRequestHandler
DataService legacydata.RequestHandler
Cfg *setting.Cfg
execQueue chan *Job
@ -48,7 +48,7 @@ func (e *AlertEngine) IsDisabled() bool {
// ProvideAlertEngine returns a new AlertEngine.
func ProvideAlertEngine(renderer rendering.Service, bus bus.Bus, requestValidator models.PluginRequestValidator,
dataService plugins.DataRequestHandler, usageStatsService usagestats.Service, encryptionService encryption.Service,
dataService legacydata.RequestHandler, usageStatsService usagestats.Service, encryptionService encryption.Service,
cfg *setting.Cfg) *AlertEngine {
e := &AlertEngine{
Cfg: cfg,

View File

@ -7,18 +7,18 @@ import (
"github.com/grafana/grafana/pkg/infra/log"
"github.com/grafana/grafana/pkg/infra/metrics"
"github.com/grafana/grafana/pkg/plugins"
"github.com/grafana/grafana/pkg/tsdb/legacydata"
)
// DefaultEvalHandler is responsible for evaluating the alert rule.
type DefaultEvalHandler struct {
log log.Logger
alertJobTimeout time.Duration
requestHandler plugins.DataRequestHandler
requestHandler legacydata.RequestHandler
}
// NewEvalHandler is the `DefaultEvalHandler` constructor.
func NewEvalHandler(requestHandler plugins.DataRequestHandler) *DefaultEvalHandler {
func NewEvalHandler(requestHandler legacydata.RequestHandler) *DefaultEvalHandler {
return &DefaultEvalHandler{
log: log.New("alerting.evalHandler"),
alertJobTimeout: time.Second * 5,

View File

@ -4,8 +4,8 @@ import (
"context"
"testing"
"github.com/grafana/grafana/pkg/plugins"
"github.com/grafana/grafana/pkg/services/validations"
"github.com/grafana/grafana/pkg/tsdb/legacydata"
"github.com/stretchr/testify/require"
)
@ -17,7 +17,7 @@ type conditionStub struct {
noData bool
}
func (c *conditionStub) Eval(context *EvalContext, reqHandler plugins.DataRequestHandler) (*ConditionResult, error) {
func (c *conditionStub) Eval(context *EvalContext, reqHandler legacydata.RequestHandler) (*ConditionResult, error) {
return &ConditionResult{Firing: c.firing, EvalMatches: c.matches, Operator: c.operator, NoDataFound: c.noData}, nil
}

View File

@ -5,7 +5,7 @@ import (
"time"
"github.com/grafana/grafana/pkg/models"
"github.com/grafana/grafana/pkg/plugins"
"github.com/grafana/grafana/pkg/tsdb/legacydata"
)
type evalHandler interface {
@ -60,5 +60,5 @@ type ConditionResult struct {
// Condition is responsible for evaluating an alert condition.
type Condition interface {
Eval(result *EvalContext, requestHandler plugins.DataRequestHandler) (*ConditionResult, error)
Eval(result *EvalContext, requestHandler legacydata.RequestHandler) (*ConditionResult, error)
}

View File

@ -8,15 +8,15 @@ import (
"github.com/grafana/grafana/pkg/components/simplejson"
"github.com/grafana/grafana/pkg/models"
"github.com/grafana/grafana/pkg/plugins"
"github.com/grafana/grafana/pkg/services/sqlstore"
"github.com/grafana/grafana/pkg/tsdb/legacydata"
"github.com/stretchr/testify/assert"
"github.com/stretchr/testify/require"
)
type FakeCondition struct{}
func (f *FakeCondition) Eval(context *EvalContext, reqHandler plugins.DataRequestHandler) (*ConditionResult, error) {
func (f *FakeCondition) Eval(context *EvalContext, reqHandler legacydata.RequestHandler) (*ConditionResult, error) {
return &ConditionResult{}, nil
}

View File

@ -6,6 +6,7 @@ import (
"time"
"github.com/grafana/grafana/pkg/api/routing"
"github.com/grafana/grafana/pkg/expr"
"github.com/grafana/grafana/pkg/infra/log"
"github.com/grafana/grafana/pkg/services/datasourceproxy"
"github.com/grafana/grafana/pkg/services/datasources"
@ -18,7 +19,6 @@ import (
"github.com/grafana/grafana/pkg/services/quota"
"github.com/grafana/grafana/pkg/services/secrets"
"github.com/grafana/grafana/pkg/setting"
"github.com/grafana/grafana/pkg/tsdb"
)
// timeNow makes it possible to test usage of time
@ -54,7 +54,7 @@ type API struct {
Cfg *setting.Cfg
DatasourceCache datasources.CacheService
RouteRegister routing.RouteRegister
DataService *tsdb.Service
ExpressionService *expr.Service
QuotaService *quota.QuotaService
Schedule schedule.ScheduleService
RuleStore store.RuleStore
@ -93,11 +93,11 @@ func (api *API) RegisterAPIEndpoints(m *metrics.API) {
RulerSrv{DatasourceCache: api.DatasourceCache, QuotaService: api.QuotaService, manager: api.StateManager, store: api.RuleStore, log: logger},
), m)
api.RegisterTestingApiEndpoints(TestingApiSrv{
AlertingProxy: proxy,
Cfg: api.Cfg,
DataService: api.DataService,
DatasourceCache: api.DatasourceCache,
log: logger,
AlertingProxy: proxy,
Cfg: api.Cfg,
ExpressionService: api.ExpressionService,
DatasourceCache: api.DatasourceCache,
log: logger,
}, m)
api.RegisterConfigurationApiEndpoints(AdminSrv{
store: api.AdminConfigStore,

View File

@ -8,22 +8,22 @@ import (
"strconv"
"github.com/grafana/grafana/pkg/api/response"
"github.com/grafana/grafana/pkg/expr"
"github.com/grafana/grafana/pkg/infra/log"
"github.com/grafana/grafana/pkg/models"
"github.com/grafana/grafana/pkg/services/datasources"
apimodels "github.com/grafana/grafana/pkg/services/ngalert/api/tooling/definitions"
"github.com/grafana/grafana/pkg/services/ngalert/eval"
"github.com/grafana/grafana/pkg/setting"
"github.com/grafana/grafana/pkg/tsdb"
"github.com/grafana/grafana/pkg/web"
)
type TestingApiSrv struct {
*AlertingProxy
Cfg *setting.Cfg
DataService *tsdb.Service
DatasourceCache datasources.CacheService
log log.Logger
Cfg *setting.Cfg
ExpressionService *expr.Service
DatasourceCache datasources.CacheService
log log.Logger
}
func (srv TestingApiSrv) RouteTestRuleConfig(c *models.ReqContext, body apimodels.TestRulePayload) response.Response {
@ -32,7 +32,7 @@ func (srv TestingApiSrv) RouteTestRuleConfig(c *models.ReqContext, body apimodel
if body.Type() != apimodels.GrafanaBackend || body.GrafanaManagedCondition == nil {
return ErrResp(http.StatusBadRequest, errors.New("unexpected payload"), "")
}
return conditionEval(c, *body.GrafanaManagedCondition, srv.DatasourceCache, srv.DataService, srv.Cfg, srv.log)
return conditionEval(c, *body.GrafanaManagedCondition, srv.DatasourceCache, srv.ExpressionService, srv.Cfg, srv.log)
}
if body.Type() != apimodels.LoTexRulerBackend {
@ -86,7 +86,7 @@ func (srv TestingApiSrv) RouteEvalQueries(c *models.ReqContext, cmd apimodels.Ev
}
evaluator := eval.Evaluator{Cfg: srv.Cfg, Log: srv.log}
evalResults, err := evaluator.QueriesAndExpressionsEval(c.SignedInUser.OrgId, cmd.Data, now, srv.DataService)
evalResults, err := evaluator.QueriesAndExpressionsEval(c.SignedInUser.OrgId, cmd.Data, now, srv.ExpressionService)
if err != nil {
return ErrResp(http.StatusBadRequest, err, "Failed to evaluate queries and expressions")
}

View File

@ -13,6 +13,7 @@ import (
"github.com/grafana/grafana-plugin-sdk-go/data"
"github.com/grafana/grafana/pkg/api/response"
"github.com/grafana/grafana/pkg/expr"
"github.com/grafana/grafana/pkg/infra/log"
"github.com/grafana/grafana/pkg/models"
"github.com/grafana/grafana/pkg/services/datasourceproxy"
@ -21,7 +22,6 @@ import (
"github.com/grafana/grafana/pkg/services/ngalert/eval"
ngmodels "github.com/grafana/grafana/pkg/services/ngalert/models"
"github.com/grafana/grafana/pkg/setting"
"github.com/grafana/grafana/pkg/tsdb"
"github.com/grafana/grafana/pkg/util"
"github.com/grafana/grafana/pkg/web"
"github.com/pkg/errors"
@ -223,7 +223,7 @@ func validateQueriesAndExpressions(data []ngmodels.AlertQuery, user *models.Sign
return refIDs, nil
}
func conditionEval(c *models.ReqContext, cmd ngmodels.EvalAlertConditionCommand, datasourceCache datasources.CacheService, dataService *tsdb.Service, cfg *setting.Cfg, log log.Logger) response.Response {
func conditionEval(c *models.ReqContext, cmd ngmodels.EvalAlertConditionCommand, datasourceCache datasources.CacheService, expressionService *expr.Service, cfg *setting.Cfg, log log.Logger) response.Response {
evalCond := ngmodels.Condition{
Condition: cmd.Condition,
OrgID: c.SignedInUser.OrgId,
@ -239,7 +239,7 @@ func conditionEval(c *models.ReqContext, cmd ngmodels.EvalAlertConditionCommand,
}
evaluator := eval.Evaluator{Cfg: cfg, Log: log}
evalResults, err := evaluator.ConditionEval(&evalCond, now, dataService)
evalResults, err := evaluator.ConditionEval(&evalCond, now, expressionService)
if err != nil {
return ErrResp(http.StatusBadRequest, err, "Failed to evaluate conditions")
}

View File

@ -14,7 +14,6 @@ import (
"github.com/grafana/grafana/pkg/services/ngalert/models"
"github.com/grafana/grafana/pkg/setting"
"github.com/grafana/grafana/pkg/tsdb"
"github.com/grafana/grafana-plugin-sdk-go/backend"
"github.com/grafana/grafana-plugin-sdk-go/data"
@ -165,10 +164,10 @@ type NumberValueCapture struct {
Value *float64
}
func executeCondition(ctx AlertExecCtx, c *models.Condition, now time.Time, dataService *tsdb.Service) ExecutionResults {
func executeCondition(ctx AlertExecCtx, c *models.Condition, now time.Time, exprService *expr.Service) ExecutionResults {
result := ExecutionResults{}
execResp, err := executeQueriesAndExpressions(ctx, c.Data, now, dataService)
execResp, err := executeQueriesAndExpressions(ctx, c.Data, now, exprService)
if err != nil {
return ExecutionResults{Error: err}
@ -232,7 +231,7 @@ func executeCondition(ctx AlertExecCtx, c *models.Condition, now time.Time, data
return result
}
func executeQueriesAndExpressions(ctx AlertExecCtx, data []models.AlertQuery, now time.Time, dataService *tsdb.Service) (resp *backend.QueryDataResponse, err error) {
func executeQueriesAndExpressions(ctx AlertExecCtx, data []models.AlertQuery, now time.Time, exprService *expr.Service) (resp *backend.QueryDataResponse, err error) {
defer func() {
if e := recover(); e != nil {
ctx.Log.Error("alert rule panic", "error", e, "stack", string(debug.Stack()))
@ -250,10 +249,6 @@ func executeQueriesAndExpressions(ctx AlertExecCtx, data []models.AlertQuery, no
return nil, err
}
exprService := expr.Service{
Cfg: &setting.Cfg{ExpressionsEnabled: ctx.ExpressionsEnabled},
DataService: dataService,
}
return exprService.TransformData(ctx.Ctx, queryDataReq)
}
@ -431,26 +426,26 @@ func (evalResults Results) AsDataFrame() data.Frame {
}
// ConditionEval executes conditions and evaluates the result.
func (e *Evaluator) ConditionEval(condition *models.Condition, now time.Time, dataService *tsdb.Service) (Results, error) {
func (e *Evaluator) ConditionEval(condition *models.Condition, now time.Time, expressionService *expr.Service) (Results, error) {
alertCtx, cancelFn := context.WithTimeout(context.Background(), e.Cfg.UnifiedAlerting.EvaluationTimeout)
defer cancelFn()
alertExecCtx := AlertExecCtx{OrgID: condition.OrgID, Ctx: alertCtx, ExpressionsEnabled: e.Cfg.ExpressionsEnabled, Log: e.Log}
execResult := executeCondition(alertExecCtx, condition, now, dataService)
execResult := executeCondition(alertExecCtx, condition, now, expressionService)
evalResults := evaluateExecutionResult(execResult, now)
return evalResults, nil
}
// QueriesAndExpressionsEval executes queries and expressions and returns the result.
func (e *Evaluator) QueriesAndExpressionsEval(orgID int64, data []models.AlertQuery, now time.Time, dataService *tsdb.Service) (*backend.QueryDataResponse, error) {
func (e *Evaluator) QueriesAndExpressionsEval(orgID int64, data []models.AlertQuery, now time.Time, expressionService *expr.Service) (*backend.QueryDataResponse, error) {
alertCtx, cancelFn := context.WithTimeout(context.Background(), e.Cfg.UnifiedAlerting.EvaluationTimeout)
defer cancelFn()
alertExecCtx := AlertExecCtx{OrgID: orgID, Ctx: alertCtx, ExpressionsEnabled: e.Cfg.ExpressionsEnabled, Log: e.Log}
execResult, err := executeQueriesAndExpressions(alertExecCtx, data, now, dataService)
execResult, err := executeQueriesAndExpressions(alertExecCtx, data, now, expressionService)
if err != nil {
return nil, fmt.Errorf("failed to execute conditions: %w", err)
}
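
For orientation, a minimal caller-side sketch of the evaluator's new shape. This is not part of the commit; the function name and wiring are placeholders, but the signatures follow the hunks above.

package example

import (
	"time"

	"github.com/grafana/grafana/pkg/expr"
	"github.com/grafana/grafana/pkg/infra/log"
	"github.com/grafana/grafana/pkg/services/ngalert/eval"
	ngmodels "github.com/grafana/grafana/pkg/services/ngalert/models"
	"github.com/grafana/grafana/pkg/setting"
)

// evaluate shows that callers now hand the evaluator an *expr.Service
// instead of the removed *tsdb.Service.
func evaluate(cfg *setting.Cfg, exprSvc *expr.Service, condition *ngmodels.Condition) (eval.Results, error) {
	evaluator := eval.Evaluator{Cfg: cfg, Log: log.New("example")}
	return evaluator.ConditionEval(condition, time.Now(), exprSvc)
}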

View File

@ -7,6 +7,7 @@ import (
"github.com/benbjohnson/clock"
"github.com/grafana/grafana/pkg/api/routing"
"github.com/grafana/grafana/pkg/expr"
"github.com/grafana/grafana/pkg/infra/kvstore"
"github.com/grafana/grafana/pkg/infra/log"
"github.com/grafana/grafana/pkg/services/datasourceproxy"
@ -22,7 +23,6 @@ import (
"github.com/grafana/grafana/pkg/services/secrets"
"github.com/grafana/grafana/pkg/services/sqlstore"
"github.com/grafana/grafana/pkg/setting"
"github.com/grafana/grafana/pkg/tsdb"
"golang.org/x/sync/errgroup"
)
@ -38,20 +38,20 @@ const (
)
func ProvideService(cfg *setting.Cfg, dataSourceCache datasources.CacheService, routeRegister routing.RouteRegister,
sqlStore *sqlstore.SQLStore, kvStore kvstore.KVStore, dataService *tsdb.Service, dataProxy *datasourceproxy.DataSourceProxyService,
sqlStore *sqlstore.SQLStore, kvStore kvstore.KVStore, expressionService *expr.Service, dataProxy *datasourceproxy.DataSourceProxyService,
quotaService *quota.QuotaService, secretsService secrets.Service, m *metrics.NGAlert) (*AlertNG, error) {
ng := &AlertNG{
Cfg: cfg,
DataSourceCache: dataSourceCache,
RouteRegister: routeRegister,
SQLStore: sqlStore,
KVStore: kvStore,
DataService: dataService,
DataProxy: dataProxy,
QuotaService: quotaService,
SecretsService: secretsService,
Metrics: m,
Log: log.New("ngalert"),
Cfg: cfg,
DataSourceCache: dataSourceCache,
RouteRegister: routeRegister,
SQLStore: sqlStore,
KVStore: kvStore,
ExpressionService: expressionService,
DataProxy: dataProxy,
QuotaService: quotaService,
SecretsService: secretsService,
Metrics: m,
Log: log.New("ngalert"),
}
if ng.IsDisabled() {
@ -67,19 +67,19 @@ func ProvideService(cfg *setting.Cfg, dataSourceCache datasources.CacheService,
// AlertNG is the service for evaluating the condition of an alert definition.
type AlertNG struct {
Cfg *setting.Cfg
DataSourceCache datasources.CacheService
RouteRegister routing.RouteRegister
SQLStore *sqlstore.SQLStore
KVStore kvstore.KVStore
DataService *tsdb.Service
DataProxy *datasourceproxy.DataSourceProxyService
QuotaService *quota.QuotaService
SecretsService secrets.Service
Metrics *metrics.NGAlert
Log log.Logger
schedule schedule.ScheduleService
stateManager *state.Manager
Cfg *setting.Cfg
DataSourceCache datasources.CacheService
RouteRegister routing.RouteRegister
SQLStore *sqlstore.SQLStore
KVStore kvstore.KVStore
ExpressionService *expr.Service
DataProxy *datasourceproxy.DataSourceProxyService
QuotaService *quota.QuotaService
SecretsService secrets.Service
Metrics *metrics.NGAlert
Log log.Logger
schedule schedule.ScheduleService
stateManager *state.Manager
// Alerting notification services
MultiOrgAlertmanager *notifier.MultiOrgAlertmanager
@ -136,7 +136,7 @@ func (ng *AlertNG) init() error {
appUrl = nil
}
stateManager := state.NewManager(ng.Log, ng.Metrics.GetStateMetrics(), appUrl, store, store)
scheduler := schedule.NewScheduler(schedCfg, ng.DataService, appUrl, stateManager)
scheduler := schedule.NewScheduler(schedCfg, ng.ExpressionService, appUrl, stateManager)
ng.stateManager = stateManager
ng.schedule = scheduler
@ -145,7 +145,7 @@ func (ng *AlertNG) init() error {
Cfg: ng.Cfg,
DatasourceCache: ng.DataSourceCache,
RouteRegister: ng.RouteRegister,
DataService: ng.DataService,
ExpressionService: ng.ExpressionService,
Schedule: ng.schedule,
DataProxy: ng.DataProxy,
QuotaService: ng.QuotaService,
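
A hedged construction sketch: the scheduler test later in this diff builds the expression service as expr.ProvideService(cfg, nil, nil), so the nils below stand in for dependencies whose types this diff does not show.

package example

import (
	"github.com/grafana/grafana/pkg/expr"
	"github.com/grafana/grafana/pkg/setting"
)

// newExpressionService mirrors the test construction; production code lets
// wire supply the real dependencies in place of the nils.
func newExpressionService(cfg *setting.Cfg) *expr.Service {
	return expr.ProvideService(cfg, nil, nil)
}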

View File

@ -8,6 +8,7 @@ import (
"sync"
"time"
"github.com/grafana/grafana/pkg/expr"
"github.com/grafana/grafana/pkg/infra/log"
"github.com/grafana/grafana/pkg/services/alerting"
"github.com/grafana/grafana/pkg/services/ngalert/eval"
@ -17,7 +18,6 @@ import (
"github.com/grafana/grafana/pkg/services/ngalert/sender"
"github.com/grafana/grafana/pkg/services/ngalert/state"
"github.com/grafana/grafana/pkg/services/ngalert/store"
"github.com/grafana/grafana/pkg/tsdb"
"github.com/benbjohnson/clock"
"golang.org/x/sync/errgroup"
@ -64,11 +64,11 @@ type schedule struct {
evaluator eval.Evaluator
ruleStore store.RuleStore
instanceStore store.InstanceStore
adminConfigStore store.AdminConfigurationStore
orgStore store.OrgStore
dataService *tsdb.Service
ruleStore store.RuleStore
instanceStore store.InstanceStore
adminConfigStore store.AdminConfigurationStore
orgStore store.OrgStore
expressionService *expr.Service
stateManager *state.Manager
@ -107,7 +107,7 @@ type SchedulerCfg struct {
}
// NewScheduler returns a new schedule.
func NewScheduler(cfg SchedulerCfg, dataService *tsdb.Service, appURL *url.URL, stateManager *state.Manager) *schedule {
func NewScheduler(cfg SchedulerCfg, expressionService *expr.Service, appURL *url.URL, stateManager *state.Manager) *schedule {
ticker := alerting.NewTicker(cfg.C.Now(), time.Second*0, cfg.C, int64(cfg.BaseInterval.Seconds()))
sch := schedule{
@ -123,7 +123,7 @@ func NewScheduler(cfg SchedulerCfg, dataService *tsdb.Service, appURL *url.URL,
ruleStore: cfg.RuleStore,
instanceStore: cfg.InstanceStore,
orgStore: cfg.OrgStore,
dataService: dataService,
expressionService: expressionService,
adminConfigStore: cfg.AdminConfigStore,
multiOrgNotifier: cfg.MultiOrgNotifier,
metrics: cfg.Metrics,
@ -449,7 +449,7 @@ func (sch *schedule) ruleRoutine(grafanaCtx context.Context, key models.AlertRul
OrgID: alertRule.OrgID,
Data: alertRule.Data,
}
results, err := sch.evaluator.ConditionEval(&condition, ctx.now, sch.dataService)
results, err := sch.evaluator.ConditionEval(&condition, ctx.now, sch.expressionService)
dur := sch.clock.Now().Sub(start)
evalTotal.Inc()
evalDuration.Observe(dur.Seconds())

View File

@ -17,6 +17,7 @@ import (
"github.com/prometheus/common/model"
"github.com/stretchr/testify/require"
"github.com/grafana/grafana/pkg/expr"
"github.com/grafana/grafana/pkg/infra/log"
apimodels "github.com/grafana/grafana/pkg/services/ngalert/api/tooling/definitions"
"github.com/grafana/grafana/pkg/services/ngalert/eval"
@ -604,7 +605,7 @@ func setupScheduler(t *testing.T, rs store.RuleStore, is store.InstanceStore, ac
Scheme: "http",
Host: "localhost",
}
return NewScheduler(schedCfg, nil, appUrl, st), mockedClock
return NewScheduler(schedCfg, expr.ProvideService(&setting.Cfg{ExpressionsEnabled: true}, nil, nil), appUrl, st), mockedClock
}
// createTestAlertRule creates a dummy alert definition to be used by the tests.

View File

@ -203,6 +203,7 @@ type Cfg struct {
EnforceDomain bool
// Security settings
SecretKey string
EmailCodeValidMinutes int
// build
@ -1139,6 +1140,7 @@ func (cfg *Cfg) SectionWithEnvOverrides(s string) *DynamicSection {
func readSecuritySettings(iniFile *ini.File, cfg *Cfg) error {
security := iniFile.Section("security")
SecretKey = valueAsString(security, "secret_key", "")
cfg.SecretKey = SecretKey
DisableGravatar = security.Key("disable_gravatar").MustBool(true)
cfg.DisableBruteForceLoginProtection = security.Key("disable_brute_force_login_protection").MustBool(false)

View File

@ -12,8 +12,8 @@ import (
"github.com/grafana/grafana-plugin-sdk-go/backend"
"github.com/grafana/grafana-plugin-sdk-go/data"
"github.com/grafana/grafana/pkg/components/simplejson"
"github.com/grafana/grafana/pkg/plugins"
"github.com/grafana/grafana/pkg/setting"
"github.com/grafana/grafana/pkg/tsdb/legacydata"
"github.com/stretchr/testify/assert"
"github.com/stretchr/testify/require"
)
@ -25,13 +25,13 @@ func TestBuildingAzureResourceGraphQueries(t *testing.T) {
tests := []struct {
name string
queryModel []backend.DataQuery
timeRange plugins.DataTimeRange
timeRange legacydata.DataTimeRange
azureResourceGraphQueries []*AzureResourceGraphQuery
Err require.ErrorAssertionFunc
}{
{
name: "Query with macros should be interpolated",
timeRange: plugins.DataTimeRange{
timeRange: legacydata.DataTimeRange{
From: fmt.Sprintf("%v", fromStart.Unix()*1000),
To: fmt.Sprintf("%v", fromStart.Add(34*time.Minute).Unix()*1000),
},

View File

@ -9,7 +9,7 @@ import (
"github.com/grafana/grafana-plugin-sdk-go/backend"
"github.com/grafana/grafana/pkg/components/simplejson"
"github.com/grafana/grafana/pkg/models"
"github.com/grafana/grafana/pkg/tsdb/interval"
"github.com/grafana/grafana/pkg/tsdb/legacydata/interval"
)
const rsIdentifier = `__(timeFilter|timeFrom|timeTo|interval|contains|escapeMulti)`

View File

@ -6,7 +6,7 @@ import (
"strings"
"time"
"github.com/grafana/grafana/pkg/tsdb/interval"
"github.com/grafana/grafana/pkg/tsdb/intervalv2"
)
// TimeGrain handles conversions between
@ -19,7 +19,7 @@ var (
)
func (tg *TimeGrain) createISO8601DurationFromIntervalMS(it int64) (string, error) {
formatted := interval.FormatDuration(time.Duration(it) * time.Millisecond)
formatted := intervalv2.FormatDuration(time.Duration(it) * time.Millisecond)
if strings.Contains(formatted, "ms") {
return "PT1M", nil
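
A small reference sketch for the renamed helper. The formatted strings are inferred from the interval tests elsewhere in this diff and should be treated as assumptions.

package example

import (
	"time"

	"github.com/grafana/grafana/pkg/tsdb/intervalv2"
)

// formatExamples exercises the exported FormatDuration that TimeGrain now uses.
func formatExamples() []string {
	return []string{
		intervalv2.FormatDuration(2 * time.Second),        // "2s" (assumed from tests)
		intervalv2.FormatDuration(500 * time.Millisecond), // "500ms" (assumed from tests)
	}
}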

View File

@ -107,7 +107,6 @@ func (r *logQueryRunner) publishResults(orgID int64, channelName string) error {
// executeLiveLogQuery executes a CloudWatch Logs query with live updates over WebSocket.
// A WebSocket channel is created, over which goroutines send responses.
//nolint: staticcheck // plugins.DataResponse deprecated
func (e *cloudWatchExecutor) executeLiveLogQuery(ctx context.Context, req *backend.QueryDataRequest) (*backend.QueryDataResponse, error) {
responseChannelName := uuid.New().String()
responseChannel := make(chan *backend.QueryDataResponse)
@ -133,7 +132,6 @@ func (e *cloudWatchExecutor) executeLiveLogQuery(ctx context.Context, req *backe
return response, nil
}
//nolint: staticcheck // plugins.DataResponse deprecated
func (e *cloudWatchExecutor) sendLiveQueriesToChannel(req *backend.QueryDataRequest, responseChannel chan *backend.QueryDataResponse) {
defer close(responseChannel)
@ -211,7 +209,6 @@ func (e *cloudWatchExecutor) fetchConcurrentQueriesQuota(region string, pluginCt
return defaultConcurrentQueries
}
//nolint: staticcheck // plugins.DataResponse deprecated
func (e *cloudWatchExecutor) startLiveQuery(ctx context.Context, responseChannel chan *backend.QueryDataResponse, query backend.DataQuery, timeRange backend.TimeRange, pluginCtx backend.PluginContext) error {
model, err := simplejson.NewJson(query.JSON)
if err != nil {

View File

@ -9,7 +9,6 @@ import (
func ProvideLogsService() *LogsService {
return &LogsService{
// nolint:staticcheck // plugins.DataQueryResponse deprecated
responseChannels: make(map[string]chan *backend.QueryDataResponse),
queues: make(map[string](chan bool)),
}
@ -17,14 +16,12 @@ func ProvideLogsService() *LogsService {
// LogsService provides methods for querying CloudWatch Logs.
type LogsService struct {
channelMu sync.Mutex
// nolint:staticcheck // plugins.DataQueryResult deprecated
channelMu sync.Mutex
responseChannels map[string]chan *backend.QueryDataResponse
queues map[string](chan bool)
queueLock sync.Mutex
}
// nolint:staticcheck // plugins.DataQueryResult deprecated
func (s *LogsService) AddResponseChannel(name string, channel chan *backend.QueryDataResponse) error {
s.channelMu.Lock()
defer s.channelMu.Unlock()
@ -37,7 +34,6 @@ func (s *LogsService) AddResponseChannel(name string, channel chan *backend.Quer
return nil
}
// nolint:staticcheck // plugins.DataQueryResult deprecated
func (s *LogsService) GetResponseChannel(name string) (chan *backend.QueryDataResponse, error) {
s.channelMu.Lock()
defer s.channelMu.Unlock()

View File

@ -6,7 +6,7 @@ import (
"github.com/grafana/grafana-plugin-sdk-go/backend"
"github.com/grafana/grafana/pkg/components/simplejson"
"github.com/grafana/grafana/pkg/tsdb"
"github.com/grafana/grafana/pkg/tsdb/legacydata"
"github.com/stretchr/testify/assert"
"github.com/stretchr/testify/require"
)
@ -48,7 +48,7 @@ func TestRequestParser(t *testing.T) {
})
})
timeRange := tsdb.NewTimeRange("now-1h", "now-2h")
timeRange := legacydata.NewDataTimeRange("now-1h", "now-2h")
from, err := timeRange.ParseFrom()
require.NoError(t, err)
to, err := timeRange.ParseTo()
@ -138,7 +138,7 @@ func TestRequestParser(t *testing.T) {
"hide": false,
})
query.Set("period", "900")
timeRange := tsdb.NewTimeRange("now-1h", "now-2h")
timeRange := legacydata.NewDataTimeRange("now-1h", "now-2h")
from, err := timeRange.ParseFrom()
require.NoError(t, err)
to, err := timeRange.ParseTo()

View File

@ -1,110 +0,0 @@
package tsdb
import (
"context"
"fmt"
"time"
"github.com/grafana/grafana-plugin-sdk-go/backend"
"github.com/grafana/grafana/pkg/models"
"github.com/grafana/grafana/pkg/plugins"
"github.com/grafana/grafana/pkg/plugins/adapters"
"github.com/grafana/grafana/pkg/services/datasources"
"github.com/grafana/grafana/pkg/services/oauthtoken"
)
// nolint:staticcheck // plugins.DataQuery deprecated
func dataPluginQueryAdapter(pluginID string, handler backend.QueryDataHandler, oAuthService oauthtoken.OAuthTokenService,
dsService *datasources.Service) plugins.DataPluginFunc {
return func(ctx context.Context, ds *models.DataSource, query plugins.DataQuery) (plugins.DataResponse, error) {
jsonDataBytes, err := ds.JsonData.MarshalJSON()
if err != nil {
return plugins.DataResponse{}, err
}
instanceSettings := &backend.DataSourceInstanceSettings{
ID: ds.Id,
Name: ds.Name,
URL: ds.Url,
Database: ds.Database,
User: ds.User,
BasicAuthEnabled: ds.BasicAuth,
BasicAuthUser: ds.BasicAuthUser,
JSONData: jsonDataBytes,
DecryptedSecureJSONData: dsService.DecryptedValues(ds),
Updated: ds.Updated,
UID: ds.Uid,
}
if query.Headers == nil {
query.Headers = make(map[string]string)
}
if oAuthService.IsOAuthPassThruEnabled(ds) {
if token := oAuthService.GetCurrentOAuthToken(ctx, query.User); token != nil {
delete(query.Headers, "Authorization")
query.Headers["Authorization"] = fmt.Sprintf("%s %s", token.Type(), token.AccessToken)
}
}
req := &backend.QueryDataRequest{
PluginContext: backend.PluginContext{
OrgID: ds.OrgId,
PluginID: pluginID,
User: adapters.BackendUserFromSignedInUser(query.User),
DataSourceInstanceSettings: instanceSettings,
},
Queries: []backend.DataQuery{},
Headers: query.Headers,
}
for _, q := range query.Queries {
modelJSON, err := q.Model.MarshalJSON()
if err != nil {
return plugins.DataResponse{}, err
}
req.Queries = append(req.Queries, backend.DataQuery{
RefID: q.RefID,
Interval: time.Duration(q.IntervalMS) * time.Millisecond,
MaxDataPoints: q.MaxDataPoints,
TimeRange: backend.TimeRange{
From: query.TimeRange.GetFromAsTimeUTC(),
To: query.TimeRange.GetToAsTimeUTC(),
},
QueryType: q.QueryType,
JSON: modelJSON,
})
}
resp, err := handler.QueryData(ctx, req)
if err != nil {
return plugins.DataResponse{}, err
}
tR := plugins.DataResponse{
Results: make(map[string]plugins.DataQueryResult, len(resp.Responses)),
}
for refID, r := range resp.Responses {
qr := plugins.DataQueryResult{
RefID: refID,
}
for _, f := range r.Frames {
if f.RefID == "" {
f.RefID = refID
}
}
qr.Dataframes = plugins.NewDecodedDataFrames(r.Frames)
if r.Error != nil {
qr.Error = r.Error
}
tR.Results[refID] = qr
}
return tR, nil
}
}

View File

@ -42,7 +42,6 @@ var newResponseParser = func(responses []*es.SearchResponse, targets []*Query, d
}
}
// nolint:staticcheck
func (rp *responseParser) getTimeSeries() (*backend.QueryDataResponse, error) {
result := backend.QueryDataResponse{
Responses: backend.Responses{},
@ -93,7 +92,6 @@ func (rp *responseParser) getTimeSeries() (*backend.QueryDataResponse, error) {
return &result, nil
}
// nolint:staticcheck
func (rp *responseParser) processBuckets(aggs map[string]interface{}, target *Query,
queryResult *backend.DataResponse, props map[string]string, depth int) error {
var err error
@ -172,7 +170,7 @@ func (rp *responseParser) processBuckets(aggs map[string]interface{}, target *Qu
return nil
}
// nolint:staticcheck,gocyclo
// nolint:gocyclo
func (rp *responseParser) processMetrics(esAgg *simplejson.Json, target *Query, query *backend.DataResponse,
props map[string]string) error {
frames := data.Frames{}
@ -365,7 +363,6 @@ func (rp *responseParser) processMetrics(esAgg *simplejson.Json, target *Query,
return nil
}
// nolint:staticcheck
func (rp *responseParser) processAggregationDocs(esAgg *simplejson.Json, aggDef *BucketAgg, target *Query,
queryResult *backend.DataResponse, props map[string]string) error {
propKeys := make([]string, 0)
@ -517,7 +514,6 @@ func extractDataField(name string, v interface{}) *data.Field {
}
}
// nolint:staticcheck
func (rp *responseParser) trimDatapoints(queryResult backend.DataResponse, target *Query) {
var histogram *BucketAgg
for _, bucketAgg := range target.BucketAggs {
@ -551,7 +547,6 @@ func (rp *responseParser) trimDatapoints(queryResult backend.DataResponse, targe
}
}
// nolint:staticcheck
func (rp *responseParser) nameFields(queryResult backend.DataResponse, target *Query) {
set := make(map[string]struct{})
frames := queryResult.Frames
@ -575,7 +570,6 @@ func (rp *responseParser) nameFields(queryResult backend.DataResponse, target *Q
var aliasPatternRegex = regexp.MustCompile(`\{\{([\s\S]+?)\}\}`)
// nolint:staticcheck
func (rp *responseParser) getFieldName(dataField data.Field, target *Query, metricTypeCount int) string {
metricType := dataField.Labels["metric"]
metricName := rp.getMetricName(metricType)
@ -708,7 +702,6 @@ func findAgg(target *Query, aggID string) (*BucketAgg, error) {
return nil, errors.New("can't find aggDef, aggID:" + aggID)
}
// nolint:staticcheck
func getErrorFromElasticResponse(response *es.SearchResponse) string {
var errorString string
json := simplejson.NewFromAny(response.Error)

View File

@ -28,7 +28,6 @@ var newTimeSeriesQuery = func(client es.Client, dataQuery []backend.DataQuery,
}
}
// nolint:staticcheck
func (e *timeSeriesQuery) execute() (*backend.QueryDataResponse, error) {
tsQueryParser := newTimeSeriesQueryParser()
queries, err := tsQueryParser.parse(e.dataQueries)
@ -63,7 +62,6 @@ func (e *timeSeriesQuery) execute() (*backend.QueryDataResponse, error) {
return rp.getTimeSeries()
}
// nolint:staticcheck
func (e *timeSeriesQuery) processQuery(q *Query, ms *es.MultiSearchRequestBuilder, from, to string,
result backend.QueryDataResponse) error {
minInterval, err := e.client.GetMinInterval(q.Interval)

View File

@ -412,7 +412,7 @@ func TestExecuteTimeSeriesQuery(t *testing.T) {
"id": "2",
"type": "date_histogram",
"field": "@timestamp",
"settings": {
"settings": {
"timeZone": "utc"
}
}
@ -435,7 +435,7 @@ func TestExecuteTimeSeriesQuery(t *testing.T) {
"id": "2",
"type": "date_histogram",
"field": "@timestamp",
"settings": {
"settings": {
"timeZone": "America/Los_Angeles"
}
}
@ -1181,7 +1181,6 @@ func newDataQuery(body string) (backend.QueryDataRequest, error) {
}, nil
}
// nolint:staticcheck // plugins.DataQueryResult deprecated
func executeTsdbQuery(c es.Client, body string, from, to time.Time, minInterval time.Duration) (
*backend.QueryDataResponse, error) {
timeRange := backend.TimeRange{

View File

@ -26,6 +26,7 @@ import (
"github.com/grafana/grafana/pkg/plugins"
"github.com/grafana/grafana/pkg/plugins/backendplugin/coreplugin"
"github.com/grafana/grafana/pkg/setting"
"github.com/grafana/grafana/pkg/tsdb/legacydata"
"github.com/opentracing/opentracing-go"
)
@ -297,7 +298,7 @@ func epochMStoGraphiteTime(tr backend.TimeRange) (string, string) {
/**
* Graphite should always return timestamp as a number but values might be nil when data is missing
*/
func parseDataTimePoint(dataTimePoint plugins.DataTimePoint) (time.Time, *float64, error) {
func parseDataTimePoint(dataTimePoint legacydata.DataTimePoint) (time.Time, *float64, error) {
if !dataTimePoint[1].Valid {
return time.Time{}, nil, errors.New("failed to parse data point timestamp")
}

View File

@ -1,10 +1,10 @@
package graphite
import "github.com/grafana/grafana/pkg/plugins"
import "github.com/grafana/grafana/pkg/tsdb/legacydata"
type TargetResponseDTO struct {
Target string `json:"target"`
DataPoints plugins.DataTimeSeriesPoints `json:"datapoints"`
Target string `json:"target"`
DataPoints legacydata.DataTimeSeriesPoints `json:"datapoints"`
// Graphite <=1.1.7 may return some tags as numbers requiring extra conversion. See https://github.com/grafana/grafana/issues/37614
Tags map[string]interface{} `json:"tags"`
}
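
To make the moved type concrete, a decoding sketch follows Graphite's render JSON, where datapoints are [value, timestamp-in-seconds] pairs and values may be null; the sample body and the numeric rollup tag (illustrating the <=1.1.7 quirk noted above) are invented for illustration.

package example

import (
	"encoding/json"

	"github.com/grafana/grafana/pkg/tsdb/graphite"
)

// decodeTarget unmarshals a sample render response into the DTO above.
func decodeTarget() (graphite.TargetResponseDTO, error) {
	body := []byte(`{"target":"app.requests","datapoints":[[3.5,1631772600],[null,1631772660]],"tags":{"host":"web-01","rollup":60}}`)
	var dto graphite.TargetResponseDTO
	err := json.Unmarshal(body, &dto)
	return dto, err
}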

View File

@ -8,11 +8,10 @@ import (
"github.com/grafana/grafana-plugin-sdk-go/backend"
"github.com/grafana/grafana-plugin-sdk-go/backend/gtime"
"github.com/grafana/grafana/pkg/tsdb/interval"
)
var (
defaultRes int64 = 1500
DefaultRes int64 = 1500
defaultMinInterval = time.Millisecond * 1
year = time.Hour * 24 * 365
day = time.Hour * 24
@ -59,18 +58,18 @@ func (ic *intervalCalculator) Calculate(timerange backend.TimeRange, minInterval
from := timerange.From.UnixNano()
resolution := maxDataPoints
if resolution == 0 {
resolution = defaultRes
resolution = DefaultRes
}
calculatedInterval := time.Duration((to - from) / resolution)
if calculatedInterval < minInterval {
return Interval{Text: interval.FormatDuration(minInterval), Value: minInterval}
return Interval{Text: FormatDuration(minInterval), Value: minInterval}
}
rounded := roundInterval(calculatedInterval)
return Interval{Text: interval.FormatDuration(rounded), Value: rounded}
return Interval{Text: FormatDuration(rounded), Value: rounded}
}
func (ic *intervalCalculator) CalculateSafeInterval(timerange backend.TimeRange, safeRes int64) Interval {
@ -79,7 +78,7 @@ func (ic *intervalCalculator) CalculateSafeInterval(timerange backend.TimeRange,
safeInterval := time.Duration((to - from) / safeRes)
rounded := roundInterval(safeInterval)
return Interval{Text: interval.FormatDuration(rounded), Value: rounded}
return Interval{Text: FormatDuration(rounded), Value: rounded}
}
// GetIntervalFrom returns the minimum interval.
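
A worked example of the math above: with the now-exported DefaultRes of 1500 points, a one-hour range yields 3600s/1500 = 2.4s per point, which roundInterval snaps to the "2s" seen in the interval tests later in this diff.

package example

import (
	"time"

	"github.com/grafana/grafana/pkg/tsdb/intervalv2"
)

// rawDefaultInterval reproduces Calculate's pre-rounding arithmetic for a 1h range.
func rawDefaultInterval() time.Duration {
	span := time.Hour
	return time.Duration(span.Nanoseconds() / intervalv2.DefaultRes) // 2.4s before rounding
}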

View File

@ -1,21 +1,25 @@
package plugins
package legacydata
import (
"context"
"encoding/base64"
"encoding/json"
"fmt"
"strconv"
"time"
"github.com/grafana/grafana-plugin-sdk-go/backend"
"github.com/grafana/grafana-plugin-sdk-go/data"
"github.com/grafana/grafana/pkg/components/null"
"github.com/grafana/grafana/pkg/components/simplejson"
"github.com/grafana/grafana/pkg/models"
"github.com/timberio/go-datemath"
)
// RequestHandler is a data request handler interface.
// Deprecated: use backend.QueryDataHandler instead.
type RequestHandler interface {
// HandleRequest handles a data request.
HandleRequest(context.Context, *models.DataSource, DataQuery) (DataResponse, error)
}
// DataSubQuery represents a data sub-query. New work should use the plugin SDK.
type DataSubQuery struct {
RefID string `json:"refId"`
@ -35,12 +39,6 @@ type DataQuery struct {
User *models.SignedInUser
}
type DataTimeRange struct {
From string
To string
Now time.Time
}
type DataTable struct {
Columns []DataTableColumn `json:"columns"`
Rows []DataRowValues `json:"rows"`
@ -230,131 +228,3 @@ func (r DataResponse) ToBackendDataResponse() (*backend.QueryDataResponse, error
}
return qdr, nil
}
// Deprecated: use the plugin SDK
type DataPlugin interface {
DataQuery(ctx context.Context, ds *models.DataSource, query DataQuery) (DataResponse, error)
}
type DataPluginFunc func(ctx context.Context, ds *models.DataSource, query DataQuery) (DataResponse, error)
func (f DataPluginFunc) DataQuery(ctx context.Context, ds *models.DataSource, query DataQuery) (DataResponse, error) {
return f(ctx, ds, query)
}
func NewDataTimeRange(from, to string) DataTimeRange {
return DataTimeRange{
From: from,
To: to,
Now: time.Now(),
}
}
func (tr *DataTimeRange) GetFromAsMsEpoch() int64 {
return tr.MustGetFrom().UnixNano() / int64(time.Millisecond)
}
func (tr *DataTimeRange) GetFromAsSecondsEpoch() int64 {
return tr.GetFromAsMsEpoch() / 1000
}
func (tr *DataTimeRange) GetFromAsTimeUTC() time.Time {
return tr.MustGetFrom().UTC()
}
func (tr *DataTimeRange) GetToAsMsEpoch() int64 {
return tr.MustGetTo().UnixNano() / int64(time.Millisecond)
}
func (tr *DataTimeRange) GetToAsSecondsEpoch() int64 {
return tr.GetToAsMsEpoch() / 1000
}
func (tr *DataTimeRange) GetToAsTimeUTC() time.Time {
return tr.MustGetTo().UTC()
}
func (tr *DataTimeRange) MustGetFrom() time.Time {
res, err := tr.ParseFrom()
if err != nil {
return time.Unix(0, 0)
}
return res
}
func (tr *DataTimeRange) MustGetTo() time.Time {
res, err := tr.ParseTo()
if err != nil {
return time.Unix(0, 0)
}
return res
}
func (tr DataTimeRange) ParseFrom() (time.Time, error) {
return parseTimeRange(tr.From, tr.Now, false, nil)
}
func (tr DataTimeRange) ParseTo() (time.Time, error) {
return parseTimeRange(tr.To, tr.Now, true, nil)
}
func (tr DataTimeRange) ParseFromWithLocation(location *time.Location) (time.Time, error) {
return parseTimeRange(tr.From, tr.Now, false, location)
}
func (tr DataTimeRange) ParseToWithLocation(location *time.Location) (time.Time, error) {
return parseTimeRange(tr.To, tr.Now, true, location)
}
func parseTimeRange(s string, now time.Time, withRoundUp bool, location *time.Location) (time.Time, error) {
if val, err := strconv.ParseInt(s, 10, 64); err == nil {
seconds := val / 1000
nano := (val - seconds*1000) * 1000000
return time.Unix(seconds, nano), nil
}
diff, err := time.ParseDuration("-" + s)
if err != nil {
options := []func(*datemath.Options){
datemath.WithNow(now),
datemath.WithRoundUp(withRoundUp),
}
if location != nil {
options = append(options, datemath.WithLocation(location))
}
return datemath.ParseAndEvaluate(s, options...)
}
return now.Add(diff), nil
}
// SeriesToFrame converts a DataTimeSeries to an SDK frame.
func SeriesToFrame(series DataTimeSeries) (*data.Frame, error) {
timeVec := make([]*time.Time, len(series.Points))
floatVec := make([]*float64, len(series.Points))
for idx, point := range series.Points {
timeVec[idx], floatVec[idx] = convertDataTimePoint(point)
}
frame := data.NewFrame(series.Name,
data.NewField("time", nil, timeVec),
data.NewField("value", data.Labels(series.Tags), floatVec),
)
return frame, nil
}
// convertDataTimePoint converts a DataTimePoint into two values appropriate
// for Series values.
func convertDataTimePoint(point DataTimePoint) (t *time.Time, f *float64) {
timeIdx, valueIdx := 1, 0
if point[timeIdx].Valid { // Assuming valid is null?
tI := int64(point[timeIdx].Float64)
uT := time.Unix(tI/int64(1e+3), (tI%int64(1e+3))*int64(1e+6)) // time.Time from millisecond unix ts
t = &uT
}
if point[valueIdx].Valid {
f = &point[valueIdx].Float64
}
return
}
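
To make the deprecated contract concrete, a minimal no-op implementation is sketched below; it is hypothetical, and the real bridge this commit adds lives in pkg/tsdb/legacydata/service further down.

package example

import (
	"context"

	"github.com/grafana/grafana/pkg/models"
	"github.com/grafana/grafana/pkg/tsdb/legacydata"
)

// noopHandler satisfies legacydata.RequestHandler; per the deprecation note,
// new code should implement backend.QueryDataHandler instead.
type noopHandler struct{}

func (noopHandler) HandleRequest(_ context.Context, _ *models.DataSource, _ legacydata.DataQuery) (legacydata.DataResponse, error) {
	return legacydata.DataResponse{}, nil
}

var _ legacydata.RequestHandler = noopHandler{}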

View File

@ -1,4 +1,4 @@
package plugins
package legacydata
import (
"github.com/grafana/grafana-plugin-sdk-go/data"

View File

@ -0,0 +1,3 @@
// Package legacydata contains old/legacy interfaces/contracts that use a series/tables data format.
// Deprecated: use github.com/grafana/grafana-plugin-sdk-go/backend instead.
package legacydata
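
As the deprecation notice suggests, new code should target the SDK contract directly; a minimal sketch (type name hypothetical):

package example

import (
	"context"

	"github.com/grafana/grafana-plugin-sdk-go/backend"
)

// sdkHandler is the shape that replaces legacydata.RequestHandler.
type sdkHandler struct{}

func (sdkHandler) QueryData(_ context.Context, _ *backend.QueryDataRequest) (*backend.QueryDataResponse, error) {
	return backend.NewQueryDataResponse(), nil
}

var _ backend.QueryDataHandler = sdkHandler{}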

View File

@ -9,7 +9,7 @@ import (
"github.com/grafana/grafana-plugin-sdk-go/backend/gtime"
"github.com/grafana/grafana/pkg/components/simplejson"
"github.com/grafana/grafana/pkg/models"
"github.com/grafana/grafana/pkg/plugins"
"github.com/grafana/grafana/pkg/tsdb/legacydata"
)
var (
@ -29,8 +29,8 @@ type intervalCalculator struct {
}
type Calculator interface {
Calculate(timeRange plugins.DataTimeRange, interval time.Duration) Interval
CalculateSafeInterval(timeRange plugins.DataTimeRange, resolution int64) Interval
Calculate(timeRange legacydata.DataTimeRange, interval time.Duration) Interval
CalculateSafeInterval(timeRange legacydata.DataTimeRange, resolution int64) Interval
}
type CalculatorOptions struct {
@ -55,7 +55,7 @@ func (i *Interval) Milliseconds() int64 {
return i.Value.Nanoseconds() / int64(time.Millisecond)
}
func (ic *intervalCalculator) Calculate(timerange plugins.DataTimeRange, minInterval time.Duration) Interval {
func (ic *intervalCalculator) Calculate(timerange legacydata.DataTimeRange, minInterval time.Duration) Interval {
to := timerange.MustGetTo().UnixNano()
from := timerange.MustGetFrom().UnixNano()
calculatedInterval := time.Duration((to - from) / DefaultRes)
@ -68,7 +68,7 @@ func (ic *intervalCalculator) Calculate(timerange plugins.DataTimeRange, minInte
return Interval{Text: FormatDuration(rounded), Value: rounded}
}
func (ic *intervalCalculator) CalculateSafeInterval(timerange plugins.DataTimeRange, safeRes int64) Interval {
func (ic *intervalCalculator) CalculateSafeInterval(timerange legacydata.DataTimeRange, safeRes int64) Interval {
to := timerange.MustGetTo().UnixNano()
from := timerange.MustGetFrom().UnixNano()
safeInterval := time.Duration((to - from) / safeRes)

View File

@ -6,7 +6,7 @@ import (
"github.com/grafana/grafana/pkg/components/simplejson"
"github.com/grafana/grafana/pkg/models"
"github.com/grafana/grafana/pkg/plugins"
"github.com/grafana/grafana/pkg/tsdb/legacydata"
"github.com/stretchr/testify/assert"
"github.com/stretchr/testify/require"
)
@ -16,13 +16,13 @@ func TestIntervalCalculator_Calculate(t *testing.T) {
testCases := []struct {
name string
timeRange plugins.DataTimeRange
timeRange legacydata.DataTimeRange
expected string
}{
{"from 5m to now", plugins.NewDataTimeRange("5m", "now"), "200ms"},
{"from 15m to now", plugins.NewDataTimeRange("15m", "now"), "500ms"},
{"from 30m to now", plugins.NewDataTimeRange("30m", "now"), "1s"},
{"from 1h to now", plugins.NewDataTimeRange("1h", "now"), "2s"},
{"from 5m to now", legacydata.NewDataTimeRange("5m", "now"), "200ms"},
{"from 15m to now", legacydata.NewDataTimeRange("15m", "now"), "500ms"},
{"from 30m to now", legacydata.NewDataTimeRange("30m", "now"), "1s"},
{"from 1h to now", legacydata.NewDataTimeRange("1h", "now"), "2s"},
}
for _, tc := range testCases {
@ -38,14 +38,14 @@ func TestIntervalCalculator_CalculateSafeInterval(t *testing.T) {
testCases := []struct {
name string
timeRange plugins.DataTimeRange
timeRange legacydata.DataTimeRange
safeResolution int64
expected string
}{
{"from 5m to now", plugins.NewDataTimeRange("5m", "now"), 11000, "20ms"},
{"from 15m to now", plugins.NewDataTimeRange("15m", "now"), 11000, "100ms"},
{"from 30m to now", plugins.NewDataTimeRange("30m", "now"), 11000, "200ms"},
{"from 24h to now", plugins.NewDataTimeRange("24h", "now"), 11000, "10s"},
{"from 5m to now", legacydata.NewDataTimeRange("5m", "now"), 11000, "20ms"},
{"from 15m to now", legacydata.NewDataTimeRange("15m", "now"), 11000, "100ms"},
{"from 30m to now", legacydata.NewDataTimeRange("30m", "now"), 11000, "200ms"},
{"from 24h to now", legacydata.NewDataTimeRange("24h", "now"), 11000, "10s"},
}
for _, tc := range testCases {

View File

@ -0,0 +1,128 @@
package service
import (
"context"
"fmt"
"time"
"github.com/grafana/grafana-plugin-sdk-go/backend"
"github.com/grafana/grafana/pkg/models"
"github.com/grafana/grafana/pkg/plugins"
"github.com/grafana/grafana/pkg/plugins/adapters"
"github.com/grafana/grafana/pkg/services/datasources"
"github.com/grafana/grafana/pkg/services/oauthtoken"
"github.com/grafana/grafana/pkg/tsdb/legacydata"
)
var oAuthIsOAuthPassThruEnabledFunc = func(oAuthTokenService oauthtoken.OAuthTokenService, ds *models.DataSource) bool {
return oAuthTokenService.IsOAuthPassThruEnabled(ds)
}
type Service struct {
pluginsClient plugins.Client
oAuthTokenService oauthtoken.OAuthTokenService
dataSourcesService *datasources.Service
}
func ProvideService(pluginsClient plugins.Client, oAuthTokenService oauthtoken.OAuthTokenService, dataSourcesService *datasources.Service) *Service {
return &Service{
pluginsClient: pluginsClient,
oAuthTokenService: oAuthTokenService,
dataSourcesService: dataSourcesService,
}
}
//nolint: staticcheck // legacydata.DataResponse deprecated
func (h *Service) HandleRequest(ctx context.Context, ds *models.DataSource, query legacydata.DataQuery) (legacydata.DataResponse, error) {
jsonDataBytes, err := ds.JsonData.MarshalJSON()
if err != nil {
return legacydata.DataResponse{}, err
}
instanceSettings := &backend.DataSourceInstanceSettings{
ID: ds.Id,
Name: ds.Name,
URL: ds.Url,
Database: ds.Database,
User: ds.User,
BasicAuthEnabled: ds.BasicAuth,
BasicAuthUser: ds.BasicAuthUser,
JSONData: jsonDataBytes,
DecryptedSecureJSONData: h.dataSourcesService.DecryptedValues(ds),
Updated: ds.Updated,
UID: ds.Uid,
}
if query.Headers == nil {
query.Headers = make(map[string]string)
}
if oAuthIsOAuthPassThruEnabledFunc(h.oAuthTokenService, ds) {
if token := h.oAuthTokenService.GetCurrentOAuthToken(ctx, query.User); token != nil {
delete(query.Headers, "Authorization")
query.Headers["Authorization"] = fmt.Sprintf("%s %s", token.Type(), token.AccessToken)
}
}
req := &backend.QueryDataRequest{
PluginContext: backend.PluginContext{
OrgID: ds.OrgId,
PluginID: ds.Type,
User: adapters.BackendUserFromSignedInUser(query.User),
DataSourceInstanceSettings: instanceSettings,
},
Queries: []backend.DataQuery{},
Headers: query.Headers,
}
for _, q := range query.Queries {
modelJSON, err := q.Model.MarshalJSON()
if err != nil {
return legacydata.DataResponse{}, err
}
req.Queries = append(req.Queries, backend.DataQuery{
RefID: q.RefID,
Interval: time.Duration(q.IntervalMS) * time.Millisecond,
MaxDataPoints: q.MaxDataPoints,
TimeRange: backend.TimeRange{
From: query.TimeRange.GetFromAsTimeUTC(),
To: query.TimeRange.GetToAsTimeUTC(),
},
QueryType: q.QueryType,
JSON: modelJSON,
})
}
resp, err := h.pluginsClient.QueryData(ctx, req)
if err != nil {
return legacydata.DataResponse{}, err
}
tR := legacydata.DataResponse{
Results: make(map[string]legacydata.DataQueryResult, len(resp.Responses)),
}
for refID, r := range resp.Responses {
qr := legacydata.DataQueryResult{
RefID: refID,
}
for _, f := range r.Frames {
if f.RefID == "" {
f.RefID = refID
}
}
qr.Dataframes = legacydata.NewDecodedDataFrames(r.Frames)
if r.Error != nil {
qr.Error = r.Error
}
tR.Results[refID] = qr
}
return tR, nil
}
var _ legacydata.RequestHandler = &Service{}

View File

@ -0,0 +1,67 @@
package service
import (
"context"
"testing"
"github.com/grafana/grafana-plugin-sdk-go/backend"
"github.com/grafana/grafana/pkg/bus"
"github.com/grafana/grafana/pkg/components/simplejson"
"github.com/grafana/grafana/pkg/models"
"github.com/grafana/grafana/pkg/plugins"
"github.com/grafana/grafana/pkg/services/datasources"
"github.com/grafana/grafana/pkg/services/oauthtoken"
"github.com/grafana/grafana/pkg/services/secrets/fakes"
secretsManager "github.com/grafana/grafana/pkg/services/secrets/manager"
"github.com/grafana/grafana/pkg/tsdb/legacydata"
"github.com/stretchr/testify/require"
)
func TestHandleRequest(t *testing.T) {
t.Run("Should invoke plugin manager QueryData when handling request for query", func(t *testing.T) {
origOAuthIsOAuthPassThruEnabledFunc := oAuthIsOAuthPassThruEnabledFunc
oAuthIsOAuthPassThruEnabledFunc = func(oAuthTokenService oauthtoken.OAuthTokenService, ds *models.DataSource) bool {
return false
}
t.Cleanup(func() {
oAuthIsOAuthPassThruEnabledFunc = origOAuthIsOAuthPassThruEnabledFunc
})
client := &fakePluginsClient{}
var actualReq *backend.QueryDataRequest
client.QueryDataHandlerFunc = func(ctx context.Context, req *backend.QueryDataRequest) (*backend.QueryDataResponse, error) {
actualReq = req
return backend.NewQueryDataResponse(), nil
}
secretsService := secretsManager.SetupTestService(t, fakes.NewFakeSecretsStore())
dsService := datasources.ProvideService(bus.New(), nil, secretsService)
s := ProvideService(client, nil, dsService)
ds := &models.DataSource{Id: 12, Type: "unregisteredType", JsonData: simplejson.New()}
req := legacydata.DataQuery{
TimeRange: &legacydata.DataTimeRange{},
Queries: []legacydata.DataSubQuery{
{RefID: "A", DataSource: &models.DataSource{Id: 1, Type: "test"}, Model: simplejson.New()},
{RefID: "B", DataSource: &models.DataSource{Id: 1, Type: "test"}, Model: simplejson.New()},
},
}
res, err := s.HandleRequest(context.Background(), ds, req)
require.NoError(t, err)
require.NotNil(t, actualReq)
require.NotNil(t, res)
})
}
type fakePluginsClient struct {
plugins.Client
backend.QueryDataHandlerFunc
}
func (m *fakePluginsClient) QueryData(ctx context.Context, req *backend.QueryDataRequest) (*backend.QueryDataResponse, error) {
if m.QueryDataHandlerFunc != nil {
return m.QueryDataHandlerFunc.QueryData(ctx, req)
}
return nil, nil
}

View File

@ -0,0 +1,119 @@
package legacydata
import (
"strconv"
"time"
"github.com/timberio/go-datemath"
)
type DataTimeRange struct {
From string
To string
Now time.Time
}
func NewDataTimeRange(from, to string) DataTimeRange {
return DataTimeRange{
From: from,
To: to,
Now: time.Now(),
}
}
func (tr *DataTimeRange) GetFromAsMsEpoch() int64 {
return tr.MustGetFrom().UnixNano() / int64(time.Millisecond)
}
func (tr *DataTimeRange) GetFromAsSecondsEpoch() int64 {
return tr.GetFromAsMsEpoch() / 1000
}
func (tr *DataTimeRange) GetFromAsTimeUTC() time.Time {
return tr.MustGetFrom().UTC()
}
func (tr *DataTimeRange) GetToAsMsEpoch() int64 {
return tr.MustGetTo().UnixNano() / int64(time.Millisecond)
}
func (tr *DataTimeRange) GetToAsSecondsEpoch() int64 {
return tr.GetToAsMsEpoch() / 1000
}
func (tr *DataTimeRange) GetToAsTimeUTC() time.Time {
return tr.MustGetTo().UTC()
}
func (tr *DataTimeRange) MustGetFrom() time.Time {
res, err := tr.ParseFrom()
if err != nil {
return time.Unix(0, 0)
}
return res
}
func (tr *DataTimeRange) MustGetTo() time.Time {
res, err := tr.ParseTo()
if err != nil {
return time.Unix(0, 0)
}
return res
}
func (tr DataTimeRange) ParseFrom() (time.Time, error) {
return parseTimeRange(tr.From, tr.Now, false, nil)
}
func (tr DataTimeRange) ParseTo() (time.Time, error) {
return parseTimeRange(tr.To, tr.Now, true, nil)
}
func (tr DataTimeRange) ParseFromWithLocation(location *time.Location) (time.Time, error) {
return parseTimeRange(tr.From, tr.Now, false, location)
}
func (tr DataTimeRange) ParseToWithLocation(location *time.Location) (time.Time, error) {
return parseTimeRange(tr.To, tr.Now, true, location)
}
func (tr DataTimeRange) ParseFromWithWeekStart(location *time.Location, weekstart time.Weekday) (time.Time, error) {
return parseTimeRangeWithWeekStart(tr.From, tr.Now, false, location, weekstart)
}
func (tr *DataTimeRange) ParseToWithWeekStart(location *time.Location, weekstart time.Weekday) (time.Time, error) {
return parseTimeRangeWithWeekStart(tr.To, tr.Now, true, location, weekstart)
}
func parseTimeRange(s string, now time.Time, withRoundUp bool, location *time.Location) (time.Time, error) {
return parseTimeRangeWithWeekStart(s, now, withRoundUp, location, -1)
}
func parseTimeRangeWithWeekStart(s string, now time.Time, withRoundUp bool, location *time.Location, weekstart time.Weekday) (time.Time, error) {
if val, err := strconv.ParseInt(s, 10, 64); err == nil {
seconds := val / 1000
nano := (val - seconds*1000) * 1000000
return time.Unix(seconds, nano), nil
}
diff, err := time.ParseDuration("-" + s)
if err != nil {
options := []func(*datemath.Options){
datemath.WithNow(now),
datemath.WithRoundUp(withRoundUp),
}
if location != nil {
options = append(options, datemath.WithLocation(location))
}
if weekstart != -1 {
if weekstart > now.Weekday() {
weekstart = weekstart - 7
}
options = append(options, datemath.WithStartOfWeek(weekstart))
}
return datemath.ParseAndEvaluate(s, options...)
}
return now.Add(diff), nil
}
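
A short usage sketch of the relocated type, mirroring the week-start cases in the tests that follow; the function wrapper is hypothetical.

package example

import (
	"time"

	"github.com/grafana/grafana/pkg/tsdb/legacydata"
)

// parseLastWeek resolves a relative range with an explicit week start.
func parseLastWeek() (from, to time.Time, err error) {
	tr := legacydata.NewDataTimeRange("now-1w/w", "now-1w/w")
	loc, err := time.LoadLocation("America/Chicago")
	if err != nil {
		return from, to, err
	}
	if from, err = tr.ParseFromWithWeekStart(loc, time.Monday); err != nil {
		return from, to, err
	}
	to, err = tr.ParseToWithWeekStart(loc, time.Monday)
	return from, to, err
}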

View File

@ -1,4 +1,4 @@
package tsdb
package legacydata
import (
"strconv"
@ -13,10 +13,10 @@ func TestTimeRange(t *testing.T) {
now := time.Now()
t.Run("Can parse 5m, now", func(t *testing.T) {
tr := TimeRange{
tr := DataTimeRange{
From: "5m",
To: "now",
now: now,
Now: now,
}
t.Run("5m ago ", func(t *testing.T) {
@ -37,10 +37,10 @@ func TestTimeRange(t *testing.T) {
})
t.Run("Can parse 5h, now-10m", func(t *testing.T) {
tr := TimeRange{
tr := DataTimeRange{
From: "5h",
To: "now-10m",
now: now,
Now: now,
}
t.Run("5h ago ", func(t *testing.T) {
@ -66,10 +66,10 @@ func TestTimeRange(t *testing.T) {
now, err := time.Parse(time.RFC3339Nano, "2020-03-26T15:12:56.000Z")
require.Nil(t, err)
t.Run("Can parse now-1M/M, now-1M/M", func(t *testing.T) {
tr := TimeRange{
tr := DataTimeRange{
From: "now-1M/M",
To: "now-1M/M",
now: now,
Now: now,
}
t.Run("from now-1M/M ", func(t *testing.T) {
@ -92,10 +92,10 @@ func TestTimeRange(t *testing.T) {
})
t.Run("Can parse now-3d, now+3w", func(t *testing.T) {
tr := TimeRange{
tr := DataTimeRange{
From: "now-3d",
To: "now+3w",
now: now,
Now: now,
}
t.Run("now-3d ", func(t *testing.T) {
@ -118,10 +118,10 @@ func TestTimeRange(t *testing.T) {
})
t.Run("Can parse 1960-02-01T07:00:00.000Z, 1965-02-03T08:00:00.000Z", func(t *testing.T) {
tr := TimeRange{
tr := DataTimeRange{
From: "1960-02-01T07:00:00.000Z",
To: "1965-02-03T08:00:00.000Z",
now: now,
Now: now,
}
t.Run("1960-02-01T07:00:00.000Z ", func(t *testing.T) {
@ -146,7 +146,7 @@ func TestTimeRange(t *testing.T) {
t.Run("Can parse negative unix epochs", func(t *testing.T) {
from := time.Date(1960, 2, 1, 7, 0, 0, 0, time.UTC)
to := time.Date(1965, 2, 3, 8, 0, 0, 0, time.UTC)
tr := NewTimeRange(strconv.FormatInt(from.UnixNano()/int64(time.Millisecond), 10), strconv.FormatInt(to.UnixNano()/int64(time.Millisecond), 10))
tr := NewDataTimeRange(strconv.FormatInt(from.UnixNano()/int64(time.Millisecond), 10), strconv.FormatInt(to.UnixNano()/int64(time.Millisecond), 10))
res, err := tr.ParseFrom()
require.Nil(t, err)
@ -159,10 +159,10 @@ func TestTimeRange(t *testing.T) {
t.Run("can parse unix epochs", func(t *testing.T) {
var err error
tr := TimeRange{
tr := DataTimeRange{
From: "1474973725473",
To: "1474975757930",
now: now,
Now: now,
}
res, err := tr.ParseFrom()
@ -176,10 +176,10 @@ func TestTimeRange(t *testing.T) {
t.Run("Cannot parse asdf", func(t *testing.T) {
var err error
tr := TimeRange{
tr := DataTimeRange{
From: "asdf",
To: "asdf",
now: now,
Now: now,
}
_, err = tr.ParseFrom()
@ -193,10 +193,10 @@ func TestTimeRange(t *testing.T) {
require.Nil(t, err)
t.Run("Can parse now-1M/M, now-1M/M with America/Chicago timezone", func(t *testing.T) {
tr := TimeRange{
tr := DataTimeRange{
From: "now-1M/M",
To: "now-1M/M",
now: now,
Now: now,
}
location, err := time.LoadLocation("America/Chicago")
require.Nil(t, err)
@ -221,10 +221,10 @@ func TestTimeRange(t *testing.T) {
})
t.Run("Can parse now-3h, now+2h with America/Chicago timezone", func(t *testing.T) {
tr := TimeRange{
tr := DataTimeRange{
From: "now-3h",
To: "now+2h",
now: now,
Now: now,
}
location, err := time.LoadLocation("America/Chicago")
require.Nil(t, err)
@ -249,10 +249,10 @@ func TestTimeRange(t *testing.T) {
})
t.Run("Can parse now-1w/w, now-1w/w without timezone and week start on Monday", func(t *testing.T) {
tr := TimeRange{
tr := DataTimeRange{
From: "now-1w/w",
To: "now-1w/w",
now: now,
Now: now,
}
weekstart := time.Monday
require.Nil(t, err)
@ -277,10 +277,10 @@ func TestTimeRange(t *testing.T) {
})
t.Run("Can parse now-1w/w, now-1w/w with America/Chicago timezone and week start on Monday", func(t *testing.T) {
tr := TimeRange{
tr := DataTimeRange{
From: "now-1w/w",
To: "now-1w/w",
now: now,
Now: now,
}
weekstart := time.Monday
location, err := time.LoadLocation("America/Chicago")
@ -306,10 +306,10 @@ func TestTimeRange(t *testing.T) {
})
t.Run("Can parse now-1w/w, now-1w/w with America/Chicago timezone and week start on Sunday", func(t *testing.T) {
tr := TimeRange{
tr := DataTimeRange{
From: "now-1w/w",
To: "now-1w/w",
now: now,
Now: now,
}
weekstart := time.Sunday
location, err := time.LoadLocation("America/Chicago")
@ -335,10 +335,10 @@ func TestTimeRange(t *testing.T) {
})
t.Run("Can parse now-1w/w, now-1w/w with America/Chicago timezone and week start on Saturday", func(t *testing.T) {
tr := TimeRange{
tr := DataTimeRange{
From: "now-1w/w",
To: "now-1w/w",
now: now,
Now: now,
}
weekstart := time.Saturday
location, err := time.LoadLocation("America/Chicago")

View File

@ -0,0 +1,37 @@
package legacydata
import (
"time"
"github.com/grafana/grafana-plugin-sdk-go/data"
)
// SeriesToFrame converts a DataTimeSeries to an SDK frame.
func SeriesToFrame(series DataTimeSeries) (*data.Frame, error) {
timeVec := make([]*time.Time, len(series.Points))
floatVec := make([]*float64, len(series.Points))
for idx, point := range series.Points {
timeVec[idx], floatVec[idx] = convertDataTimePoint(point)
}
frame := data.NewFrame(series.Name,
data.NewField("time", nil, timeVec),
data.NewField("value", data.Labels(series.Tags), floatVec),
)
return frame, nil
}
// convertDataTimePoint converts a DataTimePoint into two values appropriate
// for Series values.
func convertDataTimePoint(point DataTimePoint) (t *time.Time, f *float64) {
timeIdx, valueIdx := 1, 0
if point[timeIdx].Valid { // Assuming valid is null?
tI := int64(point[timeIdx].Float64)
uT := time.Unix(tI/int64(1e+3), (tI%int64(1e+3))*int64(1e+6)) // time.Time from millisecond unix ts
t = &uT
}
if point[valueIdx].Valid {
f = &point[valueIdx].Float64
}
return
}
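
A self-contained sketch of the converter in use. The null.FloatFrom and null.FloatFromPtr constructors are assumed from pkg/components/null, and each point is laid out as [value, timestamp-in-ms], matching convertDataTimePoint's indices.

package example

import (
	"github.com/grafana/grafana-plugin-sdk-go/data"
	"github.com/grafana/grafana/pkg/components/null"
	"github.com/grafana/grafana/pkg/tsdb/legacydata"
)

// seriesExample converts one series containing a value and a null point.
func seriesExample() (*data.Frame, error) {
	series := legacydata.DataTimeSeries{
		Name: "cpu",
		Tags: map[string]string{"host": "web-01"},
		Points: legacydata.DataTimeSeriesPoints{
			legacydata.DataTimePoint{null.FloatFrom(3.14), null.FloatFrom(1631772600000)},   // value, ms epoch
			legacydata.DataTimePoint{null.FloatFromPtr(nil), null.FloatFrom(1631772660000)}, // missing value
		},
	}
	return legacydata.SeriesToFrame(series)
}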

View File

@ -1,42 +0,0 @@
package tsdb
import (
"context"
"github.com/grafana/grafana/pkg/models"
"github.com/grafana/grafana/pkg/plugins"
"github.com/grafana/grafana/pkg/services/datasources"
"github.com/grafana/grafana/pkg/services/oauthtoken"
"github.com/grafana/grafana/pkg/setting"
_ "github.com/grafana/grafana/pkg/tsdb/postgres"
)
// NewService returns a new Service.
func NewService(
cfg *setting.Cfg, pluginsClient plugins.Client, oauthTokenService *oauthtoken.Service,
dataSourcesService *datasources.Service) *Service {
return newService(cfg, pluginsClient, oauthTokenService, dataSourcesService)
}
func newService(cfg *setting.Cfg, pluginsClient plugins.Client, oauthTokenService oauthtoken.OAuthTokenService,
dataSourcesService *datasources.Service) *Service {
return &Service{
Cfg: cfg,
pluginsClient: pluginsClient,
OAuthTokenService: oauthTokenService,
DataSourcesService: dataSourcesService,
}
}
// Service handles data requests to data sources.
type Service struct {
Cfg *setting.Cfg
pluginsClient plugins.Client
OAuthTokenService oauthtoken.OAuthTokenService
DataSourcesService *datasources.Service
}
//nolint: staticcheck // plugins.DataPlugin deprecated
func (s *Service) HandleRequest(ctx context.Context, ds *models.DataSource, query plugins.DataQuery) (plugins.DataResponse, error) {
return dataPluginQueryAdapter(ds.Type, s.pluginsClient, s.OAuthTokenService, s.DataSourcesService).DataQuery(ctx, ds, query)
}

View File

@ -1,122 +0,0 @@
package tsdb
import (
"context"
"testing"
"github.com/grafana/grafana-plugin-sdk-go/backend"
"github.com/grafana/grafana/pkg/bus"
"github.com/grafana/grafana/pkg/components/simplejson"
"github.com/grafana/grafana/pkg/models"
"github.com/grafana/grafana/pkg/plugins"
"github.com/grafana/grafana/pkg/services/datasources"
"github.com/grafana/grafana/pkg/services/secrets/fakes"
secretsManager "github.com/grafana/grafana/pkg/services/secrets/manager"
"github.com/grafana/grafana/pkg/setting"
"github.com/stretchr/testify/require"
"golang.org/x/oauth2"
)
func TestHandleRequest(t *testing.T) {
t.Run("Should invoke plugin manager QueryData when handling request for query", func(t *testing.T) {
svc, _, pm := createService(t)
backendPluginManagerCalled := false
pm.QueryDataHandlerFunc = func(ctx context.Context, req *backend.QueryDataRequest) (*backend.QueryDataResponse, error) {
backendPluginManagerCalled = true
return backend.NewQueryDataResponse(), nil
}
ds := &models.DataSource{Id: 12, Type: "unregisteredType", JsonData: simplejson.New()}
req := plugins.DataQuery{
TimeRange: &plugins.DataTimeRange{},
Queries: []plugins.DataSubQuery{
{RefID: "A", DataSource: &models.DataSource{Id: 1, Type: "test"}, Model: simplejson.New()},
{RefID: "B", DataSource: &models.DataSource{Id: 1, Type: "test"}, Model: simplejson.New()},
},
}
_, err := svc.HandleRequest(context.Background(), ds, req)
require.NoError(t, err)
require.True(t, backendPluginManagerCalled)
})
}
//nolint: staticcheck // plugins.DataPlugin deprecated
type resultsFn func(context plugins.DataQuery) plugins.DataQueryResult
type fakeExecutor struct {
//nolint: staticcheck // plugins.DataPlugin deprecated
results map[string]plugins.DataQueryResult
resultsFn map[string]resultsFn
}
//nolint: staticcheck // plugins.DataPlugin deprecated
func (e *fakeExecutor) DataQuery(ctx context.Context, dsInfo *models.DataSource, context plugins.DataQuery) (
plugins.DataResponse, error) {
result := plugins.DataResponse{Results: make(map[string]plugins.DataQueryResult)}
for _, query := range context.Queries {
if results, has := e.results[query.RefID]; has {
result.Results[query.RefID] = results
}
if testFunc, has := e.resultsFn[query.RefID]; has {
result.Results[query.RefID] = testFunc(context)
}
}
return result, nil
}
func (e *fakeExecutor) Return(refID string, series plugins.DataTimeSeriesSlice) {
//nolint: staticcheck // plugins.DataPlugin deprecated
e.results[refID] = plugins.DataQueryResult{
RefID: refID, Series: series,
}
}
func (e *fakeExecutor) HandleQuery(refId string, fn resultsFn) {
e.resultsFn[refId] = fn
}
type fakePluginsClient struct {
plugins.Client
backend.QueryDataHandlerFunc
}
func (m *fakePluginsClient) QueryData(ctx context.Context, req *backend.QueryDataRequest) (*backend.QueryDataResponse, error) {
if m.QueryDataHandlerFunc != nil {
return m.QueryDataHandlerFunc.QueryData(ctx, req)
}
return nil, nil
}
type fakeOAuthTokenService struct {
}
func (s *fakeOAuthTokenService) GetCurrentOAuthToken(context.Context, *models.SignedInUser) *oauth2.Token {
return nil
}
func (s *fakeOAuthTokenService) IsOAuthPassThruEnabled(*models.DataSource) bool {
return false
}
func createService(t *testing.T) (*Service, *fakeExecutor, *fakePluginsClient) {
fakePluginsClient := &fakePluginsClient{}
secretsService := secretsManager.SetupTestService(t, fakes.NewFakeSecretsStore())
dsService := datasources.ProvideService(bus.New(), nil, secretsService)
s := newService(
setting.NewCfg(),
fakePluginsClient,
&fakeOAuthTokenService{},
dsService,
)
e := &fakeExecutor{
//nolint: staticcheck // plugins.DataPlugin deprecated
results: make(map[string]plugins.DataQueryResult),
resultsFn: make(map[string]resultsFn),
}
return s, e, fakePluginsClient
}

View File

@ -382,7 +382,6 @@ var Interpolate = func(query backend.DataQuery, timeRange backend.TimeRange, tim
return sql, nil
}
//nolint: staticcheck // plugins.DataPlugin deprecated
func (e *DataSourceHandler) newProcessCfg(query backend.DataQuery, queryContext context.Context,
rows *core.Rows, interpolatedQuery string) (*dataQueryModel, error) {
columnNames, err := rows.Columns()
@ -425,7 +424,6 @@ func (e *DataSourceHandler) newProcessCfg(query backend.DataQuery, queryContext
default:
}
}
//nolint: staticcheck // plugins.DataPlugin deprecated
qm.TimeRange.From = query.TimeRange.From.UTC()
qm.TimeRange.To = query.TimeRange.To.UTC()

View File

@ -11,7 +11,7 @@ import (
"github.com/grafana/grafana-plugin-sdk-go/data"
"github.com/grafana/grafana-plugin-sdk-go/data/sqlutil"
"github.com/grafana/grafana/pkg/infra/log"
"github.com/grafana/grafana/pkg/plugins"
"github.com/grafana/grafana/pkg/tsdb/legacydata"
"github.com/stretchr/testify/assert"
"github.com/stretchr/testify/require"
"github.com/xorcare/pointer"
@ -420,7 +420,7 @@ type testQueryResultTransformer struct {
transformQueryErrorWasCalled bool
}
func (t *testQueryResultTransformer) TransformQueryResult(columnTypes []*sql.ColumnType, rows *core.Rows) (plugins.DataRowValues, error) {
func (t *testQueryResultTransformer) TransformQueryResult(columnTypes []*sql.ColumnType, rows *core.Rows) (legacydata.DataRowValues, error) {
return nil, nil
}

View File

@ -9,7 +9,7 @@ import (
"github.com/grafana/grafana-plugin-sdk-go/backend"
"github.com/grafana/grafana-plugin-sdk-go/data"
"github.com/grafana/grafana/pkg/components/simplejson"
"github.com/grafana/grafana/pkg/plugins"
"github.com/grafana/grafana/pkg/tsdb/legacydata"
"github.com/stretchr/testify/assert"
"github.com/stretchr/testify/require"
)
@ -19,7 +19,7 @@ func TestTestdataScenarios(t *testing.T) {
t.Run("random walk ", func(t *testing.T) {
t.Run("Should start at the requested value", func(t *testing.T) {
timeRange := plugins.DataTimeRange{From: "5m", To: "now", Now: time.Now()}
timeRange := legacydata.DataTimeRange{From: "5m", To: "now", Now: time.Now()}
model := simplejson.New()
model.Set("startValue", 1.234)
@ -63,7 +63,7 @@ func TestTestdataScenarios(t *testing.T) {
t.Run("random walk table", func(t *testing.T) {
t.Run("Should return a table that looks like value/min/max", func(t *testing.T) {
timeRange := plugins.DataTimeRange{From: "5m", To: "now", Now: time.Now()}
timeRange := legacydata.DataTimeRange{From: "5m", To: "now", Now: time.Now()}
model := simplejson.New()
modelBytes, err := model.MarshalJSON()
@ -117,7 +117,7 @@ func TestTestdataScenarios(t *testing.T) {
})
t.Run("Should return a table with some nil values", func(t *testing.T) {
timeRange := plugins.DataTimeRange{From: "5m", To: "now", Now: time.Now()}
timeRange := legacydata.DataTimeRange{From: "5m", To: "now", Now: time.Now()}
model := simplejson.New()
model.Set("withNil", true)

View File

@ -1,130 +0,0 @@
package tsdb
import (
"strconv"
"time"
"github.com/timberio/go-datemath"
)
func NewTimeRange(from, to string) *TimeRange {
return &TimeRange{
From: from,
To: to,
now: time.Now(),
}
}
func NewFakeTimeRange(from, to string, now time.Time) *TimeRange {
return &TimeRange{
From: from,
To: to,
now: now,
}
}
type TimeRange struct {
From string
To string
now time.Time
}
func (tr *TimeRange) GetFromAsMsEpoch() int64 {
return tr.MustGetFrom().UnixNano() / int64(time.Millisecond)
}
func (tr *TimeRange) GetFromAsSecondsEpoch() int64 {
return tr.GetFromAsMsEpoch() / 1000
}
func (tr *TimeRange) GetFromAsTimeUTC() time.Time {
return tr.MustGetFrom().UTC()
}
func (tr *TimeRange) GetToAsMsEpoch() int64 {
return tr.MustGetTo().UnixNano() / int64(time.Millisecond)
}
func (tr *TimeRange) GetToAsSecondsEpoch() int64 {
return tr.GetToAsMsEpoch() / 1000
}
func (tr *TimeRange) GetToAsTimeUTC() time.Time {
return tr.MustGetTo().UTC()
}
func (tr *TimeRange) MustGetFrom() time.Time {
res, err := tr.ParseFrom()
if err != nil {
return time.Unix(0, 0)
}
return res
}
func (tr *TimeRange) MustGetTo() time.Time {
res, err := tr.ParseTo()
if err != nil {
return time.Unix(0, 0)
}
return res
}
func tryParseUnixMsEpoch(val string) (time.Time, bool) {
if val, err := strconv.ParseInt(val, 10, 64); err == nil {
seconds := val / 1000
nano := (val - seconds*1000) * 1000000
return time.Unix(seconds, nano), true
}
return time.Time{}, false
}
func (tr *TimeRange) ParseFrom() (time.Time, error) {
return parse(tr.From, tr.now, false, nil, -1)
}
func (tr *TimeRange) ParseTo() (time.Time, error) {
return parse(tr.To, tr.now, true, nil, -1)
}
func (tr *TimeRange) ParseFromWithLocation(location *time.Location) (time.Time, error) {
return parse(tr.From, tr.now, false, location, -1)
}
func (tr *TimeRange) ParseToWithLocation(location *time.Location) (time.Time, error) {
return parse(tr.To, tr.now, true, location, -1)
}
func (tr *TimeRange) ParseFromWithWeekStart(location *time.Location, weekstart time.Weekday) (time.Time, error) {
return parse(tr.From, tr.now, false, location, weekstart)
}
func (tr *TimeRange) ParseToWithWeekStart(location *time.Location, weekstart time.Weekday) (time.Time, error) {
return parse(tr.To, tr.now, true, location, weekstart)
}
func parse(s string, now time.Time, withRoundUp bool, location *time.Location, weekstart time.Weekday) (time.Time, error) {
if res, ok := tryParseUnixMsEpoch(s); ok {
return res, nil
}
diff, err := time.ParseDuration("-" + s)
if err != nil {
options := []func(*datemath.Options){
datemath.WithNow(now),
datemath.WithRoundUp(withRoundUp),
}
if location != nil {
options = append(options, datemath.WithLocation(location))
}
if weekstart != -1 {
if weekstart > now.Weekday() {
weekstart = weekstart - 7
}
options = append(options, datemath.WithStartOfWeek(weekstart))
}
return datemath.ParseAndEvaluate(s, options...)
}
return now.Add(diff), nil
}