Chore: Remove unused code in legacydata package (#88772)

This commit is contained in:
Marcus Efraimsson 2024-06-05 15:37:32 +02:00 committed by GitHub
parent df784917e4
commit 0fffa31931
No known key found for this signature in database
GPG Key ID: B5690EEEBB952194
9 changed files with 9 additions and 705 deletions

View File

@ -164,8 +164,6 @@ import (
"github.com/grafana/grafana/pkg/tsdb/grafanads"
"github.com/grafana/grafana/pkg/tsdb/graphite"
"github.com/grafana/grafana/pkg/tsdb/influxdb"
"github.com/grafana/grafana/pkg/tsdb/legacydata"
legacydataservice "github.com/grafana/grafana/pkg/tsdb/legacydata/service"
"github.com/grafana/grafana/pkg/tsdb/loki"
"github.com/grafana/grafana/pkg/tsdb/mssql"
"github.com/grafana/grafana/pkg/tsdb/mysql"
@ -176,8 +174,6 @@ import (
)
var wireBasicSet = wire.NewSet(
legacydataservice.ProvideService,
wire.Bind(new(legacydata.RequestHandler), new(*legacydataservice.Service)),
annotationsimpl.ProvideService,
wire.Bind(new(annotations.Repository), new(*annotationsimpl.RepositoryImpl)),
New,

View File

@ -26,7 +26,6 @@ import (
"github.com/grafana/grafana/pkg/infra/log"
"github.com/grafana/grafana/pkg/infra/tracing"
"github.com/grafana/grafana/pkg/setting"
"github.com/grafana/grafana/pkg/tsdb/legacydata"
)
var logger = log.New("tsdb.graphite")
@ -351,7 +350,7 @@ func epochMStoGraphiteTime(tr backend.TimeRange) (string, string) {
/**
* Graphite should always return timestamp as a number but values might be nil when data is missing
*/
func parseDataTimePoint(dataTimePoint legacydata.DataTimePoint) (time.Time, *float64, error) {
func parseDataTimePoint(dataTimePoint DataTimePoint) (time.Time, *float64, error) {
if !dataTimePoint[1].Valid {
return time.Time{}, nil, errors.New("failed to parse data point timestamp")
}

View File

@ -1,10 +1,15 @@
package graphite
import "github.com/grafana/grafana/pkg/tsdb/legacydata"
import (
"github.com/grafana/grafana/pkg/components/null"
)
type TargetResponseDTO struct {
Target string `json:"target"`
DataPoints legacydata.DataTimeSeriesPoints `json:"datapoints"`
Target string `json:"target"`
DataPoints DataTimeSeriesPoints `json:"datapoints"`
// Graphite <=1.1.7 may return some tags as numbers requiring extra conversion. See https://github.com/grafana/grafana/issues/37614
Tags map[string]any `json:"tags"`
}
type DataTimePoint [2]null.Float
type DataTimeSeriesPoints []DataTimePoint

View File

@ -1,193 +0,0 @@
package legacydata
import (
"context"
"encoding/base64"
"encoding/json"
"fmt"
"github.com/grafana/grafana-plugin-sdk-go/data"
"github.com/grafana/grafana/pkg/components/null"
"github.com/grafana/grafana/pkg/components/simplejson"
"github.com/grafana/grafana/pkg/services/datasources"
"github.com/grafana/grafana/pkg/services/user"
)
// RequestHandler is a data request handler interface.
// Deprecated: use backend.QueryDataHandler instead.
type RequestHandler interface {
	// HandleRequest handles a data request.
	HandleRequest(context.Context, *datasources.DataSource, DataQuery) (DataResponse, error)
}

// DataSubQuery represents a data sub-query. New work should use the plugin SDK.
type DataSubQuery struct {
	RefID         string                  `json:"refId"`
	Model         *simplejson.Json        `json:"model,omitempty"`
	DataSource    *datasources.DataSource `json:"datasource"`
	MaxDataPoints int64                   `json:"maxDataPoints"`
	IntervalMS    int64                   `json:"intervalMs"`
	QueryType     string                  `json:"queryType"`
}

// DataQuery contains all information about a data query request. New work should use the plugin SDK.
type DataQuery struct {
	// TimeRange is the time range the queries should cover.
	TimeRange *DataTimeRange
	// Queries holds the individual sub-queries of the request.
	Queries []DataSubQuery
	// Headers are forwarded with the resulting plugin request.
	Headers map[string]string
	Debug   bool
	// User is the signed-in user issuing the request.
	User *user.SignedInUser
}

// DataTable is a columns/rows table result. Deprecated in favor of data frames.
type DataTable struct {
	Columns []DataTableColumn `json:"columns"`
	Rows    []DataRowValues   `json:"rows"`
}

// DataTableColumn holds the display text of a single table column.
type DataTableColumn struct {
	Text string `json:"text"`
}

// DataTimePoint is a pair of nullable floats; index 1 carries the timestamp
// (see the graphite parsing code), index 0 the value.
type DataTimePoint [2]null.Float

// DataTimeSeriesPoints is the ordered list of points of one time series.
type DataTimeSeriesPoints []DataTimePoint

// DataTimeSeriesSlice is a list of time series.
type DataTimeSeriesSlice []DataTimeSeries

// DataRowValues holds the values of a single table row.
type DataRowValues []any

// DataQueryResult is the result of a single sub-query.
// Deprecated: DataQueryResult should use backend.QueryDataResponse
type DataQueryResult struct {
	Error       error               `json:"-"`
	ErrorString string              `json:"error,omitempty"`
	RefID       string              `json:"refId"`
	Meta        *simplejson.Json    `json:"meta,omitempty"`
	Series      DataTimeSeriesSlice `json:"series"`
	Tables      []DataTable         `json:"tables"`
	Dataframes  DataFrames          `json:"dataframes"`
}
// UnmarshalJSON deserializes a DataQueryResult from JSON.
//
// Deserialization support is required by tests.
func (r *DataQueryResult) UnmarshalJSON(b []byte) error {
	// Decode into a generic map first; each field is then validated by hand.
	m := map[string]any{}
	if err := json.Unmarshal(b, &m); err != nil {
		return err
	}

	// refId is the only required field.
	refID, ok := m["refId"].(string)
	if !ok {
		return fmt.Errorf("can't decode field refId - not a string")
	}

	// meta is optional; when present it must be a JSON object.
	var meta *simplejson.Json
	if m["meta"] != nil {
		mm, ok := m["meta"].(map[string]any)
		if !ok {
			return fmt.Errorf("can't decode field meta - not a JSON object")
		}
		meta = simplejson.NewFromAny(mm)
	}

	// series is currently never deserialized; it is assigned as its zero value.
	var series DataTimeSeriesSlice
	/* TODO
	if m["series"] != nil {
	}
	*/

	// tables is optional; when present it must be an array of objects, each
	// with "columns" (objects carrying a "text" string) and "rows" (arrays).
	var tables []DataTable
	if m["tables"] != nil {
		ts, ok := m["tables"].([]any)
		if !ok {
			return fmt.Errorf("can't decode field tables - not an array of Tables")
		}
		for _, ti := range ts {
			tm, ok := ti.(map[string]any)
			if !ok {
				return fmt.Errorf("can't decode field tables - not an array of Tables")
			}

			var columns []DataTableColumn
			cs, ok := tm["columns"].([]any)
			if !ok {
				return fmt.Errorf("can't decode field tables - not an array of Tables")
			}
			for _, ci := range cs {
				cm, ok := ci.(map[string]any)
				if !ok {
					return fmt.Errorf("can't decode field tables - not an array of Tables")
				}
				val, ok := cm["text"].(string)
				if !ok {
					return fmt.Errorf("can't decode field tables - not an array of Tables")
				}
				columns = append(columns, DataTableColumn{Text: val})
			}

			rs, ok := tm["rows"].([]any)
			if !ok {
				return fmt.Errorf("can't decode field tables - not an array of Tables")
			}
			var rows []DataRowValues
			for _, ri := range rs {
				vals, ok := ri.([]any)
				if !ok {
					return fmt.Errorf("can't decode field tables - not an array of Tables")
				}
				rows = append(rows, vals)
			}

			tables = append(tables, DataTable{
				Columns: columns,
				Rows:    rows,
			})
		}
	}

	// dataframes is optional; when present it is an array of base64-encoded
	// Arrow frames. Both the encoded and decoded forms are retained.
	var dfs *dataFrames
	if m["dataframes"] != nil {
		raw, ok := m["dataframes"].([]any)
		if !ok {
			return fmt.Errorf("can't decode field dataframes - not an array of byte arrays")
		}
		var encoded [][]byte
		for _, ra := range raw {
			encS, ok := ra.(string)
			if !ok {
				return fmt.Errorf("can't decode field dataframes - not an array of byte arrays")
			}
			enc, err := base64.StdEncoding.DecodeString(encS)
			if err != nil {
				return fmt.Errorf("can't decode field dataframes - not an array of arrow frames")
			}
			encoded = append(encoded, enc)
		}
		decoded, err := data.UnmarshalArrowFrames(encoded)
		if err != nil {
			return err
		}
		dfs = &dataFrames{
			decoded: decoded,
			encoded: encoded,
		}
	}

	// Assign to the receiver only after every field decoded successfully,
	// so a failed decode leaves r untouched.
	r.RefID = refID
	r.Meta = meta
	r.Series = series
	r.Tables = tables
	if dfs != nil {
		r.Dataframes = dfs
	}
	return nil
}
// DataTimeSeries -- this structure is deprecated, all new work should use DataFrames from the SDK
type DataTimeSeries struct {
	Name   string               `json:"name"`
	Points DataTimeSeriesPoints `json:"points"`
	Tags   map[string]string    `json:"tags,omitempty"`
}

// DataResponse holds per-refID query results plus an optional message.
// Deprecated: DataResponse -- this structure is deprecated, all new work should use backend.QueryDataResponse
type DataResponse struct {
	Results map[string]DataQueryResult `json:"results"`
	Message string                     `json:"message,omitempty"`
}

View File

@ -1,75 +0,0 @@
package legacydata
import (
"github.com/grafana/grafana-plugin-sdk-go/data"
jsoniter "github.com/json-iterator/go"
)
// DataFrames is an interface for retrieving encoded and decoded data frames.
//
// See NewDecodedDataFrames and NewEncodedDataFrames for more information.
type DataFrames interface {
	// Encoded encodes Frames into a slice of []byte.
	// If an error occurs [][]byte will be nil.
	// The encoded result, if any, will be cached and returned next time Encoded is called.
	Encoded() ([][]byte, error)

	// Decoded decodes a slice of Arrow encoded frames to data.Frames ([]*data.Frame).
	// If an error occurs Frames will be nil.
	// The decoded result, if any, will be cached and returned next time Decoded is called.
	Decoded() (data.Frames, error)
}

// dataFrames caches both representations; whichever one is missing is
// derived lazily from the other on first access.
type dataFrames struct {
	decoded data.Frames
	encoded [][]byte
}
// NewDecodedDataFrames wraps already-decoded frames in a DataFrames value.
//
// Prefer this constructor when implementing a plugin: Grafana Alerting
// operates on decoded frames. The encoded (Arrow) form is produced lazily,
// e.g. when MarshalJSON is invoked for the Grafana HTTP API.
func NewDecodedDataFrames(decodedFrames data.Frames) DataFrames {
	df := dataFrames{decoded: decodedFrames}
	return &df
}
// Encoded returns the Arrow-encoded form of the frames, encoding from the
// decoded form (and caching the result) on first use.
func (df *dataFrames) Encoded() ([][]byte, error) {
	if df.encoded != nil {
		return df.encoded, nil
	}
	enc, err := df.decoded.MarshalArrow()
	if err != nil {
		return nil, err
	}
	df.encoded = enc
	return df.encoded, nil
}
// Decoded returns the decoded frames, unmarshalling from the Arrow-encoded
// form (and caching the result) on first use.
func (df *dataFrames) Decoded() (data.Frames, error) {
	if df.decoded != nil {
		return df.decoded, nil
	}
	dec, err := data.UnmarshalArrowFrames(df.encoded)
	if err != nil {
		return nil, err
	}
	df.decoded = dec
	return df.decoded, nil
}
// MarshalJSON serializes the frames in their Arrow-encoded form.
func (df *dataFrames) MarshalJSON() ([]byte, error) {
	enc, err := df.Encoded()
	if err != nil {
		return nil, err
	}
	// jsoniter in standard-library-compatible mode keeps map keys ordered,
	// matching encoding/json output while minimizing risk of drift.
	return jsoniter.ConfigCompatibleWithStandardLibrary.Marshal(enc)
}

View File

@ -1,124 +0,0 @@
package interval
import (
"regexp"
"strings"
"time"
"github.com/grafana/grafana-plugin-sdk-go/backend/gtime"
"github.com/grafana/grafana/pkg/components/simplejson"
"github.com/grafana/grafana/pkg/services/datasources"
"github.com/grafana/grafana/pkg/tsdb/legacydata"
)
var (
	// DefaultRes is the default target number of data points per time range.
	DefaultRes int64 = 1500
	// defaultMinInterval is the fallback when no minimum interval is set.
	defaultMinInterval = time.Millisecond * 1
)

// Interval is a calculated interval carrying both the formatted text
// (e.g. "500ms") and the duration value.
type Interval struct {
	Text  string
	Value time.Duration
}

// intervalCalculator implements Calculator.
type intervalCalculator struct {
	minInterval time.Duration
}

// Calculator computes query intervals for a given time range.
type Calculator interface {
	Calculate(timeRange legacydata.DataTimeRange, interval time.Duration) Interval
	CalculateSafeInterval(timeRange legacydata.DataTimeRange, resolution int64) Interval
}

// CalculatorOptions holds the options accepted by NewCalculator.
type CalculatorOptions struct {
	MinInterval time.Duration
}
// NewCalculator returns an interval calculator configured from opts.
//
// A zero (or absent) MinInterval falls back to defaultMinInterval; when
// several options are passed, the last non-zero MinInterval wins.
//
// Fix: the original only applied the default inside the options loop, so a
// zero-argument call (NewCalculator()) left minInterval at 0 rather than
// defaultMinInterval.
func NewCalculator(opts ...CalculatorOptions) *intervalCalculator {
	calc := &intervalCalculator{minInterval: defaultMinInterval}
	for _, o := range opts {
		if o.MinInterval > 0 {
			calc.minInterval = o.MinInterval
		}
	}
	return calc
}
// Milliseconds returns the interval's value truncated to whole milliseconds.
func (i *Interval) Milliseconds() int64 {
	return int64(i.Value / time.Millisecond)
}
// Calculate derives an interval for the time range targeting DefaultRes data
// points; results below minInterval are clamped (unrounded) to minInterval.
func (ic *intervalCalculator) Calculate(timerange legacydata.DataTimeRange, minInterval time.Duration) Interval {
	span := timerange.MustGetTo().UnixNano() - timerange.MustGetFrom().UnixNano()
	interval := time.Duration(span / DefaultRes)
	if interval < minInterval {
		interval = minInterval
	} else {
		interval = roundInterval(interval)
	}
	return Interval{Text: gtime.FormatInterval(interval), Value: interval}
}
// CalculateSafeInterval derives the rounded interval that keeps the number of
// data points for the time range at or below safeRes.
func (ic *intervalCalculator) CalculateSafeInterval(timerange legacydata.DataTimeRange, safeRes int64) Interval {
	span := timerange.MustGetTo().UnixNano() - timerange.MustGetFrom().UnixNano()
	rounded := roundInterval(time.Duration(span / safeRes))
	return Interval{Text: gtime.FormatInterval(rounded), Value: rounded}
}
// GetIntervalFrom resolves the query interval with the following precedence:
// the query model's "interval" string, its "intervalMs" number, the
// datasource's "timeInterval" JSON setting, and finally defaultInterval.
// Template-style angle brackets ("<10s>") are stripped, and a bare number is
// interpreted as seconds.
func GetIntervalFrom(dsInfo *datasources.DataSource, queryModel *simplejson.Json, defaultInterval time.Duration) (time.Duration, error) {
	interval := queryModel.Get("interval").MustString("")

	if interval == "" {
		// intervalMs field appears in the v2 plugins API and should be
		// preferred if 'interval' isn't present.
		if ms := queryModel.Get("intervalMs").MustInt(0); ms != 0 {
			return time.Duration(ms) * time.Millisecond, nil
		}
		// Fall back to the datasource's configured default interval.
		if dsInfo != nil && dsInfo.JsonData != nil {
			interval = dsInfo.JsonData.Get("timeInterval").MustString("")
		}
		if interval == "" {
			return defaultInterval, nil
		}
	}

	// Strip template-variable brackets, e.g. "<10s>" -> "10s".
	interval = strings.Replace(strings.Replace(interval, "<", "", 1), ">", "", 1)

	isPureNum, err := regexp.MatchString(`^\d+$`, interval)
	if err != nil {
		return 0, err
	}
	if isPureNum {
		// A bare number is treated as seconds.
		interval += "s"
	}

	parsed, err := gtime.ParseDuration(interval)
	if err != nil {
		return 0, err
	}
	return parsed, nil
}
// roundInterval snaps an interval to a human-friendly step: anything at or
// below 15ms becomes 10ms; larger values delegate to the SDK's rounding.
//nolint:gocyclo
func roundInterval(interval time.Duration) time.Duration {
	if interval > 15*time.Millisecond {
		return gtime.RoundInterval(interval)
	}
	return time.Millisecond * 10 // 0.01s
}

View File

@ -1,107 +0,0 @@
package interval
import (
"testing"
"time"
"github.com/stretchr/testify/assert"
"github.com/stretchr/testify/require"
"github.com/grafana/grafana/pkg/components/simplejson"
"github.com/grafana/grafana/pkg/services/datasources"
"github.com/grafana/grafana/pkg/tsdb/legacydata"
)
// TestIntervalCalculator_Calculate checks the rounded interval text produced
// for relative time ranges with a 1ms minimum interval.
func TestIntervalCalculator_Calculate(t *testing.T) {
	calculator := NewCalculator(CalculatorOptions{})

	testCases := []struct {
		name      string
		timeRange legacydata.DataTimeRange
		expected  string
	}{
		{"from 5m to now", legacydata.NewDataTimeRange("5m", "now"), "200ms"},
		{"from 15m to now", legacydata.NewDataTimeRange("15m", "now"), "500ms"},
		{"from 30m to now", legacydata.NewDataTimeRange("30m", "now"), "1s"},
		{"from 1h to now", legacydata.NewDataTimeRange("1h", "now"), "2s"},
	}

	for _, tc := range testCases {
		t.Run(tc.name, func(t *testing.T) {
			interval := calculator.Calculate(tc.timeRange, time.Millisecond*1)
			assert.Equal(t, tc.expected, interval.Text)
		})
	}
}
// TestIntervalCalculator_CalculateSafeInterval checks the rounded interval
// produced for various time ranges at a fixed safe resolution of 11000.
func TestIntervalCalculator_CalculateSafeInterval(t *testing.T) {
	calculator := NewCalculator(CalculatorOptions{})

	testCases := []struct {
		name           string
		timeRange      legacydata.DataTimeRange
		safeResolution int64
		expected       string
	}{
		{"from 5m to now", legacydata.NewDataTimeRange("5m", "now"), 11000, "20ms"},
		{"from 15m to now", legacydata.NewDataTimeRange("15m", "now"), 11000, "100ms"},
		{"from 30m to now", legacydata.NewDataTimeRange("30m", "now"), 11000, "200ms"},
		{"from 24h to now", legacydata.NewDataTimeRange("24h", "now"), 11000, "10s"},
	}

	for _, tc := range testCases {
		t.Run(tc.name, func(t *testing.T) {
			interval := calculator.CalculateSafeInterval(tc.timeRange, tc.safeResolution)
			assert.Equal(t, tc.expected, interval.Text)
		})
	}
}
// TestRoundInterval spot-checks roundInterval's snapping behavior around the
// 15ms threshold and the SDK-delegated rounding above it.
func TestRoundInterval(t *testing.T) {
	testCases := []struct {
		name     string
		interval time.Duration
		expected time.Duration
	}{
		{"30ms", time.Millisecond * 30, time.Millisecond * 20},
		{"45ms", time.Millisecond * 45, time.Millisecond * 50},
	}

	for _, tc := range testCases {
		t.Run(tc.name, func(t *testing.T) {
			assert.Equal(t, tc.expected, roundInterval(tc.interval))
		})
	}
}
// TestGetIntervalFrom covers the interval resolution precedence: "interval"
// string (bare numbers read as seconds), "intervalMs", the datasource's
// "timeInterval" setting, then the supplied default.
func TestGetIntervalFrom(t *testing.T) {
	dsJSON, err := simplejson.NewJson([]byte(`{"timeInterval": "60s"}`))
	require.NoError(t, err)

	testCases := []struct {
		name            string
		dsInfo          *datasources.DataSource
		queryModel      string
		defaultInterval time.Duration
		expected        time.Duration
	}{
		{"45s", nil, `{"interval": "45s"}`, time.Second * 15, time.Second * 45},
		{"45", nil, `{"interval": "45"}`, time.Second * 15, time.Second * 45},
		{"2m", nil, `{"interval": "2m"}`, time.Second * 15, time.Minute * 2},
		{"intervalMs", nil, `{"intervalMs": 45000}`, time.Second * 15, time.Second * 45},
		{"intervalMs sub-seconds", nil, `{"intervalMs": 45200}`, time.Second * 15, time.Millisecond * 45200},
		{"dsInfo timeInterval", &datasources.DataSource{
			JsonData: dsJSON,
		}, `{}`, time.Second * 15, time.Second * 60},
		{"defaultInterval when interval empty", nil, `{"interval": ""}`, time.Second * 15, time.Second * 15},
		{"defaultInterval when intervalMs 0", nil, `{"intervalMs": 0}`, time.Second * 15, time.Second * 15},
	}

	for _, tc := range testCases {
		t.Run(tc.name, func(t *testing.T) {
			js, _ := simplejson.NewJson([]byte(tc.queryModel))
			actual, err := GetIntervalFrom(tc.dsInfo, js, tc.defaultInterval)
			assert.Nil(t, err)
			assert.Equal(t, tc.expected, actual)
		})
	}
}

View File

@ -1,108 +0,0 @@
package service
import (
"context"
"time"
"github.com/grafana/grafana-plugin-sdk-go/backend"
"github.com/grafana/grafana/pkg/plugins"
"github.com/grafana/grafana/pkg/services/datasources"
"github.com/grafana/grafana/pkg/services/oauthtoken"
"github.com/grafana/grafana/pkg/services/pluginsintegration/plugincontext"
"github.com/grafana/grafana/pkg/tsdb/legacydata"
)
// Service handles legacy data requests by translating them into
// backend.QueryDataRequest values and dispatching them to the plugins client.
type Service struct {
	pluginsClient      plugins.Client
	oAuthTokenService  oauthtoken.OAuthTokenService
	dataSourcesService datasources.DataSourceService
	pCtxProvider       *plugincontext.Provider
}
// ProvideService constructs a Service wired with the given dependencies
// (used by the wire dependency-injection setup).
func ProvideService(pluginsClient plugins.Client, oAuthTokenService oauthtoken.OAuthTokenService,
	dataSourcesService datasources.DataSourceService, pCtxProvider *plugincontext.Provider) *Service {
	s := Service{
		pluginsClient:      pluginsClient,
		oAuthTokenService:  oAuthTokenService,
		dataSourcesService: dataSourcesService,
		pCtxProvider:       pCtxProvider,
	}
	return &s
}
// HandleRequest forwards the legacy query to the plugins client and converts
// the SDK response back to the legacy DataResponse shape.
//nolint:staticcheck // legacydata.DataResponse deprecated
func (h *Service) HandleRequest(ctx context.Context, ds *datasources.DataSource, query legacydata.DataQuery) (legacydata.DataResponse, error) {
	// Translate the legacy DataQuery into a plugin SDK QueryDataRequest.
	req, err := h.generateRequest(ctx, ds, query)
	if err != nil {
		return legacydata.DataResponse{}, err
	}

	resp, err := h.pluginsClient.QueryData(ctx, req)
	if err != nil {
		return legacydata.DataResponse{}, err
	}

	// Map each SDK response entry onto a legacy per-refID result.
	tR := legacydata.DataResponse{
		Results: make(map[string]legacydata.DataQueryResult, len(resp.Responses)),
	}

	for refID, r := range resp.Responses {
		qr := legacydata.DataQueryResult{
			RefID: refID,
		}

		// Frames without a refId inherit the response's refId.
		for _, f := range r.Frames {
			if f.RefID == "" {
				f.RefID = refID
			}
		}

		qr.Dataframes = legacydata.NewDecodedDataFrames(r.Frames)

		if r.Error != nil {
			qr.Error = r.Error
		}

		tR.Results[refID] = qr
	}

	return tR, nil
}
// generateRequest converts a legacy DataQuery into a backend.QueryDataRequest,
// resolving the plugin context for the target datasource and user.
func (h *Service) generateRequest(ctx context.Context, ds *datasources.DataSource, query legacydata.DataQuery) (*backend.QueryDataRequest, error) {
	if query.Headers == nil {
		query.Headers = make(map[string]string)
	}

	pCtx, err := h.pCtxProvider.GetWithDataSource(ctx, ds.Type, query.User, ds)
	if err != nil {
		return nil, err
	}

	req := &backend.QueryDataRequest{
		PluginContext: pCtx,
		Queries:       []backend.DataQuery{},
		Headers:       query.Headers,
	}

	// Translate each legacy sub-query into an SDK DataQuery.
	for _, q := range query.Queries {
		modelJSON, err := q.Model.MarshalJSON()
		if err != nil {
			return nil, err
		}
		req.Queries = append(req.Queries, backend.DataQuery{
			RefID:         q.RefID,
			Interval:      time.Duration(q.IntervalMS) * time.Millisecond,
			MaxDataPoints: q.MaxDataPoints,
			TimeRange: backend.TimeRange{
				From: query.TimeRange.GetFromAsTimeUTC(),
				To:   query.TimeRange.GetToAsTimeUTC(),
			},
			QueryType: q.QueryType,
			JSON:      modelJSON,
		})
	}

	return req, nil
}

// Compile-time check that Service satisfies the legacy RequestHandler interface.
var _ legacydata.RequestHandler = &Service{}

View File

@ -1,89 +0,0 @@
package service
import (
"context"
"testing"
"github.com/grafana/grafana-plugin-sdk-go/backend"
"github.com/stretchr/testify/require"
"github.com/grafana/grafana/pkg/components/simplejson"
"github.com/grafana/grafana/pkg/infra/db"
"github.com/grafana/grafana/pkg/infra/localcache"
"github.com/grafana/grafana/pkg/infra/log"
"github.com/grafana/grafana/pkg/plugins"
pluginfakes "github.com/grafana/grafana/pkg/plugins/manager/fakes"
acmock "github.com/grafana/grafana/pkg/services/accesscontrol/mock"
"github.com/grafana/grafana/pkg/services/datasources"
"github.com/grafana/grafana/pkg/services/datasources/guardian"
datasourceservice "github.com/grafana/grafana/pkg/services/datasources/service"
"github.com/grafana/grafana/pkg/services/featuremgmt"
"github.com/grafana/grafana/pkg/services/pluginsintegration/pluginconfig"
"github.com/grafana/grafana/pkg/services/pluginsintegration/plugincontext"
pluginSettings "github.com/grafana/grafana/pkg/services/pluginsintegration/pluginsettings/service"
"github.com/grafana/grafana/pkg/services/pluginsintegration/pluginstore"
"github.com/grafana/grafana/pkg/services/quota/quotatest"
"github.com/grafana/grafana/pkg/services/secrets/fakes"
secretskvs "github.com/grafana/grafana/pkg/services/secrets/kvstore"
secretsmng "github.com/grafana/grafana/pkg/services/secrets/manager"
"github.com/grafana/grafana/pkg/services/user"
"github.com/grafana/grafana/pkg/tests/testsuite"
"github.com/grafana/grafana/pkg/tsdb/legacydata"
)
// TestMain defers to the shared test-suite runner (handles DB setup/teardown).
func TestMain(m *testing.M) {
	testsuite.Run(m)
}
// TestHandleRequest verifies that HandleRequest builds a QueryData request
// from the legacy query and forwards it to the plugins client.
func TestHandleRequest(t *testing.T) {
	t.Run("Should invoke plugin manager QueryData when handling request for query", func(t *testing.T) {
		// Fake plugins client that captures the forwarded request.
		client := &fakePluginsClient{}
		var actualReq *backend.QueryDataRequest
		client.QueryDataHandlerFunc = func(ctx context.Context, req *backend.QueryDataRequest) (*backend.QueryDataResponse, error) {
			actualReq = req
			return backend.NewQueryDataResponse(), nil
		}
		// Wire up the datasource service and plugin-context provider
		// against a test database.
		sqlStore, cfg := db.InitTestDBWithCfg(t)
		secretsService := secretsmng.SetupTestService(t, fakes.NewFakeSecretsStore())
		secretsStore := secretskvs.NewSQLSecretsKVStore(sqlStore, secretsService, log.New("test.logger"))
		datasourcePermissions := acmock.NewMockedPermissionsService()
		quotaService := quotatest.New(false, nil)
		dsCache := datasourceservice.ProvideCacheService(localcache.ProvideService(), sqlStore, guardian.ProvideGuardian())
		dsService, err := datasourceservice.ProvideService(nil, secretsService, secretsStore, cfg, featuremgmt.WithFeatures(),
			acmock.New(), datasourcePermissions, quotaService, &pluginstore.FakePluginStore{}, &pluginfakes.FakePluginClient{},
			plugincontext.ProvideBaseService(cfg, pluginconfig.NewFakePluginRequestConfigProvider()))
		require.NoError(t, err)
		pCtxProvider := plugincontext.ProvideService(cfg, localcache.ProvideService(), &pluginstore.FakePluginStore{
			PluginList: []pluginstore.Plugin{{JSONData: plugins.JSONData{ID: "test"}}},
		}, dsCache, dsService, pluginSettings.ProvideService(sqlStore, secretsService), pluginconfig.NewFakePluginRequestConfigProvider())
		s := ProvideService(client, nil, dsService, pCtxProvider)

		ds := &datasources.DataSource{ID: 12, Type: "test", JsonData: simplejson.New()}

		// A legacy query with two sub-queries.
		req := legacydata.DataQuery{
			TimeRange: &legacydata.DataTimeRange{},
			Queries: []legacydata.DataSubQuery{
				{RefID: "A", Model: simplejson.New()},
				{RefID: "B", Model: simplejson.New()},
			},
			User: &user.SignedInUser{},
		}

		res, err := s.HandleRequest(context.Background(), ds, req)
		require.NoError(t, err)
		require.NotNil(t, actualReq)
		require.NotNil(t, res)
	})
}
// fakePluginsClient stubs plugins.Client, delegating QueryData to an
// injectable handler func.
type fakePluginsClient struct {
	plugins.Client
	backend.QueryDataHandlerFunc
}

// QueryData invokes the injected handler when set; otherwise returns nil, nil.
func (m *fakePluginsClient) QueryData(ctx context.Context, req *backend.QueryDataRequest) (*backend.QueryDataResponse, error) {
	if m.QueryDataHandlerFunc != nil {
		return m.QueryDataHandlerFunc.QueryData(ctx, req)
	}
	return nil, nil
}