diff --git a/pkg/api/alerting.go b/pkg/api/alerting.go
index 0b23cc40171..c42e1f89c0c 100644
--- a/pkg/api/alerting.go
+++ b/pkg/api/alerting.go
@@ -128,19 +128,13 @@ func GetAlerts(c *models.ReqContext) response.Response {
 }
 
 // POST /api/alerts/test
-func AlertTest(c *models.ReqContext, dto dtos.AlertTestCommand) response.Response {
+func (hs *HTTPServer) AlertTest(c *models.ReqContext, dto dtos.AlertTestCommand) response.Response {
 	if _, idErr := dto.Dashboard.Get("id").Int64(); idErr != nil {
 		return response.Error(400, "The dashboard needs to be saved at least once before you can test an alert rule", nil)
 	}
 
-	backendCmd := alerting.AlertTestCommand{
-		OrgID:     c.OrgId,
-		Dashboard: dto.Dashboard,
-		PanelID:   dto.PanelId,
-		User:      c.SignedInUser,
-	}
-
-	if err := bus.Dispatch(&backendCmd); err != nil {
+	res, err := hs.AlertEngine.AlertTest(c.OrgId, dto.Dashboard, dto.PanelId, c.SignedInUser)
+	if err != nil {
 		var validationErr alerting.ValidationError
 		if errors.As(err, &validationErr) {
 			return response.Error(422, validationErr.Error(), nil)
@@ -151,7 +145,6 @@ func AlertTest(c *models.ReqContext, dto dtos.AlertTestCommand) response.Respons
 		return response.Error(500, "Failed to test rule", err)
 	}
 
-	res := backendCmd.Result
 	dtoRes := &dtos.AlertTestResult{
 		Firing:         res.Firing,
 		ConditionEvals: res.ConditionEvals,
diff --git a/pkg/api/api.go b/pkg/api/api.go
index f15a50701f7..2fdd9cb968a 100644
--- a/pkg/api/api.go
+++ b/pkg/api/api.go
@@ -266,14 +266,14 @@ func (hs *HTTPServer) registerRoutes() {
 		apiRoute.Get("/plugins", routing.Wrap(hs.GetPluginList))
 		apiRoute.Get("/plugins/:pluginId/settings", routing.Wrap(GetPluginSettingByID))
-		apiRoute.Get("/plugins/:pluginId/markdown/:name", routing.Wrap(GetPluginMarkdown))
+		apiRoute.Get("/plugins/:pluginId/markdown/:name", routing.Wrap(hs.GetPluginMarkdown))
 		apiRoute.Get("/plugins/:pluginId/health", routing.Wrap(hs.CheckHealth))
 		apiRoute.Any("/plugins/:pluginId/resources", hs.CallResource)
 		apiRoute.Any("/plugins/:pluginId/resources/*", hs.CallResource)
 		apiRoute.Any("/plugins/errors", routing.Wrap(hs.GetPluginErrorsList))
 
 		apiRoute.Group("/plugins", func(pluginRoute routing.RouteRegister) {
-			pluginRoute.Get("/:pluginId/dashboards/", routing.Wrap(GetPluginDashboards))
+			pluginRoute.Get("/:pluginId/dashboards/", routing.Wrap(hs.GetPluginDashboards))
 			pluginRoute.Post("/:pluginId/settings", bind(models.UpdatePluginSettingCmd{}), routing.Wrap(UpdatePluginSetting))
 			pluginRoute.Get("/:pluginId/metrics", routing.Wrap(hs.CollectPluginMetrics))
 		}, reqOrgAdmin)
@@ -316,7 +316,7 @@ func (hs *HTTPServer) registerRoutes() {
 			dashboardRoute.Post("/db", bind(models.SaveDashboardCommand{}), routing.Wrap(hs.PostDashboard))
 			dashboardRoute.Get("/home", routing.Wrap(hs.GetHomeDashboard))
 			dashboardRoute.Get("/tags", GetDashboardTags)
-			dashboardRoute.Post("/import", bind(dtos.ImportDashboardCommand{}), routing.Wrap(ImportDashboard))
+			dashboardRoute.Post("/import", bind(dtos.ImportDashboardCommand{}), routing.Wrap(hs.ImportDashboard))
 
 			dashboardRoute.Group("/id/:dashboardId", func(dashIdRoute routing.RouteRegister) {
 				dashIdRoute.Get("/versions", routing.Wrap(GetDashboardVersions))
@@ -353,13 +353,13 @@ func (hs *HTTPServer) registerRoutes() {
 		// metrics
 		apiRoute.Post("/tsdb/query", bind(dtos.MetricRequest{}), routing.Wrap(hs.QueryMetrics))
 		apiRoute.Get("/tsdb/testdata/gensql", reqGrafanaAdmin, routing.Wrap(GenerateSQLTestData))
-		apiRoute.Get("/tsdb/testdata/random-walk", routing.Wrap(GetTestDataRandomWalk))
+		apiRoute.Get("/tsdb/testdata/random-walk", routing.Wrap(hs.GetTestDataRandomWalk))
 
 		// DataSource w/ expressions
 		apiRoute.Post("/ds/query", bind(dtos.MetricRequest{}), routing.Wrap(hs.QueryMetricsV2))
 
 		apiRoute.Group("/alerts", func(alertsRoute routing.RouteRegister) {
-			alertsRoute.Post("/test", bind(dtos.AlertTestCommand{}), routing.Wrap(AlertTest))
+			alertsRoute.Post("/test", bind(dtos.AlertTestCommand{}), routing.Wrap(hs.AlertTest))
 			alertsRoute.Post("/:alertId/pause", reqEditorRole, bind(dtos.PauseAlertCommand{}), routing.Wrap(PauseAlert))
 			alertsRoute.Get("/:alertId", ValidateOrgAlert, routing.Wrap(GetAlert))
 			alertsRoute.Get("/", routing.Wrap(GetAlerts))
diff --git a/pkg/api/app_routes.go b/pkg/api/app_routes.go
index 55687aa247f..130c9baf580 100644
--- a/pkg/api/app_routes.go
+++ b/pkg/api/app_routes.go
@@ -11,6 +11,7 @@ import (
 	"github.com/grafana/grafana/pkg/middleware"
 	"github.com/grafana/grafana/pkg/models"
 	"github.com/grafana/grafana/pkg/plugins"
+	"github.com/grafana/grafana/pkg/plugins/manager"
 	"github.com/grafana/grafana/pkg/util"
 	macaron "gopkg.in/macaron.v1"
 )
@@ -31,7 +32,7 @@ func (hs *HTTPServer) initAppPluginRoutes(r *macaron.Macaron) {
 		TLSHandshakeTimeout: 10 * time.Second,
 	}
 
-	for _, plugin := range plugins.Apps {
+	for _, plugin := range manager.Apps {
 		for _, route := range plugin.Routes {
 			url := util.JoinURLFragments("/api/plugin-proxy/"+plugin.Id, route.Path)
 			handlers := make([]macaron.Handler, 0)
diff --git a/pkg/api/dashboard.go b/pkg/api/dashboard.go
index d72e4cb3853..93ff3f79c19 100644
--- a/pkg/api/dashboard.go
+++ b/pkg/api/dashboard.go
@@ -8,6 +8,7 @@ import (
 	"path/filepath"
 
 	"github.com/grafana/grafana/pkg/models"
+	"github.com/grafana/grafana/pkg/plugins/manager"
 	"github.com/grafana/grafana/pkg/services/alerting"
 	"github.com/grafana/grafana/pkg/services/dashboards"
 
@@ -17,7 +18,6 @@ import (
 	"github.com/grafana/grafana/pkg/components/dashdiffs"
 	"github.com/grafana/grafana/pkg/components/simplejson"
 	"github.com/grafana/grafana/pkg/infra/metrics"
-	"github.com/grafana/grafana/pkg/plugins"
 	"github.com/grafana/grafana/pkg/services/guardian"
 	"github.com/grafana/grafana/pkg/util"
 )
@@ -226,7 +226,7 @@ func (hs *HTTPServer) deleteDashboard(c *models.ReqContext) response.Response {
 		}
 	}
 
-	err := dashboards.NewService().DeleteDashboard(dash.Id, c.OrgId)
+	err := dashboards.NewService(hs.DataService).DeleteDashboard(dash.Id, c.OrgId)
 	if err != nil {
 		var dashboardErr models.DashboardErr
 		if ok := errors.As(err, &dashboardErr); ok {
@@ -288,7 +288,7 @@ func (hs *HTTPServer) PostDashboard(c *models.ReqContext, cmd models.SaveDashboa
 		Overwrite: cmd.Overwrite,
 	}
 
-	dashboard, err := dashboards.NewService().SaveDashboard(dashItem, allowUiUpdate)
+	dashboard, err := dashboards.NewService(hs.DataService).SaveDashboard(dashItem, allowUiUpdate)
 	if err != nil {
 		return dashboardSaveErrorToApiResponse(err)
 	}
@@ -356,7 +356,7 @@ func dashboardSaveErrorToApiResponse(err error) response.Response {
 	if ok := errors.As(err, &pluginErr); ok {
 		message := fmt.Sprintf("The dashboard belongs to plugin %s.", pluginErr.PluginId)
 		// look up plugin name
-		if pluginDef, exist := plugins.Plugins[pluginErr.PluginId]; exist {
+		if pluginDef, exist := manager.Plugins[pluginErr.PluginId]; exist {
 			message = fmt.Sprintf("The dashboard belongs to plugin %s.", pluginDef.Name)
 		}
 		return response.JSON(412, util.DynMap{"status": "plugin-dashboard", "message": message})
diff --git a/pkg/api/dataproxy.go b/pkg/api/dataproxy.go
index 49de42c5a01..88ad41b5174 100644
--- a/pkg/api/dataproxy.go
+++ b/pkg/api/dataproxy.go
@@ -9,7 +9,7 @@ import (
"github.com/grafana/grafana/pkg/api/pluginproxy" "github.com/grafana/grafana/pkg/infra/metrics" "github.com/grafana/grafana/pkg/models" - "github.com/grafana/grafana/pkg/plugins" + "github.com/grafana/grafana/pkg/plugins/manager" ) // ProxyDataSourceRequest proxies datasource requests @@ -34,7 +34,7 @@ func (hs *HTTPServer) ProxyDataSourceRequest(c *models.ReqContext) { } // find plugin - plugin, ok := plugins.DataSources[ds.Type] + plugin, ok := manager.DataSources[ds.Type] if !ok { c.JsonApiErr(http.StatusInternalServerError, "Unable to find datasource plugin", err) return diff --git a/pkg/api/datasources.go b/pkg/api/datasources.go index 3d96f3b7948..7ba1d612e72 100644 --- a/pkg/api/datasources.go +++ b/pkg/api/datasources.go @@ -13,8 +13,8 @@ import ( "github.com/grafana/grafana/pkg/bus" "github.com/grafana/grafana/pkg/infra/log" "github.com/grafana/grafana/pkg/models" - "github.com/grafana/grafana/pkg/plugins" - "github.com/grafana/grafana/pkg/plugins/datasource/wrapper" + "github.com/grafana/grafana/pkg/plugins/adapters" + "github.com/grafana/grafana/pkg/plugins/manager" "github.com/grafana/grafana/pkg/util" ) @@ -47,7 +47,7 @@ func (hs *HTTPServer) GetDataSources(c *models.ReqContext) response.Response { ReadOnly: ds.ReadOnly, } - if plugin, exists := plugins.DataSources[ds.Type]; exists { + if plugin, exists := manager.DataSources[ds.Type]; exists { dsItem.TypeLogoUrl = plugin.Info.Logos.Small dsItem.TypeName = plugin.Name } else { @@ -363,19 +363,19 @@ func (hs *HTTPServer) CallDatasourceResource(c *models.ReqContext) { } // find plugin - plugin, ok := plugins.DataSources[ds.Type] + plugin, ok := manager.DataSources[ds.Type] if !ok { c.JsonApiErr(500, "Unable to find datasource plugin", err) return } - dsInstanceSettings, err := wrapper.ModelToInstanceSettings(ds) + dsInstanceSettings, err := adapters.ModelToInstanceSettings(ds) if err != nil { c.JsonApiErr(500, "Unable to process datasource instance model", err) } pCtx := backend.PluginContext{ - User: wrapper.BackendUserFromSignedInUser(c.SignedInUser), + User: adapters.BackendUserFromSignedInUser(c.SignedInUser), OrgID: c.OrgId, PluginID: plugin.Id, DataSourceInstanceSettings: dsInstanceSettings, @@ -433,12 +433,12 @@ func (hs *HTTPServer) CheckDatasourceHealth(c *models.ReqContext) response.Respo return response.Error(500, "Unable to find datasource plugin", err) } - dsInstanceSettings, err := wrapper.ModelToInstanceSettings(ds) + dsInstanceSettings, err := adapters.ModelToInstanceSettings(ds) if err != nil { return response.Error(500, "Unable to get datasource model", err) } pCtx := backend.PluginContext{ - User: wrapper.BackendUserFromSignedInUser(c.SignedInUser), + User: adapters.BackendUserFromSignedInUser(c.SignedInUser), OrgID: c.OrgId, PluginID: plugin.Id, DataSourceInstanceSettings: dsInstanceSettings, diff --git a/pkg/api/dtos/plugins.go b/pkg/api/dtos/plugins.go index 4b83e443c1d..e6c7cc12bdc 100644 --- a/pkg/api/dtos/plugins.go +++ b/pkg/api/dtos/plugins.go @@ -3,6 +3,7 @@ package dtos import ( "github.com/grafana/grafana/pkg/components/simplejson" "github.com/grafana/grafana/pkg/plugins" + "github.com/grafana/grafana/pkg/plugins/manager" ) type PluginSetting struct { @@ -63,6 +64,6 @@ type ImportDashboardCommand struct { Path string `json:"path"` Overwrite bool `json:"overwrite"` Dashboard *simplejson.Json `json:"dashboard"` - Inputs []plugins.ImportDashboardInput `json:"inputs"` + Inputs []manager.ImportDashboardInput `json:"inputs"` FolderId int64 `json:"folderId"` } diff --git a/pkg/api/frontend_logging_test.go 
b/pkg/api/frontend_logging_test.go index ee78f694a86..53194d2732f 100644 --- a/pkg/api/frontend_logging_test.go +++ b/pkg/api/frontend_logging_test.go @@ -16,6 +16,7 @@ import ( "github.com/grafana/grafana/pkg/api/routing" "github.com/grafana/grafana/pkg/models" "github.com/grafana/grafana/pkg/plugins" + "github.com/grafana/grafana/pkg/plugins/manager" "github.com/grafana/grafana/pkg/setting" log "github.com/inconshreveable/log15" @@ -90,7 +91,7 @@ func TestFrontendLoggingEndpoint(t *testing.T) { require.NoError(t, err) // fake plugin route so we will try to find a source map there. I can't believe I can do this - plugins.StaticRoutes = append(plugins.StaticRoutes, &plugins.PluginStaticRoute{ + manager.StaticRoutes = append(manager.StaticRoutes, &plugins.PluginStaticRoute{ Directory: "/usr/local/telepathic-panel", PluginId: "telepathic", }) diff --git a/pkg/api/frontendlogging/source_maps.go b/pkg/api/frontendlogging/source_maps.go index 818bf7649cd..bc3579a18fa 100644 --- a/pkg/api/frontendlogging/source_maps.go +++ b/pkg/api/frontendlogging/source_maps.go @@ -12,7 +12,7 @@ import ( sourcemap "github.com/go-sourcemap/sourcemap" "github.com/getsentry/sentry-go" - "github.com/grafana/grafana/pkg/plugins" + "github.com/grafana/grafana/pkg/plugins/manager" "github.com/grafana/grafana/pkg/setting" ) @@ -80,7 +80,7 @@ func (store *SourceMapStore) guessSourceMapLocation(sourceURL string) (*sourceMa } // if source comes from a plugin, look in plugin dir } else if strings.HasPrefix(u.Path, "/public/plugins/") { - for _, route := range plugins.StaticRoutes { + for _, route := range manager.StaticRoutes { pluginPrefix := filepath.Join("/public/plugins/", route.PluginId) if strings.HasPrefix(u.Path, pluginPrefix) { return &sourceMapLocation{ diff --git a/pkg/api/frontendsettings.go b/pkg/api/frontendsettings.go index 92573ef766a..c647373edcf 100644 --- a/pkg/api/frontendsettings.go +++ b/pkg/api/frontendsettings.go @@ -5,6 +5,7 @@ import ( "strconv" "github.com/grafana/grafana/pkg/models" + "github.com/grafana/grafana/pkg/plugins/manager" "github.com/grafana/grafana/pkg/components/simplejson" "github.com/grafana/grafana/pkg/util" @@ -109,12 +110,12 @@ func (hs *HTTPServer) getFSDataSources(c *models.ReqContext, enabledPlugins *plu // add data sources that are built in (meaning they are not added via data sources page, nor have any entry in // the datasource table) - for _, ds := range plugins.DataSources { + for _, ds := range manager.DataSources { if ds.BuiltIn { dataSources[ds.Name] = map[string]interface{}{ "type": ds.Type, "name": ds.Name, - "meta": plugins.DataSources[ds.Id], + "meta": manager.DataSources[ds.Id], } } } @@ -124,7 +125,7 @@ func (hs *HTTPServer) getFSDataSources(c *models.ReqContext, enabledPlugins *plu // getFrontendSettingsMap returns a json object with all the settings needed for front end initialisation. 
 func (hs *HTTPServer) getFrontendSettingsMap(c *models.ReqContext) (map[string]interface{}, error) {
-	enabledPlugins, err := plugins.GetEnabledPlugins(c.OrgId)
+	enabledPlugins, err := hs.PluginManager.GetEnabledPlugins(c.OrgId)
 	if err != nil {
 		return nil, err
 	}
diff --git a/pkg/api/frontendsettings_test.go b/pkg/api/frontendsettings_test.go
index a1c720ef6fe..b25210dfda2 100644
--- a/pkg/api/frontendsettings_test.go
+++ b/pkg/api/frontendsettings_test.go
@@ -11,7 +11,7 @@ import (
 
 	"github.com/stretchr/testify/require"
 
-	"github.com/grafana/grafana/pkg/plugins"
+	"github.com/grafana/grafana/pkg/plugins/manager"
 	"github.com/grafana/grafana/pkg/services/rendering"
 
 	"github.com/grafana/grafana/pkg/services/licensing"
@@ -50,7 +50,7 @@ func setupTestEnvironment(t *testing.T, cfg *setting.Cfg) (*macaron.Macaron, *HT
 		Bus:           bus.GetBus(),
 		License:       &licensing.OSSLicensingService{Cfg: cfg},
 		RenderService: r,
-		PluginManager: &plugins.PluginManager{Cfg: cfg},
+		PluginManager: &manager.PluginManager{Cfg: cfg},
 	}
 
 	m := macaron.New()
diff --git a/pkg/api/http_server.go b/pkg/api/http_server.go
index 85e3c232b82..feedf8ff70b 100644
--- a/pkg/api/http_server.go
+++ b/pkg/api/http_server.go
@@ -13,12 +13,12 @@ import (
 	"strings"
 	"sync"
 
+	"github.com/grafana/grafana/pkg/services/alerting"
 	"github.com/grafana/grafana/pkg/services/live"
 	"github.com/grafana/grafana/pkg/services/search"
 	"github.com/grafana/grafana/pkg/services/shorturls"
 	"github.com/grafana/grafana/pkg/services/sqlstore"
-
-	"github.com/grafana/grafana/pkg/plugins/backendplugin"
+	"github.com/grafana/grafana/pkg/tsdb"
 
 	"github.com/grafana/grafana/pkg/api/routing"
 	httpstatic "github.com/grafana/grafana/pkg/api/static"
@@ -29,7 +29,10 @@ import (
 	"github.com/grafana/grafana/pkg/infra/remotecache"
 	"github.com/grafana/grafana/pkg/middleware"
 	"github.com/grafana/grafana/pkg/models"
-	"github.com/grafana/grafana/pkg/plugins"
+	"github.com/grafana/grafana/pkg/plugins/backendplugin"
+	_ "github.com/grafana/grafana/pkg/plugins/backendplugin/manager"
+	"github.com/grafana/grafana/pkg/plugins/manager"
+	"github.com/grafana/grafana/pkg/plugins/plugindashboards"
 	"github.com/grafana/grafana/pkg/registry"
 	"github.com/grafana/grafana/pkg/services/contexthandler"
 	"github.com/grafana/grafana/pkg/services/datasources"
@@ -76,13 +79,16 @@ type HTTPServer struct {
 	License                models.Licensing                   `inject:""`
 	BackendPluginManager   backendplugin.Manager              `inject:""`
 	PluginRequestValidator models.PluginRequestValidator      `inject:""`
-	PluginManager          *plugins.PluginManager             `inject:""`
+	PluginManager          *manager.PluginManager             `inject:""`
 	SearchService          *search.SearchService              `inject:""`
 	ShortURLService        *shorturls.ShortURLService         `inject:""`
 	Live                   *live.GrafanaLive                  `inject:""`
 	ContextHandler         *contexthandler.ContextHandler     `inject:""`
 	SQLStore               *sqlstore.SQLStore                 `inject:""`
 	LibraryPanelService    *librarypanels.LibraryPanelService `inject:""`
+	DataService            *tsdb.Service                      `inject:""`
+	PluginDashboardService *plugindashboards.Service          `inject:""`
+	AlertEngine            *alerting.AlertEngine              `inject:""`
 	Listener               net.Listener
 }
@@ -312,7 +318,7 @@ func (hs *HTTPServer) addMiddlewaresAndStaticRoutes() {
 
 	m.Use(middleware.Recovery(hs.Cfg))
 
-	for _, route := range plugins.StaticRoutes {
+	for _, route := range manager.StaticRoutes {
 		pluginRoute := path.Join("/public/plugins/", route.PluginId)
 		hs.log.Debug("Plugins: Adding route", "route", pluginRoute, "dir", route.Directory)
 		hs.mapStatic(m, route.Directory, "", pluginRoute)
diff --git a/pkg/api/index.go b/pkg/api/index.go
index b97ed4d0830..80f4804954d 100644
--- a/pkg/api/index.go
+++ b/pkg/api/index.go
@@ -8,7 +8,6 @@ import (
 	"github.com/grafana/grafana/pkg/api/dtos"
 	"github.com/grafana/grafana/pkg/bus"
 	"github.com/grafana/grafana/pkg/models"
-	"github.com/grafana/grafana/pkg/plugins"
 	"github.com/grafana/grafana/pkg/setting"
 )
@@ -63,8 +62,8 @@ func getProfileNode(c *models.ReqContext) *dtos.NavLink {
 	}
 }
 
-func getAppLinks(c *models.ReqContext) ([]*dtos.NavLink, error) {
-	enabledPlugins, err := plugins.GetEnabledPlugins(c.OrgId)
+func (hs *HTTPServer) getAppLinks(c *models.ReqContext) ([]*dtos.NavLink, error) {
+	enabledPlugins, err := hs.PluginManager.GetEnabledPlugins(c.OrgId)
 	if err != nil {
 		return nil, err
 	}
@@ -213,7 +212,7 @@ func (hs *HTTPServer) getNavTree(c *models.ReqContext, hasEditPerm bool) ([]*dto
 		})
 	}
 
-	appLinks, err := getAppLinks(c)
+	appLinks, err := hs.getAppLinks(c)
 	if err != nil {
 		return nil, err
 	}
diff --git a/pkg/api/metrics.go b/pkg/api/metrics.go
index e8032fbf6c1..076aac8b254 100644
--- a/pkg/api/metrics.go
+++ b/pkg/api/metrics.go
@@ -7,12 +7,12 @@ import (
 
 	"github.com/grafana/grafana/pkg/expr"
 	"github.com/grafana/grafana/pkg/models"
+	"github.com/grafana/grafana/pkg/plugins"
 
 	"github.com/grafana/grafana/pkg/api/dtos"
 	"github.com/grafana/grafana/pkg/api/response"
 	"github.com/grafana/grafana/pkg/bus"
 	"github.com/grafana/grafana/pkg/components/simplejson"
-	"github.com/grafana/grafana/pkg/tsdb"
 	"github.com/grafana/grafana/pkg/util"
 )
@@ -23,11 +23,12 @@ func (hs *HTTPServer) QueryMetricsV2(c *models.ReqContext, reqDTO dtos.MetricReq
 		return response.Error(http.StatusBadRequest, "No queries found in query", nil)
 	}
 
-	request := &tsdb.TsdbQuery{
-		TimeRange: tsdb.NewTimeRange(reqDTO.From, reqDTO.To),
+	timeRange := plugins.NewDataTimeRange(reqDTO.From, reqDTO.To)
+	request := plugins.DataQuery{
+		TimeRange: &timeRange,
 		Debug:     reqDTO.Debug,
 		User:      c.SignedInUser,
-		Queries:   make([]*tsdb.Query, 0, len(reqDTO.Queries)),
+		Queries:   make([]plugins.DataSubQuery, 0, len(reqDTO.Queries)),
 	}
 
 	// Loop to see if we have an expression.
@@ -57,10 +58,10 @@ func (hs *HTTPServer) QueryMetricsV2(c *models.ReqContext, reqDTO dtos.MetricReq
 			}
 		}
 
-		request.Queries = append(request.Queries, &tsdb.Query{
-			RefId:         query.Get("refId").MustString("A"),
+		request.Queries = append(request.Queries, plugins.DataSubQuery{
+			RefID:         query.Get("refId").MustString("A"),
 			MaxDataPoints: query.Get("maxDataPoints").MustInt64(100),
-			IntervalMs:    query.Get("intervalMs").MustInt64(1000),
+			IntervalMS:    query.Get("intervalMs").MustInt64(1000),
 			QueryType:     query.Get("queryType").MustString(""),
 			Model:         query,
 			DataSource:    ds,
@@ -72,7 +73,7 @@ func (hs *HTTPServer) QueryMetricsV2(c *models.ReqContext, reqDTO dtos.MetricReq
 		return response.Error(http.StatusForbidden, "Access denied", err)
 	}
 
-	resp, err := tsdb.HandleRequest(c.Req.Context(), ds, request)
+	resp, err := hs.DataService.HandleRequest(c.Req.Context(), ds, request)
 	if err != nil {
 		return response.Error(http.StatusInternalServerError, "Metric request error", err)
 	}
@@ -91,11 +92,12 @@ func (hs *HTTPServer) QueryMetricsV2(c *models.ReqContext, reqDTO dtos.MetricReq
 
 // handleExpressions handles POST /api/ds/query when there is an expression.
 func (hs *HTTPServer) handleExpressions(c *models.ReqContext, reqDTO dtos.MetricRequest) response.Response {
-	request := &tsdb.TsdbQuery{
-		TimeRange: tsdb.NewTimeRange(reqDTO.From, reqDTO.To),
+	timeRange := plugins.NewDataTimeRange(reqDTO.From, reqDTO.To)
+	request := plugins.DataQuery{
+		TimeRange: &timeRange,
 		Debug:     reqDTO.Debug,
 		User:      c.SignedInUser,
-		Queries:   make([]*tsdb.Query, 0, len(reqDTO.Queries)),
+		Queries:   make([]plugins.DataSubQuery, 0, len(reqDTO.Queries)),
 	}
 
 	for _, query := range reqDTO.Queries {
@@ -116,16 +118,19 @@ func (hs *HTTPServer) handleExpressions(c *models.ReqContext, reqDTO dtos.Metric
 			}
 		}
 
-		request.Queries = append(request.Queries, &tsdb.Query{
-			RefId:         query.Get("refId").MustString("A"),
+		request.Queries = append(request.Queries, plugins.DataSubQuery{
+			RefID:         query.Get("refId").MustString("A"),
 			MaxDataPoints: query.Get("maxDataPoints").MustInt64(100),
-			IntervalMs:    query.Get("intervalMs").MustInt64(1000),
+			IntervalMS:    query.Get("intervalMs").MustInt64(1000),
 			QueryType:     query.Get("queryType").MustString(""),
 			Model:         query,
 		})
 	}
 
-	exprService := expr.Service{Cfg: hs.Cfg}
+	exprService := expr.Service{
+		Cfg:         hs.Cfg,
+		DataService: hs.DataService,
+	}
 	resp, err := exprService.WrapTransformData(c.Req.Context(), request)
 	if err != nil {
 		return response.Error(500, "expression request error", err)
@@ -157,8 +162,6 @@ func (hs *HTTPServer) handleGetDataSourceError(err error, datasourceID int64) *r
 // QueryMetrics returns query metrics
 // POST /api/tsdb/query
 func (hs *HTTPServer) QueryMetrics(c *models.ReqContext, reqDto dtos.MetricRequest) response.Response {
-	timeRange := tsdb.NewTimeRange(reqDto.From, reqDto.To)
-
 	if len(reqDto.Queries) == 0 {
 		return response.Error(http.StatusBadRequest, "No queries found in query", nil)
 	}
@@ -178,23 +181,24 @@ func (hs *HTTPServer) QueryMetrics(c *models.ReqContext, reqDto dtos.MetricReque
 		return response.Error(http.StatusForbidden, "Access denied", err)
 	}
 
-	request := &tsdb.TsdbQuery{
-		TimeRange: timeRange,
+	timeRange := plugins.NewDataTimeRange(reqDto.From, reqDto.To)
+	request := plugins.DataQuery{
+		TimeRange: &timeRange,
 		Debug:     reqDto.Debug,
 		User:      c.SignedInUser,
 	}
 
 	for _, query := range reqDto.Queries {
-		request.Queries = append(request.Queries, &tsdb.Query{
-			RefId:         query.Get("refId").MustString("A"),
+		request.Queries = append(request.Queries, plugins.DataSubQuery{
+			RefID:         query.Get("refId").MustString("A"),
 			MaxDataPoints: query.Get("maxDataPoints").MustInt64(100),
-			IntervalMs:    query.Get("intervalMs").MustInt64(1000),
+			IntervalMS:    query.Get("intervalMs").MustInt64(1000),
 			Model:         query,
 			DataSource:    ds,
 		})
 	}
 
-	resp, err := tsdb.HandleRequest(c.Req.Context(), ds, request)
+	resp, err := hs.DataService.HandleRequest(c.Req.Context(), ds, request)
 	if err != nil {
 		return response.Error(http.StatusInternalServerError, "Metric request error", err)
 	}
@@ -221,28 +225,28 @@ func GenerateSQLTestData(c *models.ReqContext) response.Response {
 }
 
 // GET /api/tsdb/testdata/random-walk
-func GetTestDataRandomWalk(c *models.ReqContext) response.Response {
+func (hs *HTTPServer) GetTestDataRandomWalk(c *models.ReqContext) response.Response {
 	from := c.Query("from")
 	to := c.Query("to")
-	intervalMs := c.QueryInt64("intervalMs")
+	intervalMS := c.QueryInt64("intervalMs")
 
-	timeRange := tsdb.NewTimeRange(from, to)
-	request := &tsdb.TsdbQuery{TimeRange: timeRange}
+	timeRange := plugins.NewDataTimeRange(from, to)
+	request := plugins.DataQuery{TimeRange: &timeRange}
 
 	dsInfo := &models.DataSource{
 		Type:     "testdata",
 		JsonData: simplejson.New(),
 	}
-	request.Queries = append(request.Queries, &tsdb.Query{
-		RefId:      "A",
-		IntervalMs: intervalMs,
+	request.Queries = append(request.Queries, plugins.DataSubQuery{
+		RefID:      "A",
+		IntervalMS: intervalMS,
 		Model: simplejson.NewFromAny(&util.DynMap{
 			"scenario": "random_walk",
 		}),
 		DataSource: dsInfo,
 	})
 
-	resp, err := tsdb.HandleRequest(context.Background(), dsInfo, request)
+	resp, err := hs.DataService.HandleRequest(context.Background(), dsInfo, request)
 	if err != nil {
 		return response.Error(500, "Metric request error", err)
 	}
diff --git a/pkg/api/pluginproxy/ds_auth_provider.go b/pkg/api/pluginproxy/ds_auth_provider.go
index a92fcc4e738..493d2e997d1 100644
--- a/pkg/api/pluginproxy/ds_auth_provider.go
+++ b/pkg/api/pluginproxy/ds_auth_provider.go
@@ -14,7 +14,8 @@ import (
 )
 
 // ApplyRoute should use the plugin route data to set auth headers and custom headers.
-func ApplyRoute(ctx context.Context, req *http.Request, proxyPath string, route *plugins.AppPluginRoute, ds *models.DataSource) {
+func ApplyRoute(ctx context.Context, req *http.Request, proxyPath string, route *plugins.AppPluginRoute,
+	ds *models.DataSource) {
 	proxyPath = strings.TrimPrefix(proxyPath, route.Path)
 
 	data := templateData{
diff --git a/pkg/api/pluginproxy/ds_proxy_test.go b/pkg/api/pluginproxy/ds_proxy_test.go
index 50e68c8e567..a000dfef76c 100644
--- a/pkg/api/pluginproxy/ds_proxy_test.go
+++ b/pkg/api/pluginproxy/ds_proxy_test.go
@@ -14,6 +14,7 @@ import (
 	"github.com/grafana/grafana/pkg/api/datasource"
 	"github.com/grafana/grafana/pkg/components/securejsondata"
 	"github.com/grafana/grafana/pkg/models"
+	"github.com/grafana/grafana/pkg/plugins"
 
 	"github.com/stretchr/testify/assert"
 	"github.com/stretchr/testify/require"
@@ -23,7 +24,6 @@ import (
 	"github.com/grafana/grafana/pkg/bus"
 	"github.com/grafana/grafana/pkg/components/simplejson"
 	"github.com/grafana/grafana/pkg/login/social"
-	"github.com/grafana/grafana/pkg/plugins"
 	"github.com/grafana/grafana/pkg/setting"
 	"github.com/grafana/grafana/pkg/util"
 )
diff --git a/pkg/api/pluginproxy/pluginproxy.go b/pkg/api/pluginproxy/pluginproxy.go
index 34e88459ef3..d145480afc2 100644
--- a/pkg/api/pluginproxy/pluginproxy.go
+++ b/pkg/api/pluginproxy/pluginproxy.go
@@ -20,7 +20,8 @@ type templateData struct {
 }
 
 // NewApiPluginProxy create a plugin proxy
-func NewApiPluginProxy(ctx *models.ReqContext, proxyPath string, route *plugins.AppPluginRoute, appID string, cfg *setting.Cfg) *httputil.ReverseProxy {
+func NewApiPluginProxy(ctx *models.ReqContext, proxyPath string, route *plugins.AppPluginRoute,
+	appID string, cfg *setting.Cfg) *httputil.ReverseProxy {
 	director := func(req *http.Request) {
 		query := models.GetPluginSettingByIdQuery{OrgId: ctx.OrgId, PluginId: appID}
 		if err := bus.Dispatch(&query); err != nil {
diff --git a/pkg/api/plugins.go b/pkg/api/plugins.go
index 1becc2fb953..fe93e00c3f0 100644
--- a/pkg/api/plugins.go
+++ b/pkg/api/plugins.go
@@ -14,8 +14,9 @@ import (
 	"github.com/grafana/grafana/pkg/bus"
 	"github.com/grafana/grafana/pkg/models"
 	"github.com/grafana/grafana/pkg/plugins"
+	"github.com/grafana/grafana/pkg/plugins/adapters"
 	"github.com/grafana/grafana/pkg/plugins/backendplugin"
-	"github.com/grafana/grafana/pkg/plugins/datasource/wrapper"
+	"github.com/grafana/grafana/pkg/plugins/manager"
 	"github.com/grafana/grafana/pkg/setting"
 	"github.com/grafana/grafana/pkg/util/errutil"
 )
@@ -25,7 +26,7 @@ var ErrPluginNotFound error = errors.New("plugin not found, no installed plugin
 
 func (hs *HTTPServer) getPluginContext(pluginID string, user *models.SignedInUser) (backend.PluginContext, error) {
 	pc := backend.PluginContext{}
-	plugin, exists := plugins.Plugins[pluginID]
+	plugin, exists := manager.Plugins[pluginID]
 	if !exists {
 		return pc, ErrPluginNotFound
 	}
@@ -53,7 +54,7 @@ func (hs *HTTPServer) getPluginContext(pluginID string, user *models.SignedInUse
 	return backend.PluginContext{
 		OrgID:    user.OrgId,
 		PluginID: plugin.Id,
-		User:     wrapper.BackendUserFromSignedInUser(user),
+		User:     adapters.BackendUserFromSignedInUser(user),
 		AppInstanceSettings: &backend.AppInstanceSettings{
 			JSONData:                jsonData,
 			DecryptedSecureJSONData: decryptedSecureJSONData,
@@ -73,14 +74,14 @@ func (hs *HTTPServer) GetPluginList(c *models.ReqContext) response.Response {
 		coreFilter = "1"
 	}
 
-	pluginSettingsMap, err := plugins.GetPluginSettings(c.OrgId)
+	pluginSettingsMap, err := hs.PluginManager.GetPluginSettings(c.OrgId)
 	if err != nil {
 		return response.Error(500, "Failed to get list of plugins", err)
 	}
 
 	result := make(dtos.PluginList, 0)
-	for _, pluginDef := range plugins.Plugins {
+	for _, pluginDef := range manager.Plugins {
 		// filter out app sub plugins
 		if embeddedFilter == "0" && pluginDef.IncludedInAppId != "" {
 			continue
@@ -130,7 +131,7 @@ func (hs *HTTPServer) GetPluginList(c *models.ReqContext) response.Response {
 		}
 
 		// filter out built in data sources
-		if ds, exists := plugins.DataSources[pluginDef.Id]; exists {
+		if ds, exists := manager.DataSources[pluginDef.Id]; exists {
 			if ds.BuiltIn {
 				continue
 			}
@@ -146,7 +147,7 @@ func (hs *HTTPServer) GetPluginList(c *models.ReqContext) response.Response {
 func GetPluginSettingByID(c *models.ReqContext) response.Response {
 	pluginID := c.Params(":pluginId")
 
-	def, exists := plugins.Plugins[pluginID]
+	def, exists := manager.Plugins[pluginID]
 	if !exists {
 		return response.Error(404, "Plugin not found, no installed plugin with that id", nil)
 	}
@@ -169,7 +170,7 @@ func GetPluginSettingByID(c *models.ReqContext) response.Response {
 		SignatureOrg:  def.SignatureOrg,
 	}
 
-	if app, ok := plugins.Apps[def.Id]; ok {
+	if app, ok := manager.Apps[def.Id]; ok {
 		dto.Enabled = app.AutoEnabled
 		dto.Pinned = app.AutoEnabled
 	}
@@ -194,7 +195,7 @@ func UpdatePluginSetting(c *models.ReqContext, cmd models.UpdatePluginSettingCmd
 	cmd.OrgId = c.OrgId
 	cmd.PluginId = pluginID
 
-	if _, ok := plugins.Apps[cmd.PluginId]; !ok {
+	if _, ok := manager.Apps[cmd.PluginId]; !ok {
 		return response.Error(404, "Plugin not installed.", nil)
 	}
 
@@ -205,10 +206,10 @@ func UpdatePluginSetting(c *models.ReqContext, cmd models.UpdatePluginSettingCmd
 	return response.Success("Plugin settings updated")
 }
 
-func GetPluginDashboards(c *models.ReqContext) response.Response {
+func (hs *HTTPServer) GetPluginDashboards(c *models.ReqContext) response.Response {
 	pluginID := c.Params(":pluginId")
 
-	list, err := plugins.GetPluginDashboards(c.OrgId, pluginID)
+	list, err := hs.PluginManager.GetPluginDashboards(c.OrgId, pluginID)
 	if err != nil {
 		var notFound plugins.PluginNotFoundError
 		if errors.As(err, &notFound) {
@@ -221,11 +222,11 @@ func GetPluginDashboards(c *models.ReqContext) response.Response {
 	return response.JSON(200, list)
 }
 
-func GetPluginMarkdown(c *models.ReqContext) response.Response {
+func (hs *HTTPServer) GetPluginMarkdown(c *models.ReqContext) response.Response {
 	pluginID := c.Params(":pluginId")
 	name := c.Params(":name")
 
-	content, err := plugins.GetPluginMarkdown(pluginID, name)
+	content, err := hs.PluginManager.GetPluginMarkdown(pluginID, name)
 	if err != nil {
 		var notFound plugins.PluginNotFoundError
 		if errors.As(err, &notFound) {
@@ -237,7 +238,7 @@ func GetPluginMarkdown(c *models.ReqContext) response.Response {
 
 	// fallback try readme
 	if len(content) == 0 {
-		content, err = plugins.GetPluginMarkdown(pluginID, "readme")
+		content, err = hs.PluginManager.GetPluginMarkdown(pluginID, "readme")
 		if err != nil {
 			return response.Error(501, "Could not get markdown file", err)
 		}
@@ -248,27 +249,18 @@ func GetPluginMarkdown(c *models.ReqContext) response.Response {
 	return resp
 }
 
-func ImportDashboard(c *models.ReqContext, apiCmd dtos.ImportDashboardCommand) response.Response {
+func (hs *HTTPServer) ImportDashboard(c *models.ReqContext, apiCmd dtos.ImportDashboardCommand) response.Response {
 	if apiCmd.PluginId == "" && apiCmd.Dashboard == nil {
 		return response.Error(422, "Dashboard must be set", nil)
 	}
 
-	cmd := plugins.ImportDashboardCommand{
-		OrgId:     c.OrgId,
-		User:      c.SignedInUser,
-		PluginId:  apiCmd.PluginId,
-		Path:      apiCmd.Path,
-		Inputs:    apiCmd.Inputs,
-		Overwrite: apiCmd.Overwrite,
-		FolderId:  apiCmd.FolderId,
-		Dashboard: apiCmd.Dashboard,
-	}
-
-	if err := bus.Dispatch(&cmd); err != nil {
+	dashInfo, err := hs.PluginManager.ImportDashboard(apiCmd.PluginId, apiCmd.Path, c.OrgId, apiCmd.FolderId,
+		apiCmd.Dashboard, apiCmd.Overwrite, apiCmd.Inputs, c.SignedInUser, hs.DataService)
+	if err != nil {
 		return dashboardSaveErrorToApiResponse(err)
 	}
 
-	return response.JSON(200, cmd.Result)
+	return response.JSON(200, dashInfo)
 }
 
 // CollectPluginMetrics collect metrics from a plugin.
@@ -276,7 +268,7 @@ func ImportDashboard(c *models.ReqContext, apiCmd dtos.ImportDashboardCommand) r
 // /api/plugins/:pluginId/metrics
 func (hs *HTTPServer) CollectPluginMetrics(c *models.ReqContext) response.Response {
 	pluginID := c.Params("pluginId")
-	plugin, exists := plugins.Plugins[pluginID]
+	plugin, exists := manager.Plugins[pluginID]
 	if !exists {
 		return response.Error(404, "Plugin not found", nil)
 	}
diff --git a/pkg/expr/graph.go b/pkg/expr/graph.go
index 3fb60c1dc6c..d22a80e8526 100644
--- a/pkg/expr/graph.go
+++ b/pkg/expr/graph.go
@@ -28,7 +28,7 @@ type Node interface {
 	ID() int64 // ID() allows the gonum graph node interface to be fulfilled
 	NodeType() NodeType
 	RefID() string
-	Execute(c context.Context, vars mathexp.Vars) (mathexp.Results, error)
+	Execute(c context.Context, vars mathexp.Vars, s *Service) (mathexp.Results, error)
 	String() string
 }
@@ -37,10 +37,10 @@ type DataPipeline []Node
 
 // execute runs all the command/datasource requests in the pipeline return a
 // map of the refId of the of each command
-func (dp *DataPipeline) execute(c context.Context) (mathexp.Vars, error) {
+func (dp *DataPipeline) execute(c context.Context, s *Service) (mathexp.Vars, error) {
 	vars := make(mathexp.Vars)
 	for _, node := range *dp {
-		res, err := node.Execute(c, vars)
+		res, err := node.Execute(c, vars, s)
 		if err != nil {
 			return nil, err
 		}
@@ -52,8 +52,8 @@ func (dp *DataPipeline) execute(c context.Context) (mathexp.Vars, error) {
 
 // BuildPipeline builds a graph of the nodes, and returns the nodes in an
 // executable order.
-func buildPipeline(req *backend.QueryDataRequest) (DataPipeline, error) {
-	graph, err := buildDependencyGraph(req)
+func (s *Service) buildPipeline(req *backend.QueryDataRequest) (DataPipeline, error) {
+	graph, err := s.buildDependencyGraph(req)
 	if err != nil {
 		return nil, err
 	}
@@ -67,8 +67,8 @@ func buildPipeline(req *backend.QueryDataRequest) (DataPipeline, error) {
 }
 
 // buildDependencyGraph returns a dependency graph for a set of queries.
-func buildDependencyGraph(req *backend.QueryDataRequest) (*simple.DirectedGraph, error) {
-	graph, err := buildGraph(req)
+func (s *Service) buildDependencyGraph(req *backend.QueryDataRequest) (*simple.DirectedGraph, error) {
+	graph, err := s.buildGraph(req)
 	if err != nil {
 		return nil, err
 	}
@@ -113,7 +113,7 @@ func buildNodeRegistry(g *simple.DirectedGraph) map[string]Node {
 }
 
 // buildGraph creates a new graph populated with nodes for every query.
-func buildGraph(req *backend.QueryDataRequest) (*simple.DirectedGraph, error) {
+func (s *Service) buildGraph(req *backend.QueryDataRequest) (*simple.DirectedGraph, error) {
 	dp := simple.NewDirectedGraph()
 
 	for _, query := range req.Queries {
@@ -139,7 +139,7 @@ func buildGraph(req *backend.QueryDataRequest) (*simple.DirectedGraph, error) {
 		case DatasourceName:
 			node, err = buildCMDNode(dp, rn)
 		default: // If it's not an expression query, it's a data source query.
-			node, err = buildDSNode(dp, rn, req.PluginContext.OrgID)
+			node, err = s.buildDSNode(dp, rn, req.PluginContext.OrgID)
 		}
 		if err != nil {
 			return nil, err
diff --git a/pkg/expr/nodes.go b/pkg/expr/nodes.go
index a8c618f5fc9..b3de4dd5301 100644
--- a/pkg/expr/nodes.go
+++ b/pkg/expr/nodes.go
@@ -82,7 +82,7 @@ func (gn *CMDNode) NodeType() NodeType {
 
 // Execute runs the node and adds the results to vars. If the node requires
 // other nodes they must have already been executed and their results must
 // already by in vars.
-func (gn *CMDNode) Execute(ctx context.Context, vars mathexp.Vars) (mathexp.Results, error) {
+func (gn *CMDNode) Execute(ctx context.Context, vars mathexp.Vars, s *Service) (mathexp.Results, error) {
 	return gn.Command.Execute(ctx, vars)
 }
@@ -142,7 +142,7 @@ func (dn *DSNode) NodeType() NodeType {
 	return TypeDatasourceNode
 }
 
-func buildDSNode(dp *simple.DirectedGraph, rn *rawNode, orgID int64) (*DSNode, error) {
+func (s *Service) buildDSNode(dp *simple.DirectedGraph, rn *rawNode, orgID int64) (*DSNode, error) {
 	encodedQuery, err := json.Marshal(rn.Query)
 	if err != nil {
 		return nil, err
@@ -203,7 +203,7 @@ func buildDSNode(dp *simple.DirectedGraph, rn *rawNode, orgID int64) (*DSNode, e
 
 // Execute runs the node and adds the results to vars. If the node requires
 // other nodes they must have already been executed and their results must
 // already by in vars.
-func (dn *DSNode) Execute(ctx context.Context, vars mathexp.Vars) (mathexp.Results, error) {
+func (dn *DSNode) Execute(ctx context.Context, vars mathexp.Vars, s *Service) (mathexp.Results, error) {
 	pc := backend.PluginContext{
 		OrgID: dn.orgID,
 		DataSourceInstanceSettings: &backend.DataSourceInstanceSettings{
@@ -223,7 +223,7 @@ func (dn *DSNode) Execute(ctx context.Context, vars mathexp.Vars) (mathexp.Resul
 		},
 	}
 
-	resp, err := QueryData(ctx, &backend.QueryDataRequest{
+	resp, err := s.queryData(ctx, &backend.QueryDataRequest{
 		PluginContext: pc,
 		Queries:       q,
 	})
diff --git a/pkg/expr/service.go b/pkg/expr/service.go
index 39a7bd93026..fc1f4254f98 100644
--- a/pkg/expr/service.go
+++ b/pkg/expr/service.go
@@ -5,6 +5,7 @@ import (
 
 	"github.com/grafana/grafana-plugin-sdk-go/backend"
 	"github.com/grafana/grafana/pkg/setting"
+	"github.com/grafana/grafana/pkg/tsdb"
 )
 
 // DatasourceName is the string constant used as the datasource name in requests
@@ -21,7 +22,8 @@ const DatasourceUID = "-100"
 
 // Service is service representation for expression handling.
 type Service struct {
-	Cfg *setting.Cfg
+	Cfg         *setting.Cfg
+	DataService *tsdb.Service
 }
 
 func (s *Service) isDisabled() bool {
@@ -33,13 +35,13 @@ func (s *Service) isDisabled() bool {
 
 // BuildPipeline builds a pipeline from a request.
 func (s *Service) BuildPipeline(req *backend.QueryDataRequest) (DataPipeline, error) {
-	return buildPipeline(req)
+	return s.buildPipeline(req)
 }
 
 // ExecutePipeline executes an expression pipeline and returns all the results.
 func (s *Service) ExecutePipeline(ctx context.Context, pipeline DataPipeline) (*backend.QueryDataResponse, error) {
 	res := backend.NewQueryDataResponse()
-	vars, err := pipeline.execute(ctx)
+	vars, err := pipeline.execute(ctx, s)
 	if err != nil {
 		return nil, err
 	}
diff --git a/pkg/expr/service_test.go b/pkg/expr/service_test.go
index b6d4ae8b5b9..e53c24cf19a 100644
--- a/pkg/expr/service_test.go
+++ b/pkg/expr/service_test.go
@@ -12,6 +12,9 @@ import (
 	"github.com/grafana/grafana-plugin-sdk-go/data"
 	"github.com/grafana/grafana/pkg/bus"
 	"github.com/grafana/grafana/pkg/models"
+	"github.com/grafana/grafana/pkg/plugins"
+	"github.com/grafana/grafana/pkg/plugins/backendplugin"
+	"github.com/grafana/grafana/pkg/plugins/manager"
 	"github.com/grafana/grafana/pkg/tsdb"
 	"github.com/stretchr/testify/require"
 )
@@ -21,9 +24,21 @@ func TestService(t *testing.T) {
 		data.NewField("time", nil, []*time.Time{utp(1)}),
 		data.NewField("value", nil, []*float64{fp(2)}))
 
-	registerEndPoint(dsDF)
-
-	s := Service{}
+	dataSvc := tsdb.NewService()
+	dataSvc.PluginManager = &manager.PluginManager{
+		BackendPluginManager: fakeBackendPM{},
+	}
+	s := Service{DataService: &dataSvc}
+	me := &mockEndpoint{
+		Frames: []*data.Frame{dsDF},
+	}
+	s.DataService.RegisterQueryHandler("test", func(*models.DataSource) (plugins.DataPlugin, error) {
+		return me, nil
+	})
+	bus.AddHandler("test", func(query *models.GetDataSourceQuery) error {
+		query.Result = &models.DataSource{Id: 1, OrgId: 1, Type: "test"}
+		return nil
+	})
 
 	queries := []backend.DataQuery{
 		{
@@ -87,27 +102,21 @@ type mockEndpoint struct {
 	Frames data.Frames
 }
 
-func (me *mockEndpoint) Query(ctx context.Context, ds *models.DataSource, query *tsdb.TsdbQuery) (*tsdb.Response, error) {
-	return &tsdb.Response{
-		Results: map[string]*tsdb.QueryResult{
+func (me *mockEndpoint) DataQuery(ctx context.Context, ds *models.DataSource, query plugins.DataQuery) (
+	plugins.DataResponse, error) {
+	return plugins.DataResponse{
+		Results: map[string]plugins.DataQueryResult{
 			"A": {
-				Dataframes: tsdb.NewDecodedDataFrames(me.Frames),
+				Dataframes: plugins.NewDecodedDataFrames(me.Frames),
 			},
 		},
 	}, nil
 }
 
-func registerEndPoint(df ...*data.Frame) {
-	me := &mockEndpoint{
-		Frames: df,
-	}
-	endpoint := func(dsInfo *models.DataSource) (tsdb.TsdbQueryEndpoint, error) {
-		return me, nil
-	}
-
-	tsdb.RegisterTsdbQueryEndpoint("test", endpoint)
-	bus.AddHandler("test", func(query *models.GetDataSourceQuery) error {
-		query.Result = &models.DataSource{Id: 1, OrgId: 1, Type: "test"}
-		return nil
-	})
+type fakeBackendPM struct {
+	backendplugin.Manager
+}
+
+func (pm fakeBackendPM) GetDataPlugin(string) interface{} {
+	return nil
 }
diff --git a/pkg/expr/transform.go b/pkg/expr/transform.go
index 262ca6cc1d0..b29d67d0b0d 100644
--- a/pkg/expr/transform.go
+++ b/pkg/expr/transform.go
@@ -10,7 +10,7 @@ import (
 	"github.com/grafana/grafana/pkg/bus"
 	"github.com/grafana/grafana/pkg/components/simplejson"
 	"github.com/grafana/grafana/pkg/models"
-	"github.com/grafana/grafana/pkg/tsdb"
+	"github.com/grafana/grafana/pkg/plugins"
"github.com/prometheus/client_golang/prometheus" "golang.org/x/net/context" "google.golang.org/grpc/codes" @@ -35,7 +35,7 @@ func init() { } // WrapTransformData creates and executes transform requests -func (s *Service) WrapTransformData(ctx context.Context, query *tsdb.TsdbQuery) (*tsdb.Response, error) { +func (s *Service) WrapTransformData(ctx context.Context, query plugins.DataQuery) (plugins.DataResponse, error) { sdkReq := &backend.QueryDataRequest{ PluginContext: backend.PluginContext{ OrgID: query.User.OrgId, @@ -46,12 +46,12 @@ func (s *Service) WrapTransformData(ctx context.Context, query *tsdb.TsdbQuery) for _, q := range query.Queries { modelJSON, err := q.Model.MarshalJSON() if err != nil { - return nil, err + return plugins.DataResponse{}, err } sdkReq.Queries = append(sdkReq.Queries, backend.DataQuery{ JSON: modelJSON, - Interval: time.Duration(q.IntervalMs) * time.Millisecond, - RefID: q.RefId, + Interval: time.Duration(q.IntervalMS) * time.Millisecond, + RefID: q.RefID, MaxDataPoints: q.MaxDataPoints, QueryType: q.QueryType, TimeRange: backend.TimeRange{ @@ -62,16 +62,16 @@ func (s *Service) WrapTransformData(ctx context.Context, query *tsdb.TsdbQuery) } pbRes, err := s.TransformData(ctx, sdkReq) if err != nil { - return nil, err + return plugins.DataResponse{}, err } - tR := &tsdb.Response{ - Results: make(map[string]*tsdb.QueryResult, len(pbRes.Responses)), + tR := plugins.DataResponse{ + Results: make(map[string]plugins.DataQueryResult, len(pbRes.Responses)), } for refID, res := range pbRes.Responses { - tRes := &tsdb.QueryResult{ - RefId: refID, - Dataframes: tsdb.NewDecodedDataFrames(res.Frames), + tRes := plugins.DataQueryResult{ + RefID: refID, + Dataframes: plugins.NewDecodedDataFrames(res.Frames), } // if len(res.JsonMeta) != 0 { // tRes.Meta = simplejson.NewFromAny(res.JsonMeta) @@ -158,9 +158,9 @@ func hiddenRefIDs(queries []backend.DataQuery) (map[string]struct{}, error) { return hidden, nil } -// QueryData is called used to query datasources that are not expression commands, but are used +// queryData is called used to query datasources that are not expression commands, but are used // alongside expressions and/or are the input of an expression command. -func QueryData(ctx context.Context, req *backend.QueryDataRequest) (*backend.QueryDataResponse, error) { +func (s *Service) queryData(ctx context.Context, req *backend.QueryDataRequest) (*backend.QueryDataResponse, error) { if len(req.Queries) == 0 { return nil, fmt.Errorf("zero queries found in datasource request") } @@ -184,15 +184,15 @@ func QueryData(ctx context.Context, req *backend.QueryDataRequest) (*backend.Que } // Convert plugin-model (datasource) queries to tsdb queries - queries := make([]*tsdb.Query, len(req.Queries)) + queries := make([]plugins.DataSubQuery, len(req.Queries)) for i, query := range req.Queries { sj, err := simplejson.NewJson(query.JSON) if err != nil { return nil, err } - queries[i] = &tsdb.Query{ - RefId: query.RefID, - IntervalMs: query.Interval.Milliseconds(), + queries[i] = plugins.DataSubQuery{ + RefID: query.RefID, + IntervalMS: query.Interval.Milliseconds(), MaxDataPoints: query.MaxDataPoints, QueryType: query.QueryType, DataSource: getDsInfo.Result, @@ -201,20 +201,21 @@ func QueryData(ctx context.Context, req *backend.QueryDataRequest) (*backend.Que } // For now take Time Range from first query. 
-	timeRange := tsdb.NewTimeRange(strconv.FormatInt(req.Queries[0].TimeRange.From.Unix()*1000, 10), strconv.FormatInt(req.Queries[0].TimeRange.To.Unix()*1000, 10))
+	timeRange := plugins.NewDataTimeRange(strconv.FormatInt(req.Queries[0].TimeRange.From.Unix()*1000, 10),
+		strconv.FormatInt(req.Queries[0].TimeRange.To.Unix()*1000, 10))
 
-	tQ := &tsdb.TsdbQuery{
-		TimeRange: timeRange,
+	tQ := plugins.DataQuery{
+		TimeRange: &timeRange,
 		Queries:   queries,
 	}
 
 	// Execute the converted queries
-	tsdbRes, err := tsdb.HandleRequest(ctx, getDsInfo.Result, tQ)
+	tsdbRes, err := s.DataService.HandleRequest(ctx, getDsInfo.Result, tQ)
 	if err != nil {
 		return nil, err
 	}
 	// Convert tsdb results (map) to plugin-model/datasource (slice) results.
-	// Only error, tsdb.Series, and encoded Dataframes responses are mapped.
+	// Only error, Series, and encoded Dataframes responses are mapped.
 	responses := make(map[string]backend.DataResponse, len(tsdbRes.Results))
 	for refID, res := range tsdbRes.Results {
 		pRes := backend.DataResponse{}
@@ -233,7 +234,7 @@ func QueryData(ctx context.Context, req *backend.QueryDataRequest) (*backend.Que
 		}
 
 		for _, series := range res.Series {
-			frame, err := tsdb.SeriesToFrame(series)
+			frame, err := plugins.SeriesToFrame(series)
 			frame.RefID = refID
 			if err != nil {
 				return nil, err
diff --git a/pkg/infra/usagestats/usage_stats.go b/pkg/infra/usagestats/usage_stats.go
index 34ec0dfa0af..9479a9a24c9 100644
--- a/pkg/infra/usagestats/usage_stats.go
+++ b/pkg/infra/usagestats/usage_stats.go
@@ -12,7 +12,7 @@ import (
 
 	"github.com/grafana/grafana/pkg/infra/metrics"
 	"github.com/grafana/grafana/pkg/models"
-	"github.com/grafana/grafana/pkg/plugins"
+	"github.com/grafana/grafana/pkg/plugins/manager"
 	"github.com/grafana/grafana/pkg/setting"
 )
@@ -53,9 +53,9 @@ func (uss *UsageStatsService) GetUsageReport(ctx context.Context) (UsageReport,
 	metrics["stats.users.count"] = statsQuery.Result.Users
 	metrics["stats.orgs.count"] = statsQuery.Result.Orgs
 	metrics["stats.playlist.count"] = statsQuery.Result.Playlists
-	metrics["stats.plugins.apps.count"] = len(plugins.Apps)
-	metrics["stats.plugins.panels.count"] = len(plugins.Panels)
-	metrics["stats.plugins.datasources.count"] = len(plugins.DataSources)
+	metrics["stats.plugins.apps.count"] = len(manager.Apps)
+	metrics["stats.plugins.panels.count"] = len(manager.Panels)
+	metrics["stats.plugins.datasources.count"] = len(manager.DataSources)
 	metrics["stats.alerts.count"] = statsQuery.Result.Alerts
 	metrics["stats.active_users.count"] = statsQuery.Result.ActiveUsers
 	metrics["stats.datasources.count"] = statsQuery.Result.Datasources
@@ -291,7 +291,7 @@ func (uss *UsageStatsService) updateTotalStats() {
 }
 
 func (uss *UsageStatsService) shouldBeReported(dsType string) bool {
-	ds, ok := plugins.DataSources[dsType]
+	ds, ok := manager.DataSources[dsType]
 	if !ok {
 		return false
 	}
diff --git a/pkg/infra/usagestats/usage_stats_test.go b/pkg/infra/usagestats/usage_stats_test.go
index bdb8d206887..4973e086a00 100644
--- a/pkg/infra/usagestats/usage_stats_test.go
+++ b/pkg/infra/usagestats/usage_stats_test.go
@@ -10,6 +10,8 @@ import (
 	"testing"
 	"time"
 
+	"github.com/grafana/grafana/pkg/plugins"
+	"github.com/grafana/grafana/pkg/plugins/manager"
 	"github.com/grafana/grafana/pkg/services/alerting"
 	"github.com/grafana/grafana/pkg/services/licensing"
 	"github.com/stretchr/testify/require"
@@ -20,7 +22,6 @@ import (
 	"github.com/grafana/grafana/pkg/bus"
 	"github.com/grafana/grafana/pkg/components/simplejson"
 	"github.com/grafana/grafana/pkg/models"
-	"github.com/grafana/grafana/pkg/plugins"
 	"github.com/grafana/grafana/pkg/services/sqlstore"
 	"github.com/grafana/grafana/pkg/setting"
 	"github.com/stretchr/testify/assert"
@@ -248,9 +249,9 @@ func TestMetrics(t *testing.T) {
 		assert.Equal(t, getSystemStatsQuery.Result.Users, metrics.Get("stats.users.count").MustInt64())
 		assert.Equal(t, getSystemStatsQuery.Result.Orgs, metrics.Get("stats.orgs.count").MustInt64())
 		assert.Equal(t, getSystemStatsQuery.Result.Playlists, metrics.Get("stats.playlist.count").MustInt64())
-		assert.Equal(t, len(plugins.Apps), metrics.Get("stats.plugins.apps.count").MustInt())
-		assert.Equal(t, len(plugins.Panels), metrics.Get("stats.plugins.panels.count").MustInt())
-		assert.Equal(t, len(plugins.DataSources), metrics.Get("stats.plugins.datasources.count").MustInt())
+		assert.Equal(t, len(manager.Apps), metrics.Get("stats.plugins.apps.count").MustInt())
+		assert.Equal(t, len(manager.Panels), metrics.Get("stats.plugins.panels.count").MustInt())
+		assert.Equal(t, len(manager.DataSources), metrics.Get("stats.plugins.datasources.count").MustInt())
 		assert.Equal(t, getSystemStatsQuery.Result.Alerts, metrics.Get("stats.alerts.count").MustInt64())
 		assert.Equal(t, getSystemStatsQuery.Result.ActiveUsers, metrics.Get("stats.active_users.count").MustInt64())
 		assert.Equal(t, getSystemStatsQuery.Result.Datasources, metrics.Get("stats.datasources.count").MustInt64())
@@ -530,19 +531,19 @@ func (aum *alertingUsageMock) QueryUsageStats() (*alerting.UsageStats, error) {
 }
 
 func setupSomeDataSourcePlugins(t *testing.T) {
-	originalDataSources := plugins.DataSources
-	t.Cleanup(func() { plugins.DataSources = originalDataSources })
+	originalDataSources := manager.DataSources
+	t.Cleanup(func() { manager.DataSources = originalDataSources })
 
-	plugins.DataSources = make(map[string]*plugins.DataSourcePlugin)
+	manager.DataSources = make(map[string]*plugins.DataSourcePlugin)
 
-	plugins.DataSources[models.DS_ES] = &plugins.DataSourcePlugin{
+	manager.DataSources[models.DS_ES] = &plugins.DataSourcePlugin{
 		FrontendPluginBase: plugins.FrontendPluginBase{
 			PluginBase: plugins.PluginBase{
 				Signature: "internal",
 			},
 		},
 	}
-	plugins.DataSources[models.DS_PROMETHEUS] = &plugins.DataSourcePlugin{
+	manager.DataSources[models.DS_PROMETHEUS] = &plugins.DataSourcePlugin{
 		FrontendPluginBase: plugins.FrontendPluginBase{
 			PluginBase: plugins.PluginBase{
 				Signature: "internal",
@@ -550,7 +551,7 @@ func setupSomeDataSourcePlugins(t *testing.T) {
 		},
 	}
 
-	plugins.DataSources[models.DS_GRAPHITE] = &plugins.DataSourcePlugin{
+	manager.DataSources[models.DS_GRAPHITE] = &plugins.DataSourcePlugin{
 		FrontendPluginBase: plugins.FrontendPluginBase{
 			PluginBase: plugins.PluginBase{
 				Signature: "internal",
@@ -558,7 +559,7 @@ func setupSomeDataSourcePlugins(t *testing.T) {
 		},
 	}
 
-	plugins.DataSources[models.DS_MYSQL] = &plugins.DataSourcePlugin{
+	manager.DataSources[models.DS_MYSQL] = &plugins.DataSourcePlugin{
 		FrontendPluginBase: plugins.FrontendPluginBase{
 			PluginBase: plugins.PluginBase{
 				Signature: "internal",
diff --git a/pkg/plugins/adapters/adapters.go b/pkg/plugins/adapters/adapters.go
new file mode 100644
index 00000000000..f3ff23d89c5
--- /dev/null
+++ b/pkg/plugins/adapters/adapters.go
@@ -0,0 +1,42 @@
+// Package adapters contains plugin SDK adapters.
+package adapters
+
+import (
+	"github.com/grafana/grafana-plugin-sdk-go/backend"
+	"github.com/grafana/grafana/pkg/models"
+)
+
+// ModelToInstanceSettings converts a models.DataSource to a backend.DataSourceInstanceSettings.
+func ModelToInstanceSettings(ds *models.DataSource) (*backend.DataSourceInstanceSettings, error) {
+	jsonDataBytes, err := ds.JsonData.MarshalJSON()
+	if err != nil {
+		return nil, err
+	}
+
+	return &backend.DataSourceInstanceSettings{
+		ID:                      ds.Id,
+		Name:                    ds.Name,
+		URL:                     ds.Url,
+		Database:                ds.Database,
+		User:                    ds.User,
+		BasicAuthEnabled:        ds.BasicAuth,
+		BasicAuthUser:           ds.BasicAuthUser,
+		JSONData:                jsonDataBytes,
+		DecryptedSecureJSONData: ds.DecryptedValues(),
+		Updated:                 ds.Updated,
+	}, nil
+}
+
+// BackendUserFromSignedInUser converts Grafana's SignedInUser model
+// to the backend plugin's model.
+func BackendUserFromSignedInUser(su *models.SignedInUser) *backend.User {
+	if su == nil {
+		return nil
+	}
+
+	return &backend.User{
+		Login: su.Login,
+		Name:  su.Name,
+		Email: su.Email,
+		Role:  string(su.OrgRole),
+	}
+}
diff --git a/pkg/plugins/app_plugin.go b/pkg/plugins/app_plugin.go
index 40ef49efa73..1cbc003378d 100644
--- a/pkg/plugins/app_plugin.go
+++ b/pkg/plugins/app_plugin.go
@@ -59,33 +59,29 @@ type JwtTokenAuth struct {
 	Params map[string]string `json:"params"`
 }
 
-func (app *AppPlugin) Load(decoder *json.Decoder, base *PluginBase, backendPluginManager backendplugin.Manager) error {
+func (app *AppPlugin) Load(decoder *json.Decoder, base *PluginBase, backendPluginManager backendplugin.Manager) (
+	interface{}, error) {
 	if err := decoder.Decode(app); err != nil {
-		return err
-	}
-
-	if err := app.registerPlugin(base); err != nil {
-		return err
+		return nil, err
 	}
 
 	if app.Backend {
 		cmd := ComposePluginStartCommand(app.Executable)
-		fullpath := filepath.Join(app.PluginDir, cmd)
+		fullpath := filepath.Join(base.PluginDir, cmd)
 		factory := grpcplugin.NewBackendPlugin(app.Id, fullpath, grpcplugin.PluginStartFuncs{})
 		if err := backendPluginManager.Register(app.Id, factory); err != nil {
-			return errutil.Wrapf(err, "failed to register backend plugin")
+			return nil, errutil.Wrapf(err, "failed to register backend plugin")
 		}
 	}
 
-	Apps[app.Id] = app
-	return nil
+	return app, nil
 }
 
-func (app *AppPlugin) initApp() {
-	app.initFrontendPlugin()
+func (app *AppPlugin) InitApp(panels map[string]*PanelPlugin, dataSources map[string]*DataSourcePlugin) []*PluginStaticRoute {
+	staticRoutes := app.InitFrontendPlugin()
 
 	// check if we have child panels
-	for _, panel := range Panels {
+	for _, panel := range panels {
 		if strings.HasPrefix(panel.PluginDir, app.PluginDir) {
 			panel.setPathsBasedOnApp(app)
 			app.FoundChildPlugins = append(app.FoundChildPlugins, &PluginInclude{
@@ -97,7 +93,7 @@
 	}
 
 	// check if we have child datasources
-	for _, ds := range DataSources {
+	for _, ds := range dataSources {
 		if strings.HasPrefix(ds.PluginDir, app.PluginDir) {
 			ds.setPathsBasedOnApp(app)
 			app.FoundChildPlugins = append(app.FoundChildPlugins, &PluginInclude{
@@ -120,4 +116,6 @@
 		}
 		app.DefaultNavUrl = setting.AppSubUrl + "/dashboard/db/" + include.Slug
 	}
+
+	return staticRoutes
 }
diff --git a/pkg/plugins/backendplugin/backendplugin.go b/pkg/plugins/backendplugin/backendplugin.go
new file mode 100644
index 00000000000..00ee458431b
--- /dev/null
+++ b/pkg/plugins/backendplugin/backendplugin.go
@@ -0,0 +1,9 @@
+// Package backendplugin contains backend plugin related logic.
+package backendplugin
+
+import (
+	"github.com/grafana/grafana/pkg/infra/log"
+)
+
+// PluginFactoryFunc is a function type for creating a Plugin.
+type PluginFactoryFunc func(pluginID string, logger log.Logger, env []string) (Plugin, error) diff --git a/pkg/plugins/backendplugin/coreplugin/core_plugin.go b/pkg/plugins/backendplugin/coreplugin/core_plugin.go index 7732aadf71e..bfe28e35f75 100644 --- a/pkg/plugins/backendplugin/coreplugin/core_plugin.go +++ b/pkg/plugins/backendplugin/coreplugin/core_plugin.go @@ -6,14 +6,16 @@ import ( "github.com/grafana/grafana-plugin-sdk-go/backend" "github.com/grafana/grafana/pkg/infra/log" "github.com/grafana/grafana/pkg/models" + "github.com/grafana/grafana/pkg/plugins" "github.com/grafana/grafana/pkg/plugins/backendplugin" - "github.com/grafana/grafana/pkg/tsdb" + "github.com/grafana/grafana/pkg/plugins/backendplugin/instrumentation" ) // corePlugin represents a plugin that's part of Grafana core. type corePlugin struct { - pluginID string - logger log.Logger + isDataPlugin bool + pluginID string + logger log.Logger backend.CheckHealthHandler backend.CallResourceHandler backend.QueryDataHandler @@ -21,7 +23,7 @@ type corePlugin struct { // New returns a new backendplugin.PluginFactoryFunc for creating a core (built-in) backendplugin.Plugin. func New(opts backend.ServeOpts) backendplugin.PluginFactoryFunc { - return backendplugin.PluginFactoryFunc(func(pluginID string, logger log.Logger, env []string) (backendplugin.Plugin, error) { + return func(pluginID string, logger log.Logger, env []string) (backendplugin.Plugin, error) { return &corePlugin{ pluginID: pluginID, logger: logger, @@ -29,7 +31,7 @@ func New(opts backend.ServeOpts) backendplugin.PluginFactoryFunc { CallResourceHandler: opts.CallResourceHandler, QueryDataHandler: opts.QueryDataHandler, }, nil - }) + } } func (cp *corePlugin) PluginID() string { @@ -40,11 +42,21 @@ func (cp *corePlugin) Logger() log.Logger { return cp.logger } +func (cp *corePlugin) CanHandleDataQueries() bool { + return cp.isDataPlugin +} + +func (cp *corePlugin) DataQuery(ctx context.Context, dsInfo *models.DataSource, + tsdbQuery plugins.DataQuery) (plugins.DataResponse, error) { + // TODO: Inline the adapter, since it shouldn't be necessary + adapter := newQueryEndpointAdapter(cp.pluginID, cp.logger, instrumentation.InstrumentQueryDataHandler( + cp.QueryDataHandler)) + return adapter.DataQuery(ctx, dsInfo, tsdbQuery) +} + func (cp *corePlugin) Start(ctx context.Context) error { if cp.QueryDataHandler != nil { - tsdb.RegisterTsdbQueryEndpoint(cp.pluginID, func(dsInfo *models.DataSource) (tsdb.TsdbQueryEndpoint, error) { - return newQueryEndpointAdapter(cp.pluginID, cp.logger, backendplugin.InstrumentQueryDataHandler(cp.QueryDataHandler)), nil - }) + cp.isDataPlugin = true } return nil } diff --git a/pkg/plugins/backendplugin/coreplugin/core_plugin_test.go b/pkg/plugins/backendplugin/coreplugin/core_plugin_test.go index 18732afc5b0..2bd0a86a067 100644 --- a/pkg/plugins/backendplugin/coreplugin/core_plugin_test.go +++ b/pkg/plugins/backendplugin/coreplugin/core_plugin_test.go @@ -36,11 +36,13 @@ func TestCorePlugin(t *testing.T) { checkHealthCalled := false callResourceCalled := false factory := coreplugin.New(backend.ServeOpts{ - CheckHealthHandler: backend.CheckHealthHandlerFunc(func(ctx context.Context, req *backend.CheckHealthRequest) (*backend.CheckHealthResult, error) { + CheckHealthHandler: backend.CheckHealthHandlerFunc(func(ctx context.Context, + req *backend.CheckHealthRequest) (*backend.CheckHealthResult, error) { checkHealthCalled = true return nil, nil }), - CallResourceHandler: backend.CallResourceHandlerFunc(func(ctx context.Context, req 
*backend.CallResourceRequest, sender backend.CallResourceResponseSender) error { + CallResourceHandler: backend.CallResourceHandlerFunc(func(ctx context.Context, + req *backend.CallResourceRequest, sender backend.CallResourceResponseSender) error { callResourceCalled = true return nil }), diff --git a/pkg/plugins/backendplugin/coreplugin/query_endpoint_adapter.go b/pkg/plugins/backendplugin/coreplugin/query_endpoint_adapter.go index 311d06cbf4b..d75b4cf7a4b 100644 --- a/pkg/plugins/backendplugin/coreplugin/query_endpoint_adapter.go +++ b/pkg/plugins/backendplugin/coreplugin/query_endpoint_adapter.go @@ -7,11 +7,11 @@ import ( "github.com/grafana/grafana-plugin-sdk-go/backend" "github.com/grafana/grafana/pkg/infra/log" "github.com/grafana/grafana/pkg/models" - "github.com/grafana/grafana/pkg/plugins/datasource/wrapper" - "github.com/grafana/grafana/pkg/tsdb" + "github.com/grafana/grafana/pkg/plugins" + "github.com/grafana/grafana/pkg/plugins/adapters" ) -func newQueryEndpointAdapter(pluginID string, logger log.Logger, handler backend.QueryDataHandler) tsdb.TsdbQueryEndpoint { +func newQueryEndpointAdapter(pluginID string, logger log.Logger, handler backend.QueryDataHandler) plugins.DataPlugin { return &queryEndpointAdapter{ pluginID: pluginID, logger: logger, @@ -45,17 +45,18 @@ func modelToInstanceSettings(ds *models.DataSource) (*backend.DataSourceInstance }, nil } -func (a *queryEndpointAdapter) Query(ctx context.Context, ds *models.DataSource, query *tsdb.TsdbQuery) (*tsdb.Response, error) { +func (a *queryEndpointAdapter) DataQuery(ctx context.Context, ds *models.DataSource, query plugins.DataQuery) ( + plugins.DataResponse, error) { instanceSettings, err := modelToInstanceSettings(ds) if err != nil { - return nil, err + return plugins.DataResponse{}, err } req := &backend.QueryDataRequest{ PluginContext: backend.PluginContext{ OrgID: ds.OrgId, PluginID: a.pluginID, - User: wrapper.BackendUserFromSignedInUser(query.User), + User: adapters.BackendUserFromSignedInUser(query.User), DataSourceInstanceSettings: instanceSettings, }, Queries: []backend.DataQuery{}, @@ -65,11 +66,11 @@ func (a *queryEndpointAdapter) Query(ctx context.Context, ds *models.DataSource, for _, q := range query.Queries { modelJSON, err := q.Model.MarshalJSON() if err != nil { - return nil, err + return plugins.DataResponse{}, err } req.Queries = append(req.Queries, backend.DataQuery{ - RefID: q.RefId, - Interval: time.Duration(q.IntervalMs) * time.Millisecond, + RefID: q.RefID, + Interval: time.Duration(q.IntervalMS) * time.Millisecond, MaxDataPoints: q.MaxDataPoints, TimeRange: backend.TimeRange{ From: query.TimeRange.GetFromAsTimeUTC(), @@ -82,16 +83,16 @@ func (a *queryEndpointAdapter) Query(ctx context.Context, ds *models.DataSource, resp, err := a.handler.QueryData(ctx, req) if err != nil { - return nil, err + return plugins.DataResponse{}, err } - tR := &tsdb.Response{ - Results: make(map[string]*tsdb.QueryResult, len(resp.Responses)), + tR := plugins.DataResponse{ + Results: make(map[string]plugins.DataQueryResult, len(resp.Responses)), } for refID, r := range resp.Responses { - qr := &tsdb.QueryResult{ - RefId: refID, + qr := plugins.DataQueryResult{ + RefID: refID, } for _, f := range r.Frames { @@ -100,7 +101,7 @@ func (a *queryEndpointAdapter) Query(ctx context.Context, ds *models.DataSource, } } - qr.Dataframes = tsdb.NewDecodedDataFrames(r.Frames) + qr.Dataframes = plugins.NewDecodedDataFrames(r.Frames) if r.Error != nil { qr.Error = r.Error diff --git a/pkg/plugins/backendplugin/errors.go 
b/pkg/plugins/backendplugin/errors.go new file mode 100644 index 00000000000..9da8254788f --- /dev/null +++ b/pkg/plugins/backendplugin/errors.go @@ -0,0 +1,14 @@ +package backendplugin + +import "errors" + +var ( + // ErrPluginNotRegistered error returned when plugin not registered. + ErrPluginNotRegistered = errors.New("plugin not registered") + // ErrHealthCheckFailed error returned when health check failed. + ErrHealthCheckFailed = errors.New("health check failed") + // ErrPluginUnavailable error returned when plugin is unavailable. + ErrPluginUnavailable = errors.New("plugin unavailable") + // ErrMethodNotImplemented error returned when plugin method not implemented. + ErrMethodNotImplemented = errors.New("method not implemented") +)
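Callers are expected to match these sentinels with errors.Is, which is what handleCallResourceError does further down in this diff when it maps them to HTTP status codes. A minimal sketch of that pattern (the describeHealthError helper is hypothetical, for illustration only):

import (
	"errors"

	"github.com/grafana/grafana/pkg/plugins/backendplugin"
)

// describeHealthError is a hypothetical helper mapping the package's
// sentinel errors to user-facing messages, mirroring how
// handleCallResourceError maps them to HTTP status codes.
func describeHealthError(err error) string {
	switch {
	case err == nil:
		return "ok"
	case errors.Is(err, backendplugin.ErrPluginNotRegistered):
		return "plugin not registered"
	case errors.Is(err, backendplugin.ErrMethodNotImplemented):
		return "plugin does not implement health checks"
	case errors.Is(err, backendplugin.ErrHealthCheckFailed):
		return "health check failed"
	default:
		return "unexpected error: " + err.Error()
	}
}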
diff --git a/pkg/plugins/backendplugin/grpcplugin/client_v1.go b/pkg/plugins/backendplugin/grpcplugin/client_v1.go index 57ad97f1350..7fe1fe2573c 100644 --- a/pkg/plugins/backendplugin/grpcplugin/client_v1.go +++ b/pkg/plugins/backendplugin/grpcplugin/client_v1.go @@ -8,6 +8,7 @@ import ( "github.com/grafana/grafana-plugin-sdk-go/backend" "github.com/grafana/grafana/pkg/infra/log" "github.com/grafana/grafana/pkg/plugins/backendplugin" + "github.com/grafana/grafana/pkg/plugins/backendplugin/instrumentation" "github.com/hashicorp/go-plugin" ) @@ -75,7 +76,7 @@ func instrumentDatasourcePluginV1(plugin datasourceV1.DatasourcePlugin) datasour return datasourceV1QueryFunc(func(ctx context.Context, req *datasourceV1.DatasourceRequest) (*datasourceV1.DatasourceResponse, error) { var resp *datasourceV1.DatasourceResponse - err := backendplugin.InstrumentQueryDataRequest(req.Datasource.Type, func() (innerErr error) { + err := instrumentation.InstrumentQueryDataRequest(req.Datasource.Type, func() (innerErr error) { resp, innerErr = plugin.Query(ctx, req) return }) diff --git a/pkg/plugins/backendplugin/grpcplugin/client_v2.go b/pkg/plugins/backendplugin/grpcplugin/client_v2.go index 42255b8bd3b..68a175f492d 100644 --- a/pkg/plugins/backendplugin/grpcplugin/client_v2.go +++ b/pkg/plugins/backendplugin/grpcplugin/client_v2.go @@ -10,6 +10,7 @@ import ( "github.com/grafana/grafana-plugin-sdk-go/genproto/pluginv2" "github.com/grafana/grafana/pkg/infra/log" "github.com/grafana/grafana/pkg/plugins/backendplugin" + "github.com/grafana/grafana/pkg/plugins/backendplugin/instrumentation" "github.com/grafana/grafana/pkg/plugins/backendplugin/pluginextensionv2" "github.com/grafana/grafana/pkg/util/errutil" "github.com/hashicorp/go-plugin" @@ -170,7 +171,7 @@ func instrumentDataClient(plugin grpcplugin.DataClient) grpcplugin.DataClient { return dataClientQueryDataFunc(func(ctx context.Context, req *pluginv2.QueryDataRequest, opts ...grpc.CallOption) (*pluginv2.QueryDataResponse, error) { var resp *pluginv2.QueryDataResponse - err := backendplugin.InstrumentQueryDataRequest(req.PluginContext.PluginId, func() (innerErr error) { + err := instrumentation.InstrumentQueryDataRequest(req.PluginContext.PluginId, func() (innerErr error) { resp, innerErr = plugin.QueryData(ctx, req) return }) diff --git a/pkg/plugins/backendplugin/grpcplugin/grpc_plugin.go b/pkg/plugins/backendplugin/grpcplugin/grpc_plugin.go index 5e3cc790287..f724b07f9ad 100644 --- a/pkg/plugins/backendplugin/grpcplugin/grpc_plugin.go +++ b/pkg/plugins/backendplugin/grpcplugin/grpc_plugin.go @@ -28,7 +28,7 @@ type grpcPlugin struct { // newPlugin allocates and returns a new gRPC (external) backendplugin.Plugin. func newPlugin(descriptor PluginDescriptor) backendplugin.PluginFactoryFunc { - return backendplugin.PluginFactoryFunc(func(pluginID string, logger log.Logger, env []string) (backendplugin.Plugin, error) { + return func(pluginID string, logger log.Logger, env []string) (backendplugin.Plugin, error) { return &grpcPlugin{ descriptor: descriptor, logger: logger, @@ -36,7 +36,11 @@ func newPlugin(descriptor PluginDescriptor) backendplugin.PluginFactoryFunc { return plugin.NewClient(newClientConfig(descriptor.executablePath, env, logger, descriptor.versionedPlugins)) }, }, nil - }) + } +} + +func (p *grpcPlugin) CanHandleDataQueries() bool { + return false } func (p *grpcPlugin) PluginID() string { diff --git a/pkg/plugins/backendplugin/ifaces.go b/pkg/plugins/backendplugin/ifaces.go new file mode 100644 index 00000000000..832d0aab8c0 --- /dev/null +++ b/pkg/plugins/backendplugin/ifaces.go @@ -0,0 +1,40 @@ +package backendplugin + +import ( + "context" + + "github.com/grafana/grafana-plugin-sdk-go/backend" + "github.com/grafana/grafana/pkg/infra/log" + "github.com/grafana/grafana/pkg/models" +) + +// Manager manages backend plugins. +type Manager interface { + // Register registers a backend plugin + Register(pluginID string, factory PluginFactoryFunc) error + // StartPlugin starts a non-managed backend plugin + StartPlugin(ctx context.Context, pluginID string) error + // CollectMetrics collects metrics from a registered backend plugin. + CollectMetrics(ctx context.Context, pluginID string) (*backend.CollectMetricsResult, error) + // CheckHealth checks the health of a registered backend plugin. + CheckHealth(ctx context.Context, pCtx backend.PluginContext) (*backend.CheckHealthResult, error) + // CallResource calls a plugin resource. + CallResource(pluginConfig backend.PluginContext, ctx *models.ReqContext, path string) + // GetDataPlugin gets a DataPlugin with a certain ID or nil if it doesn't exist. + // TODO: interface{} is the return type in order to break a dependency cycle. Should be plugins.DataPlugin. + GetDataPlugin(pluginID string) interface{} +} + +// Plugin is the backend plugin interface. +type Plugin interface { + PluginID() string + Logger() log.Logger + Start(ctx context.Context) error + Stop(ctx context.Context) error + IsManaged() bool + Exited() bool + CanHandleDataQueries() bool + backend.CollectMetricsHandler + backend.CheckHealthHandler + backend.CallResourceHandler +}
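As the TODO on GetDataPlugin notes, the method returns interface{} only to break the import cycle between this package and pkg/plugins, so callers outside both packages have to recover the typed interface with an assertion. A minimal sketch of such a caller (the resolveDataPlugin helper is hypothetical, for illustration only):

import (
	"github.com/grafana/grafana/pkg/plugins"
	"github.com/grafana/grafana/pkg/plugins/backendplugin"
)

// resolveDataPlugin is a hypothetical helper showing how a caller turns
// Manager.GetDataPlugin's interface{} result back into a plugins.DataPlugin.
func resolveDataPlugin(m backendplugin.Manager, pluginID string) (plugins.DataPlugin, bool) {
	raw := m.GetDataPlugin(pluginID)
	if raw == nil {
		// Unknown plugin, or one that can't handle data queries.
		return nil, false
	}
	dp, ok := raw.(plugins.DataPlugin)
	return dp, ok
}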
diff --git a/pkg/plugins/backendplugin/instrumentation.go b/pkg/plugins/backendplugin/instrumentation/instrumentation.go similarity index 82% rename from pkg/plugins/backendplugin/instrumentation.go rename to pkg/plugins/backendplugin/instrumentation/instrumentation.go index cc11a4d0ade..dfdf21dfcba 100644 --- a/pkg/plugins/backendplugin/instrumentation.go +++ b/pkg/plugins/backendplugin/instrumentation/instrumentation.go @@ -1,4 +1,5 @@ -package backendplugin +// Package instrumentation contains backend plugin instrumentation logic. +package instrumentation import ( "context" @@ -48,19 +49,22 @@ func instrumentPluginRequest(pluginID string, endpoint string, fn func() error) return err } -func instrumentCollectMetrics(pluginID string, fn func() error) error { +// InstrumentCollectMetrics instruments collectMetrics. +func InstrumentCollectMetrics(pluginID string, fn func() error) error { return instrumentPluginRequest(pluginID, "collectMetrics", fn) } -func instrumentCheckHealthRequest(pluginID string, fn func() error) error { +// InstrumentCheckHealthRequest instruments checkHealth. +func InstrumentCheckHealthRequest(pluginID string, fn func() error) error { return instrumentPluginRequest(pluginID, "checkHealth", fn) } -func instrumentCallResourceRequest(pluginID string, fn func() error) error { +// InstrumentCallResourceRequest instruments callResource. +func InstrumentCallResourceRequest(pluginID string, fn func() error) error { return instrumentPluginRequest(pluginID, "callResource", fn) } -// InstrumentQueryDataRequest instruments success rate and latency of query data request. +// InstrumentQueryDataRequest instruments success rate and latency of query data requests. func InstrumentQueryDataRequest(pluginID string, fn func() error) error { return instrumentPluginRequest(pluginID, "queryData", fn) } diff --git a/pkg/plugins/backendplugin/manager.go b/pkg/plugins/backendplugin/manager/manager.go similarity index 80% rename from pkg/plugins/backendplugin/manager.go rename to pkg/plugins/backendplugin/manager/manager.go index 92fd33c4595..50c2851985c 100644 --- a/pkg/plugins/backendplugin/manager.go +++ b/pkg/plugins/backendplugin/manager/manager.go @@ -1,4 +1,4 @@ -package backendplugin +package manager import ( "context" @@ -15,53 +15,31 @@ import ( "github.com/grafana/grafana-plugin-sdk-go/backend" "github.com/grafana/grafana/pkg/infra/log" "github.com/grafana/grafana/pkg/models" + "github.com/grafana/grafana/pkg/plugins" + "github.com/grafana/grafana/pkg/plugins/backendplugin" + "github.com/grafana/grafana/pkg/plugins/backendplugin/instrumentation" "github.com/grafana/grafana/pkg/registry" "github.com/grafana/grafana/pkg/setting" "github.com/grafana/grafana/pkg/util/errutil" "github.com/grafana/grafana/pkg/util/proxyutil" ) -var ( - // ErrPluginNotRegistered error returned when plugin not registered. - ErrPluginNotRegistered = errors.New("plugin not registered") - // ErrHealthCheckFailed error returned when health check failed. - ErrHealthCheckFailed = errors.New("health check failed") - // ErrPluginUnavailable error returned when plugin is unavailable. - ErrPluginUnavailable = errors.New("plugin unavailable") - // ErrMethodNotImplemented error returned when plugin method not implemented. - ErrMethodNotImplemented = errors.New("method not implemented") -) - func init() { registry.RegisterServiceWithPriority(&manager{}, registry.MediumHigh) } -// Manager manages backend plugins. -type Manager interface { - // Register registers a backend plugin - Register(pluginID string, factory PluginFactoryFunc) error - // StartPlugin starts a non-managed backend plugin - StartPlugin(ctx context.Context, pluginID string) error - // CollectMetrics collects metrics from a registered backend plugin. - CollectMetrics(ctx context.Context, pluginID string) (*backend.CollectMetricsResult, error) - // CheckHealth checks the health of a registered backend plugin. - CheckHealth(ctx context.Context, pCtx backend.PluginContext) (*backend.CheckHealthResult, error) - // CallResource calls a plugin resource.
- CallResource(pluginConfig backend.PluginContext, ctx *models.ReqContext, path string) -} - type manager struct { Cfg *setting.Cfg `inject:""` License models.Licensing `inject:""` PluginRequestValidator models.PluginRequestValidator `inject:""` pluginsMu sync.RWMutex - plugins map[string]Plugin + plugins map[string]backendplugin.Plugin logger log.Logger pluginSettings map[string]pluginSettings } func (m *manager) Init() error { - m.plugins = make(map[string]Plugin) + m.plugins = make(map[string]backendplugin.Plugin) m.logger = log.New("plugins.backend") m.pluginSettings = extractPluginSettings(m.Cfg) @@ -76,7 +54,7 @@ func (m *manager) Run(ctx context.Context) error { } // Register registers a backend plugin -func (m *manager) Register(pluginID string, factory PluginFactoryFunc) error { +func (m *manager) Register(pluginID string, factory backendplugin.PluginFactoryFunc) error { m.logger.Debug("Registering backend plugin", "pluginId", pluginID) m.pluginsMu.Lock() defer m.pluginsMu.Unlock() @@ -121,6 +99,19 @@ func (m *manager) Register(pluginID string, factory PluginFactoryFunc) error { return nil } +func (m *manager) GetDataPlugin(pluginID string) interface{} { + plugin := m.plugins[pluginID] + if plugin == nil || !plugin.CanHandleDataQueries() { + return nil + } + + if dataPlugin, ok := plugin.(plugins.DataPlugin); ok { + return dataPlugin + } + + return nil +} + // start starts all managed backend plugins func (m *manager) start(ctx context.Context) { m.pluginsMu.RLock() @@ -143,7 +134,7 @@ func (m *manager) StartPlugin(ctx context.Context, pluginID string) error { p, registered := m.plugins[pluginID] m.pluginsMu.RUnlock() if !registered { - return ErrPluginNotRegistered + return backendplugin.ErrPluginNotRegistered } if p.IsManaged() { @@ -160,7 +151,7 @@ func (m *manager) stop(ctx context.Context) { var wg sync.WaitGroup for _, p := range m.plugins { wg.Add(1) - go func(p Plugin, ctx context.Context) { + go func(p backendplugin.Plugin, ctx context.Context) { defer wg.Done() p.Logger().Debug("Stopping plugin") if err := p.Stop(ctx); err != nil { @@ -179,11 +170,11 @@ func (m *manager) CollectMetrics(ctx context.Context, pluginID string) (*backend m.pluginsMu.RUnlock() if !registered { - return nil, ErrPluginNotRegistered + return nil, backendplugin.ErrPluginNotRegistered } var resp *backend.CollectMetricsResult - err := instrumentCollectMetrics(p.PluginID(), func() (innerErr error) { + err := instrumentation.InstrumentCollectMetrics(p.PluginID(), func() (innerErr error) { resp, innerErr = p.CollectMetrics(ctx) return }) @@ -214,25 +205,25 @@ func (m *manager) CheckHealth(ctx context.Context, pluginContext backend.PluginC m.pluginsMu.RUnlock() if !registered { - return nil, ErrPluginNotRegistered + return nil, backendplugin.ErrPluginNotRegistered } var resp *backend.CheckHealthResult - err = instrumentCheckHealthRequest(p.PluginID(), func() (innerErr error) { + err = instrumentation.InstrumentCheckHealthRequest(p.PluginID(), func() (innerErr error) { resp, innerErr = p.CheckHealth(ctx, &backend.CheckHealthRequest{PluginContext: pluginContext}) return }) if err != nil { - if errors.Is(err, ErrMethodNotImplemented) { + if errors.Is(err, backendplugin.ErrMethodNotImplemented) { return nil, err } - if errors.Is(err, ErrPluginUnavailable) { + if errors.Is(err, backendplugin.ErrPluginUnavailable) { return nil, err } - return nil, errutil.Wrap("failed to check plugin health", ErrHealthCheckFailed) + return nil, errutil.Wrap("failed to check plugin health", backendplugin.ErrHealthCheckFailed) 
} return resp, nil @@ -248,7 +239,7 @@ func (m *manager) callResourceInternal(w http.ResponseWriter, req *http.Request, m.pluginsMu.RUnlock() if !registered { - return ErrPluginNotRegistered + return backendplugin.ErrPluginNotRegistered } keepCookieModel := keepCookiesJSONModel{} @@ -276,7 +267,7 @@ func (m *manager) callResourceInternal(w http.ResponseWriter, req *http.Request, Body: body, } - return instrumentCallResourceRequest(p.PluginID(), func() error { + return instrumentation.InstrumentCallResourceRequest(p.PluginID(), func() error { childCtx, cancel := context.WithCancel(req.Context()) defer cancel() stream := newCallResourceResponseStream(childCtx) @@ -336,12 +327,12 @@ func (m *manager) CallResource(pCtx backend.PluginContext, reqCtx *models.ReqCon } func handleCallResourceError(err error, reqCtx *models.ReqContext) { - if errors.Is(err, ErrPluginUnavailable) { + if errors.Is(err, backendplugin.ErrPluginUnavailable) { reqCtx.JsonApiErr(503, "Plugin unavailable", err) return } - if errors.Is(err, ErrMethodNotImplemented) { + if errors.Is(err, backendplugin.ErrMethodNotImplemented) { reqCtx.JsonApiErr(404, "Not found", err) return } @@ -349,7 +340,7 @@ func handleCallResourceError(err error, reqCtx *models.ReqContext) { reqCtx.JsonApiErr(500, "Failed to call resource", err) } -func flushStream(plugin Plugin, stream CallResourceClientResponseStream, w http.ResponseWriter) error { +func flushStream(plugin backendplugin.Plugin, stream callResourceClientResponseStream, w http.ResponseWriter) error { processedStreams := 0 for { @@ -404,12 +395,12 @@ func flushStream(plugin Plugin, stream CallResourceClientResponseStream, w http. } } -func startPluginAndRestartKilledProcesses(ctx context.Context, p Plugin) error { +func startPluginAndRestartKilledProcesses(ctx context.Context, p backendplugin.Plugin) error { if err := p.Start(ctx); err != nil { return err } - go func(ctx context.Context, p Plugin) { + go func(ctx context.Context, p backendplugin.Plugin) { if err := restartKilledProcess(ctx, p); err != nil { p.Logger().Error("Attempt to restart killed plugin process failed", "error", err) } @@ -418,7 +409,7 @@ func startPluginAndRestartKilledProcesses(ctx context.Context, p Plugin) error { return nil } -func restartKilledProcess(ctx context.Context, p Plugin) error { +func restartKilledProcess(ctx context.Context, p backendplugin.Plugin) error { ticker := time.NewTicker(time.Second * 1) for { @@ -442,3 +433,9 @@ func restartKilledProcess(ctx context.Context, p Plugin) error { } } } + +// callResourceClientResponseStream is used for receiving resource call responses. 
+type callResourceClientResponseStream interface { + Recv() (*backend.CallResourceResponse, error) + Close() error +} diff --git a/pkg/plugins/backendplugin/manager_test.go b/pkg/plugins/backendplugin/manager/manager_test.go similarity index 89% rename from pkg/plugins/backendplugin/manager_test.go rename to pkg/plugins/backendplugin/manager/manager_test.go index 87aedd781a5..0095a6d7e1f 100644 --- a/pkg/plugins/backendplugin/manager_test.go +++ b/pkg/plugins/backendplugin/manager/manager_test.go @@ -1,4 +1,4 @@ -package backendplugin +package manager import ( "bytes" @@ -12,6 +12,7 @@ import ( "github.com/grafana/grafana-plugin-sdk-go/backend" "github.com/grafana/grafana/pkg/infra/log" "github.com/grafana/grafana/pkg/models" + "github.com/grafana/grafana/pkg/plugins/backendplugin" "github.com/grafana/grafana/pkg/setting" "github.com/stretchr/testify/require" ) @@ -22,19 +23,19 @@ func TestManager(t *testing.T) { newManagerScenario(t, false, func(t *testing.T, ctx *managerScenarioCtx) { t.Run("Unregistered plugin scenario", func(t *testing.T) { err := ctx.manager.StartPlugin(context.Background(), testPluginID) - require.Equal(t, ErrPluginNotRegistered, err) + require.Equal(t, backendplugin.ErrPluginNotRegistered, err) _, err = ctx.manager.CollectMetrics(context.Background(), testPluginID) - require.Equal(t, ErrPluginNotRegistered, err) + require.Equal(t, backendplugin.ErrPluginNotRegistered, err) _, err = ctx.manager.CheckHealth(context.Background(), backend.PluginContext{PluginID: testPluginID}) - require.Equal(t, ErrPluginNotRegistered, err) + require.Equal(t, backendplugin.ErrPluginNotRegistered, err) req, err := http.NewRequest(http.MethodGet, "/test", nil) require.NoError(t, err) w := httptest.NewRecorder() err = ctx.manager.callResourceInternal(w, req, backend.PluginContext{PluginID: testPluginID}) - require.Equal(t, ErrPluginNotRegistered, err) + require.Equal(t, backendplugin.ErrPluginNotRegistered, err) }) }) @@ -121,12 +122,12 @@ func TestManager(t *testing.T) { t.Run("Unimplemented handlers", func(t *testing.T) { t.Run("Collect metrics should return method not implemented error", func(t *testing.T) { _, err = ctx.manager.CollectMetrics(context.Background(), testPluginID) - require.Equal(t, ErrMethodNotImplemented, err) + require.Equal(t, backendplugin.ErrMethodNotImplemented, err) }) t.Run("Check health should return method not implemented error", func(t *testing.T) { _, err = ctx.manager.CheckHealth(context.Background(), backend.PluginContext{PluginID: testPluginID}) - require.Equal(t, ErrMethodNotImplemented, err) + require.Equal(t, backendplugin.ErrMethodNotImplemented, err) }) t.Run("Call resource should return method not implemented error", func(t *testing.T) { @@ -134,17 +135,17 @@ func TestManager(t *testing.T) { require.NoError(t, err) w := httptest.NewRecorder() err = ctx.manager.callResourceInternal(w, req, backend.PluginContext{PluginID: testPluginID}) - require.Equal(t, ErrMethodNotImplemented, err) + require.Equal(t, backendplugin.ErrMethodNotImplemented, err) }) }) t.Run("Implemented handlers", func(t *testing.T) { t.Run("Collect metrics should return expected result", func(t *testing.T) { - ctx.plugin.CollectMetricsHandlerFunc = backend.CollectMetricsHandlerFunc(func(ctx context.Context) (*backend.CollectMetricsResult, error) { + ctx.plugin.CollectMetricsHandlerFunc = func(ctx context.Context) (*backend.CollectMetricsResult, error) { return &backend.CollectMetricsResult{ PrometheusMetrics: []byte("hello"), }, nil - }) + } res, err := 
ctx.manager.CollectMetrics(context.Background(), testPluginID) require.NoError(t, err) @@ -156,13 +157,13 @@ func TestManager(t *testing.T) { json := []byte(`{ "key": "value" }`) - ctx.plugin.CheckHealthHandlerFunc = backend.CheckHealthHandlerFunc(func(ctx context.Context, req *backend.CheckHealthRequest) (*backend.CheckHealthResult, error) { + ctx.plugin.CheckHealthHandlerFunc = func(ctx context.Context, req *backend.CheckHealthRequest) (*backend.CheckHealthResult, error) { return &backend.CheckHealthResult{ Status: backend.HealthStatusOk, Message: "All good", JSONDetails: json, }, nil - }) + } res, err := ctx.manager.CheckHealth(context.Background(), backend.PluginContext{PluginID: testPluginID}) require.NoError(t, err) @@ -173,11 +174,12 @@ func TestManager(t *testing.T) { }) t.Run("Call resource should return expected response", func(t *testing.T) { - ctx.plugin.CallResourceHandlerFunc = backend.CallResourceHandlerFunc(func(ctx context.Context, req *backend.CallResourceRequest, sender backend.CallResourceResponseSender) error { + ctx.plugin.CallResourceHandlerFunc = func(ctx context.Context, + req *backend.CallResourceRequest, sender backend.CallResourceResponseSender) error { return sender.Send(&backend.CallResourceResponse{ Status: http.StatusOK, }) - }) + } req, err := http.NewRequest(http.MethodGet, "/test", bytes.NewReader([]byte{})) require.NoError(t, err) @@ -270,7 +272,7 @@ type managerScenarioCtx struct { cfg *setting.Cfg license *testLicensingService manager *manager - factory PluginFactoryFunc + factory backendplugin.PluginFactoryFunc plugin *testPlugin env []string } @@ -293,7 +295,7 @@ func newManagerScenario(t *testing.T, managed bool, fn func(t *testing.T, ctx *m err := ctx.manager.Init() require.NoError(t, err) - ctx.factory = PluginFactoryFunc(func(pluginID string, logger log.Logger, env []string) (Plugin, error) { + ctx.factory = func(pluginID string, logger log.Logger, env []string) (backendplugin.Plugin, error) { ctx.plugin = &testPlugin{ pluginID: pluginID, logger: logger, @@ -302,7 +304,7 @@ func newManagerScenario(t *testing.T, managed bool, fn func(t *testing.T, ctx *m ctx.env = env return ctx.plugin, nil - }) + } fn(t, ctx) } @@ -328,6 +330,10 @@ func (tp *testPlugin) Logger() log.Logger { return tp.logger } +func (tp *testPlugin) CanHandleDataQueries() bool { + return false +} + func (tp *testPlugin) Start(ctx context.Context) error { tp.mutex.Lock() defer tp.mutex.Unlock() @@ -364,7 +370,7 @@ func (tp *testPlugin) CollectMetrics(ctx context.Context) (*backend.CollectMetri return tp.CollectMetricsHandlerFunc(ctx) } - return nil, ErrMethodNotImplemented + return nil, backendplugin.ErrMethodNotImplemented } func (tp *testPlugin) CheckHealth(ctx context.Context, req *backend.CheckHealthRequest) (*backend.CheckHealthResult, error) { @@ -372,7 +378,7 @@ func (tp *testPlugin) CheckHealth(ctx context.Context, req *backend.CheckHealthR return tp.CheckHealthHandlerFunc(ctx, req) } - return nil, ErrMethodNotImplemented + return nil, backendplugin.ErrMethodNotImplemented } func (tp *testPlugin) CallResource(ctx context.Context, req *backend.CallResourceRequest, sender backend.CallResourceResponseSender) error { @@ -380,7 +386,7 @@ func (tp *testPlugin) CallResource(ctx context.Context, req *backend.CallResourc return tp.CallResourceHandlerFunc(ctx, req, sender) } - return ErrMethodNotImplemented + return backendplugin.ErrMethodNotImplemented } type testLicensingService struct { diff --git a/pkg/plugins/backendplugin/plugin_settings.go 
b/pkg/plugins/backendplugin/manager/plugin_settings.go similarity index 96% rename from pkg/plugins/backendplugin/plugin_settings.go rename to pkg/plugins/backendplugin/manager/plugin_settings.go index 0cc2353afb4..2b9987e0fd7 100644 --- a/pkg/plugins/backendplugin/plugin_settings.go +++ b/pkg/plugins/backendplugin/manager/plugin_settings.go @@ -1,4 +1,4 @@ -package backendplugin +package manager import ( "fmt" diff --git a/pkg/plugins/backendplugin/plugin_settings_test.go b/pkg/plugins/backendplugin/manager/plugin_settings_test.go similarity index 98% rename from pkg/plugins/backendplugin/plugin_settings_test.go rename to pkg/plugins/backendplugin/manager/plugin_settings_test.go index 742c13585e3..388993a324c 100644 --- a/pkg/plugins/backendplugin/plugin_settings_test.go +++ b/pkg/plugins/backendplugin/manager/plugin_settings_test.go @@ -1,4 +1,4 @@ -package backendplugin +package manager import ( "sort" diff --git a/pkg/plugins/backendplugin/resource_response_stream.go b/pkg/plugins/backendplugin/manager/resource_response_stream.go similarity index 98% rename from pkg/plugins/backendplugin/resource_response_stream.go rename to pkg/plugins/backendplugin/manager/resource_response_stream.go index 48567de109b..31ab51f0be6 100644 --- a/pkg/plugins/backendplugin/resource_response_stream.go +++ b/pkg/plugins/backendplugin/manager/resource_response_stream.go @@ -1,4 +1,4 @@ -package backendplugin +package manager import ( "context" diff --git a/pkg/plugins/backendplugin/plugin.go b/pkg/plugins/backendplugin/plugin.go deleted file mode 100644 index db2ae1c8c58..00000000000 --- a/pkg/plugins/backendplugin/plugin.go +++ /dev/null @@ -1,30 +0,0 @@ -package backendplugin - -import ( - "context" - - "github.com/grafana/grafana-plugin-sdk-go/backend" - "github.com/grafana/grafana/pkg/infra/log" -) - -// Plugin backend plugin interface. -type Plugin interface { - PluginID() string - Logger() log.Logger - Start(ctx context.Context) error - Stop(ctx context.Context) error - IsManaged() bool - Exited() bool - backend.CollectMetricsHandler - backend.CheckHealthHandler - backend.CallResourceHandler -} - -// PluginFactoryFunc factory for creating a Plugin. -type PluginFactoryFunc func(pluginID string, logger log.Logger, env []string) (Plugin, error) - -// CallResourceClientResponseStream is used for receiving resource call responses. -type CallResourceClientResponseStream interface { - Recv() (*backend.CallResourceResponse, error) - Close() error -} diff --git a/pkg/plugins/dashboard_importer_test.go b/pkg/plugins/dashboard_importer_test.go deleted file mode 100644 index a1af8a0584d..00000000000 --- a/pkg/plugins/dashboard_importer_test.go +++ /dev/null @@ -1,103 +0,0 @@ -package plugins - -import ( - "io/ioutil" - "testing" - - "github.com/grafana/grafana/pkg/components/simplejson" - "github.com/grafana/grafana/pkg/models" - "github.com/grafana/grafana/pkg/services/dashboards" - "github.com/grafana/grafana/pkg/setting" - . 
"github.com/smartystreets/goconvey/convey" -) - -func TestDashboardImport(t *testing.T) { - pluginScenario("When importing a plugin dashboard", t, func() { - origNewDashboardService := dashboards.NewService - mock := &dashboards.FakeDashboardService{} - dashboards.MockDashboardService(mock) - - cmd := ImportDashboardCommand{ - PluginId: "test-app", - Path: "dashboards/connections.json", - OrgId: 1, - User: &models.SignedInUser{UserId: 1, OrgRole: models.ROLE_ADMIN}, - Inputs: []ImportDashboardInput{ - {Name: "*", Type: "datasource", Value: "graphite"}, - }, - } - - err := ImportDashboard(&cmd) - So(err, ShouldBeNil) - - Convey("should install dashboard", func() { - So(cmd.Result, ShouldNotBeNil) - - resultStr, _ := mock.SavedDashboards[0].Dashboard.Data.EncodePretty() - expectedBytes, _ := ioutil.ReadFile("testdata/test-app/dashboards/connections_result.json") - expectedJson, _ := simplejson.NewJson(expectedBytes) - expectedStr, _ := expectedJson.EncodePretty() - - So(string(resultStr), ShouldEqual, string(expectedStr)) - - panel := mock.SavedDashboards[0].Dashboard.Data.Get("rows").GetIndex(0).Get("panels").GetIndex(0) - So(panel.Get("datasource").MustString(), ShouldEqual, "graphite") - }) - - Reset(func() { - dashboards.NewService = origNewDashboardService - }) - }) - - Convey("When evaling dashboard template", t, func() { - template, _ := simplejson.NewJson([]byte(`{ - "__inputs": [ - { - "name": "DS_NAME", - "type": "datasource" - } - ], - "test": { - "prop": "${DS_NAME}_${DS_NAME}" - } - }`)) - - evaluator := &DashTemplateEvaluator{ - template: template, - inputs: []ImportDashboardInput{ - {Name: "*", Type: "datasource", Value: "my-server"}, - }, - } - - res, err := evaluator.Eval() - So(err, ShouldBeNil) - - Convey("should render template", func() { - So(res.GetPath("test", "prop").MustString(), ShouldEqual, "my-server_my-server") - }) - - Convey("should not include inputs in output", func() { - inputs := res.Get("__inputs") - So(inputs.Interface(), ShouldBeNil) - }) - }) -} - -func pluginScenario(desc string, t *testing.T, fn func()) { - Convey("Given a plugin", t, func() { - pm := &PluginManager{ - Cfg: &setting.Cfg{ - FeatureToggles: map[string]bool{}, - PluginSettings: setting.PluginSettings{ - "test-app": map[string]string{ - "path": "testdata/test-app", - }, - }, - }, - } - err := pm.Init() - So(err, ShouldBeNil) - - Convey(desc, fn) - }) -} diff --git a/pkg/plugins/dashboards.go b/pkg/plugins/dashboards.go index 431f69e74e7..f8973015a50 100644 --- a/pkg/plugins/dashboards.go +++ b/pkg/plugins/dashboards.go @@ -1,14 +1,5 @@ package plugins -import ( - "os" - "path/filepath" - - "github.com/grafana/grafana/pkg/bus" - "github.com/grafana/grafana/pkg/components/simplejson" - "github.com/grafana/grafana/pkg/models" -) - type PluginDashboardInfoDTO struct { PluginId string `json:"pluginId"` Title string `json:"title"` @@ -24,97 +15,3 @@ type PluginDashboardInfoDTO struct { Path string `json:"path"` Removed bool `json:"removed"` } - -func GetPluginDashboards(orgId int64, pluginId string) ([]*PluginDashboardInfoDTO, error) { - plugin, exists := Plugins[pluginId] - - if !exists { - return nil, PluginNotFoundError{pluginId} - } - - result := make([]*PluginDashboardInfoDTO, 0) - - // load current dashboards - query := models.GetDashboardsByPluginIdQuery{OrgId: orgId, PluginId: pluginId} - if err := bus.Dispatch(&query); err != nil { - return nil, err - } - - existingMatches := make(map[int64]bool) - - for _, include := range plugin.Includes { - if include.Type != PluginTypeDashboard { - 
continue - } - - res := &PluginDashboardInfoDTO{} - var dashboard *models.Dashboard - var err error - - if dashboard, err = loadPluginDashboard(plugin.Id, include.Path); err != nil { - return nil, err - } - - res.Path = include.Path - res.PluginId = plugin.Id - res.Title = dashboard.Title - res.Revision = dashboard.Data.Get("revision").MustInt64(1) - - // find existing dashboard - for _, existingDash := range query.Result { - if existingDash.Slug == dashboard.Slug { - res.DashboardId = existingDash.Id - res.Imported = true - res.ImportedUri = "db/" + existingDash.Slug - res.ImportedUrl = existingDash.GetUrl() - res.ImportedRevision = existingDash.Data.Get("revision").MustInt64(1) - existingMatches[existingDash.Id] = true - } - } - - result = append(result, res) - } - - // find deleted dashboards - for _, dash := range query.Result { - if _, exists := existingMatches[dash.Id]; !exists { - result = append(result, &PluginDashboardInfoDTO{ - Slug: dash.Slug, - DashboardId: dash.Id, - Removed: true, - }) - } - } - - return result, nil -} - -func loadPluginDashboard(pluginId, path string) (*models.Dashboard, error) { - plugin, exists := Plugins[pluginId] - if !exists { - return nil, PluginNotFoundError{pluginId} - } - - dashboardFilePath := filepath.Join(plugin.PluginDir, path) - // nolint:gosec - // We can ignore the gosec G304 warning on this one because `plugin.PluginDir` is based - // on plugin folder structure on disk and not user input. `path` comes from the - // `plugin.json` configuration file for the loaded plugin - reader, err := os.Open(dashboardFilePath) - if err != nil { - return nil, err - } - - defer func() { - if err := reader.Close(); err != nil { - plog.Warn("Failed to close file", "path", dashboardFilePath, "err", err) - } - }() - - data, err := simplejson.NewFromReader(reader) - if err != nil { - return nil, err - } - - return models.NewDashboardFromJson(data), nil -} diff --git a/pkg/plugins/dashboards_updater.go b/pkg/plugins/dashboards_updater.go deleted file mode 100644 index 507eaaf4656..00000000000 --- a/pkg/plugins/dashboards_updater.go +++ /dev/null @@ -1,131 +0,0 @@ -package plugins - -import ( - "github.com/grafana/grafana/pkg/bus" - "github.com/grafana/grafana/pkg/models" -) - -func init() { - bus.AddEventListener(handlePluginStateChanged) -} - -func (pm *PluginManager) updateAppDashboards() { - pm.log.Debug("Looking for App Dashboard Updates") - - query := models.GetPluginSettingsQuery{OrgId: 0} - - if err := bus.Dispatch(&query); err != nil { - pm.log.Error("Failed to get all plugin settings", "error", err) - return - } - - for _, pluginSetting := range query.Result { - // ignore disabled plugins - if !pluginSetting.Enabled { - continue - } - - if pluginDef, exist := Plugins[pluginSetting.PluginId]; exist { - if pluginDef.Info.Version != pluginSetting.PluginVersion { - syncPluginDashboards(pluginDef, pluginSetting.OrgId) - } - } - } -} - -func autoUpdateAppDashboard(pluginDashInfo *PluginDashboardInfoDTO, orgId int64) error { - dash, err := loadPluginDashboard(pluginDashInfo.PluginId, pluginDashInfo.Path) - if err != nil { - return err - } - plog.Info("Auto updating App dashboard", "dashboard", dash.Title, "newRev", pluginDashInfo.Revision, "oldRev", pluginDashInfo.ImportedRevision) - updateCmd := ImportDashboardCommand{ - OrgId: orgId, - PluginId: pluginDashInfo.PluginId, - Overwrite: true, - Dashboard: dash.Data, - User: &models.SignedInUser{UserId: 0, OrgRole: models.ROLE_ADMIN}, - Path: pluginDashInfo.Path, - } - - return bus.Dispatch(&updateCmd) -} - -func 
syncPluginDashboards(pluginDef *PluginBase, orgId int64) { - plog.Info("Syncing plugin dashboards to DB", "pluginId", pluginDef.Id) - - // Get plugin dashboards - dashboards, err := GetPluginDashboards(orgId, pluginDef.Id) - - if err != nil { - plog.Error("Failed to load app dashboards", "error", err) - return - } - - // Update dashboards with updated revisions - for _, dash := range dashboards { - // remove removed ones - if dash.Removed { - plog.Info("Deleting plugin dashboard", "pluginId", pluginDef.Id, "dashboard", dash.Slug) - - deleteCmd := models.DeleteDashboardCommand{OrgId: orgId, Id: dash.DashboardId} - if err := bus.Dispatch(&deleteCmd); err != nil { - plog.Error("Failed to auto update app dashboard", "pluginId", pluginDef.Id, "error", err) - return - } - - continue - } - - // update updated ones - if dash.ImportedRevision != dash.Revision { - if err := autoUpdateAppDashboard(dash, orgId); err != nil { - plog.Error("Failed to auto update app dashboard", "pluginId", pluginDef.Id, "error", err) - return - } - } - } - - // update version in plugin_setting table to mark that we have processed the update - query := models.GetPluginSettingByIdQuery{PluginId: pluginDef.Id, OrgId: orgId} - if err := bus.Dispatch(&query); err != nil { - plog.Error("Failed to read plugin setting by id", "error", err) - return - } - - appSetting := query.Result - cmd := models.UpdatePluginSettingVersionCmd{ - OrgId: appSetting.OrgId, - PluginId: appSetting.PluginId, - PluginVersion: pluginDef.Info.Version, - } - - if err := bus.Dispatch(&cmd); err != nil { - plog.Error("Failed to update plugin setting version", "error", err) - } -} - -func handlePluginStateChanged(event *models.PluginStateChangedEvent) error { - plog.Info("Plugin state changed", "pluginId", event.PluginId, "enabled", event.Enabled) - - if event.Enabled { - syncPluginDashboards(Plugins[event.PluginId], event.OrgId) - } else { - query := models.GetDashboardsByPluginIdQuery{PluginId: event.PluginId, OrgId: event.OrgId} - - if err := bus.Dispatch(&query); err != nil { - return err - } - for _, dash := range query.Result { - deleteCmd := models.DeleteDashboardCommand{OrgId: dash.OrgId, Id: dash.Id} - - plog.Info("Deleting plugin dashboard", "pluginId", event.PluginId, "dashboard", dash.Slug) - - if err := bus.Dispatch(&deleteCmd); err != nil { - return err - } - } - } - - return nil -} diff --git a/pkg/plugins/dataframes.go b/pkg/plugins/dataframes.go new file mode 100644 index 00000000000..932e1f2b537 --- /dev/null +++ b/pkg/plugins/dataframes.go @@ -0,0 +1,87 @@ +package plugins + +import ( + "github.com/grafana/grafana-plugin-sdk-go/data" + jsoniter "github.com/json-iterator/go" +) + +// DataFrames is an interface for retrieving encoded and decoded data frames. +// +// See NewDecodedDataFrames and NewEncodedDataFrames for more information. +type DataFrames interface { + // Encoded encodes Frames into a slice of []byte. + // If an error occurs [][]byte will be nil. + // The encoded result, if any, will be cached and returned next time Encoded is called. + Encoded() ([][]byte, error) + + // Decoded decodes a slice of Arrow encoded frames to data.Frames ([]*data.Frame). + // If an error occurs Frames will be nil. + // The decoded result, if any, will be cached and returned next time Decoded is called. + Decoded() (data.Frames, error) +} + +type dataFrames struct { + decoded data.Frames + encoded [][]byte +} + +// NewDecodedDataFrames instantiates DataFrames from decoded frames. 
+// +// This should be the primary function for creating DataFrames if you're implementing a plugin. +// In a Grafana alerting scenario it needs to operate on decoded frames, which is why this function is +// preferable. When encoded data frames are needed, e.g. returned from Grafana HTTP API, it will +// happen automatically when MarshalJSON() is called. +func NewDecodedDataFrames(decodedFrames data.Frames) DataFrames { + return &dataFrames{ + decoded: decodedFrames, + } +} + +// NewEncodedDataFrames instantiates DataFrames from encoded frames. +// +// This one is primarily used for creating DataFrames when receiving encoded data frames from an external +// plugin or similar. This may allow the encoded data frames to be returned to Grafana UI without any additional +// decoding/encoding required. In a Grafana alerting scenario it needs to operate on decoded data frames, which is +// why encoded frames need to be decoded before usage. +func NewEncodedDataFrames(encodedFrames [][]byte) DataFrames { + return &dataFrames{ + encoded: encodedFrames, + } +} + +func (df *dataFrames) Encoded() ([][]byte, error) { + if df.encoded == nil { + encoded, err := df.decoded.MarshalArrow() + if err != nil { + return nil, err + } + df.encoded = encoded + } + + return df.encoded, nil +} + +func (df *dataFrames) Decoded() (data.Frames, error) { + if df.decoded == nil { + decoded, err := data.UnmarshalArrowFrames(df.encoded) + if err != nil { + return nil, err + } + df.decoded = decoded + } + + return df.decoded, nil +} + +func (df *dataFrames) MarshalJSON() ([]byte, error) { + encoded, err := df.Encoded() + if err != nil { + return nil, err + } + + // Use a configuration that's compatible with the standard library + // to minimize the risk of introducing bugs. This will make sure + // that map keys are ordered. + jsonCfg := jsoniter.ConfigCompatibleWithStandardLibrary + return jsonCfg.Marshal(encoded) +}
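Both representations are converted lazily and cached, so a core plugin can return decoded frames and only pay the Arrow encoding cost when the HTTP response is serialized. A minimal usage sketch, assuming the plugin SDK's data package (the frame contents are made up for illustration):

import (
	"github.com/grafana/grafana-plugin-sdk-go/data"
	"github.com/grafana/grafana/pkg/plugins"
)

func exampleDataFrames() error {
	// Build a small frame with the plugin SDK.
	frame := data.NewFrame("example",
		data.NewField("value", nil, []float64{1, 2, 3}))

	// Wrap it; nothing is Arrow-encoded at this point.
	df := plugins.NewDecodedDataFrames(data.Frames{frame})

	// The first call encodes to Arrow and caches the result;
	// later calls (including MarshalJSON) reuse the cache.
	if _, err := df.Encoded(); err != nil {
		return err
	}

	// Decoded still returns the original frames without a decode round trip.
	_, err := df.Decoded()
	return err
}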
diff --git a/pkg/plugins/datasource_plugin.go b/pkg/plugins/datasource_plugin.go index e6854ea7813..eb74b0e3970 100644 --- a/pkg/plugins/datasource_plugin.go +++ b/pkg/plugins/datasource_plugin.go @@ -1,15 +1,15 @@ package plugins import ( + "context" "encoding/json" + "fmt" "path/filepath" "github.com/grafana/grafana/pkg/infra/log" "github.com/grafana/grafana/pkg/models" "github.com/grafana/grafana/pkg/plugins/backendplugin" "github.com/grafana/grafana/pkg/plugins/backendplugin/grpcplugin" - "github.com/grafana/grafana/pkg/plugins/datasource/wrapper" - "github.com/grafana/grafana/pkg/tsdb" "github.com/grafana/grafana/pkg/util/errutil" ) @@ -32,47 +32,65 @@ type DataSourcePlugin struct { Backend bool `json:"backend,omitempty"` Executable string `json:"executable,omitempty"` SDK bool `json:"sdk,omitempty"` + + client *grpcplugin.Client + legacyClient *grpcplugin.LegacyClient + logger log.Logger } -func (p *DataSourcePlugin) Load(decoder *json.Decoder, base *PluginBase, backendPluginManager backendplugin.Manager) error { +func (p *DataSourcePlugin) Load(decoder *json.Decoder, base *PluginBase, backendPluginManager backendplugin.Manager) ( + interface{}, error) { if err := decoder.Decode(p); err != nil { - return errutil.Wrapf(err, "Failed to decode datasource plugin") - } - - if err := p.registerPlugin(base); err != nil { - return errutil.Wrapf(err, "Failed to register plugin") + return nil, errutil.Wrapf(err, "Failed to decode datasource plugin") } if p.Backend { cmd := ComposePluginStartCommand(p.Executable) - fullpath := filepath.Join(p.PluginDir, cmd) + fullpath := filepath.Join(base.PluginDir, cmd) factory := grpcplugin.NewBackendPlugin(p.Id, fullpath, grpcplugin.PluginStartFuncs{ OnLegacyStart: p.onLegacyPluginStart, OnStart: p.onPluginStart, }) if err := backendPluginManager.Register(p.Id, factory); err != nil { - return errutil.Wrapf(err, "Failed to register backend plugin") + return nil, errutil.Wrapf(err, "failed to register backend plugin") } } - DataSources[p.Id] = p - return nil + return p, nil +} + +func (p *DataSourcePlugin) DataQuery(ctx context.Context, dsInfo *models.DataSource, query DataQuery) (DataResponse, error) { + if !p.CanHandleDataQueries() { + return DataResponse{}, fmt.Errorf("plugin %q can't handle data queries", p.Id) + } + + if p.client != nil { + endpoint := newDataSourcePluginWrapperV2(p.logger, p.Id, p.Type, p.client.DataPlugin) + return endpoint.Query(ctx, dsInfo, query) + } + + endpoint := newDataSourcePluginWrapper(p.logger, p.legacyClient.DatasourcePlugin) + return endpoint.Query(ctx, dsInfo, query) } func (p *DataSourcePlugin) onLegacyPluginStart(pluginID string, client *grpcplugin.LegacyClient, logger log.Logger) error { - tsdb.RegisterTsdbQueryEndpoint(pluginID, func(dsInfo *models.DataSource) (tsdb.TsdbQueryEndpoint, error) { - return wrapper.NewDatasourcePluginWrapper(logger, client.DatasourcePlugin), nil - }) + p.legacyClient = client + p.logger = logger return nil } +func (p *DataSourcePlugin) CanHandleDataQueries() bool { + return p.client != nil || p.legacyClient != nil +} + func (p *DataSourcePlugin) onPluginStart(pluginID string, client *grpcplugin.Client, logger log.Logger) error { - if client.DataPlugin != nil { - tsdb.RegisterTsdbQueryEndpoint(pluginID, func(dsInfo *models.DataSource) (tsdb.TsdbQueryEndpoint, error) { - return wrapper.NewDatasourcePluginWrapperV2(logger, p.Id, p.Type, client.DataPlugin), nil - }) + if client.DataPlugin == nil { -
return nil } + p.client = client + p.logger = logger + return nil } diff --git a/pkg/plugins/datasource/wrapper/datasource_plugin_wrapper.go b/pkg/plugins/datasource_plugin_wrapper.go similarity index 77% rename from pkg/plugins/datasource/wrapper/datasource_plugin_wrapper.go rename to pkg/plugins/datasource_plugin_wrapper.go index 3ca6f5b6ee6..ddf23537604 100644 --- a/pkg/plugins/datasource/wrapper/datasource_plugin_wrapper.go +++ b/pkg/plugins/datasource_plugin_wrapper.go @@ -1,4 +1,4 @@ -package wrapper +package plugins import ( "context" @@ -10,10 +10,9 @@ import ( "github.com/grafana/grafana/pkg/components/simplejson" "github.com/grafana/grafana/pkg/infra/log" "github.com/grafana/grafana/pkg/models" - "github.com/grafana/grafana/pkg/tsdb" ) -func NewDatasourcePluginWrapper(log log.Logger, plugin datasource.DatasourcePlugin) *DatasourcePluginWrapper { +func newDataSourcePluginWrapper(log log.Logger, plugin datasource.DatasourcePlugin) *DatasourcePluginWrapper { return &DatasourcePluginWrapper{DatasourcePlugin: plugin, logger: log} } @@ -22,10 +21,10 @@ type DatasourcePluginWrapper struct { logger log.Logger } -func (tw *DatasourcePluginWrapper) Query(ctx context.Context, ds *models.DataSource, query *tsdb.TsdbQuery) (*tsdb.Response, error) { +func (tw *DatasourcePluginWrapper) Query(ctx context.Context, ds *models.DataSource, query DataQuery) (DataResponse, error) { jsonData, err := ds.JsonData.MarshalJSON() if err != nil { - return nil, err + return DataResponse{}, err } pbQuery := &datasource.DatasourceRequest{ @@ -48,31 +47,33 @@ func (tw *DatasourcePluginWrapper) Query(ctx context.Context, ds *models.DataSou } for _, q := range query.Queries { - modelJson, _ := q.Model.MarshalJSON() + modelJson, err := q.Model.MarshalJSON() + if err != nil { + return DataResponse{}, err + } pbQuery.Queries = append(pbQuery.Queries, &datasource.Query{ ModelJson: string(modelJson), - IntervalMs: q.IntervalMs, - RefId: q.RefId, + IntervalMs: q.IntervalMS, + RefId: q.RefID, MaxDataPoints: q.MaxDataPoints, }) } pbres, err := tw.DatasourcePlugin.Query(ctx, pbQuery) - if err != nil { - return nil, err + return DataResponse{}, err } - res := &tsdb.Response{ - Results: map[string]*tsdb.QueryResult{}, + res := DataResponse{ + Results: map[string]DataQueryResult{}, } for _, r := range pbres.Results { - qr := &tsdb.QueryResult{ - RefId: r.RefId, - Series: []*tsdb.TimeSeries{}, - Tables: []*tsdb.Table{}, + qr := DataQueryResult{ + RefID: r.RefId, + Series: []DataTimeSeries{}, + Tables: []DataTable{}, } if r.Error != "" { @@ -89,14 +90,14 @@ func (tw *DatasourcePluginWrapper) Query(ctx context.Context, ds *models.DataSou } for _, s := range r.GetSeries() { - points := tsdb.TimeSeriesPoints{} + points := DataTimeSeriesPoints{} for _, p := range s.Points { - po := tsdb.NewTimePoint(null.FloatFrom(p.Value), float64(p.Timestamp)) + po := DataTimePoint{null.FloatFrom(p.Value), null.FloatFrom(float64(p.Timestamp))} points = append(points, po) } - qr.Series = append(qr.Series, &tsdb.TimeSeries{ + qr.Series = append(qr.Series, DataTimeSeries{ Name: s.Name, Tags: s.Tags, Points: points, @@ -105,7 +106,7 @@ func (tw *DatasourcePluginWrapper) Query(ctx context.Context, ds *models.DataSou mappedTables, err := tw.mapTables(r) if err != nil { - return nil, err + return DataResponse{}, err } qr.Tables = mappedTables @@ -114,8 +115,9 @@ func (tw *DatasourcePluginWrapper) Query(ctx context.Context, ds *models.DataSou return res, nil } -func (tw *DatasourcePluginWrapper) mapTables(r *datasource.QueryResult) ([]*tsdb.Table, error) 
{ - var tables []*tsdb.Table + +func (tw *DatasourcePluginWrapper) mapTables(r *datasource.QueryResult) ([]DataTable, error) { + var tables []DataTable for _, t := range r.GetTables() { mappedTable, err := tw.mapTable(t) if err != nil { @@ -126,21 +128,21 @@ func (tw *DatasourcePluginWrapper) mapTables(r *datasource.QueryResult) ([]*tsdb return tables, nil } -func (tw *DatasourcePluginWrapper) mapTable(t *datasource.Table) (*tsdb.Table, error) { - table := &tsdb.Table{} +func (tw *DatasourcePluginWrapper) mapTable(t *datasource.Table) (DataTable, error) { + table := DataTable{} for _, c := range t.GetColumns() { - table.Columns = append(table.Columns, tsdb.TableColumn{ + table.Columns = append(table.Columns, DataTableColumn{ Text: c.Name, }) } - table.Rows = make([]tsdb.RowValues, 0) + table.Rows = make([]DataRowValues, 0) for _, r := range t.GetRows() { - row := tsdb.RowValues{} + row := DataRowValues{} for _, rv := range r.Values { mappedRw, err := tw.mapRowValue(rv) if err != nil { - return nil, err + return table, err } row = append(row, mappedRw) diff --git a/pkg/plugins/datasource/wrapper/datasource_plugin_wrapper_test.go b/pkg/plugins/datasource_plugin_wrapper_test.go similarity index 82% rename from pkg/plugins/datasource/wrapper/datasource_plugin_wrapper_test.go rename to pkg/plugins/datasource_plugin_wrapper_test.go index fbb9518333c..da3487a5660 100644 --- a/pkg/plugins/datasource/wrapper/datasource_plugin_wrapper_test.go +++ b/pkg/plugins/datasource_plugin_wrapper_test.go @@ -1,16 +1,15 @@ -package wrapper +package plugins import ( "testing" "github.com/grafana/grafana-plugin-model/go/datasource" "github.com/grafana/grafana/pkg/infra/log" - "github.com/grafana/grafana/pkg/tsdb" "github.com/stretchr/testify/require" ) func TestMapTables(t *testing.T) { - dpw := NewDatasourcePluginWrapper(log.New("test-logger"), nil) + dpw := newDataSourcePluginWrapper(log.New("test-logger"), nil) var qr = &datasource.QueryResult{} qr.Tables = append(qr.Tables, &datasource.Table{ Columns: []*datasource.TableColumn{}, @@ -23,7 +22,7 @@ func TestMapTables(t *testing.T) { } func TestMapTable(t *testing.T) { - dpw := NewDatasourcePluginWrapper(log.New("test-logger"), nil) + dpw := newDataSourcePluginWrapper(log.New("test-logger"), nil) source := &datasource.Table{ Columns: []*datasource.TableColumn{{Name: "column1"}, {Name: "column2"}}, @@ -41,8 +40,8 @@ func TestMapTable(t *testing.T) { }}, } - want := &tsdb.Table{ - Columns: []tsdb.TableColumn{{Text: "column1"}, {Text: "column2"}}, + want := DataTable{ + Columns: []DataTableColumn{{Text: "column1"}, {Text: "column2"}}, } have, err := dpw.mapTable(source) require.NoError(t, err) @@ -53,9 +52,10 @@ func TestMapTable(t *testing.T) { } func TestMappingRowValue(t *testing.T) { - dpw := NewDatasourcePluginWrapper(log.New("test-logger"), nil) + dpw := newDataSourcePluginWrapper(log.New("test-logger"), nil) - boolRowValue, _ := dpw.mapRowValue(&datasource.RowValue{Kind: datasource.RowValue_TYPE_BOOL, BoolValue: true}) + boolRowValue, err := dpw.mapRowValue(&datasource.RowValue{Kind: datasource.RowValue_TYPE_BOOL, BoolValue: true}) + require.NoError(t, err) haveBool, ok := boolRowValue.(bool) require.True(t, ok) require.True(t, haveBool) diff --git a/pkg/plugins/datasource/wrapper/datasource_plugin_wrapper_v2.go b/pkg/plugins/datasource_plugin_wrapper_v2.go similarity index 58% rename from pkg/plugins/datasource/wrapper/datasource_plugin_wrapper_v2.go rename to pkg/plugins/datasource_plugin_wrapper_v2.go index 2992fd6b8a1..33b3fb68585 100644 --- 
a/pkg/plugins/datasource/wrapper/datasource_plugin_wrapper_v2.go +++ b/pkg/plugins/datasource_plugin_wrapper_v2.go @@ -1,4 +1,4 @@ -package wrapper +package plugins import ( "context" @@ -10,11 +10,11 @@ import ( "github.com/grafana/grafana/pkg/components/simplejson" "github.com/grafana/grafana/pkg/infra/log" "github.com/grafana/grafana/pkg/models" + "github.com/grafana/grafana/pkg/plugins/adapters" "github.com/grafana/grafana/pkg/services/oauthtoken" - "github.com/grafana/grafana/pkg/tsdb" ) -func NewDatasourcePluginWrapperV2(log log.Logger, pluginId, pluginType string, client grpcplugin.DataClient) *DatasourcePluginWrapperV2 { +func newDataSourcePluginWrapperV2(log log.Logger, pluginId, pluginType string, client grpcplugin.DataClient) *DatasourcePluginWrapperV2 { return &DatasourcePluginWrapperV2{DataClient: client, logger: log, pluginId: pluginId, pluginType: pluginType} } @@ -25,30 +25,10 @@ type DatasourcePluginWrapperV2 struct { pluginType string } -func ModelToInstanceSettings(ds *models.DataSource) (*backend.DataSourceInstanceSettings, error) { - jsonDataBytes, err := ds.JsonData.MarshalJSON() +func (tw *DatasourcePluginWrapperV2) Query(ctx context.Context, ds *models.DataSource, query DataQuery) (DataResponse, error) { + instanceSettings, err := adapters.ModelToInstanceSettings(ds) if err != nil { - return nil, err - } - - return &backend.DataSourceInstanceSettings{ - ID: ds.Id, - Name: ds.Name, - URL: ds.Url, - Database: ds.Database, - User: ds.User, - BasicAuthEnabled: ds.BasicAuth, - BasicAuthUser: ds.BasicAuthUser, - JSONData: jsonDataBytes, - DecryptedSecureJSONData: ds.DecryptedValues(), - Updated: ds.Updated, - }, nil -} - -func (tw *DatasourcePluginWrapperV2) Query(ctx context.Context, ds *models.DataSource, query *tsdb.TsdbQuery) (*tsdb.Response, error) { - instanceSettings, err := ModelToInstanceSettings(ds) - if err != nil { - return nil, err + return DataResponse{}, err } if query.Headers == nil { @@ -66,7 +46,7 @@ func (tw *DatasourcePluginWrapperV2) Query(ctx context.Context, ds *models.DataS PluginContext: &pluginv2.PluginContext{ OrgId: ds.OrgId, PluginId: tw.pluginId, - User: backend.ToProto().User(BackendUserFromSignedInUser(query.User)), + User: backend.ToProto().User(adapters.BackendUserFromSignedInUser(query.User)), DataSourceInstanceSettings: backend.ToProto().DataSourceInstanceSettings(instanceSettings), }, Queries: []*pluginv2.DataQuery{}, @@ -76,12 +56,12 @@ func (tw *DatasourcePluginWrapperV2) Query(ctx context.Context, ds *models.DataS for _, q := range query.Queries { modelJSON, err := q.Model.MarshalJSON() if err != nil { - return nil, err + return DataResponse{}, err } pbQuery.Queries = append(pbQuery.Queries, &pluginv2.DataQuery{ Json: modelJSON, - IntervalMS: q.IntervalMs, - RefId: q.RefId, + IntervalMS: q.IntervalMS, + RefId: q.RefID, MaxDataPoints: q.MaxDataPoints, TimeRange: &pluginv2.TimeRange{ ToEpochMS: query.TimeRange.GetToAsMsEpoch(), @@ -93,17 +73,17 @@ func (tw *DatasourcePluginWrapperV2) Query(ctx context.Context, ds *models.DataS pbRes, err := tw.DataClient.QueryData(ctx, pbQuery) if err != nil { - return nil, err + return DataResponse{}, err } - tR := &tsdb.Response{ - Results: make(map[string]*tsdb.QueryResult, len(pbRes.Responses)), + tR := DataResponse{ + Results: make(map[string]DataQueryResult, len(pbRes.Responses)), } for refID, pRes := range pbRes.Responses { - qr := &tsdb.QueryResult{ - RefId: refID, - Dataframes: tsdb.NewEncodedDataFrames(pRes.Frames), + qr := DataQueryResult{ + RefID: refID, + Dataframes: 
NewEncodedDataFrames(pRes.Frames), } if len(pRes.JsonMeta) != 0 { qr.Meta = simplejson.NewFromAny(pRes.JsonMeta) @@ -117,17 +97,3 @@ func (tw *DatasourcePluginWrapperV2) Query(ctx context.Context, ds *models.DataS return tR, nil } - -// BackendUserFromSignedInUser converts Grafana's SignedInUser model -// to the backend plugin's model. -func BackendUserFromSignedInUser(su *models.SignedInUser) *backend.User { - if su == nil { - return nil - } - return &backend.User{ - Login: su.Login, - Name: su.Name, - Email: su.Email, - Role: string(su.OrgRole), - } -} diff --git a/pkg/plugins/error.go b/pkg/plugins/error.go index f2e58dc12c9..207d6c687ec 100644 --- a/pkg/plugins/error.go +++ b/pkg/plugins/error.go @@ -1,11 +1,5 @@ package plugins -const ( - signatureMissing ErrorCode = "signatureMissing" - signatureModified ErrorCode = "signatureModified" - signatureInvalid ErrorCode = "signatureInvalid" -) - type ErrorCode string type PluginError struct { diff --git a/pkg/plugins/frontend_plugin.go b/pkg/plugins/frontend_plugin.go index 89e8741507a..49b52d2115c 100644 --- a/pkg/plugins/frontend_plugin.go +++ b/pkg/plugins/frontend_plugin.go @@ -14,12 +14,15 @@ type FrontendPluginBase struct { PluginBase } -func (fp *FrontendPluginBase) initFrontendPlugin() { +func (fp *FrontendPluginBase) InitFrontendPlugin() []*PluginStaticRoute { + var staticRoutes []*PluginStaticRoute if isExternalPlugin(fp.PluginDir) { - StaticRoutes = append(StaticRoutes, &PluginStaticRoute{ - Directory: fp.PluginDir, - PluginId: fp.Id, - }) + staticRoutes = []*PluginStaticRoute{ + { + Directory: fp.PluginDir, + PluginId: fp.Id, + }, + } } fp.handleModuleDefaults() @@ -30,6 +33,8 @@ func (fp *FrontendPluginBase) initFrontendPlugin() { for i := 0; i < len(fp.Info.Screenshots); i++ { fp.Info.Screenshots[i].Path = evalRelativePluginUrlPath(fp.Info.Screenshots[i].Path, fp.BaseUrl) } + + return staticRoutes } func getPluginLogoUrl(pluginType, path, baseUrl string) string { diff --git a/pkg/plugins/dashboard_importer.go b/pkg/plugins/manager/dashboard_import.go similarity index 76% rename from pkg/plugins/dashboard_importer.go rename to pkg/plugins/manager/dashboard_import.go index ad565fdfa4d..429c64d7b4c 100644 --- a/pkg/plugins/dashboard_importer.go +++ b/pkg/plugins/manager/dashboard_import.go @@ -1,31 +1,19 @@ -package plugins +package manager import ( "encoding/json" "fmt" "regexp" - "github.com/grafana/grafana/pkg/bus" "github.com/grafana/grafana/pkg/components/simplejson" "github.com/grafana/grafana/pkg/models" + "github.com/grafana/grafana/pkg/plugins" "github.com/grafana/grafana/pkg/services/dashboards" + "github.com/grafana/grafana/pkg/tsdb/tsdbifaces" ) var varRegex = regexp.MustCompile(`(\$\{.+?\})`) -type ImportDashboardCommand struct { - Dashboard *simplejson.Json - Path string - Inputs []ImportDashboardInput - Overwrite bool - FolderId int64 - - OrgId int64 - User *models.SignedInUser - PluginId string - Result *PluginDashboardInfoDTO -} - type ImportDashboardInput struct { Type string `json:"type"` PluginId string `json:"pluginId"` @@ -41,58 +29,54 @@ func (e DashboardInputMissingError) Error() string { return fmt.Sprintf("Dashboard input variable: %v missing from import command", e.VariableName) } -func init() { - bus.AddHandler("plugins", ImportDashboard) -} - -func ImportDashboard(cmd *ImportDashboardCommand) error { +func (pm *PluginManager) ImportDashboard(pluginID, path string, orgID, folderID int64, dashboardModel *simplejson.Json, + overwrite bool, inputs []ImportDashboardInput, user *models.SignedInUser, + 
requestHandler tsdbifaces.RequestHandler) (plugins.PluginDashboardInfoDTO, error) { var dashboard *models.Dashboard - var err error - - if cmd.PluginId != "" { - if dashboard, err = loadPluginDashboard(cmd.PluginId, cmd.Path); err != nil { - return err + if pluginID != "" { + var err error + if dashboard, err = pm.LoadPluginDashboard(pluginID, path); err != nil { + return plugins.PluginDashboardInfoDTO{}, err } } else { - dashboard = models.NewDashboardFromJson(cmd.Dashboard) + dashboard = models.NewDashboardFromJson(dashboardModel) } evaluator := &DashTemplateEvaluator{ template: dashboard.Data, - inputs: cmd.Inputs, + inputs: inputs, } generatedDash, err := evaluator.Eval() if err != nil { - return err + return plugins.PluginDashboardInfoDTO{}, err } saveCmd := models.SaveDashboardCommand{ Dashboard: generatedDash, - OrgId: cmd.OrgId, - UserId: cmd.User.UserId, - Overwrite: cmd.Overwrite, - PluginId: cmd.PluginId, - FolderId: cmd.FolderId, + OrgId: orgID, + UserId: user.UserId, + Overwrite: overwrite, + PluginId: pluginID, + FolderId: folderID, } dto := &dashboards.SaveDashboardDTO{ - OrgId: cmd.OrgId, + OrgId: orgID, Dashboard: saveCmd.GetDashboardModel(), Overwrite: saveCmd.Overwrite, - User: cmd.User, + User: user, } - savedDash, err := dashboards.NewService().ImportDashboard(dto) - + savedDash, err := dashboards.NewService(requestHandler).ImportDashboard(dto) if err != nil { - return err + return plugins.PluginDashboardInfoDTO{}, err } - cmd.Result = &PluginDashboardInfoDTO{ - PluginId: cmd.PluginId, + return plugins.PluginDashboardInfoDTO{ + PluginId: pluginID, Title: savedDash.Title, - Path: cmd.Path, + Path: path, Revision: savedDash.Data.Get("revision").MustInt64(1), FolderId: savedDash.FolderId, ImportedUri: "db/" + savedDash.Slug, @@ -101,9 +85,7 @@ func ImportDashboard(cmd *ImportDashboardCommand) error { Imported: true, DashboardId: savedDash.Id, Slug: savedDash.Slug, - } - - return nil + }, nil } type DashTemplateEvaluator struct { diff --git a/pkg/plugins/manager/dashboard_import_test.go b/pkg/plugins/manager/dashboard_import_test.go new file mode 100644 index 00000000000..ea4a1c92b96 --- /dev/null +++ b/pkg/plugins/manager/dashboard_import_test.go @@ -0,0 +1,97 @@ +package manager + +import ( + "io/ioutil" + "testing" + + "github.com/grafana/grafana/pkg/components/simplejson" + "github.com/grafana/grafana/pkg/models" + "github.com/grafana/grafana/pkg/services/dashboards" + "github.com/grafana/grafana/pkg/setting" + "github.com/stretchr/testify/require" +) + +func TestDashboardImport(t *testing.T) { + pluginScenario(t, "When importing a plugin dashboard", func(t *testing.T, pm *PluginManager) { + origNewDashboardService := dashboards.NewService + t.Cleanup(func() { + dashboards.NewService = origNewDashboardService + }) + mock := &dashboards.FakeDashboardService{} + dashboards.MockDashboardService(mock) + + info, err := pm.ImportDashboard("test-app", "dashboards/connections.json", 1, 0, nil, false, + []ImportDashboardInput{ + {Name: "*", Type: "datasource", Value: "graphite"}, + }, &models.SignedInUser{UserId: 1, OrgRole: models.ROLE_ADMIN}, nil) + require.NoError(t, err) + require.NotNil(t, info) + + resultStr, err := mock.SavedDashboards[0].Dashboard.Data.EncodePretty() + require.NoError(t, err) + expectedBytes, err := ioutil.ReadFile("testdata/test-app/dashboards/connections_result.json") + require.NoError(t, err) + expectedJson, err := simplejson.NewJson(expectedBytes) + require.NoError(t, err) + expectedStr, err := expectedJson.EncodePretty() + require.NoError(t, err) + 
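With `ImportDashboardCommand` and its bus handler gone, callers now pass the former command fields as arguments and receive the DTO as a return value instead of reading `cmd.Result`. A minimal sketch of the new call shape, assuming an initialized `*PluginManager`, a signed-in user, and a `tsdbifaces.RequestHandler`; the helper name is hypothetical and the values mirror the test fixture below:

```go
package manager_test

import (
	"fmt"

	"github.com/grafana/grafana/pkg/models"
	"github.com/grafana/grafana/pkg/plugins/manager"
	"github.com/grafana/grafana/pkg/tsdb/tsdbifaces"
)

// importConnections shows the old bus-dispatch pattern rewritten as a
// direct method call: the result that used to be written into
// cmd.Result is now returned from the method itself.
func importConnections(pm *manager.PluginManager, user *models.SignedInUser,
	reqHandler tsdbifaces.RequestHandler) error {
	info, err := pm.ImportDashboard(
		"test-app",                    // pluginID; empty means import the given model instead
		"dashboards/connections.json", // path inside the plugin directory
		user.OrgId,                    // orgID
		0,                             // folderID
		nil,                           // dashboard model; unused when pluginID is set
		false,                         // overwrite
		[]manager.ImportDashboardInput{{Name: "*", Type: "datasource", Value: "graphite"}},
		user,
		reqHandler,
	)
	if err != nil {
		return err
	}
	fmt.Printf("imported %q at revision %d\n", info.Title, info.Revision)
	return nil
}
```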
+ require.Equal(t, expectedStr, resultStr) + + panel := mock.SavedDashboards[0].Dashboard.Data.Get("rows").GetIndex(0).Get("panels").GetIndex(0) + require.Equal(t, "graphite", panel.Get("datasource").MustString()) + }) + + t.Run("When evaling dashboard template", func(t *testing.T) { + template, err := simplejson.NewJson([]byte(`{ + "__inputs": [ + { + "name": "DS_NAME", + "type": "datasource" + } + ], + "test": { + "prop": "${DS_NAME}_${DS_NAME}" + } + }`)) + require.NoError(t, err) + + evaluator := &DashTemplateEvaluator{ + template: template, + inputs: []ImportDashboardInput{ + {Name: "*", Type: "datasource", Value: "my-server"}, + }, + } + + res, err := evaluator.Eval() + require.NoError(t, err) + + require.Equal(t, "my-server_my-server", res.GetPath("test", "prop").MustString()) + + inputs := res.Get("__inputs") + require.Nil(t, inputs.Interface()) + }) +} + +func pluginScenario(t *testing.T, desc string, fn func(*testing.T, *PluginManager)) { + t.Helper() + + t.Run("Given a plugin", func(t *testing.T) { + pm := &PluginManager{ + Cfg: &setting.Cfg{ + FeatureToggles: map[string]bool{}, + PluginSettings: setting.PluginSettings{ + "test-app": map[string]string{ + "path": "testdata/test-app", + }, + }, + }, + } + err := pm.Init() + require.NoError(t, err) + + t.Run(desc, func(t *testing.T) { + fn(t, pm) + }) + }) +} diff --git a/pkg/plugins/manager/dashboards.go b/pkg/plugins/manager/dashboards.go new file mode 100644 index 00000000000..4a6de3baf84 --- /dev/null +++ b/pkg/plugins/manager/dashboards.go @@ -0,0 +1,101 @@ +package manager + +import ( + "os" + "path/filepath" + + "github.com/grafana/grafana/pkg/bus" + "github.com/grafana/grafana/pkg/components/simplejson" + "github.com/grafana/grafana/pkg/models" + "github.com/grafana/grafana/pkg/plugins" +) + +func (pm *PluginManager) GetPluginDashboards(orgId int64, pluginId string) ([]*plugins.PluginDashboardInfoDTO, error) { + plugin, exists := Plugins[pluginId] + if !exists { + return nil, plugins.PluginNotFoundError{PluginID: pluginId} + } + + result := make([]*plugins.PluginDashboardInfoDTO, 0) + + // load current dashboards + query := models.GetDashboardsByPluginIdQuery{OrgId: orgId, PluginId: pluginId} + if err := bus.Dispatch(&query); err != nil { + return nil, err + } + + existingMatches := make(map[int64]bool) + for _, include := range plugin.Includes { + if include.Type != plugins.PluginTypeDashboard { + continue + } + + dashboard, err := pm.LoadPluginDashboard(plugin.Id, include.Path) + if err != nil { + return nil, err + } + + res := &plugins.PluginDashboardInfoDTO{} + res.Path = include.Path + res.PluginId = plugin.Id + res.Title = dashboard.Title + res.Revision = dashboard.Data.Get("revision").MustInt64(1) + + // find existing dashboard + for _, existingDash := range query.Result { + if existingDash.Slug == dashboard.Slug { + res.DashboardId = existingDash.Id + res.Imported = true + res.ImportedUri = "db/" + existingDash.Slug + res.ImportedUrl = existingDash.GetUrl() + res.ImportedRevision = existingDash.Data.Get("revision").MustInt64(1) + existingMatches[existingDash.Id] = true + } + } + + result = append(result, res) + } + + // find deleted dashboards + for _, dash := range query.Result { + if _, exists := existingMatches[dash.Id]; !exists { + result = append(result, &plugins.PluginDashboardInfoDTO{ + Slug: dash.Slug, + DashboardId: dash.Id, + Removed: true, + }) + } + } + + return result, nil +} + +func (pm *PluginManager) LoadPluginDashboard(pluginId, path string) (*models.Dashboard, error) { + plugin, exists := 
Plugins[pluginId] + if !exists { + return nil, plugins.PluginNotFoundError{PluginID: pluginId} + } + + dashboardFilePath := filepath.Join(plugin.PluginDir, path) + // nolint:gosec + // We can ignore the gosec G304 warning on this one because `plugin.PluginDir` is based + // on plugin folder structure on disk and not user input. `path` comes from the + // `plugin.json` configuration file for the loaded plugin + reader, err := os.Open(dashboardFilePath) + if err != nil { + return nil, err + } + + defer func() { + if err := reader.Close(); err != nil { + plog.Warn("Failed to close file", "path", dashboardFilePath, "err", err) + } + }() + + data, err := simplejson.NewFromReader(reader) + if err != nil { + return nil, err + } + + return models.NewDashboardFromJson(data), nil +} diff --git a/pkg/plugins/dashboards_test.go b/pkg/plugins/manager/dashboards_test.go similarity index 92% rename from pkg/plugins/dashboards_test.go rename to pkg/plugins/manager/dashboards_test.go index 91cb009367d..af04365ce55 100644 --- a/pkg/plugins/dashboards_test.go +++ b/pkg/plugins/manager/dashboards_test.go @@ -1,4 +1,4 @@ -package plugins +package manager import ( "testing" @@ -11,7 +11,7 @@ import ( ) func TestPluginDashboards(t *testing.T) { - Convey("When asking plugin dashboard info", t, func() { + Convey("When asking for plugin dashboard info", t, func() { pm := &PluginManager{ Cfg: &setting.Cfg{ FeatureToggles: map[string]bool{}, @@ -47,8 +47,7 @@ func TestPluginDashboards(t *testing.T) { return nil }) - dashboards, err := GetPluginDashboards(1, "test-app") - + dashboards, err := pm.GetPluginDashboards(1, "test-app") So(err, ShouldBeNil) Convey("should return 2 dashboards", func() { diff --git a/pkg/plugins/manager/errors.go b/pkg/plugins/manager/errors.go new file mode 100644 index 00000000000..f5543bf82d0 --- /dev/null +++ b/pkg/plugins/manager/errors.go @@ -0,0 +1,11 @@ +package manager + +import ( + "github.com/grafana/grafana/pkg/plugins" +) + +const ( + signatureMissing plugins.ErrorCode = "signatureMissing" + signatureModified plugins.ErrorCode = "signatureModified" + signatureInvalid plugins.ErrorCode = "signatureInvalid" +) diff --git a/pkg/plugins/manager/manager.go b/pkg/plugins/manager/manager.go new file mode 100644 index 00000000000..06b7ec48503 --- /dev/null +++ b/pkg/plugins/manager/manager.go @@ -0,0 +1,616 @@ +// Package manager contains plugin manager logic. 
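Before digging into `manager.go` below, one note on the dashboard methods above: because `GetPluginDashboards` and `LoadPluginDashboard` are now plain methods returning typed errors rather than bus handlers, HTTP-layer callers can branch on `plugins.PluginNotFoundError` with `errors.As`. A sketch under those assumptions; the function name is hypothetical:

```go
package manager_test

import (
	"errors"
	"fmt"

	"github.com/grafana/grafana/pkg/plugins"
	"github.com/grafana/grafana/pkg/plugins/manager"
)

// listPluginDashboards enumerates a plugin's bundled dashboards and
// reports whether each one is imported, out of date, or removed.
func listPluginDashboards(pm *manager.PluginManager, orgID int64, pluginID string) error {
	dashes, err := pm.GetPluginDashboards(orgID, pluginID)
	if err != nil {
		// PluginNotFoundError is a value type; errors.As takes a pointer target.
		var notFound plugins.PluginNotFoundError
		if errors.As(err, &notFound) {
			return fmt.Errorf("no such plugin: %q", notFound.PluginID)
		}
		return err
	}
	for _, d := range dashes {
		fmt.Printf("%s imported=%v removed=%v rev=%d/%d\n",
			d.Path, d.Imported, d.Removed, d.ImportedRevision, d.Revision)
	}
	return nil
}
```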
+package manager + +import ( + "context" + "encoding/json" + "errors" + "fmt" + "io/ioutil" + "os" + "path/filepath" + "reflect" + "runtime" + "strings" + "time" + + "github.com/grafana/grafana/pkg/infra/fs" + "github.com/grafana/grafana/pkg/infra/log" + "github.com/grafana/grafana/pkg/infra/metrics" + "github.com/grafana/grafana/pkg/models" + "github.com/grafana/grafana/pkg/plugins" + "github.com/grafana/grafana/pkg/plugins/backendplugin" + "github.com/grafana/grafana/pkg/registry" + "github.com/grafana/grafana/pkg/setting" + "github.com/grafana/grafana/pkg/util" + "github.com/grafana/grafana/pkg/util/errutil" +) + +var ( + DataSources map[string]*plugins.DataSourcePlugin + Panels map[string]*plugins.PanelPlugin + StaticRoutes []*plugins.PluginStaticRoute + Apps map[string]*plugins.AppPlugin + Plugins map[string]*plugins.PluginBase + PluginTypes map[string]interface{} + Renderer *plugins.RendererPlugin + + plog log.Logger +) + +type unsignedPluginConditionFunc = func(plugin *plugins.PluginBase) bool + +type PluginScanner struct { + pluginPath string + errors []error + backendPluginManager backendplugin.Manager + cfg *setting.Cfg + requireSigned bool + log log.Logger + plugins map[string]*plugins.PluginBase + allowUnsignedPluginsCondition unsignedPluginConditionFunc +} + +type PluginManager struct { + BackendPluginManager backendplugin.Manager `inject:""` + Cfg *setting.Cfg `inject:""` + log log.Logger + scanningErrors []error + + // AllowUnsignedPluginsCondition changes the policy for allowing unsigned plugins. Signature validation only runs when plugins are starting + // and running plugins will not be terminated if they violate the new policy. + AllowUnsignedPluginsCondition unsignedPluginConditionFunc + GrafanaLatestVersion string + GrafanaHasUpdate bool + pluginScanningErrors map[string]plugins.PluginError +} + +func init() { + registry.RegisterService(&PluginManager{}) +} + +func (pm *PluginManager) Init() error { + pm.log = log.New("plugins") + plog = log.New("plugins") + + DataSources = map[string]*plugins.DataSourcePlugin{} + StaticRoutes = []*plugins.PluginStaticRoute{} + Panels = map[string]*plugins.PanelPlugin{} + Apps = map[string]*plugins.AppPlugin{} + Plugins = map[string]*plugins.PluginBase{} + PluginTypes = map[string]interface{}{ + "panel": plugins.PanelPlugin{}, + "datasource": plugins.DataSourcePlugin{}, + "app": plugins.AppPlugin{}, + "renderer": plugins.RendererPlugin{}, + } + pm.pluginScanningErrors = map[string]plugins.PluginError{} + + pm.log.Info("Starting plugin search") + + plugDir := filepath.Join(pm.Cfg.StaticRootPath, "app/plugins") + pm.log.Debug("Scanning core plugin directory", "dir", plugDir) + if err := pm.scan(plugDir, false); err != nil { + return errutil.Wrapf(err, "failed to scan core plugin directory '%s'", plugDir) + } + + plugDir = pm.Cfg.BundledPluginsPath + pm.log.Debug("Scanning bundled plugins directory", "dir", plugDir) + exists, err := fs.Exists(plugDir) + if err != nil { + return err + } + if exists { + if err := pm.scan(plugDir, false); err != nil { + return errutil.Wrapf(err, "failed to scan bundled plugins directory '%s'", plugDir) + } + } + + // check if plugins dir exists + exists, err = fs.Exists(pm.Cfg.PluginsPath) + if err != nil { + return err + } + if !exists { + if err = os.MkdirAll(pm.Cfg.PluginsPath, os.ModePerm); err != nil { + pm.log.Error("failed to create external plugins directory", "dir", pm.Cfg.PluginsPath, "error", err) + } else { + pm.log.Info("External plugins directory created", "directory", pm.Cfg.PluginsPath) + } + 
} else { + pm.log.Debug("Scanning external plugins directory", "dir", pm.Cfg.PluginsPath) + if err := pm.scan(pm.Cfg.PluginsPath, true); err != nil { + return errutil.Wrapf(err, "failed to scan external plugins directory '%s'", + pm.Cfg.PluginsPath) + } + } + + if err := pm.scanPluginPaths(); err != nil { + return err + } + + for _, panel := range Panels { + staticRoutes := panel.InitFrontendPlugin() + StaticRoutes = append(StaticRoutes, staticRoutes...) + } + + for _, ds := range DataSources { + staticRoutes := ds.InitFrontendPlugin() + StaticRoutes = append(StaticRoutes, staticRoutes...) + } + + for _, app := range Apps { + staticRoutes := app.InitApp(Panels, DataSources) + StaticRoutes = append(StaticRoutes, staticRoutes...) + } + + if Renderer != nil { + staticRoutes := Renderer.InitFrontendPlugin() + StaticRoutes = append(StaticRoutes, staticRoutes...) + } + + for _, p := range Plugins { + if p.IsCorePlugin { + p.Signature = plugins.PluginSignatureInternal + } else { + metrics.SetPluginBuildInformation(p.Id, p.Type, p.Info.Version) + } + } + + return nil +} + +func (pm *PluginManager) Run(ctx context.Context) error { + pm.checkForUpdates() + + ticker := time.NewTicker(time.Minute * 10) + run := true + + for run { + select { + case <-ticker.C: + pm.checkForUpdates() + case <-ctx.Done(): + run = false + } + } + + return ctx.Err() +} + +// scanPluginPaths scans configured plugin paths. +func (pm *PluginManager) scanPluginPaths() error { + for pluginID, settings := range pm.Cfg.PluginSettings { + path, exists := settings["path"] + if !exists || path == "" { + continue + } + + if err := pm.scan(path, true); err != nil { + return errutil.Wrapf(err, "failed to scan directory configured for plugin '%s': '%s'", pluginID, path) + } + } + + return nil +} + +// scan a directory for plugins. +func (pm *PluginManager) scan(pluginDir string, requireSigned bool) error { + scanner := &PluginScanner{ + pluginPath: pluginDir, + backendPluginManager: pm.BackendPluginManager, + cfg: pm.Cfg, + requireSigned: requireSigned, + log: pm.log, + plugins: map[string]*plugins.PluginBase{}, + allowUnsignedPluginsCondition: pm.AllowUnsignedPluginsCondition, + } + + // 1st pass: Scan plugins, also mapping plugins to their respective directories + if err := util.Walk(pluginDir, true, true, scanner.walker); err != nil { + if errors.Is(err, os.ErrNotExist) { + pm.log.Debug("Couldn't scan directory since it doesn't exist", "pluginDir", pluginDir, "err", err) + return nil + } + if errors.Is(err, os.ErrPermission) { + pm.log.Debug("Couldn't scan directory due to lack of permissions", "pluginDir", pluginDir, "err", err) + return nil + } + if pluginDir != "data/plugins" { + pm.log.Warn("Could not scan dir", "pluginDir", pluginDir, "err", err) + } + return err + } + + pm.log.Debug("Initial plugin loading done") + + // 2nd pass: Validate and register plugins + for dpath, plugin := range scanner.plugins { + // Try to find any root plugin + ancestors := strings.Split(dpath, string(filepath.Separator)) + ancestors = ancestors[0 : len(ancestors)-1] + aPath := "" + if runtime.GOOS != "windows" && filepath.IsAbs(dpath) { + aPath = "/" + } + for _, a := range ancestors { + aPath = filepath.Join(aPath, a) + if root, ok := scanner.plugins[aPath]; ok { + plugin.Root = root + break + } + } + + pm.log.Debug("Found plugin", "id", plugin.Id, "signature", plugin.Signature, "hasRoot", plugin.Root != nil) + signingError := scanner.validateSignature(plugin) + if signingError != nil { + pm.log.Debug("Failed to validate plugin signature.
Will skip loading", "id", plugin.Id, + "signature", plugin.Signature, "status", signingError.ErrorCode) + pm.pluginScanningErrors[plugin.Id] = *signingError + continue + } + + pm.log.Debug("Attempting to add plugin", "id", plugin.Id) + + pluginGoType, exists := PluginTypes[plugin.Type] + if !exists { + return fmt.Errorf("unknown plugin type %q", plugin.Type) + } + + jsonFPath := filepath.Join(plugin.PluginDir, "plugin.json") + + // External plugins need a module.js file for SystemJS to load + if !strings.HasPrefix(jsonFPath, pm.Cfg.StaticRootPath) && !scanner.IsBackendOnlyPlugin(plugin.Type) { + module := filepath.Join(plugin.PluginDir, "module.js") + exists, err := fs.Exists(module) + if err != nil { + return err + } + if !exists { + scanner.log.Warn("Plugin missing module.js", + "name", plugin.Name, + "warning", "Missing module.js. If you loaded this plugin from git, make sure to compile it.", + "path", module) + } + } + + // nolint:gosec + // We can ignore the gosec G304 warning on this one because `jsonFPath` is based + // on the plugin folder structure on disk and not on user input. + reader, err := os.Open(jsonFPath) + if err != nil { + return err + } + defer func() { + if err := reader.Close(); err != nil { + scanner.log.Warn("Failed to close JSON file", "path", jsonFPath, "err", err) + } + }() + + jsonParser := json.NewDecoder(reader) + + loader := reflect.New(reflect.TypeOf(pluginGoType)).Interface().(plugins.PluginLoader) + + // Load the full plugin, and add it to the manager + if err := pm.loadPlugin(jsonParser, plugin, scanner, loader); err != nil { + return err + } + } + + if len(scanner.errors) > 0 { + pm.log.Warn("Some plugins failed to load", "errors", scanner.errors) + pm.scanningErrors = scanner.errors + } + + return nil +} + +func (pm *PluginManager) loadPlugin(jsonParser *json.Decoder, pluginBase *plugins.PluginBase, + scanner *PluginScanner, loader plugins.PluginLoader) error { + plug, err := loader.Load(jsonParser, pluginBase, scanner.backendPluginManager) + if err != nil { + return err + } + + var pb *plugins.PluginBase + switch p := plug.(type) { + case *plugins.DataSourcePlugin: + DataSources[p.Id] = p + pb = &p.PluginBase + case *plugins.PanelPlugin: + Panels[p.Id] = p + pb = &p.PluginBase + case *plugins.RendererPlugin: + Renderer = p + pb = &p.PluginBase + case *plugins.AppPlugin: + Apps[p.Id] = p + pb = &p.PluginBase + default: + panic(fmt.Sprintf("Unrecognized plugin type %T", plug)) + } + + if p, exists := Plugins[pb.Id]; exists { + pm.log.Warn("Plugin is duplicate", "id", pb.Id) + scanner.errors = append(scanner.errors, plugins.DuplicatePluginError{Plugin: pb, ExistingPlugin: p}) + return nil + } + + if !strings.HasPrefix(pluginBase.PluginDir, pm.Cfg.StaticRootPath) { + pm.log.Info("Registering plugin", "id", pb.Id) + } + + if len(pb.Dependencies.Plugins) == 0 { + pb.Dependencies.Plugins = []plugins.PluginDependencyItem{} + } + + if pb.Dependencies.GrafanaVersion == "" { + pb.Dependencies.GrafanaVersion = "*" + } + + for _, include := range pb.Includes { + if include.Role == "" { + include.Role = models.ROLE_VIEWER + } + } + + // Copy relevant fields from the base + pb.PluginDir = pluginBase.PluginDir + pb.Signature = pluginBase.Signature + pb.SignatureType = pluginBase.SignatureType + pb.SignatureOrg = pluginBase.SignatureOrg + + Plugins[pb.Id] = pb + pm.log.Debug("Successfully added plugin", "id", pb.Id) + return nil +} + +// GetDatasource returns a datasource based on the passed pluginID, if it exists +// +// This function fetches the datasource from the global
variable DataSources in this package. +// Rather than refactoring all dependencies on the global variable, we can use this as a transition. +func (pm *PluginManager) GetDatasource(pluginID string) (*plugins.DataSourcePlugin, bool) { + ds, exists := DataSources[pluginID] + return ds, exists +} + +func (s *PluginScanner) walker(currentPath string, f os.FileInfo, err error) error { + // We scan all the subfolders for plugin.json (with some exceptions) so that we also load embedded plugins; for + // example, the worldmap panel plugin at https://github.com/raintank/worldping-app/tree/master/dist/grafana-worldmap-panel + // is embedded in the worldping app. + if err != nil { + return fmt.Errorf("filepath.Walk reported an error for %q: %w", currentPath, err) + } + + if f.Name() == "node_modules" || f.Name() == "Chromium.app" { + return util.ErrWalkSkipDir + } + + if f.IsDir() { + return nil + } + + if f.Name() != "plugin.json" { + return nil + } + + if err := s.loadPlugin(currentPath); err != nil { + s.log.Error("Failed to load plugin", "error", err, "pluginPath", filepath.Dir(currentPath)) + s.errors = append(s.errors, err) + } + + return nil +} + +func (s *PluginScanner) loadPlugin(pluginJSONFilePath string) error { + s.log.Debug("Loading plugin", "path", pluginJSONFilePath) + currentDir := filepath.Dir(pluginJSONFilePath) + // nolint:gosec + // We can ignore the gosec G304 warning on this one because `pluginJSONFilePath` is based + // on the plugin folder structure on disk and not on user input. + reader, err := os.Open(pluginJSONFilePath) + if err != nil { + return err + } + defer func() { + if err := reader.Close(); err != nil { + s.log.Warn("Failed to close JSON file", "path", pluginJSONFilePath, "err", err) + } + }() + + jsonParser := json.NewDecoder(reader) + pluginCommon := plugins.PluginBase{} + if err := jsonParser.Decode(&pluginCommon); err != nil { + return err + } + + if pluginCommon.Id == "" || pluginCommon.Type == "" { + return errors.New("did not find type or id properties in plugin.json") + } + + pluginCommon.PluginDir = filepath.Dir(pluginJSONFilePath) + pluginCommon.Files, err = collectPluginFilesWithin(pluginCommon.PluginDir) + if err != nil { + s.log.Warn("Could not collect plugin file information in directory", "pluginID", pluginCommon.Id, "dir", pluginCommon.PluginDir) + return err + } + + signatureState, err := getPluginSignatureState(s.log, &pluginCommon) + if err != nil { + s.log.Warn("Could not get plugin signature state", "pluginID", pluginCommon.Id, "err", err) + return err + } + pluginCommon.Signature = signatureState.Status + pluginCommon.SignatureType = signatureState.Type + pluginCommon.SignatureOrg = signatureState.SigningOrg + + s.plugins[currentDir] = &pluginCommon + + return nil +} + +func (*PluginScanner) IsBackendOnlyPlugin(pluginType string) bool { + return pluginType == "renderer" +} +
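The first scan pass above only needs `id` and `type` from `plugin.json`; everything else is decoded later by the concrete `PluginLoader`. A self-contained sketch of that minimal contract (standalone illustration, not Grafana code):

```go
package main

import (
	"encoding/json"
	"errors"
	"fmt"
	"strings"
)

// checkPluginJSON mirrors the validation in PluginScanner.loadPlugin:
// a manifest without both id and type is rejected before any
// type-specific decoding happens.
func checkPluginJSON(raw string) error {
	var common struct {
		ID   string `json:"id"`
		Type string `json:"type"`
	}
	if err := json.NewDecoder(strings.NewReader(raw)).Decode(&common); err != nil {
		return err
	}
	if common.ID == "" || common.Type == "" {
		return errors.New("did not find type or id properties in plugin.json")
	}
	return nil
}

func main() {
	fmt.Println(checkPluginJSON(`{"id": "my-panel", "type": "panel"}`)) // <nil>
	fmt.Println(checkPluginJSON(`{"name": "incomplete"}`))             // error
}
```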
+// validateSignature validates a plugin's signature. +func (s *PluginScanner) validateSignature(plugin *plugins.PluginBase) *plugins.PluginError { + if plugin.Signature == plugins.PluginSignatureValid { + s.log.Debug("Plugin has valid signature", "id", plugin.Id) + return nil + } + + if plugin.Root != nil { + // If a descendant plugin has an invalid signature, set its signature to that of the root + if plugin.IsCorePlugin || plugin.Signature == plugins.PluginSignatureInternal { + s.log.Debug("Not setting descendant plugin's signature to that of root since it's core or internal", + "plugin", plugin.Id, "signature", plugin.Signature, "isCore", plugin.IsCorePlugin) + } else { + s.log.Debug("Setting descendant plugin's signature to that of root", "plugin", plugin.Id, + "root", plugin.Root.Id, "signature", plugin.Signature, "rootSignature", plugin.Root.Signature) + plugin.Signature = plugin.Root.Signature + if plugin.Signature == plugins.PluginSignatureValid { + s.log.Debug("Plugin has valid signature (inherited from root)", "id", plugin.Id) + return nil + } + } + } else { + s.log.Debug("Non-valid plugin signature", "pluginID", plugin.Id, "pluginDir", plugin.PluginDir, + "state", plugin.Signature) + } + + // For the time being, we choose to only require back-end plugins to be signed + // NOTE: the state is calculated again when setting metadata on the object + if !plugin.Backend || !s.requireSigned { + return nil + } + + switch plugin.Signature { + case plugins.PluginSignatureUnsigned: + if allowed := s.allowUnsigned(plugin); !allowed { + s.log.Debug("Plugin is unsigned", "id", plugin.Id) + s.errors = append(s.errors, fmt.Errorf("plugin %q is unsigned", plugin.Id)) + return &plugins.PluginError{ + ErrorCode: signatureMissing, + } + } + s.log.Warn("Running an unsigned backend plugin", "pluginID", plugin.Id, "pluginDir", + plugin.PluginDir) + return nil + case plugins.PluginSignatureInvalid: + s.log.Debug("Plugin has an invalid signature", "id", plugin.Id) + s.errors = append(s.errors, fmt.Errorf("plugin %q has an invalid signature", plugin.Id)) + return &plugins.PluginError{ + ErrorCode: signatureInvalid, + } + case plugins.PluginSignatureModified: + s.log.Debug("Plugin has a modified signature", "id", plugin.Id) + s.errors = append(s.errors, fmt.Errorf("plugin %q's signature has been modified", plugin.Id)) + return &plugins.PluginError{ + ErrorCode: signatureModified, + } + default: + panic(fmt.Sprintf("Plugin %q has unrecognized plugin signature state %q", plugin.Id, plugin.Signature)) + } +} + +func (s *PluginScanner) allowUnsigned(plugin *plugins.PluginBase) bool { + if s.allowUnsignedPluginsCondition != nil { + return s.allowUnsignedPluginsCondition(plugin) + } + + if s.cfg.Env == setting.Dev { + return true + } + + for _, plug := range s.cfg.PluginsAllowUnsigned { + if plug == plugin.Id { + return true + } + } + + return false +} +
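`allowUnsigned` falls through from the injected condition to the dev-environment check and then to `cfg.PluginsAllowUnsigned`, so embedders can whitelist one specific unsigned backend plugin without loosening the global config. A sketch assuming the usual `Cfg` and `BackendPluginManager` wiring happens before `Init`; the plugin ID below is hypothetical:

```go
package manager_test

import (
	"github.com/grafana/grafana/pkg/plugins"
	"github.com/grafana/grafana/pkg/plugins/manager"
)

// newPermissivePluginManager allows exactly one unsigned plugin through
// signature validation; all other unsigned backend plugins still fail
// with the signatureMissing error code.
func newPermissivePluginManager() *manager.PluginManager {
	return &manager.PluginManager{
		AllowUnsignedPluginsCondition: func(p *plugins.PluginBase) bool {
			return p.Id == "my-internal-datasource"
		},
		// Cfg and BackendPluginManager are assumed to be injected
		// (or set explicitly, as the tests in this PR do) before Init.
	}
}
```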
+// ScanningErrors returns plugin scanning errors encountered. +func (pm *PluginManager) ScanningErrors() []plugins.PluginError { + scanningErrs := make([]plugins.PluginError, 0) + for id, e := range pm.pluginScanningErrors { + scanningErrs = append(scanningErrs, plugins.PluginError{ + ErrorCode: e.ErrorCode, + PluginID: id, + }) + } + return scanningErrs +} + +func (pm *PluginManager) GetPluginMarkdown(pluginId string, name string) ([]byte, error) { + plug, exists := Plugins[pluginId] + if !exists { + return nil, plugins.PluginNotFoundError{PluginID: pluginId} + } + + // nolint:gosec + // We can ignore the gosec G304 warning on this one because `plug.PluginDir` is based + // on the plugin folder structure on disk and not on user input. + path := filepath.Join(plug.PluginDir, fmt.Sprintf("%s.md", strings.ToUpper(name))) + exists, err := fs.Exists(path) + if err != nil { + return nil, err + } + if !exists { + path = filepath.Join(plug.PluginDir, fmt.Sprintf("%s.md", strings.ToLower(name))) + } + + exists, err = fs.Exists(path) + if err != nil { + return nil, err + } + if !exists { + return make([]byte, 0), nil + } + + // nolint:gosec + // We can ignore the gosec G304 warning on this one because `plug.PluginDir` is based + // on the plugin folder structure on disk and not on user input. + data, err := ioutil.ReadFile(path) + if err != nil { + return nil, err + } + return data, nil +} + +// collectPluginFilesWithin gets the plugin filenames that require verification for plugin signing. +func collectPluginFilesWithin(rootDir string) ([]string, error) { + var files []string + err := filepath.Walk(rootDir, func(path string, info os.FileInfo, err error) error { + if err != nil { + return err + } + if !info.IsDir() && info.Name() != "MANIFEST.txt" { + file, err := filepath.Rel(rootDir, path) + if err != nil { + return err + } + files = append(files, filepath.ToSlash(file)) + } + return nil + }) + return files, err +} + +// GetDataPlugin gets a DataPlugin with a certain name. If none is found, nil is returned. +func (pm *PluginManager) GetDataPlugin(pluginID string) plugins.DataPlugin { + if p, exists := DataSources[pluginID]; exists && p.CanHandleDataQueries() { + return p + } + + // XXX: Might other plugins implement DataPlugin?
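+ // Note: only data source plugins and backend plugins are known to
+ // implement DataPlugin at this point. The fallback below asks the
+ // backend plugin manager and yields nil when the plugin is not
+ // registered there either, so callers must handle a nil result.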
+ + p := pm.BackendPluginManager.GetDataPlugin(pluginID) + if p != nil { + return p.(plugins.DataPlugin) + } + + return nil +} diff --git a/pkg/plugins/plugins_test.go b/pkg/plugins/manager/manager_test.go similarity index 94% rename from pkg/plugins/plugins_test.go rename to pkg/plugins/manager/manager_test.go index 3ea2caa527b..b142bbb2792 100644 --- a/pkg/plugins/plugins_test.go +++ b/pkg/plugins/manager/manager_test.go @@ -1,4 +1,4 @@ -package plugins +package manager import ( "context" @@ -9,6 +9,7 @@ import ( "github.com/grafana/grafana-plugin-sdk-go/backend" "github.com/grafana/grafana/pkg/models" + "github.com/grafana/grafana/pkg/plugins" "github.com/grafana/grafana/pkg/plugins/backendplugin" "github.com/grafana/grafana/pkg/setting" "github.com/stretchr/testify/assert" @@ -17,7 +18,7 @@ import ( ) func TestPluginManager_Init(t *testing.T) { - staticRootPath, err := filepath.Abs("../../public/") + staticRootPath, err := filepath.Abs("../../../public/") require.NoError(t, err) origRootPath := setting.StaticRootPath @@ -132,7 +133,7 @@ func TestPluginManager_Init(t *testing.T) { require.NoError(t, err) assert.Len(t, pm.scanningErrors, 1) - assert.True(t, errors.Is(pm.scanningErrors[0], duplicatePluginError{})) + assert.True(t, errors.Is(pm.scanningErrors[0], plugins.DuplicatePluginError{})) }) t.Run("With external back-end plugin with valid v2 signature", func(t *testing.T) { @@ -152,8 +153,8 @@ func TestPluginManager_Init(t *testing.T) { assert.Equal(t, "Test", Plugins[pluginId].Name) assert.Equal(t, pluginId, Plugins[pluginId].Id) assert.Equal(t, "1.0.0", Plugins[pluginId].Info.Version) - assert.Equal(t, pluginSignatureValid, Plugins[pluginId].Signature) - assert.Equal(t, grafanaType, Plugins[pluginId].SignatureType) + assert.Equal(t, plugins.PluginSignatureValid, Plugins[pluginId].Signature) + assert.Equal(t, plugins.GrafanaType, Plugins[pluginId].SignatureType) assert.Equal(t, "Grafana Labs", Plugins[pluginId].SignatureOrg) assert.False(t, Plugins[pluginId].IsCorePlugin) }) @@ -200,8 +201,8 @@ func TestPluginManager_Init(t *testing.T) { assert.Equal(t, "Test", Plugins[pluginId].Name) assert.Equal(t, pluginId, Plugins[pluginId].Id) assert.Equal(t, "1.0.0", Plugins[pluginId].Info.Version) - assert.Equal(t, pluginSignatureValid, Plugins[pluginId].Signature) - assert.Equal(t, privateType, Plugins[pluginId].SignatureType) + assert.Equal(t, plugins.PluginSignatureValid, Plugins[pluginId].Signature) + assert.Equal(t, plugins.PrivateType, Plugins[pluginId].SignatureType) assert.Equal(t, "Will Browne", Plugins[pluginId].SignatureOrg) assert.False(t, Plugins[pluginId].IsCorePlugin) }) @@ -266,6 +267,8 @@ func TestPluginManager_IsBackendOnlyPlugin(t *testing.T) { } type fakeBackendPluginManager struct { + backendplugin.Manager + registeredPlugins []string } diff --git a/pkg/plugins/manifest.go b/pkg/plugins/manager/manifest.go similarity index 79% rename from pkg/plugins/manifest.go rename to pkg/plugins/manager/manifest.go index 1676d2e07ae..31108b0a27a 100644 --- a/pkg/plugins/manifest.go +++ b/pkg/plugins/manager/manifest.go @@ -1,4 +1,4 @@ -package plugins +package manager import ( "bytes" @@ -14,6 +14,7 @@ import ( "strings" "github.com/grafana/grafana/pkg/infra/log" + "github.com/grafana/grafana/pkg/plugins" "github.com/grafana/grafana/pkg/setting" "github.com/grafana/grafana/pkg/util/errutil" @@ -56,11 +57,11 @@ type pluginManifest struct { Files map[string]string `json:"files"` // V2 supported fields - ManifestVersion string `json:"manifestVersion"` - SignatureType 
PluginSignatureType `json:"signatureType"` - SignedByOrg string `json:"signedByOrg"` - SignedByOrgName string `json:"signedByOrgName"` - RootURLs []string `json:"rootUrls"` + ManifestVersion string `json:"manifestVersion"` + SignatureType plugins.PluginSignatureType `json:"signatureType"` + SignedByOrg string `json:"signedByOrg"` + SignedByOrgName string `json:"signedByOrgName"` + RootURLs []string `json:"rootUrls"` } func (m *pluginManifest) isV2() bool { @@ -97,7 +98,7 @@ func readPluginManifest(body []byte) (*pluginManifest, error) { } // getPluginSignatureState returns the signature state for a plugin. -func getPluginSignatureState(log log.Logger, plugin *PluginBase) (PluginSignatureState, error) { +func getPluginSignatureState(log log.Logger, plugin *plugins.PluginBase) (plugins.PluginSignatureState, error) { log.Debug("Getting signature state of plugin", "plugin", plugin.Id, "isBackend", plugin.Backend) manifestPath := filepath.Join(plugin.PluginDir, "MANIFEST.txt") @@ -107,31 +108,31 @@ func getPluginSignatureState(log log.Logger, plugin *PluginBase) (PluginSignatur byteValue, err := ioutil.ReadFile(manifestPath) if err != nil || len(byteValue) < 10 { log.Debug("Plugin is unsigned", "id", plugin.Id) - return PluginSignatureState{ - Status: pluginSignatureUnsigned, + return plugins.PluginSignatureState{ + Status: plugins.PluginSignatureUnsigned, }, nil } manifest, err := readPluginManifest(byteValue) if err != nil { log.Debug("Plugin signature invalid", "id", plugin.Id) - return PluginSignatureState{ - Status: pluginSignatureInvalid, + return plugins.PluginSignatureState{ + Status: plugins.PluginSignatureInvalid, }, nil } // Make sure the versions all match if manifest.Plugin != plugin.Id || manifest.Version != plugin.Info.Version { - return PluginSignatureState{ - Status: pluginSignatureModified, + return plugins.PluginSignatureState{ + Status: plugins.PluginSignatureModified, }, nil } // Validate that private is running within defined root URLs - if manifest.SignatureType == privateType { + if manifest.SignatureType == plugins.PrivateType { appURL, err := url.Parse(setting.AppUrl) if err != nil { - return PluginSignatureState{}, err + return plugins.PluginSignatureState{}, err } foundMatch := false @@ -139,7 +140,7 @@ func getPluginSignatureState(log log.Logger, plugin *PluginBase) (PluginSignatur rootURL, err := url.Parse(u) if err != nil { log.Warn("Could not parse plugin root URL", "plugin", plugin.Id, "rootUrl", rootURL) - return PluginSignatureState{}, err + return plugins.PluginSignatureState{}, err } if rootURL.Scheme == appURL.Scheme && rootURL.Host == appURL.Host && @@ -150,9 +151,10 @@ func getPluginSignatureState(log log.Logger, plugin *PluginBase) (PluginSignatur } if !foundMatch { - log.Warn("Could not find root URL that matches running application URL", "plugin", plugin.Id, "appUrl", appURL, "rootUrls", manifest.RootURLs) - return PluginSignatureState{ - Status: pluginSignatureInvalid, + log.Warn("Could not find root URL that matches running application URL", "plugin", plugin.Id, + "appUrl", appURL, "rootUrls", manifest.RootURLs) + return plugins.PluginSignatureState{ + Status: plugins.PluginSignatureInvalid, }, nil } } @@ -171,8 +173,8 @@ func getPluginSignatureState(log log.Logger, plugin *PluginBase) (PluginSignatur f, err := os.Open(fp) if err != nil { log.Warn("Plugin file listed in the manifest was not found", "plugin", plugin.Id, "filename", p, "dir", plugin.PluginDir) - return PluginSignatureState{ - Status: pluginSignatureModified, + return 
plugins.PluginSignatureState{ + Status: plugins.PluginSignatureModified, }, nil } defer func() { @@ -184,15 +186,15 @@ func getPluginSignatureState(log log.Logger, plugin *PluginBase) (PluginSignatur h := sha256.New() if _, err := io.Copy(h, f); err != nil { log.Warn("Couldn't read plugin file", "plugin", plugin.Id, "filename", fp) - return PluginSignatureState{ - Status: pluginSignatureModified, + return plugins.PluginSignatureState{ + Status: plugins.PluginSignatureModified, }, nil } sum := hex.EncodeToString(h.Sum(nil)) if sum != hash { log.Warn("Plugin file's signature has been modified versus manifest", "plugin", plugin.Id, "filename", fp) - return PluginSignatureState{ - Status: pluginSignatureModified, + return plugins.PluginSignatureState{ + Status: plugins.PluginSignatureModified, }, nil } manifestFiles[p] = true @@ -209,16 +211,16 @@ func getPluginSignatureState(log log.Logger, plugin *PluginBase) (PluginSignatur if len(unsignedFiles) > 0 { log.Warn("The following files were not included in the signature", "plugin", plugin.Id, "files", unsignedFiles) - return PluginSignatureState{ - Status: pluginSignatureModified, + return plugins.PluginSignatureState{ + Status: plugins.PluginSignatureModified, }, nil } } // Everything OK log.Debug("Plugin signature valid", "id", plugin.Id) - return PluginSignatureState{ - Status: pluginSignatureValid, + return plugins.PluginSignatureState{ + Status: plugins.PluginSignatureValid, Type: manifest.SignatureType, SigningOrg: manifest.SignedByOrgName, }, nil diff --git a/pkg/plugins/manifest_test.go b/pkg/plugins/manager/manifest_test.go similarity index 96% rename from pkg/plugins/manifest_test.go rename to pkg/plugins/manager/manifest_test.go index 4f6d8b14329..80e20d340fb 100644 --- a/pkg/plugins/manifest_test.go +++ b/pkg/plugins/manager/manifest_test.go @@ -1,10 +1,11 @@ -package plugins +package manager import ( "sort" "strings" "testing" + "github.com/grafana/grafana/pkg/plugins" "github.com/stretchr/testify/assert" "github.com/stretchr/testify/require" ) @@ -104,7 +105,7 @@ khdr/tZ1PDgRxMqB/u+Vtbpl0xSxgblnrDOYMSI= assert.Equal(t, int64(1605807018050), manifest.Time) assert.Equal(t, "7e4d0c6a708866e7", manifest.KeyID) assert.Equal(t, "2.0.0", manifest.ManifestVersion) - assert.Equal(t, privateType, manifest.SignatureType) + assert.Equal(t, plugins.PrivateType, manifest.SignatureType) assert.Equal(t, "willbrowne", manifest.SignedByOrg) assert.Equal(t, "Will Browne", manifest.SignedByOrgName) assert.Equal(t, []string{"http://localhost:3000/"}, manifest.RootURLs) diff --git a/pkg/plugins/queries.go b/pkg/plugins/manager/queries.go similarity index 65% rename from pkg/plugins/queries.go rename to pkg/plugins/manager/queries.go index ef92b984d19..6d101ba0ce5 100644 --- a/pkg/plugins/queries.go +++ b/pkg/plugins/manager/queries.go @@ -1,13 +1,13 @@ -package plugins +package manager import ( "github.com/grafana/grafana/pkg/bus" "github.com/grafana/grafana/pkg/models" + "github.com/grafana/grafana/pkg/plugins" ) -func GetPluginSettings(orgId int64) (map[string]*models.PluginSettingInfoDTO, error) { +func (pm *PluginManager) GetPluginSettings(orgId int64) (map[string]*models.PluginSettingInfoDTO, error) { query := models.GetPluginSettingsQuery{OrgId: orgId} - if err := bus.Dispatch(&query); err != nil { return nil, err } @@ -52,39 +52,39 @@ func GetPluginSettings(orgId int64) (map[string]*models.PluginSettingInfoDTO, er return pluginMap, nil } -func GetEnabledPlugins(orgId int64) (*EnabledPlugins, error) { - enabledPlugins := NewEnabledPlugins() - 
pluginSettingMap, err := GetPluginSettings(orgId) +func (pm *PluginManager) GetEnabledPlugins(orgID int64) (*plugins.EnabledPlugins, error) { + enabledPlugins := &plugins.EnabledPlugins{ + Panels: make([]*plugins.PanelPlugin, 0), + DataSources: make(map[string]*plugins.DataSourcePlugin), + Apps: make([]*plugins.AppPlugin, 0), + } + + pluginSettingMap, err := pm.GetPluginSettings(orgID) if err != nil { - return nil, err + return enabledPlugins, err } - isPluginEnabled := func(pluginId string) bool { - _, ok := pluginSettingMap[pluginId] - return ok - } - - for pluginId, app := range Apps { - if b, ok := pluginSettingMap[pluginId]; ok { + for pluginID, app := range Apps { + if b, ok := pluginSettingMap[pluginID]; ok { app.Pinned = b.Pinned enabledPlugins.Apps = append(enabledPlugins.Apps, app) } } // add all plugins that are not part of an App. - for dsId, ds := range DataSources { - if isPluginEnabled(ds.Id) { - enabledPlugins.DataSources[dsId] = ds + for dsID, ds := range DataSources { + if _, exists := pluginSettingMap[ds.Id]; exists { + enabledPlugins.DataSources[dsID] = ds } } for _, panel := range Panels { - if isPluginEnabled(panel.Id) { + if _, exists := pluginSettingMap[panel.Id]; exists { enabledPlugins.Panels = append(enabledPlugins.Panels, panel) } } - return &enabledPlugins, nil + return enabledPlugins, nil } // IsAppInstalled checks if an app plugin with provided plugin ID is installed. diff --git a/pkg/plugins/testdata/duplicate-plugins/nested/nested/plugin.json b/pkg/plugins/manager/testdata/duplicate-plugins/nested/nested/plugin.json similarity index 100% rename from pkg/plugins/testdata/duplicate-plugins/nested/nested/plugin.json rename to pkg/plugins/manager/testdata/duplicate-plugins/nested/nested/plugin.json diff --git a/pkg/plugins/testdata/duplicate-plugins/nested/plugin.json b/pkg/plugins/manager/testdata/duplicate-plugins/nested/plugin.json similarity index 100% rename from pkg/plugins/testdata/duplicate-plugins/nested/plugin.json rename to pkg/plugins/manager/testdata/duplicate-plugins/nested/plugin.json diff --git a/pkg/plugins/testdata/invalid-v1-signature/plugin/MANIFEST.txt b/pkg/plugins/manager/testdata/invalid-v1-signature/plugin/MANIFEST.txt similarity index 100% rename from pkg/plugins/testdata/invalid-v1-signature/plugin/MANIFEST.txt rename to pkg/plugins/manager/testdata/invalid-v1-signature/plugin/MANIFEST.txt diff --git a/pkg/plugins/testdata/invalid-v1-signature/plugin/plugin.json b/pkg/plugins/manager/testdata/invalid-v1-signature/plugin/plugin.json similarity index 100% rename from pkg/plugins/testdata/invalid-v1-signature/plugin/plugin.json rename to pkg/plugins/manager/testdata/invalid-v1-signature/plugin/plugin.json diff --git a/pkg/plugins/testdata/invalid-v2-signature-2/plugin/MANIFEST.txt b/pkg/plugins/manager/testdata/invalid-v2-signature-2/plugin/MANIFEST.txt similarity index 100% rename from pkg/plugins/testdata/invalid-v2-signature-2/plugin/MANIFEST.txt rename to pkg/plugins/manager/testdata/invalid-v2-signature-2/plugin/MANIFEST.txt diff --git a/pkg/plugins/testdata/invalid-v2-signature-2/plugin/extraFile b/pkg/plugins/manager/testdata/invalid-v2-signature-2/plugin/extraFile similarity index 100% rename from pkg/plugins/testdata/invalid-v2-signature-2/plugin/extraFile rename to pkg/plugins/manager/testdata/invalid-v2-signature-2/plugin/extraFile diff --git a/pkg/plugins/testdata/invalid-v2-signature-2/plugin/plugin.json b/pkg/plugins/manager/testdata/invalid-v2-signature-2/plugin/plugin.json similarity index 100% rename from 
pkg/plugins/testdata/invalid-v2-signature-2/plugin/plugin.json rename to pkg/plugins/manager/testdata/invalid-v2-signature-2/plugin/plugin.json diff --git a/pkg/plugins/testdata/invalid-v2-signature/plugin/MANIFEST.txt b/pkg/plugins/manager/testdata/invalid-v2-signature/plugin/MANIFEST.txt similarity index 100% rename from pkg/plugins/testdata/invalid-v2-signature/plugin/MANIFEST.txt rename to pkg/plugins/manager/testdata/invalid-v2-signature/plugin/MANIFEST.txt diff --git a/pkg/plugins/testdata/invalid-v2-signature/plugin/plugin.json b/pkg/plugins/manager/testdata/invalid-v2-signature/plugin/plugin.json similarity index 100% rename from pkg/plugins/testdata/invalid-v2-signature/plugin/plugin.json rename to pkg/plugins/manager/testdata/invalid-v2-signature/plugin/plugin.json diff --git a/pkg/plugins/testdata/lacking-files/plugin/MANIFEST.txt b/pkg/plugins/manager/testdata/lacking-files/plugin/MANIFEST.txt similarity index 100% rename from pkg/plugins/testdata/lacking-files/plugin/MANIFEST.txt rename to pkg/plugins/manager/testdata/lacking-files/plugin/MANIFEST.txt diff --git a/pkg/plugins/testdata/lacking-files/plugin/plugin.json b/pkg/plugins/manager/testdata/lacking-files/plugin/plugin.json similarity index 100% rename from pkg/plugins/testdata/lacking-files/plugin/plugin.json rename to pkg/plugins/manager/testdata/lacking-files/plugin/plugin.json diff --git a/pkg/plugins/testdata/test-app/dashboards/connections.json b/pkg/plugins/manager/testdata/test-app/dashboards/connections.json similarity index 100% rename from pkg/plugins/testdata/test-app/dashboards/connections.json rename to pkg/plugins/manager/testdata/test-app/dashboards/connections.json diff --git a/pkg/plugins/testdata/test-app/dashboards/connections_result.json b/pkg/plugins/manager/testdata/test-app/dashboards/connections_result.json similarity index 100% rename from pkg/plugins/testdata/test-app/dashboards/connections_result.json rename to pkg/plugins/manager/testdata/test-app/dashboards/connections_result.json diff --git a/pkg/plugins/testdata/test-app/dashboards/memory.json b/pkg/plugins/manager/testdata/test-app/dashboards/memory.json similarity index 100% rename from pkg/plugins/testdata/test-app/dashboards/memory.json rename to pkg/plugins/manager/testdata/test-app/dashboards/memory.json diff --git a/pkg/plugins/testdata/test-app/plugin.json b/pkg/plugins/manager/testdata/test-app/plugin.json similarity index 100% rename from pkg/plugins/testdata/test-app/plugin.json rename to pkg/plugins/manager/testdata/test-app/plugin.json diff --git a/pkg/plugins/testdata/unsigned/plugin/plugin.json b/pkg/plugins/manager/testdata/unsigned/plugin/plugin.json similarity index 100% rename from pkg/plugins/testdata/unsigned/plugin/plugin.json rename to pkg/plugins/manager/testdata/unsigned/plugin/plugin.json diff --git a/pkg/plugins/testdata/valid-v2-pvt-signature/plugin/MANIFEST.txt b/pkg/plugins/manager/testdata/valid-v2-pvt-signature/plugin/MANIFEST.txt similarity index 100% rename from pkg/plugins/testdata/valid-v2-pvt-signature/plugin/MANIFEST.txt rename to pkg/plugins/manager/testdata/valid-v2-pvt-signature/plugin/MANIFEST.txt diff --git a/pkg/plugins/testdata/valid-v2-pvt-signature/plugin/plugin.json b/pkg/plugins/manager/testdata/valid-v2-pvt-signature/plugin/plugin.json similarity index 100% rename from pkg/plugins/testdata/valid-v2-pvt-signature/plugin/plugin.json rename to pkg/plugins/manager/testdata/valid-v2-pvt-signature/plugin/plugin.json diff --git a/pkg/plugins/testdata/valid-v2-signature/plugin/MANIFEST.txt 
b/pkg/plugins/manager/testdata/valid-v2-signature/plugin/MANIFEST.txt similarity index 100% rename from pkg/plugins/testdata/valid-v2-signature/plugin/MANIFEST.txt rename to pkg/plugins/manager/testdata/valid-v2-signature/plugin/MANIFEST.txt diff --git a/pkg/plugins/testdata/valid-v2-signature/plugin/plugin.json b/pkg/plugins/manager/testdata/valid-v2-signature/plugin/plugin.json similarity index 100% rename from pkg/plugins/testdata/valid-v2-signature/plugin/plugin.json rename to pkg/plugins/manager/testdata/valid-v2-signature/plugin/plugin.json diff --git a/pkg/plugins/update_checker.go b/pkg/plugins/manager/update_checker.go similarity index 98% rename from pkg/plugins/update_checker.go rename to pkg/plugins/manager/update_checker.go index 0209975117f..589dff03c5c 100644 --- a/pkg/plugins/update_checker.go +++ b/pkg/plugins/manager/update_checker.go @@ -1,4 +1,4 @@ -package plugins +package manager import ( "encoding/json" @@ -40,7 +40,7 @@ func getAllExternalPluginSlugs() string { } func (pm *PluginManager) checkForUpdates() { - if !setting.CheckForUpdates { + if !pm.Cfg.CheckForUpdates { return } diff --git a/pkg/plugins/models.go b/pkg/plugins/models.go index 734eeb0b943..4c1f2dea677 100644 --- a/pkg/plugins/models.go +++ b/pkg/plugins/models.go @@ -3,55 +3,16 @@ package plugins import ( "encoding/json" "fmt" - "strings" "github.com/grafana/grafana/pkg/models" "github.com/grafana/grafana/pkg/plugins/backendplugin" - "github.com/grafana/grafana/pkg/setting" ) -var ( +const ( PluginTypeApp = "app" PluginTypeDashboard = "dashboard" ) -type PluginState string - -var ( - PluginStateAlpha PluginState = "alpha" -) - -type PluginSignatureState struct { - Status PluginSignatureStatus - Type PluginSignatureType - SigningOrg string -} - -type PluginSignatureStatus string - -func (pss PluginSignatureStatus) IsValid() bool { - return pss == pluginSignatureValid -} - -func (pss PluginSignatureStatus) IsInternal() bool { - return pss == pluginSignatureInternal -} - -const ( - pluginSignatureInternal PluginSignatureStatus = "internal" // core plugin, no signature - pluginSignatureValid PluginSignatureStatus = "valid" // signed and accurate MANIFEST - pluginSignatureInvalid PluginSignatureStatus = "invalid" // invalid signature - pluginSignatureModified PluginSignatureStatus = "modified" // valid signature, but content mismatch - pluginSignatureUnsigned PluginSignatureStatus = "unsigned" // no MANIFEST file -) - -type PluginSignatureType string - -const ( - grafanaType PluginSignatureType = "grafana" - privateType PluginSignatureType = "private" -) - type PluginNotFoundError struct { PluginID string } @@ -60,25 +21,25 @@ func (e PluginNotFoundError) Error() string { return fmt.Sprintf("plugin with ID %q not found", e.PluginID) } -type duplicatePluginError struct { +type DuplicatePluginError struct { Plugin *PluginBase ExistingPlugin *PluginBase } -func (e duplicatePluginError) Error() string { +func (e DuplicatePluginError) Error() string { return fmt.Sprintf("plugin with ID %q already loaded from %q", e.Plugin.Id, e.ExistingPlugin.PluginDir) } -func (e duplicatePluginError) Is(err error) bool { +func (e DuplicatePluginError) Is(err error) bool { // nolint:errorlint - _, ok := err.(duplicatePluginError) + _, ok := err.(DuplicatePluginError) return ok } // PluginLoader can load a plugin. type PluginLoader interface { - // Load loads a plugin and registers it with the manager. 
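On the `models.go` changes above: exporting `DuplicatePluginError` together with its type-based `Is` method lets callers test for the duplicate condition against a zero value, which is exactly what `manager_test.go` in this diff does. A small sketch of the resulting ergonomics:

```go
package manager_test

import (
	"errors"
	"fmt"

	"github.com/grafana/grafana/pkg/plugins"
)

// reportScanError distinguishes duplicate-plugin failures from other
// scanning errors; Is matches on the concrete type, so the zero value
// works as the target even though the fields differ.
func reportScanError(err error) {
	if errors.Is(err, plugins.DuplicatePluginError{}) {
		fmt.Println("duplicate plugin:", err)
		return
	}
	fmt.Println("scan error:", err)
}
```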
- Load(decoder *json.Decoder, base *PluginBase, backendPluginManager backendplugin.Manager) error + // Load loads a plugin and returns it. + Load(decoder *json.Decoder, base *PluginBase, backendPluginManager backendplugin.Manager) (interface{}, error) } // PluginBase is the base plugin type. @@ -112,39 +73,6 @@ type PluginBase struct { Root *PluginBase } -func (pb *PluginBase) registerPlugin(base *PluginBase) error { - if p, exists := Plugins[pb.Id]; exists { - return duplicatePluginError{Plugin: pb, ExistingPlugin: p} - } - - if !strings.HasPrefix(base.PluginDir, setting.StaticRootPath) { - plog.Info("Registering plugin", "id", pb.Id) - } - - if len(pb.Dependencies.Plugins) == 0 { - pb.Dependencies.Plugins = []PluginDependencyItem{} - } - - if pb.Dependencies.GrafanaVersion == "" { - pb.Dependencies.GrafanaVersion = "*" - } - - for _, include := range pb.Includes { - if include.Role == "" { - include.Role = models.ROLE_VIEWER - } - } - - // Copy relevant fields from the base - pb.PluginDir = base.PluginDir - pb.Signature = base.Signature - pb.SignatureType = base.SignatureType - pb.SignatureOrg = base.SignatureOrg - - Plugins[pb.Id] = pb - return nil -} - type PluginDependencies struct { GrafanaVersion string `json:"grafanaVersion"` Plugins []PluginDependencyItem `json:"plugins"` @@ -214,11 +142,3 @@ type EnabledPlugins struct { DataSources map[string]*DataSourcePlugin Apps []*AppPlugin } - -func NewEnabledPlugins() EnabledPlugins { - return EnabledPlugins{ - Panels: make([]*PanelPlugin, 0), - DataSources: make(map[string]*DataSourcePlugin), - Apps: make([]*AppPlugin, 0), - } -} diff --git a/pkg/plugins/panel_plugin.go b/pkg/plugins/panel_plugin.go index 4376dac18ee..254a6bcda96 100644 --- a/pkg/plugins/panel_plugin.go +++ b/pkg/plugins/panel_plugin.go @@ -11,15 +11,11 @@ type PanelPlugin struct { SkipDataQuery bool `json:"skipDataQuery"` } -func (p *PanelPlugin) Load(decoder *json.Decoder, base *PluginBase, backendPluginManager backendplugin.Manager) error { +func (p *PanelPlugin) Load(decoder *json.Decoder, base *PluginBase, backendPluginManager backendplugin.Manager) ( + interface{}, error) { if err := decoder.Decode(p); err != nil { - return err + return nil, err } - if err := p.registerPlugin(base); err != nil { - return err - } - - Panels[p.Id] = p - return nil + return p, nil } diff --git a/pkg/plugins/plugindashboards/service.go b/pkg/plugins/plugindashboards/service.go new file mode 100644 index 00000000000..53e3d0c4b8d --- /dev/null +++ b/pkg/plugins/plugindashboards/service.go @@ -0,0 +1,150 @@ +package plugindashboards + +import ( + "context" + + "github.com/grafana/grafana/pkg/bus" + "github.com/grafana/grafana/pkg/infra/log" + "github.com/grafana/grafana/pkg/models" + "github.com/grafana/grafana/pkg/plugins" + "github.com/grafana/grafana/pkg/plugins/manager" + "github.com/grafana/grafana/pkg/registry" + "github.com/grafana/grafana/pkg/tsdb" +) + +func init() { + registry.Register(®istry.Descriptor{ + Name: "PluginDashboardService", + Instance: &Service{}, + }) +} + +type Service struct { + DataService *tsdb.Service `inject:""` + PluginManager *manager.PluginManager `inject:""` + + logger log.Logger +} + +func (s *Service) Init() error { + bus.AddEventListener(s.handlePluginStateChanged) + s.logger = log.New("plugindashboards") + return nil +} + +func (s *Service) Run(ctx context.Context) error { + s.updateAppDashboards() + return nil +} + +func (s *Service) updateAppDashboards() { + s.logger.Debug("Looking for app dashboard updates") + + query := 
models.GetPluginSettingsQuery{OrgId: 0} + if err := bus.Dispatch(&query); err != nil { + s.logger.Error("Failed to get all plugin settings", "error", err) + return + } + + for _, pluginSetting := range query.Result { + // ignore disabled plugins + if !pluginSetting.Enabled { + continue + } + + if pluginDef, exists := manager.Plugins[pluginSetting.PluginId]; exists { + if pluginDef.Info.Version != pluginSetting.PluginVersion { + s.syncPluginDashboards(pluginDef, pluginSetting.OrgId) + } + } + } +} + +func (s *Service) syncPluginDashboards(pluginDef *plugins.PluginBase, orgID int64) { + s.logger.Info("Syncing plugin dashboards to DB", "pluginId", pluginDef.Id) + + // Get plugin dashboards + dashboards, err := s.PluginManager.GetPluginDashboards(orgID, pluginDef.Id) + if err != nil { + s.logger.Error("Failed to load app dashboards", "error", err) + return + } + + // Update dashboards with updated revisions + for _, dash := range dashboards { + // delete dashboards that the plugin no longer ships + if dash.Removed { + s.logger.Info("Deleting plugin dashboard", "pluginId", pluginDef.Id, "dashboard", dash.Slug) + + deleteCmd := models.DeleteDashboardCommand{OrgId: orgID, Id: dash.DashboardId} + if err := bus.Dispatch(&deleteCmd); err != nil { + s.logger.Error("Failed to delete plugin dashboard", "pluginId", pluginDef.Id, "error", err) + return + } + + continue + } + + // re-import dashboards whose plugin revision has changed + if dash.ImportedRevision != dash.Revision { + if err := s.autoUpdateAppDashboard(dash, orgID); err != nil { + s.logger.Error("Failed to auto update app dashboard", "pluginId", pluginDef.Id, "error", err) + return + } + } + } + + // update version in plugin_setting table to mark that we have processed the update + query := models.GetPluginSettingByIdQuery{PluginId: pluginDef.Id, OrgId: orgID} + if err := bus.Dispatch(&query); err != nil { + s.logger.Error("Failed to read plugin setting by ID", "error", err) + return + } + + appSetting := query.Result + cmd := models.UpdatePluginSettingVersionCmd{ + OrgId: appSetting.OrgId, + PluginId: appSetting.PluginId, + PluginVersion: pluginDef.Info.Version, + } + + if err := bus.Dispatch(&cmd); err != nil { + s.logger.Error("Failed to update plugin setting version", "error", err) + } +} + +func (s *Service) handlePluginStateChanged(event *models.PluginStateChangedEvent) error { + s.logger.Info("Plugin state changed", "pluginId", event.PluginId, "enabled", event.Enabled) + + if event.Enabled { + s.syncPluginDashboards(manager.Plugins[event.PluginId], event.OrgId) + } else { + query := models.GetDashboardsByPluginIdQuery{PluginId: event.PluginId, OrgId: event.OrgId} + if err := bus.Dispatch(&query); err != nil { + return err + } + + for _, dash := range query.Result { + s.logger.Info("Deleting plugin dashboard", "pluginId", event.PluginId, "dashboard", dash.Slug) + deleteCmd := models.DeleteDashboardCommand{OrgId: dash.OrgId, Id: dash.Id} + if err := bus.Dispatch(&deleteCmd); err != nil { + return err + } + } + } + + return nil +} + +func (s *Service) autoUpdateAppDashboard(pluginDashInfo *plugins.PluginDashboardInfoDTO, orgID int64) error { + dash, err := s.PluginManager.LoadPluginDashboard(pluginDashInfo.PluginId, pluginDashInfo.Path) + if err != nil { + return err + } + s.logger.Info("Auto updating app dashboard", "dashboard", dash.Title, "newRev", + pluginDashInfo.Revision, "oldRev", pluginDashInfo.ImportedRevision) + user := &models.SignedInUser{UserId: 0, OrgRole: models.ROLE_ADMIN} + _, err = s.PluginManager.ImportDashboard(pluginDashInfo.PluginId, pluginDashInfo.Path, orgID, 0, dash.Data,
true, + nil, user, s.DataService) + return err +} diff --git a/pkg/plugins/plugins.go b/pkg/plugins/plugins.go index e6370a29e75..9614d511072 100644 --- a/pkg/plugins/plugins.go +++ b/pkg/plugins/plugins.go @@ -1,541 +1,2 @@ +// Package plugins contains plugin related logic. package plugins - -import ( - "context" - "encoding/json" - "errors" - "fmt" - "io/ioutil" - "os" - "path/filepath" - "reflect" - "runtime" - "strings" - "time" - - "github.com/grafana/grafana/pkg/infra/fs" - "github.com/grafana/grafana/pkg/infra/log" - "github.com/grafana/grafana/pkg/infra/metrics" - "github.com/grafana/grafana/pkg/plugins/backendplugin" - "github.com/grafana/grafana/pkg/registry" - "github.com/grafana/grafana/pkg/setting" - "github.com/grafana/grafana/pkg/util" - "github.com/grafana/grafana/pkg/util/errutil" -) - -var ( - DataSources map[string]*DataSourcePlugin - Panels map[string]*PanelPlugin - StaticRoutes []*PluginStaticRoute - Apps map[string]*AppPlugin - Plugins map[string]*PluginBase - PluginTypes map[string]interface{} - Renderer *RendererPlugin - - plog log.Logger -) - -type unsignedPluginConditionFunc = func(plugin *PluginBase) bool - -type PluginScanner struct { - pluginPath string - errors []error - backendPluginManager backendplugin.Manager - cfg *setting.Cfg - requireSigned bool - log log.Logger - plugins map[string]*PluginBase - allowUnsignedPluginsCondition unsignedPluginConditionFunc -} - -type PluginManager struct { - BackendPluginManager backendplugin.Manager `inject:""` - Cfg *setting.Cfg `inject:""` - log log.Logger - scanningErrors []error - - // AllowUnsignedPluginsCondition changes the policy for allowing unsigned plugins. Signature validation only runs when plugins are starting - // and running plugins will not be terminated if they violate the new policy. 
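Stepping back to the new `plugindashboards.Service` above: it subscribes to `models.PluginStateChangedEvent` on the bus, syncing dashboards when a plugin is enabled and deleting them when it is disabled. A sketch of the producing side, assuming the event is published wherever plugin settings are toggled; the helper name is hypothetical:

```go
package plugindashboards_test

import (
	"github.com/grafana/grafana/pkg/bus"
	"github.com/grafana/grafana/pkg/models"
)

// notifyPluginEnabled publishes the event the service listens for;
// handlePluginStateChanged will then import the plugin's dashboards
// for the given org.
func notifyPluginEnabled(orgID int64, pluginID string) error {
	return bus.Publish(&models.PluginStateChangedEvent{
		OrgId:    orgID,
		PluginId: pluginID,
		Enabled:  true,
	})
}
```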
- AllowUnsignedPluginsCondition unsignedPluginConditionFunc - GrafanaLatestVersion string - GrafanaHasUpdate bool - pluginScanningErrors map[string]PluginError -} - -func init() { - registry.RegisterService(&PluginManager{}) -} - -func (pm *PluginManager) Init() error { - pm.log = log.New("plugins") - plog = log.New("plugins") - - DataSources = map[string]*DataSourcePlugin{} - StaticRoutes = []*PluginStaticRoute{} - Panels = map[string]*PanelPlugin{} - Apps = map[string]*AppPlugin{} - Plugins = map[string]*PluginBase{} - PluginTypes = map[string]interface{}{ - "panel": PanelPlugin{}, - "datasource": DataSourcePlugin{}, - "app": AppPlugin{}, - "renderer": RendererPlugin{}, - } - pm.pluginScanningErrors = map[string]PluginError{} - - pm.log.Info("Starting plugin search") - - plugDir := filepath.Join(pm.Cfg.StaticRootPath, "app/plugins") - pm.log.Debug("Scanning core plugin directory", "dir", plugDir) - if err := pm.scan(plugDir, false); err != nil { - return errutil.Wrapf(err, "failed to scan core plugin directory '%s'", plugDir) - } - - plugDir = pm.Cfg.BundledPluginsPath - pm.log.Debug("Scanning bundled plugins directory", "dir", plugDir) - exists, err := fs.Exists(plugDir) - if err != nil { - return err - } - if exists { - if err := pm.scan(plugDir, false); err != nil { - return errutil.Wrapf(err, "failed to scan bundled plugins directory '%s'", plugDir) - } - } - - // check if plugins dir exists - exists, err = fs.Exists(pm.Cfg.PluginsPath) - if err != nil { - return err - } - if !exists { - if err = os.MkdirAll(pm.Cfg.PluginsPath, os.ModePerm); err != nil { - pm.log.Error("failed to create external plugins directory", "dir", pm.Cfg.PluginsPath, "error", err) - } else { - pm.log.Info("External plugins directory created", "directory", pm.Cfg.PluginsPath) - } - } else { - pm.log.Debug("Scanning external plugins directory", "dir", pm.Cfg.PluginsPath) - if err := pm.scan(pm.Cfg.PluginsPath, true); err != nil { - return errutil.Wrapf(err, "failed to scan external plugins directory '%s'", - pm.Cfg.PluginsPath) - } - } - - if err := pm.scanPluginPaths(); err != nil { - return err - } - - for _, panel := range Panels { - panel.initFrontendPlugin() - } - - for _, ds := range DataSources { - ds.initFrontendPlugin() - } - - for _, app := range Apps { - app.initApp() - } - - if Renderer != nil { - Renderer.initFrontendPlugin() - } - - for _, p := range Plugins { - if p.IsCorePlugin { - p.Signature = pluginSignatureInternal - } else { - metrics.SetPluginBuildInformation(p.Id, p.Type, p.Info.Version) - } - } - - return nil -} - -func (pm *PluginManager) Run(ctx context.Context) error { - pm.updateAppDashboards() - pm.checkForUpdates() - - ticker := time.NewTicker(time.Minute * 10) - run := true - - for run { - select { - case <-ticker.C: - pm.checkForUpdates() - case <-ctx.Done(): - run = false - } - } - - return ctx.Err() -} - -// scanPluginPaths scans configured plugin paths. -func (pm *PluginManager) scanPluginPaths() error { - for pluginID, settings := range pm.Cfg.PluginSettings { - path, exists := settings["path"] - if !exists || path == "" { - continue - } - - if err := pm.scan(path, true); err != nil { - return errutil.Wrapf(err, "failed to scan directory configured for plugin '%s': '%s'", pluginID, path) - } - } - - return nil -} - -// scan a directory for plugins. 
-func (pm *PluginManager) scan(pluginDir string, requireSigned bool) error { - scanner := &PluginScanner{ - pluginPath: pluginDir, - backendPluginManager: pm.BackendPluginManager, - cfg: pm.Cfg, - requireSigned: requireSigned, - log: pm.log, - plugins: map[string]*PluginBase{}, - allowUnsignedPluginsCondition: pm.AllowUnsignedPluginsCondition, - } - - // 1st pass: Scan plugins, also mapping plugins to their respective directories - if err := util.Walk(pluginDir, true, true, scanner.walker); err != nil { - if errors.Is(err, os.ErrNotExist) { - pm.log.Debug("Couldn't scan directory since it doesn't exist", "pluginDir", pluginDir, "err", err) - return nil - } - if errors.Is(err, os.ErrPermission) { - pm.log.Debug("Couldn't scan directory due to lack of permissions", "pluginDir", pluginDir, "err", err) - return nil - } - if pluginDir != "data/plugins" { - pm.log.Warn("Could not scan dir", "pluginDir", pluginDir, "err", err) - } - return err - } - - pm.log.Debug("Initial plugin loading done") - - // 2nd pass: Validate and register plugins - for dpath, plugin := range scanner.plugins { - // Try to find any root plugin - ancestors := strings.Split(dpath, string(filepath.Separator)) - ancestors = ancestors[0 : len(ancestors)-1] - aPath := "" - if runtime.GOOS != "windows" && filepath.IsAbs(dpath) { - aPath = "/" - } - for _, a := range ancestors { - aPath = filepath.Join(aPath, a) - if root, ok := scanner.plugins[aPath]; ok { - plugin.Root = root - break - } - } - - pm.log.Debug("Found plugin", "id", plugin.Id, "signature", plugin.Signature, "hasRoot", plugin.Root != nil) - signingError := scanner.validateSignature(plugin) - if signingError != nil { - pm.log.Debug("Failed to validate plugin signature. Will skip loading", "id", plugin.Id, - "signature", plugin.Signature, "status", signingError.ErrorCode) - pm.pluginScanningErrors[plugin.Id] = *signingError - continue - } - - pm.log.Debug("Attempting to add plugin", "id", plugin.Id) - - pluginGoType, exists := PluginTypes[plugin.Type] - if !exists { - return fmt.Errorf("unknown plugin type %q", plugin.Type) - } - - jsonFPath := filepath.Join(plugin.PluginDir, "plugin.json") - - // External plugins need a module.js file for SystemJS to load - if !strings.HasPrefix(jsonFPath, pm.Cfg.StaticRootPath) && !scanner.IsBackendOnlyPlugin(plugin.Type) { - module := filepath.Join(plugin.PluginDir, "module.js") - exists, err := fs.Exists(module) - if err != nil { - return err - } - if !exists { - scanner.log.Warn("Plugin missing module.js", - "name", plugin.Name, - "warning", "Missing module.js, If you loaded this plugin from git, make sure to compile it.", - "path", module) - } - } - - // nolint:gosec - // We can ignore the gosec G304 warning on this one because `jsonFPath` is based - // on plugin the folder structure on disk and not user input. 
- reader, err := os.Open(jsonFPath) - if err != nil { - return err - } - defer func() { - if err := reader.Close(); err != nil { - scanner.log.Warn("Failed to close JSON file", "path", jsonFPath, "err", err) - } - }() - - jsonParser := json.NewDecoder(reader) - - loader := reflect.New(reflect.TypeOf(pluginGoType)).Interface().(PluginLoader) - - // Load the full plugin, and add it to manager - if err := loader.Load(jsonParser, plugin, scanner.backendPluginManager); err != nil { - if errors.Is(err, duplicatePluginError{}) { - pm.log.Warn("Plugin is duplicate", "error", err) - scanner.errors = append(scanner.errors, err) - continue - } - return err - } - pm.log.Debug("Successfully added plugin", "id", plugin.Id) - } - - if len(scanner.errors) > 0 { - pm.log.Warn("Some plugins failed to load", "errors", scanner.errors) - pm.scanningErrors = scanner.errors - } - - return nil -} - -// GetDatasource returns a datasource based on passed pluginID if it exists -// -// This function fetches the datasource from the global variable DataSources in this package. -// Rather then refactor all dependencies on the global variable we can use this as an transition. -func (pm *PluginManager) GetDatasource(pluginID string) (*DataSourcePlugin, bool) { - ds, exist := DataSources[pluginID] - return ds, exist -} - -func (s *PluginScanner) walker(currentPath string, f os.FileInfo, err error) error { - // We scan all the subfolders for plugin.json (with some exceptions) so that we also load embedded plugins, for - // example https://github.com/raintank/worldping-app/tree/master/dist/grafana-worldmap-panel worldmap panel plugin - // is embedded in worldping app. - if err != nil { - return fmt.Errorf("filepath.Walk reported an error for %q: %w", currentPath, err) - } - - if f.Name() == "node_modules" || f.Name() == "Chromium.app" { - return util.ErrWalkSkipDir - } - - if f.IsDir() { - return nil - } - - if f.Name() != "plugin.json" { - return nil - } - - if err := s.loadPlugin(currentPath); err != nil { - s.log.Error("Failed to load plugin", "error", err, "pluginPath", filepath.Dir(currentPath)) - s.errors = append(s.errors, err) - } - - return nil -} - -func (s *PluginScanner) loadPlugin(pluginJSONFilePath string) error { - s.log.Debug("Loading plugin", "path", pluginJSONFilePath) - currentDir := filepath.Dir(pluginJSONFilePath) - // nolint:gosec - // We can ignore the gosec G304 warning on this one because `currentPath` is based - // on plugin the folder structure on disk and not user input. 
- reader, err := os.Open(pluginJSONFilePath) - if err != nil { - return err - } - defer func() { - if err := reader.Close(); err != nil { - s.log.Warn("Failed to close JSON file", "path", pluginJSONFilePath, "err", err) - } - }() - - jsonParser := json.NewDecoder(reader) - pluginCommon := PluginBase{} - if err := jsonParser.Decode(&pluginCommon); err != nil { - return err - } - - if pluginCommon.Id == "" || pluginCommon.Type == "" { - return errors.New("did not find type or id properties in plugin.json") - } - - pluginCommon.PluginDir = filepath.Dir(pluginJSONFilePath) - pluginCommon.Files, err = collectPluginFilesWithin(pluginCommon.PluginDir) - if err != nil { - s.log.Warn("Could not collect plugin file information in directory", "pluginID", pluginCommon.Id, "dir", pluginCommon.PluginDir) - return err - } - - signatureState, err := getPluginSignatureState(s.log, &pluginCommon) - if err != nil { - s.log.Warn("Could not get plugin signature state", "pluginID", pluginCommon.Id, "err", err) - return err - } - pluginCommon.Signature = signatureState.Status - pluginCommon.SignatureType = signatureState.Type - pluginCommon.SignatureOrg = signatureState.SigningOrg - - s.plugins[currentDir] = &pluginCommon - - return nil -} - -func (*PluginScanner) IsBackendOnlyPlugin(pluginType string) bool { - return pluginType == "renderer" -} - -// validateSignature validates a plugin's signature. -func (s *PluginScanner) validateSignature(plugin *PluginBase) *PluginError { - if plugin.Signature == pluginSignatureValid { - s.log.Debug("Plugin has valid signature", "id", plugin.Id) - return nil - } - - if plugin.Root != nil { - // If a descendant plugin with invalid signature, set signature to that of root - if plugin.IsCorePlugin || plugin.Signature == pluginSignatureInternal { - s.log.Debug("Not setting descendant plugin's signature to that of root since it's core or internal", - "plugin", plugin.Id, "signature", plugin.Signature, "isCore", plugin.IsCorePlugin) - } else { - s.log.Debug("Setting descendant plugin's signature to that of root", "plugin", plugin.Id, - "root", plugin.Root.Id, "signature", plugin.Signature, "rootSignature", plugin.Root.Signature) - plugin.Signature = plugin.Root.Signature - if plugin.Signature == pluginSignatureValid { - s.log.Debug("Plugin has valid signature (inherited from root)", "id", plugin.Id) - return nil - } - } - } else { - s.log.Debug("Non-valid plugin Signature", "pluginID", plugin.Id, "pluginDir", plugin.PluginDir, - "state", plugin.Signature) - } - - // For the time being, we choose to only require back-end plugins to be signed - // NOTE: the state is calculated again when setting metadata on the object - if !plugin.Backend || !s.requireSigned { - return nil - } - - switch plugin.Signature { - case pluginSignatureUnsigned: - if allowed := s.allowUnsigned(plugin); !allowed { - s.log.Debug("Plugin is unsigned", "id", plugin.Id) - s.errors = append(s.errors, fmt.Errorf("plugin %q is unsigned", plugin.Id)) - return &PluginError{ - ErrorCode: signatureMissing, - } - } - s.log.Warn("Running an unsigned backend plugin", "pluginID", plugin.Id, "pluginDir", - plugin.PluginDir) - return nil - case pluginSignatureInvalid: - s.log.Debug("Plugin %q has an invalid signature", plugin.Id) - s.errors = append(s.errors, fmt.Errorf("plugin %q has an invalid signature", plugin.Id)) - return &PluginError{ - ErrorCode: signatureInvalid, - } - case pluginSignatureModified: - s.log.Debug("Plugin %q has a modified signature", plugin.Id) - s.errors = append(s.errors, fmt.Errorf("plugin %q's 
signature has been modified", plugin.Id)) - return &PluginError{ - ErrorCode: signatureModified, - } - default: - panic(fmt.Sprintf("Plugin %q has unrecognized plugin signature state %q", plugin.Id, plugin.Signature)) - } -} - -func (s *PluginScanner) allowUnsigned(plugin *PluginBase) bool { - if s.allowUnsignedPluginsCondition != nil { - return s.allowUnsignedPluginsCondition(plugin) - } - - if s.cfg.Env == setting.Dev { - return true - } - - for _, plug := range s.cfg.PluginsAllowUnsigned { - if plug == plugin.Id { - return true - } - } - - return false -} - -// ScanningErrors returns plugin scanning errors encountered. -func (pm *PluginManager) ScanningErrors() []PluginError { - scanningErrs := make([]PluginError, 0) - for id, e := range pm.pluginScanningErrors { - scanningErrs = append(scanningErrs, PluginError{ - ErrorCode: e.ErrorCode, - PluginID: id, - }) - } - return scanningErrs -} - -func GetPluginMarkdown(pluginId string, name string) ([]byte, error) { - plug, exists := Plugins[pluginId] - if !exists { - return nil, PluginNotFoundError{pluginId} - } - - // nolint:gosec - // We can ignore the gosec G304 warning on this one because `plug.PluginDir` is based - // on plugin the folder structure on disk and not user input. - path := filepath.Join(plug.PluginDir, fmt.Sprintf("%s.md", strings.ToUpper(name))) - exists, err := fs.Exists(path) - if err != nil { - return nil, err - } - if !exists { - path = filepath.Join(plug.PluginDir, fmt.Sprintf("%s.md", strings.ToLower(name))) - } - - exists, err = fs.Exists(path) - if err != nil { - return nil, err - } - if !exists { - return make([]byte, 0), nil - } - - // nolint:gosec - // We can ignore the gosec G304 warning on this one because `plug.PluginDir` is based - // on plugin the folder structure on disk and not user input. 
- data, err := ioutil.ReadFile(path) - if err != nil { - return nil, err - } - return data, nil -} - -// gets plugin filenames that require verification for plugin signing -func collectPluginFilesWithin(rootDir string) ([]string, error) { - var files []string - - err := filepath.Walk(rootDir, func(path string, info os.FileInfo, err error) error { - if err != nil { - return err - } - if !info.IsDir() && info.Name() != "MANIFEST.txt" { - file, err := filepath.Rel(rootDir, path) - if err != nil { - return err - } - files = append(files, filepath.ToSlash(file)) - } - return nil - }) - return files, err -} diff --git a/pkg/plugins/renderer_plugin.go b/pkg/plugins/renderer_plugin.go index 7e891d1ae9f..711c7207fd3 100644 --- a/pkg/plugins/renderer_plugin.go +++ b/pkg/plugins/renderer_plugin.go @@ -22,29 +22,25 @@ type RendererPlugin struct { backendPluginManager backendplugin.Manager } -func (r *RendererPlugin) Load(decoder *json.Decoder, base *PluginBase, backendPluginManager backendplugin.Manager) error { +func (r *RendererPlugin) Load(decoder *json.Decoder, base *PluginBase, + backendPluginManager backendplugin.Manager) (interface{}, error) { if err := decoder.Decode(r); err != nil { - return err - } - - if err := r.registerPlugin(base); err != nil { - return err + return nil, err } r.backendPluginManager = backendPluginManager cmd := ComposePluginStartCommand("plugin_start") - fullpath := filepath.Join(r.PluginDir, cmd) + fullpath := filepath.Join(base.PluginDir, cmd) factory := grpcplugin.NewRendererPlugin(r.Id, fullpath, grpcplugin.PluginStartFuncs{ OnLegacyStart: r.onLegacyPluginStart, OnStart: r.onPluginStart, }) if err := backendPluginManager.Register(r.Id, factory); err != nil { - return errutil.Wrapf(err, "Failed to register backend plugin") + return nil, errutil.Wrapf(err, "failed to register backend plugin") } - Renderer = r - return nil + return r, nil } func (r *RendererPlugin) Start(ctx context.Context) error { diff --git a/pkg/plugins/state.go b/pkg/plugins/state.go new file mode 100644 index 00000000000..313b6ec3c83 --- /dev/null +++ b/pkg/plugins/state.go @@ -0,0 +1,38 @@ +package plugins + +type PluginSignatureStatus string + +func (pss PluginSignatureStatus) IsValid() bool { + return pss == PluginSignatureValid +} + +func (pss PluginSignatureStatus) IsInternal() bool { + return pss == PluginSignatureInternal +} + +const ( + PluginSignatureInternal PluginSignatureStatus = "internal" // core plugin, no signature + PluginSignatureValid PluginSignatureStatus = "valid" // signed and accurate MANIFEST + PluginSignatureInvalid PluginSignatureStatus = "invalid" // invalid signature + PluginSignatureModified PluginSignatureStatus = "modified" // valid signature, but content mismatch + PluginSignatureUnsigned PluginSignatureStatus = "unsigned" // no MANIFEST file +) + +type PluginState string + +const ( + PluginStateAlpha PluginState = "alpha" +) + +type PluginSignatureType string + +const ( + GrafanaType PluginSignatureType = "grafana" + PrivateType PluginSignatureType = "private" +) + +type PluginSignatureState struct { + Status PluginSignatureStatus + Type PluginSignatureType + SigningOrg string +} diff --git a/pkg/plugins/tsdb.go b/pkg/plugins/tsdb.go new file mode 100644 index 00000000000..ce10a4508e6 --- /dev/null +++ b/pkg/plugins/tsdb.go @@ -0,0 +1,311 @@ +package plugins + +import ( + "context" + "encoding/base64" + "encoding/json" + "fmt" + "strconv" + "time" + + "github.com/grafana/grafana-plugin-sdk-go/data" + "github.com/grafana/grafana/pkg/components/null" + 
"github.com/grafana/grafana/pkg/components/simplejson" + "github.com/grafana/grafana/pkg/models" + "github.com/timberio/go-datemath" +) + +// DataSubQuery represents a data sub-query. +type DataSubQuery struct { + RefID string `json:"refId"` + Model *simplejson.Json `json:"model,omitempty"` + DataSource *models.DataSource `json:"datasource"` + MaxDataPoints int64 `json:"maxDataPoints"` + IntervalMS int64 `json:"intervalMs"` + QueryType string `json:"queryType"` +} + +// DataQuery contains all information about a data query request. +type DataQuery struct { + TimeRange *DataTimeRange + Queries []DataSubQuery + Headers map[string]string + Debug bool + User *models.SignedInUser +} + +type DataTimeRange struct { + From string + To string + Now time.Time +} + +type DataTable struct { + Columns []DataTableColumn `json:"columns"` + Rows []DataRowValues `json:"rows"` +} + +type DataTableColumn struct { + Text string `json:"text"` +} + +type DataTimePoint [2]null.Float +type DataTimeSeriesPoints []DataTimePoint +type DataTimeSeriesSlice []DataTimeSeries +type DataRowValues []interface{} + +type DataQueryResult struct { + Error error `json:"-"` + ErrorString string `json:"error,omitempty"` + RefID string `json:"refId"` + Meta *simplejson.Json `json:"meta,omitempty"` + Series DataTimeSeriesSlice `json:"series"` + Tables []DataTable `json:"tables"` + Dataframes DataFrames `json:"dataframes"` +} + +// UnmarshalJSON deserializes a DataQueryResult from JSON. +// +// Deserialization support is required by tests. +func (r *DataQueryResult) UnmarshalJSON(b []byte) error { + m := map[string]interface{}{} + if err := json.Unmarshal(b, &m); err != nil { + return err + } + + refID, ok := m["refId"].(string) + if !ok { + return fmt.Errorf("can't decode field refId - not a string") + } + var meta *simplejson.Json + if m["meta"] != nil { + mm, ok := m["meta"].(map[string]interface{}) + if !ok { + return fmt.Errorf("can't decode field meta - not a JSON object") + } + meta = simplejson.NewFromAny(mm) + } + var series DataTimeSeriesSlice + /* TODO + if m["series"] != nil { + } + */ + var tables []DataTable + if m["tables"] != nil { + ts, ok := m["tables"].([]interface{}) + if !ok { + return fmt.Errorf("can't decode field tables - not an array of Tables") + } + for _, ti := range ts { + tm, ok := ti.(map[string]interface{}) + if !ok { + return fmt.Errorf("can't decode field tables - not an array of Tables") + } + var columns []DataTableColumn + cs, ok := tm["columns"].([]interface{}) + if !ok { + return fmt.Errorf("can't decode field tables - not an array of Tables") + } + for _, ci := range cs { + cm, ok := ci.(map[string]interface{}) + if !ok { + return fmt.Errorf("can't decode field tables - not an array of Tables") + } + val, ok := cm["text"].(string) + if !ok { + return fmt.Errorf("can't decode field tables - not an array of Tables") + } + + columns = append(columns, DataTableColumn{Text: val}) + } + + rs, ok := tm["rows"].([]interface{}) + if !ok { + return fmt.Errorf("can't decode field tables - not an array of Tables") + } + var rows []DataRowValues + for _, ri := range rs { + vals, ok := ri.([]interface{}) + if !ok { + return fmt.Errorf("can't decode field tables - not an array of Tables") + } + rows = append(rows, vals) + } + + tables = append(tables, DataTable{ + Columns: columns, + Rows: rows, + }) + } + } + + var dfs *dataFrames + if m["dataframes"] != nil { + raw, ok := m["dataframes"].([]interface{}) + if !ok { + return fmt.Errorf("can't decode field dataframes - not an array of byte arrays") + } + + 
var encoded [][]byte + for _, ra := range raw { + encS, ok := ra.(string) + if !ok { + return fmt.Errorf("can't decode field dataframes - not an array of byte arrays") + } + enc, err := base64.StdEncoding.DecodeString(encS) + if err != nil { + return fmt.Errorf("can't decode field dataframes - not an array of arrow frames") + } + encoded = append(encoded, enc) + } + decoded, err := data.UnmarshalArrowFrames(encoded) + if err != nil { + return err + } + dfs = &dataFrames{ + decoded: decoded, + encoded: encoded, + } + } + + r.RefID = refID + r.Meta = meta + r.Series = series + r.Tables = tables + if dfs != nil { + r.Dataframes = dfs + } + return nil +} + +type DataTimeSeries struct { + Name string `json:"name"` + Points DataTimeSeriesPoints `json:"points"` + Tags map[string]string `json:"tags,omitempty"` +} + +type DataResponse struct { + Results map[string]DataQueryResult `json:"results"` + Message string `json:"message,omitempty"` +} + +type DataPlugin interface { + DataQuery(ctx context.Context, ds *models.DataSource, query DataQuery) (DataResponse, error) +} + +func NewDataTimeRange(from, to string) DataTimeRange { + return DataTimeRange{ + From: from, + To: to, + Now: time.Now(), + } +} + +func (tr *DataTimeRange) GetFromAsMsEpoch() int64 { + return tr.MustGetFrom().UnixNano() / int64(time.Millisecond) +} + +func (tr *DataTimeRange) GetFromAsSecondsEpoch() int64 { + return tr.GetFromAsMsEpoch() / 1000 +} + +func (tr *DataTimeRange) GetFromAsTimeUTC() time.Time { + return tr.MustGetFrom().UTC() +} + +func (tr *DataTimeRange) GetToAsMsEpoch() int64 { + return tr.MustGetTo().UnixNano() / int64(time.Millisecond) +} + +func (tr *DataTimeRange) GetToAsSecondsEpoch() int64 { + return tr.GetToAsMsEpoch() / 1000 +} + +func (tr *DataTimeRange) GetToAsTimeUTC() time.Time { + return tr.MustGetTo().UTC() +} + +func (tr *DataTimeRange) MustGetFrom() time.Time { + res, err := tr.ParseFrom() + if err != nil { + return time.Unix(0, 0) + } + return res +} + +func (tr *DataTimeRange) MustGetTo() time.Time { + res, err := tr.ParseTo() + if err != nil { + return time.Unix(0, 0) + } + return res +} + +func (tr DataTimeRange) ParseFrom() (time.Time, error) { + return parseTimeRange(tr.From, tr.Now, false, nil) +} + +func (tr DataTimeRange) ParseTo() (time.Time, error) { + return parseTimeRange(tr.To, tr.Now, true, nil) +} + +func (tr DataTimeRange) ParseFromWithLocation(location *time.Location) (time.Time, error) { + return parseTimeRange(tr.From, tr.Now, false, location) +} + +func (tr DataTimeRange) ParseToWithLocation(location *time.Location) (time.Time, error) { + return parseTimeRange(tr.To, tr.Now, true, location) +} + +func parseTimeRange(s string, now time.Time, withRoundUp bool, location *time.Location) (time.Time, error) { + if val, err := strconv.ParseInt(s, 10, 64); err == nil { + seconds := val / 1000 + nano := (val - seconds*1000) * 1000000 + return time.Unix(seconds, nano), nil + } + + diff, err := time.ParseDuration("-" + s) + if err != nil { + options := []func(*datemath.Options){ + datemath.WithNow(now), + datemath.WithRoundUp(withRoundUp), + } + if location != nil { + options = append(options, datemath.WithLocation(location)) + } + + return datemath.ParseAndEvaluate(s, options...) + } + + return now.Add(diff), nil +} + +// SeriesToFrame converts a DataTimeSeries to an SDK frame. 
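+// For example (illustrative values): a series named "cpu" with tags
+// {"host": "a"} and one point {1.5, 1577934240000} becomes a frame with a
+// "time" field and a "value" field labelled host=a, each holding one row.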
+func SeriesToFrame(series DataTimeSeries) (*data.Frame, error) {
+	timeVec := make([]*time.Time, len(series.Points))
+	floatVec := make([]*float64, len(series.Points))
+	for idx, point := range series.Points {
+		timeVec[idx], floatVec[idx] = convertDataTimePoint(point)
+	}
+	frame := data.NewFrame(series.Name,
+		data.NewField("time", nil, timeVec),
+		data.NewField("value", data.Labels(series.Tags), floatVec),
+	)
+
+	return frame, nil
+}
+
+// convertDataTimePoint converts a DataTimePoint into two values appropriate
+// for Series values.
+func convertDataTimePoint(point DataTimePoint) (t *time.Time, f *float64) {
+	timeIdx, valueIdx := 1, 0
+	if point[timeIdx].Valid { // Valid is false when the stored value is null
+		tI := int64(point[timeIdx].Float64)
+		uT := time.Unix(tI/int64(1e+3), (tI%int64(1e+3))*int64(1e+6)) // time.Time from millisecond unix ts
+		t = &uT
+	}
+	if point[valueIdx].Valid {
+		f = &point[valueIdx].Float64
+	}
+	return
+}
diff --git a/pkg/services/alerting/conditions/query.go b/pkg/services/alerting/conditions/query.go
index 5062ee51d45..fb38250eea1 100644
--- a/pkg/services/alerting/conditions/query.go
+++ b/pkg/services/alerting/conditions/query.go
@@ -6,7 +6,9 @@ import (
 	"strings"
 	"time"
 
+	"github.com/grafana/grafana/pkg/plugins"
 	"github.com/grafana/grafana/pkg/tsdb/prometheus"
+	"github.com/grafana/grafana/pkg/tsdb/tsdbifaces"
 
 	gocontext "context"
 
@@ -16,7 +18,6 @@ import (
 	"github.com/grafana/grafana/pkg/components/simplejson"
 	"github.com/grafana/grafana/pkg/models"
 	"github.com/grafana/grafana/pkg/services/alerting"
-	"github.com/grafana/grafana/pkg/tsdb"
 	"github.com/grafana/grafana/pkg/util/errutil"
 )
 
@@ -29,12 +30,11 @@ func init() {
 // QueryCondition is responsible for issuing the query, reducing the
 // time series into single values, and evaluating whether they are firing.
 type QueryCondition struct {
-	Index         int
-	Query         AlertQuery
-	Reducer       *queryReducer
-	Evaluator     AlertEvaluator
-	Operator      string
-	HandleRequest tsdb.HandleRequestFunc
+	Index     int
+	Query     AlertQuery
+	Reducer   *queryReducer
+	Evaluator AlertEvaluator
+	Operator  string
 }
 
 // AlertQuery contains information about what datasource a query
@@ -47,10 +47,10 @@ type AlertQuery struct {
 }
 
 // Eval evaluates the `QueryCondition`.
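 // The handler for executing the query is now passed in by the caller: the
 // DefaultEvalHandler hands its injected tsdbifaces.RequestHandler to every
 // condition (see eval_handler.go below), replacing the HandleRequest func
 // formerly stored on the struct.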
-func (c *QueryCondition) Eval(context *alerting.EvalContext) (*alerting.ConditionResult, error) { - timeRange := tsdb.NewTimeRange(c.Query.From, c.Query.To) +func (c *QueryCondition) Eval(context *alerting.EvalContext, requestHandler tsdbifaces.RequestHandler) (*alerting.ConditionResult, error) { + timeRange := plugins.NewDataTimeRange(c.Query.From, c.Query.To) - seriesList, err := c.executeQuery(context, timeRange) + seriesList, err := c.executeQuery(context, timeRange, requestHandler) if err != nil { return nil, err } @@ -109,7 +109,8 @@ func (c *QueryCondition) Eval(context *alerting.EvalContext) (*alerting.Conditio }, nil } -func (c *QueryCondition) executeQuery(context *alerting.EvalContext, timeRange *tsdb.TimeRange) (tsdb.TimeSeriesSlice, error) { +func (c *QueryCondition) executeQuery(context *alerting.EvalContext, timeRange plugins.DataTimeRange, + requestHandler tsdbifaces.RequestHandler) (plugins.DataTimeSeriesSlice, error) { getDsInfo := &models.GetDataSourceQuery{ Id: c.Query.DatasourceID, OrgId: context.Rule.OrgID, @@ -125,7 +126,7 @@ func (c *QueryCondition) executeQuery(context *alerting.EvalContext, timeRange * } req := c.getRequestForAlertRule(getDsInfo.Result, timeRange, context.IsDebug) - result := make(tsdb.TimeSeriesSlice, 0) + result := make(plugins.DataTimeSeriesSlice, 0) if context.IsDebug { data := simplejson.New() @@ -139,20 +140,20 @@ func (c *QueryCondition) executeQuery(context *alerting.EvalContext, timeRange * Model *simplejson.Json `json:"model"` Datasource *simplejson.Json `json:"datasource"` MaxDataPoints int64 `json:"maxDataPoints"` - IntervalMs int64 `json:"intervalMs"` + IntervalMS int64 `json:"intervalMs"` } queries := []*queryDto{} for _, q := range req.Queries { queries = append(queries, &queryDto{ - RefID: q.RefId, + RefID: q.RefID, Model: q.Model, Datasource: simplejson.NewFromAny(map[string]interface{}{ "id": q.DataSource.Id, "name": q.DataSource.Name, }), MaxDataPoints: q.MaxDataPoints, - IntervalMs: q.IntervalMs, + IntervalMS: q.IntervalMS, }) } @@ -164,29 +165,30 @@ func (c *QueryCondition) executeQuery(context *alerting.EvalContext, timeRange * }) } - resp, err := c.HandleRequest(context.Ctx, getDsInfo.Result, req) + resp, err := requestHandler.HandleRequest(context.Ctx, getDsInfo.Result, req) if err != nil { return nil, toCustomError(err) } for _, v := range resp.Results { if v.Error != nil { - return nil, fmt.Errorf("tsdb.HandleRequest() response error %v", v) + return nil, fmt.Errorf("request handler response error %v", v) } // If there are dataframes but no series on the result useDataframes := v.Dataframes != nil && (v.Series == nil || len(v.Series) == 0) - if useDataframes { // convert the dataframes to tsdb.TimeSeries + if useDataframes { // convert the dataframes to plugins.DataTimeSeries frames, err := v.Dataframes.Decoded() if err != nil { - return nil, errutil.Wrap("tsdb.HandleRequest() failed to unmarshal arrow dataframes from bytes", err) + return nil, errutil.Wrap("request handler failed to unmarshal arrow dataframes from bytes", err) } for _, frame := range frames { ss, err := FrameToSeriesSlice(frame) if err != nil { - return nil, errutil.Wrapf(err, `tsdb.HandleRequest() failed to convert dataframe "%v" to tsdb.TimeSeriesSlice`, frame.Name) + return nil, errutil.Wrapf(err, + `request handler failed to convert dataframe "%v" to plugins.DataTimeSeriesSlice`, frame.Name) } result = append(result, ss...) 
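 				// (Converted series from each frame accumulate in result; plain
 				// v.Series results are handled in the unchanged code this hunk omits.)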
} @@ -218,13 +220,14 @@ func (c *QueryCondition) executeQuery(context *alerting.EvalContext, timeRange * return result, nil } -func (c *QueryCondition) getRequestForAlertRule(datasource *models.DataSource, timeRange *tsdb.TimeRange, debug bool) *tsdb.TsdbQuery { +func (c *QueryCondition) getRequestForAlertRule(datasource *models.DataSource, timeRange plugins.DataTimeRange, + debug bool) plugins.DataQuery { queryModel := c.Query.Model - req := &tsdb.TsdbQuery{ - TimeRange: timeRange, - Queries: []*tsdb.Query{ + req := plugins.DataQuery{ + TimeRange: &timeRange, + Queries: []plugins.DataSubQuery{ { - RefId: "A", + RefID: "A", Model: queryModel, DataSource: datasource, QueryType: queryModel.Get("queryType").MustString(""), @@ -242,7 +245,6 @@ func (c *QueryCondition) getRequestForAlertRule(datasource *models.DataSource, t func newQueryCondition(model *simplejson.Json, index int) (*QueryCondition, error) { condition := QueryCondition{} condition.Index = index - condition.HandleRequest = tsdb.HandleRequest queryJSON := model.Get("query") @@ -301,23 +303,23 @@ func validateToValue(to string) error { } // FrameToSeriesSlice converts a frame that is a valid time series as per data.TimeSeriesSchema() -// to a TimeSeriesSlice. -func FrameToSeriesSlice(frame *data.Frame) (tsdb.TimeSeriesSlice, error) { +// to a DataTimeSeriesSlice. +func FrameToSeriesSlice(frame *data.Frame) (plugins.DataTimeSeriesSlice, error) { tsSchema := frame.TimeSeriesSchema() if tsSchema.Type == data.TimeSeriesTypeNot { - // If no fields, or only a time field, create an empty tsdb.TimeSeriesSlice with a single + // If no fields, or only a time field, create an empty plugins.DataTimeSeriesSlice with a single // time series in order to trigger "no data" in alerting. if len(frame.Fields) == 0 || (len(frame.Fields) == 1 && frame.Fields[0].Type().Time()) { - return tsdb.TimeSeriesSlice{{ + return plugins.DataTimeSeriesSlice{{ Name: frame.Name, - Points: make(tsdb.TimeSeriesPoints, 0), + Points: make(plugins.DataTimeSeriesPoints, 0), }}, nil } return nil, fmt.Errorf("input frame is not recognized as a time series") } seriesCount := len(tsSchema.ValueIndices) - seriesSlice := make(tsdb.TimeSeriesSlice, 0, seriesCount) + seriesSlice := make(plugins.DataTimeSeriesSlice, 0, seriesCount) timeField := frame.Fields[tsSchema.TimeIndex] timeNullFloatSlice := make([]null.Float, timeField.Len()) @@ -331,8 +333,8 @@ func FrameToSeriesSlice(frame *data.Frame) (tsdb.TimeSeriesSlice, error) { for _, fieldIdx := range tsSchema.ValueIndices { // create a TimeSeries for each value Field field := frame.Fields[fieldIdx] - ts := &tsdb.TimeSeries{ - Points: make(tsdb.TimeSeriesPoints, field.Len()), + ts := plugins.DataTimeSeries{ + Points: make(plugins.DataTimeSeriesPoints, field.Len()), } if len(field.Labels) > 0 { @@ -355,9 +357,10 @@ func FrameToSeriesSlice(frame *data.Frame) (tsdb.TimeSeriesSlice, error) { for rowIdx := 0; rowIdx < field.Len(); rowIdx++ { // for each value in the field, make a TimePoint val, err := field.FloatAt(rowIdx) if err != nil { - return nil, errutil.Wrapf(err, "failed to convert frame to tsdb.series, can not convert value %v to float", field.At(rowIdx)) + return nil, errutil.Wrapf(err, + "failed to convert frame to DataTimeSeriesSlice, can not convert value %v to float", field.At(rowIdx)) } - ts.Points[rowIdx] = tsdb.TimePoint{ + ts.Points[rowIdx] = plugins.DataTimePoint{ null.FloatFrom(val), timeNullFloatSlice[rowIdx], } @@ -381,5 +384,5 @@ func toCustomError(err error) error { } // generic fallback - return 
fmt.Errorf("tsdb.HandleRequest() error %v", err) + return fmt.Errorf("request handler error: %w", err) } diff --git a/pkg/services/alerting/conditions/query_test.go b/pkg/services/alerting/conditions/query_test.go index 7dbc64482a5..5a71576d026 100644 --- a/pkg/services/alerting/conditions/query_test.go +++ b/pkg/services/alerting/conditions/query_test.go @@ -15,18 +15,18 @@ import ( "github.com/grafana/grafana/pkg/components/null" "github.com/grafana/grafana/pkg/components/simplejson" "github.com/grafana/grafana/pkg/models" + "github.com/grafana/grafana/pkg/plugins" "github.com/grafana/grafana/pkg/services/alerting" - "github.com/grafana/grafana/pkg/tsdb" . "github.com/smartystreets/goconvey/convey" "github.com/stretchr/testify/require" "github.com/xorcare/pointer" ) -func newTimeSeriesPointsFromArgs(values ...float64) tsdb.TimeSeriesPoints { - points := make(tsdb.TimeSeriesPoints, 0) +func newTimeSeriesPointsFromArgs(values ...float64) plugins.DataTimeSeriesPoints { + points := make(plugins.DataTimeSeriesPoints, 0) for i := 0; i < len(values); i += 2 { - points = append(points, tsdb.NewTimePoint(null.FloatFrom(values[i]), values[i+1])) + points = append(points, plugins.DataTimePoint{null.FloatFrom(values[i]), null.FloatFrom(values[i+1])}) } return points @@ -60,7 +60,7 @@ func TestQueryCondition(t *testing.T) { Convey("should fire when avg is above 100", func() { points := newTimeSeriesPointsFromArgs(120, 0) - ctx.series = tsdb.TimeSeriesSlice{&tsdb.TimeSeries{Name: "test1", Points: points}} + ctx.series = plugins.DataTimeSeriesSlice{plugins.DataTimeSeries{Name: "test1", Points: points}} cr, err := ctx.exec() So(err, ShouldBeNil) @@ -80,7 +80,7 @@ func TestQueryCondition(t *testing.T) { Convey("Should not fire when avg is below 100", func() { points := newTimeSeriesPointsFromArgs(90, 0) - ctx.series = tsdb.TimeSeriesSlice{&tsdb.TimeSeries{Name: "test1", Points: points}} + ctx.series = plugins.DataTimeSeriesSlice{plugins.DataTimeSeries{Name: "test1", Points: points}} cr, err := ctx.exec() So(err, ShouldBeNil) @@ -99,9 +99,9 @@ func TestQueryCondition(t *testing.T) { }) Convey("Should fire if only first series matches", func() { - ctx.series = tsdb.TimeSeriesSlice{ - &tsdb.TimeSeries{Name: "test1", Points: newTimeSeriesPointsFromArgs(120, 0)}, - &tsdb.TimeSeries{Name: "test2", Points: newTimeSeriesPointsFromArgs(0, 0)}, + ctx.series = plugins.DataTimeSeriesSlice{ + plugins.DataTimeSeries{Name: "test1", Points: newTimeSeriesPointsFromArgs(120, 0)}, + plugins.DataTimeSeries{Name: "test2", Points: newTimeSeriesPointsFromArgs(0, 0)}, } cr, err := ctx.exec() @@ -111,7 +111,7 @@ func TestQueryCondition(t *testing.T) { Convey("No series", func() { Convey("Should set NoDataFound when condition is gt", func() { - ctx.series = tsdb.TimeSeriesSlice{} + ctx.series = plugins.DataTimeSeriesSlice{} cr, err := ctx.exec() So(err, ShouldBeNil) @@ -121,7 +121,7 @@ func TestQueryCondition(t *testing.T) { Convey("Should be firing when condition is no_value", func() { ctx.evaluator = `{"type": "no_value", "params": []}` - ctx.series = tsdb.TimeSeriesSlice{} + ctx.series = plugins.DataTimeSeriesSlice{} cr, err := ctx.exec() So(err, ShouldBeNil) @@ -132,8 +132,8 @@ func TestQueryCondition(t *testing.T) { Convey("Empty series", func() { Convey("Should set Firing if eval match", func() { ctx.evaluator = `{"type": "no_value", "params": []}` - ctx.series = tsdb.TimeSeriesSlice{ - &tsdb.TimeSeries{Name: "test1", Points: newTimeSeriesPointsFromArgs()}, + ctx.series = plugins.DataTimeSeriesSlice{ + 
plugins.DataTimeSeries{Name: "test1", Points: newTimeSeriesPointsFromArgs()}, } cr, err := ctx.exec() @@ -142,9 +142,9 @@ func TestQueryCondition(t *testing.T) { }) Convey("Should set NoDataFound both series are empty", func() { - ctx.series = tsdb.TimeSeriesSlice{ - &tsdb.TimeSeries{Name: "test1", Points: newTimeSeriesPointsFromArgs()}, - &tsdb.TimeSeries{Name: "test2", Points: newTimeSeriesPointsFromArgs()}, + ctx.series = plugins.DataTimeSeriesSlice{ + plugins.DataTimeSeries{Name: "test1", Points: newTimeSeriesPointsFromArgs()}, + plugins.DataTimeSeries{Name: "test2", Points: newTimeSeriesPointsFromArgs()}, } cr, err := ctx.exec() @@ -153,9 +153,9 @@ func TestQueryCondition(t *testing.T) { }) Convey("Should set NoDataFound both series contains null", func() { - ctx.series = tsdb.TimeSeriesSlice{ - &tsdb.TimeSeries{Name: "test1", Points: tsdb.TimeSeriesPoints{tsdb.TimePoint{null.FloatFromPtr(nil), null.FloatFrom(0)}}}, - &tsdb.TimeSeries{Name: "test2", Points: tsdb.TimeSeriesPoints{tsdb.TimePoint{null.FloatFromPtr(nil), null.FloatFrom(0)}}}, + ctx.series = plugins.DataTimeSeriesSlice{ + plugins.DataTimeSeries{Name: "test1", Points: plugins.DataTimeSeriesPoints{plugins.DataTimePoint{null.FloatFromPtr(nil), null.FloatFrom(0)}}}, + plugins.DataTimeSeries{Name: "test2", Points: plugins.DataTimeSeriesPoints{plugins.DataTimePoint{null.FloatFromPtr(nil), null.FloatFrom(0)}}}, } cr, err := ctx.exec() @@ -164,9 +164,9 @@ func TestQueryCondition(t *testing.T) { }) Convey("Should not set NoDataFound if one series is empty", func() { - ctx.series = tsdb.TimeSeriesSlice{ - &tsdb.TimeSeries{Name: "test1", Points: newTimeSeriesPointsFromArgs()}, - &tsdb.TimeSeries{Name: "test2", Points: newTimeSeriesPointsFromArgs(120, 0)}, + ctx.series = plugins.DataTimeSeriesSlice{ + plugins.DataTimeSeries{Name: "test1", Points: newTimeSeriesPointsFromArgs()}, + plugins.DataTimeSeries{Name: "test2", Points: newTimeSeriesPointsFromArgs(120, 0)}, } cr, err := ctx.exec() @@ -181,7 +181,7 @@ func TestQueryCondition(t *testing.T) { type queryConditionTestContext struct { reducer string evaluator string - series tsdb.TimeSeriesSlice + series plugins.DataTimeSeriesSlice frame *data.Frame result *alerting.EvalContext condition *QueryCondition @@ -207,25 +207,33 @@ func (ctx *queryConditionTestContext) exec() (*alerting.ConditionResult, error) ctx.condition = condition - qr := &tsdb.QueryResult{ + qr := plugins.DataQueryResult{ Series: ctx.series, } if ctx.frame != nil { - qr = &tsdb.QueryResult{ - Dataframes: tsdb.NewDecodedDataFrames(data.Frames{ctx.frame}), + qr = plugins.DataQueryResult{ + Dataframes: plugins.NewDecodedDataFrames(data.Frames{ctx.frame}), } } - - condition.HandleRequest = func(context context.Context, dsInfo *models.DataSource, req *tsdb.TsdbQuery) (*tsdb.Response, error) { - return &tsdb.Response{ - Results: map[string]*tsdb.QueryResult{ + reqHandler := fakeReqHandler{ + response: plugins.DataResponse{ + Results: map[string]plugins.DataQueryResult{ "A": qr, }, - }, nil + }, } - return condition.Eval(ctx.result) + return condition.Eval(ctx.result, reqHandler) +} + +type fakeReqHandler struct { + response plugins.DataResponse +} + +func (rh fakeReqHandler) HandleRequest(context.Context, *models.DataSource, plugins.DataQuery) ( + plugins.DataResponse, error) { + return rh.response, nil } func queryConditionScenario(desc string, fn queryConditionScenarioFunc) { @@ -249,7 +257,7 @@ func TestFrameToSeriesSlice(t *testing.T) { tests := []struct { name string frame *data.Frame - seriesSlice tsdb.TimeSeriesSlice + 
seriesSlice plugins.DataTimeSeriesSlice Err require.ErrorAssertionFunc }{ { @@ -268,21 +276,21 @@ func TestFrameToSeriesSlice(t *testing.T) { 4.0, })), - seriesSlice: tsdb.TimeSeriesSlice{ - &tsdb.TimeSeries{ + seriesSlice: plugins.DataTimeSeriesSlice{ + plugins.DataTimeSeries{ Name: "Values Int64s {Animal Factor=cat}", Tags: map[string]string{"Animal Factor": "cat"}, - Points: tsdb.TimeSeriesPoints{ - tsdb.TimePoint{null.FloatFrom(math.NaN()), null.FloatFrom(1577934240000)}, - tsdb.TimePoint{null.FloatFrom(3), null.FloatFrom(1577934270000)}, + Points: plugins.DataTimeSeriesPoints{ + plugins.DataTimePoint{null.FloatFrom(math.NaN()), null.FloatFrom(1577934240000)}, + plugins.DataTimePoint{null.FloatFrom(3), null.FloatFrom(1577934270000)}, }, }, - &tsdb.TimeSeries{ + plugins.DataTimeSeries{ Name: "Values Floats {Animal Factor=sloth}", Tags: map[string]string{"Animal Factor": "sloth"}, - Points: tsdb.TimeSeriesPoints{ - tsdb.TimePoint{null.FloatFrom(2), null.FloatFrom(1577934240000)}, - tsdb.TimePoint{null.FloatFrom(4), null.FloatFrom(1577934270000)}, + Points: plugins.DataTimeSeriesPoints{ + plugins.DataTimePoint{null.FloatFrom(2), null.FloatFrom(1577934240000)}, + plugins.DataTimePoint{null.FloatFrom(4), null.FloatFrom(1577934270000)}, }, }, }, @@ -295,16 +303,16 @@ func TestFrameToSeriesSlice(t *testing.T) { data.NewField(`Values Int64s`, data.Labels{"Animal Factor": "cat"}, []*int64{}), data.NewField(`Values Floats`, data.Labels{"Animal Factor": "sloth"}, []float64{})), - seriesSlice: tsdb.TimeSeriesSlice{ - &tsdb.TimeSeries{ + seriesSlice: plugins.DataTimeSeriesSlice{ + plugins.DataTimeSeries{ Name: "Values Int64s {Animal Factor=cat}", Tags: map[string]string{"Animal Factor": "cat"}, - Points: tsdb.TimeSeriesPoints{}, + Points: plugins.DataTimeSeriesPoints{}, }, - &tsdb.TimeSeries{ + plugins.DataTimeSeries{ Name: "Values Floats {Animal Factor=sloth}", Tags: map[string]string{"Animal Factor": "sloth"}, - Points: tsdb.TimeSeriesPoints{}, + Points: plugins.DataTimeSeriesPoints{}, }, }, Err: require.NoError, @@ -315,10 +323,10 @@ func TestFrameToSeriesSlice(t *testing.T) { data.NewField("Time", data.Labels{}, []time.Time{}), data.NewField(`Values`, data.Labels{}, []float64{})), - seriesSlice: tsdb.TimeSeriesSlice{ - &tsdb.TimeSeries{ + seriesSlice: plugins.DataTimeSeriesSlice{ + plugins.DataTimeSeries{ Name: "Values", - Points: tsdb.TimeSeriesPoints{}, + Points: plugins.DataTimeSeriesPoints{}, }, }, Err: require.NoError, @@ -331,10 +339,10 @@ func TestFrameToSeriesSlice(t *testing.T) { DisplayNameFromDS: "sloth", })), - seriesSlice: tsdb.TimeSeriesSlice{ - &tsdb.TimeSeries{ + seriesSlice: plugins.DataTimeSeriesSlice{ + plugins.DataTimeSeries{ Name: "sloth", - Points: tsdb.TimeSeriesPoints{}, + Points: plugins.DataTimeSeriesPoints{}, Tags: map[string]string{"Rating": "10"}, }, }, @@ -349,10 +357,10 @@ func TestFrameToSeriesSlice(t *testing.T) { DisplayNameFromDS: "sloth #2", })), - seriesSlice: tsdb.TimeSeriesSlice{ - &tsdb.TimeSeries{ + seriesSlice: plugins.DataTimeSeriesSlice{ + plugins.DataTimeSeries{ Name: "sloth #1", - Points: tsdb.TimeSeriesPoints{}, + Points: plugins.DataTimeSeriesPoints{}, }, }, Err: require.NoError, diff --git a/pkg/services/alerting/conditions/reducer.go b/pkg/services/alerting/conditions/reducer.go index 6a23e6f9e48..442cb548277 100644 --- a/pkg/services/alerting/conditions/reducer.go +++ b/pkg/services/alerting/conditions/reducer.go @@ -6,7 +6,7 @@ import ( "sort" "github.com/grafana/grafana/pkg/components/null" - "github.com/grafana/grafana/pkg/tsdb" + 
"github.com/grafana/grafana/pkg/plugins" ) // queryReducer reduces a timeseries to a nullable float @@ -18,7 +18,7 @@ type queryReducer struct { } //nolint: gocyclo -func (s *queryReducer) Reduce(series *tsdb.TimeSeries) null.Float { +func (s *queryReducer) Reduce(series plugins.DataTimeSeries) null.Float { if len(series.Points) == 0 { return null.FloatFromPtr(nil) } @@ -126,7 +126,7 @@ func newSimpleReducer(t string) *queryReducer { return &queryReducer{Type: t} } -func calculateDiff(series *tsdb.TimeSeries, allNull bool, value float64, fn func(float64, float64) float64) (bool, float64) { +func calculateDiff(series plugins.DataTimeSeries, allNull bool, value float64, fn func(float64, float64) float64) (bool, float64) { var ( points = series.Points first float64 diff --git a/pkg/services/alerting/conditions/reducer_test.go b/pkg/services/alerting/conditions/reducer_test.go index 0f9ffc13d34..baa46925785 100644 --- a/pkg/services/alerting/conditions/reducer_test.go +++ b/pkg/services/alerting/conditions/reducer_test.go @@ -7,7 +7,7 @@ import ( . "github.com/smartystreets/goconvey/convey" "github.com/grafana/grafana/pkg/components/null" - "github.com/grafana/grafana/pkg/tsdb" + "github.com/grafana/grafana/pkg/plugins" ) func TestSimpleReducer(t *testing.T) { @@ -54,16 +54,16 @@ func TestSimpleReducer(t *testing.T) { Convey("median should ignore null values", func() { reducer := newSimpleReducer("median") - series := &tsdb.TimeSeries{ + series := plugins.DataTimeSeries{ Name: "test time series", } - series.Points = append(series.Points, tsdb.NewTimePoint(null.FloatFromPtr(nil), 1)) - series.Points = append(series.Points, tsdb.NewTimePoint(null.FloatFromPtr(nil), 2)) - series.Points = append(series.Points, tsdb.NewTimePoint(null.FloatFromPtr(nil), 3)) - series.Points = append(series.Points, tsdb.NewTimePoint(null.FloatFrom(float64(1)), 4)) - series.Points = append(series.Points, tsdb.NewTimePoint(null.FloatFrom(float64(2)), 5)) - series.Points = append(series.Points, tsdb.NewTimePoint(null.FloatFrom(float64(3)), 6)) + series.Points = append(series.Points, plugins.DataTimePoint{null.FloatFromPtr(nil), null.FloatFrom(1)}) + series.Points = append(series.Points, plugins.DataTimePoint{null.FloatFromPtr(nil), null.FloatFrom(2)}) + series.Points = append(series.Points, plugins.DataTimePoint{null.FloatFromPtr(nil), null.FloatFrom(3)}) + series.Points = append(series.Points, plugins.DataTimePoint{null.FloatFrom(float64(1)), null.FloatFrom(4)}) + series.Points = append(series.Points, plugins.DataTimePoint{null.FloatFrom(float64(2)), null.FloatFrom(5)}) + series.Points = append(series.Points, plugins.DataTimePoint{null.FloatFrom(float64(3)), null.FloatFrom(6)}) result := reducer.Reduce(series) So(result.Valid, ShouldEqual, true) @@ -77,25 +77,25 @@ func TestSimpleReducer(t *testing.T) { Convey("avg with only nulls", func() { reducer := newSimpleReducer("avg") - series := &tsdb.TimeSeries{ + series := plugins.DataTimeSeries{ Name: "test time series", } - series.Points = append(series.Points, tsdb.NewTimePoint(null.FloatFromPtr(nil), 1)) + series.Points = append(series.Points, plugins.DataTimePoint{null.FloatFromPtr(nil), null.FloatFrom(1)}) So(reducer.Reduce(series).Valid, ShouldEqual, false) }) Convey("count_non_null", func() { Convey("with null values and real values", func() { reducer := newSimpleReducer("count_non_null") - series := &tsdb.TimeSeries{ + series := plugins.DataTimeSeries{ Name: "test time series", } - series.Points = append(series.Points, tsdb.NewTimePoint(null.FloatFromPtr(nil), 1)) - 
series.Points = append(series.Points, tsdb.NewTimePoint(null.FloatFromPtr(nil), 2)) - series.Points = append(series.Points, tsdb.NewTimePoint(null.FloatFrom(3), 3)) - series.Points = append(series.Points, tsdb.NewTimePoint(null.FloatFrom(3), 4)) + series.Points = append(series.Points, plugins.DataTimePoint{null.FloatFromPtr(nil), null.FloatFrom(1)}) + series.Points = append(series.Points, plugins.DataTimePoint{null.FloatFromPtr(nil), null.FloatFrom(2)}) + series.Points = append(series.Points, plugins.DataTimePoint{null.FloatFrom(3), null.FloatFrom(3)}) + series.Points = append(series.Points, plugins.DataTimePoint{null.FloatFrom(3), null.FloatFrom(4)}) So(reducer.Reduce(series).Valid, ShouldEqual, true) So(reducer.Reduce(series).Float64, ShouldEqual, 2) @@ -103,12 +103,12 @@ func TestSimpleReducer(t *testing.T) { Convey("with null values", func() { reducer := newSimpleReducer("count_non_null") - series := &tsdb.TimeSeries{ + series := plugins.DataTimeSeries{ Name: "test time series", } - series.Points = append(series.Points, tsdb.NewTimePoint(null.FloatFromPtr(nil), 1)) - series.Points = append(series.Points, tsdb.NewTimePoint(null.FloatFromPtr(nil), 2)) + series.Points = append(series.Points, plugins.DataTimePoint{null.FloatFromPtr(nil), null.FloatFrom(1)}) + series.Points = append(series.Points, plugins.DataTimePoint{null.FloatFromPtr(nil), null.FloatFrom(2)}) So(reducer.Reduce(series).Valid, ShouldEqual, false) }) @@ -116,14 +116,14 @@ func TestSimpleReducer(t *testing.T) { Convey("avg of number values and null values should ignore nulls", func() { reducer := newSimpleReducer("avg") - series := &tsdb.TimeSeries{ + series := plugins.DataTimeSeries{ Name: "test time series", } - series.Points = append(series.Points, tsdb.NewTimePoint(null.FloatFrom(3), 1)) - series.Points = append(series.Points, tsdb.NewTimePoint(null.FloatFromPtr(nil), 2)) - series.Points = append(series.Points, tsdb.NewTimePoint(null.FloatFromPtr(nil), 3)) - series.Points = append(series.Points, tsdb.NewTimePoint(null.FloatFrom(3), 4)) + series.Points = append(series.Points, plugins.DataTimePoint{null.FloatFrom(3), null.FloatFrom(1)}) + series.Points = append(series.Points, plugins.DataTimePoint{null.FloatFromPtr(nil), null.FloatFrom(2)}) + series.Points = append(series.Points, plugins.DataTimePoint{null.FloatFromPtr(nil), null.FloatFrom(3)}) + series.Points = append(series.Points, plugins.DataTimePoint{null.FloatFrom(3), null.FloatFrom(4)}) So(reducer.Reduce(series).Float64, ShouldEqual, float64(3)) }) @@ -181,12 +181,12 @@ func TestSimpleReducer(t *testing.T) { Convey("diff with only nulls", func() { reducer := newSimpleReducer("diff") - series := &tsdb.TimeSeries{ + series := plugins.DataTimeSeries{ Name: "test time series", } - series.Points = append(series.Points, tsdb.NewTimePoint(null.FloatFromPtr(nil), 1)) - series.Points = append(series.Points, tsdb.NewTimePoint(null.FloatFromPtr(nil), 2)) + series.Points = append(series.Points, plugins.DataTimePoint{null.FloatFromPtr(nil), null.FloatFrom(1)}) + series.Points = append(series.Points, plugins.DataTimePoint{null.FloatFromPtr(nil), null.FloatFrom(2)}) So(reducer.Reduce(series).Valid, ShouldEqual, false) }) @@ -244,12 +244,12 @@ func TestSimpleReducer(t *testing.T) { Convey("diff_abs with only nulls", func() { reducer := newSimpleReducer("diff_abs") - series := &tsdb.TimeSeries{ + series := plugins.DataTimeSeries{ Name: "test time series", } - series.Points = append(series.Points, tsdb.NewTimePoint(null.FloatFromPtr(nil), 1)) - series.Points = append(series.Points, 
tsdb.NewTimePoint(null.FloatFromPtr(nil), 2)) + series.Points = append(series.Points, plugins.DataTimePoint{null.FloatFromPtr(nil), null.FloatFrom(1)}) + series.Points = append(series.Points, plugins.DataTimePoint{null.FloatFromPtr(nil), null.FloatFrom(2)}) So(reducer.Reduce(series).Valid, ShouldEqual, false) }) @@ -307,12 +307,12 @@ func TestSimpleReducer(t *testing.T) { Convey("percent_diff with only nulls", func() { reducer := newSimpleReducer("percent_diff") - series := &tsdb.TimeSeries{ + series := plugins.DataTimeSeries{ Name: "test time series", } - series.Points = append(series.Points, tsdb.NewTimePoint(null.FloatFromPtr(nil), 1)) - series.Points = append(series.Points, tsdb.NewTimePoint(null.FloatFromPtr(nil), 2)) + series.Points = append(series.Points, plugins.DataTimePoint{null.FloatFromPtr(nil), null.FloatFrom(1)}) + series.Points = append(series.Points, plugins.DataTimePoint{null.FloatFromPtr(nil), null.FloatFrom(2)}) So(reducer.Reduce(series).Valid, ShouldEqual, false) }) @@ -370,12 +370,12 @@ func TestSimpleReducer(t *testing.T) { Convey("percent_diff_abs with only nulls", func() { reducer := newSimpleReducer("percent_diff_abs") - series := &tsdb.TimeSeries{ + series := plugins.DataTimeSeries{ Name: "test time series", } - series.Points = append(series.Points, tsdb.NewTimePoint(null.FloatFromPtr(nil), 1)) - series.Points = append(series.Points, tsdb.NewTimePoint(null.FloatFromPtr(nil), 2)) + series.Points = append(series.Points, plugins.DataTimePoint{null.FloatFromPtr(nil), null.FloatFrom(1)}) + series.Points = append(series.Points, plugins.DataTimePoint{null.FloatFromPtr(nil), null.FloatFrom(2)}) So(reducer.Reduce(series).Valid, ShouldEqual, false) }) @@ -399,12 +399,12 @@ func TestSimpleReducer(t *testing.T) { func testReducer(reducerType string, datapoints ...float64) float64 { reducer := newSimpleReducer(reducerType) - series := &tsdb.TimeSeries{ + series := plugins.DataTimeSeries{ Name: "test time series", } for idx := range datapoints { - series.Points = append(series.Points, tsdb.NewTimePoint(null.FloatFrom(datapoints[idx]), 1234134)) + series.Points = append(series.Points, plugins.DataTimePoint{null.FloatFrom(datapoints[idx]), null.FloatFrom(1234134)}) } return reducer.Reduce(series).Float64 diff --git a/pkg/services/alerting/engine.go b/pkg/services/alerting/engine.go index f836e581f97..86b360e4482 100644 --- a/pkg/services/alerting/engine.go +++ b/pkg/services/alerting/engine.go @@ -13,6 +13,7 @@ import ( "github.com/grafana/grafana/pkg/registry" "github.com/grafana/grafana/pkg/services/rendering" "github.com/grafana/grafana/pkg/setting" + "github.com/grafana/grafana/pkg/tsdb" "github.com/opentracing/opentracing-go" "github.com/opentracing/opentracing-go/ext" tlog "github.com/opentracing/opentracing-go/log" @@ -26,6 +27,7 @@ type AlertEngine struct { RenderService rendering.Service `inject:""` Bus bus.Bus `inject:""` RequestValidator models.PluginRequestValidator `inject:""` + DataService *tsdb.Service `inject:""` execQueue chan *Job ticker *Ticker @@ -50,7 +52,7 @@ func (e *AlertEngine) Init() error { e.ticker = NewTicker(time.Now(), time.Second*0, clock.New(), 1) e.execQueue = make(chan *Job, 1000) e.scheduler = newScheduler() - e.evalHandler = NewEvalHandler() + e.evalHandler = NewEvalHandler(e.DataService) e.ruleReader = newRuleReader() e.log = log.New("alerting.engine") e.resultHandler = newResultHandler(e.RenderService) diff --git a/pkg/services/alerting/eval_handler.go b/pkg/services/alerting/eval_handler.go index 323570a2f26..8cff8877d7f 100644 --- 
a/pkg/services/alerting/eval_handler.go
+++ b/pkg/services/alerting/eval_handler.go
@@ -7,19 +7,22 @@ import (
 	"github.com/grafana/grafana/pkg/infra/log"
 	"github.com/grafana/grafana/pkg/infra/metrics"
+	"github.com/grafana/grafana/pkg/tsdb/tsdbifaces"
 )
 
 // DefaultEvalHandler is responsible for evaluating the alert rule.
 type DefaultEvalHandler struct {
 	log             log.Logger
 	alertJobTimeout time.Duration
+	requestHandler  tsdbifaces.RequestHandler
 }
 
 // NewEvalHandler is the `DefaultEvalHandler` constructor.
-func NewEvalHandler() *DefaultEvalHandler {
+func NewEvalHandler(requestHandler tsdbifaces.RequestHandler) *DefaultEvalHandler {
 	return &DefaultEvalHandler{
 		log:             log.New("alerting.evalHandler"),
 		alertJobTimeout: time.Second * 5,
+		requestHandler:  requestHandler,
 	}
 }
 
@@ -31,7 +34,7 @@ func (e *DefaultEvalHandler) Eval(context *EvalContext) {
 	for i := 0; i < len(context.Rule.Conditions); i++ {
 		condition := context.Rule.Conditions[i]
-		cr, err := condition.Eval(context)
+		cr, err := condition.Eval(context, e.requestHandler)
 		if err != nil {
 			context.Error = err
 		}
diff --git a/pkg/services/alerting/eval_handler_test.go b/pkg/services/alerting/eval_handler_test.go
index 218dffca158..debce806d8b 100644
--- a/pkg/services/alerting/eval_handler_test.go
+++ b/pkg/services/alerting/eval_handler_test.go
@@ -5,6 +5,7 @@ import (
 	"testing"
 
 	"github.com/grafana/grafana/pkg/services/validations"
+	"github.com/grafana/grafana/pkg/tsdb/tsdbifaces"
 	. "github.com/smartystreets/goconvey/convey"
 )
 
@@ -16,13 +17,13 @@ type conditionStub struct {
 	noData   bool
 }
 
-func (c *conditionStub) Eval(context *EvalContext) (*ConditionResult, error) {
+func (c *conditionStub) Eval(context *EvalContext, reqHandler tsdbifaces.RequestHandler) (*ConditionResult, error) {
 	return &ConditionResult{Firing: c.firing, EvalMatches: c.matches, Operator: c.operator, NoDataFound: c.noData}, nil
 }
 
 func TestAlertingEvaluationHandler(t *testing.T) {
 	Convey("Test alert evaluation handler", t, func() {
-		handler := NewEvalHandler()
+		handler := NewEvalHandler(nil)
 
 		Convey("Should return triggered with single passing condition", func() {
 			context := NewEvalContext(context.TODO(), &Rule{
diff --git a/pkg/services/alerting/interfaces.go b/pkg/services/alerting/interfaces.go
index 93d3127d6bd..fd812262a63 100644
--- a/pkg/services/alerting/interfaces.go
+++ b/pkg/services/alerting/interfaces.go
@@ -5,6 +5,7 @@ import (
 	"time"
 
 	"github.com/grafana/grafana/pkg/models"
+	"github.com/grafana/grafana/pkg/tsdb/tsdbifaces"
 )
 
 type evalHandler interface {
@@ -59,5 +60,5 @@ type ConditionResult struct {
 
 // Condition is responsible for evaluating an alert condition.
 type Condition interface {
-	Eval(result *EvalContext) (*ConditionResult, error)
+	Eval(result *EvalContext, requestHandler tsdbifaces.RequestHandler) (*ConditionResult, error)
 }
diff --git a/pkg/services/alerting/rule_test.go b/pkg/services/alerting/rule_test.go
index 212fc6578e5..bfaac9fa975 100644
--- a/pkg/services/alerting/rule_test.go
+++ b/pkg/services/alerting/rule_test.go
@@ -6,6 +6,7 @@ import (
 	"github.com/grafana/grafana/pkg/components/simplejson"
 	"github.com/grafana/grafana/pkg/models"
 	"github.com/grafana/grafana/pkg/services/sqlstore"
+	"github.com/grafana/grafana/pkg/tsdb/tsdbifaces"
 	.
"github.com/smartystreets/goconvey/convey" "github.com/stretchr/testify/assert" "github.com/stretchr/testify/require" @@ -13,7 +14,7 @@ import ( type FakeCondition struct{} -func (f *FakeCondition) Eval(context *EvalContext) (*ConditionResult, error) { +func (f *FakeCondition) Eval(context *EvalContext, reqHandler tsdbifaces.RequestHandler) (*ConditionResult, error) { return &ConditionResult{}, nil } diff --git a/pkg/services/alerting/test_rule.go b/pkg/services/alerting/test_rule.go index 8d1c1426b72..fe9e5ea3e75 100644 --- a/pkg/services/alerting/test_rule.go +++ b/pkg/services/alerting/test_rule.go @@ -4,59 +4,39 @@ import ( "context" "fmt" - "github.com/grafana/grafana/pkg/bus" "github.com/grafana/grafana/pkg/components/simplejson" "github.com/grafana/grafana/pkg/models" ) -// AlertTestCommand initiates an test evaluation -// of an alert rule. -type AlertTestCommand struct { - Dashboard *simplejson.Json - PanelID int64 - OrgID int64 - User *models.SignedInUser +// AlertTest makes a test alert. +func (e *AlertEngine) AlertTest(orgID int64, dashboard *simplejson.Json, panelID int64, user *models.SignedInUser) (*EvalContext, error) { + dash := models.NewDashboardFromJson(dashboard) - Result *EvalContext -} - -func init() { - bus.AddHandler("alerting", handleAlertTestCommand) -} - -func handleAlertTestCommand(cmd *AlertTestCommand) error { - dash := models.NewDashboardFromJson(cmd.Dashboard) - - extractor := NewDashAlertExtractor(dash, cmd.OrgID, cmd.User) + extractor := NewDashAlertExtractor(dash, orgID, user) alerts, err := extractor.GetAlerts() if err != nil { - return err + return nil, err } for _, alert := range alerts { - if alert.PanelId == cmd.PanelID { - rule, err := NewRuleFromDBAlert(alert, true) - if err != nil { - return err - } - - cmd.Result = testAlertRule(rule) - return nil + if alert.PanelId != panelID { + continue } + rule, err := NewRuleFromDBAlert(alert, true) + if err != nil { + return nil, err + } + + handler := NewEvalHandler(e.DataService) + + context := NewEvalContext(context.Background(), rule, fakeRequestValidator{}) + context.IsTestRun = true + context.IsDebug = true + + handler.Eval(context) + context.Rule.State = context.GetNewState() + return context, nil } - return fmt.Errorf("could not find alert with panel ID %d", cmd.PanelID) -} - -func testAlertRule(rule *Rule) *EvalContext { - handler := NewEvalHandler() - - context := NewEvalContext(context.Background(), rule, fakeRequestValidator{}) - context.IsTestRun = true - context.IsDebug = true - - handler.Eval(context) - context.Rule.State = context.GetNewState() - - return context + return nil, fmt.Errorf("could not find alert with panel ID %d", panelID) } diff --git a/pkg/services/dashboards/dashboard_service.go b/pkg/services/dashboards/dashboard_service.go index e55536af5e2..94b622b6e43 100644 --- a/pkg/services/dashboards/dashboard_service.go +++ b/pkg/services/dashboards/dashboard_service.go @@ -6,6 +6,7 @@ import ( "github.com/grafana/grafana/pkg/components/gtime" "github.com/grafana/grafana/pkg/setting" + "github.com/grafana/grafana/pkg/tsdb/tsdbifaces" "github.com/grafana/grafana/pkg/bus" "github.com/grafana/grafana/pkg/infra/log" @@ -33,9 +34,10 @@ type DashboardProvisioningService interface { } // NewService factory for creating a new dashboard service -var NewService = func() DashboardService { +var NewService = func(reqHandler tsdbifaces.RequestHandler) DashboardService { return &dashboardServiceImpl{ - log: log.New("dashboard-service"), + log: log.New("dashboard-service"), + reqHandler: 
reqHandler, } } @@ -56,9 +58,10 @@ type SaveDashboardDTO struct { } type dashboardServiceImpl struct { - orgId int64 - user *models.SignedInUser - log log.Logger + orgId int64 + user *models.SignedInUser + log log.Logger + reqHandler tsdbifaces.RequestHandler } func (dr *dashboardServiceImpl) GetProvisionedDashboardData(name string) ([]*models.DashboardProvisioning, error) { @@ -386,7 +389,7 @@ func (s *FakeDashboardService) DeleteDashboard(dashboardId int64, orgId int64) e } func MockDashboardService(mock *FakeDashboardService) { - NewService = func() DashboardService { + NewService = func(tsdbifaces.RequestHandler) DashboardService { return mock } } diff --git a/pkg/services/librarypanels/librarypanels_test.go b/pkg/services/librarypanels/librarypanels_test.go index 501775ed83f..1e4b00fa062 100644 --- a/pkg/services/librarypanels/librarypanels_test.go +++ b/pkg/services/librarypanels/librarypanels_test.go @@ -763,7 +763,7 @@ func createDashboard(t *testing.T, user models.SignedInUser, title string, folde return nil }) - dashboard, err := dashboards.NewService().SaveDashboard(dashItem, true) + dashboard, err := dashboards.NewService(nil).SaveDashboard(dashItem, true) require.NoError(t, err) return dashboard diff --git a/pkg/services/live/live.go b/pkg/services/live/live.go index 263482885d5..4bda59eb273 100644 --- a/pkg/services/live/live.go +++ b/pkg/services/live/live.go @@ -8,7 +8,7 @@ import ( "github.com/grafana/grafana/pkg/api/routing" "github.com/grafana/grafana/pkg/infra/log" "github.com/grafana/grafana/pkg/models" - "github.com/grafana/grafana/pkg/plugins" + "github.com/grafana/grafana/pkg/plugins/manager" "github.com/grafana/grafana/pkg/registry" "github.com/grafana/grafana/pkg/services/live/features" "github.com/grafana/grafana/pkg/setting" @@ -222,7 +222,7 @@ func (g *GrafanaLive) GetChannelHandlerFactory(scope string, name string) (model }, nil } - p, ok := plugins.Plugins[name] + p, ok := manager.Plugins[name] if ok { h := &PluginHandler{ Plugin: p, diff --git a/pkg/services/ngalert/api.go b/pkg/services/ngalert/api.go index 69397381105..101eceb77f5 100644 --- a/pkg/services/ngalert/api.go +++ b/pkg/services/ngalert/api.go @@ -9,6 +9,7 @@ import ( "github.com/grafana/grafana/pkg/api/routing" "github.com/grafana/grafana/pkg/middleware" "github.com/grafana/grafana/pkg/models" + "github.com/grafana/grafana/pkg/plugins" "github.com/grafana/grafana/pkg/services/datasources" "github.com/grafana/grafana/pkg/services/ngalert/eval" "github.com/grafana/grafana/pkg/setting" @@ -20,6 +21,7 @@ type apiImpl struct { Cfg *setting.Cfg `inject:""` DatasourceCache datasources.CacheService `inject:""` RouteRegister routing.RouteRegister `inject:""` + DataService *tsdb.Service schedule scheduleService store store } @@ -64,13 +66,13 @@ func (api *apiImpl) conditionEvalEndpoint(c *models.ReqContext, cmd evalAlertCon } evaluator := eval.Evaluator{Cfg: api.Cfg} - evalResults, err := evaluator.ConditionEval(&evalCond, timeNow()) + evalResults, err := evaluator.ConditionEval(&evalCond, timeNow(), api.DataService) if err != nil { return response.Error(400, "Failed to evaluate conditions", err) } frame := evalResults.AsDataFrame() - df := tsdb.NewDecodedDataFrames([]*data.Frame{&frame}) + df := plugins.NewDecodedDataFrames([]*data.Frame{&frame}) instances, err := df.Encoded() if err != nil { return response.Error(400, "Failed to encode result dataframes", err) @@ -95,13 +97,13 @@ func (api *apiImpl) alertDefinitionEvalEndpoint(c *models.ReqContext) response.R } evaluator := eval.Evaluator{Cfg: api.Cfg} 
- evalResults, err := evaluator.ConditionEval(condition, timeNow()) + evalResults, err := evaluator.ConditionEval(condition, timeNow(), api.DataService) if err != nil { return response.Error(400, "Failed to evaluate alert", err) } frame := evalResults.AsDataFrame() - df := tsdb.NewDecodedDataFrames([]*data.Frame{&frame}) + df := plugins.NewDecodedDataFrames([]*data.Frame{&frame}) if err != nil { return response.Error(400, "Failed to instantiate Dataframes from the decoded frames", err) } diff --git a/pkg/services/ngalert/eval/eval.go b/pkg/services/ngalert/eval/eval.go index 72d4be0548d..7d0d551430c 100644 --- a/pkg/services/ngalert/eval/eval.go +++ b/pkg/services/ngalert/eval/eval.go @@ -8,6 +8,7 @@ import ( "time" "github.com/grafana/grafana/pkg/setting" + "github.com/grafana/grafana/pkg/tsdb" "github.com/grafana/grafana-plugin-sdk-go/backend" "github.com/grafana/grafana-plugin-sdk-go/data" @@ -100,7 +101,7 @@ type AlertExecCtx struct { } // execute runs the Condition's expressions or queries. -func (c *Condition) execute(ctx AlertExecCtx, now time.Time) (*ExecutionResults, error) { +func (c *Condition) execute(ctx AlertExecCtx, now time.Time, dataService *tsdb.Service) (*ExecutionResults, error) { result := ExecutionResults{} if !c.IsValid() { return nil, fmt.Errorf("invalid conditions") @@ -140,7 +141,10 @@ func (c *Condition) execute(ctx AlertExecCtx, now time.Time) (*ExecutionResults, }) } - exprService := expr.Service{Cfg: &setting.Cfg{ExpressionsEnabled: ctx.ExpressionsEnabled}} + exprService := expr.Service{ + Cfg: &setting.Cfg{ExpressionsEnabled: ctx.ExpressionsEnabled}, + DataService: dataService, + } pbRes, err := exprService.TransformData(ctx.Ctx, queryDataReq) if err != nil { return &result, err @@ -218,13 +222,13 @@ func (evalResults Results) AsDataFrame() data.Frame { } // ConditionEval executes conditions and evaluates the result. 
-func (e *Evaluator) ConditionEval(condition *Condition, now time.Time) (Results, error) { +func (e *Evaluator) ConditionEval(condition *Condition, now time.Time, dataService *tsdb.Service) (Results, error) { alertCtx, cancelFn := context.WithTimeout(context.Background(), alertingEvaluationTimeout) defer cancelFn() alertExecCtx := AlertExecCtx{OrgID: condition.OrgID, Ctx: alertCtx, ExpressionsEnabled: e.Cfg.ExpressionsEnabled} - execResult, err := condition.execute(alertExecCtx, now) + execResult, err := condition.execute(alertExecCtx, now, dataService) if err != nil { return nil, fmt.Errorf("failed to execute conditions: %w", err) } diff --git a/pkg/services/ngalert/ngalert.go b/pkg/services/ngalert/ngalert.go index 76dcbf28ce0..03552ceb145 100644 --- a/pkg/services/ngalert/ngalert.go +++ b/pkg/services/ngalert/ngalert.go @@ -7,6 +7,7 @@ import ( "github.com/benbjohnson/clock" "github.com/grafana/grafana/pkg/services/ngalert/eval" "github.com/grafana/grafana/pkg/services/sqlstore" + "github.com/grafana/grafana/pkg/tsdb" "github.com/grafana/grafana/pkg/api/routing" "github.com/grafana/grafana/pkg/infra/log" @@ -34,6 +35,7 @@ type AlertNG struct { DatasourceCache datasources.CacheService `inject:""` RouteRegister routing.RouteRegister `inject:""` SQLStore *sqlstore.SQLStore `inject:""` + DataService *tsdb.Service `inject:""` log log.Logger schedule scheduleService } @@ -57,14 +59,16 @@ func (ng *AlertNG) Init() error { evaluator: eval.Evaluator{Cfg: ng.Cfg}, store: store, } - ng.schedule = newScheduler(schedCfg) + ng.schedule = newScheduler(schedCfg, ng.DataService) api := apiImpl{ Cfg: ng.Cfg, DatasourceCache: ng.DatasourceCache, RouteRegister: ng.RouteRegister, + DataService: ng.DataService, schedule: ng.schedule, - store: store} + store: store, + } api.registerAPIEndpoints() return nil diff --git a/pkg/services/ngalert/schedule.go b/pkg/services/ngalert/schedule.go index 0bfe5e4790e..05aa17f6a3f 100644 --- a/pkg/services/ngalert/schedule.go +++ b/pkg/services/ngalert/schedule.go @@ -10,6 +10,7 @@ import ( "github.com/grafana/grafana/pkg/infra/log" "github.com/grafana/grafana/pkg/services/alerting" "github.com/grafana/grafana/pkg/services/ngalert/eval" + "github.com/grafana/grafana/pkg/tsdb" "golang.org/x/sync/errgroup" ) @@ -24,7 +25,8 @@ type scheduleService interface { overrideCfg(cfg schedulerCfg) } -func (sch *schedule) definitionRoutine(grafanaCtx context.Context, key alertDefinitionKey, evalCh <-chan *evalContext, stopCh <-chan struct{}) error { +func (sch *schedule) definitionRoutine(grafanaCtx context.Context, key alertDefinitionKey, + evalCh <-chan *evalContext, stopCh <-chan struct{}) error { sch.log.Debug("alert definition routine started", "key", key) evalRunning := false @@ -58,11 +60,12 @@ func (sch *schedule) definitionRoutine(grafanaCtx context.Context, key alertDefi OrgID: alertDefinition.OrgID, QueriesAndExpressions: alertDefinition.Data, } - results, err := sch.evaluator.ConditionEval(&condition, ctx.now) + results, err := sch.evaluator.ConditionEval(&condition, ctx.now, sch.dataService) end = timeNow() if err != nil { // consider saving alert instance on error - sch.log.Error("failed to evaluate alert definition", "title", alertDefinition.Title, "key", key, "attempt", attempt, "now", ctx.now, "duration", end.Sub(start), "error", err) + sch.log.Error("failed to evaluate alert definition", "title", alertDefinition.Title, + "key", key, "attempt", attempt, "now", ctx.now, "duration", end.Sub(start), "error", err) return err } for _, r := range results { @@ -129,6 +132,8 
@@ type schedule struct { evaluator eval.Evaluator store store + + dataService *tsdb.Service } type schedulerCfg struct { @@ -142,7 +147,7 @@ type schedulerCfg struct { } // newScheduler returns a new schedule. -func newScheduler(cfg schedulerCfg) *schedule { +func newScheduler(cfg schedulerCfg, dataService *tsdb.Service) *schedule { ticker := alerting.NewTicker(cfg.c.Now(), time.Second*0, cfg.c, int64(cfg.baseInterval.Seconds())) sch := schedule{ registry: alertDefinitionRegistry{alertDefinitionInfo: make(map[alertDefinitionKey]alertDefinitionInfo)}, @@ -155,6 +160,7 @@ func newScheduler(cfg schedulerCfg) *schedule { stopAppliedFunc: cfg.stopAppliedFunc, evaluator: cfg.evaluator, store: cfg.store, + dataService: dataService, } return &sch } diff --git a/pkg/services/provisioning/plugins/config_reader.go b/pkg/services/provisioning/plugins/config_reader.go index fe5175cf066..40550c8bdee 100644 --- a/pkg/services/provisioning/plugins/config_reader.go +++ b/pkg/services/provisioning/plugins/config_reader.go @@ -8,7 +8,7 @@ import ( "strings" "github.com/grafana/grafana/pkg/infra/log" - "github.com/grafana/grafana/pkg/plugins" + "github.com/grafana/grafana/pkg/plugins/manager" "gopkg.in/yaml.v2" ) @@ -112,7 +112,7 @@ func validatePluginsConfig(apps []*pluginsAsConfig) error { } for _, app := range apps[i].Apps { - if !plugins.IsAppInstalled(app.PluginID) { + if !manager.IsAppInstalled(app.PluginID) { return fmt.Errorf("app plugin not installed: %s", app.PluginID) } } diff --git a/pkg/services/provisioning/plugins/config_reader_test.go b/pkg/services/provisioning/plugins/config_reader_test.go index 992e815b25f..fef840d5734 100644 --- a/pkg/services/provisioning/plugins/config_reader_test.go +++ b/pkg/services/provisioning/plugins/config_reader_test.go @@ -6,10 +6,11 @@ import ( "github.com/grafana/grafana/pkg/infra/log" "github.com/grafana/grafana/pkg/plugins" + "github.com/grafana/grafana/pkg/plugins/manager" "github.com/stretchr/testify/require" ) -var ( +const ( incorrectSettings = "./testdata/test-configs/incorrect-settings" brokenYaml = "./testdata/test-configs/broken-yaml" emptyFolder = "./testdata/test-configs/empty_folder" @@ -46,7 +47,7 @@ func TestConfigReader(t *testing.T) { }) t.Run("Can read correct properties", func(t *testing.T) { - plugins.Apps = map[string]*plugins.AppPlugin{ + manager.Apps = map[string]*plugins.AppPlugin{ "test-plugin": {}, "test-plugin-2": {}, } diff --git a/pkg/services/rendering/rendering.go b/pkg/services/rendering/rendering.go index 43f7180e612..847ee787fe8 100644 --- a/pkg/services/rendering/rendering.go +++ b/pkg/services/rendering/rendering.go @@ -17,6 +17,7 @@ import ( "github.com/grafana/grafana/pkg/infra/log" "github.com/grafana/grafana/pkg/models" "github.com/grafana/grafana/pkg/plugins" + "github.com/grafana/grafana/pkg/plugins/manager" "github.com/grafana/grafana/pkg/registry" "github.com/grafana/grafana/pkg/setting" "github.com/grafana/grafana/pkg/util" @@ -86,7 +87,7 @@ func (rs *RenderingService) Run(ctx context.Context) error { if rs.pluginAvailable() { rs.log = rs.log.New("renderer", "plugin") - rs.pluginInfo = plugins.Renderer + rs.pluginInfo = manager.Renderer if err := rs.startPlugin(ctx); err != nil { return err @@ -106,7 +107,7 @@ func (rs *RenderingService) Run(ctx context.Context) error { } func (rs *RenderingService) pluginAvailable() bool { - return plugins.Renderer != nil + return manager.Renderer != nil } func (rs *RenderingService) remoteAvailable() bool { diff --git a/pkg/services/sqlstore/dashboard_service_integration_test.go 
b/pkg/services/sqlstore/dashboard_service_integration_test.go index bc999f48a7b..a824ef3fa80 100644 --- a/pkg/services/sqlstore/dashboard_service_integration_test.go +++ b/pkg/services/sqlstore/dashboard_service_integration_test.go @@ -8,6 +8,7 @@ import ( "github.com/grafana/grafana/pkg/components/simplejson" "github.com/grafana/grafana/pkg/services/dashboards" "github.com/grafana/grafana/pkg/services/guardian" + "github.com/stretchr/testify/require" "github.com/grafana/grafana/pkg/bus" "github.com/grafana/grafana/pkg/models" @@ -34,11 +35,11 @@ func TestIntegratedDashboardService(t *testing.T) { return nil }) - savedFolder := saveTestFolder("Saved folder", testOrgId) - savedDashInFolder := saveTestDashboard("Saved dash in folder", testOrgId, savedFolder.Id) - saveTestDashboard("Other saved dash in folder", testOrgId, savedFolder.Id) - savedDashInGeneralFolder := saveTestDashboard("Saved dashboard in general folder", testOrgId, 0) - otherSavedFolder := saveTestFolder("Other saved folder", testOrgId) + savedFolder := saveTestFolder(t, "Saved folder", testOrgId) + savedDashInFolder := saveTestDashboard(t, "Saved dash in folder", testOrgId, savedFolder.Id) + saveTestDashboard(t, "Other saved dash in folder", testOrgId, savedFolder.Id) + savedDashInGeneralFolder := saveTestDashboard(t, "Saved dashboard in general folder", testOrgId, 0) + otherSavedFolder := saveTestFolder(t, "Other saved folder", testOrgId) Convey("Should return dashboard model", func() { So(savedFolder.Title, ShouldEqual, "Saved folder") @@ -110,7 +111,7 @@ func TestIntegratedDashboardService(t *testing.T) { Overwrite: false, } - res := callSaveWithResult(cmd) + res := callSaveWithResult(t, cmd) Convey("It should create a new dashboard in organization B", func() { So(res, ShouldNotBeNil) @@ -385,7 +386,7 @@ func TestIntegratedDashboardService(t *testing.T) { Overwrite: shouldOverwrite, } - res := callSaveWithResult(cmd) + res := callSaveWithResult(t, cmd) So(res, ShouldNotBeNil) Convey("It should create a new dashboard", func() { @@ -409,7 +410,7 @@ func TestIntegratedDashboardService(t *testing.T) { Overwrite: shouldOverwrite, } - res := callSaveWithResult(cmd) + res := callSaveWithResult(t, cmd) So(res, ShouldNotBeNil) Convey("It should create a new dashboard", func() { @@ -434,7 +435,7 @@ func TestIntegratedDashboardService(t *testing.T) { Overwrite: shouldOverwrite, } - res := callSaveWithResult(cmd) + res := callSaveWithResult(t, cmd) So(res, ShouldNotBeNil) Convey("It should create a new folder", func() { @@ -459,7 +460,7 @@ func TestIntegratedDashboardService(t *testing.T) { Overwrite: shouldOverwrite, } - res := callSaveWithResult(cmd) + res := callSaveWithResult(t, cmd) So(res, ShouldNotBeNil) Convey("It should create a new dashboard", func() { @@ -484,7 +485,7 @@ func TestIntegratedDashboardService(t *testing.T) { Overwrite: shouldOverwrite, } - res := callSaveWithResult(cmd) + res := callSaveWithResult(t, cmd) So(res, ShouldNotBeNil) Convey("It should create a new dashboard", func() { @@ -545,7 +546,7 @@ func TestIntegratedDashboardService(t *testing.T) { Overwrite: shouldOverwrite, } - res := callSaveWithResult(cmd) + res := callSaveWithResult(t, cmd) So(res, ShouldNotBeNil) Convey("It should update dashboard", func() { @@ -590,7 +591,7 @@ func TestIntegratedDashboardService(t *testing.T) { Overwrite: shouldOverwrite, } - res := callSaveWithResult(cmd) + res := callSaveWithResult(t, cmd) So(res, ShouldNotBeNil) Convey("It should update dashboard", func() { @@ -676,7 +677,7 @@ func TestIntegratedDashboardService(t 
*testing.T) { Overwrite: shouldOverwrite, } - res := callSaveWithResult(cmd) + res := callSaveWithResult(t, cmd) So(res, ShouldNotBeNil) Convey("It should update dashboard", func() { @@ -701,7 +702,7 @@ func TestIntegratedDashboardService(t *testing.T) { Overwrite: shouldOverwrite, } - res := callSaveWithResult(cmd) + res := callSaveWithResult(t, cmd) So(res, ShouldNotBeNil) Convey("It should update dashboard", func() { @@ -726,7 +727,7 @@ func TestIntegratedDashboardService(t *testing.T) { Overwrite: shouldOverwrite, } - res := callSaveWithResult(cmd) + res := callSaveWithResult(t, cmd) Convey("It should update dashboard", func() { So(res, ShouldNotBeNil) @@ -772,7 +773,7 @@ func TestIntegratedDashboardService(t *testing.T) { Overwrite: shouldOverwrite, } - res := callSaveWithResult(cmd) + res := callSaveWithResult(t, cmd) Convey("It should overwrite existing dashboard", func() { So(res, ShouldNotBeNil) @@ -799,7 +800,7 @@ func TestIntegratedDashboardService(t *testing.T) { Overwrite: shouldOverwrite, } - res := callSaveWithResult(cmd) + res := callSaveWithResult(t, cmd) Convey("It should overwrite existing dashboard", func() { So(res, ShouldNotBeNil) @@ -962,19 +963,25 @@ func permissionScenario(desc string, canSave bool, fn dashboardPermissionScenari dashboardPermissionScenario(desc, mock, fn) } -func callSaveWithResult(cmd models.SaveDashboardCommand) *models.Dashboard { +func callSaveWithResult(t *testing.T, cmd models.SaveDashboardCommand) *models.Dashboard { + t.Helper() + dto := toSaveDashboardDto(cmd) - res, _ := dashboards.NewService().SaveDashboard(&dto, false) + res, err := dashboards.NewService(nil).SaveDashboard(&dto, false) + require.NoError(t, err) + return res } func callSaveWithError(cmd models.SaveDashboardCommand) error { dto := toSaveDashboardDto(cmd) - _, err := dashboards.NewService().SaveDashboard(&dto, false) + _, err := dashboards.NewService(nil).SaveDashboard(&dto, false) return err } -func saveTestDashboard(title string, orgId int64, folderId int64) *models.Dashboard { +func saveTestDashboard(t *testing.T, title string, orgId int64, folderId int64) *models.Dashboard { + t.Helper() + cmd := models.SaveDashboardCommand{ OrgId: orgId, FolderId: folderId, @@ -994,13 +1001,14 @@ func saveTestDashboard(title string, orgId int64, folderId int64) *models.Dashbo }, } - res, err := dashboards.NewService().SaveDashboard(&dto, false) - So(err, ShouldBeNil) + res, err := dashboards.NewService(nil).SaveDashboard(&dto, false) + require.NoError(t, err) return res } -func saveTestFolder(title string, orgId int64) *models.Dashboard { +func saveTestFolder(t *testing.T, title string, orgId int64) *models.Dashboard { + t.Helper() cmd := models.SaveDashboardCommand{ OrgId: orgId, FolderId: 0, @@ -1020,8 +1028,8 @@ func saveTestFolder(title string, orgId int64) *models.Dashboard { }, } - res, err := dashboards.NewService().SaveDashboard(&dto, false) - So(err, ShouldBeNil) + res, err := dashboards.NewService(nil).SaveDashboard(&dto, false) + require.NoError(t, err) return res } diff --git a/pkg/setting/setting.go b/pkg/setting/setting.go index 0251b05dae9..41eb78a6f4d 100644 --- a/pkg/setting/setting.go +++ b/pkg/setting/setting.go @@ -143,7 +143,6 @@ var ( // analytics ReportingEnabled bool ReportingDistributor string - CheckForUpdates bool GoogleAnalyticsId string GoogleTagManagerId string @@ -335,6 +334,9 @@ type Cfg struct { Env string + // Analytics + CheckForUpdates bool + // LDAP LDAPEnabled bool LDAPAllowSignup bool @@ -821,7 +823,7 @@ func (cfg *Cfg) Load(args 
*CommandLineArgs) error { cfg.MetricsEndpointDisableTotalStats = iniFile.Section("metrics").Key("disable_total_stats").MustBool(false) analytics := iniFile.Section("analytics") - CheckForUpdates = analytics.Key("check_for_updates").MustBool(true) + cfg.CheckForUpdates = analytics.Key("check_for_updates").MustBool(true) GoogleAnalyticsId = analytics.Key("google_analytics_ua_id").String() GoogleTagManagerId = analytics.Key("google_tag_manager_id").String() ReportingEnabled = analytics.Key("reporting_enabled").MustBool(true) diff --git a/pkg/tests/api/metrics/api_metrics_test.go b/pkg/tests/api/metrics/api_metrics_test.go index 6ad40c985ef..7586ce2b0b7 100644 --- a/pkg/tests/api/metrics/api_metrics_test.go +++ b/pkg/tests/api/metrics/api_metrics_test.go @@ -16,9 +16,9 @@ import ( "github.com/aws/aws-sdk-go/service/cloudwatchlogs/cloudwatchlogsiface" "github.com/grafana/grafana-plugin-sdk-go/data" "github.com/grafana/grafana/pkg/models" + "github.com/grafana/grafana/pkg/plugins" "github.com/grafana/grafana/pkg/services/sqlstore" "github.com/grafana/grafana/pkg/tests/testinfra" - "github.com/grafana/grafana/pkg/tsdb" "github.com/grafana/grafana/pkg/tsdb/cloudwatch" cwapi "github.com/aws/aws-sdk-go/service/cloudwatch" @@ -69,16 +69,16 @@ func TestQueryCloudWatchMetrics(t *testing.T) { } tr := makeCWRequest(t, req, addr) - assert.Equal(t, tsdb.Response{ - Results: map[string]*tsdb.QueryResult{ + assert.Equal(t, plugins.DataResponse{ + Results: map[string]plugins.DataQueryResult{ "A": { - RefId: "A", + RefID: "A", Meta: simplejson.NewFromAny(map[string]interface{}{ "rowCount": float64(1), }), - Tables: []*tsdb.Table{ + Tables: []plugins.DataTable{ { - Columns: []tsdb.TableColumn{ + Columns: []plugins.DataTableColumn{ { Text: "text", }, @@ -86,7 +86,7 @@ func TestQueryCloudWatchMetrics(t *testing.T) { Text: "value", }, }, - Rows: []tsdb.RowValues{ + Rows: []plugins.DataRowValues{ { "Test_MetricName", "Test_MetricName", @@ -130,7 +130,7 @@ func TestQueryCloudWatchLogs(t *testing.T) { } tr := makeCWRequest(t, req, addr) - dataFrames := tsdb.NewDecodedDataFrames(data.Frames{ + dataFrames := plugins.NewDecodedDataFrames(data.Frames{ &data.Frame{ Name: "logGroups", Fields: []*data.Field{ @@ -145,10 +145,10 @@ func TestQueryCloudWatchLogs(t *testing.T) { // In the future we should use gocmp instead and ignore this field _, err := dataFrames.Encoded() require.NoError(t, err) - assert.Equal(t, tsdb.Response{ - Results: map[string]*tsdb.QueryResult{ + assert.Equal(t, plugins.DataResponse{ + Results: map[string]plugins.DataQueryResult{ "A": { - RefId: "A", + RefID: "A", Dataframes: dataFrames, }, }, @@ -156,7 +156,7 @@ func TestQueryCloudWatchLogs(t *testing.T) { }) } -func makeCWRequest(t *testing.T, req dtos.MetricRequest, addr string) tsdb.Response { +func makeCWRequest(t *testing.T, req dtos.MetricRequest, addr string) plugins.DataResponse { t.Helper() buf := bytes.Buffer{} @@ -179,7 +179,7 @@ func makeCWRequest(t *testing.T, req dtos.MetricRequest, addr string) tsdb.Respo require.NoError(t, err) require.Equal(t, 200, resp.StatusCode) - var tr tsdb.Response + var tr plugins.DataResponse err = json.Unmarshal(buf.Bytes(), &tr) require.NoError(t, err)
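One behavioural detail worth calling out in the test above: Results changes from map[string]*tsdb.QueryResult to map[string]plugins.DataQueryResult, that is, from pointer to value semantics. A standalone sketch of what that means for code that wants to mutate a stored result (toy types, not the Grafana structs):

package main

import "fmt"

type dataQueryResult struct {
	RefID string
	Error error
}

type dataResponse struct {
	Results map[string]dataQueryResult
}

func main() {
	resp := dataResponse{Results: map[string]dataQueryResult{}}
	resp.Results["A"] = dataQueryResult{RefID: "A"}

	// resp.Results["A"].Error = ... would not compile: a map element is
	// not addressable, so with value semantics you read, modify and
	// store back.
	r := resp.Results["A"]
	r.Error = fmt.Errorf("query failed")
	resp.Results["A"] = r

	fmt.Println(resp.Results["A"].RefID, resp.Results["A"].Error)
}

This is why the datasource hunks in this diff build each DataQueryResult fully before assigning it into Results.

diff --git a/pkg/tsdb/azuremonitor/applicationinsights-datasource.go b/pkg/tsdb/azuremonitor/applicationinsights-datasource.go index 44033230ad0..a369ca0786e 100644 --- a/pkg/tsdb/azuremonitor/applicationinsights-datasource.go +++ b/pkg/tsdb/azuremonitor/applicationinsights-datasource.go @@ -18,8 +18,8 @@ import (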
"github.com/grafana/grafana/pkg/components/simplejson" "github.com/grafana/grafana/pkg/models" "github.com/grafana/grafana/pkg/plugins" + "github.com/grafana/grafana/pkg/plugins/manager" "github.com/grafana/grafana/pkg/setting" - "github.com/grafana/grafana/pkg/tsdb" "github.com/grafana/grafana/pkg/util/errutil" "github.com/opentracing/opentracing-go" "golang.org/x/net/context/ctxhttp" @@ -49,20 +49,22 @@ type ApplicationInsightsQuery struct { aggregation string } -func (e *ApplicationInsightsDatasource) executeTimeSeriesQuery(ctx context.Context, originalQueries []*tsdb.Query, timeRange *tsdb.TimeRange) (*tsdb.Response, error) { - result := &tsdb.Response{ - Results: map[string]*tsdb.QueryResult{}, +func (e *ApplicationInsightsDatasource) executeTimeSeriesQuery(ctx context.Context, + originalQueries []plugins.DataSubQuery, + timeRange plugins.DataTimeRange) (plugins.DataResponse, error) { + result := plugins.DataResponse{ + Results: map[string]plugins.DataQueryResult{}, } queries, err := e.buildQueries(originalQueries, timeRange) if err != nil { - return nil, err + return plugins.DataResponse{}, err } for _, query := range queries { queryRes, err := e.executeQuery(ctx, query) if err != nil { - return nil, err + return plugins.DataResponse{}, err } result.Results[query.RefID] = queryRes } @@ -70,7 +72,8 @@ func (e *ApplicationInsightsDatasource) executeTimeSeriesQuery(ctx context.Conte return result, nil } -func (e *ApplicationInsightsDatasource) buildQueries(queries []*tsdb.Query, timeRange *tsdb.TimeRange) ([]*ApplicationInsightsQuery, error) { +func (e *ApplicationInsightsDatasource) buildQueries(queries []plugins.DataSubQuery, + timeRange plugins.DataTimeRange) ([]*ApplicationInsightsQuery, error) { applicationInsightsQueries := []*ApplicationInsightsQuery{} startTime, err := timeRange.ParseFrom() if err != nil { @@ -100,7 +103,7 @@ func (e *ApplicationInsightsDatasource) buildQueries(queries []*tsdb.Query, time timeGrain := insightsJSONModel.TimeGrain timeGrains := insightsJSONModel.AllowedTimeGrainsMs if timeGrain == "auto" { - timeGrain, err = setAutoTimeGrain(query.IntervalMs, timeGrains) + timeGrain, err = setAutoTimeGrain(query.IntervalMS, timeGrains) if err != nil { return nil, err } @@ -122,7 +125,7 @@ func (e *ApplicationInsightsDatasource) buildQueries(queries []*tsdb.Query, time params.Add("segment", strings.Join(insightsJSONModel.Dimensions, ",")) } applicationInsightsQueries = append(applicationInsightsQueries, &ApplicationInsightsQuery{ - RefID: query.RefId, + RefID: query.RefID, ApiURL: azureURL, Params: params, Alias: insightsJSONModel.Alias, @@ -136,8 +139,9 @@ func (e *ApplicationInsightsDatasource) buildQueries(queries []*tsdb.Query, time return applicationInsightsQueries, nil } -func (e *ApplicationInsightsDatasource) executeQuery(ctx context.Context, query *ApplicationInsightsQuery) (*tsdb.QueryResult, error) { - queryResult := &tsdb.QueryResult{Meta: simplejson.New(), RefId: query.RefID} +func (e *ApplicationInsightsDatasource) executeQuery(ctx context.Context, query *ApplicationInsightsQuery) ( + plugins.DataQueryResult, error) { + queryResult := plugins.DataQueryResult{Meta: simplejson.New(), RefID: query.RefID} req, err := e.createRequest(ctx, e.dsInfo) if err != nil { @@ -178,18 +182,18 @@ func (e *ApplicationInsightsDatasource) executeQuery(ctx context.Context, query } }() if err != nil { - return nil, err + return plugins.DataQueryResult{}, err } if res.StatusCode/100 != 2 { azlog.Debug("Request failed", "status", res.Status, "body", string(body)) - return 
nil, fmt.Errorf("request failed, status: %s", res.Status) + return plugins.DataQueryResult{}, fmt.Errorf("request failed, status: %s", res.Status) } mr := MetricsResult{} err = json.Unmarshal(body, &mr) if err != nil { - return nil, err + return plugins.DataQueryResult{}, err } frame, err := InsightsMetricsResultToFrame(mr, query.metricName, query.aggregation, query.dimensions) @@ -200,13 +204,13 @@ func (e *ApplicationInsightsDatasource) executeQuery(ctx context.Context, query applyInsightsMetricAlias(frame, query.Alias) - queryResult.Dataframes = tsdb.NewDecodedDataFrames(data.Frames{frame}) + queryResult.Dataframes = plugins.NewDecodedDataFrames(data.Frames{frame}) return queryResult, nil } func (e *ApplicationInsightsDatasource) createRequest(ctx context.Context, dsInfo *models.DataSource) (*http.Request, error) { // find plugin - plugin, ok := plugins.DataSources[dsInfo.Type] + plugin, ok := manager.DataSources[dsInfo.Type] if !ok { return nil, errors.New("unable to find datasource plugin Azure Application Insights") } @@ -239,7 +243,8 @@ func (e *ApplicationInsightsDatasource) createRequest(ctx context.Context, dsInf return req, nil } -func (e *ApplicationInsightsDatasource) getPluginRoute(plugin *plugins.DataSourcePlugin, cloudName string) (*plugins.AppPluginRoute, string, error) { +func (e *ApplicationInsightsDatasource) getPluginRoute(plugin *plugins.DataSourcePlugin, cloudName string) ( + *plugins.AppPluginRoute, string, error) { pluginRouteName := "appinsights" if cloudName == "chinaazuremonitor" { @@ -247,7 +252,6 @@ func (e *ApplicationInsightsDatasource) getPluginRoute(plugin *plugins.DataSourc } var pluginRoute *plugins.AppPluginRoute - for _, route := range plugin.Routes { if route.Path == pluginRouteName { pluginRoute = route diff --git a/pkg/tsdb/azuremonitor/applicationinsights-datasource_test.go b/pkg/tsdb/azuremonitor/applicationinsights-datasource_test.go index 3b8080b54ad..515c4a71f2b 100644 --- a/pkg/tsdb/azuremonitor/applicationinsights-datasource_test.go +++ b/pkg/tsdb/azuremonitor/applicationinsights-datasource_test.go @@ -11,7 +11,6 @@ import ( "github.com/grafana/grafana/pkg/components/simplejson" "github.com/grafana/grafana/pkg/models" "github.com/grafana/grafana/pkg/plugins" - "github.com/grafana/grafana/pkg/tsdb" "github.com/stretchr/testify/require" . 
"github.com/smartystreets/goconvey/convey" @@ -23,12 +22,12 @@ func TestApplicationInsightsDatasource(t *testing.T) { Convey("Parse queries from frontend and build AzureMonitor API queries", func() { fromStart := time.Date(2018, 3, 15, 13, 0, 0, 0, time.UTC).In(time.Local) - tsdbQuery := &tsdb.TsdbQuery{ - TimeRange: &tsdb.TimeRange{ + tsdbQuery := plugins.DataQuery{ + TimeRange: &plugins.DataTimeRange{ From: fmt.Sprintf("%v", fromStart.Unix()*1000), To: fmt.Sprintf("%v", fromStart.Add(34*time.Minute).Unix()*1000), }, - Queries: []*tsdb.Query{ + Queries: []plugins.DataSubQuery{ { DataSource: &models.DataSource{ JsonData: simplejson.NewFromAny(map[string]interface{}{}), @@ -43,13 +42,13 @@ func TestApplicationInsightsDatasource(t *testing.T) { "queryType": "Application Insights", }, }), - RefId: "A", - IntervalMs: 1234, + RefID: "A", + IntervalMS: 1234, }, }, } Convey("and is a normal query", func() { - queries, err := datasource.buildQueries(tsdbQuery.Queries, tsdbQuery.TimeRange) + queries, err := datasource.buildQueries(tsdbQuery.Queries, *tsdbQuery.TimeRange) So(err, ShouldBeNil) So(len(queries), ShouldEqual, 1) @@ -74,9 +73,9 @@ func TestApplicationInsightsDatasource(t *testing.T) { "queryType": "Application Insights", }, }) - tsdbQuery.Queries[0].IntervalMs = 400000 + tsdbQuery.Queries[0].IntervalMS = 400000 - queries, err := datasource.buildQueries(tsdbQuery.Queries, tsdbQuery.TimeRange) + queries, err := datasource.buildQueries(tsdbQuery.Queries, *tsdbQuery.TimeRange) So(err, ShouldBeNil) So(queries[0].Params["interval"][0], ShouldEqual, "PT15M") @@ -94,9 +93,9 @@ func TestApplicationInsightsDatasource(t *testing.T) { "allowedTimeGrainsMs": []int64{60000, 300000}, }, }) - tsdbQuery.Queries[0].IntervalMs = 400000 + tsdbQuery.Queries[0].IntervalMS = 400000 - queries, err := datasource.buildQueries(tsdbQuery.Queries, tsdbQuery.TimeRange) + queries, err := datasource.buildQueries(tsdbQuery.Queries, *tsdbQuery.TimeRange) So(err, ShouldBeNil) So(queries[0].Params["interval"][0], ShouldEqual, "PT5M") @@ -116,7 +115,7 @@ func TestApplicationInsightsDatasource(t *testing.T) { }, }) - queries, err := datasource.buildQueries(tsdbQuery.Queries, tsdbQuery.TimeRange) + queries, err := datasource.buildQueries(tsdbQuery.Queries, *tsdbQuery.TimeRange) So(err, ShouldBeNil) So(queries[0].Target, ShouldEqual, "aggregation=Average&filter=blob+eq+%27%2A%27&interval=PT1M&segment=blob×pan=2018-03-15T13%3A00%3A00Z%2F2018-03-15T13%3A34%3A00Z") @@ -136,7 +135,7 @@ func TestApplicationInsightsDatasource(t *testing.T) { }, }) - queries, err := datasource.buildQueries(tsdbQuery.Queries, tsdbQuery.TimeRange) + queries, err := datasource.buildQueries(tsdbQuery.Queries, *tsdbQuery.TimeRange) So(err, ShouldBeNil) So(queries[0].Target, ShouldEqual, "aggregation=Average&interval=PT1M×pan=2018-03-15T13%3A00%3A00Z%2F2018-03-15T13%3A34%3A00Z") diff --git a/pkg/tsdb/azuremonitor/azure-log-analytics-datasource.go b/pkg/tsdb/azuremonitor/azure-log-analytics-datasource.go index 911466f5635..ccce0ee68ad 100644 --- a/pkg/tsdb/azuremonitor/azure-log-analytics-datasource.go +++ b/pkg/tsdb/azuremonitor/azure-log-analytics-datasource.go @@ -17,8 +17,8 @@ import ( "github.com/grafana/grafana/pkg/components/simplejson" "github.com/grafana/grafana/pkg/models" "github.com/grafana/grafana/pkg/plugins" + "github.com/grafana/grafana/pkg/plugins/manager" "github.com/grafana/grafana/pkg/setting" - "github.com/grafana/grafana/pkg/tsdb" "github.com/grafana/grafana/pkg/util/errutil" "github.com/opentracing/opentracing-go" 
"golang.org/x/net/context/ctxhttp" @@ -45,14 +45,15 @@ type AzureLogAnalyticsQuery struct { // 1. build the AzureMonitor url and querystring for each query // 2. executes each query by calling the Azure Monitor API // 3. parses the responses for each query into the timeseries format -func (e *AzureLogAnalyticsDatasource) executeTimeSeriesQuery(ctx context.Context, originalQueries []*tsdb.Query, timeRange *tsdb.TimeRange) (*tsdb.Response, error) { - result := &tsdb.Response{ - Results: map[string]*tsdb.QueryResult{}, +func (e *AzureLogAnalyticsDatasource) executeTimeSeriesQuery(ctx context.Context, originalQueries []plugins.DataSubQuery, + timeRange plugins.DataTimeRange) (plugins.DataResponse, error) { + result := plugins.DataResponse{ + Results: map[string]plugins.DataQueryResult{}, } queries, err := e.buildQueries(originalQueries, timeRange) if err != nil { - return nil, err + return plugins.DataResponse{}, err } for _, query := range queries { @@ -62,7 +63,8 @@ func (e *AzureLogAnalyticsDatasource) executeTimeSeriesQuery(ctx context.Context return result, nil } -func (e *AzureLogAnalyticsDatasource) buildQueries(queries []*tsdb.Query, timeRange *tsdb.TimeRange) ([]*AzureLogAnalyticsQuery, error) { +func (e *AzureLogAnalyticsDatasource) buildQueries(queries []plugins.DataSubQuery, + timeRange plugins.DataTimeRange) ([]*AzureLogAnalyticsQuery, error) { azureLogAnalyticsQueries := []*AzureLogAnalyticsQuery{} for _, query := range queries { @@ -97,7 +99,7 @@ func (e *AzureLogAnalyticsDatasource) buildQueries(queries []*tsdb.Query, timeRa params.Add("query", rawQuery) azureLogAnalyticsQueries = append(azureLogAnalyticsQueries, &AzureLogAnalyticsQuery{ - RefID: query.RefId, + RefID: query.RefID, ResultFormat: resultFormat, URL: apiURL, Model: query.Model, @@ -109,10 +111,11 @@ func (e *AzureLogAnalyticsDatasource) buildQueries(queries []*tsdb.Query, timeRa return azureLogAnalyticsQueries, nil } -func (e *AzureLogAnalyticsDatasource) executeQuery(ctx context.Context, query *AzureLogAnalyticsQuery, queries []*tsdb.Query, timeRange *tsdb.TimeRange) *tsdb.QueryResult { - queryResult := &tsdb.QueryResult{RefId: query.RefID} +func (e *AzureLogAnalyticsDatasource) executeQuery(ctx context.Context, query *AzureLogAnalyticsQuery, + queries []plugins.DataSubQuery, timeRange plugins.DataTimeRange) plugins.DataQueryResult { + queryResult := plugins.DataQueryResult{RefID: query.RefID} - queryResultErrorWithExecuted := func(err error) *tsdb.QueryResult { + queryResultErrorWithExecuted := func(err error) plugins.DataQueryResult { queryResult.Error = err frames := data.Frames{ &data.Frame{ @@ -122,7 +125,7 @@ func (e *AzureLogAnalyticsDatasource) executeQuery(ctx context.Context, query *A }, }, } - queryResult.Dataframes = tsdb.NewDecodedDataFrames(frames) + queryResult.Dataframes = plugins.NewDecodedDataFrames(frames) return queryResult } @@ -193,7 +196,7 @@ func (e *AzureLogAnalyticsDatasource) executeQuery(ctx context.Context, query *A } } frames := data.Frames{frame} - queryResult.Dataframes = tsdb.NewDecodedDataFrames(frames) + queryResult.Dataframes = plugins.NewDecodedDataFrames(frames) return queryResult } @@ -214,7 +217,7 @@ func (e *AzureLogAnalyticsDatasource) createRequest(ctx context.Context, dsInfo req.Header.Set("User-Agent", fmt.Sprintf("Grafana/%s", setting.BuildVersion)) // find plugin - plugin, ok := plugins.DataSources[dsInfo.Type] + plugin, ok := manager.DataSources[dsInfo.Type] if !ok { return nil, errors.New("unable to find datasource plugin Azure Monitor") } @@ -229,7 +232,8 @@ func 
(e *AzureLogAnalyticsDatasource) createRequest(ctx context.Context, dsInfo return req, nil } -func (e *AzureLogAnalyticsDatasource) getPluginRoute(plugin *plugins.DataSourcePlugin, cloudName string) (*plugins.AppPluginRoute, string, error) { +func (e *AzureLogAnalyticsDatasource) getPluginRoute(plugin *plugins.DataSourcePlugin, cloudName string) ( + *plugins.AppPluginRoute, string, error) { pluginRouteName := "loganalyticsazure" switch cloudName { @@ -240,7 +244,6 @@ func (e *AzureLogAnalyticsDatasource) getPluginRoute(plugin *plugins.DataSourceP } var logAnalyticsRoute *plugins.AppPluginRoute - for _, route := range plugin.Routes { if route.Path == pluginRouteName { logAnalyticsRoute = route diff --git a/pkg/tsdb/azuremonitor/azure-log-analytics-datasource_test.go b/pkg/tsdb/azuremonitor/azure-log-analytics-datasource_test.go index 13dcee69517..d5401d6a011 100644 --- a/pkg/tsdb/azuremonitor/azure-log-analytics-datasource_test.go +++ b/pkg/tsdb/azuremonitor/azure-log-analytics-datasource_test.go @@ -11,7 +11,6 @@ import ( "github.com/grafana/grafana/pkg/components/simplejson" "github.com/grafana/grafana/pkg/models" "github.com/grafana/grafana/pkg/plugins" - "github.com/grafana/grafana/pkg/tsdb" "github.com/stretchr/testify/require" ) @@ -21,18 +20,18 @@ func TestBuildingAzureLogAnalyticsQueries(t *testing.T) { tests := []struct { name string - queryModel []*tsdb.Query - timeRange *tsdb.TimeRange + queryModel []plugins.DataSubQuery + timeRange plugins.DataTimeRange azureLogAnalyticsQueries []*AzureLogAnalyticsQuery Err require.ErrorAssertionFunc }{ { name: "Query with macros should be interpolated", - timeRange: &tsdb.TimeRange{ + timeRange: plugins.DataTimeRange{ From: fmt.Sprintf("%v", fromStart.Unix()*1000), To: fmt.Sprintf("%v", fromStart.Add(34*time.Minute).Unix()*1000), }, - queryModel: []*tsdb.Query{ + queryModel: []plugins.DataSubQuery{ { DataSource: &models.DataSource{ JsonData: simplejson.NewFromAny(map[string]interface{}{}), @@ -45,7 +44,7 @@ func TestBuildingAzureLogAnalyticsQueries(t *testing.T) { "resultFormat": "time_series", }, }), - RefId: "A", + RefID: "A", }, }, azureLogAnalyticsQueries: []*AzureLogAnalyticsQuery{ diff --git a/pkg/tsdb/azuremonitor/azuremonitor-datasource.go b/pkg/tsdb/azuremonitor/azuremonitor-datasource.go index 679611e6e27..ce72904ce4b 100644 --- a/pkg/tsdb/azuremonitor/azuremonitor-datasource.go +++ b/pkg/tsdb/azuremonitor/azuremonitor-datasource.go @@ -17,12 +17,11 @@ import ( "github.com/grafana/grafana/pkg/api/pluginproxy" "github.com/grafana/grafana/pkg/models" "github.com/grafana/grafana/pkg/plugins" + "github.com/grafana/grafana/pkg/plugins/manager" "github.com/grafana/grafana/pkg/setting" "github.com/grafana/grafana/pkg/util/errutil" opentracing "github.com/opentracing/opentracing-go" "golang.org/x/net/context/ctxhttp" - - "github.com/grafana/grafana/pkg/tsdb" ) // AzureMonitorDatasource calls the Azure Monitor API - one of the four API's supported @@ -42,25 +41,28 @@ const azureMonitorAPIVersion = "2018-01-01" // 1. build the AzureMonitor url and querystring for each query // 2. executes each query by calling the Azure Monitor API // 3. 
parses the responses for each query into the timeseries format -func (e *AzureMonitorDatasource) executeTimeSeriesQuery(ctx context.Context, originalQueries []*tsdb.Query, timeRange *tsdb.TimeRange) (*tsdb.Response, error) { - result := &tsdb.Response{ - Results: map[string]*tsdb.QueryResult{}, +func (e *AzureMonitorDatasource) executeTimeSeriesQuery(ctx context.Context, originalQueries []plugins.DataSubQuery, + timeRange plugins.DataTimeRange) (plugins.DataResponse, error) { + result := plugins.DataResponse{ + Results: map[string]plugins.DataQueryResult{}, } queries, err := e.buildQueries(originalQueries, timeRange) if err != nil { - return nil, err + return plugins.DataResponse{}, err } for _, query := range queries { queryRes, resp, err := e.executeQuery(ctx, query, originalQueries, timeRange) if err != nil { - return nil, err + return plugins.DataResponse{}, err } - err = e.parseResponse(queryRes, resp, query) + frames, err := e.parseResponse(resp, query) if err != nil { queryRes.Error = err + } else { + queryRes.Dataframes = frames } result.Results[query.RefID] = queryRes } @@ -68,7 +70,7 @@ func (e *AzureMonitorDatasource) executeTimeSeriesQuery(ctx context.Context, ori return result, nil } -func (e *AzureMonitorDatasource) buildQueries(queries []*tsdb.Query, timeRange *tsdb.TimeRange) ([]*AzureMonitorQuery, error) { +func (e *AzureMonitorDatasource) buildQueries(queries []plugins.DataSubQuery, timeRange plugins.DataTimeRange) ([]*AzureMonitorQuery, error) { azureMonitorQueries := []*AzureMonitorQuery{} startTime, err := timeRange.ParseFrom() if err != nil { @@ -115,7 +117,7 @@ func (e *AzureMonitorDatasource) buildQueries(queries []*tsdb.Query, timeRange * timeGrain := azJSONModel.TimeGrain timeGrains := azJSONModel.AllowedTimeGrainsMs if timeGrain == "auto" { - timeGrain, err = setAutoTimeGrain(query.IntervalMs, timeGrains) + timeGrain, err = setAutoTimeGrain(query.IntervalMS, timeGrains) if err != nil { return nil, err } @@ -162,7 +164,7 @@ func (e *AzureMonitorDatasource) buildQueries(queries []*tsdb.Query, timeRange * UrlComponents: urlComponents, Target: target, Params: params, - RefID: query.RefId, + RefID: query.RefID, Alias: alias, }) } @@ -170,8 +172,9 @@ func (e *AzureMonitorDatasource) buildQueries(queries []*tsdb.Query, timeRange * return azureMonitorQueries, nil } -func (e *AzureMonitorDatasource) executeQuery(ctx context.Context, query *AzureMonitorQuery, queries []*tsdb.Query, timeRange *tsdb.TimeRange) (*tsdb.QueryResult, AzureMonitorResponse, error) { - queryResult := &tsdb.QueryResult{RefId: query.RefID} +func (e *AzureMonitorDatasource) executeQuery(ctx context.Context, query *AzureMonitorQuery, queries []plugins.DataSubQuery, + timeRange plugins.DataTimeRange) (plugins.DataQueryResult, AzureMonitorResponse, error) { + queryResult := plugins.DataQueryResult{RefID: query.RefID} req, err := e.createRequest(ctx, e.dsInfo) if err != nil { @@ -223,7 +226,7 @@ func (e *AzureMonitorDatasource) executeQuery(ctx context.Context, query *AzureM func (e *AzureMonitorDatasource) createRequest(ctx context.Context, dsInfo *models.DataSource) (*http.Request, error) { // find plugin - plugin, ok := plugins.DataSources[dsInfo.Type] + plugin, ok := manager.DataSources[dsInfo.Type] if !ok { return nil, errors.New("unable to find datasource plugin Azure Monitor") } @@ -280,9 +283,10 @@ func (e *AzureMonitorDatasource) unmarshalResponse(res *http.Response) (AzureMon return data, nil } -func (e *AzureMonitorDatasource) parseResponse(queryRes *tsdb.QueryResult, amr AzureMonitorResponse, 
query *AzureMonitorQuery) error { +func (e *AzureMonitorDatasource) parseResponse(amr AzureMonitorResponse, query *AzureMonitorQuery) ( + plugins.DataFrames, error) { if len(amr.Value) == 0 { - return nil + return nil, nil } frames := data.Frames{} @@ -340,14 +344,13 @@ func (e *AzureMonitorDatasource) parseResponse(queryRes *tsdb.QueryResult, amr A frames = append(frames, frame) } - queryRes.Dataframes = tsdb.NewDecodedDataFrames(frames) - - return nil + return plugins.NewDecodedDataFrames(frames), nil } // formatAzureMonitorLegendKey builds the legend key or timeseries name // Alias patterns like {{resourcename}} are replaced with the appropriate data values. -func formatAzureMonitorLegendKey(alias string, resourceName string, metricName string, metadataName string, metadataValue string, namespace string, seriesID string, labels data.Labels) string { +func formatAzureMonitorLegendKey(alias string, resourceName string, metricName string, metadataName string, + metadataValue string, namespace string, seriesID string, labels data.Labels) string { startIndex := strings.Index(seriesID, "/resourceGroups/") + 16 endIndex := strings.Index(seriesID, "/providers") resourceGroup := seriesID[startIndex:endIndex] diff --git a/pkg/tsdb/azuremonitor/azuremonitor-datasource_test.go b/pkg/tsdb/azuremonitor/azuremonitor-datasource_test.go index d5ec77c053c..59e2baaa400 100644 --- a/pkg/tsdb/azuremonitor/azuremonitor-datasource_test.go +++ b/pkg/tsdb/azuremonitor/azuremonitor-datasource_test.go @@ -14,7 +14,7 @@ import ( "github.com/grafana/grafana-plugin-sdk-go/data" "github.com/grafana/grafana/pkg/components/simplejson" "github.com/grafana/grafana/pkg/models" - "github.com/grafana/grafana/pkg/tsdb" + "github.com/grafana/grafana/pkg/plugins" "github.com/stretchr/testify/require" ptr "github.com/xorcare/pointer" ) @@ -125,12 +125,12 @@ func TestAzureMonitorBuildQueries(t *testing.T) { for k, v := range commonAzureModelProps { tt.azureMonitorVariedProperties[k] = v } - tsdbQuery := &tsdb.TsdbQuery{ - TimeRange: &tsdb.TimeRange{ + tsdbQuery := plugins.DataQuery{ + TimeRange: &plugins.DataTimeRange{ From: fmt.Sprintf("%v", fromStart.Unix()*1000), To: fmt.Sprintf("%v", fromStart.Add(34*time.Minute).Unix()*1000), }, - Queries: []*tsdb.Query{ + Queries: []plugins.DataSubQuery{ { DataSource: &models.DataSource{ JsonData: simplejson.NewFromAny(map[string]interface{}{ @@ -142,8 +142,8 @@ func TestAzureMonitorBuildQueries(t *testing.T) { "azureMonitor": tt.azureMonitorVariedProperties, }, ), - RefId: "A", - IntervalMs: tt.queryIntervalMS, + RefID: "A", + IntervalMS: tt.queryIntervalMS, }, }, } @@ -161,7 +161,7 @@ func TestAzureMonitorBuildQueries(t *testing.T) { Alias: "testalias", } - queries, err := datasource.buildQueries(tsdbQuery.Queries, tsdbQuery.TimeRange) + queries, err := datasource.buildQueries(tsdbQuery.Queries, *tsdbQuery.TimeRange) require.NoError(t, err) if diff := cmp.Diff(azureMonitorQuery, queries[0], cmpopts.IgnoreUnexported(simplejson.Json{}), cmpopts.IgnoreFields(AzureMonitorQuery{}, "Params")); diff != "" { t.Errorf("Result mismatch (-want +got):\n%s", diff) @@ -430,15 +430,16 @@ func TestAzureMonitorParseResponse(t *testing.T) { } datasource := &AzureMonitorDatasource{} - for _, tt := range tests { t.Run(tt.name, func(t *testing.T) { azData := loadTestFile(t, "azuremonitor/"+tt.responseFile) - res := &tsdb.QueryResult{Meta: simplejson.New(), RefId: "A"} - err := datasource.parseResponse(res, azData, tt.mockQuery) + res := plugins.DataQueryResult{Meta: simplejson.New(), RefID: "A"} + 
require.NotNil(t, res) + dframes, err := datasource.parseResponse(azData, tt.mockQuery) require.NoError(t, err) + require.NotNil(t, dframes) - frames, err := res.Dataframes.Decoded() + frames, err := dframes.Decoded() require.NoError(t, err) if diff := cmp.Diff(tt.expectedFrames, frames, data.FrameTestCompareOptions()...); diff != "" { t.Errorf("Result mismatch (-want +got):\n%s", diff) diff --git a/pkg/tsdb/azuremonitor/azuremonitor.go b/pkg/tsdb/azuremonitor/azuremonitor.go index 0dec9ceba8d..0d2437d151e 100644 --- a/pkg/tsdb/azuremonitor/azuremonitor.go +++ b/pkg/tsdb/azuremonitor/azuremonitor.go @@ -8,14 +8,30 @@ import ( "github.com/grafana/grafana/pkg/infra/log" "github.com/grafana/grafana/pkg/models" - "github.com/grafana/grafana/pkg/tsdb" + "github.com/grafana/grafana/pkg/plugins" + "github.com/grafana/grafana/pkg/registry" ) var ( - azlog log.Logger - legendKeyFormat *regexp.Regexp + azlog = log.New("tsdb.azuremonitor") + legendKeyFormat = regexp.MustCompile(`\{\{\s*(.+?)\s*\}\}`) ) +func init() { + registry.Register(®istry.Descriptor{ + Name: "AzureMonitorService", + InitPriority: registry.Low, + Instance: &Service{}, + }) +} + +type Service struct { +} + +func (s *Service) Init() error { + return nil +} + // AzureMonitorExecutor executes queries for the Azure Monitor datasource - all four services type AzureMonitorExecutor struct { httpClient *http.Client @@ -23,7 +39,7 @@ type AzureMonitorExecutor struct { } // NewAzureMonitorExecutor initializes a http client -func NewAzureMonitorExecutor(dsInfo *models.DataSource) (tsdb.TsdbQueryEndpoint, error) { +func (s *Service) NewExecutor(dsInfo *models.DataSource) (plugins.DataPlugin, error) { httpClient, err := dsInfo.GetHttpClient() if err != nil { return nil, err @@ -35,23 +51,18 @@ func NewAzureMonitorExecutor(dsInfo *models.DataSource) (tsdb.TsdbQueryEndpoint, }, nil } -func init() { - azlog = log.New("tsdb.azuremonitor") - tsdb.RegisterTsdbQueryEndpoint("grafana-azure-monitor-datasource", NewAzureMonitorExecutor) - legendKeyFormat = regexp.MustCompile(`\{\{\s*(.+?)\s*\}\}`) -} - // Query takes in the frontend queries, parses them into the query format // expected by chosen Azure Monitor service (Azure Monitor, App Insights etc.) 
// executes the queries against the API and parses the response into // the right format -func (e *AzureMonitorExecutor) Query(ctx context.Context, dsInfo *models.DataSource, tsdbQuery *tsdb.TsdbQuery) (*tsdb.Response, error) { +func (e *AzureMonitorExecutor) DataQuery(ctx context.Context, dsInfo *models.DataSource, + tsdbQuery plugins.DataQuery) (plugins.DataResponse, error) { var err error - var azureMonitorQueries []*tsdb.Query - var applicationInsightsQueries []*tsdb.Query - var azureLogAnalyticsQueries []*tsdb.Query - var insightsAnalyticsQueries []*tsdb.Query + var azureMonitorQueries []plugins.DataSubQuery + var applicationInsightsQueries []plugins.DataSubQuery + var azureLogAnalyticsQueries []plugins.DataSubQuery + var insightsAnalyticsQueries []plugins.DataSubQuery for _, query := range tsdbQuery.Queries { queryType := query.Model.Get("queryType").MustString("") @@ -66,7 +77,7 @@ func (e *AzureMonitorExecutor) Query(ctx context.Context, dsInfo *models.DataSou case "Insights Analytics": insightsAnalyticsQueries = append(insightsAnalyticsQueries, query) default: - return nil, fmt.Errorf("alerting not supported for %q", queryType) + return plugins.DataResponse{}, fmt.Errorf("alerting not supported for %q", queryType) } } @@ -90,24 +101,24 @@ func (e *AzureMonitorExecutor) Query(ctx context.Context, dsInfo *models.DataSou dsInfo: e.dsInfo, } - azResult, err := azDatasource.executeTimeSeriesQuery(ctx, azureMonitorQueries, tsdbQuery.TimeRange) + azResult, err := azDatasource.executeTimeSeriesQuery(ctx, azureMonitorQueries, *tsdbQuery.TimeRange) if err != nil { - return nil, err + return plugins.DataResponse{}, err } - aiResult, err := aiDatasource.executeTimeSeriesQuery(ctx, applicationInsightsQueries, tsdbQuery.TimeRange) + aiResult, err := aiDatasource.executeTimeSeriesQuery(ctx, applicationInsightsQueries, *tsdbQuery.TimeRange) if err != nil { - return nil, err + return plugins.DataResponse{}, err } - alaResult, err := alaDatasource.executeTimeSeriesQuery(ctx, azureLogAnalyticsQueries, tsdbQuery.TimeRange) + alaResult, err := alaDatasource.executeTimeSeriesQuery(ctx, azureLogAnalyticsQueries, *tsdbQuery.TimeRange) if err != nil { - return nil, err + return plugins.DataResponse{}, err } - iaResult, err := iaDatasource.executeTimeSeriesQuery(ctx, insightsAnalyticsQueries, tsdbQuery.TimeRange) + iaResult, err := iaDatasource.executeTimeSeriesQuery(ctx, insightsAnalyticsQueries, *tsdbQuery.TimeRange) if err != nil { - return nil, err + return plugins.DataResponse{}, err } for k, v := range aiResult.Results { diff --git a/pkg/tsdb/azuremonitor/insights-analytics-datasource.go b/pkg/tsdb/azuremonitor/insights-analytics-datasource.go index 4070c6dc381..b639aacf777 100644 --- a/pkg/tsdb/azuremonitor/insights-analytics-datasource.go +++ b/pkg/tsdb/azuremonitor/insights-analytics-datasource.go @@ -15,8 +15,8 @@ import ( "github.com/grafana/grafana/pkg/api/pluginproxy" "github.com/grafana/grafana/pkg/models" "github.com/grafana/grafana/pkg/plugins" + "github.com/grafana/grafana/pkg/plugins/manager" "github.com/grafana/grafana/pkg/setting" - "github.com/grafana/grafana/pkg/tsdb" "github.com/grafana/grafana/pkg/util/errutil" "github.com/opentracing/opentracing-go" "golang.org/x/net/context/ctxhttp" @@ -39,14 +39,15 @@ type InsightsAnalyticsQuery struct { Target string } -func (e *InsightsAnalyticsDatasource) executeTimeSeriesQuery(ctx context.Context, originalQueries []*tsdb.Query, timeRange *tsdb.TimeRange) (*tsdb.Response, error) { - result := &tsdb.Response{ - Results: 
map[string]*tsdb.QueryResult{}, +func (e *InsightsAnalyticsDatasource) executeTimeSeriesQuery(ctx context.Context, + originalQueries []plugins.DataSubQuery, timeRange plugins.DataTimeRange) (plugins.DataResponse, error) { + result := plugins.DataResponse{ + Results: map[string]plugins.DataQueryResult{}, } queries, err := e.buildQueries(originalQueries, timeRange) if err != nil { - return nil, err + return plugins.DataResponse{}, err } for _, query := range queries { @@ -56,7 +57,8 @@ func (e *InsightsAnalyticsDatasource) executeTimeSeriesQuery(ctx context.Context return result, nil } -func (e *InsightsAnalyticsDatasource) buildQueries(queries []*tsdb.Query, timeRange *tsdb.TimeRange) ([]*InsightsAnalyticsQuery, error) { +func (e *InsightsAnalyticsDatasource) buildQueries(queries []plugins.DataSubQuery, + timeRange plugins.DataTimeRange) ([]*InsightsAnalyticsQuery, error) { iaQueries := []*InsightsAnalyticsQuery{} for _, query := range queries { @@ -74,7 +76,7 @@ func (e *InsightsAnalyticsDatasource) buildQueries(queries []*tsdb.Query, timeRa qm.RawQuery = queryJSONModel.InsightsAnalytics.Query qm.ResultFormat = queryJSONModel.InsightsAnalytics.ResultFormat - qm.RefID = query.RefId + qm.RefID = query.RefID if qm.RawQuery == "" { return nil, fmt.Errorf("query is missing query string property") @@ -94,10 +96,10 @@ func (e *InsightsAnalyticsDatasource) buildQueries(queries []*tsdb.Query, timeRa return iaQueries, nil } -func (e *InsightsAnalyticsDatasource) executeQuery(ctx context.Context, query *InsightsAnalyticsQuery) *tsdb.QueryResult { - queryResult := &tsdb.QueryResult{RefId: query.RefID} +func (e *InsightsAnalyticsDatasource) executeQuery(ctx context.Context, query *InsightsAnalyticsQuery) plugins.DataQueryResult { + queryResult := plugins.DataQueryResult{RefID: query.RefID} - queryResultError := func(err error) *tsdb.QueryResult { + queryResultError := func(err error) plugins.DataQueryResult { queryResult.Error = err return queryResult } @@ -170,19 +172,22 @@ func (e *InsightsAnalyticsDatasource) executeQuery(ctx context.Context, query *I if err == nil { frame = wideFrame } else { - frame.AppendNotices(data.Notice{Severity: data.NoticeSeverityWarning, Text: "could not convert frame to time series, returning raw table: " + err.Error()}) + frame.AppendNotices(data.Notice{ + Severity: data.NoticeSeverityWarning, + Text: "could not convert frame to time series, returning raw table: " + err.Error(), + }) } } } frames := data.Frames{frame} - queryResult.Dataframes = tsdb.NewDecodedDataFrames(frames) + queryResult.Dataframes = plugins.NewDecodedDataFrames(frames) return queryResult } func (e *InsightsAnalyticsDatasource) createRequest(ctx context.Context, dsInfo *models.DataSource) (*http.Request, error) { // find plugin - plugin, ok := plugins.DataSources[dsInfo.Type] + plugin, ok := manager.DataSources[dsInfo.Type] if !ok { return nil, errors.New("unable to find datasource plugin Azure Application Insights") } @@ -215,7 +220,8 @@ func (e *InsightsAnalyticsDatasource) createRequest(ctx context.Context, dsInfo return req, nil } -func (e *InsightsAnalyticsDatasource) getPluginRoute(plugin *plugins.DataSourcePlugin, cloudName string) (*plugins.AppPluginRoute, string, error) { +func (e *InsightsAnalyticsDatasource) getPluginRoute(plugin *plugins.DataSourcePlugin, cloudName string) ( + *plugins.AppPluginRoute, string, error) { pluginRouteName := "appinsights" if cloudName == "chinaazuremonitor" { @@ -223,7 +229,6 @@ func (e *InsightsAnalyticsDatasource) getPluginRoute(plugin *plugins.DataSourceP } 
var pluginRoute *plugins.AppPluginRoute - for _, route := range plugin.Routes { if route.Path == pluginRouteName { pluginRoute = route diff --git a/pkg/tsdb/azuremonitor/macros.go b/pkg/tsdb/azuremonitor/macros.go index 11cd9f1e83b..7e05aff6c8a 100644 --- a/pkg/tsdb/azuremonitor/macros.go +++ b/pkg/tsdb/azuremonitor/macros.go @@ -6,7 +6,8 @@ import ( "strings" "time" - "github.com/grafana/grafana/pkg/tsdb" + "github.com/grafana/grafana/pkg/plugins" + "github.com/grafana/grafana/pkg/tsdb/interval" ) const rsIdentifier = `__(timeFilter|timeFrom|timeTo|interval|contains|escapeMulti)` @@ -14,8 +15,8 @@ const sExpr = `\$` + rsIdentifier + `(?:\(([^\)]*)\))?` const escapeMultiExpr = `\$__escapeMulti\(('.*')\)` type kqlMacroEngine struct { - timeRange *tsdb.TimeRange - query *tsdb.Query + timeRange plugins.DataTimeRange + query plugins.DataSubQuery } // Macros: @@ -28,7 +29,7 @@ type kqlMacroEngine struct { // - $__escapeMulti('\\vm\eth0\Total','\\vm\eth2\Total') -> @'\\vm\eth0\Total',@'\\vm\eth2\Total' // KqlInterpolate interpolates macros for Kusto Query Language (KQL) queries -func KqlInterpolate(query *tsdb.Query, timeRange *tsdb.TimeRange, kql string, defaultTimeField ...string) (string, error) { +func KqlInterpolate(query plugins.DataSubQuery, timeRange plugins.DataTimeRange, kql string, defaultTimeField ...string) (string, error) { engine := kqlMacroEngine{} defaultTimeFieldForAllDatasources := "timestamp" @@ -38,7 +39,7 @@ func KqlInterpolate(query *tsdb.Query, timeRange *tsdb.TimeRange, kql string, de return engine.Interpolate(query, timeRange, kql, defaultTimeFieldForAllDatasources) } -func (m *kqlMacroEngine) Interpolate(query *tsdb.Query, timeRange *tsdb.TimeRange, kql string, defaultTimeField string) (string, error) { +func (m *kqlMacroEngine) Interpolate(query plugins.DataSubQuery, timeRange plugins.DataTimeRange, kql string, defaultTimeField string) (string, error) { m.timeRange = timeRange m.query = query rExp, _ := regexp.Compile(sExpr) @@ -90,28 +91,30 @@ func (m *kqlMacroEngine) evaluateMacro(name string, defaultTimeField string, arg if len(args) > 0 && args[0] != "" { timeColumn = args[0] } - return fmt.Sprintf("['%s'] >= datetime('%s') and ['%s'] <= datetime('%s')", timeColumn, m.timeRange.GetFromAsTimeUTC().Format(time.RFC3339), timeColumn, m.timeRange.GetToAsTimeUTC().Format(time.RFC3339)), nil + return fmt.Sprintf("['%s'] >= datetime('%s') and ['%s'] <= datetime('%s')", timeColumn, + m.timeRange.GetFromAsTimeUTC().Format(time.RFC3339), timeColumn, + m.timeRange.GetToAsTimeUTC().Format(time.RFC3339)), nil case "timeFrom", "__from": return fmt.Sprintf("datetime('%s')", m.timeRange.GetFromAsTimeUTC().Format(time.RFC3339)), nil case "timeTo", "__to": return fmt.Sprintf("datetime('%s')", m.timeRange.GetToAsTimeUTC().Format(time.RFC3339)), nil case "interval": - var interval time.Duration - if m.query.IntervalMs == 0 { + var it time.Duration + if m.query.IntervalMS == 0 { to := m.timeRange.MustGetTo().UnixNano() from := m.timeRange.MustGetFrom().UnixNano() // default to "100 datapoints" if nothing in the query is more specific defaultInterval := time.Duration((to - from) / 60) var err error - interval, err = tsdb.GetIntervalFrom(m.query.DataSource, m.query.Model, defaultInterval) + it, err = interval.GetIntervalFrom(m.query.DataSource, m.query.Model, defaultInterval) if err != nil { azlog.Warn("Unable to get interval from query", "datasource", m.query.DataSource, "model", m.query.Model) - interval = defaultInterval + it = defaultInterval } } else { - interval = time.Millisecond 
* time.Duration(m.query.IntervalMs) + it = time.Millisecond * time.Duration(m.query.IntervalMS) } - return fmt.Sprintf("%dms", int(interval/time.Millisecond)), nil + return fmt.Sprintf("%dms", int(it/time.Millisecond)), nil case "contains": if len(args) < 2 || args[0] == "" || args[1] == "" { return "", fmt.Errorf("macro %v needs colName and variableSet", name) diff --git a/pkg/tsdb/azuremonitor/macros_test.go b/pkg/tsdb/azuremonitor/macros_test.go index 98554832a06..2bf1671a106 100644 --- a/pkg/tsdb/azuremonitor/macros_test.go +++ b/pkg/tsdb/azuremonitor/macros_test.go @@ -9,77 +9,77 @@ import ( "github.com/google/go-cmp/cmp/cmpopts" "github.com/grafana/grafana/pkg/components/simplejson" "github.com/grafana/grafana/pkg/models" - "github.com/grafana/grafana/pkg/tsdb" + "github.com/grafana/grafana/pkg/plugins" "github.com/stretchr/testify/require" ) func TestAzureLogAnalyticsMacros(t *testing.T) { fromStart := time.Date(2018, 3, 15, 13, 0, 0, 0, time.UTC).In(time.Local) - timeRange := &tsdb.TimeRange{ + timeRange := plugins.DataTimeRange{ From: fmt.Sprintf("%v", fromStart.Unix()*1000), To: fmt.Sprintf("%v", fromStart.Add(34*time.Minute).Unix()*1000), } tests := []struct { name string - query *tsdb.Query - timeRange *tsdb.TimeRange + query plugins.DataSubQuery + timeRange plugins.DataTimeRange kql string expected string Err require.ErrorAssertionFunc }{ { name: "invalid macro should be ignored", - query: &tsdb.Query{}, + query: plugins.DataSubQuery{}, kql: "$__invalid()", expected: "$__invalid()", Err: require.NoError, }, { name: "Kusto variables should be ignored", - query: &tsdb.Query{}, + query: plugins.DataSubQuery{}, kql: ") on $left.b == $right.y", expected: ") on $left.b == $right.y", Err: require.NoError, }, { name: "$__contains macro with a multi template variable that has multiple selected values as a parameter should build in clause", - query: &tsdb.Query{}, + query: plugins.DataSubQuery{}, kql: "$__contains(col, 'val1','val2')", expected: "['col'] in ('val1','val2')", Err: require.NoError, }, { name: "$__contains macro with a multi template variable that has a single selected value as a parameter should build in clause", - query: &tsdb.Query{}, + query: plugins.DataSubQuery{}, kql: "$__contains(col, 'val1' )", expected: "['col'] in ('val1')", Err: require.NoError, }, { name: "$__contains macro with multi template variable has custom All value as a parameter should return a true expression", - query: &tsdb.Query{}, + query: plugins.DataSubQuery{}, kql: "$__contains(col, all)", expected: "1 == 1", Err: require.NoError, }, { name: "$__timeFilter has no column parameter should use default time field", - query: &tsdb.Query{}, + query: plugins.DataSubQuery{}, kql: "$__timeFilter()", expected: "['TimeGenerated'] >= datetime('2018-03-15T13:00:00Z') and ['TimeGenerated'] <= datetime('2018-03-15T13:34:00Z')", Err: require.NoError, }, { name: "$__timeFilter has time field parameter", - query: &tsdb.Query{}, + query: plugins.DataSubQuery{}, kql: "$__timeFilter(myTimeField)", expected: "['myTimeField'] >= datetime('2018-03-15T13:00:00Z') and ['myTimeField'] <= datetime('2018-03-15T13:34:00Z')", Err: require.NoError, }, { name: "$__timeFrom and $__timeTo is in the query and range is a specific interval", - query: &tsdb.Query{}, + query: plugins.DataSubQuery{}, kql: "myTimeField >= $__timeFrom() and myTimeField <= $__timeTo()", expected: "myTimeField >= datetime('2018-03-15T13:00:00Z') and myTimeField <= datetime('2018-03-15T13:34:00Z')", Err: require.NoError, @@ -87,7 +87,7 @@ func 
TestAzureLogAnalyticsMacros(t *testing.T) { { name: "$__interval should use the defined interval from the query", timeRange: timeRange, - query: &tsdb.Query{ + query: plugins.DataSubQuery{ Model: simplejson.NewFromAny(map[string]interface{}{ "interval": "5m", }), @@ -98,7 +98,7 @@ func TestAzureLogAnalyticsMacros(t *testing.T) { }, { name: "$__interval should use the default interval if none is specified", - query: &tsdb.Query{ + query: plugins.DataSubQuery{ DataSource: &models.DataSource{}, Model: simplejson.NewFromAny(map[string]interface{}{}), }, @@ -108,7 +108,7 @@ func TestAzureLogAnalyticsMacros(t *testing.T) { }, { name: "$__escapeMulti with multi template variable should replace values with KQL style escaped strings", - query: &tsdb.Query{ + query: plugins.DataSubQuery{ DataSource: &models.DataSource{}, Model: simplejson.NewFromAny(map[string]interface{}{}), }, @@ -118,7 +118,7 @@ func TestAzureLogAnalyticsMacros(t *testing.T) { }, { name: "$__escapeMulti with multi template variable and has one selected value that contains comma", - query: &tsdb.Query{ + query: plugins.DataSubQuery{ DataSource: &models.DataSource{}, Model: simplejson.NewFromAny(map[string]interface{}{}), }, @@ -128,7 +128,7 @@ func TestAzureLogAnalyticsMacros(t *testing.T) { }, { name: "$__escapeMulti with multi template variable and is not wrapped in single quotes should fail", - query: &tsdb.Query{ + query: plugins.DataSubQuery{ DataSource: &models.DataSource{}, Model: simplejson.NewFromAny(map[string]interface{}{}), }, diff --git a/pkg/tsdb/azuremonitor/time-grain.go b/pkg/tsdb/azuremonitor/time-grain.go index 5e28d64353e..17c4c9ee46f 100644 --- a/pkg/tsdb/azuremonitor/time-grain.go +++ b/pkg/tsdb/azuremonitor/time-grain.go @@ -6,7 +6,7 @@ import ( "strings" "time" - "github.com/grafana/grafana/pkg/tsdb" + "github.com/grafana/grafana/pkg/tsdb/interval" ) // TimeGrain handles conversions between @@ -18,8 +18,8 @@ var ( smallTimeUnits = []string{"hour", "minute", "h", "m"} ) -func (tg *TimeGrain) createISO8601DurationFromIntervalMS(interval int64) (string, error) { - formatted := tsdb.FormatDuration(time.Duration(interval) * time.Millisecond) +func (tg *TimeGrain) createISO8601DurationFromIntervalMS(it int64) (string, error) { + formatted := interval.FormatDuration(time.Duration(it) * time.Millisecond) if strings.Contains(formatted, "ms") { return "PT1M", nil @@ -28,7 +28,7 @@ func (tg *TimeGrain) createISO8601DurationFromIntervalMS(interval int64) (string timeValueString := formatted[0 : len(formatted)-1] timeValue, err := strconv.Atoi(timeValueString) if err != nil { - return "", fmt.Errorf("could not parse interval %q to an ISO 8061 duration: %w", interval, err) + return "", fmt.Errorf("could not parse interval %q to an ISO 8061 duration: %w", it, err) } unit := formatted[len(formatted)-1:] diff --git a/pkg/tsdb/cloudmonitoring/annotation_query.go b/pkg/tsdb/cloudmonitoring/annotation_query.go index a08f6e60f82..503768d8c74 100644 --- a/pkg/tsdb/cloudmonitoring/annotation_query.go +++ b/pkg/tsdb/cloudmonitoring/annotation_query.go @@ -4,24 +4,25 @@ import ( "context" "strings" - "github.com/grafana/grafana/pkg/tsdb" + "github.com/grafana/grafana/pkg/plugins" ) -func (e *CloudMonitoringExecutor) executeAnnotationQuery(ctx context.Context, tsdbQuery *tsdb.TsdbQuery) (*tsdb.Response, error) { - result := &tsdb.Response{ - Results: make(map[string]*tsdb.QueryResult), +func (e *Executor) executeAnnotationQuery(ctx context.Context, tsdbQuery plugins.DataQuery) ( + plugins.DataResponse, error) { + result := 
plugins.DataResponse{ + Results: make(map[string]plugins.DataQueryResult), } firstQuery := tsdbQuery.Queries[0] queries, err := e.buildQueryExecutors(tsdbQuery) if err != nil { - return nil, err + return plugins.DataResponse{}, err } queryRes, resp, _, err := queries[0].run(ctx, tsdbQuery, e) if err != nil { - return nil, err + return plugins.DataResponse{}, err } metricQuery := firstQuery.Model.Get("metricQuery") @@ -29,16 +30,16 @@ func (e *CloudMonitoringExecutor) executeAnnotationQuery(ctx context.Context, ts text := metricQuery.Get("text").MustString() tags := metricQuery.Get("tags").MustString() - err = queries[0].parseToAnnotations(queryRes, resp, title, text, tags) - result.Results[firstQuery.RefId] = queryRes + err = queries[0].parseToAnnotations(&queryRes, resp, title, text, tags) + result.Results[firstQuery.RefID] = queryRes return result, err } -func transformAnnotationToTable(data []map[string]string, result *tsdb.QueryResult) { - table := &tsdb.Table{ - Columns: make([]tsdb.TableColumn, 4), - Rows: make([]tsdb.RowValues, 0), +func transformAnnotationToTable(data []map[string]string, result *plugins.DataQueryResult) { + table := plugins.DataTable{ + Columns: make([]plugins.DataTableColumn, 4), + Rows: make([]plugins.DataRowValues, 0), } table.Columns[0].Text = "time" table.Columns[1].Text = "title" diff --git a/pkg/tsdb/cloudmonitoring/annotation_query_test.go b/pkg/tsdb/cloudmonitoring/annotation_query_test.go index 857517232ca..8f2ab498e43 100644 --- a/pkg/tsdb/cloudmonitoring/annotation_query_test.go +++ b/pkg/tsdb/cloudmonitoring/annotation_query_test.go @@ -4,23 +4,25 @@ import ( "testing" "github.com/grafana/grafana/pkg/components/simplejson" - "github.com/grafana/grafana/pkg/tsdb" + "github.com/grafana/grafana/pkg/plugins" "github.com/stretchr/testify/assert" "github.com/stretchr/testify/require" ) -func TestCloudMonitoringExecutor_parseToAnnotations(t *testing.T) { +func TestExecutor_parseToAnnotations(t *testing.T) { d, err := loadTestFile("./test-data/2-series-response-no-agg.json") require.NoError(t, err) require.Len(t, d.TimeSeries, 3) - res := &tsdb.QueryResult{Meta: simplejson.New(), RefId: "annotationQuery"} + res := &plugins.DataQueryResult{Meta: simplejson.New(), RefID: "annotationQuery"} query := &cloudMonitoringTimeSeriesFilter{} - err = query.parseToAnnotations(res, d, "atitle {{metric.label.instance_name}} {{metric.value}}", "atext {{resource.label.zone}}", "atag") + err = query.parseToAnnotations(res, d, "atitle {{metric.label.instance_name}} {{metric.value}}", + "atext {{resource.label.zone}}", "atag") require.NoError(t, err) - decoded, _ := res.Dataframes.Decoded() + decoded, err := res.Dataframes.Decoded() + require.NoError(t, err) require.Len(t, decoded, 3) assert.Equal(t, "title", decoded[0].Fields[1].Name) assert.Equal(t, "tags", decoded[0].Fields[2].Name) @@ -28,7 +30,7 @@ func TestCloudMonitoringExecutor_parseToAnnotations(t *testing.T) { } func TestCloudMonitoringExecutor_parseToAnnotations_emptyTimeSeries(t *testing.T) { - res := &tsdb.QueryResult{Meta: simplejson.New(), RefId: "annotationQuery"} + res := &plugins.DataQueryResult{Meta: simplejson.New(), RefID: "annotationQuery"} query := &cloudMonitoringTimeSeriesFilter{} response := cloudMonitoringResponse{ @@ -38,12 +40,13 @@ func TestCloudMonitoringExecutor_parseToAnnotations_emptyTimeSeries(t *testing.T err := query.parseToAnnotations(res, response, "atitle", "atext", "atag") require.NoError(t, err) - decoded, _ := res.Dataframes.Decoded() + decoded, err := res.Dataframes.Decoded() + 
require.NoError(t, err) require.Len(t, decoded, 0) } func TestCloudMonitoringExecutor_parseToAnnotations_noPointsInSeries(t *testing.T) { - res := &tsdb.QueryResult{Meta: simplejson.New(), RefId: "annotationQuery"} + res := &plugins.DataQueryResult{Meta: simplejson.New(), RefID: "annotationQuery"} query := &cloudMonitoringTimeSeriesFilter{} response := cloudMonitoringResponse{ diff --git a/pkg/tsdb/cloudmonitoring/cloudmonitoring.go b/pkg/tsdb/cloudmonitoring/cloudmonitoring.go index d716334b8eb..16cb99cc35b 100644 --- a/pkg/tsdb/cloudmonitoring/cloudmonitoring.go +++ b/pkg/tsdb/cloudmonitoring/cloudmonitoring.go @@ -16,14 +16,16 @@ import ( "strings" "time" + "github.com/grafana/grafana/pkg/plugins" + "github.com/grafana/grafana/pkg/plugins/manager" + "github.com/grafana/grafana/pkg/registry" + "github.com/grafana/grafana-plugin-sdk-go/data" "github.com/grafana/grafana/pkg/api/pluginproxy" "github.com/grafana/grafana/pkg/components/simplejson" "github.com/grafana/grafana/pkg/infra/log" "github.com/grafana/grafana/pkg/models" - "github.com/grafana/grafana/pkg/plugins" "github.com/grafana/grafana/pkg/setting" - "github.com/grafana/grafana/pkg/tsdb" "golang.org/x/oauth2/google" ) @@ -62,20 +64,35 @@ const ( mqlEditorMode string = "mql" ) -// CloudMonitoringExecutor executes queries for the CloudMonitoring datasource -type CloudMonitoringExecutor struct { +func init() { + registry.Register(®istry.Descriptor{ + Name: "CloudMonitoringService", + InitPriority: registry.Low, + Instance: &Service{}, + }) +} + +type Service struct { +} + +func (s *Service) Init() error { + return nil +} + +// Executor executes queries for the CloudMonitoring datasource. +type Executor struct { httpClient *http.Client dsInfo *models.DataSource } -// NewCloudMonitoringExecutor initializes a http client -func NewCloudMonitoringExecutor(dsInfo *models.DataSource) (tsdb.TsdbQueryEndpoint, error) { +// NewExecutor returns an Executor. 
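Before the NewExecutor body that follows, it is worth pausing on the registration change above: the package no longer self-registers a query endpoint through tsdb.RegisterTsdbQueryEndpoint; instead an init() hands a registry.Descriptor to Grafana's service registry, which later calls Init() on the instance. A self-contained sketch of that idea, using a toy registry in place of pkg/registry (the Register and Descriptor names and the empty CloudMonitoringService follow the diff; the toy drops InitPriority and everything else the real registry does):

    package main

    import "fmt"

    // Toy stand-ins for pkg/registry. The real Descriptor also carries an
    // InitPriority that orders service initialization at startup.
    type Service interface{ Init() error }

    type Descriptor struct {
        Name     string
        Instance Service
    }

    var services []Descriptor

    func Register(d Descriptor) { services = append(services, d) }

    // Registration happens as a package side effect; wiring happens in Init().
    type CloudMonitoringService struct{}

    func (s *CloudMonitoringService) Init() error { return nil }

    func init() {
        Register(Descriptor{Name: "CloudMonitoringService", Instance: &CloudMonitoringService{}})
    }

    func main() {
        for _, d := range services {
            if err := d.Instance.Init(); err != nil {
                panic(err)
            }
            fmt.Println("initialized", d.Name)
        }
    }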
+func (s *Service) NewExecutor(dsInfo *models.DataSource) (plugins.DataPlugin, error) { httpClient, err := dsInfo.GetHttpClient() if err != nil { return nil, err } - return &CloudMonitoringExecutor{ + return &Executor{ httpClient: httpClient, dsInfo: dsInfo, }, nil @@ -83,14 +100,14 @@ func NewCloudMonitoringExecutor(dsInfo *models.DataSource) (tsdb.TsdbQueryEndpoi func init() { slog = log.New("tsdb.cloudMonitoring") - tsdb.RegisterTsdbQueryEndpoint("stackdriver", NewCloudMonitoringExecutor) } // Query takes in the frontend queries, parses them into the CloudMonitoring query format // executes the queries against the CloudMonitoring API and parses the response into // the time series or table format -func (e *CloudMonitoringExecutor) Query(ctx context.Context, dsInfo *models.DataSource, tsdbQuery *tsdb.TsdbQuery) (*tsdb.Response, error) { - var result *tsdb.Response +func (e *Executor) DataQuery(ctx context.Context, dsInfo *models.DataSource, tsdbQuery plugins.DataQuery) ( + plugins.DataResponse, error) { + var result plugins.DataResponse var err error queryType := tsdbQuery.Queries[0].Model.Get("type").MustString("") @@ -108,32 +125,34 @@ func (e *CloudMonitoringExecutor) Query(ctx context.Context, dsInfo *models.Data return result, err } -func (e *CloudMonitoringExecutor) getGCEDefaultProject(ctx context.Context, tsdbQuery *tsdb.TsdbQuery) (*tsdb.Response, error) { - result := &tsdb.Response{ - Results: make(map[string]*tsdb.QueryResult), +func (e *Executor) getGCEDefaultProject(ctx context.Context, tsdbQuery plugins.DataQuery) (plugins.DataResponse, error) { + result := plugins.DataResponse{ + Results: make(map[string]plugins.DataQueryResult), } - refId := tsdbQuery.Queries[0].RefId - queryResult := &tsdb.QueryResult{Meta: simplejson.New(), RefId: refId} + refID := tsdbQuery.Queries[0].RefID + queryResult := plugins.DataQueryResult{Meta: simplejson.New(), RefID: refID} gceDefaultProject, err := e.getDefaultProject(ctx) if err != nil { - return nil, fmt.Errorf("failed to retrieve default project from GCE metadata server, error: %w", err) + return plugins.DataResponse{}, fmt.Errorf( + "failed to retrieve default project from GCE metadata server, error: %w", err) } queryResult.Meta.Set("defaultProject", gceDefaultProject) - result.Results[refId] = queryResult + result.Results[refID] = queryResult return result, nil } -func (e *CloudMonitoringExecutor) executeTimeSeriesQuery(ctx context.Context, tsdbQuery *tsdb.TsdbQuery) (*tsdb.Response, error) { - result := &tsdb.Response{ - Results: make(map[string]*tsdb.QueryResult), +func (e *Executor) executeTimeSeriesQuery(ctx context.Context, tsdbQuery plugins.DataQuery) ( + plugins.DataResponse, error) { + result := plugins.DataResponse{ + Results: make(map[string]plugins.DataQueryResult), } queryExecutors, err := e.buildQueryExecutors(tsdbQuery) if err != nil { - return nil, err + return plugins.DataResponse{}, err } unit := e.resolvePanelUnitFromQueries(queryExecutors) @@ -141,9 +160,9 @@ func (e *CloudMonitoringExecutor) executeTimeSeriesQuery(ctx context.Context, ts for _, queryExecutor := range queryExecutors { queryRes, resp, executedQueryString, err := queryExecutor.run(ctx, tsdbQuery, e) if err != nil { - return nil, err + return plugins.DataResponse{}, err } - err = queryExecutor.parseResponse(queryRes, resp, executedQueryString) + err = queryExecutor.parseResponse(&queryRes, resp, executedQueryString) if err != nil { queryRes.Error = err } @@ -158,7 +177,7 @@ func (e *CloudMonitoringExecutor) executeTimeSeriesQuery(ctx context.Context, ts 
} frames[i].Fields[1].Config.Unit = unit } - queryRes.Dataframes = tsdb.NewDecodedDataFrames(frames) + queryRes.Dataframes = plugins.NewDecodedDataFrames(frames) } result.Results[queryExecutor.getRefID()] = queryRes } @@ -166,7 +185,7 @@ func (e *CloudMonitoringExecutor) executeTimeSeriesQuery(ctx context.Context, ts return result, nil } -func (e *CloudMonitoringExecutor) resolvePanelUnitFromQueries(executors []cloudMonitoringQueryExecutor) string { +func (e *Executor) resolvePanelUnitFromQueries(executors []cloudMonitoringQueryExecutor) string { if len(executors) == 0 { return "" } @@ -186,7 +205,7 @@ func (e *CloudMonitoringExecutor) resolvePanelUnitFromQueries(executors []cloudM return "" } -func (e *CloudMonitoringExecutor) buildQueryExecutors(tsdbQuery *tsdb.TsdbQuery) ([]cloudMonitoringQueryExecutor, error) { +func (e *Executor) buildQueryExecutors(tsdbQuery plugins.DataQuery) ([]cloudMonitoringQueryExecutor, error) { cloudMonitoringQueryExecutors := []cloudMonitoringQueryExecutor{} startTime, err := tsdbQuery.TimeRange.ParseFrom() @@ -201,10 +220,14 @@ func (e *CloudMonitoringExecutor) buildQueryExecutors(tsdbQuery *tsdb.TsdbQuery) durationSeconds := int(endTime.Sub(startTime).Seconds()) - for _, query := range tsdbQuery.Queries { - migrateLegacyQueryModel(query) + for i := range tsdbQuery.Queries { + migrateLegacyQueryModel(&tsdbQuery.Queries[i]) + query := tsdbQuery.Queries[i] q := grafanaQuery{} - model, _ := query.Model.MarshalJSON() + model, err := query.Model.MarshalJSON() + if err != nil { + return nil, err + } if err := json.Unmarshal(model, &q); err != nil { return nil, fmt.Errorf("could not unmarshal CloudMonitoringQuery json: %w", err) } @@ -215,20 +238,19 @@ func (e *CloudMonitoringExecutor) buildQueryExecutors(tsdbQuery *tsdb.TsdbQuery) var queryInterface cloudMonitoringQueryExecutor cmtsf := &cloudMonitoringTimeSeriesFilter{ - RefID: query.RefId, + RefID: query.RefID, GroupBys: []string{}, } - switch q.QueryType { case metricQueryType: if q.MetricQuery.EditorMode == mqlEditorMode { queryInterface = &cloudMonitoringTimeSeriesQuery{ - RefID: query.RefId, + RefID: query.RefID, ProjectName: q.MetricQuery.ProjectName, Query: q.MetricQuery.Query, - IntervalMS: query.IntervalMs, + IntervalMS: query.IntervalMS, AliasBy: q.MetricQuery.AliasBy, - timeRange: tsdbQuery.TimeRange, + timeRange: *tsdbQuery.TimeRange, } } else { cmtsf.AliasBy = q.MetricQuery.AliasBy @@ -239,7 +261,7 @@ func (e *CloudMonitoringExecutor) buildQueryExecutors(tsdbQuery *tsdb.TsdbQuery) } params.Add("filter", buildFilterString(q.MetricQuery.MetricType, q.MetricQuery.Filters)) params.Add("view", q.MetricQuery.View) - setMetricAggParams(¶ms, &q.MetricQuery, durationSeconds, query.IntervalMs) + setMetricAggParams(¶ms, &q.MetricQuery, durationSeconds, query.IntervalMS) queryInterface = cmtsf } case sloQueryType: @@ -249,8 +271,10 @@ func (e *CloudMonitoringExecutor) buildQueryExecutors(tsdbQuery *tsdb.TsdbQuery) cmtsf.Service = q.SloQuery.ServiceId cmtsf.Slo = q.SloQuery.SloId params.Add("filter", buildSLOFilterExpression(q.SloQuery)) - setSloAggParams(¶ms, &q.SloQuery, durationSeconds, query.IntervalMs) + setSloAggParams(¶ms, &q.SloQuery, durationSeconds, query.IntervalMS) queryInterface = cmtsf + default: + panic(fmt.Sprintf("Unrecognized query type %q", q.QueryType)) } target = params.Encode() @@ -268,7 +292,7 @@ func (e *CloudMonitoringExecutor) buildQueryExecutors(tsdbQuery *tsdb.TsdbQuery) return cloudMonitoringQueryExecutors, nil } -func migrateLegacyQueryModel(query *tsdb.Query) { +func 
migrateLegacyQueryModel(query *plugins.DataSubQuery) { mq := query.Model.Get("metricQuery").MustMap() if mq == nil { migratedModel := simplejson.NewFromAny(map[string]interface{}{ @@ -402,7 +426,8 @@ func containsLabel(labels []string, newLabel string) bool { return false } -func formatLegendKeys(metricType string, defaultMetricName string, labels map[string]string, additionalLabels map[string]string, query *cloudMonitoringTimeSeriesFilter) string { +func formatLegendKeys(metricType string, defaultMetricName string, labels map[string]string, + additionalLabels map[string]string, query *cloudMonitoringTimeSeriesFilter) string { if query.AliasBy == "" { return defaultMetricName } @@ -488,7 +513,7 @@ func calcBucketBound(bucketOptions cloudMonitoringBucketOptions, n int) string { return bucketBound } -func (e *CloudMonitoringExecutor) createRequest(ctx context.Context, dsInfo *models.DataSource, proxyPass string, body io.Reader) (*http.Request, error) { +func (e *Executor) createRequest(ctx context.Context, dsInfo *models.DataSource, proxyPass string, body io.Reader) (*http.Request, error) { u, err := url.Parse(dsInfo.Url) if err != nil { return nil, err @@ -509,7 +534,7 @@ func (e *CloudMonitoringExecutor) createRequest(ctx context.Context, dsInfo *mod req.Header.Set("User-Agent", fmt.Sprintf("Grafana/%s", setting.BuildVersion)) // find plugin - plugin, ok := plugins.DataSources[dsInfo.Type] + plugin, ok := manager.DataSources[dsInfo.Type] if !ok { return nil, errors.New("unable to find datasource plugin CloudMonitoring") } @@ -527,7 +552,7 @@ func (e *CloudMonitoringExecutor) createRequest(ctx context.Context, dsInfo *mod return req, nil } -func (e *CloudMonitoringExecutor) getDefaultProject(ctx context.Context) (string, error) { +func (e *Executor) getDefaultProject(ctx context.Context) (string, error) { authenticationType := e.dsInfo.JsonData.Get("authenticationType").MustString(jwtAuthentication) if authenticationType == gceAuthentication { defaultCredentials, err := google.FindDefaultCredentials(ctx, "https://www.googleapis.com/auth/monitoring.read") diff --git a/pkg/tsdb/cloudmonitoring/cloudmonitoring_test.go b/pkg/tsdb/cloudmonitoring/cloudmonitoring_test.go index 0caebfbde56..6569d8d195e 100644 --- a/pkg/tsdb/cloudmonitoring/cloudmonitoring_test.go +++ b/pkg/tsdb/cloudmonitoring/cloudmonitoring_test.go @@ -8,1112 +8,1076 @@ import ( "net/url" "reflect" "strconv" + "strings" "testing" "time" "github.com/grafana/grafana/pkg/components/simplejson" - "github.com/grafana/grafana/pkg/tsdb" - . 
"github.com/smartystreets/goconvey/convey" + "github.com/grafana/grafana/pkg/plugins" + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" ) func TestCloudMonitoring(t *testing.T) { - Convey("Google Cloud Monitoring", t, func() { - executor := &CloudMonitoringExecutor{} - Convey("Parse migrated queries from frontend and build Google Cloud Monitoring API queries", func() { - fromStart := time.Date(2018, 3, 15, 13, 0, 0, 0, time.UTC).In(time.Local) - tsdbQuery := &tsdb.TsdbQuery{ - TimeRange: &tsdb.TimeRange{ - From: fmt.Sprintf("%v", fromStart.Unix()*1000), - To: fmt.Sprintf("%v", fromStart.Add(34*time.Minute).Unix()*1000), - }, - Queries: []*tsdb.Query{ - { - Model: simplejson.NewFromAny(map[string]interface{}{ + executor := &Executor{} + + t.Run("Parse migrated queries from frontend and build Google Cloud Monitoring API queries", func(t *testing.T) { + t.Run("and query has no aggregation set", func(t *testing.T) { + qes, err := executor.buildQueryExecutors(getBaseQuery()) + require.NoError(t, err) + queries := getCloudMonitoringQueriesFromInterface(t, qes) + + require.Len(t, queries, 1) + assert.Equal(t, "A", queries[0].RefID) + assert.Equal(t, "aggregation.alignmentPeriod=%2B60s&aggregation.crossSeriesReducer=REDUCE_NONE&aggregation.perSeriesAligner=ALIGN_MEAN&filter=metric.type%3D%22a%2Fmetric%2Ftype%22&interval.endTime=2018-03-15T13%3A34%3A00Z&interval.startTime=2018-03-15T13%3A00%3A00Z&view=FULL", queries[0].Target) + assert.Equal(t, 7, len(queries[0].Params)) + assert.Equal(t, "2018-03-15T13:00:00Z", queries[0].Params["interval.startTime"][0]) + assert.Equal(t, "2018-03-15T13:34:00Z", queries[0].Params["interval.endTime"][0]) + assert.Equal(t, "ALIGN_MEAN", queries[0].Params["aggregation.perSeriesAligner"][0]) + assert.Equal(t, "metric.type=\"a/metric/type\"", queries[0].Params["filter"][0]) + assert.Equal(t, "FULL", queries[0].Params["view"][0]) + assert.Equal(t, "testalias", queries[0].AliasBy) + + t.Run("and generated deep link has correct parameters", func(t *testing.T) { + // assign resource type to query parameters to be included in the deep link filter + // in the actual workflow this information comes from the response of the Monitoring API + queries[0].Params.Set("resourceType", "a/resource/type") + dl := queries[0].buildDeepLink() + + expectedTimeSelection := map[string]string{ + "timeRange": "custom", + "start": "2018-03-15T13:00:00Z", + "end": "2018-03-15T13:34:00Z", + } + expectedTimeSeriesFilter := map[string]interface{}{ + "perSeriesAligner": "ALIGN_MEAN", + "filter": "resource.type=\"a/resource/type\" metric.type=\"a/metric/type\"", + } + verifyDeepLink(t, dl, expectedTimeSelection, expectedTimeSeriesFilter) + }) + }) + + t.Run("and query has filters", func(t *testing.T) { + query := getBaseQuery() + query.Queries[0].Model = simplejson.NewFromAny(map[string]interface{}{ + "metricType": "a/metric/type", + "filters": []interface{}{"key", "=", "value", "AND", "key2", "=", "value2", "AND", "resource.type", "=", "another/resource/type"}, + }) + + qes, err := executor.buildQueryExecutors(query) + require.NoError(t, err) + queries := getCloudMonitoringQueriesFromInterface(t, qes) + assert.Equal(t, 1, len(queries)) + assert.Equal(t, `metric.type="a/metric/type" key="value" key2="value2" resource.type="another/resource/type"`, queries[0].Params["filter"][0]) + + // assign a resource type to query parameters + // in the actual workflow this information comes from the response of the Monitoring API + // the deep link should not contain this resource 
type since another resource type is included in the query filters + queries[0].Params.Set("resourceType", "a/resource/type") + dl := queries[0].buildDeepLink() + + expectedTimeSelection := map[string]string{ + "timeRange": "custom", + "start": "2018-03-15T13:00:00Z", + "end": "2018-03-15T13:34:00Z", + } + expectedTimeSeriesFilter := map[string]interface{}{ + "filter": `metric.type="a/metric/type" key="value" key2="value2" resource.type="another/resource/type"`, + } + verifyDeepLink(t, dl, expectedTimeSelection, expectedTimeSeriesFilter) + }) + + t.Run("and alignmentPeriod is set to grafana-auto", func(t *testing.T) { + t.Run("and IntervalMS is larger than 60000", func(t *testing.T) { + tsdbQuery := getBaseQuery() + tsdbQuery.Queries[0].IntervalMS = 1000000 + tsdbQuery.Queries[0].Model = simplejson.NewFromAny(map[string]interface{}{ + "alignmentPeriod": "grafana-auto", + "filters": []interface{}{"key", "=", "value", "AND", "key2", "=", "value2"}, + }) + + qes, err := executor.buildQueryExecutors(tsdbQuery) + require.NoError(t, err) + queries := getCloudMonitoringQueriesFromInterface(t, qes) + assert.Equal(t, `+1000s`, queries[0].Params["aggregation.alignmentPeriod"][0]) + + // assign resource type to query parameters to be included in the deep link filter + // in the actual workflow this information comes from the response of the Monitoring API + queries[0].Params.Set("resourceType", "a/resource/type") + dl := queries[0].buildDeepLink() + + expectedTimeSelection := map[string]string{ + "timeRange": "custom", + "start": "2018-03-15T13:00:00Z", + "end": "2018-03-15T13:34:00Z", + } + expectedTimeSeriesFilter := map[string]interface{}{ + "minAlignmentPeriod": `1000s`, + } + verifyDeepLink(t, dl, expectedTimeSelection, expectedTimeSeriesFilter) + }) + t.Run("and IntervalMS is less than 60000", func(t *testing.T) { + tsdbQuery := getBaseQuery() + tsdbQuery.Queries[0].IntervalMS = 30000 + tsdbQuery.Queries[0].Model = simplejson.NewFromAny(map[string]interface{}{ + "alignmentPeriod": "grafana-auto", + "filters": []interface{}{"key", "=", "value", "AND", "key2", "=", "value2"}, + }) + + qes, err := executor.buildQueryExecutors(tsdbQuery) + require.NoError(t, err) + queries := getCloudMonitoringQueriesFromInterface(t, qes) + assert.Equal(t, `+60s`, queries[0].Params["aggregation.alignmentPeriod"][0]) + + // assign resource type to query parameters to be included in the deep link filter + // in the actual workflow this information comes from the response of the Monitoring API + queries[0].Params.Set("resourceType", "a/resource/type") + dl := queries[0].buildDeepLink() + + expectedTimeSelection := map[string]string{ + "timeRange": "custom", + "start": "2018-03-15T13:00:00Z", + "end": "2018-03-15T13:34:00Z", + } + expectedTimeSeriesFilter := map[string]interface{}{ + "minAlignmentPeriod": `60s`, + } + verifyDeepLink(t, dl, expectedTimeSelection, expectedTimeSeriesFilter) + }) + }) + + t.Run("and alignmentPeriod is set to cloud-monitoring-auto", func(t *testing.T) { // legacy + t.Run("and range is two hours", func(t *testing.T) { + tsdbQuery := getBaseQuery() + tsdbQuery.TimeRange.From = "1538033322461" + tsdbQuery.TimeRange.To = "1538040522461" + tsdbQuery.Queries[0].Model = simplejson.NewFromAny(map[string]interface{}{ + "target": "target", + "alignmentPeriod": "cloud-monitoring-auto", + }) + + qes, err := executor.buildQueryExecutors(tsdbQuery) + require.NoError(t, err) + queries := getCloudMonitoringQueriesFromInterface(t, qes) + assert.Equal(t, `+60s`, 
queries[0].Params["aggregation.alignmentPeriod"][0]) + }) + + t.Run("and range is 22 hours", func(t *testing.T) { + tsdbQuery := getBaseQuery() + tsdbQuery.TimeRange.From = "1538034524922" + tsdbQuery.TimeRange.To = "1538113724922" + tsdbQuery.Queries[0].Model = simplejson.NewFromAny(map[string]interface{}{ + "target": "target", + "alignmentPeriod": "cloud-monitoring-auto", + }) + + qes, err := executor.buildQueryExecutors(tsdbQuery) + require.NoError(t, err) + queries := getCloudMonitoringQueriesFromInterface(t, qes) + assert.Equal(t, `+60s`, queries[0].Params["aggregation.alignmentPeriod"][0]) + }) + + t.Run("and range is 23 hours", func(t *testing.T) { + tsdbQuery := getBaseQuery() + tsdbQuery.TimeRange.From = "1538034567985" + tsdbQuery.TimeRange.To = "1538117367985" + tsdbQuery.Queries[0].Model = simplejson.NewFromAny(map[string]interface{}{ + "target": "target", + "alignmentPeriod": "cloud-monitoring-auto", + }) + + qes, err := executor.buildQueryExecutors(tsdbQuery) + require.NoError(t, err) + queries := getCloudMonitoringQueriesFromInterface(t, qes) + assert.Equal(t, `+300s`, queries[0].Params["aggregation.alignmentPeriod"][0]) + }) + + t.Run("and range is 7 days", func(t *testing.T) { + tsdbQuery := getBaseQuery() + tsdbQuery.TimeRange.From = "1538036324073" + tsdbQuery.TimeRange.To = "1538641124073" + tsdbQuery.Queries[0].Model = simplejson.NewFromAny(map[string]interface{}{ + "target": "target", + "alignmentPeriod": "cloud-monitoring-auto", + }) + + qes, err := executor.buildQueryExecutors(tsdbQuery) + require.NoError(t, err) + queries := getCloudMonitoringQueriesFromInterface(t, qes) + assert.Equal(t, `+3600s`, queries[0].Params["aggregation.alignmentPeriod"][0]) + }) + }) + + t.Run("and alignmentPeriod is set to stackdriver-auto", func(t *testing.T) { // legacy + t.Run("and range is two hours", func(t *testing.T) { + tsdbQuery := getBaseQuery() + tsdbQuery.TimeRange.From = "1538033322461" + tsdbQuery.TimeRange.To = "1538040522461" + tsdbQuery.Queries[0].Model = simplejson.NewFromAny(map[string]interface{}{ + "target": "target", + "alignmentPeriod": "stackdriver-auto", + }) + + qes, err := executor.buildQueryExecutors(tsdbQuery) + require.NoError(t, err) + queries := getCloudMonitoringQueriesFromInterface(t, qes) + assert.Equal(t, `+60s`, queries[0].Params["aggregation.alignmentPeriod"][0]) + + // assign resource type to query parameters to be included in the deep link filter + // in the actual workflow this information comes from the response of the Monitoring API + queries[0].Params.Set("resourceType", "a/resource/type") + dl := queries[0].buildDeepLink() + + expectedTimeSelection := map[string]string{ + "timeRange": "custom", + "start": "2018-09-27T07:28:42Z", + "end": "2018-09-27T09:28:42Z", + } + expectedTimeSeriesFilter := map[string]interface{}{ + "minAlignmentPeriod": `60s`, + } + verifyDeepLink(t, dl, expectedTimeSelection, expectedTimeSeriesFilter) + }) + + t.Run("and range is 22 hours", func(t *testing.T) { + tsdbQuery := getBaseQuery() + tsdbQuery.TimeRange.From = "1538034524922" + tsdbQuery.TimeRange.To = "1538113724922" + tsdbQuery.Queries[0].Model = simplejson.NewFromAny(map[string]interface{}{ + "target": "target", + "alignmentPeriod": "stackdriver-auto", + }) + + qes, err := executor.buildQueryExecutors(tsdbQuery) + require.NoError(t, err) + queries := getCloudMonitoringQueriesFromInterface(t, qes) + assert.Equal(t, `+60s`, queries[0].Params["aggregation.alignmentPeriod"][0]) + + // assign resource type to query parameters to be included in the deep link filter 
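An editorial aside on the test conversion running through this file: each nested Convey block becomes a t.Run subtest, So(actual, ShouldEqual, expected) becomes assert.Equal(t, expected, actual) with the arguments reversed, and require replaces assertions whose failure should stop the subtest immediately. A minimal sketch of the pattern, self-contained apart from the testify dependency (the names inside it are placeholders, not taken from this file):

    package example

    import (
        "testing"

        "github.com/stretchr/testify/assert"
        "github.com/stretchr/testify/require"
    )

    func TestAlignmentPeriodPattern(t *testing.T) {
        t.Run("and alignmentPeriod is set to grafana-auto", func(t *testing.T) {
            got := "+60s" // placeholder for queries[0].Params["aggregation.alignmentPeriod"][0]
            require.NotEmpty(t, got)     // precondition: abort the subtest on failure
            assert.Equal(t, "+60s", got) // assertion: record the failure and continue
        })
    }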
+ // in the actual workflow this information comes from the response of the Monitoring API + queries[0].Params.Set("resourceType", "a/resource/type") + dl := queries[0].buildDeepLink() + + expectedTimeSelection := map[string]string{ + "timeRange": "custom", + "start": "2018-09-27T07:48:44Z", + "end": "2018-09-28T05:48:44Z", + } + expectedTimeSeriesFilter := map[string]interface{}{ + "minAlignmentPeriod": `60s`, + } + verifyDeepLink(t, dl, expectedTimeSelection, expectedTimeSeriesFilter) + }) + + t.Run("and range is 23 hours", func(t *testing.T) { + tsdbQuery := getBaseQuery() + tsdbQuery.TimeRange.From = "1538034567985" + tsdbQuery.TimeRange.To = "1538117367985" + tsdbQuery.Queries[0].Model = simplejson.NewFromAny(map[string]interface{}{ + "target": "target", + "alignmentPeriod": "stackdriver-auto", + }) + + qes, err := executor.buildQueryExecutors(tsdbQuery) + require.NoError(t, err) + queries := getCloudMonitoringQueriesFromInterface(t, qes) + assert.Equal(t, `+300s`, queries[0].Params["aggregation.alignmentPeriod"][0]) + + // assign resource type to query parameters to be included in the deep link filter + // in the actual workflow this information comes from the response of the Monitoring API + queries[0].Params.Set("resourceType", "a/resource/type") + dl := queries[0].buildDeepLink() + + expectedTimeSelection := map[string]string{ + "timeRange": "custom", + "start": "2018-09-27T07:49:27Z", + "end": "2018-09-28T06:49:27Z", + } + expectedTimeSeriesFilter := map[string]interface{}{ + "minAlignmentPeriod": `300s`, + } + verifyDeepLink(t, dl, expectedTimeSelection, expectedTimeSeriesFilter) + }) + + t.Run("and range is 7 days", func(t *testing.T) { + tsdbQuery := getBaseQuery() + tsdbQuery.TimeRange.From = "1538036324073" + tsdbQuery.TimeRange.To = "1538641124073" + tsdbQuery.Queries[0].Model = simplejson.NewFromAny(map[string]interface{}{ + "target": "target", + "alignmentPeriod": "stackdriver-auto", + }) + + qes, err := executor.buildQueryExecutors(tsdbQuery) + require.NoError(t, err) + queries := getCloudMonitoringQueriesFromInterface(t, qes) + assert.Equal(t, `+3600s`, queries[0].Params["aggregation.alignmentPeriod"][0]) + + // assign resource type to query parameters to be included in the deep link filter + // in the actual workflow this information comes from the response of the Monitoring API + queries[0].Params.Set("resourceType", "a/resource/type") + dl := queries[0].buildDeepLink() + + expectedTimeSelection := map[string]string{ + "timeRange": "custom", + "start": "2018-09-27T08:18:44Z", + "end": "2018-10-04T08:18:44Z", + } + expectedTimeSeriesFilter := map[string]interface{}{ + "minAlignmentPeriod": `3600s`, + } + verifyDeepLink(t, dl, expectedTimeSelection, expectedTimeSeriesFilter) + }) + }) + + t.Run("and alignmentPeriod is set in frontend", func(t *testing.T) { + t.Run("and alignment period is within accepted range", func(t *testing.T) { + tsdbQuery := getBaseQuery() + tsdbQuery.Queries[0].IntervalMS = 1000 + tsdbQuery.Queries[0].Model = simplejson.NewFromAny(map[string]interface{}{ + "alignmentPeriod": "+600s", + }) + + qes, err := executor.buildQueryExecutors(tsdbQuery) + require.NoError(t, err) + queries := getCloudMonitoringQueriesFromInterface(t, qes) + assert.Equal(t, `+600s`, queries[0].Params["aggregation.alignmentPeriod"][0]) + + // assign resource type to query parameters to be included in the deep link filter + // in the actual workflow this information comes from the response of the Monitoring API + queries[0].Params.Set("resourceType", "a/resource/type") + dl := 
queries[0].buildDeepLink() + + expectedTimeSelection := map[string]string{ + "timeRange": "custom", + "start": "2018-03-15T13:00:00Z", + "end": "2018-03-15T13:34:00Z", + } + expectedTimeSeriesFilter := map[string]interface{}{ + "minAlignmentPeriod": `600s`, + } + verifyDeepLink(t, dl, expectedTimeSelection, expectedTimeSeriesFilter) + }) + }) + + t.Run("and query has aggregation mean set", func(t *testing.T) { + tsdbQuery := getBaseQuery() + tsdbQuery.Queries[0].Model = simplejson.NewFromAny(map[string]interface{}{ + "metricType": "a/metric/type", + "crossSeriesReducer": "REDUCE_SUM", + "view": "FULL", + }) + + qes, err := executor.buildQueryExecutors(tsdbQuery) + require.NoError(t, err) + queries := getCloudMonitoringQueriesFromInterface(t, qes) + + assert.Equal(t, 1, len(queries)) + assert.Equal(t, "A", queries[0].RefID) + assert.Equal(t, "aggregation.alignmentPeriod=%2B60s&aggregation.crossSeriesReducer=REDUCE_SUM&aggregation.perSeriesAligner=ALIGN_MEAN&filter=metric.type%3D%22a%2Fmetric%2Ftype%22&interval.endTime=2018-03-15T13%3A34%3A00Z&interval.startTime=2018-03-15T13%3A00%3A00Z&view=FULL", queries[0].Target) + assert.Equal(t, 7, len(queries[0].Params)) + assert.Equal(t, "2018-03-15T13:00:00Z", queries[0].Params["interval.startTime"][0]) + assert.Equal(t, "2018-03-15T13:34:00Z", queries[0].Params["interval.endTime"][0]) + assert.Equal(t, "REDUCE_SUM", queries[0].Params["aggregation.crossSeriesReducer"][0]) + assert.Equal(t, "ALIGN_MEAN", queries[0].Params["aggregation.perSeriesAligner"][0]) + assert.Equal(t, "+60s", queries[0].Params["aggregation.alignmentPeriod"][0]) + assert.Equal(t, "metric.type=\"a/metric/type\"", queries[0].Params["filter"][0]) + assert.Equal(t, "FULL", queries[0].Params["view"][0]) + + // assign resource type to query parameters to be included in the deep link filter + // in the actual workflow this information comes from the response of the Monitoring API + queries[0].Params.Set("resourceType", "a/resource/type") + dl := queries[0].buildDeepLink() + + expectedTimeSelection := map[string]string{ + "timeRange": "custom", + "start": "2018-03-15T13:00:00Z", + "end": "2018-03-15T13:34:00Z", + } + expectedTimeSeriesFilter := map[string]interface{}{ + "minAlignmentPeriod": `60s`, + "crossSeriesReducer": "REDUCE_SUM", + "perSeriesAligner": "ALIGN_MEAN", + "filter": "resource.type=\"a/resource/type\" metric.type=\"a/metric/type\"", + } + verifyDeepLink(t, dl, expectedTimeSelection, expectedTimeSeriesFilter) + }) + + t.Run("and query has group bys", func(t *testing.T) { + tsdbQuery := getBaseQuery() + tsdbQuery.Queries[0].Model = simplejson.NewFromAny(map[string]interface{}{ + "metricType": "a/metric/type", + "crossSeriesReducer": "REDUCE_NONE", + "groupBys": []interface{}{"metric.label.group1", "metric.label.group2"}, + "view": "FULL", + }) + + qes, err := executor.buildQueryExecutors(tsdbQuery) + require.NoError(t, err) + queries := getCloudMonitoringQueriesFromInterface(t, qes) + + assert.Equal(t, 1, len(queries)) + assert.Equal(t, "A", queries[0].RefID) + assert.Equal(t, "aggregation.alignmentPeriod=%2B60s&aggregation.crossSeriesReducer=REDUCE_NONE&aggregation.groupByFields=metric.label.group1&aggregation.groupByFields=metric.label.group2&aggregation.perSeriesAligner=ALIGN_MEAN&filter=metric.type%3D%22a%2Fmetric%2Ftype%22&interval.endTime=2018-03-15T13%3A34%3A00Z&interval.startTime=2018-03-15T13%3A00%3A00Z&view=FULL", queries[0].Target) + assert.Equal(t, 8, len(queries[0].Params)) + assert.Equal(t, "2018-03-15T13:00:00Z", queries[0].Params["interval.startTime"][0]) 
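Another aside: the "+1000s" and "+60s" expectations in these grafana-auto subtests pin down how the alignment period is derived from the panel interval. A runnable sketch of the rule they imply, interval seconds clamped to the API's 60-second minimum (grafanaAutoAlignment is a hypothetical name inferred from the expectations, not the datasource's actual helper):

    package main

    import "fmt"

    // grafanaAutoAlignment: panel interval in seconds, clamped to the
    // 60s minimum Cloud Monitoring accepts for an alignment period.
    func grafanaAutoAlignment(intervalMS int64) string {
        seconds := intervalMS / 1000
        if seconds < 60 {
            seconds = 60
        }
        return fmt.Sprintf("+%ds", seconds)
    }

    func main() {
        fmt.Println(grafanaAutoAlignment(1000000)) // +1000s, as in the test above
        fmt.Println(grafanaAutoAlignment(30000))   // +60s, clamped
    }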
+ assert.Equal(t, "2018-03-15T13:34:00Z", queries[0].Params["interval.endTime"][0]) + assert.Equal(t, "ALIGN_MEAN", queries[0].Params["aggregation.perSeriesAligner"][0]) + assert.Equal(t, "metric.label.group1", queries[0].Params["aggregation.groupByFields"][0]) + assert.Equal(t, "metric.label.group2", queries[0].Params["aggregation.groupByFields"][1]) + assert.Equal(t, "metric.type=\"a/metric/type\"", queries[0].Params["filter"][0]) + assert.Equal(t, "FULL", queries[0].Params["view"][0]) + + // assign resource type to query parameters to be included in the deep link filter + // in the actual workflow this information comes from the response of the Monitoring API + queries[0].Params.Set("resourceType", "a/resource/type") + dl := queries[0].buildDeepLink() + + expectedTimeSelection := map[string]string{ + "timeRange": "custom", + "start": "2018-03-15T13:00:00Z", + "end": "2018-03-15T13:34:00Z", + } + expectedTimeSeriesFilter := map[string]interface{}{ + "minAlignmentPeriod": `60s`, + "perSeriesAligner": "ALIGN_MEAN", + "filter": "resource.type=\"a/resource/type\" metric.type=\"a/metric/type\"", + "groupByFields": []interface{}{"metric.label.group1", "metric.label.group2"}, + } + verifyDeepLink(t, dl, expectedTimeSelection, expectedTimeSeriesFilter) + }) + }) + + t.Run("Parse queries from frontend and build Google Cloud Monitoring API queries", func(t *testing.T) { + fromStart := time.Date(2018, 3, 15, 13, 0, 0, 0, time.UTC).In(time.Local) + tsdbQuery := plugins.DataQuery{ + TimeRange: &plugins.DataTimeRange{ + From: fmt.Sprintf("%v", fromStart.Unix()*1000), + To: fmt.Sprintf("%v", fromStart.Add(34*time.Minute).Unix()*1000), + }, + Queries: []plugins.DataSubQuery{ + { + Model: simplejson.NewFromAny(map[string]interface{}{ + "queryType": metricQueryType, + "metricQuery": map[string]interface{}{ "metricType": "a/metric/type", "view": "FULL", "aliasBy": "testalias", "type": "timeSeriesQuery", - }), - RefId: "A", - }, + "groupBys": []interface{}{"metric.label.group1", "metric.label.group2"}, + }, + }), + RefID: "A", }, + }, + } + + t.Run("and query type is metrics", func(t *testing.T) { + qes, err := executor.buildQueryExecutors(tsdbQuery) + require.NoError(t, err) + queries := getCloudMonitoringQueriesFromInterface(t, qes) + + assert.Equal(t, 1, len(queries)) + assert.Equal(t, "A", queries[0].RefID) + assert.Equal(t, "aggregation.alignmentPeriod=%2B60s&aggregation.crossSeriesReducer=REDUCE_NONE&aggregation.groupByFields=metric.label.group1&aggregation.groupByFields=metric.label.group2&aggregation.perSeriesAligner=ALIGN_MEAN&filter=metric.type%3D%22a%2Fmetric%2Ftype%22&interval.endTime=2018-03-15T13%3A34%3A00Z&interval.startTime=2018-03-15T13%3A00%3A00Z&view=FULL", queries[0].Target) + assert.Equal(t, 8, len(queries[0].Params)) + assert.Equal(t, "metric.label.group1", queries[0].Params["aggregation.groupByFields"][0]) + assert.Equal(t, "metric.label.group2", queries[0].Params["aggregation.groupByFields"][1]) + assert.Equal(t, "2018-03-15T13:00:00Z", queries[0].Params["interval.startTime"][0]) + assert.Equal(t, "2018-03-15T13:34:00Z", queries[0].Params["interval.endTime"][0]) + assert.Equal(t, "ALIGN_MEAN", queries[0].Params["aggregation.perSeriesAligner"][0]) + assert.Equal(t, "metric.type=\"a/metric/type\"", queries[0].Params["filter"][0]) + assert.Equal(t, "FULL", queries[0].Params["view"][0]) + assert.Equal(t, "testalias", queries[0].AliasBy) + assert.Equal(t, []string{"metric.label.group1", "metric.label.group2"}, queries[0].GroupBys) + + // assign resource type to query parameters to be 
included in the deep link filter + // in the actual workflow this information comes from the response of the Monitoring API + queries[0].Params.Set("resourceType", "a/resource/type") + dl := queries[0].buildDeepLink() + + expectedTimeSelection := map[string]string{ + "timeRange": "custom", + "start": "2018-03-15T13:00:00Z", + "end": "2018-03-15T13:34:00Z", + } + expectedTimeSeriesFilter := map[string]interface{}{ + "minAlignmentPeriod": `60s`, + "perSeriesAligner": "ALIGN_MEAN", + "filter": "resource.type=\"a/resource/type\" metric.type=\"a/metric/type\"", + "groupByFields": []interface{}{"metric.label.group1", "metric.label.group2"}, + } + verifyDeepLink(t, dl, expectedTimeSelection, expectedTimeSeriesFilter) + + tsdbQuery.Queries[0].Model = simplejson.NewFromAny(map[string]interface{}{ + "queryType": metricQueryType, + "metricQuery": map[string]interface{}{ + "editorMode": mqlEditorMode, + "projectName": "test-proj", + "query": "test-query", + "aliasBy": "test-alias", + }, + "sloQuery": map[string]interface{}{}, + }) + + qes, err = executor.buildQueryExecutors(tsdbQuery) + require.NoError(t, err) + tqueries := make([]*cloudMonitoringTimeSeriesQuery, 0) + for _, qi := range qes { + q, ok := qi.(*cloudMonitoringTimeSeriesQuery) + assert.True(t, ok) + tqueries = append(tqueries, q) } - Convey("and query has no aggregation set", func() { - qes, err := executor.buildQueryExecutors(tsdbQuery) - So(err, ShouldBeNil) - queries := getCloudMonitoringQueriesFromInterface(qes) + assert.Equal(t, 1, len(tqueries)) + assert.Equal(t, "A", tqueries[0].RefID) + assert.Equal(t, "test-proj", tqueries[0].ProjectName) + assert.Equal(t, "test-query", tqueries[0].Query) + assert.Equal(t, "test-alias", tqueries[0].AliasBy) + }) - So(len(queries), ShouldEqual, 1) - So(queries[0].RefID, ShouldEqual, "A") - So(queries[0].Target, ShouldEqual, "aggregation.alignmentPeriod=%2B60s&aggregation.crossSeriesReducer=REDUCE_NONE&aggregation.perSeriesAligner=ALIGN_MEAN&filter=metric.type%3D%22a%2Fmetric%2Ftype%22&interval.endTime=2018-03-15T13%3A34%3A00Z&interval.startTime=2018-03-15T13%3A00%3A00Z&view=FULL") - So(len(queries[0].Params), ShouldEqual, 7) - So(queries[0].Params["interval.startTime"][0], ShouldEqual, "2018-03-15T13:00:00Z") - So(queries[0].Params["interval.endTime"][0], ShouldEqual, "2018-03-15T13:34:00Z") - So(queries[0].Params["aggregation.perSeriesAligner"][0], ShouldEqual, "ALIGN_MEAN") - So(queries[0].Params["filter"][0], ShouldEqual, "metric.type=\"a/metric/type\"") - So(queries[0].Params["view"][0], ShouldEqual, "FULL") - So(queries[0].AliasBy, ShouldEqual, "testalias") - - Convey("and generated deep link has correct parameters", func() { - // assign resource type to query parameters to be included in the deep link filter - // in the actual workflow this information comes from the response of the Monitoring API - queries[0].Params.Set("resourceType", "a/resource/type") - dl := queries[0].buildDeepLink() - - expectedTimeSelection := map[string]string{ - "timeRange": "custom", - "start": "2018-03-15T13:00:00Z", - "end": "2018-03-15T13:34:00Z", - } - expectedTimeSeriesFilter := map[string]interface{}{ - "perSeriesAligner": "ALIGN_MEAN", - "filter": "resource.type=\"a/resource/type\" metric.type=\"a/metric/type\"", - } - verifyDeepLink(dl, expectedTimeSelection, expectedTimeSeriesFilter) - }) + t.Run("and query type is SLOs", func(t *testing.T) { + tsdbQuery.Queries[0].Model = simplejson.NewFromAny(map[string]interface{}{ + "queryType": sloQueryType, + "metricQuery": map[string]interface{}{}, + "sloQuery": 
map[string]interface{}{ + "projectName": "test-proj", + "alignmentPeriod": "stackdriver-auto", + "perSeriesAligner": "ALIGN_NEXT_OLDER", + "aliasBy": "", + "selectorName": "select_slo_health", + "serviceId": "test-service", + "sloId": "test-slo", + }, }) - Convey("and query has filters", func() { - tsdbQuery.Queries[0].Model = simplejson.NewFromAny(map[string]interface{}{ - "metricType": "a/metric/type", - "filters": []interface{}{"key", "=", "value", "AND", "key2", "=", "value2", "AND", "resource.type", "=", "another/resource/type"}, - }) + qes, err := executor.buildQueryExecutors(tsdbQuery) + require.NoError(t, err) + queries := getCloudMonitoringQueriesFromInterface(t, qes) - qes, err := executor.buildQueryExecutors(tsdbQuery) - So(err, ShouldBeNil) - queries := getCloudMonitoringQueriesFromInterface(qes) - So(len(queries), ShouldEqual, 1) - So(queries[0].Params["filter"][0], ShouldEqual, `metric.type="a/metric/type" key="value" key2="value2" resource.type="another/resource/type"`) + assert.Equal(t, 1, len(queries)) + assert.Equal(t, "A", queries[0].RefID) + assert.Equal(t, "2018-03-15T13:00:00Z", queries[0].Params["interval.startTime"][0]) + assert.Equal(t, "2018-03-15T13:34:00Z", queries[0].Params["interval.endTime"][0]) + assert.Equal(t, `+60s`, queries[0].Params["aggregation.alignmentPeriod"][0]) + assert.Equal(t, "", queries[0].AliasBy) + assert.Equal(t, "ALIGN_MEAN", queries[0].Params["aggregation.perSeriesAligner"][0]) + assert.Equal(t, `aggregation.alignmentPeriod=%2B60s&aggregation.perSeriesAligner=ALIGN_MEAN&filter=select_slo_health%28%22projects%2Ftest-proj%2Fservices%2Ftest-service%2FserviceLevelObjectives%2Ftest-slo%22%29&interval.endTime=2018-03-15T13%3A34%3A00Z&interval.startTime=2018-03-15T13%3A00%3A00Z`, queries[0].Target) + assert.Equal(t, 5, len(queries[0].Params)) - Convey("and generated deep link has correct parameters", func() { - // assign a resource type to query parameters - // in the actual workflow this information comes from the response of the Monitoring API - // the deep link should not contain this resource type since another resource type is included in the query filters - queries[0].Params.Set("resourceType", "a/resource/type") - dl := queries[0].buildDeepLink() - - expectedTimeSelection := map[string]string{ - "timeRange": "custom", - "start": "2018-03-15T13:00:00Z", - "end": "2018-03-15T13:34:00Z", - } - expectedTimeSeriesFilter := map[string]interface{}{ - "filter": `metric.type="a/metric/type" key="value" key2="value2" resource.type="another/resource/type"`, - } - verifyDeepLink(dl, expectedTimeSelection, expectedTimeSeriesFilter) - }) + tsdbQuery.Queries[0].Model = simplejson.NewFromAny(map[string]interface{}{ + "queryType": sloQueryType, + "metricQuery": map[string]interface{}{}, + "sloQuery": map[string]interface{}{ + "projectName": "test-proj", + "alignmentPeriod": "stackdriver-auto", + "perSeriesAligner": "ALIGN_NEXT_OLDER", + "aliasBy": "", + "selectorName": "select_slo_compliance", + "serviceId": "test-service", + "sloId": "test-slo", + }, }) - Convey("and alignmentPeriod is set to grafana-auto", func() { - Convey("and IntervalMs is larger than 60000", func() { - tsdbQuery.Queries[0].IntervalMs = 1000000 - tsdbQuery.Queries[0].Model = simplejson.NewFromAny(map[string]interface{}{ - "alignmentPeriod": "grafana-auto", - "filters": []interface{}{"key", "=", "value", "AND", "key2", "=", "value2"}, - }) + qes, err = executor.buildQueryExecutors(tsdbQuery) + require.NoError(t, err) + qqueries := getCloudMonitoringQueriesFromInterface(t, qes) + 
assert.Equal(t, "ALIGN_NEXT_OLDER", qqueries[0].Params["aggregation.perSeriesAligner"][0]) - qes, err := executor.buildQueryExecutors(tsdbQuery) - So(err, ShouldBeNil) - queries := getCloudMonitoringQueriesFromInterface(qes) - So(queries[0].Params["aggregation.alignmentPeriod"][0], ShouldEqual, `+1000s`) + dl := qqueries[0].buildDeepLink() + assert.Empty(t, dl) + }) + }) - Convey("and generated deep link has correct parameters", func() { - // assign resource type to query parameters to be included in the deep link filter - // in the actual workflow this information comes from the response of the Monitoring API - queries[0].Params.Set("resourceType", "a/resource/type") - dl := queries[0].buildDeepLink() + t.Run("Parse cloud monitoring response in the time series format", func(t *testing.T) { + t.Run("when data from query aggregated to one time series", func(t *testing.T) { + data, err := loadTestFile("./test-data/1-series-response-agg-one-metric.json") + require.NoError(t, err) + assert.Equal(t, 1, len(data.TimeSeries)) - expectedTimeSelection := map[string]string{ - "timeRange": "custom", - "start": "2018-03-15T13:00:00Z", - "end": "2018-03-15T13:34:00Z", - } - expectedTimeSeriesFilter := map[string]interface{}{ - "minAlignmentPeriod": `1000s`, - } - verifyDeepLink(dl, expectedTimeSelection, expectedTimeSeriesFilter) - }) - }) - Convey("and IntervalMs is less than 60000", func() { - tsdbQuery.Queries[0].IntervalMs = 30000 - tsdbQuery.Queries[0].Model = simplejson.NewFromAny(map[string]interface{}{ - "alignmentPeriod": "grafana-auto", - "filters": []interface{}{"key", "=", "value", "AND", "key2", "=", "value2"}, - }) + res := &plugins.DataQueryResult{Meta: simplejson.New(), RefID: "A"} + query := &cloudMonitoringTimeSeriesFilter{Params: url.Values{}} + err = query.parseResponse(res, data, "") + require.NoError(t, err) + frames, err := res.Dataframes.Decoded() + require.NoError(t, err) + require.Len(t, frames, 1) + assert.Equal(t, "serviceruntime.googleapis.com/api/request_count", frames[0].Fields[1].Name) + assert.Equal(t, 3, frames[0].Fields[1].Len()) - qes, err := executor.buildQueryExecutors(tsdbQuery) - So(err, ShouldBeNil) - queries := getCloudMonitoringQueriesFromInterface(qes) - So(queries[0].Params["aggregation.alignmentPeriod"][0], ShouldEqual, `+60s`) + assert.Equal(t, 0.05, frames[0].Fields[1].At(0)) + assert.Equal(t, time.Unix(int64(1536670020000/1000), 0).UTC(), frames[0].Fields[0].At(0)) - Convey("and generated deep link has correct parameters", func() { - // assign resource type to query parameters to be included in the deep link filter - // in the actual workflow this information comes from the response of the Monitoring API - queries[0].Params.Set("resourceType", "a/resource/type") - dl := queries[0].buildDeepLink() + assert.Equal(t, 1.05, frames[0].Fields[1].At(1)) + assert.Equal(t, time.Unix(int64(1536670080000/1000), 0).UTC(), frames[0].Fields[0].At(1)) - expectedTimeSelection := map[string]string{ - "timeRange": "custom", - "start": "2018-03-15T13:00:00Z", - "end": "2018-03-15T13:34:00Z", - } - expectedTimeSeriesFilter := map[string]interface{}{ - "minAlignmentPeriod": `60s`, - } - verifyDeepLink(dl, expectedTimeSelection, expectedTimeSeriesFilter) - }) - }) + assert.Equal(t, 1.0666666666667, frames[0].Fields[1].At(2)) + assert.Equal(t, time.Unix(int64(1536670260000/1000), 0).UTC(), frames[0].Fields[0].At(2)) + }) + + t.Run("when data from query with no aggregation", func(t *testing.T) { + data, err := loadTestFile("./test-data/2-series-response-no-agg.json") + 
require.NoError(t, err) + assert.Equal(t, 3, len(data.TimeSeries)) + + res := &plugins.DataQueryResult{Meta: simplejson.New(), RefID: "A"} + query := &cloudMonitoringTimeSeriesFilter{Params: url.Values{}} + err = query.parseResponse(res, data, "") + require.NoError(t, err) + frames, err := res.Dataframes.Decoded() + require.NoError(t, err) + + assert.Equal(t, 3, len(frames)) + assert.Equal(t, "compute.googleapis.com/instance/cpu/usage_time collector-asia-east-1", frames[0].Fields[1].Name) + assert.Equal(t, "compute.googleapis.com/instance/cpu/usage_time collector-europe-west-1", frames[1].Fields[1].Name) + assert.Equal(t, "compute.googleapis.com/instance/cpu/usage_time collector-us-east-1", frames[2].Fields[1].Name) + + assert.Equal(t, 3, frames[0].Fields[1].Len()) + assert.Equal(t, 9.8566497180145, frames[0].Fields[1].At(0)) + assert.Equal(t, 9.7323568146676, frames[0].Fields[1].At(1)) + assert.Equal(t, 9.7730520330369, frames[0].Fields[1].At(2)) + + labels := res.Meta.Get("labels").Interface().(map[string][]string) + require.NotNil(t, labels) + assert.Equal(t, 3, len(labels["metric.label.instance_name"])) + assert.Contains(t, labels["metric.label.instance_name"], "collector-asia-east-1") + assert.Contains(t, labels["metric.label.instance_name"], "collector-europe-west-1") + assert.Contains(t, labels["metric.label.instance_name"], "collector-us-east-1") + + assert.Equal(t, 3, len(labels["resource.label.zone"])) + assert.Contains(t, labels["resource.label.zone"], "asia-east1-a") + assert.Contains(t, labels["resource.label.zone"], "europe-west1-b") + assert.Contains(t, labels["resource.label.zone"], "us-east1-b") + + assert.Equal(t, 1, len(labels["resource.label.project_id"])) + assert.Equal(t, "grafana-prod", labels["resource.label.project_id"][0]) + }) + + t.Run("when data from query with no aggregation and group bys", func(t *testing.T) { + data, err := loadTestFile("./test-data/2-series-response-no-agg.json") + require.NoError(t, err) + assert.Equal(t, 3, len(data.TimeSeries)) + + res := &plugins.DataQueryResult{Meta: simplejson.New(), RefID: "A"} + query := &cloudMonitoringTimeSeriesFilter{Params: url.Values{}, GroupBys: []string{ + "metric.label.instance_name", "resource.label.zone", + }} + err = query.parseResponse(res, data, "") + require.NoError(t, err) + frames, err := res.Dataframes.Decoded() + require.NoError(t, err) + + assert.Equal(t, 3, len(frames)) + assert.Equal(t, "compute.googleapis.com/instance/cpu/usage_time collector-asia-east-1 asia-east1-a", frames[0].Fields[1].Name) + assert.Equal(t, "compute.googleapis.com/instance/cpu/usage_time collector-europe-west-1 europe-west1-b", frames[1].Fields[1].Name) + assert.Equal(t, "compute.googleapis.com/instance/cpu/usage_time collector-us-east-1 us-east1-b", frames[2].Fields[1].Name) + }) + + t.Run("when data from query with no aggregation and alias by", func(t *testing.T) { + data, err := loadTestFile("./test-data/2-series-response-no-agg.json") + require.NoError(t, err) + assert.Equal(t, 3, len(data.TimeSeries)) + + res := &plugins.DataQueryResult{Meta: simplejson.New(), RefID: "A"} + + t.Run("and the alias pattern is for metric type, a metric label and a resource label", func(t *testing.T) { + query := &cloudMonitoringTimeSeriesFilter{Params: url.Values{}, AliasBy: "{{metric.type}} - {{metric.label.instance_name}} - {{resource.label.zone}}", GroupBys: []string{"metric.label.instance_name", "resource.label.zone"}} + err = query.parseResponse(res, data, "") + require.NoError(t, err) + frames, err := res.Dataframes.Decoded() + 
require.NoError(t, err) + + assert.Equal(t, 3, len(frames)) + assert.Equal(t, "compute.googleapis.com/instance/cpu/usage_time - collector-asia-east-1 - asia-east1-a", frames[0].Fields[1].Name) + assert.Equal(t, "compute.googleapis.com/instance/cpu/usage_time - collector-europe-west-1 - europe-west1-b", frames[1].Fields[1].Name) + assert.Equal(t, "compute.googleapis.com/instance/cpu/usage_time - collector-us-east-1 - us-east1-b", frames[2].Fields[1].Name) }) - Convey("and alignmentPeriod is set to cloud-monitoring-auto", func() { // legacy - Convey("and range is two hours", func() { - tsdbQuery.TimeRange.From = "1538033322461" - tsdbQuery.TimeRange.To = "1538040522461" - tsdbQuery.Queries[0].Model = simplejson.NewFromAny(map[string]interface{}{ - "target": "target", - "alignmentPeriod": "cloud-monitoring-auto", - }) + t.Run("and the alias pattern is for metric name", func(t *testing.T) { + query := &cloudMonitoringTimeSeriesFilter{Params: url.Values{}, AliasBy: "metric {{metric.name}} service {{metric.service}}", GroupBys: []string{"metric.label.instance_name", "resource.label.zone"}} + err = query.parseResponse(res, data, "") + require.NoError(t, err) + frames, err := res.Dataframes.Decoded() + require.NoError(t, err) - qes, err := executor.buildQueryExecutors(tsdbQuery) - So(err, ShouldBeNil) - queries := getCloudMonitoringQueriesFromInterface(qes) - So(queries[0].Params["aggregation.alignmentPeriod"][0], ShouldEqual, `+60s`) - }) - - Convey("and range is 22 hours", func() { - tsdbQuery.TimeRange.From = "1538034524922" - tsdbQuery.TimeRange.To = "1538113724922" - tsdbQuery.Queries[0].Model = simplejson.NewFromAny(map[string]interface{}{ - "target": "target", - "alignmentPeriod": "cloud-monitoring-auto", - }) - - qes, err := executor.buildQueryExecutors(tsdbQuery) - So(err, ShouldBeNil) - queries := getCloudMonitoringQueriesFromInterface(qes) - So(queries[0].Params["aggregation.alignmentPeriod"][0], ShouldEqual, `+60s`) - }) - - Convey("and range is 23 hours", func() { - tsdbQuery.TimeRange.From = "1538034567985" - tsdbQuery.TimeRange.To = "1538117367985" - tsdbQuery.Queries[0].Model = simplejson.NewFromAny(map[string]interface{}{ - "target": "target", - "alignmentPeriod": "cloud-monitoring-auto", - }) - - qes, err := executor.buildQueryExecutors(tsdbQuery) - So(err, ShouldBeNil) - queries := getCloudMonitoringQueriesFromInterface(qes) - So(queries[0].Params["aggregation.alignmentPeriod"][0], ShouldEqual, `+300s`) - }) - - Convey("and range is 7 days", func() { - tsdbQuery.TimeRange.From = "1538036324073" - tsdbQuery.TimeRange.To = "1538641124073" - tsdbQuery.Queries[0].Model = simplejson.NewFromAny(map[string]interface{}{ - "target": "target", - "alignmentPeriod": "cloud-monitoring-auto", - }) - - qes, err := executor.buildQueryExecutors(tsdbQuery) - So(err, ShouldBeNil) - queries := getCloudMonitoringQueriesFromInterface(qes) - So(queries[0].Params["aggregation.alignmentPeriod"][0], ShouldEqual, `+3600s`) - }) - }) - - Convey("and alignmentPeriod is set to stackdriver-auto", func() { // legacy - Convey("and range is two hours", func() { - tsdbQuery.TimeRange.From = "1538033322461" - tsdbQuery.TimeRange.To = "1538040522461" - tsdbQuery.Queries[0].Model = simplejson.NewFromAny(map[string]interface{}{ - "target": "target", - "alignmentPeriod": "stackdriver-auto", - }) - - qes, err := executor.buildQueryExecutors(tsdbQuery) - So(err, ShouldBeNil) - queries := getCloudMonitoringQueriesFromInterface(qes) - So(queries[0].Params["aggregation.alignmentPeriod"][0], ShouldEqual, `+60s`) - - 
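[editor's note] The "{{metric.name}} / {{metric.service}}" expectations above imply that a metric type such as "compute.googleapis.com/instance/cpu/usage_time" is split into service "compute" and name "instance/cpu/usage_time". A sketch of that split, inferred from the expected frame names rather than taken from the plugin source:

package main

import (
	"fmt"
	"strings"
)

// metricNameAndService derives the alias tokens from a metric type:
// service is the first label of the domain, name is everything after
// the first path separator. Inferred rule, shown for clarity.
func metricNameAndService(metricType string) (name, service string) {
	slash := strings.Index(metricType, "/")
	if slash < 0 {
		return metricType, ""
	}
	service = strings.SplitN(metricType, ".", 2)[0]
	name = metricType[slash+1:]
	return name, service
}

func main() {
	name, service := metricNameAndService("compute.googleapis.com/instance/cpu/usage_time")
	fmt.Printf("metric %s service %s\n", name, service) // metric instance/cpu/usage_time service compute
}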
Convey("and generated deep link has correct parameters", func() { - // assign resource type to query parameters to be included in the deep link filter - // in the actual workflow this information comes from the response of the Monitoring API - queries[0].Params.Set("resourceType", "a/resource/type") - dl := queries[0].buildDeepLink() - - expectedTimeSelection := map[string]string{ - "timeRange": "custom", - "start": "2018-09-27T07:28:42Z", - "end": "2018-09-27T09:28:42Z", - } - expectedTimeSeriesFilter := map[string]interface{}{ - "minAlignmentPeriod": `60s`, - } - verifyDeepLink(dl, expectedTimeSelection, expectedTimeSeriesFilter) - }) - }) - - Convey("and range is 22 hours", func() { - tsdbQuery.TimeRange.From = "1538034524922" - tsdbQuery.TimeRange.To = "1538113724922" - tsdbQuery.Queries[0].Model = simplejson.NewFromAny(map[string]interface{}{ - "target": "target", - "alignmentPeriod": "stackdriver-auto", - }) - - qes, err := executor.buildQueryExecutors(tsdbQuery) - So(err, ShouldBeNil) - queries := getCloudMonitoringQueriesFromInterface(qes) - So(queries[0].Params["aggregation.alignmentPeriod"][0], ShouldEqual, `+60s`) - - Convey("and generated deep link has correct parameters", func() { - // assign resource type to query parameters to be included in the deep link filter - // in the actual workflow this information comes from the response of the Monitoring API - queries[0].Params.Set("resourceType", "a/resource/type") - dl := queries[0].buildDeepLink() - - expectedTimeSelection := map[string]string{ - "timeRange": "custom", - "start": "2018-09-27T07:48:44Z", - "end": "2018-09-28T05:48:44Z", - } - expectedTimeSeriesFilter := map[string]interface{}{ - "minAlignmentPeriod": `60s`, - } - verifyDeepLink(dl, expectedTimeSelection, expectedTimeSeriesFilter) - }) - }) - - Convey("and range is 23 hours", func() { - tsdbQuery.TimeRange.From = "1538034567985" - tsdbQuery.TimeRange.To = "1538117367985" - tsdbQuery.Queries[0].Model = simplejson.NewFromAny(map[string]interface{}{ - "target": "target", - "alignmentPeriod": "stackdriver-auto", - }) - - qes, err := executor.buildQueryExecutors(tsdbQuery) - So(err, ShouldBeNil) - queries := getCloudMonitoringQueriesFromInterface(qes) - So(queries[0].Params["aggregation.alignmentPeriod"][0], ShouldEqual, `+300s`) - - Convey("and generated deep link has correct parameters", func() { - // assign resource type to query parameters to be included in the deep link filter - // in the actual workflow this information comes from the response of the Monitoring API - queries[0].Params.Set("resourceType", "a/resource/type") - dl := queries[0].buildDeepLink() - - expectedTimeSelection := map[string]string{ - "timeRange": "custom", - "start": "2018-09-27T07:49:27Z", - "end": "2018-09-28T06:49:27Z", - } - expectedTimeSeriesFilter := map[string]interface{}{ - "minAlignmentPeriod": `300s`, - } - verifyDeepLink(dl, expectedTimeSelection, expectedTimeSeriesFilter) - }) - }) - - Convey("and range is 7 days", func() { - tsdbQuery.TimeRange.From = "1538036324073" - tsdbQuery.TimeRange.To = "1538641124073" - tsdbQuery.Queries[0].Model = simplejson.NewFromAny(map[string]interface{}{ - "target": "target", - "alignmentPeriod": "stackdriver-auto", - }) - - qes, err := executor.buildQueryExecutors(tsdbQuery) - So(err, ShouldBeNil) - queries := getCloudMonitoringQueriesFromInterface(qes) - So(queries[0].Params["aggregation.alignmentPeriod"][0], ShouldEqual, `+3600s`) - - Convey("and generated deep link has correct parameters", func() { - // assign resource type to query parameters 
to be included in the deep link filter - // in the actual workflow this information comes from the response of the Monitoring API - queries[0].Params.Set("resourceType", "a/resource/type") - dl := queries[0].buildDeepLink() - - expectedTimeSelection := map[string]string{ - "timeRange": "custom", - "start": "2018-09-27T08:18:44Z", - "end": "2018-10-04T08:18:44Z", - } - expectedTimeSeriesFilter := map[string]interface{}{ - "minAlignmentPeriod": `3600s`, - } - verifyDeepLink(dl, expectedTimeSelection, expectedTimeSeriesFilter) - }) - }) - }) - - Convey("and alignmentPeriod is set in frontend", func() { - Convey("and alignment period is within accepted range", func() { - tsdbQuery.Queries[0].IntervalMs = 1000 - tsdbQuery.Queries[0].Model = simplejson.NewFromAny(map[string]interface{}{ - "alignmentPeriod": "+600s", - }) - - qes, err := executor.buildQueryExecutors(tsdbQuery) - So(err, ShouldBeNil) - queries := getCloudMonitoringQueriesFromInterface(qes) - So(queries[0].Params["aggregation.alignmentPeriod"][0], ShouldEqual, `+600s`) - - Convey("and generated deep link has correct parameters", func() { - // assign resource type to query parameters to be included in the deep link filter - // in the actual workflow this information comes from the response of the Monitoring API - queries[0].Params.Set("resourceType", "a/resource/type") - dl := queries[0].buildDeepLink() - - expectedTimeSelection := map[string]string{ - "timeRange": "custom", - "start": "2018-03-15T13:00:00Z", - "end": "2018-03-15T13:34:00Z", - } - expectedTimeSeriesFilter := map[string]interface{}{ - "minAlignmentPeriod": `600s`, - } - verifyDeepLink(dl, expectedTimeSelection, expectedTimeSeriesFilter) - }) - }) - }) - - Convey("and query has aggregation mean set", func() { - tsdbQuery.Queries[0].Model = simplejson.NewFromAny(map[string]interface{}{ - "metricType": "a/metric/type", - "crossSeriesReducer": "REDUCE_SUM", - "view": "FULL", - }) - - qes, err := executor.buildQueryExecutors(tsdbQuery) - So(err, ShouldBeNil) - queries := getCloudMonitoringQueriesFromInterface(qes) - - So(len(queries), ShouldEqual, 1) - So(queries[0].RefID, ShouldEqual, "A") - So(queries[0].Target, ShouldEqual, "aggregation.alignmentPeriod=%2B60s&aggregation.crossSeriesReducer=REDUCE_SUM&aggregation.perSeriesAligner=ALIGN_MEAN&filter=metric.type%3D%22a%2Fmetric%2Ftype%22&interval.endTime=2018-03-15T13%3A34%3A00Z&interval.startTime=2018-03-15T13%3A00%3A00Z&view=FULL") - So(len(queries[0].Params), ShouldEqual, 7) - So(queries[0].Params["interval.startTime"][0], ShouldEqual, "2018-03-15T13:00:00Z") - So(queries[0].Params["interval.endTime"][0], ShouldEqual, "2018-03-15T13:34:00Z") - So(queries[0].Params["aggregation.crossSeriesReducer"][0], ShouldEqual, "REDUCE_SUM") - So(queries[0].Params["aggregation.perSeriesAligner"][0], ShouldEqual, "ALIGN_MEAN") - So(queries[0].Params["aggregation.alignmentPeriod"][0], ShouldEqual, "+60s") - So(queries[0].Params["filter"][0], ShouldEqual, "metric.type=\"a/metric/type\"") - So(queries[0].Params["view"][0], ShouldEqual, "FULL") - - Convey("and generated deep link has correct parameters", func() { - // assign resource type to query parameters to be included in the deep link filter - // in the actual workflow this information comes from the response of the Monitoring API - queries[0].Params.Set("resourceType", "a/resource/type") - dl := queries[0].buildDeepLink() - - expectedTimeSelection := map[string]string{ - "timeRange": "custom", - "start": "2018-03-15T13:00:00Z", - "end": "2018-03-15T13:34:00Z", - } - 
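[editor's note] The deleted "-auto" cases pin down an alignment-period table: 2h and 22h ranges resolve to +60s, 23h to +300s, 7d to +3600s. A sketch of that rule; only the 23-hour boundary is fixed by these tests, so the 300s/3600s cut-off used here (6 days) is an assumption, not the plugin's actual constant:

package main

import (
	"fmt"
	"time"
)

// autoAlignmentPeriod reproduces the thresholds implied by the deleted
// Convey cases for "stackdriver-auto" / "cloud-monitoring-auto".
func autoAlignmentPeriod(r time.Duration) string {
	switch {
	case r < 23*time.Hour:
		return "+60s"
	case r < 6*24*time.Hour: // assumed boundary; not pinned by the tests
		return "+300s"
	default:
		return "+3600s"
	}
}

func main() {
	for _, r := range []time.Duration{2 * time.Hour, 23 * time.Hour, 7 * 24 * time.Hour} {
		fmt.Println(autoAlignmentPeriod(r)) // +60s, +300s, +3600s
	}
}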
expectedTimeSeriesFilter := map[string]interface{}{ - "minAlignmentPeriod": `60s`, - "crossSeriesReducer": "REDUCE_SUM", - "perSeriesAligner": "ALIGN_MEAN", - "filter": "resource.type=\"a/resource/type\" metric.type=\"a/metric/type\"", - } - verifyDeepLink(dl, expectedTimeSelection, expectedTimeSeriesFilter) - }) - }) - - Convey("and query has group bys", func() { - tsdbQuery.Queries[0].Model = simplejson.NewFromAny(map[string]interface{}{ - "metricType": "a/metric/type", - "crossSeriesReducer": "REDUCE_NONE", - "groupBys": []interface{}{"metric.label.group1", "metric.label.group2"}, - "view": "FULL", - }) - - qes, err := executor.buildQueryExecutors(tsdbQuery) - So(err, ShouldBeNil) - queries := getCloudMonitoringQueriesFromInterface(qes) - - So(len(queries), ShouldEqual, 1) - So(queries[0].RefID, ShouldEqual, "A") - So(queries[0].Target, ShouldEqual, "aggregation.alignmentPeriod=%2B60s&aggregation.crossSeriesReducer=REDUCE_NONE&aggregation.groupByFields=metric.label.group1&aggregation.groupByFields=metric.label.group2&aggregation.perSeriesAligner=ALIGN_MEAN&filter=metric.type%3D%22a%2Fmetric%2Ftype%22&interval.endTime=2018-03-15T13%3A34%3A00Z&interval.startTime=2018-03-15T13%3A00%3A00Z&view=FULL") - So(len(queries[0].Params), ShouldEqual, 8) - So(queries[0].Params["interval.startTime"][0], ShouldEqual, "2018-03-15T13:00:00Z") - So(queries[0].Params["interval.endTime"][0], ShouldEqual, "2018-03-15T13:34:00Z") - So(queries[0].Params["aggregation.perSeriesAligner"][0], ShouldEqual, "ALIGN_MEAN") - So(queries[0].Params["aggregation.groupByFields"][0], ShouldEqual, "metric.label.group1") - So(queries[0].Params["aggregation.groupByFields"][1], ShouldEqual, "metric.label.group2") - So(queries[0].Params["filter"][0], ShouldEqual, "metric.type=\"a/metric/type\"") - So(queries[0].Params["view"][0], ShouldEqual, "FULL") - - Convey("and generated deep link has correct parameters", func() { - // assign resource type to query parameters to be included in the deep link filter - // in the actual workflow this information comes from the response of the Monitoring API - queries[0].Params.Set("resourceType", "a/resource/type") - dl := queries[0].buildDeepLink() - - expectedTimeSelection := map[string]string{ - "timeRange": "custom", - "start": "2018-03-15T13:00:00Z", - "end": "2018-03-15T13:34:00Z", - } - expectedTimeSeriesFilter := map[string]interface{}{ - "minAlignmentPeriod": `60s`, - "perSeriesAligner": "ALIGN_MEAN", - "filter": "resource.type=\"a/resource/type\" metric.type=\"a/metric/type\"", - "groupByFields": []interface{}{"metric.label.group1", "metric.label.group2"}, - } - verifyDeepLink(dl, expectedTimeSelection, expectedTimeSeriesFilter) - }) + assert.Equal(t, 3, len(frames)) + assert.Equal(t, "metric instance/cpu/usage_time service compute", frames[0].Fields[1].Name) + assert.Equal(t, "metric instance/cpu/usage_time service compute", frames[1].Fields[1].Name) + assert.Equal(t, "metric instance/cpu/usage_time service compute", frames[2].Fields[1].Name) }) }) - Convey("Parse queries from frontend and build Google Cloud Monitoring API queries", func() { - fromStart := time.Date(2018, 3, 15, 13, 0, 0, 0, time.UTC).In(time.Local) - tsdbQuery := &tsdb.TsdbQuery{ - TimeRange: &tsdb.TimeRange{ - From: fmt.Sprintf("%v", fromStart.Unix()*1000), - To: fmt.Sprintf("%v", fromStart.Add(34*time.Minute).Unix()*1000), - }, - Queries: []*tsdb.Query{ - { - Model: simplejson.NewFromAny(map[string]interface{}{ - "queryType": metricQueryType, - "metricQuery": map[string]interface{}{ - "metricType": 
"a/metric/type", - "view": "FULL", - "aliasBy": "testalias", - "type": "timeSeriesQuery", - "groupBys": []interface{}{"metric.label.group1", "metric.label.group2"}, - }, - }), - RefId: "A", - }, - }, + t.Run("when data from query is distribution with exponential bounds", func(t *testing.T) { + data, err := loadTestFile("./test-data/3-series-response-distribution-exponential.json") + require.NoError(t, err) + assert.Equal(t, 1, len(data.TimeSeries)) + + res := &plugins.DataQueryResult{Meta: simplejson.New(), RefID: "A"} + query := &cloudMonitoringTimeSeriesFilter{Params: url.Values{}, AliasBy: "{{bucket}}"} + err = query.parseResponse(res, data, "") + require.NoError(t, err) + frames, err := res.Dataframes.Decoded() + require.NoError(t, err) + assert.Equal(t, 11, len(frames)) + for i := 0; i < 11; i++ { + if i == 0 { + assert.Equal(t, "0", frames[i].Fields[1].Name) + } else { + assert.Equal(t, strconv.FormatInt(int64(math.Pow(float64(2), float64(i-1))), 10), frames[i].Fields[1].Name) + } + assert.Equal(t, 3, frames[i].Fields[0].Len()) } - Convey("and query type is metrics", func() { - qes, err := executor.buildQueryExecutors(tsdbQuery) - So(err, ShouldBeNil) - queries := getCloudMonitoringQueriesFromInterface(qes) + assert.Equal(t, time.Unix(int64(1536668940000/1000), 0).UTC(), frames[0].Fields[0].At(0)) + assert.Equal(t, time.Unix(int64(1536669000000/1000), 0).UTC(), frames[0].Fields[0].At(1)) + assert.Equal(t, time.Unix(int64(1536669060000/1000), 0).UTC(), frames[0].Fields[0].At(2)) - So(len(queries), ShouldEqual, 1) - So(queries[0].RefID, ShouldEqual, "A") - So(queries[0].Target, ShouldEqual, "aggregation.alignmentPeriod=%2B60s&aggregation.crossSeriesReducer=REDUCE_NONE&aggregation.groupByFields=metric.label.group1&aggregation.groupByFields=metric.label.group2&aggregation.perSeriesAligner=ALIGN_MEAN&filter=metric.type%3D%22a%2Fmetric%2Ftype%22&interval.endTime=2018-03-15T13%3A34%3A00Z&interval.startTime=2018-03-15T13%3A00%3A00Z&view=FULL") - So(len(queries[0].Params), ShouldEqual, 8) - So(queries[0].Params["aggregation.groupByFields"][0], ShouldEqual, "metric.label.group1") - So(queries[0].Params["aggregation.groupByFields"][1], ShouldEqual, "metric.label.group2") - So(queries[0].Params["interval.startTime"][0], ShouldEqual, "2018-03-15T13:00:00Z") - So(queries[0].Params["interval.endTime"][0], ShouldEqual, "2018-03-15T13:34:00Z") - So(queries[0].Params["aggregation.perSeriesAligner"][0], ShouldEqual, "ALIGN_MEAN") - So(queries[0].Params["filter"][0], ShouldEqual, "metric.type=\"a/metric/type\"") - So(queries[0].Params["view"][0], ShouldEqual, "FULL") - So(queries[0].AliasBy, ShouldEqual, "testalias") - So(queries[0].GroupBys, ShouldResemble, []string{"metric.label.group1", "metric.label.group2"}) + assert.Equal(t, "0", frames[0].Fields[1].Name) + assert.Equal(t, "1", frames[1].Fields[1].Name) + assert.Equal(t, "2", frames[2].Fields[1].Name) + assert.Equal(t, "4", frames[3].Fields[1].Name) + assert.Equal(t, "8", frames[4].Fields[1].Name) - Convey("and generated deep link has correct parameters", func() { - // assign resource type to query parameters to be included in the deep link filter - // in the actual workflow this information comes from the response of the Monitoring API - queries[0].Params.Set("resourceType", "a/resource/type") - dl := queries[0].buildDeepLink() + assert.Equal(t, float64(1), frames[8].Fields[1].At(0)) + assert.Equal(t, float64(1), frames[9].Fields[1].At(0)) + assert.Equal(t, float64(1), frames[10].Fields[1].At(0)) + assert.Equal(t, float64(0), 
frames[8].Fields[1].At(1)) + assert.Equal(t, float64(0), frames[9].Fields[1].At(1)) + assert.Equal(t, float64(1), frames[10].Fields[1].At(1)) + assert.Equal(t, float64(0), frames[8].Fields[1].At(2)) + assert.Equal(t, float64(1), frames[9].Fields[1].At(2)) + assert.Equal(t, float64(0), frames[10].Fields[1].At(2)) + }) - expectedTimeSelection := map[string]string{ - "timeRange": "custom", - "start": "2018-03-15T13:00:00Z", - "end": "2018-03-15T13:34:00Z", - } - expectedTimeSeriesFilter := map[string]interface{}{ - "minAlignmentPeriod": `60s`, - "perSeriesAligner": "ALIGN_MEAN", - "filter": "resource.type=\"a/resource/type\" metric.type=\"a/metric/type\"", - "groupByFields": []interface{}{"metric.label.group1", "metric.label.group2"}, - } - verifyDeepLink(dl, expectedTimeSelection, expectedTimeSeriesFilter) - }) + t.Run("when data from query is distribution with explicit bounds", func(t *testing.T) { + data, err := loadTestFile("./test-data/4-series-response-distribution-explicit.json") + require.NoError(t, err) + assert.Equal(t, 1, len(data.TimeSeries)) - Convey("and editor mode is MQL", func() { - tsdbQuery.Queries[0].Model = simplejson.NewFromAny(map[string]interface{}{ - "queryType": metricQueryType, - "metricQuery": map[string]interface{}{ - "editorMode": mqlEditorMode, - "projectName": "test-proj", - "query": "test-query", - "aliasBy": "test-alias", - }, - "sloQuery": map[string]interface{}{}, - }) + res := &plugins.DataQueryResult{Meta: simplejson.New(), RefID: "A"} + query := &cloudMonitoringTimeSeriesFilter{Params: url.Values{}, AliasBy: "{{bucket}}"} + err = query.parseResponse(res, data, "") + require.NoError(t, err) + frames, err := res.Dataframes.Decoded() + require.NoError(t, err) + assert.Equal(t, 33, len(frames)) + for i := 0; i < 33; i++ { + if i == 0 { + assert.Equal(t, "0", frames[i].Fields[1].Name) + } + assert.Equal(t, 2, frames[i].Fields[1].Len()) + } - qes, err := executor.buildQueryExecutors(tsdbQuery) - So(err, ShouldBeNil) - queries := make([]*cloudMonitoringTimeSeriesQuery, 0) - for _, qi := range qes { - q, ok := qi.(*cloudMonitoringTimeSeriesQuery) - So(ok, ShouldBeTrue) - queries = append(queries, q) - } + assert.Equal(t, time.Unix(int64(1550859086000/1000), 0).UTC(), frames[0].Fields[0].At(0)) + assert.Equal(t, time.Unix(int64(1550859146000/1000), 0).UTC(), frames[0].Fields[0].At(1)) - So(len(queries), ShouldEqual, 1) - So(queries[0].RefID, ShouldEqual, "A") - So(queries[0].ProjectName, ShouldEqual, "test-proj") - So(queries[0].Query, ShouldEqual, "test-query") - So(queries[0].AliasBy, ShouldEqual, "test-alias") - }) + assert.Equal(t, "0", frames[0].Fields[1].Name) + assert.Equal(t, "0.01", frames[1].Fields[1].Name) + assert.Equal(t, "0.05", frames[2].Fields[1].Name) + assert.Equal(t, "0.1", frames[3].Fields[1].Name) + + assert.Equal(t, float64(381), frames[8].Fields[1].At(0)) + assert.Equal(t, float64(212), frames[9].Fields[1].At(0)) + assert.Equal(t, float64(56), frames[10].Fields[1].At(0)) + assert.Equal(t, float64(375), frames[8].Fields[1].At(1)) + assert.Equal(t, float64(213), frames[9].Fields[1].At(1)) + assert.Equal(t, float64(56), frames[10].Fields[1].At(1)) + }) + + t.Run("when data from query returns metadata system labels", func(t *testing.T) { + data, err := loadTestFile("./test-data/5-series-response-meta-data.json") + require.NoError(t, err) + assert.Equal(t, 3, len(data.TimeSeries)) + + res := &plugins.DataQueryResult{Meta: simplejson.New(), RefID: "A"} + query := &cloudMonitoringTimeSeriesFilter{Params: url.Values{}, AliasBy: "{{bucket}}"} + err 
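[editor's note] The exponential-distribution loop above names bucket 0 "0" and bucket i (i > 0) after scale * growthFactor^(i-1); scale 1 and growth factor 2 are what the fixture appears to use. The naming rule in isolation, with those assumed values:

package main

import (
	"fmt"
	"math"
	"strconv"
)

// bucketName mirrors the formula asserted in the test loop:
// bucket 0 is "0", bucket i is scale*growthFactor^(i-1).
func bucketName(i int, scale, growthFactor float64) string {
	if i == 0 {
		return "0"
	}
	return strconv.FormatInt(int64(scale*math.Pow(growthFactor, float64(i-1))), 10)
}

func main() {
	for i := 0; i < 6; i++ {
		fmt.Print(bucketName(i, 1, 2), " ") // 0 1 2 4 8 16
	}
	fmt.Println()
}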
= query.parseResponse(res, data, "") + require.NoError(t, err) + labels := res.Meta.Get("labels").Interface().(map[string][]string) + frames, err := res.Dataframes.Decoded() + require.NoError(t, err) + assert.Equal(t, 3, len(frames)) + + assert.Equal(t, 5, len(labels["metadata.system_labels.test"])) + assert.Contains(t, labels["metadata.system_labels.test"], "value1") + assert.Contains(t, labels["metadata.system_labels.test"], "value2") + assert.Contains(t, labels["metadata.system_labels.test"], "value3") + assert.Contains(t, labels["metadata.system_labels.test"], "value4") + assert.Contains(t, labels["metadata.system_labels.test"], "value5") + + assert.Equal(t, 2, len(labels["metadata.system_labels.region"])) + assert.Contains(t, labels["metadata.system_labels.region"], "us-central1") + assert.Contains(t, labels["metadata.system_labels.region"], "us-west1") + + assert.Equal(t, 2, len(labels["metadata.user_labels.region"])) + assert.Contains(t, labels["metadata.user_labels.region"], "region1") + assert.Contains(t, labels["metadata.user_labels.region"], "region3") + + assert.Equal(t, 2, len(labels["metadata.user_labels.name"])) + assert.Contains(t, labels["metadata.user_labels.name"], "name1") + assert.Contains(t, labels["metadata.user_labels.name"], "name3") + }) + + t.Run("when data from query returns metadata system labels and alias by is defined", func(t *testing.T) { + data, err := loadTestFile("./test-data/5-series-response-meta-data.json") + require.NoError(t, err) + assert.Equal(t, 3, len(data.TimeSeries)) + + t.Run("and systemlabel contains key with array of string", func(t *testing.T) { + res := &plugins.DataQueryResult{Meta: simplejson.New(), RefID: "A"} + query := &cloudMonitoringTimeSeriesFilter{Params: url.Values{}, AliasBy: "{{metadata.system_labels.test}}"} + err = query.parseResponse(res, data, "") + require.NoError(t, err) + frames, err := res.Dataframes.Decoded() + require.NoError(t, err) + assert.Equal(t, 3, len(frames)) + fmt.Println(frames[0].Fields[1].Name) + assert.Equal(t, "value1, value2", frames[0].Fields[1].Name) + assert.Equal(t, "value1, value2, value3", frames[1].Fields[1].Name) + assert.Equal(t, "value1, value2, value4, value5", frames[2].Fields[1].Name) }) - Convey("and query type is SLOs", func() { - tsdbQuery.Queries[0].Model = simplejson.NewFromAny(map[string]interface{}{ - "queryType": sloQueryType, - "metricQuery": map[string]interface{}{}, - "sloQuery": map[string]interface{}{ - "projectName": "test-proj", - "alignmentPeriod": "stackdriver-auto", - "perSeriesAligner": "ALIGN_NEXT_OLDER", - "aliasBy": "", - "selectorName": "select_slo_health", - "serviceId": "test-service", - "sloId": "test-slo", - }, - }) - - qes, err := executor.buildQueryExecutors(tsdbQuery) - So(err, ShouldBeNil) - queries := getCloudMonitoringQueriesFromInterface(qes) - - So(len(queries), ShouldEqual, 1) - So(queries[0].RefID, ShouldEqual, "A") - So(queries[0].Params["interval.startTime"][0], ShouldEqual, "2018-03-15T13:00:00Z") - So(queries[0].Params["interval.endTime"][0], ShouldEqual, "2018-03-15T13:34:00Z") - So(queries[0].Params["aggregation.alignmentPeriod"][0], ShouldEqual, `+60s`) - So(queries[0].AliasBy, ShouldEqual, "") - So(queries[0].Params["aggregation.perSeriesAligner"][0], ShouldEqual, "ALIGN_MEAN") - So(queries[0].Target, ShouldEqual, 
`aggregation.alignmentPeriod=%2B60s&aggregation.perSeriesAligner=ALIGN_MEAN&filter=select_slo_health%28%22projects%2Ftest-proj%2Fservices%2Ftest-service%2FserviceLevelObjectives%2Ftest-slo%22%29&interval.endTime=2018-03-15T13%3A34%3A00Z&interval.startTime=2018-03-15T13%3A00%3A00Z`) - So(len(queries[0].Params), ShouldEqual, 5) - - Convey("and perSeriesAligner is inferred by SLO selector", func() { - tsdbQuery.Queries[0].Model = simplejson.NewFromAny(map[string]interface{}{ - "queryType": sloQueryType, - "metricQuery": map[string]interface{}{}, - "sloQuery": map[string]interface{}{ - "projectName": "test-proj", - "alignmentPeriod": "stackdriver-auto", - "perSeriesAligner": "ALIGN_NEXT_OLDER", - "aliasBy": "", - "selectorName": "select_slo_compliance", - "serviceId": "test-service", - "sloId": "test-slo", - }, - }) - - qes, err := executor.buildQueryExecutors(tsdbQuery) - So(err, ShouldBeNil) - queries := getCloudMonitoringQueriesFromInterface(qes) - So(queries[0].Params["aggregation.perSeriesAligner"][0], ShouldEqual, "ALIGN_NEXT_OLDER") - - Convey("and empty deep link", func() { - dl := queries[0].buildDeepLink() - So(dl, ShouldBeEmpty) - }) - }) + t.Run("and systemlabel contains key with array of string2", func(t *testing.T) { + res := &plugins.DataQueryResult{Meta: simplejson.New(), RefID: "A"} + query := &cloudMonitoringTimeSeriesFilter{Params: url.Values{}, AliasBy: "{{metadata.system_labels.test2}}"} + err = query.parseResponse(res, data, "") + require.NoError(t, err) + frames, err := res.Dataframes.Decoded() + require.NoError(t, err) + assert.Equal(t, 3, len(frames)) + assert.Equal(t, "testvalue", frames[2].Fields[1].Name) }) }) - Convey("Parse cloud monitoring response in the time series format", func() { - Convey("when data from query aggregated to one time series", func() { - data, err := loadTestFile("./test-data/1-series-response-agg-one-metric.json") - So(err, ShouldBeNil) - So(len(data.TimeSeries), ShouldEqual, 1) + t.Run("when data from query returns slo and alias by is defined", func(t *testing.T) { + data, err := loadTestFile("./test-data/6-series-response-slo.json") + require.NoError(t, err) + assert.Equal(t, 1, len(data.TimeSeries)) - res := &tsdb.QueryResult{Meta: simplejson.New(), RefId: "A"} - query := &cloudMonitoringTimeSeriesFilter{Params: url.Values{}} - err = query.parseResponse(res, data, "") - So(err, ShouldBeNil) - frames, _ := res.Dataframes.Decoded() - So(len(frames), ShouldEqual, 1) - So(frames[0].Fields[1].Name, ShouldEqual, "serviceruntime.googleapis.com/api/request_count") - So(frames[0].Fields[1].Len(), ShouldEqual, 3) - - Convey("timestamps should be in ascending order", func() { - So(frames[0].Fields[1].At(0), ShouldEqual, 0.05) - So(frames[0].Fields[0].At(0), ShouldEqual, time.Unix(int64(1536670020000/1000), 0)) - - So(frames[0].Fields[1].At(1), ShouldEqual, 1.05) - So(frames[0].Fields[0].At(1), ShouldEqual, time.Unix(int64(1536670080000/1000), 0)) - - So(frames[0].Fields[1].At(2), ShouldEqual, 1.0666666666667) - So(frames[0].Fields[0].At(2), ShouldEqual, time.Unix(int64(1536670260000/1000), 0)) - }) - }) - - Convey("when data from query with no aggregation", func() { - data, err := loadTestFile("./test-data/2-series-response-no-agg.json") - So(err, ShouldBeNil) - So(len(data.TimeSeries), ShouldEqual, 3) - - res := &tsdb.QueryResult{Meta: simplejson.New(), RefId: "A"} - query := &cloudMonitoringTimeSeriesFilter{Params: url.Values{}} - err = query.parseResponse(res, data, "") - So(err, ShouldBeNil) - frames, _ := res.Dataframes.Decoded() - - 
Convey("Should add labels to metric name", func() { - So(len(frames), ShouldEqual, 3) - So(frames[0].Fields[1].Name, ShouldEqual, "compute.googleapis.com/instance/cpu/usage_time collector-asia-east-1") - So(frames[1].Fields[1].Name, ShouldEqual, "compute.googleapis.com/instance/cpu/usage_time collector-europe-west-1") - So(frames[2].Fields[1].Name, ShouldEqual, "compute.googleapis.com/instance/cpu/usage_time collector-us-east-1") - }) - - Convey("Should parse to time series", func() { - So(frames[0].Fields[1].Len(), ShouldEqual, 3) - So(frames[0].Fields[1].At(0), ShouldEqual, 9.8566497180145) - So(frames[0].Fields[1].At(1), ShouldEqual, 9.7323568146676) - So(frames[0].Fields[1].At(2), ShouldEqual, 9.7730520330369) - }) - - Convey("Should add meta for labels to the response", func() { - labels := res.Meta.Get("labels").Interface().(map[string][]string) - So(labels, ShouldNotBeNil) - So(len(labels["metric.label.instance_name"]), ShouldEqual, 3) - So(labels["metric.label.instance_name"], ShouldContain, "collector-asia-east-1") - So(labels["metric.label.instance_name"], ShouldContain, "collector-europe-west-1") - So(labels["metric.label.instance_name"], ShouldContain, "collector-us-east-1") - - So(len(labels["resource.label.zone"]), ShouldEqual, 3) - So(labels["resource.label.zone"], ShouldContain, "asia-east1-a") - So(labels["resource.label.zone"], ShouldContain, "europe-west1-b") - So(labels["resource.label.zone"], ShouldContain, "us-east1-b") - - So(len(labels["resource.label.project_id"]), ShouldEqual, 1) - So(labels["resource.label.project_id"][0], ShouldEqual, "grafana-prod") - }) - }) - - Convey("when data from query with no aggregation and group bys", func() { - data, err := loadTestFile("./test-data/2-series-response-no-agg.json") - So(err, ShouldBeNil) - So(len(data.TimeSeries), ShouldEqual, 3) - - res := &tsdb.QueryResult{Meta: simplejson.New(), RefId: "A"} - query := &cloudMonitoringTimeSeriesFilter{Params: url.Values{}, GroupBys: []string{"metric.label.instance_name", "resource.label.zone"}} - err = query.parseResponse(res, data, "") - So(err, ShouldBeNil) - frames, _ := res.Dataframes.Decoded() - Convey("Should add instance name and zone labels to metric name", func() { - So(len(frames), ShouldEqual, 3) - So(frames[0].Fields[1].Name, ShouldEqual, "compute.googleapis.com/instance/cpu/usage_time collector-asia-east-1 asia-east1-a") - So(frames[1].Fields[1].Name, ShouldEqual, "compute.googleapis.com/instance/cpu/usage_time collector-europe-west-1 europe-west1-b") - So(frames[2].Fields[1].Name, ShouldEqual, "compute.googleapis.com/instance/cpu/usage_time collector-us-east-1 us-east1-b") - }) - }) - - Convey("when data from query with no aggregation and alias by", func() { - data, err := loadTestFile("./test-data/2-series-response-no-agg.json") - So(err, ShouldBeNil) - So(len(data.TimeSeries), ShouldEqual, 3) - - res := &tsdb.QueryResult{Meta: simplejson.New(), RefId: "A"} - - Convey("and the alias pattern is for metric type, a metric label and a resource label", func() { - query := &cloudMonitoringTimeSeriesFilter{Params: url.Values{}, AliasBy: "{{metric.type}} - {{metric.label.instance_name}} - {{resource.label.zone}}", GroupBys: []string{"metric.label.instance_name", "resource.label.zone"}} - err = query.parseResponse(res, data, "") - So(err, ShouldBeNil) - frames, _ := res.Dataframes.Decoded() - Convey("Should use alias by formatting and only show instance name", func() { - So(len(frames), ShouldEqual, 3) - So(frames[0].Fields[1].Name, ShouldEqual, 
"compute.googleapis.com/instance/cpu/usage_time - collector-asia-east-1 - asia-east1-a") - So(frames[1].Fields[1].Name, ShouldEqual, "compute.googleapis.com/instance/cpu/usage_time - collector-europe-west-1 - europe-west1-b") - So(frames[2].Fields[1].Name, ShouldEqual, "compute.googleapis.com/instance/cpu/usage_time - collector-us-east-1 - us-east1-b") - }) - }) - - Convey("and the alias pattern is for metric name", func() { - query := &cloudMonitoringTimeSeriesFilter{Params: url.Values{}, AliasBy: "metric {{metric.name}} service {{metric.service}}", GroupBys: []string{"metric.label.instance_name", "resource.label.zone"}} - err = query.parseResponse(res, data, "") - So(err, ShouldBeNil) - frames, _ := res.Dataframes.Decoded() - Convey("Should use alias by formatting and only show instance name", func() { - So(len(frames), ShouldEqual, 3) - So(frames[0].Fields[1].Name, ShouldEqual, "metric instance/cpu/usage_time service compute") - So(frames[1].Fields[1].Name, ShouldEqual, "metric instance/cpu/usage_time service compute") - So(frames[2].Fields[1].Name, ShouldEqual, "metric instance/cpu/usage_time service compute") - }) - }) - }) - - Convey("when data from query is distribution with exponential bounds", func() { - data, err := loadTestFile("./test-data/3-series-response-distribution-exponential.json") - So(err, ShouldBeNil) - So(len(data.TimeSeries), ShouldEqual, 1) - - res := &tsdb.QueryResult{Meta: simplejson.New(), RefId: "A"} - query := &cloudMonitoringTimeSeriesFilter{Params: url.Values{}, AliasBy: "{{bucket}}"} - err = query.parseResponse(res, data, "") - So(err, ShouldBeNil) - frames, _ := res.Dataframes.Decoded() - So(len(frames), ShouldEqual, 11) - for i := 0; i < 11; i++ { - if i == 0 { - So(frames[i].Fields[1].Name, ShouldEqual, "0") - } else { - So(frames[i].Fields[1].Name, ShouldEqual, strconv.FormatInt(int64(math.Pow(float64(2), float64(i-1))), 10)) - } - So(frames[i].Fields[0].Len(), ShouldEqual, 3) + t.Run("and alias by is expanded", func(t *testing.T) { + res := &plugins.DataQueryResult{Meta: simplejson.New(), RefID: "A"} + query := &cloudMonitoringTimeSeriesFilter{ + Params: url.Values{}, + ProjectName: "test-proj", + Selector: "select_slo_compliance", + Service: "test-service", + Slo: "test-slo", + AliasBy: "{{project}} - {{service}} - {{slo}} - {{selector}}", } - - Convey("timestamps should be in ascending order", func() { - So(frames[0].Fields[0].At(0), ShouldEqual, time.Unix(int64(1536668940000/1000), 0)) - So(frames[0].Fields[0].At(1), ShouldEqual, time.Unix(int64(1536669000000/1000), 0)) - So(frames[0].Fields[0].At(2), ShouldEqual, time.Unix(int64(1536669060000/1000), 0)) - }) - - Convey("bucket bounds should be correct", func() { - So(frames[0].Fields[1].Name, ShouldEqual, "0") - So(frames[1].Fields[1].Name, ShouldEqual, "1") - So(frames[2].Fields[1].Name, ShouldEqual, "2") - So(frames[3].Fields[1].Name, ShouldEqual, "4") - So(frames[4].Fields[1].Name, ShouldEqual, "8") - }) - - Convey("value should be correct", func() { - So(frames[8].Fields[1].At(0), ShouldEqual, 1) - So(frames[9].Fields[1].At(0), ShouldEqual, 1) - So(frames[10].Fields[1].At(0), ShouldEqual, 1) - So(frames[8].Fields[1].At(1), ShouldEqual, 0) - So(frames[9].Fields[1].At(1), ShouldEqual, 0) - So(frames[10].Fields[1].At(1), ShouldEqual, 1) - So(frames[8].Fields[1].At(2), ShouldEqual, 0) - So(frames[9].Fields[1].At(2), ShouldEqual, 1) - So(frames[10].Fields[1].At(2), ShouldEqual, 0) - }) - }) - - Convey("when data from query is distribution with explicit bounds", func() { - data, err := 
loadTestFile("./test-data/4-series-response-distribution-explicit.json") - So(err, ShouldBeNil) - So(len(data.TimeSeries), ShouldEqual, 1) - - res := &tsdb.QueryResult{Meta: simplejson.New(), RefId: "A"} - query := &cloudMonitoringTimeSeriesFilter{Params: url.Values{}, AliasBy: "{{bucket}}"} err = query.parseResponse(res, data, "") - So(err, ShouldBeNil) - frames, _ := res.Dataframes.Decoded() - So(len(frames), ShouldEqual, 33) - for i := 0; i < 33; i++ { - if i == 0 { - So(frames[i].Fields[1].Name, ShouldEqual, "0") - } - So(frames[i].Fields[1].Len(), ShouldEqual, 2) - } - - Convey("timestamps should be in ascending order", func() { - So(frames[0].Fields[0].At(0), ShouldEqual, time.Unix(int64(1550859086000/1000), 0)) - So(frames[0].Fields[0].At(1), ShouldEqual, time.Unix(int64(1550859146000/1000), 0)) - }) - - Convey("bucket bounds should be correct", func() { - So(frames[0].Fields[1].Name, ShouldEqual, "0") - So(frames[1].Fields[1].Name, ShouldEqual, "0.01") - So(frames[2].Fields[1].Name, ShouldEqual, "0.05") - So(frames[3].Fields[1].Name, ShouldEqual, "0.1") - }) - - Convey("value should be correct", func() { - So(frames[8].Fields[1].At(0), ShouldEqual, 381) - So(frames[9].Fields[1].At(0), ShouldEqual, 212) - So(frames[10].Fields[1].At(0), ShouldEqual, 56) - So(frames[8].Fields[1].At(1), ShouldEqual, 375) - So(frames[9].Fields[1].At(1), ShouldEqual, 213) - So(frames[10].Fields[1].At(1), ShouldEqual, 56) - }) - }) - - Convey("when data from query returns metadata system labels", func() { - data, err := loadTestFile("./test-data/5-series-response-meta-data.json") - So(err, ShouldBeNil) - So(len(data.TimeSeries), ShouldEqual, 3) - - res := &tsdb.QueryResult{Meta: simplejson.New(), RefId: "A"} - query := &cloudMonitoringTimeSeriesFilter{Params: url.Values{}, AliasBy: "{{bucket}}"} - err = query.parseResponse(res, data, "") - labels := res.Meta.Get("labels").Interface().(map[string][]string) - So(err, ShouldBeNil) - frames, _ := res.Dataframes.Decoded() - So(len(frames), ShouldEqual, 3) - - Convey("and systemlabel contains key with array of string", func() { - So(len(labels["metadata.system_labels.test"]), ShouldEqual, 5) - So(labels["metadata.system_labels.test"], ShouldContain, "value1") - So(labels["metadata.system_labels.test"], ShouldContain, "value2") - So(labels["metadata.system_labels.test"], ShouldContain, "value3") - So(labels["metadata.system_labels.test"], ShouldContain, "value4") - So(labels["metadata.system_labels.test"], ShouldContain, "value5") - }) - - Convey("and systemlabel contains key with primitive strings", func() { - So(len(labels["metadata.system_labels.region"]), ShouldEqual, 2) - So(labels["metadata.system_labels.region"], ShouldContain, "us-central1") - So(labels["metadata.system_labels.region"], ShouldContain, "us-west1") - }) - - Convey("and userLabel contains key with primitive strings", func() { - So(len(labels["metadata.user_labels.region"]), ShouldEqual, 2) - So(labels["metadata.user_labels.region"], ShouldContain, "region1") - So(labels["metadata.user_labels.region"], ShouldContain, "region3") - - So(len(labels["metadata.user_labels.name"]), ShouldEqual, 2) - So(labels["metadata.user_labels.name"], ShouldContain, "name1") - So(labels["metadata.user_labels.name"], ShouldContain, "name3") - }) - }) - Convey("when data from query returns metadata system labels and alias by is defined", func() { - data, err := loadTestFile("./test-data/5-series-response-meta-data.json") - So(err, ShouldBeNil) - So(len(data.TimeSeries), ShouldEqual, 3) - - Convey("and 
systemlabel contains key with array of string", func() { - res := &tsdb.QueryResult{Meta: simplejson.New(), RefId: "A"} - query := &cloudMonitoringTimeSeriesFilter{Params: url.Values{}, AliasBy: "{{metadata.system_labels.test}}"} - err = query.parseResponse(res, data, "") - So(err, ShouldBeNil) - frames, _ := res.Dataframes.Decoded() - So(len(frames), ShouldEqual, 3) - fmt.Println(frames[0].Fields[1].Name) - So(frames[0].Fields[1].Name, ShouldEqual, "value1, value2") - So(frames[1].Fields[1].Name, ShouldEqual, "value1, value2, value3") - So(frames[2].Fields[1].Name, ShouldEqual, "value1, value2, value4, value5") - }) - - Convey("and systemlabel contains key with array of string2", func() { - res := &tsdb.QueryResult{Meta: simplejson.New(), RefId: "A"} - query := &cloudMonitoringTimeSeriesFilter{Params: url.Values{}, AliasBy: "{{metadata.system_labels.test2}}"} - err = query.parseResponse(res, data, "") - So(err, ShouldBeNil) - frames, _ := res.Dataframes.Decoded() - So(len(frames), ShouldEqual, 3) - So(frames[2].Fields[1].Name, ShouldEqual, "testvalue") - }) - }) - - Convey("when data from query returns slo and alias by is defined", func() { - data, err := loadTestFile("./test-data/6-series-response-slo.json") - So(err, ShouldBeNil) - So(len(data.TimeSeries), ShouldEqual, 1) - - Convey("and alias by is expanded", func() { - res := &tsdb.QueryResult{Meta: simplejson.New(), RefId: "A"} - query := &cloudMonitoringTimeSeriesFilter{ - Params: url.Values{}, - ProjectName: "test-proj", - Selector: "select_slo_compliance", - Service: "test-service", - Slo: "test-slo", - AliasBy: "{{project}} - {{service}} - {{slo}} - {{selector}}", - } - err = query.parseResponse(res, data, "") - frames, _ := res.Dataframes.Decoded() - So(err, ShouldBeNil) - So(frames[0].Fields[1].Name, ShouldEqual, "test-proj - test-service - test-slo - select_slo_compliance") - }) - }) - - Convey("when data from query returns slo and alias by is not defined", func() { - data, err := loadTestFile("./test-data/6-series-response-slo.json") - So(err, ShouldBeNil) - So(len(data.TimeSeries), ShouldEqual, 1) - - Convey("and alias by is expanded", func() { - res := &tsdb.QueryResult{Meta: simplejson.New(), RefId: "A"} - query := &cloudMonitoringTimeSeriesFilter{ - Params: url.Values{}, - ProjectName: "test-proj", - Selector: "select_slo_compliance", - Service: "test-service", - Slo: "test-slo", - } - err = query.parseResponse(res, data, "") - frames, _ := res.Dataframes.Decoded() - So(err, ShouldBeNil) - So(frames[0].Fields[1].Name, ShouldEqual, "select_slo_compliance(\"projects/test-proj/services/test-service/serviceLevelObjectives/test-slo\")") - }) + require.NoError(t, err) + frames, err := res.Dataframes.Decoded() + require.NoError(t, err) + assert.Equal(t, "test-proj - test-service - test-slo - select_slo_compliance", frames[0].Fields[1].Name) }) }) - Convey("Parse cloud monitoring unit", func() { - Convey("when there is only one query", func() { - Convey("and cloud monitoring unit does not have a corresponding grafana unit", func() { - executors := []cloudMonitoringQueryExecutor{ + + t.Run("when data from query returns slo and alias by is not defined", func(t *testing.T) { + data, err := loadTestFile("./test-data/6-series-response-slo.json") + require.NoError(t, err) + assert.Equal(t, 1, len(data.TimeSeries)) + + t.Run("and alias by is expanded", func(t *testing.T) { + res := &plugins.DataQueryResult{Meta: simplejson.New(), RefID: "A"} + query := &cloudMonitoringTimeSeriesFilter{ + Params: url.Values{}, + ProjectName: "test-proj", 
+ Selector: "select_slo_compliance", + Service: "test-service", + Slo: "test-slo", + } + err = query.parseResponse(res, data, "") + require.NoError(t, err) + frames, err := res.Dataframes.Decoded() + require.NoError(t, err) + assert.Equal(t, "select_slo_compliance(\"projects/test-proj/services/test-service/serviceLevelObjectives/test-slo\")", frames[0].Fields[1].Name) + }) + }) + }) + + t.Run("Parse cloud monitoring unit", func(t *testing.T) { + t.Run("when there is only one query", func(t *testing.T) { + t.Run("and cloud monitoring unit does not have a corresponding grafana unit", func(t *testing.T) { + executors := []cloudMonitoringQueryExecutor{ + &cloudMonitoringTimeSeriesFilter{Params: url.Values{}, ProjectName: "test-proj", Selector: "select_slo_compliance", Service: "test-service", + Slo: "test-slo", Unit: "megaseconds"}, + } + unit := executor.resolvePanelUnitFromQueries(executors) + assert.Equal(t, "", unit) + }) + + t.Run("and cloud monitoring unit has a corresponding grafana unit", func(t *testing.T) { + for key, element := range cloudMonitoringUnitMappings { + queries := []cloudMonitoringQueryExecutor{ &cloudMonitoringTimeSeriesFilter{Params: url.Values{}, ProjectName: "test-proj", Selector: "select_slo_compliance", Service: "test-service", - Slo: "test-slo", Unit: "megaseconds"}, - } - unit := executor.resolvePanelUnitFromQueries(executors) - So(unit, ShouldEqual, "") - }) - - Convey("and cloud monitoring unit has a corresponding grafana unit", func() { - for key, element := range cloudMonitoringUnitMappings { - queries := []cloudMonitoringQueryExecutor{ - &cloudMonitoringTimeSeriesFilter{Params: url.Values{}, ProjectName: "test-proj", Selector: "select_slo_compliance", Service: "test-service", - Slo: "test-slo", Unit: key}, - } - unit := executor.resolvePanelUnitFromQueries(queries) - So(unit, ShouldEqual, element) - } - }) - }) - - Convey("when there are more than one query", func() { - Convey("and all target units are the same", func() { - for key, element := range cloudMonitoringUnitMappings { - queries := []cloudMonitoringQueryExecutor{ - &cloudMonitoringTimeSeriesFilter{Params: url.Values{}, ProjectName: "test-proj", Selector: "select_slo_compliance", Service: "test-service1", - Slo: "test-slo", Unit: key}, - &cloudMonitoringTimeSeriesFilter{Params: url.Values{}, ProjectName: "test-proj", Selector: "select_slo_compliance", Service: "test-service2", - Slo: "test-slo", Unit: key}, - } - unit := executor.resolvePanelUnitFromQueries(queries) - So(unit, ShouldEqual, element) - } - }) - - Convey("and all target units are the same but does not have grafana mappings", func() { - queries := []cloudMonitoringQueryExecutor{ - &cloudMonitoringTimeSeriesFilter{Params: url.Values{}, ProjectName: "test-proj", Selector: "select_slo_compliance", Service: "test-service1", - Slo: "test-slo", Unit: "megaseconds"}, - &cloudMonitoringTimeSeriesFilter{Params: url.Values{}, ProjectName: "test-proj", Selector: "select_slo_compliance", Service: "test-service2", - Slo: "test-slo", Unit: "megaseconds"}, + Slo: "test-slo", Unit: key}, } unit := executor.resolvePanelUnitFromQueries(queries) - So(unit, ShouldEqual, "") - }) - - Convey("and all target units are not the same", func() { - queries := []cloudMonitoringQueryExecutor{ - &cloudMonitoringTimeSeriesFilter{Params: url.Values{}, ProjectName: "test-proj", Selector: "select_slo_compliance", Service: "test-service1", - Slo: "test-slo", Unit: "bit"}, - &cloudMonitoringTimeSeriesFilter{Params: url.Values{}, ProjectName: "test-proj", Selector: 
"select_slo_compliance", Service: "test-service2", - Slo: "test-slo", Unit: "min"}, - } - unit := executor.resolvePanelUnitFromQueries(queries) - So(unit, ShouldEqual, "") - }) + assert.Equal(t, element, unit) + } }) + }) - Convey("when data from query returns MQL and alias by is defined", func() { - data, err := loadTestFile("./test-data/7-series-response-mql.json") - So(err, ShouldBeNil) - So(len(data.TimeSeries), ShouldEqual, 0) - So(len(data.TimeSeriesData), ShouldEqual, 1) - - Convey("and alias by is expanded", func() { - fromStart := time.Date(2018, 3, 15, 13, 0, 0, 0, time.UTC).In(time.Local) - res := &tsdb.QueryResult{Meta: simplejson.New(), RefId: "A"} - query := &cloudMonitoringTimeSeriesQuery{ - ProjectName: "test-proj", - Query: "test-query", - AliasBy: "{{project}} - {{resource.label.zone}} - {{resource.label.instance_id}}", - timeRange: &tsdb.TimeRange{ - From: fmt.Sprintf("%v", fromStart.Unix()*1000), - To: fmt.Sprintf("%v", fromStart.Add(34*time.Minute).Unix()*1000), + t.Run("when there are more than one query", func(t *testing.T) { + t.Run("and all target units are the same", func(t *testing.T) { + for key, element := range cloudMonitoringUnitMappings { + queries := []cloudMonitoringQueryExecutor{ + &cloudMonitoringTimeSeriesFilter{ + Params: url.Values{}, ProjectName: "test-proj", Selector: "select_slo_compliance", + Service: "test-service1", Slo: "test-slo", Unit: key, + }, + &cloudMonitoringTimeSeriesFilter{ + Params: url.Values{}, ProjectName: "test-proj", Selector: "select_slo_compliance", + Service: "test-service2", Slo: "test-slo", Unit: key, }, } - err = query.parseResponse(res, data, "") - So(err, ShouldBeNil) - frames, _ := res.Dataframes.Decoded() - So(frames[0].Fields[1].Name, ShouldEqual, "test-proj - asia-northeast1-c - 6724404429462225363") - }) + unit := executor.resolvePanelUnitFromQueries(queries) + assert.Equal(t, element, unit) + } + }) + + t.Run("and all target units are the same but does not have grafana mappings", func(t *testing.T) { + queries := []cloudMonitoringQueryExecutor{ + &cloudMonitoringTimeSeriesFilter{ + Params: url.Values{}, ProjectName: "test-proj", Selector: "select_slo_compliance", + Service: "test-service1", Slo: "test-slo", Unit: "megaseconds", + }, + &cloudMonitoringTimeSeriesFilter{ + Params: url.Values{}, ProjectName: "test-proj", Selector: "select_slo_compliance", + Service: "test-service2", Slo: "test-slo", Unit: "megaseconds", + }, + } + unit := executor.resolvePanelUnitFromQueries(queries) + assert.Equal(t, "", unit) + }) + + t.Run("and all target units are not the same", func(t *testing.T) { + queries := []cloudMonitoringQueryExecutor{ + &cloudMonitoringTimeSeriesFilter{ + Params: url.Values{}, ProjectName: "test-proj", Selector: "select_slo_compliance", + Service: "test-service1", Slo: "test-slo", Unit: "bit", + }, + &cloudMonitoringTimeSeriesFilter{ + Params: url.Values{}, ProjectName: "test-proj", Selector: "select_slo_compliance", + Service: "test-service2", Slo: "test-slo", Unit: "min", + }, + } + unit := executor.resolvePanelUnitFromQueries(queries) + assert.Equal(t, "", unit) }) }) - Convey("when interpolating filter wildcards", func() { - Convey("and wildcard is used in the beginning and the end of the word", func() { - Convey("and there's no wildcard in the middle of the word", func() { - value := interpolateFilterWildcards("*-central1*") - So(value, ShouldEqual, `has_substring("-central1")`) - }) - Convey("and there is a wildcard in the middle of the word", func() { - value := 
interpolateFilterWildcards("*-cent*ral1*") - So(value, ShouldNotStartWith, `has_substring`) - }) + t.Run("when data from query returns MQL and alias by is defined", func(t *testing.T) { + data, err := loadTestFile("./test-data/7-series-response-mql.json") + require.NoError(t, err) + assert.Equal(t, 0, len(data.TimeSeries)) + assert.Equal(t, 1, len(data.TimeSeriesData)) + + t.Run("and alias by is expanded", func(t *testing.T) { + fromStart := time.Date(2018, 3, 15, 13, 0, 0, 0, time.UTC).In(time.Local) + res := &plugins.DataQueryResult{Meta: simplejson.New(), RefID: "A"} + query := &cloudMonitoringTimeSeriesQuery{ + ProjectName: "test-proj", + Query: "test-query", + AliasBy: "{{project}} - {{resource.label.zone}} - {{resource.label.instance_id}}", + timeRange: plugins.DataTimeRange{ + From: fmt.Sprintf("%v", fromStart.Unix()*1000), + To: fmt.Sprintf("%v", fromStart.Add(34*time.Minute).Unix()*1000), + }, + } + err = query.parseResponse(res, data, "") + require.NoError(t, err) + frames, err := res.Dataframes.Decoded() + require.NoError(t, err) + assert.Equal(t, "test-proj - asia-northeast1-c - 6724404429462225363", frames[0].Fields[1].Name) }) + }) + }) - Convey("and wildcard is used in the beginning of the word", func() { - Convey("and there is not a wildcard elsewhere in the word", func() { - value := interpolateFilterWildcards("*-central1") - So(value, ShouldEqual, `ends_with("-central1")`) - }) - Convey("and there is a wildcard elsewhere in the word", func() { - value := interpolateFilterWildcards("*-cent*al1") - So(value, ShouldNotStartWith, `ends_with`) - }) + t.Run("when interpolating filter wildcards", func(t *testing.T) { + t.Run("and wildcard is used in the beginning and the end of the word", func(t *testing.T) { + t.Run("and there's no wildcard in the middle of the word", func(t *testing.T) { + value := interpolateFilterWildcards("*-central1*") + assert.Equal(t, `has_substring("-central1")`, value) }) - - Convey("and wildcard is used at the end of the word", func() { - Convey("and there is not a wildcard elsewhere in the word", func() { - value := interpolateFilterWildcards("us-central*") - So(value, ShouldEqual, `starts_with("us-central")`) - }) - Convey("and there is a wildcard elsewhere in the word", func() { - value := interpolateFilterWildcards("*us-central*") - So(value, ShouldNotStartWith, `starts_with`) - }) - }) - - Convey("and wildcard is used in the middle of the word", func() { - Convey("and there is only one wildcard", func() { - value := interpolateFilterWildcards("us-ce*tral1-b") - So(value, ShouldEqual, `monitoring.regex.full_match("^us\\-ce.*tral1\\-b$")`) - }) - - Convey("and there is more than one wildcard", func() { - value := interpolateFilterWildcards("us-ce*tra*1-b") - So(value, ShouldEqual, `monitoring.regex.full_match("^us\\-ce.*tra.*1\\-b$")`) - }) - }) - - Convey("and wildcard is used in the middle of the word and in the beginning of the word", func() { - value := interpolateFilterWildcards("*s-ce*tral1-b") - So(value, ShouldEqual, `monitoring.regex.full_match("^.*s\\-ce.*tral1\\-b$")`) - }) - - Convey("and wildcard is used in the middle of the word and in the ending of the word", func() { - value := interpolateFilterWildcards("us-ce*tral1-*") - So(value, ShouldEqual, `monitoring.regex.full_match("^us\\-ce.*tral1\\-.*$")`) - }) - - Convey("and no wildcard is used", func() { - value := interpolateFilterWildcards("us-central1-a}") - So(value, ShouldEqual, `us-central1-a}`) + t.Run("and there is a wildcard in the middle of the word", func(t *testing.T) { + 
value := interpolateFilterWildcards("*-cent*ral1*") + assert.False(t, strings.HasPrefix(value, `has_substring`)) }) }) - Convey("when building filter string", func() { - Convey("and there's no regex operator", func() { - Convey("and there are wildcards in a filter value", func() { - filterParts := []string{"zone", "=", "*-central1*"} - value := buildFilterString("somemetrictype", filterParts) - So(value, ShouldEqual, `metric.type="somemetrictype" zone=has_substring("-central1")`) - }) + t.Run("and wildcard is used in the beginning of the word", func(t *testing.T) { + t.Run("and there is not a wildcard elsewhere in the word", func(t *testing.T) { + value := interpolateFilterWildcards("*-central1") + assert.Equal(t, `ends_with("-central1")`, value) + }) + t.Run("and there is a wildcard elsewhere in the word", func(t *testing.T) { + value := interpolateFilterWildcards("*-cent*al1") + assert.False(t, strings.HasPrefix(value, `ends_with`)) + }) + }) - Convey("and there are no wildcards in any filter value", func() { - filterParts := []string{"zone", "!=", "us-central1-a"} - value := buildFilterString("somemetrictype", filterParts) - So(value, ShouldEqual, `metric.type="somemetrictype" zone!="us-central1-a"`) - }) + t.Run("and wildcard is used at the end of the word", func(t *testing.T) { + t.Run("and there is not a wildcard elsewhere in the word", func(t *testing.T) { + value := interpolateFilterWildcards("us-central*") + assert.Equal(t, `starts_with("us-central")`, value) + }) + t.Run("and there is a wildcard elsewhere in the word", func(t *testing.T) { + value := interpolateFilterWildcards("*us-central*") + assert.False(t, strings.HasPrefix(value, `starts_with`)) + }) + }) + + t.Run("and wildcard is used in the middle of the word", func(t *testing.T) { + t.Run("and there is only one wildcard", func(t *testing.T) { + value := interpolateFilterWildcards("us-ce*tral1-b") + assert.Equal(t, `monitoring.regex.full_match("^us\\-ce.*tral1\\-b$")`, value) }) - Convey("and there is a regex operator", func() { - filterParts := []string{"zone", "=~", "us-central1-a~"} + t.Run("and there is more than one wildcard", func(t *testing.T) { + value := interpolateFilterWildcards("us-ce*tra*1-b") + assert.Equal(t, `monitoring.regex.full_match("^us\\-ce.*tra.*1\\-b$")`, value) + }) + }) + + t.Run("and wildcard is used in the middle of the word and in the beginning of the word", func(t *testing.T) { + value := interpolateFilterWildcards("*s-ce*tral1-b") + assert.Equal(t, `monitoring.regex.full_match("^.*s\\-ce.*tral1\\-b$")`, value) + }) + + t.Run("and wildcard is used in the middle of the word and in the ending of the word", func(t *testing.T) { + value := interpolateFilterWildcards("us-ce*tral1-*") + assert.Equal(t, `monitoring.regex.full_match("^us\\-ce.*tral1\\-.*$")`, value) + }) + + t.Run("and no wildcard is used", func(t *testing.T) { + value := interpolateFilterWildcards("us-central1-a}") + assert.Equal(t, `us-central1-a}`, value) + }) + }) + + t.Run("when building filter string", func(t *testing.T) { + t.Run("and there's no regex operator", func(t *testing.T) { + t.Run("and there are wildcards in a filter value", func(t *testing.T) { + filterParts := []string{"zone", "=", "*-central1*"} value := buildFilterString("somemetrictype", filterParts) - Convey("it should remove the ~ character from the operator that belongs to the value", func() { - So(value, ShouldNotContainSubstring, `=~`) - So(value, ShouldContainSubstring, `zone=`) - }) - - Convey("it should insert monitoring.regex.full_match before filter 
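[editor's note] The wildcard cases above fully determine a decision table: a single leading/trailing "*" maps to ends_with/starts_with, a matched pair around a plain word to has_substring, and anything else to a full-match regex. A re-derivation from those expectations; the real interpolateFilterWildcards lives in the cloudmonitoring package and may escape more metacharacters than the "-" handled here:

package main

import (
	"fmt"
	"strings"
)

func interpolate(value string) string {
	n := strings.Count(value, "*")
	switch {
	case n == 0:
		return value
	case n == 2 && strings.HasPrefix(value, "*") && strings.HasSuffix(value, "*"):
		return fmt.Sprintf(`has_substring("%s")`, value[1:len(value)-1])
	case n == 1 && strings.HasPrefix(value, "*"):
		return fmt.Sprintf(`ends_with("%s")`, value[1:])
	case n == 1 && strings.HasSuffix(value, "*"):
		return fmt.Sprintf(`starts_with("%s")`, strings.TrimSuffix(value, "*"))
	default:
		escaped := strings.ReplaceAll(value, "-", `\\-`) // literal \\ per the expected strings
		escaped = strings.ReplaceAll(escaped, "*", ".*")
		return fmt.Sprintf(`monitoring.regex.full_match("^%s$")`, escaped)
	}
}

func main() {
	fmt.Println(interpolate("*-central1*"))   // has_substring("-central1")
	fmt.Println(interpolate("us-central*"))   // starts_with("us-central")
	fmt.Println(interpolate("us-ce*tral1-b")) // monitoring.regex.full_match("^us\\-ce.*tral1\\-b$")
}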
value", func() { - So(value, ShouldContainSubstring, `zone=monitoring.regex.full_match("us-central1-a~")`) - }) + assert.Equal(t, `metric.type="somemetrictype" zone=has_substring("-central1")`, value) }) + + t.Run("and there are no wildcards in any filter value", func(t *testing.T) { + filterParts := []string{"zone", "!=", "us-central1-a"} + value := buildFilterString("somemetrictype", filterParts) + assert.Equal(t, `metric.type="somemetrictype" zone!="us-central1-a"`, value) + }) + }) + + t.Run("and there is a regex operator", func(t *testing.T) { + filterParts := []string{"zone", "=~", "us-central1-a~"} + value := buildFilterString("somemetrictype", filterParts) + assert.NotContains(t, value, `=~`) + assert.Contains(t, value, `zone=`) + + assert.Contains(t, value, `zone=monitoring.regex.full_match("us-central1-a~")`) }) }) } @@ -1131,70 +1095,97 @@ func loadTestFile(path string) (cloudMonitoringResponse, error) { return data, err } -func getCloudMonitoringQueriesFromInterface(qes []cloudMonitoringQueryExecutor) []*cloudMonitoringTimeSeriesFilter { +func getCloudMonitoringQueriesFromInterface(t *testing.T, qes []cloudMonitoringQueryExecutor) []*cloudMonitoringTimeSeriesFilter { + t.Helper() + queries := make([]*cloudMonitoringTimeSeriesFilter, 0) for _, qi := range qes { q, ok := qi.(*cloudMonitoringTimeSeriesFilter) - So(ok, ShouldBeTrue) + require.Truef(t, ok, "Received wrong type %T", qi) queries = append(queries, q) } return queries } -func verifyDeepLink(dl string, expectedTimeSelection map[string]string, expectedTimeSeriesFilter map[string]interface{}) { +func verifyDeepLink(t *testing.T, dl string, expectedTimeSelection map[string]string, + expectedTimeSeriesFilter map[string]interface{}) { + t.Helper() + u, err := url.Parse(dl) - So(err, ShouldBeNil) - So(u.Scheme, ShouldEqual, "https") - So(u.Host, ShouldEqual, "accounts.google.com") - So(u.Path, ShouldEqual, "/AccountChooser") + require.NoError(t, err) + assert.Equal(t, "https", u.Scheme) + assert.Equal(t, "accounts.google.com", u.Host) + assert.Equal(t, "/AccountChooser", u.Path) params, err := url.ParseQuery(u.RawQuery) - So(err, ShouldBeNil) + require.NoError(t, err) continueParam := params.Get("continue") - So(continueParam, ShouldNotBeEmpty) + assert.NotEmpty(t, continueParam) u, err = url.Parse(continueParam) - So(err, ShouldBeNil) + require.NoError(t, err) params, err = url.ParseQuery(u.RawQuery) - So(err, ShouldBeNil) + require.NoError(t, err) deepLinkParam := params.Get("Grafana_deeplink") - So(deepLinkParam, ShouldNotBeEmpty) + assert.NotEmpty(t, deepLinkParam) pageStateStr := params.Get("pageState") - So(pageStateStr, ShouldNotBeEmpty) + assert.NotEmpty(t, pageStateStr) var pageState map[string]map[string]interface{} err = json.Unmarshal([]byte(pageStateStr), &pageState) - So(err, ShouldBeNil) + require.NoError(t, err) timeSelection, ok := pageState["timeSelection"] - So(ok, ShouldBeTrue) + assert.True(t, ok) for k, v := range expectedTimeSelection { s, ok := timeSelection[k].(string) - So(ok, ShouldBeTrue) - So(s, ShouldEqual, v) + assert.True(t, ok) + assert.Equal(t, v, s) } dataSets, ok := pageState["xyChart"]["dataSets"].([]interface{}) - So(ok, ShouldBeTrue) - So(len(dataSets), ShouldEqual, 1) + assert.True(t, ok) + assert.Equal(t, 1, len(dataSets)) dataSet, ok := dataSets[0].(map[string]interface{}) - So(ok, ShouldBeTrue) + assert.True(t, ok) i, ok := dataSet["timeSeriesFilter"] - So(ok, ShouldBeTrue) + assert.True(t, ok) timeSeriesFilter := i.(map[string]interface{}) for k, v := range expectedTimeSeriesFilter { 
 		s, ok := timeSeriesFilter[k]
-		So(ok, ShouldBeTrue)
+		assert.True(t, ok)
 		rt := reflect.TypeOf(v)
 		switch rt.Kind() {
 		case reflect.Slice, reflect.Array:
-			So(s, ShouldResemble, v)
+			assert.Equal(t, v, s)
 		default:
-			So(s, ShouldEqual, v)
+			assert.Equal(t, v, s)
 		}
 	}
 }
+
+func getBaseQuery() plugins.DataQuery {
+	fromStart := time.Date(2018, 3, 15, 13, 0, 0, 0, time.UTC).In(time.Local)
+	query := plugins.DataQuery{
+		TimeRange: &plugins.DataTimeRange{
+			From: fmt.Sprintf("%v", fromStart.Unix()*1000),
+			To:   fmt.Sprintf("%v", fromStart.Add(34*time.Minute).Unix()*1000),
+		},
+		Queries: []plugins.DataSubQuery{
+			{
+				Model: simplejson.NewFromAny(map[string]interface{}{
+					"metricType": "a/metric/type",
+					"view":       "FULL",
+					"aliasBy":    "testalias",
+					"type":       "timeSeriesQuery",
+				}),
+				RefID: "A",
+			},
+		},
+	}
+	return query
+}
diff --git a/pkg/tsdb/cloudmonitoring/time_series_filter.go b/pkg/tsdb/cloudmonitoring/time_series_filter.go
index 4924f333530..00f53b3f3b6 100644
--- a/pkg/tsdb/cloudmonitoring/time_series_filter.go
+++ b/pkg/tsdb/cloudmonitoring/time_series_filter.go
@@ -12,13 +12,14 @@ import (
 	"github.com/grafana/grafana-plugin-sdk-go/data"
 	"github.com/grafana/grafana/pkg/components/simplejson"
-	"github.com/grafana/grafana/pkg/tsdb"
+	"github.com/grafana/grafana/pkg/plugins"
 	"github.com/opentracing/opentracing-go"
 	"golang.org/x/net/context/ctxhttp"
 )

-func (timeSeriesFilter *cloudMonitoringTimeSeriesFilter) run(ctx context.Context, tsdbQuery *tsdb.TsdbQuery, e *CloudMonitoringExecutor) (*tsdb.QueryResult, cloudMonitoringResponse, string, error) {
-	queryResult := &tsdb.QueryResult{Meta: simplejson.New(), RefId: timeSeriesFilter.RefID}
+func (timeSeriesFilter *cloudMonitoringTimeSeriesFilter) run(ctx context.Context, tsdbQuery plugins.DataQuery,
+	e *Executor) (plugins.DataQueryResult, cloudMonitoringResponse, string, error) {
+	queryResult := plugins.DataQueryResult{Meta: simplejson.New(), RefID: timeSeriesFilter.RefID}
 	projectName := timeSeriesFilter.ProjectName
 	if projectName == "" {
 		defaultProject, err := e.getDefaultProject(ctx)
@@ -78,7 +79,8 @@ func (timeSeriesFilter *cloudMonitoringTimeSeriesFilter) run(ctx context.Context
 	return queryResult, data, req.URL.RawQuery, nil
 }

-func (timeSeriesFilter *cloudMonitoringTimeSeriesFilter) parseResponse(queryRes *tsdb.QueryResult, response cloudMonitoringResponse, executedQueryString string) error {
+func (timeSeriesFilter *cloudMonitoringTimeSeriesFilter) parseResponse(queryRes *plugins.DataQueryResult,
+	response cloudMonitoringResponse, executedQueryString string) error {
 	labels := make(map[string]map[string]bool)
 	frames := data.Frames{}
 	for _, series := range response.TimeSeries {
@@ -199,7 +201,8 @@ func (timeSeriesFilter *cloudMonitoringTimeSeriesFilter) parseResponse(queryRes
 			additionalLabels := data.Labels{"bucket": bucketBound}
 			timeField := data.NewField(data.TimeSeriesTimeFieldName, nil, []time.Time{})
 			valueField := data.NewField(data.TimeSeriesValueFieldName, nil, []float64{})
-			frameName := formatLegendKeys(series.Metric.Type, defaultMetricName, seriesLabels, additionalLabels, timeSeriesFilter)
+			frameName := formatLegendKeys(series.Metric.Type, defaultMetricName, seriesLabels,
+				additionalLabels, timeSeriesFilter)
 			valueField.Name = frameName
 			valueField.Labels = seriesLabels
 			setDisplayNameAsFieldName(valueField)
@@ -224,7 +227,7 @@ func (timeSeriesFilter *cloudMonitoringTimeSeriesFilter) parseResponse(queryRes
 		frames = addConfigData(frames, dl)
 	}

-	queryRes.Dataframes = tsdb.NewDecodedDataFrames(frames)
+	queryRes.Dataframes = plugins.NewDecodedDataFrames(frames)

 	labelsByKey := make(map[string][]string)
 	for key, values := range labels {
@@ -238,8 +241,9 @@ func (timeSeriesFilter *cloudMonitoringTimeSeriesFilter) parseResponse(queryRes
 	return nil
 }

-func (timeSeriesFilter *cloudMonitoringTimeSeriesFilter) handleNonDistributionSeries(series timeSeries, defaultMetricName string, seriesLabels map[string]string,
-	queryRes *tsdb.QueryResult, frame *data.Frame) {
+func (timeSeriesFilter *cloudMonitoringTimeSeriesFilter) handleNonDistributionSeries(series timeSeries,
+	defaultMetricName string, seriesLabels map[string]string, queryRes *plugins.DataQueryResult,
+	frame *data.Frame) {
 	for i := 0; i < len(series.Points); i++ {
 		point := series.Points[i]
 		value := point.Value.DoubleValue
@@ -268,7 +272,8 @@ func (timeSeriesFilter *cloudMonitoringTimeSeriesFilter) handleNonDistributionSe
 	setDisplayNameAsFieldName(dataField)
 }

-func (timeSeriesFilter *cloudMonitoringTimeSeriesFilter) parseToAnnotations(queryRes *tsdb.QueryResult, response cloudMonitoringResponse, title string, text string, tags string) error {
+func (timeSeriesFilter *cloudMonitoringTimeSeriesFilter) parseToAnnotations(queryRes *plugins.DataQueryResult,
+	response cloudMonitoringResponse, title string, text string, tags string) error {
 	frames := data.Frames{}
 	for _, series := range response.TimeSeries {
 		if len(series.Points) == 0 {
@@ -282,18 +287,20 @@ func (timeSeriesFilter *cloudMonitoringTimeSeriesFilter) parseToAnnotations(quer
 			value = point.Value.StringValue
 		}
 		annotation["time"] = append(annotation["time"], point.Interval.EndTime.UTC().Format(time.RFC3339))
-		annotation["title"] = append(annotation["title"], formatAnnotationText(title, value, series.Metric.Type, series.Metric.Labels, series.Resource.Labels))
+		annotation["title"] = append(annotation["title"], formatAnnotationText(title, value, series.Metric.Type,
+			series.Metric.Labels, series.Resource.Labels))
 		annotation["tags"] = append(annotation["tags"], tags)
-		annotation["text"] = append(annotation["text"], formatAnnotationText(text, value, series.Metric.Type, series.Metric.Labels, series.Resource.Labels))
+		annotation["text"] = append(annotation["text"], formatAnnotationText(text, value, series.Metric.Type,
+			series.Metric.Labels, series.Resource.Labels))
 	}
-	frames = append(frames, data.NewFrame(queryRes.RefId,
+	frames = append(frames, data.NewFrame(queryRes.RefID,
 		data.NewField("time", nil, annotation["time"]),
 		data.NewField("title", nil, annotation["title"]),
 		data.NewField("tags", nil, annotation["tags"]),
 		data.NewField("text", nil, annotation["text"]),
 	))
 	}
-	queryRes.Dataframes = tsdb.NewDecodedDataFrames(frames)
+	queryRes.Dataframes = plugins.NewDecodedDataFrames(frames)
 	return nil
 }
@@ -313,7 +320,8 @@ func (timeSeriesFilter *cloudMonitoringTimeSeriesFilter) buildDeepLink() string
 	u, err := url.Parse("https://console.cloud.google.com/monitoring/metrics-explorer")
 	if err != nil {
-		slog.Error("Failed to generate deep link: unable to parse metrics explorer URL", "ProjectName", timeSeriesFilter.ProjectName, "query", timeSeriesFilter.RefID)
+		slog.Error("Failed to generate deep link: unable to parse metrics explorer URL", "ProjectName",
+			timeSeriesFilter.ProjectName, "query", timeSeriesFilter.RefID)
 		return ""
 	}
@@ -353,7 +361,8 @@ func (timeSeriesFilter *cloudMonitoringTimeSeriesFilter) buildDeepLink() string
 	blob, err := json.Marshal(pageState)
 	if err != nil {
-		slog.Error("Failed to generate deep link", "pageState", pageState, "ProjectName", timeSeriesFilter.ProjectName, "query", timeSeriesFilter.RefID)
+		slog.Error("Failed to generate deep link", "pageState", pageState, "ProjectName", timeSeriesFilter.ProjectName,
+			"query", timeSeriesFilter.RefID)
 		return ""
 	}
@@ -362,7 +371,8 @@ func (timeSeriesFilter *cloudMonitoringTimeSeriesFilter) buildDeepLink() string
 	accountChooserURL, err := url.Parse("https://accounts.google.com/AccountChooser")
 	if err != nil {
-		slog.Error("Failed to generate deep link: unable to parse account chooser URL", "ProjectName", timeSeriesFilter.ProjectName, "query", timeSeriesFilter.RefID)
+		slog.Error("Failed to generate deep link: unable to parse account chooser URL", "ProjectName",
+			timeSeriesFilter.ProjectName, "query", timeSeriesFilter.RefID)
 		return ""
 	}
 	accountChooserQuery := accountChooserURL.Query()
diff --git a/pkg/tsdb/cloudmonitoring/time_series_query.go b/pkg/tsdb/cloudmonitoring/time_series_query.go
index a9dcd6ae79b..623406a44a8 100644
--- a/pkg/tsdb/cloudmonitoring/time_series_query.go
+++ b/pkg/tsdb/cloudmonitoring/time_series_query.go
@@ -13,13 +13,15 @@ import (
 	"github.com/grafana/grafana-plugin-sdk-go/data"
 	"github.com/grafana/grafana/pkg/components/simplejson"
-	"github.com/grafana/grafana/pkg/tsdb"
+	"github.com/grafana/grafana/pkg/plugins"
+	"github.com/grafana/grafana/pkg/tsdb/interval"
 	"github.com/opentracing/opentracing-go"
 	"golang.org/x/net/context/ctxhttp"
 )

-func (timeSeriesQuery cloudMonitoringTimeSeriesQuery) run(ctx context.Context, tsdbQuery *tsdb.TsdbQuery, e *CloudMonitoringExecutor) (*tsdb.QueryResult, cloudMonitoringResponse, string, error) {
-	queryResult := &tsdb.QueryResult{Meta: simplejson.New(), RefId: timeSeriesQuery.RefID}
+func (timeSeriesQuery cloudMonitoringTimeSeriesQuery) run(ctx context.Context, tsdbQuery plugins.DataQuery,
+	e *Executor) (plugins.DataQueryResult, cloudMonitoringResponse, string, error) {
+	queryResult := plugins.DataQueryResult{Meta: simplejson.New(), RefID: timeSeriesQuery.RefID}
 	projectName := timeSeriesQuery.ProjectName
 	if projectName == "" {
 		defaultProject, err := e.getDefaultProject(ctx)
@@ -41,8 +43,8 @@ func (timeSeriesQuery cloudMonitoringTimeSeriesQuery) run(ctx context.Context, t
 		queryResult.Error = err
 		return queryResult, cloudMonitoringResponse{}, "", nil
 	}
-	intervalCalculator := tsdb.NewIntervalCalculator(&tsdb.IntervalOptions{})
-	interval := intervalCalculator.Calculate(tsdbQuery.TimeRange, time.Duration(timeSeriesQuery.IntervalMS/1000)*time.Second)
+	intervalCalculator := interval.NewCalculator(interval.CalculatorOptions{})
+	interval := intervalCalculator.Calculate(*tsdbQuery.TimeRange, time.Duration(timeSeriesQuery.IntervalMS/1000)*time.Second)
 	timeFormat := "2006/01/02-15:04:05"
 	timeSeriesQuery.Query += fmt.Sprintf(" | graph_period %s | within d'%s', d'%s'", interval.Text, from.UTC().Format(timeFormat), to.UTC().Format(timeFormat))
@@ -92,7 +94,8 @@ func (timeSeriesQuery cloudMonitoringTimeSeriesQuery) run(ctx context.Context, t
 	return queryResult, data, timeSeriesQuery.Query, nil
 }

-func (timeSeriesQuery cloudMonitoringTimeSeriesQuery) parseResponse(queryRes *tsdb.QueryResult, response cloudMonitoringResponse, executedQueryString string) error {
+func (timeSeriesQuery cloudMonitoringTimeSeriesQuery) parseResponse(queryRes *plugins.DataQueryResult,
+	response cloudMonitoringResponse, executedQueryString string) error {
 	labels := make(map[string]map[string]bool)
 	frames := data.Frames{}
 	for _, series := range response.TimeSeriesData {
@@ -157,7 +160,10 @@ func (timeSeriesQuery cloudMonitoringTimeSeriesQuery) parseResponse(queryRes *ts
 			frame.SetRow(len(series.PointData)-1-i, series.PointData[i].TimeInterval.EndTime, value)
 		}

-		metricName := formatLegendKeys(d.Key, defaultMetricName, seriesLabels, nil, &cloudMonitoringTimeSeriesFilter{ProjectName: timeSeriesQuery.ProjectName, AliasBy: timeSeriesQuery.AliasBy})
+		metricName := formatLegendKeys(d.Key, defaultMetricName, seriesLabels, nil,
+			&cloudMonitoringTimeSeriesFilter{
+				ProjectName: timeSeriesQuery.ProjectName, AliasBy: timeSeriesQuery.AliasBy,
+			})
 		dataField := frame.Fields[1]
 		dataField.Name = metricName
 		dataField.Labels = seriesLabels
@@ -244,7 +250,7 @@ func (timeSeriesQuery cloudMonitoringTimeSeriesQuery) parseResponse(queryRes *ts
 		frames = addConfigData(frames, dl)
 	}

-	queryRes.Dataframes = tsdb.NewDecodedDataFrames(frames)
+	queryRes.Dataframes = plugins.NewDecodedDataFrames(frames)

 	labelsByKey := make(map[string][]string)
 	for key, values := range labels {
@@ -258,7 +264,8 @@ func (timeSeriesQuery cloudMonitoringTimeSeriesQuery) parseResponse(queryRes *ts
 	return nil
 }

-func (timeSeriesQuery cloudMonitoringTimeSeriesQuery) parseToAnnotations(queryRes *tsdb.QueryResult, data cloudMonitoringResponse, title string, text string, tags string) error {
+func (timeSeriesQuery cloudMonitoringTimeSeriesQuery) parseToAnnotations(queryRes *plugins.DataQueryResult,
+	data cloudMonitoringResponse, title string, text string, tags string) error {
 	annotations := make([]map[string]string, 0)

 	for _, series := range data.TimeSeriesData {
diff --git a/pkg/tsdb/cloudmonitoring/types.go b/pkg/tsdb/cloudmonitoring/types.go
index 7c43ff87c11..43e6bd99254 100644
--- a/pkg/tsdb/cloudmonitoring/types.go
+++ b/pkg/tsdb/cloudmonitoring/types.go
@@ -5,14 +5,15 @@ import (
 	"net/url"
 	"time"

-	"github.com/grafana/grafana/pkg/tsdb"
+	"github.com/grafana/grafana/pkg/plugins"
 )

 type (
 	cloudMonitoringQueryExecutor interface {
-		run(ctx context.Context, tsdbQuery *tsdb.TsdbQuery, e *CloudMonitoringExecutor) (*tsdb.QueryResult, cloudMonitoringResponse, string, error)
-		parseResponse(queryRes *tsdb.QueryResult, data cloudMonitoringResponse, executedQueryString string) error
-		parseToAnnotations(queryRes *tsdb.QueryResult, data cloudMonitoringResponse, title string, text string, tags string) error
+		run(ctx context.Context, tsdbQuery plugins.DataQuery, e *Executor) (
+			plugins.DataQueryResult, cloudMonitoringResponse, string, error)
+		parseResponse(queryRes *plugins.DataQueryResult, data cloudMonitoringResponse, executedQueryString string) error
+		parseToAnnotations(queryRes *plugins.DataQueryResult, data cloudMonitoringResponse, title string, text string, tags string) error
 		buildDeepLink() string
 		getRefID() string
 		getUnit() string
@@ -39,7 +40,7 @@ type (
 		Query       string
 		IntervalMS  int64
 		AliasBy     string
-		timeRange   *tsdb.TimeRange
+		timeRange   plugins.DataTimeRange
 		Unit        string
 	}
diff --git a/pkg/tsdb/cloudwatch/annotation_query.go b/pkg/tsdb/cloudwatch/annotation_query.go
index 6af828e0ee4..06e9ad58436 100644
--- a/pkg/tsdb/cloudwatch/annotation_query.go
+++ b/pkg/tsdb/cloudwatch/annotation_query.go
@@ -8,16 +8,17 @@ import (
 	"github.com/aws/aws-sdk-go/aws"
 	"github.com/aws/aws-sdk-go/service/cloudwatch"
 	"github.com/grafana/grafana/pkg/components/simplejson"
-	"github.com/grafana/grafana/pkg/tsdb"
+	"github.com/grafana/grafana/pkg/plugins"
 	"github.com/grafana/grafana/pkg/util/errutil"
 )

-func (e *cloudWatchExecutor) executeAnnotationQuery(ctx context.Context, queryContext *tsdb.TsdbQuery) (*tsdb.Response, error) {
-	result := &tsdb.Response{
-		Results: make(map[string]*tsdb.QueryResult),
+func (e *cloudWatchExecutor) executeAnnotationQuery(ctx context.Context, queryContext plugins.DataQuery) (
+	plugins.DataResponse, error) {
+	result := plugins.DataResponse{
+		Results: make(map[string]plugins.DataQueryResult),
 	}
 	firstQuery := queryContext.Queries[0]
-	queryResult := &tsdb.QueryResult{Meta: simplejson.New(), RefId: firstQuery.RefId}
+	queryResult := plugins.DataQueryResult{Meta: simplejson.New(), RefID: firstQuery.RefID}

 	parameters := firstQuery.Model
 	usePrefixMatch := parameters.Get("prefixMatching").MustBool(false)
@@ -27,7 +28,7 @@ func (e *cloudWatchExecutor) executeAnnotationQuery(ctx context.Context, queryCo
 	dimensions := parameters.Get("dimensions").MustMap()
 	statistics, err := parseStatistics(parameters)
 	if err != nil {
-		return nil, err
+		return plugins.DataResponse{}, err
 	}
 	period := int64(parameters.Get("period").MustInt(0))
 	if period == 0 && !usePrefixMatch {
@@ -38,7 +39,7 @@ func (e *cloudWatchExecutor) executeAnnotationQuery(ctx context.Context, queryCo

 	cli, err := e.getCWClient(region)
 	if err != nil {
-		return nil, err
+		return plugins.DataResponse{}, err
 	}

 	var alarmNames []*string
@@ -50,7 +51,7 @@ func (e *cloudWatchExecutor) executeAnnotationQuery(ctx context.Context, queryCo
 		}
 		resp, err := cli.DescribeAlarms(params)
 		if err != nil {
-			return nil, errutil.Wrap("failed to call cloudwatch:DescribeAlarms", err)
+			return plugins.DataResponse{}, errutil.Wrap("failed to call cloudwatch:DescribeAlarms", err)
 		}
 		alarmNames = filterAlarms(resp, namespace, metricName, dimensions, statistics, period)
 	} else {
@@ -81,7 +82,7 @@ func (e *cloudWatchExecutor) executeAnnotationQuery(ctx context.Context, queryCo
 		}
 		resp, err := cli.DescribeAlarmsForMetric(params)
 		if err != nil {
-			return nil, errutil.Wrap("failed to call cloudwatch:DescribeAlarmsForMetric", err)
+			return plugins.DataResponse{}, errutil.Wrap("failed to call cloudwatch:DescribeAlarmsForMetric", err)
 		}
 		for _, alarm := range resp.MetricAlarms {
 			alarmNames = append(alarmNames, alarm.AlarmName)
@@ -91,11 +92,11 @@ func (e *cloudWatchExecutor) executeAnnotationQuery(ctx context.Context, queryCo

 	startTime, err := queryContext.TimeRange.ParseFrom()
 	if err != nil {
-		return nil, err
+		return plugins.DataResponse{}, err
 	}
 	endTime, err := queryContext.TimeRange.ParseTo()
 	if err != nil {
-		return nil, err
+		return plugins.DataResponse{}, err
 	}

 	annotations := make([]map[string]string, 0)
@@ -108,7 +109,7 @@ func (e *cloudWatchExecutor) executeAnnotationQuery(ctx context.Context, queryCo
 		}
 		resp, err := cli.DescribeAlarmHistory(params)
 		if err != nil {
-			return nil, errutil.Wrap("failed to call cloudwatch:DescribeAlarmHistory", err)
+			return plugins.DataResponse{}, errutil.Wrap("failed to call cloudwatch:DescribeAlarmHistory", err)
 		}
 		for _, history := range resp.AlarmHistoryItems {
 			annotation := make(map[string]string)
@@ -120,15 +121,15 @@ func (e *cloudWatchExecutor) executeAnnotationQuery(ctx context.Context, queryCo
 		}
 	}

-	transformAnnotationToTable(annotations, queryResult)
-	result.Results[firstQuery.RefId] = queryResult
-	return result, err
+	transformAnnotationToTable(annotations, &queryResult)
+	result.Results[firstQuery.RefID] = queryResult
+	return result, nil
 }

-func transformAnnotationToTable(data []map[string]string, result *tsdb.QueryResult) {
-	table := &tsdb.Table{
-		Columns: make([]tsdb.TableColumn, 4),
-		Rows:    make([]tsdb.RowValues, 0),
+func transformAnnotationToTable(data []map[string]string, result *plugins.DataQueryResult) {
+	table := plugins.DataTable{
+		Columns: make([]plugins.DataTableColumn, 4),
+		Rows:    make([]plugins.DataRowValues, 0),
 	}
 	table.Columns[0].Text = "time"
 	table.Columns[1].Text = "title"
@@ -147,7 +148,8 @@ func transformAnnotationToTable(data []map[string]string, result *tsdb.QueryResu
 	result.Meta.Set("rowCount", len(data))
 }

-func filterAlarms(alarms *cloudwatch.DescribeAlarmsOutput, namespace string, metricName string, dimensions map[string]interface{}, statistics []string, period int64) []*string {
+func filterAlarms(alarms *cloudwatch.DescribeAlarmsOutput, namespace string, metricName string,
+	dimensions map[string]interface{}, statistics []string, period int64) []*string {
 	alarmNames := make([]*string, 0)

 	for _, alarm := range alarms.MetricAlarms {
diff --git a/pkg/tsdb/cloudwatch/cloudwatch.go b/pkg/tsdb/cloudwatch/cloudwatch.go
index 88b3591f598..0d329afe342 100644
--- a/pkg/tsdb/cloudwatch/cloudwatch.go
+++ b/pkg/tsdb/cloudwatch/cloudwatch.go
@@ -26,9 +26,9 @@ import (
 	"github.com/grafana/grafana/pkg/components/simplejson"
 	"github.com/grafana/grafana/pkg/infra/log"
 	"github.com/grafana/grafana/pkg/models"
+	"github.com/grafana/grafana/pkg/plugins"
 	"github.com/grafana/grafana/pkg/registry"
 	"github.com/grafana/grafana/pkg/setting"
-	"github.com/grafana/grafana/pkg/tsdb"
 )

 type datasourceInfo struct {
@@ -67,15 +67,13 @@ type CloudWatchService struct {
 }

 func (s *CloudWatchService) Init() error {
-	plog.Debug("initing")
-
-	tsdb.RegisterTsdbQueryEndpoint("cloudwatch", func(ds *models.DataSource) (tsdb.TsdbQueryEndpoint, error) {
-		return newExecutor(s.LogsService), nil
-	})
-
 	return nil
 }

+func (s *CloudWatchService) NewExecutor(*models.DataSource) (plugins.DataPlugin, error) {
+	return newExecutor(s.LogsService), nil
+}
+
 func newExecutor(logsService *LogsService) *cloudWatchExecutor {
 	return &cloudWatchExecutor{
 		logsService: logsService,
@@ -248,12 +246,12 @@ func (e *cloudWatchExecutor) getRGTAClient(region string) (resourcegroupstagging
 }

 func (e *cloudWatchExecutor) alertQuery(ctx context.Context, logsClient cloudwatchlogsiface.CloudWatchLogsAPI,
-	queryContext *tsdb.TsdbQuery) (*cloudwatchlogs.GetQueryResultsOutput, error) {
+	queryContext plugins.DataQuery) (*cloudwatchlogs.GetQueryResultsOutput, error) {
 	const maxAttempts = 8
 	const pollPeriod = 1000 * time.Millisecond

 	queryParams := queryContext.Queries[0].Model
-	startQueryOutput, err := e.executeStartQuery(ctx, logsClient, queryParams, queryContext.TimeRange)
+	startQueryOutput, err := e.executeStartQuery(ctx, logsClient, queryParams, *queryContext.TimeRange)
 	if err != nil {
 		return nil, err
 	}
@@ -285,15 +283,17 @@ func (e *cloudWatchExecutor) alertQuery(ctx context.Context, logsClient cloudwat
 	return nil, nil
 }

-// Query executes a CloudWatch query.
-func (e *cloudWatchExecutor) Query(ctx context.Context, dsInfo *models.DataSource, queryContext *tsdb.TsdbQuery) (*tsdb.Response, error) {
+// DataQuery executes a CloudWatch query.
+func (e *cloudWatchExecutor) DataQuery(ctx context.Context, dsInfo *models.DataSource,
+	queryContext plugins.DataQuery) (plugins.DataResponse, error) {
 	e.DataSource = dsInfo

 	/*
-		Unlike many other data sources, with Cloudwatch Logs query requests don't receive the results as the response to the query, but rather
-		an ID is first returned. Following this, a client is expected to send requests along with the ID until the status of the query is complete,
-		receiving (possibly partial) results each time. For queries made via dashboards and Explore, the logic of making these repeated queries is handled on
-		the frontend, but because alerts are executed on the backend the logic needs to be reimplemented here.
+		Unlike many other data sources, with Cloudwatch Logs query requests don't receive the results as the response
+		to the query, but rather an ID is first returned. Following this, a client is expected to send requests along
+		with the ID until the status of the query is complete, receiving (possibly partial) results each time. For
+		queries made via dashboards and Explore, the logic of making these repeated queries is handled on the
+		frontend, but because alerts are executed on the backend the logic needs to be reimplemented here.
 	*/
 	queryParams := queryContext.Queries[0].Model
 	_, fromAlert := queryContext.Headers["FromAlert"]
@@ -306,7 +306,7 @@ func (e *cloudWatchExecutor) Query(ctx context.Context, dsInfo *models.DataSourc
 	queryType := queryParams.Get("type").MustString("")

 	var err error
-	var result *tsdb.Response
+	var result plugins.DataResponse
 	switch queryType {
 	case "metricFindQuery":
 		result, err = e.executeMetricFindQuery(ctx, queryContext)
@@ -325,7 +325,8 @@ func (e *cloudWatchExecutor) Query(ctx context.Context, dsInfo *models.DataSourc
 	return result, err
 }

-func (e *cloudWatchExecutor) executeLogAlertQuery(ctx context.Context, queryContext *tsdb.TsdbQuery) (*tsdb.Response, error) {
+func (e *cloudWatchExecutor) executeLogAlertQuery(ctx context.Context, queryContext plugins.DataQuery) (
+	plugins.DataResponse, error) {
 	queryParams := queryContext.Queries[0].Model
 	queryParams.Set("subtype", "StartQuery")
 	queryParams.Set("queryString", queryParams.Get("expression").MustString(""))
@@ -338,12 +339,12 @@ func (e *cloudWatchExecutor) executeLogAlertQuery(ctx context.Context, queryCont

 	logsClient, err := e.getCWLogsClient(region)
 	if err != nil {
-		return nil, err
+		return plugins.DataResponse{}, err
 	}

-	result, err := e.executeStartQuery(ctx, logsClient, queryParams, queryContext.TimeRange)
+	result, err := e.executeStartQuery(ctx, logsClient, queryParams, *queryContext.TimeRange)
 	if err != nil {
-		return nil, err
+		return plugins.DataResponse{}, err
 	}

 	queryParams.Set("queryId", *result.QueryId)
@@ -351,38 +352,38 @@ func (e *cloudWatchExecutor) executeLogAlertQuery(ctx context.Context, queryCont
 	// Get query results
 	getQueryResultsOutput, err := e.alertQuery(ctx, logsClient, queryContext)
 	if err != nil {
-		return nil, err
+		return plugins.DataResponse{}, err
 	}

 	dataframe, err := logsResultsToDataframes(getQueryResultsOutput)
 	if err != nil {
-		return nil, err
+		return plugins.DataResponse{}, err
 	}

 	statsGroups := queryParams.Get("statsGroups").MustStringArray()
 	if len(statsGroups) > 0 && len(dataframe.Fields) > 0 {
 		groupedFrames, err := groupResults(dataframe, statsGroups)
 		if err != nil {
-			return nil, err
+			return plugins.DataResponse{}, err
 		}

-		response := &tsdb.Response{
-			Results: make(map[string]*tsdb.QueryResult),
+		response := plugins.DataResponse{
+			Results: make(map[string]plugins.DataQueryResult),
 		}

-		response.Results["A"] = &tsdb.QueryResult{
-			RefId:      "A",
-			Dataframes: tsdb.NewDecodedDataFrames(groupedFrames),
+		response.Results["A"] = plugins.DataQueryResult{
+			RefID:      "A",
+			Dataframes: plugins.NewDecodedDataFrames(groupedFrames),
 		}

 		return response, nil
 	}

-	response := &tsdb.Response{
-		Results: map[string]*tsdb.QueryResult{
+	response := plugins.DataResponse{
+		Results: map[string]plugins.DataQueryResult{
 			"A": {
-				RefId:      "A",
-				Dataframes: tsdb.NewDecodedDataFrames(data.Frames{dataframe}),
+				RefID:      "A",
+				Dataframes: plugins.NewDecodedDataFrames(data.Frames{dataframe}),
 			},
 		},
 	}
diff --git a/pkg/tsdb/cloudwatch/live.go b/pkg/tsdb/cloudwatch/live.go
index fa81454187e..6397bcb3ae0 100644
--- a/pkg/tsdb/cloudwatch/live.go
+++ b/pkg/tsdb/cloudwatch/live.go
@@ -18,8 +18,8 @@ import (
 	"github.com/grafana/grafana-plugin-sdk-go/data"
 	"github.com/grafana/grafana/pkg/components/simplejson"
 	"github.com/grafana/grafana/pkg/models"
+	"github.com/grafana/grafana/pkg/plugins"
 	"github.com/grafana/grafana/pkg/setting"
-	"github.com/grafana/grafana/pkg/tsdb"
 	"github.com/grafana/grafana/pkg/util/retryer"
 	"golang.org/x/sync/errgroup"
 )
@@ -108,20 +108,21 @@ func (r *logQueryRunner) publishResults(channelName string) error {

 // executeLiveLogQuery executes a CloudWatch Logs query with live updates over WebSocket.
 // A WebSocket channel is created, which goroutines send responses over.
-func (e *cloudWatchExecutor) executeLiveLogQuery(ctx context.Context, queryContext *tsdb.TsdbQuery) (*tsdb.Response, error) {
+func (e *cloudWatchExecutor) executeLiveLogQuery(ctx context.Context, queryContext plugins.DataQuery) (
+	plugins.DataResponse, error) {
 	responseChannelName := uuid.New().String()
-	responseChannel := make(chan *tsdb.Response)
+	responseChannel := make(chan plugins.DataResponse)
 	if err := e.logsService.AddResponseChannel("plugin/cloudwatch/"+responseChannelName, responseChannel); err != nil {
 		close(responseChannel)
-		return nil, err
+		return plugins.DataResponse{}, err
 	}

 	go e.sendLiveQueriesToChannel(queryContext, responseChannel)

-	response := &tsdb.Response{
-		Results: map[string]*tsdb.QueryResult{
+	response := plugins.DataResponse{
+		Results: map[string]plugins.DataQueryResult{
 			"A": {
-				RefId: "A",
+				RefID: "A",
 				Meta: simplejson.NewFromAny(map[string]interface{}{
 					"channelName": responseChannelName,
 				}),
@@ -132,7 +133,8 @@ func (e *cloudWatchExecutor) executeLiveLogQuery(ctx context.Context, queryConte
 	return response, nil
 }

-func (e *cloudWatchExecutor) sendLiveQueriesToChannel(queryContext *tsdb.TsdbQuery, responseChannel chan *tsdb.Response) {
+func (e *cloudWatchExecutor) sendLiveQueriesToChannel(queryContext plugins.DataQuery,
+	responseChannel chan plugins.DataResponse) {
 	defer close(responseChannel)

 	ctx, cancel := context.WithTimeout(context.Background(), 15*time.Minute)
@@ -142,7 +144,7 @@ func (e *cloudWatchExecutor) sendLiveQueriesToChannel(queryContext *tsdb.TsdbQue
 	for _, query := range queryContext.Queries {
 		query := query
 		eg.Go(func() error {
-			return e.startLiveQuery(ectx, responseChannel, query, queryContext.TimeRange)
+			return e.startLiveQuery(ectx, responseChannel, query, *queryContext.TimeRange)
 		})
 	}

@@ -200,7 +202,8 @@ func (e *cloudWatchExecutor) fetchConcurrentQueriesQuota(region string) int {
 		return defaultConcurrentQueries
 	}

-	if defaultConcurrentQueriesQuota != nil && defaultConcurrentQueriesQuota.Quota != nil && defaultConcurrentQueriesQuota.Quota.Value != nil {
+	if defaultConcurrentQueriesQuota != nil && defaultConcurrentQueriesQuota.Quota != nil &&
+		defaultConcurrentQueriesQuota.Quota.Value != nil {
 		return int(*defaultConcurrentQueriesQuota.Quota.Value)
 	}

@@ -208,7 +211,8 @@ func (e *cloudWatchExecutor) fetchConcurrentQueriesQuota(region string) int {
 	return defaultConcurrentQueries
 }

-func (e *cloudWatchExecutor) startLiveQuery(ctx context.Context, responseChannel chan *tsdb.Response, query *tsdb.Query, timeRange *tsdb.TimeRange) error {
+func (e *cloudWatchExecutor) startLiveQuery(ctx context.Context, responseChannel chan plugins.DataResponse,
+	query plugins.DataSubQuery, timeRange plugins.DataTimeRange) error {
 	defaultRegion := e.DataSource.JsonData.Get("defaultRegion").MustString()
 	parameters := query.Model
 	region := parameters.Get("region").MustString(defaultRegion)
@@ -250,8 +254,8 @@ func (e *cloudWatchExecutor) startLiveQuery(ctx context.Context, responseChannel
 		return retryer.FuncError, err
 	}

-	dataFrame.Name = query.RefId
-	dataFrame.RefID = query.RefId
+	dataFrame.Name = query.RefID
+	dataFrame.RefID = query.RefID
 	var dataFrames data.Frames

 	// When a query of the form "stats ... by ..." is made, we want to return
@@ -281,11 +285,11 @@ func (e *cloudWatchExecutor) startLiveQuery(ctx context.Context, responseChannel
 		dataFrames = data.Frames{dataFrame}
 	}

-	responseChannel <- &tsdb.Response{
-		Results: map[string]*tsdb.QueryResult{
-			query.RefId: {
-				RefId:      query.RefId,
-				Dataframes: tsdb.NewDecodedDataFrames(dataFrames),
+	responseChannel <- plugins.DataResponse{
+		Results: map[string]plugins.DataQueryResult{
+			query.RefID: {
+				RefID:      query.RefID,
+				Dataframes: plugins.NewDecodedDataFrames(dataFrames),
 			},
 		},
 	}
diff --git a/pkg/tsdb/cloudwatch/log_actions.go b/pkg/tsdb/cloudwatch/log_actions.go
index 617fc7da088..bb3635950ba 100644
--- a/pkg/tsdb/cloudwatch/log_actions.go
+++ b/pkg/tsdb/cloudwatch/log_actions.go
@@ -12,12 +12,13 @@ import (
 	"github.com/aws/aws-sdk-go/service/cloudwatchlogs/cloudwatchlogsiface"
 	"github.com/grafana/grafana-plugin-sdk-go/data"
 	"github.com/grafana/grafana/pkg/components/simplejson"
-	"github.com/grafana/grafana/pkg/tsdb"
+	"github.com/grafana/grafana/pkg/plugins"
 	"golang.org/x/sync/errgroup"
 )

-func (e *cloudWatchExecutor) executeLogActions(ctx context.Context, queryContext *tsdb.TsdbQuery) (*tsdb.Response, error) {
-	resultChan := make(chan *tsdb.QueryResult, len(queryContext.Queries))
+func (e *cloudWatchExecutor) executeLogActions(ctx context.Context,
+	queryContext plugins.DataQuery) (plugins.DataResponse, error) {
+	resultChan := make(chan plugins.DataQueryResult, len(queryContext.Queries))
 	eg, ectx := errgroup.WithContext(ctx)

 	for _, query := range queryContext.Queries {
@@ -42,7 +43,10 @@ func (e *cloudWatchExecutor) executeLogActions(ctx context.Context, queryContext
 					return err
 				}

-				resultChan <- &tsdb.QueryResult{RefId: query.RefId, Dataframes: tsdb.NewDecodedDataFrames(groupedFrames)}
+				resultChan <- plugins.DataQueryResult{
+					RefID:      query.RefID,
+					Dataframes: plugins.NewDecodedDataFrames(groupedFrames),
+				}
 				return nil
 			}

@@ -54,30 +58,31 @@ func (e *cloudWatchExecutor) executeLogActions(ctx context.Context, queryContext
 				}
 			}

-			resultChan <- &tsdb.QueryResult{
-				RefId:      query.RefId,
-				Dataframes: tsdb.NewDecodedDataFrames(data.Frames{dataframe}),
+			resultChan <- plugins.DataQueryResult{
+				RefID:      query.RefID,
+				Dataframes: plugins.NewDecodedDataFrames(data.Frames{dataframe}),
 			}
 			return nil
 		})
 	}

 	if err := eg.Wait(); err != nil {
-		return nil, err
+		return plugins.DataResponse{}, err
 	}
 	close(resultChan)

-	response := &tsdb.Response{
-		Results: make(map[string]*tsdb.QueryResult),
+	response := plugins.DataResponse{
+		Results: make(map[string]plugins.DataQueryResult),
 	}
 	for result := range resultChan {
-		response.Results[result.RefId] = result
+		response.Results[result.RefID] = result
 	}

 	return response, nil
 }

-func (e *cloudWatchExecutor) executeLogAction(ctx context.Context, queryContext *tsdb.TsdbQuery, query *tsdb.Query) (*data.Frame, error) {
+func (e *cloudWatchExecutor) executeLogAction(ctx context.Context, queryContext plugins.DataQuery,
+	query plugins.DataSubQuery) (*data.Frame, error) {
 	parameters := query.Model
 	subType := query.Model.Get("subtype").MustString()
@@ -94,13 +99,13 @@ func (e *cloudWatchExecutor) executeLogAction(ctx context.Context, queryContext
 	case "DescribeLogGroups":
 		data, err = e.handleDescribeLogGroups(ctx, logsClient, parameters)
 	case "GetLogGroupFields":
-		data, err = e.handleGetLogGroupFields(ctx, logsClient, parameters, query.RefId)
+		data, err = e.handleGetLogGroupFields(ctx, logsClient, parameters, query.RefID)
 	case "StartQuery":
-		data, err = e.handleStartQuery(ctx, logsClient, parameters, queryContext.TimeRange, query.RefId)
+		data, err = e.handleStartQuery(ctx, logsClient, parameters, *queryContext.TimeRange, query.RefID)
 	case "StopQuery":
 		data, err = e.handleStopQuery(ctx, logsClient, parameters)
 	case "GetQueryResults":
-		data, err = e.handleGetQueryResults(ctx, logsClient, parameters, query.RefId)
+		data, err = e.handleGetQueryResults(ctx, logsClient, parameters, query.RefID)
 	case "GetLogEvents":
 		data, err = e.handleGetLogEvents(ctx, logsClient, parameters)
 	}
@@ -195,7 +200,7 @@ func (e *cloudWatchExecutor) handleDescribeLogGroups(ctx context.Context,
 }

 func (e *cloudWatchExecutor) executeStartQuery(ctx context.Context, logsClient cloudwatchlogsiface.CloudWatchLogsAPI,
-	parameters *simplejson.Json, timeRange *tsdb.TimeRange) (*cloudwatchlogs.StartQueryOutput, error) {
+	parameters *simplejson.Json, timeRange plugins.DataTimeRange) (*cloudwatchlogs.StartQueryOutput, error) {
 	startTime, err := timeRange.ParseFrom()
 	if err != nil {
 		return nil, err
@@ -214,7 +219,8 @@ func (e *cloudWatchExecutor) executeStartQuery(ctx context.Context, logsClient c
 	// so that a row's context can be retrieved later if necessary.
 	// The usage of ltrim around the @log/@logStream fields is a necessary workaround, as without it,
 	// CloudWatch wouldn't consider a query using a non-alised @log/@logStream valid.
- modifiedQueryString := "fields @timestamp,ltrim(@log) as " + logIdentifierInternal + ",ltrim(@logStream) as " + logStreamIdentifierInternal + "|" + parameters.Get("queryString").MustString("") + modifiedQueryString := "fields @timestamp,ltrim(@log) as " + logIdentifierInternal + ",ltrim(@logStream) as " + + logStreamIdentifierInternal + "|" + parameters.Get("queryString").MustString("") startQueryInput := &cloudwatchlogs.StartQueryInput{ StartTime: aws.Int64(startTime.Unix()), @@ -231,7 +237,7 @@ func (e *cloudWatchExecutor) executeStartQuery(ctx context.Context, logsClient c } func (e *cloudWatchExecutor) handleStartQuery(ctx context.Context, logsClient cloudwatchlogsiface.CloudWatchLogsAPI, - parameters *simplejson.Json, timeRange *tsdb.TimeRange, refID string) (*data.Frame, error) { + parameters *simplejson.Json, timeRange plugins.DataTimeRange, refID string) (*data.Frame, error) { startQueryResponse, err := e.executeStartQuery(ctx, logsClient, parameters, timeRange) if err != nil { return nil, err diff --git a/pkg/tsdb/cloudwatch/log_actions_test.go b/pkg/tsdb/cloudwatch/log_actions_test.go index 58290e11d4b..3e7d24c042d 100644 --- a/pkg/tsdb/cloudwatch/log_actions_test.go +++ b/pkg/tsdb/cloudwatch/log_actions_test.go @@ -12,7 +12,7 @@ import ( "github.com/aws/aws-sdk-go/service/cloudwatchlogs/cloudwatchlogsiface" "github.com/grafana/grafana-plugin-sdk-go/data" "github.com/grafana/grafana/pkg/components/simplejson" - "github.com/grafana/grafana/pkg/tsdb" + "github.com/grafana/grafana/pkg/plugins" "github.com/stretchr/testify/assert" "github.com/stretchr/testify/require" @@ -48,8 +48,8 @@ func TestQuery_DescribeLogGroups(t *testing.T) { } executor := newExecutor(nil) - resp, err := executor.Query(context.Background(), fakeDataSource(), &tsdb.TsdbQuery{ - Queries: []*tsdb.Query{ + resp, err := executor.DataQuery(context.Background(), fakeDataSource(), plugins.DataQuery{ + Queries: []plugins.DataSubQuery{ { Model: simplejson.NewFromAny(map[string]interface{}{ "type": "logAction", @@ -62,10 +62,10 @@ func TestQuery_DescribeLogGroups(t *testing.T) { require.NoError(t, err) require.NotNil(t, resp) - assert.Equal(t, &tsdb.Response{ - Results: map[string]*tsdb.QueryResult{ + assert.Equal(t, plugins.DataResponse{ + Results: map[string]plugins.DataQueryResult{ "": { - Dataframes: tsdb.NewDecodedDataFrames(data.Frames{ + Dataframes: plugins.NewDecodedDataFrames(data.Frames{ &data.Frame{ Name: "logGroups", Fields: []*data.Field{ @@ -101,8 +101,8 @@ func TestQuery_DescribeLogGroups(t *testing.T) { } executor := newExecutor(nil) - resp, err := executor.Query(context.Background(), fakeDataSource(), &tsdb.TsdbQuery{ - Queries: []*tsdb.Query{ + resp, err := executor.DataQuery(context.Background(), fakeDataSource(), plugins.DataQuery{ + Queries: []plugins.DataSubQuery{ { Model: simplejson.NewFromAny(map[string]interface{}{ "type": "logAction", @@ -115,10 +115,10 @@ func TestQuery_DescribeLogGroups(t *testing.T) { require.NoError(t, err) require.NotNil(t, resp) - assert.Equal(t, &tsdb.Response{ - Results: map[string]*tsdb.QueryResult{ + assert.Equal(t, plugins.DataResponse{ + Results: map[string]plugins.DataQueryResult{ "": { - Dataframes: tsdb.NewDecodedDataFrames(data.Frames{ + Dataframes: plugins.NewDecodedDataFrames(data.Frames{ &data.Frame{ Name: "logGroups", Fields: []*data.Field{ @@ -171,10 +171,10 @@ func TestQuery_GetLogGroupFields(t *testing.T) { const refID = "A" executor := newExecutor(nil) - resp, err := executor.Query(context.Background(), fakeDataSource(), &tsdb.TsdbQuery{ - Queries: 
[]*tsdb.Query{ + resp, err := executor.DataQuery(context.Background(), fakeDataSource(), plugins.DataQuery{ + Queries: []plugins.DataSubQuery{ { - RefId: refID, + RefID: refID, Model: simplejson.NewFromAny(map[string]interface{}{ "type": "logAction", "subtype": "GetLogGroupFields", @@ -202,11 +202,11 @@ func TestQuery_GetLogGroupFields(t *testing.T) { }, } expFrame.RefID = refID - assert.Equal(t, &tsdb.Response{ - Results: map[string]*tsdb.QueryResult{ + assert.Equal(t, plugins.DataResponse{ + Results: map[string]plugins.DataQueryResult{ refID: { - Dataframes: tsdb.NewDecodedDataFrames(data.Frames{expFrame}), - RefId: refID, + Dataframes: plugins.NewDecodedDataFrames(data.Frames{expFrame}), + RefID: refID, }, }, }, resp) @@ -244,15 +244,15 @@ func TestQuery_StartQuery(t *testing.T) { }, } - timeRange := &tsdb.TimeRange{ + timeRange := plugins.DataTimeRange{ From: "1584873443000", To: "1584700643000", } executor := newExecutor(nil) - _, err := executor.Query(context.Background(), fakeDataSource(), &tsdb.TsdbQuery{ - TimeRange: timeRange, - Queries: []*tsdb.Query{ + _, err := executor.DataQuery(context.Background(), fakeDataSource(), plugins.DataQuery{ + TimeRange: &timeRange, + Queries: []plugins.DataSubQuery{ { Model: simplejson.NewFromAny(map[string]interface{}{ "type": "logAction", @@ -290,17 +290,17 @@ func TestQuery_StartQuery(t *testing.T) { }, } - timeRange := &tsdb.TimeRange{ + timeRange := plugins.DataTimeRange{ From: "1584700643000", To: "1584873443000", } executor := newExecutor(nil) - resp, err := executor.Query(context.Background(), fakeDataSource(), &tsdb.TsdbQuery{ - TimeRange: timeRange, - Queries: []*tsdb.Query{ + resp, err := executor.DataQuery(context.Background(), fakeDataSource(), plugins.DataQuery{ + TimeRange: &timeRange, + Queries: []plugins.DataSubQuery{ { - RefId: refID, + RefID: refID, Model: simplejson.NewFromAny(map[string]interface{}{ "type": "logAction", "subtype": "StartQuery", @@ -324,11 +324,11 @@ func TestQuery_StartQuery(t *testing.T) { }, PreferredVisualization: "logs", } - assert.Equal(t, &tsdb.Response{ - Results: map[string]*tsdb.QueryResult{ + assert.Equal(t, plugins.DataResponse{ + Results: map[string]plugins.DataQueryResult{ refID: { - Dataframes: tsdb.NewDecodedDataFrames(data.Frames{expFrame}), - RefId: refID, + Dataframes: plugins.NewDecodedDataFrames(data.Frames{expFrame}), + RefID: refID, }, }, }, resp) @@ -366,15 +366,15 @@ func TestQuery_StopQuery(t *testing.T) { }, } - timeRange := &tsdb.TimeRange{ + timeRange := plugins.DataTimeRange{ From: "1584873443000", To: "1584700643000", } executor := newExecutor(nil) - resp, err := executor.Query(context.Background(), fakeDataSource(), &tsdb.TsdbQuery{ - TimeRange: timeRange, - Queries: []*tsdb.Query{ + resp, err := executor.DataQuery(context.Background(), fakeDataSource(), plugins.DataQuery{ + TimeRange: &timeRange, + Queries: []plugins.DataSubQuery{ { Model: simplejson.NewFromAny(map[string]interface{}{ "type": "logAction", @@ -395,10 +395,10 @@ func TestQuery_StopQuery(t *testing.T) { PreferredVisualization: "logs", }, } - assert.Equal(t, &tsdb.Response{ - Results: map[string]*tsdb.QueryResult{ + assert.Equal(t, plugins.DataResponse{ + Results: map[string]plugins.DataQueryResult{ "": { - Dataframes: tsdb.NewDecodedDataFrames(data.Frames{expFrame}), + Dataframes: plugins.NewDecodedDataFrames(data.Frames{expFrame}), }, }, }, resp) @@ -459,10 +459,10 @@ func TestQuery_GetQueryResults(t *testing.T) { } executor := newExecutor(nil) - resp, err := executor.Query(context.Background(), fakeDataSource(), 
&tsdb.TsdbQuery{ - Queries: []*tsdb.Query{ + resp, err := executor.DataQuery(context.Background(), fakeDataSource(), plugins.DataQuery{ + Queries: []plugins.DataSubQuery{ { - RefId: refID, + RefID: refID, Model: simplejson.NewFromAny(map[string]interface{}{ "type": "logAction", "subtype": "GetQueryResults", @@ -507,11 +507,11 @@ func TestQuery_GetQueryResults(t *testing.T) { PreferredVisualization: "logs", } - assert.Equal(t, &tsdb.Response{ - Results: map[string]*tsdb.QueryResult{ + assert.Equal(t, plugins.DataResponse{ + Results: map[string]plugins.DataQueryResult{ refID: { - RefId: refID, - Dataframes: tsdb.NewDecodedDataFrames(data.Frames{expFrame}), + RefID: refID, + Dataframes: plugins.NewDecodedDataFrames(data.Frames{expFrame}), }, }, }, resp) diff --git a/pkg/tsdb/cloudwatch/logs.go b/pkg/tsdb/cloudwatch/logs.go index 960f1fc8394..b8314cb8135 100644 --- a/pkg/tsdb/cloudwatch/logs.go +++ b/pkg/tsdb/cloudwatch/logs.go @@ -4,8 +4,8 @@ import ( "fmt" "sync" + "github.com/grafana/grafana/pkg/plugins" "github.com/grafana/grafana/pkg/registry" - "github.com/grafana/grafana/pkg/tsdb" ) func init() { @@ -15,19 +15,19 @@ func init() { // LogsService provides methods for querying CloudWatch Logs. type LogsService struct { channelMu sync.Mutex - responseChannels map[string]chan *tsdb.Response + responseChannels map[string]chan plugins.DataResponse queues map[string](chan bool) queueLock sync.Mutex } // Init is called by the DI framework to initialize the instance. func (s *LogsService) Init() error { - s.responseChannels = make(map[string]chan *tsdb.Response) + s.responseChannels = make(map[string]chan plugins.DataResponse) s.queues = make(map[string](chan bool)) return nil } -func (s *LogsService) AddResponseChannel(name string, channel chan *tsdb.Response) error { +func (s *LogsService) AddResponseChannel(name string, channel chan plugins.DataResponse) error { s.channelMu.Lock() defer s.channelMu.Unlock() @@ -39,7 +39,7 @@ func (s *LogsService) AddResponseChannel(name string, channel chan *tsdb.Respons return nil } -func (s *LogsService) GetResponseChannel(name string) (chan *tsdb.Response, error) { +func (s *LogsService) GetResponseChannel(name string) (chan plugins.DataResponse, error) { s.channelMu.Lock() defer s.channelMu.Unlock() diff --git a/pkg/tsdb/cloudwatch/metric_find_query.go b/pkg/tsdb/cloudwatch/metric_find_query.go index d4503d95695..dd1a14653d5 100644 --- a/pkg/tsdb/cloudwatch/metric_find_query.go +++ b/pkg/tsdb/cloudwatch/metric_find_query.go @@ -17,7 +17,7 @@ import ( "github.com/aws/aws-sdk-go/service/resourcegroupstaggingapi" "github.com/grafana/grafana/pkg/components/simplejson" "github.com/grafana/grafana/pkg/infra/metrics" - "github.com/grafana/grafana/pkg/tsdb" + "github.com/grafana/grafana/pkg/plugins" "github.com/grafana/grafana/pkg/util/errutil" ) @@ -241,7 +241,8 @@ var dimensionsMap = map[string][]string{ var regionCache sync.Map -func (e *cloudWatchExecutor) executeMetricFindQuery(ctx context.Context, queryContext *tsdb.TsdbQuery) (*tsdb.Response, error) { +func (e *cloudWatchExecutor) executeMetricFindQuery(ctx context.Context, queryContext plugins.DataQuery) ( + plugins.DataResponse, error) { firstQuery := queryContext.Queries[0] parameters := firstQuery.Model @@ -267,22 +268,22 @@ func (e *cloudWatchExecutor) executeMetricFindQuery(ctx context.Context, queryCo data, err = e.handleGetResourceArns(ctx, parameters, queryContext) } if err != nil { - return nil, err + return plugins.DataResponse{}, err } - queryResult := &tsdb.QueryResult{Meta: simplejson.New(), 
RefId: firstQuery.RefId} - transformToTable(data, queryResult) - result := &tsdb.Response{ - Results: map[string]*tsdb.QueryResult{ - firstQuery.RefId: queryResult, + queryResult := plugins.DataQueryResult{Meta: simplejson.New(), RefID: firstQuery.RefID} + transformToTable(data, &queryResult) + result := plugins.DataResponse{ + Results: map[string]plugins.DataQueryResult{ + firstQuery.RefID: queryResult, }, } return result, nil } -func transformToTable(data []suggestData, result *tsdb.QueryResult) { - table := &tsdb.Table{ - Columns: []tsdb.TableColumn{ +func transformToTable(data []suggestData, result *plugins.DataQueryResult) { + table := plugins.DataTable{ + Columns: []plugins.DataTableColumn{ { Text: "text", }, @@ -290,7 +291,7 @@ func transformToTable(data []suggestData, result *tsdb.QueryResult) { Text: "value", }, }, - Rows: make([]tsdb.RowValues, 0), + Rows: make([]plugins.DataRowValues, 0), } for _, r := range data { @@ -321,7 +322,7 @@ func parseMultiSelectValue(input string) []string { // Whenever this list is updated, the frontend list should also be updated. // Please update the region list in public/app/plugins/datasource/cloudwatch/partials/config.html func (e *cloudWatchExecutor) handleGetRegions(ctx context.Context, parameters *simplejson.Json, - queryContext *tsdb.TsdbQuery) ([]suggestData, error) { + queryContext plugins.DataQuery) ([]suggestData, error) { dsInfo := e.getDSInfo(defaultRegion) profile := dsInfo.Profile if cache, ok := regionCache.Load(profile); ok { @@ -366,7 +367,7 @@ func (e *cloudWatchExecutor) handleGetRegions(ctx context.Context, parameters *s return result, nil } -func (e *cloudWatchExecutor) handleGetNamespaces(ctx context.Context, parameters *simplejson.Json, queryContext *tsdb.TsdbQuery) ([]suggestData, error) { +func (e *cloudWatchExecutor) handleGetNamespaces(ctx context.Context, parameters *simplejson.Json, queryContext plugins.DataQuery) ([]suggestData, error) { keys := []string{} for key := range metricsMap { keys = append(keys, key) @@ -385,7 +386,7 @@ func (e *cloudWatchExecutor) handleGetNamespaces(ctx context.Context, parameters return result, nil } -func (e *cloudWatchExecutor) handleGetMetrics(ctx context.Context, parameters *simplejson.Json, queryContext *tsdb.TsdbQuery) ([]suggestData, error) { +func (e *cloudWatchExecutor) handleGetMetrics(ctx context.Context, parameters *simplejson.Json, queryContext plugins.DataQuery) ([]suggestData, error) { region := parameters.Get("region").MustString() namespace := parameters.Get("namespace").MustString() @@ -411,7 +412,7 @@ func (e *cloudWatchExecutor) handleGetMetrics(ctx context.Context, parameters *s return result, nil } -func (e *cloudWatchExecutor) handleGetDimensions(ctx context.Context, parameters *simplejson.Json, queryContext *tsdb.TsdbQuery) ([]suggestData, error) { +func (e *cloudWatchExecutor) handleGetDimensions(ctx context.Context, parameters *simplejson.Json, queryContext plugins.DataQuery) ([]suggestData, error) { region := parameters.Get("region").MustString() namespace := parameters.Get("namespace").MustString() @@ -437,7 +438,7 @@ func (e *cloudWatchExecutor) handleGetDimensions(ctx context.Context, parameters return result, nil } -func (e *cloudWatchExecutor) handleGetDimensionValues(ctx context.Context, parameters *simplejson.Json, queryContext *tsdb.TsdbQuery) ([]suggestData, error) { +func (e *cloudWatchExecutor) handleGetDimensionValues(ctx context.Context, parameters *simplejson.Json, queryContext plugins.DataQuery) ([]suggestData, error) { region := 
parameters.Get("region").MustString() namespace := parameters.Get("namespace").MustString() metricName := parameters.Get("metricName").MustString() @@ -489,7 +490,7 @@ func (e *cloudWatchExecutor) handleGetDimensionValues(ctx context.Context, param } func (e *cloudWatchExecutor) handleGetEbsVolumeIds(ctx context.Context, parameters *simplejson.Json, - queryContext *tsdb.TsdbQuery) ([]suggestData, error) { + queryContext plugins.DataQuery) ([]suggestData, error) { region := parameters.Get("region").MustString() instanceId := parameters.Get("instanceId").MustString() @@ -512,7 +513,7 @@ func (e *cloudWatchExecutor) handleGetEbsVolumeIds(ctx context.Context, paramete } func (e *cloudWatchExecutor) handleGetEc2InstanceAttribute(ctx context.Context, parameters *simplejson.Json, - queryContext *tsdb.TsdbQuery) ([]suggestData, error) { + queryContext plugins.DataQuery) ([]suggestData, error) { region := parameters.Get("region").MustString() attributeName := parameters.Get("attributeName").MustString() filterJson := parameters.Get("filters").MustMap() @@ -592,7 +593,7 @@ func (e *cloudWatchExecutor) handleGetEc2InstanceAttribute(ctx context.Context, } func (e *cloudWatchExecutor) handleGetResourceArns(ctx context.Context, parameters *simplejson.Json, - queryContext *tsdb.TsdbQuery) ([]suggestData, error) { + queryContext plugins.DataQuery) ([]suggestData, error) { region := parameters.Get("region").MustString() resourceType := parameters.Get("resourceType").MustString() filterJson := parameters.Get("tags").MustMap() diff --git a/pkg/tsdb/cloudwatch/metric_find_query_test.go b/pkg/tsdb/cloudwatch/metric_find_query_test.go index 0a710b8a419..c2311e86bc5 100644 --- a/pkg/tsdb/cloudwatch/metric_find_query_test.go +++ b/pkg/tsdb/cloudwatch/metric_find_query_test.go @@ -14,7 +14,7 @@ import ( "github.com/aws/aws-sdk-go/service/resourcegroupstaggingapi" "github.com/aws/aws-sdk-go/service/resourcegroupstaggingapi/resourcegroupstaggingapiiface" "github.com/grafana/grafana/pkg/components/simplejson" - "github.com/grafana/grafana/pkg/tsdb" + "github.com/grafana/grafana/pkg/plugins" "github.com/stretchr/testify/assert" "github.com/stretchr/testify/require" ) @@ -45,8 +45,8 @@ func TestQuery_Metrics(t *testing.T) { }, } executor := newExecutor(nil) - resp, err := executor.Query(context.Background(), fakeDataSource(), &tsdb.TsdbQuery{ - Queries: []*tsdb.Query{ + resp, err := executor.DataQuery(context.Background(), fakeDataSource(), plugins.DataQuery{ + Queries: []plugins.DataSubQuery{ { Model: simplejson.NewFromAny(map[string]interface{}{ "type": "metricFindQuery", @@ -59,15 +59,15 @@ func TestQuery_Metrics(t *testing.T) { }) require.NoError(t, err) - assert.Equal(t, &tsdb.Response{ - Results: map[string]*tsdb.QueryResult{ + assert.Equal(t, plugins.DataResponse{ + Results: map[string]plugins.DataQueryResult{ "": { Meta: simplejson.NewFromAny(map[string]interface{}{ "rowCount": 1, }), - Tables: []*tsdb.Table{ + Tables: []plugins.DataTable{ { - Columns: []tsdb.TableColumn{ + Columns: []plugins.DataTableColumn{ { Text: "text", }, @@ -75,7 +75,7 @@ func TestQuery_Metrics(t *testing.T) { Text: "value", }, }, - Rows: []tsdb.RowValues{ + Rows: []plugins.DataRowValues{ { "Test_MetricName", "Test_MetricName", @@ -102,8 +102,8 @@ func TestQuery_Metrics(t *testing.T) { }, } executor := newExecutor(nil) - resp, err := executor.Query(context.Background(), fakeDataSource(), &tsdb.TsdbQuery{ - Queries: []*tsdb.Query{ + resp, err := executor.DataQuery(context.Background(), fakeDataSource(), plugins.DataQuery{ + Queries: 
[]plugins.DataSubQuery{ { Model: simplejson.NewFromAny(map[string]interface{}{ "type": "metricFindQuery", @@ -116,15 +116,15 @@ func TestQuery_Metrics(t *testing.T) { }) require.NoError(t, err) - assert.Equal(t, &tsdb.Response{ - Results: map[string]*tsdb.QueryResult{ + assert.Equal(t, plugins.DataResponse{ + Results: map[string]plugins.DataQueryResult{ "": { Meta: simplejson.NewFromAny(map[string]interface{}{ "rowCount": 1, }), - Tables: []*tsdb.Table{ + Tables: []plugins.DataTable{ { - Columns: []tsdb.TableColumn{ + Columns: []plugins.DataTableColumn{ { Text: "text", }, @@ -132,7 +132,7 @@ func TestQuery_Metrics(t *testing.T) { Text: "value", }, }, - Rows: []tsdb.RowValues{ + Rows: []plugins.DataRowValues{ { "Test_DimensionName", "Test_DimensionName", @@ -164,8 +164,8 @@ func TestQuery_Regions(t *testing.T) { regions: []string{regionName}, } executor := newExecutor(nil) - resp, err := executor.Query(context.Background(), fakeDataSource(), &tsdb.TsdbQuery{ - Queries: []*tsdb.Query{ + resp, err := executor.DataQuery(context.Background(), fakeDataSource(), plugins.DataQuery{ + Queries: []plugins.DataSubQuery{ { Model: simplejson.NewFromAny(map[string]interface{}{ "type": "metricFindQuery", @@ -178,7 +178,7 @@ func TestQuery_Regions(t *testing.T) { }) require.NoError(t, err) - rows := []tsdb.RowValues{} + rows := []plugins.DataRowValues{} for _, region := range knownRegions { rows = append(rows, []interface{}{ region, @@ -189,15 +189,15 @@ func TestQuery_Regions(t *testing.T) { regionName, regionName, }) - assert.Equal(t, &tsdb.Response{ - Results: map[string]*tsdb.QueryResult{ + assert.Equal(t, plugins.DataResponse{ + Results: map[string]plugins.DataQueryResult{ "": { Meta: simplejson.NewFromAny(map[string]interface{}{ "rowCount": len(knownRegions) + 1, }), - Tables: []*tsdb.Table{ + Tables: []plugins.DataTable{ { - Columns: []tsdb.TableColumn{ + Columns: []plugins.DataTableColumn{ { Text: "text", }, @@ -246,8 +246,8 @@ func TestQuery_InstanceAttributes(t *testing.T) { }, } executor := newExecutor(nil) - resp, err := executor.Query(context.Background(), fakeDataSource(), &tsdb.TsdbQuery{ - Queries: []*tsdb.Query{ + resp, err := executor.DataQuery(context.Background(), fakeDataSource(), plugins.DataQuery{ + Queries: []plugins.DataSubQuery{ { Model: simplejson.NewFromAny(map[string]interface{}{ "type": "metricFindQuery", @@ -263,15 +263,15 @@ func TestQuery_InstanceAttributes(t *testing.T) { }) require.NoError(t, err) - assert.Equal(t, &tsdb.Response{ - Results: map[string]*tsdb.QueryResult{ + assert.Equal(t, plugins.DataResponse{ + Results: map[string]plugins.DataQueryResult{ "": { Meta: simplejson.NewFromAny(map[string]interface{}{ "rowCount": 1, }), - Tables: []*tsdb.Table{ + Tables: []plugins.DataTable{ { - Columns: []tsdb.TableColumn{ + Columns: []plugins.DataTableColumn{ { Text: "text", }, @@ -279,7 +279,7 @@ func TestQuery_InstanceAttributes(t *testing.T) { Text: "value", }, }, - Rows: []tsdb.RowValues{ + Rows: []plugins.DataRowValues{ { instanceID, instanceID, @@ -349,8 +349,8 @@ func TestQuery_EBSVolumeIDs(t *testing.T) { }, } executor := newExecutor(nil) - resp, err := executor.Query(context.Background(), fakeDataSource(), &tsdb.TsdbQuery{ - Queries: []*tsdb.Query{ + resp, err := executor.DataQuery(context.Background(), fakeDataSource(), plugins.DataQuery{ + Queries: []plugins.DataSubQuery{ { Model: simplejson.NewFromAny(map[string]interface{}{ "type": "metricFindQuery", @@ -363,15 +363,15 @@ func TestQuery_EBSVolumeIDs(t *testing.T) { }) require.NoError(t, err) - assert.Equal(t, 
&tsdb.Response{ - Results: map[string]*tsdb.QueryResult{ + assert.Equal(t, plugins.DataResponse{ + Results: map[string]plugins.DataQueryResult{ "": { Meta: simplejson.NewFromAny(map[string]interface{}{ "rowCount": 6, }), - Tables: []*tsdb.Table{ + Tables: []plugins.DataTable{ { - Columns: []tsdb.TableColumn{ + Columns: []plugins.DataTableColumn{ { Text: "text", }, @@ -379,7 +379,7 @@ func TestQuery_EBSVolumeIDs(t *testing.T) { Text: "value", }, }, - Rows: []tsdb.RowValues{ + Rows: []plugins.DataRowValues{ { "vol-1-1", "vol-1-1", @@ -449,8 +449,8 @@ func TestQuery_ResourceARNs(t *testing.T) { }, } executor := newExecutor(nil) - resp, err := executor.Query(context.Background(), fakeDataSource(), &tsdb.TsdbQuery{ - Queries: []*tsdb.Query{ + resp, err := executor.DataQuery(context.Background(), fakeDataSource(), plugins.DataQuery{ + Queries: []plugins.DataSubQuery{ { Model: simplejson.NewFromAny(map[string]interface{}{ "type": "metricFindQuery", @@ -466,15 +466,15 @@ func TestQuery_ResourceARNs(t *testing.T) { }) require.NoError(t, err) - assert.Equal(t, &tsdb.Response{ - Results: map[string]*tsdb.QueryResult{ + assert.Equal(t, plugins.DataResponse{ + Results: map[string]plugins.DataQueryResult{ "": { Meta: simplejson.NewFromAny(map[string]interface{}{ "rowCount": 2, }), - Tables: []*tsdb.Table{ + Tables: []plugins.DataTable{ { - Columns: []tsdb.TableColumn{ + Columns: []plugins.DataTableColumn{ { Text: "text", }, @@ -482,7 +482,7 @@ func TestQuery_ResourceARNs(t *testing.T) { Text: "value", }, }, - Rows: []tsdb.RowValues{ + Rows: []plugins.DataRowValues{ { "arn:aws:ec2:us-east-1:123456789012:instance/i-12345678901234567", "arn:aws:ec2:us-east-1:123456789012:instance/i-12345678901234567", diff --git a/pkg/tsdb/cloudwatch/query_transformer.go b/pkg/tsdb/cloudwatch/query_transformer.go index 87511118615..493a6568f17 100644 --- a/pkg/tsdb/cloudwatch/query_transformer.go +++ b/pkg/tsdb/cloudwatch/query_transformer.go @@ -9,7 +9,7 @@ import ( "time" "github.com/grafana/grafana-plugin-sdk-go/data" - "github.com/grafana/grafana/pkg/tsdb" + "github.com/grafana/grafana/pkg/plugins" ) // returns a map of queries with query id as key. 
In the case a request query @@ -55,7 +55,8 @@ func (e *cloudWatchExecutor) transformRequestQueriesToCloudWatchQueries(requestQ return cloudwatchQueries, nil } -func (e *cloudWatchExecutor) transformQueryResponsesToQueryResult(cloudwatchResponses []*cloudwatchResponse, requestQueries []*requestQuery, startTime time.Time, endTime time.Time) (map[string]*tsdb.QueryResult, error) { +func (e *cloudWatchExecutor) transformQueryResponsesToQueryResult(cloudwatchResponses []*cloudwatchResponse, + requestQueries []*requestQuery, startTime time.Time, endTime time.Time) (map[string]plugins.DataQueryResult, error) { responsesByRefID := make(map[string][]*cloudwatchResponse) refIDs := sort.StringSlice{} for _, res := range cloudwatchResponses { @@ -65,12 +66,13 @@ func (e *cloudWatchExecutor) transformQueryResponsesToQueryResult(cloudwatchResp // Ensure stable results refIDs.Sort() - results := make(map[string]*tsdb.QueryResult) + results := make(map[string]plugins.DataQueryResult) for _, refID := range refIDs { responses := responsesByRefID[refID] - queryResult := tsdb.NewQueryResult() - queryResult.RefId = refID - queryResult.Series = tsdb.TimeSeriesSlice{} + queryResult := plugins.DataQueryResult{ + RefID: refID, + Series: plugins.DataTimeSeriesSlice{}, + } frames := make(data.Frames, 0, len(responses)) requestExceededMaxLimit := false @@ -133,15 +135,17 @@ func (e *cloudWatchExecutor) transformQueryResponsesToQueryResult(cloudwatchResp frame.Fields[1].Config.Links = createDataLinks(link) } - queryResult.Dataframes = tsdb.NewDecodedDataFrames(frames) + queryResult.Dataframes = plugins.NewDecodedDataFrames(frames) results[refID] = queryResult } return results, nil } -// buildDeepLink generates a deep link from Grafana to the CloudWatch console. The link params are based on metric(s) for a given query row in the Query Editor. -func buildDeepLink(refID string, requestQueries []*requestQuery, executedQueries []executedQuery, startTime time.Time, endTime time.Time) (string, error) { +// buildDeepLink generates a deep link from Grafana to the CloudWatch console. The link params are based on +// metric(s) for a given query row in the Query Editor. +func buildDeepLink(refID string, requestQueries []*requestQuery, executedQueries []executedQuery, startTime time.Time, + endTime time.Time) (string, error) { if isMathExpression(executedQueries) { return "", nil } diff --git a/pkg/tsdb/cloudwatch/request_parser.go b/pkg/tsdb/cloudwatch/request_parser.go index ab8980fb306..fc66b3ca3b1 100644 --- a/pkg/tsdb/cloudwatch/request_parser.go +++ b/pkg/tsdb/cloudwatch/request_parser.go @@ -11,11 +11,12 @@ import ( "github.com/aws/aws-sdk-go/aws" "github.com/grafana/grafana/pkg/components/simplejson" - "github.com/grafana/grafana/pkg/tsdb" + "github.com/grafana/grafana/pkg/plugins" ) // Parses the json queries and returns a requestQuery.
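Aside on the transformer hunks above: the "Ensure stable results" sort is needed because Go deliberately randomizes map iteration order, so the refID keys are collected and sorted before each refID's responses are processed. A minimal, self-contained sketch of the same pattern, using a stand-in type rather than the real cloudwatchResponse/plugins.DataQueryResult:

package main

import (
	"fmt"
	"sort"
)

// dataQueryResult stands in for plugins.DataQueryResult (a value type after
// this refactor, no longer a *tsdb.QueryResult pointer).
type dataQueryResult struct{ RefID string }

func buildResults(incoming []string) map[string]dataQueryResult {
	byRefID := make(map[string][]string)
	refIDs := sort.StringSlice{}
	for _, id := range incoming {
		if _, ok := byRefID[id]; !ok {
			refIDs = append(refIDs, id)
		}
		byRefID[id] = append(byRefID[id], id)
	}
	// Ensure stable results: process refIDs in deterministic order,
	// since ranging over byRefID directly would be randomized.
	refIDs.Sort()

	results := make(map[string]dataQueryResult)
	for _, refID := range refIDs {
		results[refID] = dataQueryResult{RefID: refID}
	}
	return results
}

func main() {
	fmt.Println(buildResults([]string{"B", "A", "B"}))
}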
The requestQuery has a 1 to 1 mapping to a query editor row -func (e *cloudWatchExecutor) parseQueries(queryContext *tsdb.TsdbQuery, startTime time.Time, endTime time.Time) (map[string][]*requestQuery, error) { +func (e *cloudWatchExecutor) parseQueries(queryContext plugins.DataQuery, startTime time.Time, + endTime time.Time) (map[string][]*requestQuery, error) { requestQueries := make(map[string][]*requestQuery) for i, query := range queryContext.Queries { queryType := query.Model.Get("type").MustString() @@ -23,7 +24,7 @@ func (e *cloudWatchExecutor) parseQueries(queryContext *tsdb.TsdbQuery, startTim continue } - refID := query.RefId + refID := query.RefID query, err := parseRequestQuery(queryContext.Queries[i].Model, refID, startTime, endTime) if err != nil { return nil, &queryError{err: err, RefID: refID} diff --git a/pkg/tsdb/cloudwatch/request_parser_test.go b/pkg/tsdb/cloudwatch/request_parser_test.go index 391a5aaeb0e..3f1be83be95 100644 --- a/pkg/tsdb/cloudwatch/request_parser_test.go +++ b/pkg/tsdb/cloudwatch/request_parser_test.go @@ -5,13 +5,13 @@ import ( "time" "github.com/grafana/grafana/pkg/components/simplejson" - "github.com/grafana/grafana/pkg/tsdb" + "github.com/grafana/grafana/pkg/plugins" "github.com/stretchr/testify/assert" "github.com/stretchr/testify/require" ) func TestRequestParser(t *testing.T) { - timeRange := tsdb.NewTimeRange("now-1h", "now-2h") + timeRange := plugins.NewDataTimeRange("now-1h", "now-2h") from, err := timeRange.ParseFrom() require.NoError(t, err) to, err := timeRange.ParseTo() @@ -102,7 +102,7 @@ func TestRequestParser(t *testing.T) { "hide": false, }) query.Set("period", "900") - timeRange := tsdb.NewTimeRange("now-1h", "now-2h") + timeRange := plugins.NewDataTimeRange("now-1h", "now-2h") from, err := timeRange.ParseFrom() require.NoError(t, err) to, err := timeRange.ParseTo() diff --git a/pkg/tsdb/cloudwatch/time_series_query.go b/pkg/tsdb/cloudwatch/time_series_query.go index bb29df431a4..80e93b7230f 100644 --- a/pkg/tsdb/cloudwatch/time_series_query.go +++ b/pkg/tsdb/cloudwatch/time_series_query.go @@ -5,37 +5,38 @@ import ( "fmt" "github.com/grafana/grafana/pkg/infra/log" - "github.com/grafana/grafana/pkg/tsdb" + "github.com/grafana/grafana/pkg/plugins" "github.com/grafana/grafana/pkg/util/errutil" "golang.org/x/sync/errgroup" ) -func (e *cloudWatchExecutor) executeTimeSeriesQuery(ctx context.Context, queryContext *tsdb.TsdbQuery) (*tsdb.Response, error) { +func (e *cloudWatchExecutor) executeTimeSeriesQuery(ctx context.Context, queryContext plugins.DataQuery) ( + plugins.DataResponse, error) { plog.Debug("Executing time series query") startTime, err := queryContext.TimeRange.ParseFrom() if err != nil { - return nil, errutil.Wrap("failed to parse start time", err) + return plugins.DataResponse{}, errutil.Wrap("failed to parse start time", err) } endTime, err := queryContext.TimeRange.ParseTo() if err != nil { - return nil, errutil.Wrap("failed to parse end time", err) + return plugins.DataResponse{}, errutil.Wrap("failed to parse end time", err) } if !startTime.Before(endTime) { - return nil, fmt.Errorf("invalid time range: start time must be before end time") + return plugins.DataResponse{}, fmt.Errorf("invalid time range: start time must be before end time") } requestQueriesByRegion, err := e.parseQueries(queryContext, startTime, endTime) if err != nil { - return nil, err + return plugins.DataResponse{}, err } if len(requestQueriesByRegion) == 0 { - return &tsdb.Response{ - Results: make(map[string]*tsdb.QueryResult), + return 
plugins.DataResponse{ + Results: make(map[string]plugins.DataQueryResult), }, nil } - resultChan := make(chan *tsdb.QueryResult, len(queryContext.Queries)) + resultChan := make(chan plugins.DataQueryResult, len(queryContext.Queries)) eg, ectx := errgroup.WithContext(ctx) for r, q := range requestQueriesByRegion { requestQueries := q @@ -45,7 +46,7 @@ func (e *cloudWatchExecutor) executeTimeSeriesQuery(ctx context.Context, queryCo if err := recover(); err != nil { plog.Error("Execute Get Metric Data Query Panic", "error", err, "stack", log.Stack(1)) if theErr, ok := err.(error); ok { - resultChan <- &tsdb.QueryResult{ + resultChan <- plugins.DataQueryResult{ Error: theErr, } } @@ -60,8 +61,8 @@ func (e *cloudWatchExecutor) executeTimeSeriesQuery(ctx context.Context, queryCo queries, err := e.transformRequestQueriesToCloudWatchQueries(requestQueries) if err != nil { for _, query := range requestQueries { - resultChan <- &tsdb.QueryResult{ - RefId: query.RefId, + resultChan <- plugins.DataQueryResult{ + RefID: query.RefId, Error: err, } } @@ -77,8 +78,8 @@ func (e *cloudWatchExecutor) executeTimeSeriesQuery(ctx context.Context, queryCo mdo, err := e.executeRequest(ectx, client, metricDataInput) if err != nil { for _, query := range requestQueries { - resultChan <- &tsdb.QueryResult{ - RefId: query.RefId, + resultChan <- plugins.DataQueryResult{ + RefID: query.RefId, Error: err, } } @@ -88,8 +89,8 @@ func (e *cloudWatchExecutor) executeTimeSeriesQuery(ctx context.Context, queryCo responses, err := e.parseResponse(mdo, queries) if err != nil { for _, query := range requestQueries { - resultChan <- &tsdb.QueryResult{ - RefId: query.RefId, + resultChan <- plugins.DataQueryResult{ + RefID: query.RefId, Error: err, } } @@ -100,8 +101,8 @@ func (e *cloudWatchExecutor) executeTimeSeriesQuery(ctx context.Context, queryCo res, err := e.transformQueryResponsesToQueryResult(cloudwatchResponses, requestQueries, startTime, endTime) if err != nil { for _, query := range requestQueries { - resultChan <- &tsdb.QueryResult{ - RefId: query.RefId, + resultChan <- plugins.DataQueryResult{ + RefID: query.RefId, Error: err, } } @@ -115,15 +116,15 @@ func (e *cloudWatchExecutor) executeTimeSeriesQuery(ctx context.Context, queryCo }) } if err := eg.Wait(); err != nil { - return nil, err + return plugins.DataResponse{}, err } close(resultChan) - results := &tsdb.Response{ - Results: make(map[string]*tsdb.QueryResult), + results := plugins.DataResponse{ + Results: make(map[string]plugins.DataQueryResult), } for result := range resultChan { - results.Results[result.RefId] = result + results.Results[result.RefID] = result } return results, nil } diff --git a/pkg/tsdb/cloudwatch/time_series_query_test.go b/pkg/tsdb/cloudwatch/time_series_query_test.go index 8afdf3bfa11..ec66ec8db19 100644 --- a/pkg/tsdb/cloudwatch/time_series_query_test.go +++ b/pkg/tsdb/cloudwatch/time_series_query_test.go @@ -4,7 +4,7 @@ import ( "context" "testing" - "github.com/grafana/grafana/pkg/tsdb" + "github.com/grafana/grafana/pkg/plugins" "github.com/stretchr/testify/assert" ) @@ -12,12 +12,16 @@ func TestTimeSeriesQuery(t *testing.T) { executor := newExecutor(nil) t.Run("End time before start time should result in error", func(t *testing.T) { - _, err := executor.executeTimeSeriesQuery(context.TODO(), &tsdb.TsdbQuery{TimeRange: tsdb.NewTimeRange("now-1h", "now-2h")}) + timeRange := plugins.NewDataTimeRange("now-1h", "now-2h") + _, err := executor.executeTimeSeriesQuery( + context.TODO(), plugins.DataQuery{TimeRange: &timeRange}) 
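Two conventions visible in the hunks above are worth calling out. First, plugins.NewDataTimeRange returns a value, so the tests bind it to a local and pass &timeRange; taking the address of the call expression directly would not compile in Go. Second, error paths now return the zero value plugins.DataResponse{} instead of a nil pointer. A small self-contained sketch of both, with stand-in types for illustration only:

package main

import (
	"errors"
	"fmt"
)

// Stand-ins for plugins.DataTimeRange / DataQuery / DataResponse; the real
// definitions live in pkg/plugins.
type dataTimeRange struct{ From, To string }
type dataQuery struct{ TimeRange *dataTimeRange }
type dataResponse struct{ Results map[string]string }

func newDataTimeRange(from, to string) dataTimeRange {
	return dataTimeRange{From: from, To: to}
}

// execute mimics the executeTimeSeriesQuery signature style: on failure it
// returns the zero value of the response struct, never nil.
func execute(q dataQuery) (dataResponse, error) {
	if q.TimeRange == nil {
		return dataResponse{}, errors.New("missing time range")
	}
	return dataResponse{Results: map[string]string{}}, nil
}

func main() {
	// Bind the value before taking its address; &newDataTimeRange(...)
	// is not addressable in Go.
	tr := newDataTimeRange("now-1h", "now")
	resp, err := execute(dataQuery{TimeRange: &tr})
	fmt.Println(resp, err)
}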
assert.EqualError(t, err, "invalid time range: start time must be before end time") }) t.Run("End time equals start time should result in error", func(t *testing.T) { - _, err := executor.executeTimeSeriesQuery(context.TODO(), &tsdb.TsdbQuery{TimeRange: tsdb.NewTimeRange("now-1h", "now-1h")}) + timeRange := plugins.NewDataTimeRange("now-1h", "now-1h") + _, err := executor.executeTimeSeriesQuery( + context.TODO(), plugins.DataQuery{TimeRange: &timeRange}) assert.EqualError(t, err, "invalid time range: start time must be before end time") }) } diff --git a/pkg/tsdb/elasticsearch/client/client.go b/pkg/tsdb/elasticsearch/client/client.go index ee746cf1a41..226bb5a0091 100644 --- a/pkg/tsdb/elasticsearch/client/client.go +++ b/pkg/tsdb/elasticsearch/client/client.go @@ -15,9 +15,10 @@ import ( "github.com/grafana/grafana/pkg/components/simplejson" "github.com/grafana/grafana/pkg/infra/log" - "github.com/grafana/grafana/pkg/tsdb" + "github.com/grafana/grafana/pkg/tsdb/interval" "github.com/grafana/grafana/pkg/models" + "github.com/grafana/grafana/pkg/plugins" "golang.org/x/net/context/ctxhttp" ) @@ -42,7 +43,7 @@ type Client interface { } // NewClient creates a new elasticsearch client -var NewClient = func(ctx context.Context, ds *models.DataSource, timeRange *tsdb.TimeRange) (Client, error) { +var NewClient = func(ctx context.Context, ds *models.DataSource, timeRange plugins.DataTimeRange) (Client, error) { version, err := ds.JsonData.Get("esVersion").Int() if err != nil { return nil, fmt.Errorf("elasticsearch version is required, err=%v", err) @@ -87,7 +88,7 @@ type baseClientImpl struct { version int timeField string indices []string - timeRange *tsdb.TimeRange + timeRange plugins.DataTimeRange debugEnabled bool } @@ -100,7 +101,7 @@ func (c *baseClientImpl) GetTimeField() string { } func (c *baseClientImpl) GetMinInterval(queryInterval string) (time.Duration, error) { - return tsdb.GetIntervalFrom(c.ds, simplejson.NewFromAny(map[string]interface{}{ + return interval.GetIntervalFrom(c.ds, simplejson.NewFromAny(map[string]interface{}{ "interval": queryInterval, }), 5*time.Second) } @@ -112,7 +113,7 @@ func (c *baseClientImpl) getSettings() *simplejson.Json { type multiRequest struct { header map[string]interface{} body interface{} - interval tsdb.Interval + interval interval.Interval } func (c *baseClientImpl) executeBatchRequest(uriPath, uriQuery string, requests []*multiRequest) (*response, error) { diff --git a/pkg/tsdb/elasticsearch/client/client_test.go b/pkg/tsdb/elasticsearch/client/client_test.go index a0800a86e73..e20e523e17a 100644 --- a/pkg/tsdb/elasticsearch/client/client_test.go +++ b/pkg/tsdb/elasticsearch/client/client_test.go @@ -12,7 +12,8 @@ import ( "github.com/grafana/grafana/pkg/components/simplejson" "github.com/grafana/grafana/pkg/models" - "github.com/grafana/grafana/pkg/tsdb" + "github.com/grafana/grafana/pkg/plugins" + "github.com/grafana/grafana/pkg/tsdb/interval" "github.com/stretchr/testify/assert" "github.com/stretchr/testify/require" ) @@ -23,7 +24,7 @@ func TestNewClient(t *testing.T) { JsonData: simplejson.NewFromAny(make(map[string]interface{})), } - _, err := NewClient(context.Background(), ds, nil) + _, err := NewClient(context.Background(), ds, plugins.DataTimeRange{}) require.Error(t, err) }) @@ -34,7 +35,7 @@ func TestNewClient(t *testing.T) { }), } - _, err := NewClient(context.Background(), ds, nil) + _, err := NewClient(context.Background(), ds, plugins.DataTimeRange{}) require.Error(t, err) }) @@ -46,7 +47,7 @@ func TestNewClient(t *testing.T) { }), 
} - _, err := NewClient(context.Background(), ds, nil) + _, err := NewClient(context.Background(), ds, plugins.DataTimeRange{}) require.Error(t, err) }) @@ -58,7 +59,7 @@ func TestNewClient(t *testing.T) { }), } - c, err := NewClient(context.Background(), ds, nil) + c, err := NewClient(context.Background(), ds, plugins.DataTimeRange{}) require.NoError(t, err) assert.Equal(t, 2, c.GetVersion()) }) @@ -71,7 +72,7 @@ func TestNewClient(t *testing.T) { }), } - c, err := NewClient(context.Background(), ds, nil) + c, err := NewClient(context.Background(), ds, plugins.DataTimeRange{}) require.NoError(t, err) assert.Equal(t, 5, c.GetVersion()) }) @@ -84,7 +85,7 @@ func TestNewClient(t *testing.T) { }), } - c, err := NewClient(context.Background(), ds, nil) + c, err := NewClient(context.Background(), ds, plugins.DataTimeRange{}) require.NoError(t, err) assert.Equal(t, 56, c.GetVersion()) }) @@ -97,7 +98,7 @@ func TestNewClient(t *testing.T) { }), } - c, err := NewClient(context.Background(), ds, nil) + c, err := NewClient(context.Background(), ds, plugins.DataTimeRange{}) require.NoError(t, err) assert.Equal(t, 60, c.GetVersion()) }) @@ -110,7 +111,7 @@ func TestNewClient(t *testing.T) { }), } - c, err := NewClient(context.Background(), ds, nil) + c, err := NewClient(context.Background(), ds, plugins.DataTimeRange{}) require.NoError(t, err) assert.Equal(t, 70, c.GetVersion()) }) @@ -329,7 +330,7 @@ func createMultisearchForTest(t *testing.T, c Client) (*MultiSearchRequest, erro t.Helper() msb := c.MultiSearch() - s := msb.Search(tsdb.Interval{Value: 15 * time.Second, Text: "15s"}) + s := msb.Search(interval.Interval{Value: 15 * time.Second, Text: "15s"}) s.Agg().DateHistogram("2", "@timestamp", func(a *DateHistogramAgg, ab AggBuilder) { a.Interval = "$__interval" @@ -376,7 +377,7 @@ func httpClientScenario(t *testing.T, desc string, ds *models.DataSource, fn sce to := time.Date(2018, 5, 15, 17, 55, 0, 0, time.UTC) fromStr := fmt.Sprintf("%d", from.UnixNano()/int64(time.Millisecond)) toStr := fmt.Sprintf("%d", to.UnixNano()/int64(time.Millisecond)) - timeRange := tsdb.NewTimeRange(fromStr, toStr) + timeRange := plugins.NewDataTimeRange(fromStr, toStr) c, err := NewClient(context.Background(), ds, timeRange) require.NoError(t, err) diff --git a/pkg/tsdb/elasticsearch/client/index_pattern.go b/pkg/tsdb/elasticsearch/client/index_pattern.go index 48c15442d30..658b2efae5c 100644 --- a/pkg/tsdb/elasticsearch/client/index_pattern.go +++ b/pkg/tsdb/elasticsearch/client/index_pattern.go @@ -6,7 +6,7 @@ import ( "strings" "time" - "github.com/grafana/grafana/pkg/tsdb" + "github.com/grafana/grafana/pkg/plugins" ) const ( @@ -19,7 +19,7 @@ const ( ) type indexPattern interface { - GetIndices(timeRange *tsdb.TimeRange) ([]string, error) + GetIndices(timeRange plugins.DataTimeRange) ([]string, error) } var newIndexPattern = func(interval string, pattern string) (indexPattern, error) { @@ -34,7 +34,7 @@ type staticIndexPattern struct { indexName string } -func (ip *staticIndexPattern) GetIndices(timeRange *tsdb.TimeRange) ([]string, error) { +func (ip *staticIndexPattern) GetIndices(timeRange plugins.DataTimeRange) ([]string, error) { return []string{ip.indexName}, nil } @@ -73,7 +73,7 @@ func newDynamicIndexPattern(interval, pattern string) (*dynamicIndexPattern, err }, nil } -func (ip *dynamicIndexPattern) GetIndices(timeRange *tsdb.TimeRange) ([]string, error) { +func (ip *dynamicIndexPattern) GetIndices(timeRange plugins.DataTimeRange) ([]string, error) { from := timeRange.GetFromAsTimeUTC() to := 
timeRange.GetToAsTimeUTC() intervals := ip.intervalGenerator.Generate(from, to) diff --git a/pkg/tsdb/elasticsearch/client/index_pattern_test.go b/pkg/tsdb/elasticsearch/client/index_pattern_test.go index bf413fc2605..0638e7b55a4 100644 --- a/pkg/tsdb/elasticsearch/client/index_pattern_test.go +++ b/pkg/tsdb/elasticsearch/client/index_pattern_test.go @@ -5,19 +5,18 @@ import ( "testing" "time" - "github.com/grafana/grafana/pkg/tsdb" - + "github.com/grafana/grafana/pkg/plugins" . "github.com/smartystreets/goconvey/convey" ) func TestIndexPattern(t *testing.T) { Convey("Static index patterns", t, func() { - indexPatternScenario(noInterval, "data-*", nil, func(indices []string) { + indexPatternScenario(noInterval, "data-*", plugins.DataTimeRange{}, func(indices []string) { So(indices, ShouldHaveLength, 1) So(indices[0], ShouldEqual, "data-*") }) - indexPatternScenario(noInterval, "es-index-name", nil, func(indices []string) { + indexPatternScenario(noInterval, "es-index-name", plugins.DataTimeRange{}, func(indices []string) { So(indices, ShouldHaveLength, 1) So(indices[0], ShouldEqual, "es-index-name") }) @@ -27,62 +26,62 @@ func TestIndexPattern(t *testing.T) { from := fmt.Sprintf("%d", time.Date(2018, 5, 15, 17, 50, 0, 0, time.UTC).UnixNano()/int64(time.Millisecond)) to := fmt.Sprintf("%d", time.Date(2018, 5, 15, 17, 55, 0, 0, time.UTC).UnixNano()/int64(time.Millisecond)) - indexPatternScenario(intervalHourly, "[data-]YYYY.MM.DD.HH", tsdb.NewTimeRange(from, to), func(indices []string) { + indexPatternScenario(intervalHourly, "[data-]YYYY.MM.DD.HH", plugins.NewDataTimeRange(from, to), func(indices []string) { So(indices, ShouldHaveLength, 1) So(indices[0], ShouldEqual, "data-2018.05.15.17") }) - indexPatternScenario(intervalHourly, "YYYY.MM.DD.HH[-data]", tsdb.NewTimeRange(from, to), func(indices []string) { + indexPatternScenario(intervalHourly, "YYYY.MM.DD.HH[-data]", plugins.NewDataTimeRange(from, to), func(indices []string) { So(indices, ShouldHaveLength, 1) So(indices[0], ShouldEqual, "2018.05.15.17-data") }) - indexPatternScenario(intervalDaily, "[data-]YYYY.MM.DD", tsdb.NewTimeRange(from, to), func(indices []string) { + indexPatternScenario(intervalDaily, "[data-]YYYY.MM.DD", plugins.NewDataTimeRange(from, to), func(indices []string) { So(indices, ShouldHaveLength, 1) So(indices[0], ShouldEqual, "data-2018.05.15") }) - indexPatternScenario(intervalDaily, "YYYY.MM.DD[-data]", tsdb.NewTimeRange(from, to), func(indices []string) { + indexPatternScenario(intervalDaily, "YYYY.MM.DD[-data]", plugins.NewDataTimeRange(from, to), func(indices []string) { So(indices, ShouldHaveLength, 1) So(indices[0], ShouldEqual, "2018.05.15-data") }) - indexPatternScenario(intervalWeekly, "[data-]GGGG.WW", tsdb.NewTimeRange(from, to), func(indices []string) { + indexPatternScenario(intervalWeekly, "[data-]GGGG.WW", plugins.NewDataTimeRange(from, to), func(indices []string) { So(indices, ShouldHaveLength, 1) So(indices[0], ShouldEqual, "data-2018.20") }) - indexPatternScenario(intervalWeekly, "GGGG.WW[-data]", tsdb.NewTimeRange(from, to), func(indices []string) { + indexPatternScenario(intervalWeekly, "GGGG.WW[-data]", plugins.NewDataTimeRange(from, to), func(indices []string) { So(indices, ShouldHaveLength, 1) So(indices[0], ShouldEqual, "2018.20-data") }) - indexPatternScenario(intervalMonthly, "[data-]YYYY.MM", tsdb.NewTimeRange(from, to), func(indices []string) { + indexPatternScenario(intervalMonthly, "[data-]YYYY.MM", plugins.NewDataTimeRange(from, to), func(indices []string) { So(indices, 
ShouldHaveLength, 1) So(indices[0], ShouldEqual, "data-2018.05") }) - indexPatternScenario(intervalMonthly, "YYYY.MM[-data]", tsdb.NewTimeRange(from, to), func(indices []string) { + indexPatternScenario(intervalMonthly, "YYYY.MM[-data]", plugins.NewDataTimeRange(from, to), func(indices []string) { So(indices, ShouldHaveLength, 1) So(indices[0], ShouldEqual, "2018.05-data") }) - indexPatternScenario(intervalYearly, "[data-]YYYY", tsdb.NewTimeRange(from, to), func(indices []string) { + indexPatternScenario(intervalYearly, "[data-]YYYY", plugins.NewDataTimeRange(from, to), func(indices []string) { So(indices, ShouldHaveLength, 1) So(indices[0], ShouldEqual, "data-2018") }) - indexPatternScenario(intervalYearly, "YYYY[-data]", tsdb.NewTimeRange(from, to), func(indices []string) { + indexPatternScenario(intervalYearly, "YYYY[-data]", plugins.NewDataTimeRange(from, to), func(indices []string) { So(indices, ShouldHaveLength, 1) So(indices[0], ShouldEqual, "2018-data") }) - indexPatternScenario(intervalDaily, "YYYY[-data-]MM.DD", tsdb.NewTimeRange(from, to), func(indices []string) { + indexPatternScenario(intervalDaily, "YYYY[-data-]MM.DD", plugins.NewDataTimeRange(from, to), func(indices []string) { So(indices, ShouldHaveLength, 1) So(indices[0], ShouldEqual, "2018-data-05.15") }) - indexPatternScenario(intervalDaily, "[data-]YYYY[-moredata-]MM.DD", tsdb.NewTimeRange(from, to), func(indices []string) { + indexPatternScenario(intervalDaily, "[data-]YYYY[-moredata-]MM.DD", plugins.NewDataTimeRange(from, to), func(indices []string) { So(indices, ShouldHaveLength, 1) So(indices[0], ShouldEqual, "data-2018-moredata-05.15") }) @@ -90,7 +89,7 @@ func TestIndexPattern(t *testing.T) { Convey("Should return 01 week", func() { from = fmt.Sprintf("%d", time.Date(2018, 1, 15, 17, 50, 0, 0, time.UTC).UnixNano()/int64(time.Millisecond)) to = fmt.Sprintf("%d", time.Date(2018, 1, 15, 17, 55, 0, 0, time.UTC).UnixNano()/int64(time.Millisecond)) - indexPatternScenario(intervalWeekly, "[data-]GGGG.WW", tsdb.NewTimeRange(from, to), func(indices []string) { + indexPatternScenario(intervalWeekly, "[data-]GGGG.WW", plugins.NewDataTimeRange(from, to), func(indices []string) { So(indices, ShouldHaveLength, 1) So(indices[0], ShouldEqual, "data-2018.03") }) @@ -276,7 +275,7 @@ func TestIndexPattern(t *testing.T) { }) } -func indexPatternScenario(interval string, pattern string, timeRange *tsdb.TimeRange, fn func(indices []string)) { +func indexPatternScenario(interval string, pattern string, timeRange plugins.DataTimeRange, fn func(indices []string)) { Convey(fmt.Sprintf("Index pattern (interval=%s, index=%s", interval, pattern), func() { ip, err := newIndexPattern(interval, pattern) So(err, ShouldBeNil) diff --git a/pkg/tsdb/elasticsearch/client/models.go b/pkg/tsdb/elasticsearch/client/models.go index 47b888bff3f..ba0179331d5 100644 --- a/pkg/tsdb/elasticsearch/client/models.go +++ b/pkg/tsdb/elasticsearch/client/models.go @@ -5,8 +5,7 @@ import ( "net/http" "github.com/grafana/grafana/pkg/components/simplejson" - - "github.com/grafana/grafana/pkg/tsdb" + "github.com/grafana/grafana/pkg/tsdb/interval" ) type response struct { @@ -33,7 +32,7 @@ type SearchDebugInfo struct { // SearchRequest represents a search request type SearchRequest struct { Index string - Interval tsdb.Interval + Interval interval.Interval Size int Sort map[string]interface{} Query *Query diff --git a/pkg/tsdb/elasticsearch/client/search_request.go b/pkg/tsdb/elasticsearch/client/search_request.go index b8c9232f97b..bedc1fe4640 100644 --- 
a/pkg/tsdb/elasticsearch/client/search_request.go +++ b/pkg/tsdb/elasticsearch/client/search_request.go @@ -3,13 +3,13 @@ package es import ( "strings" - "github.com/grafana/grafana/pkg/tsdb" + "github.com/grafana/grafana/pkg/tsdb/interval" ) // SearchRequestBuilder represents a builder which can build a search request type SearchRequestBuilder struct { version int - interval tsdb.Interval + interval interval.Interval index string size int sort map[string]interface{} @@ -19,7 +19,7 @@ type SearchRequestBuilder struct { } // NewSearchRequestBuilder create a new search request builder -func NewSearchRequestBuilder(version int, interval tsdb.Interval) *SearchRequestBuilder { +func NewSearchRequestBuilder(version int, interval interval.Interval) *SearchRequestBuilder { builder := &SearchRequestBuilder{ version: version, interval: interval, @@ -131,7 +131,7 @@ func NewMultiSearchRequestBuilder(version int) *MultiSearchRequestBuilder { } // Search initiates and returns a new search request builder -func (m *MultiSearchRequestBuilder) Search(interval tsdb.Interval) *SearchRequestBuilder { +func (m *MultiSearchRequestBuilder) Search(interval interval.Interval) *SearchRequestBuilder { b := NewSearchRequestBuilder(m.version, interval) m.requestBuilders = append(m.requestBuilders, b) return b diff --git a/pkg/tsdb/elasticsearch/client/search_request_test.go b/pkg/tsdb/elasticsearch/client/search_request_test.go index 862b8058cba..be068d63060 100644 --- a/pkg/tsdb/elasticsearch/client/search_request_test.go +++ b/pkg/tsdb/elasticsearch/client/search_request_test.go @@ -6,7 +6,7 @@ import ( "time" "github.com/grafana/grafana/pkg/components/simplejson" - "github.com/grafana/grafana/pkg/tsdb" + "github.com/grafana/grafana/pkg/tsdb/interval" . "github.com/smartystreets/goconvey/convey" ) @@ -15,7 +15,7 @@ func TestSearchRequest(t *testing.T) { Convey("Test elasticsearch search request", t, func() { timeField := "@timestamp" Convey("Given new search request builder for es version 5", func() { - b := NewSearchRequestBuilder(5, tsdb.Interval{Value: 15 * time.Second, Text: "15s"}) + b := NewSearchRequestBuilder(5, interval.Interval{Value: 15 * time.Second, Text: "15s"}) Convey("When building search request", func() { sr, err := b.Build() @@ -390,7 +390,7 @@ func TestSearchRequest(t *testing.T) { }) Convey("Given new search request builder for es version 2", func() { - b := NewSearchRequestBuilder(2, tsdb.Interval{Value: 15 * time.Second, Text: "15s"}) + b := NewSearchRequestBuilder(2, interval.Interval{Value: 15 * time.Second, Text: "15s"}) Convey("When adding doc value field", func() { b.AddDocValueField(timeField) @@ -449,7 +449,7 @@ func TestMultiSearchRequest(t *testing.T) { b := NewMultiSearchRequestBuilder(0) Convey("When adding one search request", func() { - b.Search(tsdb.Interval{Value: 15 * time.Second, Text: "15s"}) + b.Search(interval.Interval{Value: 15 * time.Second, Text: "15s"}) Convey("When building search request should contain one search request", func() { mr, err := b.Build() @@ -459,8 +459,8 @@ func TestMultiSearchRequest(t *testing.T) { }) Convey("When adding two search requests", func() { - b.Search(tsdb.Interval{Value: 15 * time.Second, Text: "15s"}) - b.Search(tsdb.Interval{Value: 15 * time.Second, Text: "15s"}) + b.Search(interval.Interval{Value: 15 * time.Second, Text: "15s"}) + b.Search(interval.Interval{Value: 15 * time.Second, Text: "15s"}) Convey("When building search request should contain two search requests", func() { mr, err := b.Build() diff --git 
a/pkg/tsdb/elasticsearch/elasticsearch.go b/pkg/tsdb/elasticsearch/elasticsearch.go index 5ca08df6a84..6ce436454f1 100644 --- a/pkg/tsdb/elasticsearch/elasticsearch.go +++ b/pkg/tsdb/elasticsearch/elasticsearch.go @@ -5,42 +5,39 @@ import ( "fmt" "github.com/grafana/grafana/pkg/models" - "github.com/grafana/grafana/pkg/tsdb" + "github.com/grafana/grafana/pkg/plugins" es "github.com/grafana/grafana/pkg/tsdb/elasticsearch/client" + "github.com/grafana/grafana/pkg/tsdb/interval" ) // ElasticsearchExecutor represents a handler for handling elasticsearch datasource request -type ElasticsearchExecutor struct{} - -var ( - intervalCalculator tsdb.IntervalCalculator -) - -// NewElasticsearchExecutor creates a new elasticsearch executor -func NewElasticsearchExecutor(dsInfo *models.DataSource) (tsdb.TsdbQueryEndpoint, error) { - return &ElasticsearchExecutor{}, nil +type Executor struct { + intervalCalculator interval.Calculator } -func init() { - intervalCalculator = tsdb.NewIntervalCalculator(nil) - tsdb.RegisterTsdbQueryEndpoint("elasticsearch", NewElasticsearchExecutor) +// NewExecutor creates a new Executor. +func NewExecutor(*models.DataSource) (plugins.DataPlugin, error) { + return &Executor{ + intervalCalculator: interval.NewCalculator(), + }, nil } // Query handles an elasticsearch datasource request -func (e *ElasticsearchExecutor) Query(ctx context.Context, dsInfo *models.DataSource, tsdbQuery *tsdb.TsdbQuery) (*tsdb.Response, error) { +func (e *Executor) DataQuery(ctx context.Context, dsInfo *models.DataSource, + tsdbQuery plugins.DataQuery) (plugins.DataResponse, error) { if len(tsdbQuery.Queries) == 0 { - return nil, fmt.Errorf("query contains no queries") + return plugins.DataResponse{}, fmt.Errorf("query contains no queries") } - client, err := es.NewClient(ctx, dsInfo, tsdbQuery.TimeRange) + client, err := es.NewClient(ctx, dsInfo, *tsdbQuery.TimeRange) if err != nil { - return nil, err + return plugins.DataResponse{}, err } if tsdbQuery.Debug { client.EnableDebug() } - query := newTimeSeriesQuery(client, tsdbQuery, intervalCalculator) + query := newTimeSeriesQuery(client, tsdbQuery, e.intervalCalculator) return query.execute() } diff --git a/pkg/tsdb/elasticsearch/response_parser.go b/pkg/tsdb/elasticsearch/response_parser.go index 19b483d93f3..d85bf601b11 100644 --- a/pkg/tsdb/elasticsearch/response_parser.go +++ b/pkg/tsdb/elasticsearch/response_parser.go @@ -9,7 +9,7 @@ import ( "github.com/grafana/grafana/pkg/components/null" "github.com/grafana/grafana/pkg/components/simplejson" - "github.com/grafana/grafana/pkg/tsdb" + "github.com/grafana/grafana/pkg/plugins" es "github.com/grafana/grafana/pkg/tsdb/elasticsearch/client" ) @@ -40,10 +40,10 @@ var newResponseParser = func(responses []*es.SearchResponse, targets []*Query, d } } -func (rp *responseParser) getTimeSeries() (*tsdb.Response, error) { - result := &tsdb.Response{} - result.Results = make(map[string]*tsdb.QueryResult) - +func (rp *responseParser) getTimeSeries() (plugins.DataResponse, error) { + result := plugins.DataResponse{ + Results: make(map[string]plugins.DataQueryResult), + } if rp.Responses == nil { return result, nil } @@ -57,27 +57,29 @@ func (rp *responseParser) getTimeSeries() (*tsdb.Response, error) { } if res.Error != nil { - result.Results[target.RefID] = getErrorFromElasticResponse(res) - result.Results[target.RefID].Meta = debugInfo + errRslt := getErrorFromElasticResponse(res) + errRslt.Meta = debugInfo + result.Results[target.RefID] = errRslt continue } - queryRes := tsdb.NewQueryResult() - 
queryRes.Meta = debugInfo + queryRes := plugins.DataQueryResult{ + Meta: debugInfo, + } props := make(map[string]string) - table := tsdb.Table{ - Columns: make([]tsdb.TableColumn, 0), - Rows: make([]tsdb.RowValues, 0), + table := plugins.DataTable{ + Columns: make([]plugins.DataTableColumn, 0), + Rows: make([]plugins.DataRowValues, 0), } err := rp.processBuckets(res.Aggregations, target, &queryRes.Series, &table, props, 0) if err != nil { - return nil, err + return plugins.DataResponse{}, err } - rp.nameSeries(&queryRes.Series, target) - rp.trimDatapoints(&queryRes.Series, target) + rp.nameSeries(queryRes.Series, target) + rp.trimDatapoints(queryRes.Series, target) if len(table.Rows) > 0 { - queryRes.Tables = append(queryRes.Tables, &table) + queryRes.Tables = append(queryRes.Tables, table) } result.Results[target.RefID] = queryRes @@ -85,7 +87,8 @@ func (rp *responseParser) getTimeSeries() (*tsdb.Response, error) { return result, nil } -func (rp *responseParser) processBuckets(aggs map[string]interface{}, target *Query, series *tsdb.TimeSeriesSlice, table *tsdb.Table, props map[string]string, depth int) error { +func (rp *responseParser) processBuckets(aggs map[string]interface{}, target *Query, + series *plugins.DataTimeSeriesSlice, table *plugins.DataTable, props map[string]string, depth int) error { var err error maxDepth := len(target.BucketAggs) - 1 @@ -162,7 +165,8 @@ func (rp *responseParser) processBuckets(aggs map[string]interface{}, target *Qu return nil } -func (rp *responseParser) processMetrics(esAgg *simplejson.Json, target *Query, series *tsdb.TimeSeriesSlice, props map[string]string) error { +func (rp *responseParser) processMetrics(esAgg *simplejson.Json, target *Query, series *plugins.DataTimeSeriesSlice, + props map[string]string) error { for _, metric := range target.Metrics { if metric.Hide { continue @@ -170,7 +174,7 @@ func (rp *responseParser) processMetrics(esAgg *simplejson.Json, target *Query, switch metric.Type { case countType: - newSeries := tsdb.TimeSeries{ + newSeries := plugins.DataTimeSeries{ Tags: make(map[string]string), } @@ -178,14 +182,14 @@ func (rp *responseParser) processMetrics(esAgg *simplejson.Json, target *Query, bucket := simplejson.NewFromAny(v) value := castToNullFloat(bucket.Get("doc_count")) key := castToNullFloat(bucket.Get("key")) - newSeries.Points = append(newSeries.Points, tsdb.TimePoint{value, key}) + newSeries.Points = append(newSeries.Points, plugins.DataTimePoint{value, key}) } for k, v := range props { newSeries.Tags[k] = v } newSeries.Tags["metric"] = countType - *series = append(*series, &newSeries) + *series = append(*series, newSeries) case percentilesType: buckets := esAgg.Get("buckets").MustArray() @@ -202,7 +206,7 @@ func (rp *responseParser) processMetrics(esAgg *simplejson.Json, target *Query, } sort.Strings(percentileKeys) for _, percentileName := range percentileKeys { - newSeries := tsdb.TimeSeries{ + newSeries := plugins.DataTimeSeries{ Tags: make(map[string]string), } for k, v := range props { @@ -214,9 +218,9 @@ func (rp *responseParser) processMetrics(esAgg *simplejson.Json, target *Query, bucket := simplejson.NewFromAny(v) value := castToNullFloat(bucket.GetPath(metric.ID, "values", percentileName)) key := castToNullFloat(bucket.Get("key")) - newSeries.Points = append(newSeries.Points, tsdb.TimePoint{value, key}) + newSeries.Points = append(newSeries.Points, plugins.DataTimePoint{value, key}) } - *series = append(*series, &newSeries) + *series = append(*series, newSeries) } case extendedStatsType: buckets := 
esAgg.Get("buckets").MustArray() @@ -233,7 +237,7 @@ func (rp *responseParser) processMetrics(esAgg *simplejson.Json, target *Query, continue } - newSeries := tsdb.TimeSeries{ + newSeries := plugins.DataTimeSeries{ Tags: make(map[string]string), } for k, v := range props { @@ -254,12 +258,12 @@ func (rp *responseParser) processMetrics(esAgg *simplejson.Json, target *Query, default: value = castToNullFloat(bucket.GetPath(metric.ID, statName)) } - newSeries.Points = append(newSeries.Points, tsdb.TimePoint{value, key}) + newSeries.Points = append(newSeries.Points, plugins.DataTimePoint{value, key}) } - *series = append(*series, &newSeries) + *series = append(*series, newSeries) } default: - newSeries := tsdb.TimeSeries{ + newSeries := plugins.DataTimeSeries{ Tags: make(map[string]string), } for k, v := range props { @@ -282,15 +286,16 @@ func (rp *responseParser) processMetrics(esAgg *simplejson.Json, target *Query, } else { value = castToNullFloat(bucket.GetPath(metric.ID, "value")) } - newSeries.Points = append(newSeries.Points, tsdb.TimePoint{value, key}) + newSeries.Points = append(newSeries.Points, plugins.DataTimePoint{value, key}) } - *series = append(*series, &newSeries) + *series = append(*series, newSeries) } } return nil } -func (rp *responseParser) processAggregationDocs(esAgg *simplejson.Json, aggDef *BucketAgg, target *Query, table *tsdb.Table, props map[string]string) error { +func (rp *responseParser) processAggregationDocs(esAgg *simplejson.Json, aggDef *BucketAgg, target *Query, + table *plugins.DataTable, props map[string]string) error { propKeys := make([]string, 0) for k := range props { propKeys = append(propKeys, k) @@ -299,12 +304,12 @@ func (rp *responseParser) processAggregationDocs(esAgg *simplejson.Json, aggDef if len(table.Columns) == 0 { for _, propKey := range propKeys { - table.Columns = append(table.Columns, tsdb.TableColumn{Text: propKey}) + table.Columns = append(table.Columns, plugins.DataTableColumn{Text: propKey}) } - table.Columns = append(table.Columns, tsdb.TableColumn{Text: aggDef.Field}) + table.Columns = append(table.Columns, plugins.DataTableColumn{Text: aggDef.Field}) } - addMetricValue := func(values *tsdb.RowValues, metricName string, value null.Float) { + addMetricValue := func(values *plugins.DataRowValues, metricName string, value null.Float) { found := false for _, c := range table.Columns { if c.Text == metricName { @@ -313,14 +318,14 @@ func (rp *responseParser) processAggregationDocs(esAgg *simplejson.Json, aggDef } } if !found { - table.Columns = append(table.Columns, tsdb.TableColumn{Text: metricName}) + table.Columns = append(table.Columns, plugins.DataTableColumn{Text: metricName}) } *values = append(*values, value) } for _, v := range esAgg.Get("buckets").MustArray() { bucket := simplejson.NewFromAny(v) - values := make(tsdb.RowValues, 0) + values := make(plugins.DataRowValues, 0) for _, propKey := range propKeys { values = append(values, props[propKey]) @@ -390,7 +395,7 @@ func (rp *responseParser) processAggregationDocs(esAgg *simplejson.Json, aggDef return nil } -func (rp *responseParser) trimDatapoints(series *tsdb.TimeSeriesSlice, target *Query) { +func (rp *responseParser) trimDatapoints(series plugins.DataTimeSeriesSlice, target *Query) { var histogram *BucketAgg for _, bucketAgg := range target.BucketAggs { if bucketAgg.Type == dateHistType { @@ -408,31 +413,31 @@ func (rp *responseParser) trimDatapoints(series *tsdb.TimeSeriesSlice, target *Q return } - for _, s := range *series { - if len(s.Points) > trimEdges*2 { - 
s.Points = s.Points[trimEdges : len(s.Points)-trimEdges] + for i := range series { + if len(series[i].Points) > trimEdges*2 { + series[i].Points = series[i].Points[trimEdges : len(series[i].Points)-trimEdges] } } } -func (rp *responseParser) nameSeries(seriesList *tsdb.TimeSeriesSlice, target *Query) { - set := make(map[string]string) - for _, v := range *seriesList { +func (rp *responseParser) nameSeries(seriesList plugins.DataTimeSeriesSlice, target *Query) { + set := make(map[string]struct{}) + for _, v := range seriesList { if metricType, exists := v.Tags["metric"]; exists { if _, ok := set[metricType]; !ok { - set[metricType] = "" + set[metricType] = struct{}{} } } } metricTypeCount := len(set) - for _, series := range *seriesList { - series.Name = rp.getSeriesName(series, target, metricTypeCount) + for i := range seriesList { + seriesList[i].Name = rp.getSeriesName(seriesList[i], target, metricTypeCount) } } var aliasPatternRegex = regexp.MustCompile(`\{\{([\s\S]+?)\}\}`) -func (rp *responseParser) getSeriesName(series *tsdb.TimeSeries, target *Query, metricTypeCount int) string { +func (rp *responseParser) getSeriesName(series plugins.DataTimeSeries, target *Query, metricTypeCount int) string { metricType := series.Tags["metric"] metricName := rp.getMetricName(metricType) delete(series.Tags, "metric") @@ -564,8 +569,8 @@ func findAgg(target *Query, aggID string) (*BucketAgg, error) { return nil, errors.New("can't found aggDef, aggID:" + aggID) } -func getErrorFromElasticResponse(response *es.SearchResponse) *tsdb.QueryResult { - result := tsdb.NewQueryResult() +func getErrorFromElasticResponse(response *es.SearchResponse) plugins.DataQueryResult { + var result plugins.DataQueryResult json := simplejson.NewFromAny(response.Error) reason := json.Get("reason").MustString() rootCauseReason := json.Get("root_cause").GetIndex(0).Get("reason").MustString() diff --git a/pkg/tsdb/elasticsearch/response_parser_test.go b/pkg/tsdb/elasticsearch/response_parser_test.go index e638cf799f9..0ee1c089910 100644 --- a/pkg/tsdb/elasticsearch/response_parser_test.go +++ b/pkg/tsdb/elasticsearch/response_parser_test.go @@ -8,9 +8,9 @@ import ( "github.com/grafana/grafana/pkg/components/null" "github.com/grafana/grafana/pkg/components/simplejson" + "github.com/grafana/grafana/pkg/plugins" es "github.com/grafana/grafana/pkg/tsdb/elasticsearch/client" - "github.com/grafana/grafana/pkg/tsdb" . 
"github.com/smartystreets/goconvey/convey" ) @@ -999,9 +999,10 @@ func newResponseParserForTest(tsdbQueries map[string]string, responseBody string to := time.Date(2018, 5, 15, 17, 55, 0, 0, time.UTC) fromStr := fmt.Sprintf("%d", from.UnixNano()/int64(time.Millisecond)) toStr := fmt.Sprintf("%d", to.UnixNano()/int64(time.Millisecond)) - tsdbQuery := &tsdb.TsdbQuery{ - Queries: []*tsdb.Query{}, - TimeRange: tsdb.NewTimeRange(fromStr, toStr), + timeRange := plugins.NewDataTimeRange(fromStr, toStr) + tsdbQuery := plugins.DataQuery{ + Queries: []plugins.DataSubQuery{}, + TimeRange: &timeRange, } for refID, tsdbQueryBody := range tsdbQueries { @@ -1010,9 +1011,9 @@ func newResponseParserForTest(tsdbQueries map[string]string, responseBody string return nil, err } - tsdbQuery.Queries = append(tsdbQuery.Queries, &tsdb.Query{ + tsdbQuery.Queries = append(tsdbQuery.Queries, plugins.DataSubQuery{ Model: tsdbQueryJSON, - RefId: refID, + RefID: refID, }) } diff --git a/pkg/tsdb/elasticsearch/time_series_query.go b/pkg/tsdb/elasticsearch/time_series_query.go index 51f22a54632..3ed45667b8f 100644 --- a/pkg/tsdb/elasticsearch/time_series_query.go +++ b/pkg/tsdb/elasticsearch/time_series_query.go @@ -6,52 +6,54 @@ import ( "strconv" "github.com/grafana/grafana/pkg/components/simplejson" - "github.com/grafana/grafana/pkg/tsdb" + "github.com/grafana/grafana/pkg/plugins" es "github.com/grafana/grafana/pkg/tsdb/elasticsearch/client" + "github.com/grafana/grafana/pkg/tsdb/interval" ) type timeSeriesQuery struct { client es.Client - tsdbQuery *tsdb.TsdbQuery - intervalCalculator tsdb.IntervalCalculator + tsdbQuery plugins.DataQuery + intervalCalculator interval.Calculator } -var newTimeSeriesQuery = func(client es.Client, tsdbQuery *tsdb.TsdbQuery, intervalCalculator tsdb.IntervalCalculator) *timeSeriesQuery { +var newTimeSeriesQuery = func(client es.Client, dataQuery plugins.DataQuery, + intervalCalculator interval.Calculator) *timeSeriesQuery { return &timeSeriesQuery{ client: client, - tsdbQuery: tsdbQuery, + tsdbQuery: dataQuery, intervalCalculator: intervalCalculator, } } -func (e *timeSeriesQuery) execute() (*tsdb.Response, error) { +func (e *timeSeriesQuery) execute() (plugins.DataResponse, error) { tsQueryParser := newTimeSeriesQueryParser() queries, err := tsQueryParser.parse(e.tsdbQuery) if err != nil { - return nil, err + return plugins.DataResponse{}, err } ms := e.client.MultiSearch() from := fmt.Sprintf("%d", e.tsdbQuery.TimeRange.GetFromAsMsEpoch()) to := fmt.Sprintf("%d", e.tsdbQuery.TimeRange.GetToAsMsEpoch()) - result := &tsdb.Response{ - Results: make(map[string]*tsdb.QueryResult), + result := plugins.DataResponse{ + Results: make(map[string]plugins.DataQueryResult), } for _, q := range queries { if err := e.processQuery(q, ms, from, to, result); err != nil { - return nil, err + return plugins.DataResponse{}, err } } req, err := ms.Build() if err != nil { - return nil, err + return plugins.DataResponse{}, err } res, err := e.client.ExecuteMultisearch(req) if err != nil { - return nil, err + return plugins.DataResponse{}, err } rp := newResponseParser(res.Responses, queries, res.DebugInfo) @@ -59,12 +61,12 @@ func (e *timeSeriesQuery) execute() (*tsdb.Response, error) { } func (e *timeSeriesQuery) processQuery(q *Query, ms *es.MultiSearchRequestBuilder, from, to string, - result *tsdb.Response) error { + result plugins.DataResponse) error { minInterval, err := e.client.GetMinInterval(q.Interval) if err != nil { return err } - interval := e.intervalCalculator.Calculate(e.tsdbQuery.TimeRange, 
minInterval) + interval := e.intervalCalculator.Calculate(*e.tsdbQuery.TimeRange, minInterval) b := ms.Search(interval) b.Size(0) @@ -77,8 +79,8 @@ func (e *timeSeriesQuery) processQuery(q *Query, ms *es.MultiSearchRequestBuilde if len(q.BucketAggs) == 0 { if len(q.Metrics) == 0 || q.Metrics[0].Type != "raw_document" { - result.Results[q.RefID] = &tsdb.QueryResult{ - RefId: q.RefID, + result.Results[q.RefID] = plugins.DataQueryResult{ + RefID: q.RefID, Error: fmt.Errorf("invalid query, missing metrics and aggregations"), ErrorString: "invalid query, missing metrics and aggregations", } @@ -308,7 +310,7 @@ func newTimeSeriesQueryParser() *timeSeriesQueryParser { return &timeSeriesQueryParser{} } -func (p *timeSeriesQueryParser) parse(tsdbQuery *tsdb.TsdbQuery) ([]*Query, error) { +func (p *timeSeriesQueryParser) parse(tsdbQuery plugins.DataQuery) ([]*Query, error) { queries := make([]*Query, 0) for _, q := range tsdbQuery.Queries { model := q.Model @@ -335,7 +337,7 @@ func (p *timeSeriesQueryParser) parse(tsdbQuery *tsdb.TsdbQuery) ([]*Query, erro Metrics: metrics, Alias: alias, Interval: interval, - RefID: q.RefId, + RefID: q.RefID, }) } diff --git a/pkg/tsdb/elasticsearch/time_series_query_test.go b/pkg/tsdb/elasticsearch/time_series_query_test.go index 9cd16a1ca6d..5a3a01f78d9 100644 --- a/pkg/tsdb/elasticsearch/time_series_query_test.go +++ b/pkg/tsdb/elasticsearch/time_series_query_test.go @@ -5,10 +5,11 @@ import ( "testing" "time" + "github.com/grafana/grafana/pkg/plugins" es "github.com/grafana/grafana/pkg/tsdb/elasticsearch/client" + "github.com/grafana/grafana/pkg/tsdb/interval" "github.com/grafana/grafana/pkg/components/simplejson" - "github.com/grafana/grafana/pkg/tsdb" . "github.com/smartystreets/goconvey/convey" ) @@ -897,13 +898,13 @@ func (c *fakeClient) MultiSearch() *es.MultiSearchRequestBuilder { return c.builder } -func newTsdbQuery(body string) (*tsdb.TsdbQuery, error) { +func newDataQuery(body string) (plugins.DataQuery, error) { json, err := simplejson.NewJson([]byte(body)) if err != nil { - return nil, err + return plugins.DataQuery{}, err } - return &tsdb.TsdbQuery{ - Queries: []*tsdb.Query{ + return plugins.DataQuery{ + Queries: []plugins.DataSubQuery{ { Model: json, }, @@ -911,22 +912,24 @@ func newTsdbQuery(body string) (*tsdb.TsdbQuery, error) { }, nil } -func executeTsdbQuery(c es.Client, body string, from, to time.Time, minInterval time.Duration) (*tsdb.Response, error) { +func executeTsdbQuery(c es.Client, body string, from, to time.Time, minInterval time.Duration) ( + plugins.DataResponse, error) { json, err := simplejson.NewJson([]byte(body)) if err != nil { - return nil, err + return plugins.DataResponse{}, err } fromStr := fmt.Sprintf("%d", from.UnixNano()/int64(time.Millisecond)) toStr := fmt.Sprintf("%d", to.UnixNano()/int64(time.Millisecond)) - tsdbQuery := &tsdb.TsdbQuery{ - Queries: []*tsdb.Query{ + timeRange := plugins.NewDataTimeRange(fromStr, toStr) + tsdbQuery := plugins.DataQuery{ + Queries: []plugins.DataSubQuery{ { Model: json, }, }, - TimeRange: tsdb.NewTimeRange(fromStr, toStr), + TimeRange: &timeRange, } - query := newTimeSeriesQuery(c, tsdbQuery, tsdb.NewIntervalCalculator(&tsdb.IntervalOptions{MinInterval: minInterval})) + query := newTimeSeriesQuery(c, tsdbQuery, interval.NewCalculator(interval.CalculatorOptions{MinInterval: minInterval})) return query.execute() } @@ -985,7 +988,7 @@ func TestTimeSeriesQueryParser(t *testing.T) { } ] }` - tsdbQuery, err := newTsdbQuery(body) + tsdbQuery, err := newDataQuery(body) So(err, ShouldBeNil) 
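For orientation, a sketch of how the extracted interval package wires together after this change, restricted to identifiers that appear in this diff (the version number and durations are arbitrary examples):

package example

import (
	"time"

	"github.com/grafana/grafana/pkg/plugins"
	es "github.com/grafana/grafana/pkg/tsdb/elasticsearch/client"
	"github.com/grafana/grafana/pkg/tsdb/interval"
)

// buildSearch mirrors the processQuery flow shown above: calculate an
// interval for the query's time range, then feed it to the builder.
func buildSearch() (*es.MultiSearchRequest, error) {
	calc := interval.NewCalculator(interval.CalculatorOptions{MinInterval: 15 * time.Second})
	timeRange := plugins.NewDataTimeRange("now-6h", "now")
	iv := calc.Calculate(timeRange, 15*time.Second) // yields an interval.Interval

	ms := es.NewMultiSearchRequestBuilder(5) // ES version 5, as in the tests above
	b := ms.Search(iv)
	b.Size(0)
	return ms.Build()
}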
queries, err := p.parse(tsdbQuery) So(err, ShouldBeNil) diff --git a/pkg/tsdb/fake_test.go b/pkg/tsdb/fake_test.go deleted file mode 100644 index 572b3c77ff3..00000000000 --- a/pkg/tsdb/fake_test.go +++ /dev/null @@ -1,45 +0,0 @@ -package tsdb - -import ( - "context" - - "github.com/grafana/grafana/pkg/models" -) - -type FakeExecutor struct { - results map[string]*QueryResult - resultsFn map[string]ResultsFn -} - -type ResultsFn func(context *TsdbQuery) *QueryResult - -func NewFakeExecutor(dsInfo *models.DataSource) (*FakeExecutor, error) { - return &FakeExecutor{ - results: make(map[string]*QueryResult), - resultsFn: make(map[string]ResultsFn), - }, nil -} - -func (e *FakeExecutor) Query(ctx context.Context, dsInfo *models.DataSource, context *TsdbQuery) (*Response, error) { - result := &Response{Results: make(map[string]*QueryResult)} - for _, query := range context.Queries { - if results, has := e.results[query.RefId]; has { - result.Results[query.RefId] = results - } - if testFunc, has := e.resultsFn[query.RefId]; has { - result.Results[query.RefId] = testFunc(context) - } - } - - return result, nil -} - -func (e *FakeExecutor) Return(refId string, series TimeSeriesSlice) { - e.results[refId] = &QueryResult{ - RefId: refId, Series: series, - } -} - -func (e *FakeExecutor) HandleQuery(refId string, fn ResultsFn) { - e.resultsFn[refId] = fn -} diff --git a/pkg/tsdb/frame_util.go b/pkg/tsdb/frame_util.go deleted file mode 100644 index f88ad39e583..00000000000 --- a/pkg/tsdb/frame_util.go +++ /dev/null @@ -1,37 +0,0 @@ -package tsdb - -import ( - "time" - - "github.com/grafana/grafana-plugin-sdk-go/data" -) - -// SeriesToFrame converts a TimeSeries to a sdk Frame -func SeriesToFrame(series *TimeSeries) (*data.Frame, error) { - timeVec := make([]*time.Time, len(series.Points)) - floatVec := make([]*float64, len(series.Points)) - for idx, point := range series.Points { - timeVec[idx], floatVec[idx] = convertTSDBTimePoint(point) - } - frame := data.NewFrame(series.Name, - data.NewField("time", nil, timeVec), - data.NewField("value", data.Labels(series.Tags), floatVec), - ) - - return frame, nil -} - -// convertTSDBTimePoint coverts a tsdb.TimePoint into two values appropriate -// for Series values. -func convertTSDBTimePoint(point TimePoint) (t *time.Time, f *float64) { - timeIdx, valueIdx := 1, 0 - if point[timeIdx].Valid { // Assuming valid is null? 
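The helper being deleted here converts a millisecond Unix timestamp by splitting it into whole seconds plus a nanosecond remainder (the arithmetic continues just below). For reference, the same conversion as a standalone, runnable sketch:

package main

import (
	"fmt"
	"time"
)

// msEpochToTime reproduces the arithmetic from the deleted
// convertTSDBTimePoint: seconds from integer division, nanoseconds
// from the millisecond remainder.
func msEpochToTime(ms int64) time.Time {
	return time.Unix(ms/int64(1e3), (ms%int64(1e3))*int64(1e6))
}

func main() {
	// 1526406900000 is 2018-05-15 17:55:00 UTC, the fixture timestamp
	// used by several tests in this diff.
	fmt.Println(msEpochToTime(1526406900000).UTC())
}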
- tI := int64(point[timeIdx].Float64) - uT := time.Unix(tI/int64(1e+3), (tI%int64(1e+3))*int64(1e+6)) // time.Time from millisecond unix ts - t = &uT - } - if point[valueIdx].Valid { - f = &point[valueIdx].Float64 - } - return -} diff --git a/pkg/tsdb/graphite/graphite.go b/pkg/tsdb/graphite/graphite.go index f8b0056620e..0b4a276405b 100644 --- a/pkg/tsdb/graphite/graphite.go +++ b/pkg/tsdb/graphite/graphite.go @@ -17,8 +17,8 @@ import ( "github.com/grafana/grafana/pkg/infra/log" "github.com/grafana/grafana/pkg/models" + "github.com/grafana/grafana/pkg/plugins" "github.com/grafana/grafana/pkg/setting" - "github.com/grafana/grafana/pkg/tsdb" "github.com/opentracing/opentracing-go" ) @@ -26,29 +26,24 @@ type GraphiteExecutor struct { HttpClient *http.Client } -func NewGraphiteExecutor(datasource *models.DataSource) (tsdb.TsdbQueryEndpoint, error) { +func NewExecutor(*models.DataSource) (plugins.DataPlugin, error) { return &GraphiteExecutor{}, nil } var glog = log.New("tsdb.graphite") -func init() { - tsdb.RegisterTsdbQueryEndpoint("graphite", NewGraphiteExecutor) -} - -func (e *GraphiteExecutor) Query(ctx context.Context, dsInfo *models.DataSource, tsdbQuery *tsdb.TsdbQuery) (*tsdb.Response, error) { - result := &tsdb.Response{} - +func (e *GraphiteExecutor) DataQuery(ctx context.Context, dsInfo *models.DataSource, tsdbQuery plugins.DataQuery) ( + plugins.DataResponse, error) { // This logic is used when called from Dashboard Alerting. from := "-" + formatTimeRange(tsdbQuery.TimeRange.From) until := formatTimeRange(tsdbQuery.TimeRange.To) // This logic is used when called through server side expressions. - if isTimeRangeNumeric(tsdbQuery.TimeRange) { + if isTimeRangeNumeric(*tsdbQuery.TimeRange) { var err error - from, until, err = epochMStoGraphiteTime(tsdbQuery.TimeRange) + from, until, err = epochMStoGraphiteTime(*tsdbQuery.TimeRange) if err != nil { - return nil, err + return plugins.DataResponse{}, err } } @@ -80,7 +75,7 @@ func (e *GraphiteExecutor) Query(ctx context.Context, dsInfo *models.DataSource, if target == "" { glog.Error("No targets in query model", "models without targets", strings.Join(emptyQueries, "\n")) - return nil, errors.New("no query target found for the alert rule") + return plugins.DataResponse{}, errors.New("no query target found for the alert rule") } formData["target"] = []string{target} @@ -91,12 +86,12 @@ func (e *GraphiteExecutor) Query(ctx context.Context, dsInfo *models.DataSource, req, err := e.createRequest(dsInfo, formData) if err != nil { - return nil, err + return plugins.DataResponse{}, err } httpClient, err := dsInfo.GetHttpClient() if err != nil { - return nil, err + return plugins.DataResponse{}, err } span, ctx := opentracing.StartSpanFromContext(ctx, "graphite query") @@ -112,24 +107,25 @@ func (e *GraphiteExecutor) Query(ctx context.Context, dsInfo *models.DataSource, span.Context(), opentracing.HTTPHeaders, opentracing.HTTPHeadersCarrier(req.Header)); err != nil { - return nil, err + return plugins.DataResponse{}, err } res, err := ctxhttp.Do(ctx, httpClient, req) if err != nil { - return nil, err + return plugins.DataResponse{}, err } data, err := e.parseResponse(res) if err != nil { - return nil, err + return plugins.DataResponse{}, err } - result.Results = make(map[string]*tsdb.QueryResult) - queryRes := tsdb.NewQueryResult() - + result := plugins.DataResponse{ + Results: make(map[string]plugins.DataQueryResult), + } + queryRes := plugins.DataQueryResult{} for _, series := range data { - queryRes.Series = append(queryRes.Series, 
&tsdb.TimeSeries{ + queryRes.Series = append(queryRes.Series, plugins.DataTimeSeries{ Name: series.Target, Points: series.DataPoints, }) @@ -215,7 +211,7 @@ func fixIntervalFormat(target string) string { return target } -func isTimeRangeNumeric(tr *tsdb.TimeRange) bool { +func isTimeRangeNumeric(tr plugins.DataTimeRange) bool { if _, err := strconv.ParseInt(tr.From, 10, 64); err != nil { return false } @@ -225,7 +221,7 @@ func isTimeRangeNumeric(tr *tsdb.TimeRange) bool { return true } -func epochMStoGraphiteTime(tr *tsdb.TimeRange) (string, string, error) { +func epochMStoGraphiteTime(tr plugins.DataTimeRange) (string, string, error) { from, err := strconv.ParseInt(tr.From, 10, 64) if err != nil { return "", "", err diff --git a/pkg/tsdb/graphite/types.go b/pkg/tsdb/graphite/types.go index 8bd13aec4f2..1c325b69eb1 100644 --- a/pkg/tsdb/graphite/types.go +++ b/pkg/tsdb/graphite/types.go @@ -1,8 +1,8 @@ package graphite -import "github.com/grafana/grafana/pkg/tsdb" +import "github.com/grafana/grafana/pkg/plugins" type TargetResponseDTO struct { - Target string `json:"target"` - DataPoints tsdb.TimeSeriesPoints `json:"datapoints"` + Target string `json:"target"` + DataPoints plugins.DataTimeSeriesPoints `json:"datapoints"` } diff --git a/pkg/tsdb/influxdb/flux/flux.go b/pkg/tsdb/influxdb/flux/flux.go index a7312e2a2b0..e70f3356121 100644 --- a/pkg/tsdb/influxdb/flux/flux.go +++ b/pkg/tsdb/influxdb/flux/flux.go @@ -7,7 +7,7 @@ import ( "github.com/grafana/grafana-plugin-sdk-go/backend" "github.com/grafana/grafana/pkg/infra/log" "github.com/grafana/grafana/pkg/models" - "github.com/grafana/grafana/pkg/tsdb" + "github.com/grafana/grafana/pkg/plugins" influxdb2 "github.com/influxdata/influxdb-client-go/v2" "github.com/influxdata/influxdb-client-go/v2/api" ) @@ -21,21 +21,22 @@ func init() { } // Query builds flux queries, executes them, and returns the results. -func Query(ctx context.Context, dsInfo *models.DataSource, tsdbQuery *tsdb.TsdbQuery) (*tsdb.Response, error) { - glog.Debug("Received a query", "query", *tsdbQuery) - tRes := &tsdb.Response{ - Results: make(map[string]*tsdb.QueryResult), +func Query(ctx context.Context, dsInfo *models.DataSource, tsdbQuery plugins.DataQuery) ( + plugins.DataResponse, error) { + glog.Debug("Received a query", "query", tsdbQuery) + tRes := plugins.DataResponse{ + Results: make(map[string]plugins.DataQueryResult), } r, err := runnerFromDataSource(dsInfo) if err != nil { - return nil, err + return plugins.DataResponse{}, err } defer r.client.Close() for _, query := range tsdbQuery.Queries { - qm, err := getQueryModelTSDB(query, tsdbQuery.TimeRange, dsInfo) + qm, err := getQueryModelTSDB(query, *tsdbQuery.TimeRange, dsInfo) if err != nil { - tRes.Results[query.RefId] = &tsdb.QueryResult{Error: err} + tRes.Results[query.RefID] = plugins.DataQueryResult{Error: err} continue } @@ -43,7 +44,7 @@ func Query(ctx context.Context, dsInfo *models.DataSource, tsdbQuery *tsdb.TsdbQ maxSeries := dsInfo.JsonData.Get("maxSeries").MustInt(1000) res := executeQuery(ctx, *qm, r, maxSeries) - tRes.Results[query.RefId] = backendDataResponseToTSDBResponse(&res, query.RefId) + tRes.Results[query.RefID] = backendDataResponseToDataResponse(&res, query.RefID) } return tRes, nil } @@ -94,16 +95,16 @@ func runnerFromDataSource(dsInfo *models.DataSource) (*runner, error) { }, nil } -// backendDataResponseToTSDBResponse takes the SDK's style response and changes it into a -// tsdb.QueryResult. This is a wrapper so less of existing code needs to be changed. 
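As the comments in the graphite executor above note, dashboard alerting supplies relative range expressions (e.g. "now-1h") while server side expressions supply epoch-millisecond strings; isTimeRangeNumeric is what routes between the two. A self-contained sketch of that check, same logic stand-alone:

package main

import (
	"fmt"
	"strconv"
)

// isNumericRange mirrors graphite's isTimeRangeNumeric: both endpoints must
// parse as base-10 integers (epoch milliseconds) to take the expressions path.
func isNumericRange(from, to string) bool {
	if _, err := strconv.ParseInt(from, 10, 64); err != nil {
		return false
	}
	_, err := strconv.ParseInt(to, 10, 64)
	return err == nil
}

func main() {
	fmt.Println(isNumericRange("now-1h", "now"))                  // false: alerting-style relative range
	fmt.Println(isNumericRange("1526406600000", "1526406900000")) // true: epoch ms from expressions
}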
diff --git a/pkg/tsdb/influxdb/flux/flux.go b/pkg/tsdb/influxdb/flux/flux.go
index a7312e2a2b0..e70f3356121 100644
--- a/pkg/tsdb/influxdb/flux/flux.go
+++ b/pkg/tsdb/influxdb/flux/flux.go
@@ -7,7 +7,7 @@ import (
 	"github.com/grafana/grafana-plugin-sdk-go/backend"
 	"github.com/grafana/grafana/pkg/infra/log"
 	"github.com/grafana/grafana/pkg/models"
-	"github.com/grafana/grafana/pkg/tsdb"
+	"github.com/grafana/grafana/pkg/plugins"
 	influxdb2 "github.com/influxdata/influxdb-client-go/v2"
 	"github.com/influxdata/influxdb-client-go/v2/api"
 )
@@ -21,21 +21,22 @@ func init() {
 }
 
 // Query builds flux queries, executes them, and returns the results.
-func Query(ctx context.Context, dsInfo *models.DataSource, tsdbQuery *tsdb.TsdbQuery) (*tsdb.Response, error) {
-	glog.Debug("Received a query", "query", *tsdbQuery)
-	tRes := &tsdb.Response{
-		Results: make(map[string]*tsdb.QueryResult),
+func Query(ctx context.Context, dsInfo *models.DataSource, tsdbQuery plugins.DataQuery) (
+	plugins.DataResponse, error) {
+	glog.Debug("Received a query", "query", tsdbQuery)
+	tRes := plugins.DataResponse{
+		Results: make(map[string]plugins.DataQueryResult),
 	}
 	r, err := runnerFromDataSource(dsInfo)
 	if err != nil {
-		return nil, err
+		return plugins.DataResponse{}, err
 	}
 	defer r.client.Close()
 
 	for _, query := range tsdbQuery.Queries {
-		qm, err := getQueryModelTSDB(query, tsdbQuery.TimeRange, dsInfo)
+		qm, err := getQueryModelTSDB(query, *tsdbQuery.TimeRange, dsInfo)
 		if err != nil {
-			tRes.Results[query.RefId] = &tsdb.QueryResult{Error: err}
+			tRes.Results[query.RefID] = plugins.DataQueryResult{Error: err}
 			continue
 		}
 
@@ -43,7 +44,7 @@ func Query(ctx context.Context, dsInfo *models.DataSource, tsdbQuery *tsdb.TsdbQ
 		maxSeries := dsInfo.JsonData.Get("maxSeries").MustInt(1000)
 		res := executeQuery(ctx, *qm, r, maxSeries)
 
-		tRes.Results[query.RefId] = backendDataResponseToTSDBResponse(&res, query.RefId)
+		tRes.Results[query.RefID] = backendDataResponseToDataResponse(&res, query.RefID)
 	}
 	return tRes, nil
 }
@@ -94,16 +95,16 @@ func runnerFromDataSource(dsInfo *models.DataSource) (*runner, error) {
 	}, nil
 }
 
-// backendDataResponseToTSDBResponse takes the SDK's style response and changes it into a
-// tsdb.QueryResult. This is a wrapper so less of existing code needs to be changed. This should
+// backendDataResponseToDataResponse takes the SDK's style response and changes it into a
+// plugins.DataQueryResult. This is a wrapper so less of existing code needs to be changed. This should
 // be able to be removed in the near future https://github.com/grafana/grafana/pull/25472.
-func backendDataResponseToTSDBResponse(dr *backend.DataResponse, refID string) *tsdb.QueryResult {
-	qr := &tsdb.QueryResult{RefId: refID}
-
-	qr.Error = dr.Error
-
+func backendDataResponseToDataResponse(dr *backend.DataResponse, refID string) plugins.DataQueryResult {
+	qr := plugins.DataQueryResult{
+		RefID: refID,
+		Error: dr.Error,
+	}
 	if dr.Frames != nil {
-		qr.Dataframes = tsdb.NewDecodedDataFrames(dr.Frames)
+		qr.Dataframes = plugins.NewDecodedDataFrames(dr.Frames)
 	}
 	return qr
 }
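For reference, the renamed wrapper is called like this from `Query` above; the surrounding function is illustrative only (the wrapper is unexported in package `flux`):

```go
package flux

import (
	"github.com/grafana/grafana-plugin-sdk-go/backend"
	"github.com/grafana/grafana-plugin-sdk-go/data"
	"github.com/grafana/grafana/pkg/plugins"
)

// exampleWrap sketches how backendDataResponseToDataResponse is used:
// the SDK response is converted once per query, keyed by refID.
func exampleWrap() plugins.DataQueryResult {
	res := backend.DataResponse{
		Frames: data.Frames{data.NewFrame("example")},
	}
	// RefID is carried over so callers can index Results by query.
	return backendDataResponseToDataResponse(&res, "A")
}
```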
diff --git a/pkg/tsdb/influxdb/flux/query_models.go b/pkg/tsdb/influxdb/flux/query_models.go
index e1be9971b14..50837fe2ccd 100644
--- a/pkg/tsdb/influxdb/flux/query_models.go
+++ b/pkg/tsdb/influxdb/flux/query_models.go
@@ -7,7 +7,7 @@ import (
 	"github.com/grafana/grafana-plugin-sdk-go/backend"
 	"github.com/grafana/grafana/pkg/models"
-	"github.com/grafana/grafana/pkg/tsdb"
+	"github.com/grafana/grafana/pkg/plugins"
 )
 
 // queryOptions represents datasource configuration options
@@ -46,8 +46,9 @@ type queryModel struct {
 // 	return model, nil
 // }
 
-// getQueryModelTSDB builds a queryModel from tsdb.Query information and datasource configuration (dsInfo).
-func getQueryModelTSDB(query *tsdb.Query, timeRange *tsdb.TimeRange, dsInfo *models.DataSource) (*queryModel, error) {
+// getQueryModelTSDB builds a queryModel from plugins.DataQuery information and datasource configuration (dsInfo).
+func getQueryModelTSDB(query plugins.DataSubQuery, timeRange plugins.DataTimeRange,
+	dsInfo *models.DataSource) (*queryModel, error) {
 	model := &queryModel{}
 	queryBytes, err := query.Model.Encode()
 	if err != nil {
@@ -86,7 +87,7 @@ func getQueryModelTSDB(query *tsdb.Query, timeRange *tsdb.TimeRange, dsInfo *mod
 	if model.MaxDataPoints == 0 {
 		model.MaxDataPoints = 10000 // 10k/series should be a reasonable place to abort!
 	}
-	model.Interval = time.Millisecond * time.Duration(query.IntervalMs)
+	model.Interval = time.Millisecond * time.Duration(query.IntervalMS)
 	if model.Interval.Milliseconds() == 0 {
 		model.Interval = time.Millisecond // 1ms
 	}
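The only behavioral content in this hunk is the defaulting (`IntervalMs` is merely renamed `IntervalMS` on `plugins.DataSubQuery`). A sketch of just those rules; the helper name is hypothetical, the values are taken from the hunk above:

```go
package flux

import (
	"time"

	"github.com/grafana/grafana/pkg/plugins"
)

// applyDefaults is a hypothetical extraction of the defaulting done in
// getQueryModelTSDB above.
func applyDefaults(model *queryModel, q plugins.DataSubQuery) {
	if model.MaxDataPoints == 0 {
		model.MaxDataPoints = 10000 // 10k/series abort point, as above
	}
	model.Interval = time.Millisecond * time.Duration(q.IntervalMS)
	if model.Interval.Milliseconds() == 0 {
		model.Interval = time.Millisecond // never let the interval be zero
	}
}
```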
diff --git a/pkg/tsdb/influxdb/influxdb.go b/pkg/tsdb/influxdb/influxdb.go
index 685c0c0a495..44ed691aefb 100644
--- a/pkg/tsdb/influxdb/influxdb.go
+++ b/pkg/tsdb/influxdb/influxdb.go
@@ -12,19 +12,19 @@ import (
 	"github.com/grafana/grafana/pkg/infra/log"
 	"github.com/grafana/grafana/pkg/models"
+	"github.com/grafana/grafana/pkg/plugins"
 	"github.com/grafana/grafana/pkg/setting"
-	"github.com/grafana/grafana/pkg/tsdb"
 	"github.com/grafana/grafana/pkg/tsdb/influxdb/flux"
 )
 
-type InfluxDBExecutor struct {
+type Executor struct {
 	// *models.DataSource
 	QueryParser    *InfluxdbQueryParser
 	ResponseParser *ResponseParser
 }
 
-func NewInfluxDBExecutor(datasource *models.DataSource) (tsdb.TsdbQueryEndpoint, error) {
-	return &InfluxDBExecutor{
+func NewExecutor(*models.DataSource) (plugins.DataPlugin, error) {
+	return &Executor{
 		QueryParser:    &InfluxdbQueryParser{},
 		ResponseParser: &ResponseParser{},
 	}, nil
@@ -38,10 +38,10 @@ var ErrInvalidHttpMode error = errors.New("'httpMode' should be either 'GET' or
 
 func init() {
 	glog = log.New("tsdb.influxdb")
-	tsdb.RegisterTsdbQueryEndpoint("influxdb", NewInfluxDBExecutor)
 }
 
-func (e *InfluxDBExecutor) Query(ctx context.Context, dsInfo *models.DataSource, tsdbQuery *tsdb.TsdbQuery) (*tsdb.Response, error) {
+func (e *Executor) DataQuery(ctx context.Context, dsInfo *models.DataSource, tsdbQuery plugins.DataQuery) (
+	plugins.DataResponse, error) {
 	glog.Debug("Received a query request", "numQueries", len(tsdbQuery.Queries))
 
 	version := dsInfo.JsonData.Get("version").MustString("")
@@ -54,14 +54,14 @@ func (e *InfluxDBExecutor) Query(ctx context.Context, dsInfo *models.DataSource,
 
 	// NOTE: the following path is currently only called from alerting queries
 	// In dashboards, the request runs through proxy and are managed in the frontend
-	query, err := e.getQuery(dsInfo, tsdbQuery.Queries, tsdbQuery)
+	query, err := e.getQuery(dsInfo, tsdbQuery)
 	if err != nil {
-		return nil, err
+		return plugins.DataResponse{}, err
 	}
 
 	rawQuery, err := query.Build(tsdbQuery)
 	if err != nil {
-		return nil, err
+		return plugins.DataResponse{}, err
 	}
 
 	if setting.Env == setting.Dev {
@@ -70,17 +70,17 @@ func (e *InfluxDBExecutor) Query(ctx context.Context, dsInfo *models.DataSource,
 
 	req, err := e.createRequest(ctx, dsInfo, rawQuery)
 	if err != nil {
-		return nil, err
+		return plugins.DataResponse{}, err
 	}
 
 	httpClient, err := dsInfo.GetHttpClient()
 	if err != nil {
-		return nil, err
+		return plugins.DataResponse{}, err
 	}
 
 	resp, err := httpClient.Do(req)
 	if err != nil {
-		return nil, err
+		return plugins.DataResponse{}, err
 	}
 	defer func() {
 		if err := resp.Body.Close(); err != nil {
@@ -88,41 +88,39 @@ func (e *InfluxDBExecutor) Query(ctx context.Context, dsInfo *models.DataSource,
 		}
 	}()
 	if resp.StatusCode/100 != 2 {
-		return nil, fmt.Errorf("InfluxDB returned error status: %s", resp.Status)
+		return plugins.DataResponse{}, fmt.Errorf("InfluxDB returned error status: %s", resp.Status)
 	}
 
 	var response Response
 	dec := json.NewDecoder(resp.Body)
 	dec.UseNumber()
 	if err := dec.Decode(&response); err != nil {
-		return nil, err
+		return plugins.DataResponse{}, err
 	}
 
 	if response.Err != nil {
-		return nil, response.Err
+		return plugins.DataResponse{}, response.Err
 	}
 
-	result := &tsdb.Response{}
-	result.Results = make(map[string]*tsdb.QueryResult)
-	result.Results["A"] = e.ResponseParser.Parse(&response, query)
+	result := plugins.DataResponse{
+		Results: map[string]plugins.DataQueryResult{
+			"A": e.ResponseParser.Parse(&response, query),
+		},
+	}
 
 	return result, nil
 }
 
-func (e *InfluxDBExecutor) getQuery(dsInfo *models.DataSource, queries []*tsdb.Query, context *tsdb.TsdbQuery) (*Query, error) {
-	if len(queries) == 0 {
+func (e *Executor) getQuery(dsInfo *models.DataSource, query plugins.DataQuery) (*Query, error) {
+	if len(query.Queries) == 0 {
 		return nil, fmt.Errorf("query request contains no queries")
 	}
 
 	// The model supports multiple queries, but right now this is only used from
 	// alerting so we only needed to support batch executing 1 query at a time.
-	query, err := e.QueryParser.Parse(queries[0].Model, dsInfo)
-	if err != nil {
-		return nil, err
-	}
-	return query, nil
+	return e.QueryParser.Parse(query.Queries[0].Model, dsInfo)
}
 
-func (e *InfluxDBExecutor) createRequest(ctx context.Context, dsInfo *models.DataSource, query string) (*http.Request, error) {
+func (e *Executor) createRequest(ctx context.Context, dsInfo *models.DataSource, query string) (*http.Request, error) {
 	u, err := url.Parse(dsInfo.Url)
 	if err != nil {
 		return nil, err
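Note that the rewritten executor keys its single result under "A" regardless of the query's refId, since this path is only hit from alerting with exactly one query. A caller therefore reads it back like this (sketch, not patch code):

```go
package influxdb

import "github.com/grafana/grafana/pkg/plugins"

// singleResult is an illustrative helper showing how the alerting
// caller consumes the response produced by DataQuery above.
func singleResult(resp plugins.DataResponse) (plugins.DataQueryResult, bool) {
	qr, ok := resp.Results["A"]
	return qr, ok
}
```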
diff --git a/pkg/tsdb/influxdb/influxdb_test.go b/pkg/tsdb/influxdb/influxdb_test.go
index 3d4801e9036..a480c7a7045 100644
--- a/pkg/tsdb/influxdb/influxdb_test.go
+++ b/pkg/tsdb/influxdb/influxdb_test.go
@@ -12,14 +12,14 @@ import (
 	"github.com/stretchr/testify/require"
 )
 
-func TestInfluxDBExecutor_createRequest(t *testing.T) {
+func TestExecutor_createRequest(t *testing.T) {
 	datasource := &models.DataSource{
 		Url:      "http://awesome-influxdb:1337",
 		Database: "awesome-db",
 		JsonData: simplejson.New(),
 	}
 	query := "SELECT awesomeness FROM somewhere"
-	e := &InfluxDBExecutor{
+	e := &Executor{
 		QueryParser:    &InfluxdbQueryParser{},
 		ResponseParser: &ResponseParser{},
 	}
diff --git a/pkg/tsdb/influxdb/model_parser.go b/pkg/tsdb/influxdb/model_parser.go
index 7d728e8dea9..b7375e8ae39 100644
--- a/pkg/tsdb/influxdb/model_parser.go
+++ b/pkg/tsdb/influxdb/model_parser.go
@@ -6,7 +6,7 @@ import (
 	"github.com/grafana/grafana/pkg/components/simplejson"
 	"github.com/grafana/grafana/pkg/models"
-	"github.com/grafana/grafana/pkg/tsdb"
+	"github.com/grafana/grafana/pkg/tsdb/interval"
 )
 
 type InfluxdbQueryParser struct{}
@@ -40,7 +40,7 @@ func (qp *InfluxdbQueryParser) Parse(model *simplejson.Json, dsInfo *models.Data
 		return nil, err
 	}
 
-	parsedInterval, err := tsdb.GetIntervalFrom(dsInfo, model, time.Millisecond*1)
+	parsedInterval, err := interval.GetIntervalFrom(dsInfo, model, time.Millisecond*1)
 	if err != nil {
 		return nil, err
 	}
diff --git a/pkg/tsdb/influxdb/query.go b/pkg/tsdb/influxdb/query.go
index 90b5d07d0bc..cceba318c8c 100644
--- a/pkg/tsdb/influxdb/query.go
+++ b/pkg/tsdb/influxdb/query.go
@@ -6,7 +6,8 @@ import (
 	"strconv"
 	"strings"
 
-	"github.com/grafana/grafana/pkg/tsdb"
+	"github.com/grafana/grafana/pkg/plugins"
+	"github.com/grafana/grafana/pkg/tsdb/interval"
 )
 
 var (
@@ -14,7 +15,7 @@ var (
 	regexpMeasurementPattern = regexp.MustCompile(`^\/.*\/$`)
 )
 
-func (query *Query) Build(queryContext *tsdb.TsdbQuery) (string, error) {
+func (query *Query) Build(queryContext plugins.DataQuery) (string, error) {
 	var res string
 	if query.UseRawQuery && query.RawQuery != "" {
 		res = query.RawQuery
@@ -27,13 +28,13 @@ func (query *Query) Build(queryContext *tsdb.TsdbQuery) (string, error) {
 		res += query.renderTz()
 	}
 
-	calculator := tsdb.NewIntervalCalculator(&tsdb.IntervalOptions{})
-	interval := calculator.Calculate(queryContext.TimeRange, query.Interval)
+	calculator := interval.NewCalculator(interval.CalculatorOptions{})
+	i := calculator.Calculate(*queryContext.TimeRange, query.Interval)
 
 	res = strings.ReplaceAll(res, "$timeFilter", query.renderTimeFilter(queryContext))
-	res = strings.ReplaceAll(res, "$interval", interval.Text)
-	res = strings.ReplaceAll(res, "$__interval_ms", strconv.FormatInt(interval.Milliseconds(), 10))
-	res = strings.ReplaceAll(res, "$__interval", interval.Text)
+	res = strings.ReplaceAll(res, "$interval", i.Text)
+	res = strings.ReplaceAll(res, "$__interval_ms", strconv.FormatInt(i.Milliseconds(), 10))
+	res = strings.ReplaceAll(res, "$__interval", i.Text)
 
 	return res, nil
 }
@@ -77,7 +78,7 @@ func (query *Query) renderTags() []string {
 	return res
 }
 
-func (query *Query) renderTimeFilter(queryContext *tsdb.TsdbQuery) string {
+func (query *Query) renderTimeFilter(queryContext plugins.DataQuery) string {
 	from := "now() - " + queryContext.TimeRange.From
 	to := ""
 
@@ -88,7 +89,7 @@ func (query *Query) renderTimeFilter(queryContext *tsdb.TsdbQuery) string {
 	return fmt.Sprintf("time > %s%s", from, to)
 }
 
-func (query *Query) renderSelectors(queryContext *tsdb.TsdbQuery) string {
+func (query *Query) renderSelectors(queryContext plugins.DataQuery) string {
 	res := "SELECT "
 
 	var selectors []string
@@ -135,7 +136,7 @@ func (query *Query) renderWhereClause() string {
 	return res
 }
 
-func (query *Query) renderGroupBy(queryContext *tsdb.TsdbQuery) string {
+func (query *Query) renderGroupBy(queryContext plugins.DataQuery) string {
 	groupBy := ""
 	for i, group := range query.GroupBy {
 		if i == 0 {
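Since `interval` is now a package name, the local variable formerly called `interval` becomes `i`; the substitution logic itself is unchanged, including the ordering that replaces `$__interval_ms` before `$__interval`. A sketch under those assumptions (hypothetical helper):

```go
package influxdb

import (
	"strconv"
	"strings"
	"time"

	"github.com/grafana/grafana/pkg/plugins"
	"github.com/grafana/grafana/pkg/tsdb/interval"
)

// renderInterval is a hypothetical extraction of the variable
// substitution performed in Build above.
func renderInterval(sql string, tr plugins.DataTimeRange, minInterval time.Duration) string {
	calc := interval.NewCalculator(interval.CalculatorOptions{})
	i := calc.Calculate(tr, minInterval)
	sql = strings.ReplaceAll(sql, "$interval", i.Text)
	// _ms must be replaced first, or "$__interval" would match its prefix.
	sql = strings.ReplaceAll(sql, "$__interval_ms", strconv.FormatInt(i.Milliseconds(), 10))
	return strings.ReplaceAll(sql, "$__interval", i.Text)
}
```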
diff --git a/pkg/tsdb/influxdb/query_part.go b/pkg/tsdb/influxdb/query_part.go
index 049fe39e473..b8588cb5a87 100644
--- a/pkg/tsdb/influxdb/query_part.go
+++ b/pkg/tsdb/influxdb/query_part.go
@@ -4,7 +4,7 @@ import (
 	"fmt"
 	"strings"
 
-	"github.com/grafana/grafana/pkg/tsdb"
+	"github.com/grafana/grafana/pkg/plugins"
 )
 
 var renders map[string]QueryDefinition
@@ -15,7 +15,7 @@ type DefinitionParameters struct {
 }
 
 type QueryDefinition struct {
-	Renderer func(query *Query, queryContext *tsdb.TsdbQuery, part *QueryPart, innerExpr string) string
+	Renderer func(query *Query, queryContext plugins.DataQuery, part *QueryPart, innerExpr string) string
 	Params   []DefinitionParameters
 }
@@ -97,14 +97,14 @@ func init() {
 	renders["alias"] = QueryDefinition{Renderer: aliasRenderer}
 }
 
-func fieldRenderer(query *Query, queryContext *tsdb.TsdbQuery, part *QueryPart, innerExpr string) string {
+func fieldRenderer(query *Query, queryContext plugins.DataQuery, part *QueryPart, innerExpr string) string {
 	if part.Params[0] == "*" {
 		return "*"
 	}
 	return fmt.Sprintf(`"%s"`, part.Params[0])
 }
 
-func functionRenderer(query *Query, queryContext *tsdb.TsdbQuery, part *QueryPart, innerExpr string) string {
+func functionRenderer(query *Query, queryContext plugins.DataQuery, part *QueryPart, innerExpr string) string {
 	for i, param := range part.Params {
 		if part.Type == "time" && param == "auto" {
 			part.Params[i] = "$__interval"
@@ -120,11 +120,11 @@ func functionRenderer(query *Query, queryContext *tsdb.TsdbQuery, part *QueryPar
 	return fmt.Sprintf("%s(%s)", part.Type, params)
 }
 
-func suffixRenderer(query *Query, queryContext *tsdb.TsdbQuery, part *QueryPart, innerExpr string) string {
+func suffixRenderer(query *Query, queryContext plugins.DataQuery, part *QueryPart, innerExpr string) string {
 	return fmt.Sprintf("%s %s", innerExpr, part.Params[0])
 }
 
-func aliasRenderer(query *Query, queryContext *tsdb.TsdbQuery, part *QueryPart, innerExpr string) string {
+func aliasRenderer(query *Query, queryContext plugins.DataQuery, part *QueryPart, innerExpr string) string {
 	return fmt.Sprintf(`%s AS "%s"`, innerExpr, part.Params[0])
 }
@@ -147,6 +147,6 @@ type QueryPart struct {
 	Params []string
 }
 
-func (qp *QueryPart) Render(query *Query, queryContext *tsdb.TsdbQuery, expr string) string {
+func (qp *QueryPart) Render(query *Query, queryContext plugins.DataQuery, expr string) string {
 	return qp.Def.Renderer(query, queryContext, qp, expr)
 }
diff --git a/pkg/tsdb/influxdb/query_part_test.go b/pkg/tsdb/influxdb/query_part_test.go
index 915c066b460..067a5ae7c0d 100644
--- a/pkg/tsdb/influxdb/query_part_test.go
+++ b/pkg/tsdb/influxdb/query_part_test.go
@@ -3,7 +3,7 @@ package influxdb
 import (
 	"testing"
 
-	"github.com/grafana/grafana/pkg/tsdb"
+	"github.com/grafana/grafana/pkg/plugins"
 )
 
 func TestInfluxdbQueryPart(t *testing.T) {
@@ -27,7 +27,8 @@ func TestInfluxdbQueryPart(t *testing.T) {
 		{mode: "non_negative_difference", params: []string{}, input: "max(value)", expected: `non_negative_difference(max(value))`},
 	}
 
-	queryContext := &tsdb.TsdbQuery{TimeRange: tsdb.NewTimeRange("5m", "now")}
+	timeRange := plugins.NewDataTimeRange("5m", "now")
+	queryContext := plugins.DataQuery{TimeRange: &timeRange}
 	query := &Query{}
 
 	for _, tc := range tcs {
diff --git a/pkg/tsdb/influxdb/query_test.go b/pkg/tsdb/influxdb/query_test.go
index 16ebb038c1e..5d45b25ad26 100644
--- a/pkg/tsdb/influxdb/query_test.go
+++ b/pkg/tsdb/influxdb/query_test.go
@@ -6,7 +6,7 @@ import (
 
 	"strings"
 
-	"github.com/grafana/grafana/pkg/tsdb"
+	"github.com/grafana/grafana/pkg/plugins"
 	. "github.com/smartystreets/goconvey/convey"
 )
 
@@ -27,8 +27,9 @@ func TestInfluxdbQueryBuilder(t *testing.T) {
 		tag1 := &Tag{Key: "hostname", Value: "server1", Operator: "="}
 		tag2 := &Tag{Key: "hostname", Value: "server2", Operator: "=", Condition: "OR"}
 
-		queryContext := &tsdb.TsdbQuery{
-			TimeRange: tsdb.NewTimeRange("5m", "now"),
+		timeRange := plugins.NewDataTimeRange("5m", "now")
+		queryContext := plugins.DataQuery{
+			TimeRange: &timeRange,
 		}
 
 		Convey("can build simple query", func() {
@@ -114,12 +115,14 @@ func TestInfluxdbQueryBuilder(t *testing.T) {
 		query := Query{}
 		Convey("render from: 2h to now-1h", func() {
 			query := Query{}
-			queryContext := &tsdb.TsdbQuery{TimeRange: tsdb.NewTimeRange("2h", "now-1h")}
+			timeRange := plugins.NewDataTimeRange("2h", "now-1h")
+			queryContext := plugins.DataQuery{TimeRange: &timeRange}
 			So(query.renderTimeFilter(queryContext), ShouldEqual, "time > now() - 2h and time < now() - 1h")
 		})
 
 		Convey("render from: 10m", func() {
-			queryContext := &tsdb.TsdbQuery{TimeRange: tsdb.NewTimeRange("10m", "now")}
+			timeRange := plugins.NewDataTimeRange("10m", "now")
+			queryContext := plugins.DataQuery{TimeRange: &timeRange}
 			So(query.renderTimeFilter(queryContext), ShouldEqual, "time > now() - 10m")
 		})
 	})
diff --git a/pkg/tsdb/influxdb/response_parser.go b/pkg/tsdb/influxdb/response_parser.go
index fd502667c61..ccdf1117c3a 100644
--- a/pkg/tsdb/influxdb/response_parser.go
+++ b/pkg/tsdb/influxdb/response_parser.go
@@ -8,7 +8,7 @@ import (
 	"strings"
 
 	"github.com/grafana/grafana/pkg/components/null"
-	"github.com/grafana/grafana/pkg/tsdb"
+	"github.com/grafana/grafana/pkg/plugins"
 )
 
 type ResponseParser struct{}
@@ -21,8 +21,8 @@ func init() {
 	legendFormat = regexp.MustCompile(`\[\[(\w+)(\.\w+)*\]\]*|\$\s*(\w+?)*`)
 }
 
-func (rp *ResponseParser) Parse(response *Response, query *Query) *tsdb.QueryResult {
-	queryRes := tsdb.NewQueryResult()
+func (rp *ResponseParser) Parse(response *Response, query *Query) plugins.DataQueryResult {
+	var queryRes plugins.DataQueryResult
 
 	for _, result := range response.Results {
 		queryRes.Series = append(queryRes.Series, rp.transformRows(result.Series, queryRes, query)...)
@@ -34,22 +34,22 @@ func (rp *ResponseParser) Parse(response *Response, query *Query) *tsdb.QueryRes
 	return queryRes
 }
 
-func (rp *ResponseParser) transformRows(rows []Row, queryResult *tsdb.QueryResult, query *Query) tsdb.TimeSeriesSlice {
-	var result tsdb.TimeSeriesSlice
+func (rp *ResponseParser) transformRows(rows []Row, queryResult plugins.DataQueryResult, query *Query) plugins.DataTimeSeriesSlice {
+	var result plugins.DataTimeSeriesSlice
 	for _, row := range rows {
 		for columnIndex, column := range row.Columns {
 			if column == "time" {
 				continue
 			}
 
-			var points tsdb.TimeSeriesPoints
+			var points plugins.DataTimeSeriesPoints
 			for _, valuePair := range row.Values {
 				point, err := rp.parseTimepoint(valuePair, columnIndex)
 				if err == nil {
 					points = append(points, point)
 				}
 			}
-			result = append(result, &tsdb.TimeSeries{
+			result = append(result, plugins.DataTimeSeries{
 				Name:   rp.formatSeriesName(row, column, query),
 				Points: points,
 				Tags:   row.Tags,
@@ -115,19 +115,19 @@ func (rp *ResponseParser) buildSeriesNameFromQuery(row Row, column string) strin
 	return fmt.Sprintf("%s.%s%s", row.Name, column, tagText)
 }
 
-func (rp *ResponseParser) parseTimepoint(valuePair []interface{}, valuePosition int) (tsdb.TimePoint, error) {
+func (rp *ResponseParser) parseTimepoint(valuePair []interface{}, valuePosition int) (plugins.DataTimePoint, error) {
 	value := rp.parseValue(valuePair[valuePosition])
 
 	timestampNumber, ok := valuePair[0].(json.Number)
 	if !ok {
-		return tsdb.TimePoint{}, fmt.Errorf("valuePair[0] has invalid type: %#v", valuePair[0])
+		return plugins.DataTimePoint{}, fmt.Errorf("valuePair[0] has invalid type: %#v", valuePair[0])
 	}
 	timestamp, err := timestampNumber.Float64()
 	if err != nil {
-		return tsdb.TimePoint{}, err
+		return plugins.DataTimePoint{}, err
 	}
 
-	return tsdb.NewTimePoint(value, timestamp), nil
+	return plugins.DataTimePoint{value, null.FloatFrom(timestamp)}, nil
 }
 
 func (rp *ResponseParser) parseValue(value interface{}) null.Float {
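`plugins.DataTimePoint` is a `[2]null.Float` pair, value first and millisecond timestamp second, which is why `parseTimepoint` above can build it positionally after `NewTimePoint` goes away. A tiny sketch:

```go
package influxdb

import (
	"github.com/grafana/grafana/pkg/components/null"
	"github.com/grafana/grafana/pkg/plugins"
)

// examplePoint shows the layout assumed by parseTimepoint above:
// index 0 is the value, index 1 the timestamp in milliseconds.
func examplePoint() plugins.DataTimePoint {
	return plugins.DataTimePoint{null.FloatFrom(3.14), null.FloatFrom(1521117000000)}
}
```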
diff --git a/pkg/tsdb/interval.go b/pkg/tsdb/interval/interval.go
similarity index 90%
rename from pkg/tsdb/interval.go
rename to pkg/tsdb/interval/interval.go
index 781cd30b41a..b256a351e11 100644
--- a/pkg/tsdb/interval.go
+++ b/pkg/tsdb/interval/interval.go
@@ -1,4 +1,4 @@
-package tsdb
+package interval
 
 import (
 	"fmt"
@@ -8,6 +8,7 @@ import (
 
 	"github.com/grafana/grafana/pkg/components/simplejson"
 	"github.com/grafana/grafana/pkg/models"
+	"github.com/grafana/grafana/pkg/plugins"
 )
 
 var (
@@ -26,25 +27,23 @@ type intervalCalculator struct {
 	minInterval time.Duration
 }
 
-type IntervalCalculator interface {
-	Calculate(timeRange *TimeRange, minInterval time.Duration) Interval
+type Calculator interface {
+	Calculate(timeRange plugins.DataTimeRange, minInterval time.Duration) Interval
 }
 
-type IntervalOptions struct {
+type CalculatorOptions struct {
 	MinInterval time.Duration
 }
 
-func NewIntervalCalculator(opt *IntervalOptions) *intervalCalculator {
-	if opt == nil {
-		opt = &IntervalOptions{}
-	}
-
+func NewCalculator(opts ...CalculatorOptions) *intervalCalculator {
 	calc := &intervalCalculator{}
 
-	if opt.MinInterval == 0 {
-		calc.minInterval = defaultMinInterval
-	} else {
-		calc.minInterval = opt.MinInterval
+	for _, o := range opts {
+		if o.MinInterval == 0 {
+			calc.minInterval = defaultMinInterval
+		} else {
+			calc.minInterval = o.MinInterval
+		}
 	}
 
 	return calc
@@ -54,7 +53,7 @@ func (i *Interval) Milliseconds() int64 {
 	return i.Value.Nanoseconds() / int64(time.Millisecond)
 }
 
-func (ic *intervalCalculator) Calculate(timerange *TimeRange, minInterval time.Duration) Interval {
+func (ic *intervalCalculator) Calculate(timerange plugins.DataTimeRange, minInterval time.Duration) Interval {
 	to := timerange.MustGetTo().UnixNano()
 	from := timerange.MustGetFrom().UnixNano()
 	interval := time.Duration((to - from) / defaultRes)
diff --git a/pkg/tsdb/interval_test.go b/pkg/tsdb/interval/interval_test.go
similarity index 83%
rename from pkg/tsdb/interval_test.go
rename to pkg/tsdb/interval/interval_test.go
index bb9edbb5c29..88bba2a2d86 100644
--- a/pkg/tsdb/interval_test.go
+++ b/pkg/tsdb/interval/interval_test.go
@@ -1,4 +1,4 @@
-package tsdb
+package interval
 
 import (
 	"testing"
@@ -6,21 +6,22 @@ import (
 
 	"github.com/grafana/grafana/pkg/components/simplejson"
 	"github.com/grafana/grafana/pkg/models"
+	"github.com/grafana/grafana/pkg/plugins"
 	"github.com/stretchr/testify/assert"
 )
 
 func TestIntervalCalculator_Calculate(t *testing.T) {
-	calculator := NewIntervalCalculator(&IntervalOptions{})
+	calculator := NewCalculator(CalculatorOptions{})
 
 	testCases := []struct {
 		name      string
-		timeRange *TimeRange
+		timeRange plugins.DataTimeRange
 		expected  string
	}{
-		{"from 5m to now", NewTimeRange("5m", "now"), "200ms"},
-		{"from 15m to now", NewTimeRange("15m", "now"), "500ms"},
-		{"from 30m to now", NewTimeRange("30m", "now"), "1s"},
-		{"from 1h to now", NewTimeRange("1h", "now"), "2s"},
+		{"from 5m to now", plugins.NewDataTimeRange("5m", "now"), "200ms"},
+		{"from 15m to now", plugins.NewDataTimeRange("15m", "now"), "500ms"},
+		{"from 30m to now", plugins.NewDataTimeRange("30m", "now"), "1s"},
+		{"from 1h to now", plugins.NewDataTimeRange("1h", "now"), "2s"},
 	}
 
 	for _, tc := range testCases {
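The constructor is now variadic, with one subtlety worth noting: with zero arguments the loop never runs, so `minInterval` stays 0 rather than falling back to `defaultMinInterval`. Every caller in this patch passes an explicit `CalculatorOptions`, so behavior is preserved. The two call shapes used here:

```go
package interval

import "time"

// exampleCalculators shows the two ways NewCalculator is invoked in
// this patch. Calling it with no options at all would leave the
// minimum interval at 0, which no caller in this patch does.
func exampleCalculators() (Calculator, Calculator) {
	dflt := NewCalculator(CalculatorOptions{})                         // falls back to defaultMinInterval
	loki := NewCalculator(CalculatorOptions{MinInterval: time.Second}) // explicit 1s floor
	return dflt, loki
}
```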
diff --git a/pkg/tsdb/loki/loki.go b/pkg/tsdb/loki/loki.go
index bfa84759fae..3aa3240572f 100644
--- a/pkg/tsdb/loki/loki.go
+++ b/pkg/tsdb/loki/loki.go
@@ -10,7 +10,8 @@ import (
 	"github.com/grafana/grafana/pkg/components/null"
 	"github.com/grafana/grafana/pkg/infra/log"
 	"github.com/grafana/grafana/pkg/models"
-	"github.com/grafana/grafana/pkg/tsdb"
+	"github.com/grafana/grafana/pkg/plugins"
+	"github.com/grafana/grafana/pkg/tsdb/interval"
 	"github.com/grafana/loki/pkg/logcli/client"
 	"github.com/grafana/loki/pkg/loghttp"
 	"github.com/grafana/loki/pkg/logproto"
@@ -18,28 +19,30 @@ import (
 	"github.com/prometheus/common/model"
 )
 
-type LokiExecutor struct{}
+type LokiExecutor struct {
+	intervalCalculator interval.Calculator
+}
 
-func NewLokiExecutor(dsInfo *models.DataSource) (tsdb.TsdbQueryEndpoint, error) {
-	return &LokiExecutor{}, nil
+func NewExecutor(dsInfo *models.DataSource) (plugins.DataPlugin, error) {
+	return newExecutor(), nil
+}
+
+func newExecutor() *LokiExecutor {
+	return &LokiExecutor{
+		intervalCalculator: interval.NewCalculator(interval.CalculatorOptions{MinInterval: time.Second * 1}),
+	}
 }
 
 var (
-	plog               log.Logger
-	legendFormat       *regexp.Regexp
-	intervalCalculator tsdb.IntervalCalculator
+	plog         = log.New("tsdb.loki")
+	legendFormat = regexp.MustCompile(`\{\{\s*(.+?)\s*\}\}`)
 )
 
-func init() {
-	plog = log.New("tsdb.loki")
-	tsdb.RegisterTsdbQueryEndpoint("loki", NewLokiExecutor)
-	legendFormat = regexp.MustCompile(`\{\{\s*(.+?)\s*\}\}`)
-	intervalCalculator = tsdb.NewIntervalCalculator(&tsdb.IntervalOptions{MinInterval: time.Second * 1})
-}
-
-func (e *LokiExecutor) Query(ctx context.Context, dsInfo *models.DataSource, tsdbQuery *tsdb.TsdbQuery) (*tsdb.Response, error) {
-	result := &tsdb.Response{
-		Results: map[string]*tsdb.QueryResult{},
+// DataQuery executes a Loki query.
+func (e *LokiExecutor) DataQuery(ctx context.Context, dsInfo *models.DataSource,
+	queryContext plugins.DataQuery) (plugins.DataResponse, error) {
+	result := plugins.DataResponse{
+		Results: map[string]plugins.DataQueryResult{},
 	}
 
 	client := &client.DefaultClient{
@@ -48,9 +51,9 @@ func (e *LokiExecutor) Query(ctx context.Context, dsInfo *models.DataSource, tsd
 		Password: dsInfo.DecryptedBasicAuthPassword(),
 	}
 
-	queries, err := parseQuery(dsInfo, tsdbQuery.Queries, tsdbQuery)
+	queries, err := e.parseQuery(dsInfo, queryContext)
 	if err != nil {
-		return nil, err
+		return plugins.DataResponse{}, err
 	}
 
 	for _, query := range queries {
@@ -67,23 +70,22 @@ func (e *LokiExecutor) Query(ctx context.Context, dsInfo *models.DataSource, tsd
 		interval := time.Second * 1
 
 		value, err := client.QueryRange(query.Expr, limit, query.Start, query.End, logproto.BACKWARD, query.Step, interval, false)
-
 		if err != nil {
-			return nil, err
+			return plugins.DataResponse{}, err
 		}
 
 		queryResult, err := parseResponse(value, query)
 		if err != nil {
-			return nil, err
+			return plugins.DataResponse{}, err
 		}
-		result.Results[query.RefId] = queryResult
+		result.Results[query.RefID] = queryResult
 	}
 
 	return result, nil
 }
 
 //If legend (using of name or pattern instead of time series name) is used, use that name/pattern for formatting
-func formatLegend(metric model.Metric, query *LokiQuery) string {
+func formatLegend(metric model.Metric, query *lokiQuery) string {
 	if query.LegendFormat == "" {
 		return metric.String()
 	}
@@ -101,9 +103,9 @@ func formatLegend(metric model.Metric, query *LokiQuery) string {
 	return string(result)
 }
 
-func parseQuery(dsInfo *models.DataSource, queries []*tsdb.Query, queryContext *tsdb.TsdbQuery) ([]*LokiQuery, error) {
-	qs := []*LokiQuery{}
-	for _, queryModel := range queries {
+func (e *LokiExecutor) parseQuery(dsInfo *models.DataSource, queryContext plugins.DataQuery) ([]*lokiQuery, error) {
+	qs := []*lokiQuery{}
+	for _, queryModel := range queryContext.Queries {
 		expr, err := queryModel.Model.Get("expr").String()
 		if err != nil {
 			return nil, fmt.Errorf("failed to parse Expr: %v", err)
@@ -121,29 +123,29 @@ func parseQuery(dsInfo *models.DataSource, queries []*tsdb.Query, queryContext *
 			return nil, fmt.Errorf("failed to parse To: %v", err)
 		}
 
-		dsInterval, err := tsdb.GetIntervalFrom(dsInfo, queryModel.Model, time.Second)
+		dsInterval, err := interval.GetIntervalFrom(dsInfo, queryModel.Model, time.Second)
 		if err != nil {
 			return nil, fmt.Errorf("failed to parse Interval: %v", err)
 		}
 
-		interval := intervalCalculator.Calculate(queryContext.TimeRange, dsInterval)
+		interval := e.intervalCalculator.Calculate(*queryContext.TimeRange, dsInterval)
 		step := time.Duration(int64(interval.Value))
 
-		qs = append(qs, &LokiQuery{
+		qs = append(qs, &lokiQuery{
 			Expr:         expr,
 			Step:         step,
 			LegendFormat: format,
 			Start:        start,
 			End:          end,
-			RefId:        queryModel.RefId,
+			RefID:        queryModel.RefID,
 		})
 	}
 
 	return qs, nil
 }
 
-func parseResponse(value *loghttp.QueryResponse, query *LokiQuery) (*tsdb.QueryResult, error) {
-	queryRes := tsdb.NewQueryResult()
+func parseResponse(value *loghttp.QueryResponse, query *lokiQuery) (plugins.DataQueryResult, error) {
+	var queryRes plugins.DataQueryResult
 
 	//We are currently processing only matrix results (for alerting)
 	data, ok := value.Data.Result.(loghttp.Matrix)
@@ -152,10 +154,10 @@ func parseResponse(value *loghttp.QueryResponse, query *LokiQuery) (*tsdb.QueryR
 	}
 
 	for _, v := range data {
-		series := tsdb.TimeSeries{
+		series := plugins.DataTimeSeries{
 			Name:   formatLegend(v.Metric, query),
 			Tags:   make(map[string]string, len(v.Metric)),
-			Points: make([]tsdb.TimePoint, 0, len(v.Values)),
+			Points: make([]plugins.DataTimePoint, 0, len(v.Values)),
 		}
 
 		for k, v := range v.Metric {
@@ -163,10 +165,12 @@ func parseResponse(value *loghttp.QueryResponse, query *LokiQuery) (*tsdb.QueryR
 		}
 
 		for _, k := range v.Values {
-			series.Points = append(series.Points, tsdb.NewTimePoint(null.FloatFrom(float64(k.Value)), float64(k.Timestamp.Unix()*1000)))
+			series.Points = append(series.Points, plugins.DataTimePoint{
+				null.FloatFrom(float64(k.Value)), null.FloatFrom(float64(k.Timestamp.Unix() * 1000)),
+			})
 		}
 
-		queryRes.Series = append(queryRes.Series, &series)
+		queryRes.Series = append(queryRes.Series, series)
 	}
 
 	return queryRes, nil
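`formatLegend` is untouched apart from the `lokiQuery` rename; for orientation, this is roughly what it produces for the `{{label}}` syntax, mirroring the legend test below (values illustrative, the helper is hypothetical):

```go
package loki

import "github.com/prometheus/common/model"

// exampleLegend mirrors the legend test in loki_test.go below:
// known labels are substituted, unknown ones render as empty.
func exampleLegend() string {
	metric := model.Metric{"app": "frontend", "device": "mobile"}
	q := &lokiQuery{LegendFormat: "legend {{app}} {{ device }} {{broken}}"}
	return formatLegend(metric, q) // roughly "legend frontend mobile "
}
```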
diff --git a/pkg/tsdb/loki/loki_test.go b/pkg/tsdb/loki/loki_test.go
index d8fa98ca438..565382324ca 100644
--- a/pkg/tsdb/loki/loki_test.go
+++ b/pkg/tsdb/loki/loki_test.go
@@ -6,7 +6,7 @@ import (
 
 	"github.com/grafana/grafana/pkg/components/simplejson"
 	"github.com/grafana/grafana/pkg/models"
-	"github.com/grafana/grafana/pkg/tsdb"
+	"github.com/grafana/grafana/pkg/plugins"
 	p "github.com/prometheus/common/model"
 	"github.com/stretchr/testify/require"
 )
@@ -22,7 +22,7 @@ func TestLoki(t *testing.T) {
 			p.LabelName("device"): p.LabelValue("mobile"),
 		}
 
-		query := &LokiQuery{
+		query := &lokiQuery{
 			LegendFormat: "legend {{app}} {{ device }} {{broken}}",
 		}
 
@@ -36,7 +36,7 @@ func TestLoki(t *testing.T) {
 			p.LabelName("device"): p.LabelValue("mobile"),
 		}
 
-		query := &LokiQuery{
+		query := &lokiQuery{
 			LegendFormat: "",
 		}
 
@@ -49,15 +49,19 @@ func TestLoki(t *testing.T) {
 			"format": "time_series",
 			"refId": "A"
 		}`
-		jsonModel, _ := simplejson.NewJson([]byte(json))
-		queryContext := &tsdb.TsdbQuery{}
-		queryModels := []*tsdb.Query{
-			{Model: jsonModel},
+		jsonModel, err := simplejson.NewJson([]byte(json))
+		require.NoError(t, err)
+		timeRange := plugins.NewDataTimeRange("12h", "now")
+		queryContext := plugins.DataQuery{
+			Queries: []plugins.DataSubQuery{
+				{Model: jsonModel},
+			},
+			TimeRange: &timeRange,
 		}
-		queryContext.TimeRange = tsdb.NewTimeRange("12h", "now")
-
-		models, err := parseQuery(dsInfo, queryModels, queryContext)
+		exe := newExecutor()
+		require.NoError(t, err)
+		models, err := exe.parseQuery(dsInfo, queryContext)
 		require.NoError(t, err)
 		require.Equal(t, time.Second*30, models[0].Step)
 	})
@@ -68,19 +72,24 @@ func TestLoki(t *testing.T) {
 			"format": "time_series",
 			"refId": "A"
 		}`
-		jsonModel, _ := simplejson.NewJson([]byte(json))
-		queryContext := &tsdb.TsdbQuery{}
-		queryModels := []*tsdb.Query{
-			{Model: jsonModel},
+		jsonModel, err := simplejson.NewJson([]byte(json))
+		require.NoError(t, err)
+		timeRange := plugins.NewDataTimeRange("48h", "now")
+		queryContext := plugins.DataQuery{
+			TimeRange: &timeRange,
+			Queries: []plugins.DataSubQuery{
+				{Model: jsonModel},
+			},
 		}
-
-		queryContext.TimeRange = tsdb.NewTimeRange("48h", "now")
-		models, err := parseQuery(dsInfo, queryModels, queryContext)
+		exe := newExecutor()
+		require.NoError(t, err)
+		models, err := exe.parseQuery(dsInfo, queryContext)
 		require.NoError(t, err)
 		require.Equal(t, time.Minute*2, models[0].Step)
 
-		queryContext.TimeRange = tsdb.NewTimeRange("1h", "now")
-		models, err = parseQuery(dsInfo, queryModels, queryContext)
+		timeRange = plugins.NewDataTimeRange("1h", "now")
+		queryContext.TimeRange = &timeRange
+		models, err = exe.parseQuery(dsInfo, queryContext)
 		require.NoError(t, err)
 		require.Equal(t, time.Second*2, models[0].Step)
 	})
diff --git a/pkg/tsdb/loki/types.go b/pkg/tsdb/loki/types.go
index 57ad485b42e..f434fc70db1 100644
--- a/pkg/tsdb/loki/types.go
+++ b/pkg/tsdb/loki/types.go
@@ -2,11 +2,11 @@ package loki
 
 import "time"
 
-type LokiQuery struct {
+type lokiQuery struct {
 	Expr         string
 	Step         time.Duration
 	LegendFormat string
 	Start        time.Time
 	End          time.Time
-	RefId        string
+	RefID        string
 }
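The step expectations in the tests above follow from the interval calculator: the range is divided by a default resolution and rounded to a round value, with the executor's 1s minimum applied. Judging by the `interval_test.go` expectations earlier in this patch (5m → 200ms), that resolution is 1500 points, an inference rather than a quoted constant. Worked out:

```go
package loki

import "time"

// exampleSteps works through the Step values loki_test.go expects.
// The 1500-point default resolution is inferred from interval_test.go.
func exampleSteps() []time.Duration {
	return []time.Duration{
		30 * time.Second, // 12h / 1500 = 28.8s -> rounds to 30s
		2 * time.Minute,  // 48h / 1500 = 115.2s -> rounds to 2m
		2 * time.Second,  // 1h / 1500 = 2.4s -> rounds to 2s
	}
}
```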
diff --git a/pkg/tsdb/models.go b/pkg/tsdb/models.go
deleted file mode 100644
index 6ff35fd6a5a..00000000000
--- a/pkg/tsdb/models.go
+++ /dev/null
@@ -1,272 +0,0 @@
-package tsdb
-
-import (
-	"encoding/base64"
-	"encoding/json"
-	"fmt"
-
-	"github.com/grafana/grafana-plugin-sdk-go/data"
-	"github.com/grafana/grafana/pkg/components/null"
-	"github.com/grafana/grafana/pkg/components/simplejson"
-	"github.com/grafana/grafana/pkg/models"
-	jsoniter "github.com/json-iterator/go"
-)
-
-// TsdbQuery contains all information about a query request.
-type TsdbQuery struct {
-	TimeRange *TimeRange
-	Queries   []*Query
-	Headers   map[string]string
-	Debug     bool
-	User      *models.SignedInUser
-}
-
-type Query struct {
-	RefId         string             `json:"refId"`
-	Model         *simplejson.Json   `json:"model,omitempty"`
-	DataSource    *models.DataSource `json:"datasource"`
-	MaxDataPoints int64              `json:"maxDataPoints"`
-	IntervalMs    int64              `json:"intervalMs"`
-	QueryType     string             `json:"queryType"`
-}
-
-type Response struct {
-	Results map[string]*QueryResult `json:"results"`
-	Message string                  `json:"message,omitempty"`
-}
-
-type QueryResult struct {
-	Error       error            `json:"-"`
-	ErrorString string           `json:"error,omitempty"`
-	RefId       string           `json:"refId"`
-	Meta        *simplejson.Json `json:"meta,omitempty"`
-	Series      TimeSeriesSlice  `json:"series"`
-	Tables      []*Table         `json:"tables"`
-	Dataframes  DataFrames       `json:"dataframes"`
-}
-
-// UnmarshalJSON deserializes a QueryResult from JSON.
-//
-// Deserialization support is required by tests.
-func (r *QueryResult) UnmarshalJSON(b []byte) error {
-	m := map[string]interface{}{}
-	// TODO: Use JSON decoder
-	if err := json.Unmarshal(b, &m); err != nil {
-		return err
-	}
-
-	refID, ok := m["refId"].(string)
-	if !ok {
-		return fmt.Errorf("can't decode field refId - not a string")
-	}
-	var meta *simplejson.Json
-	if m["meta"] != nil {
-		mm, ok := m["meta"].(map[string]interface{})
-		if !ok {
-			return fmt.Errorf("can't decode field meta - not a JSON object")
-		}
-		meta = simplejson.NewFromAny(mm)
-	}
-	var series TimeSeriesSlice
-	/* TODO
-	if m["series"] != nil {
-	}
-	*/
-	var tables []*Table
-	if m["tables"] != nil {
-		ts, ok := m["tables"].([]interface{})
-		if !ok {
-			return fmt.Errorf("can't decode field tables - not an array of Tables")
-		}
-		for _, ti := range ts {
-			tm, ok := ti.(map[string]interface{})
-			if !ok {
-				return fmt.Errorf("can't decode field tables - not an array of Tables")
-			}
-			var columns []TableColumn
-			cs, ok := tm["columns"].([]interface{})
-			if !ok {
-				return fmt.Errorf("can't decode field tables - not an array of Tables")
-			}
-			for _, ci := range cs {
-				cm, ok := ci.(map[string]interface{})
-				if !ok {
-					return fmt.Errorf("can't decode field tables - not an array of Tables")
-				}
-				val, ok := cm["text"].(string)
-				if !ok {
-					return fmt.Errorf("can't decode field tables - not an array of Tables")
-				}
-
-				columns = append(columns, TableColumn{Text: val})
-			}
-
-			rs, ok := tm["rows"].([]interface{})
-			if !ok {
-				return fmt.Errorf("can't decode field tables - not an array of Tables")
-			}
-			var rows []RowValues
-			for _, ri := range rs {
-				vals, ok := ri.([]interface{})
-				if !ok {
-					return fmt.Errorf("can't decode field tables - not an array of Tables")
-				}
-				rows = append(rows, vals)
-			}
-
-			tables = append(tables, &Table{
-				Columns: columns,
-				Rows:    rows,
-			})
-		}
-	}
-
-	var dfs *dataFrames
-	if m["dataframes"] != nil {
-		raw, ok := m["dataframes"].([]interface{})
-		if !ok {
-			return fmt.Errorf("can't decode field dataframes - not an array of byte arrays")
-		}
-
-		var encoded [][]byte
-		for _, ra := range raw {
-			encS, ok := ra.(string)
-			if !ok {
-				return fmt.Errorf("can't decode field dataframes - not an array of byte arrays")
-			}
-			enc, err := base64.StdEncoding.DecodeString(encS)
-			if err != nil {
-				return fmt.Errorf("can't decode field dataframes - not an array of arrow frames")
-			}
-			encoded = append(encoded, enc)
-		}
-		decoded, err := data.UnmarshalArrowFrames(encoded)
-		if err != nil {
-			return err
-		}
-		dfs = &dataFrames{
-			decoded: decoded,
-			encoded: encoded,
-		}
-	}
-
-	r.RefId = refID
-	r.Meta = meta
-	r.Series = series
-	r.Tables = tables
-	if dfs != nil {
-		r.Dataframes = dfs
-	}
-	return nil
-}
-
-type TimeSeries struct {
-	Name   string            `json:"name"`
-	Points TimeSeriesPoints  `json:"points"`
-	Tags   map[string]string `json:"tags,omitempty"`
-}
-
-type Table struct {
-	Columns []TableColumn `json:"columns"`
-	Rows    []RowValues   `json:"rows"`
-}
-
-type TableColumn struct {
-	Text string `json:"text"`
-}
-
-type RowValues []interface{}
-type TimePoint [2]null.Float
-type TimeSeriesPoints []TimePoint
-type TimeSeriesSlice []*TimeSeries
-
-func NewQueryResult() *QueryResult {
-	return &QueryResult{
-		Series: make(TimeSeriesSlice, 0),
-	}
-}
-
-func NewTimePoint(value null.Float, timestamp float64) TimePoint {
-	return TimePoint{value, null.FloatFrom(timestamp)}
-}
-
-// DataFrames is an interface for retrieving encoded and decoded data frames.
-//
-// See NewDecodedDataFrames and NewEncodedDataFrames for more information.
-type DataFrames interface {
-	// Encoded encodes Frames into a slice of []byte.
-	// If an error occurs [][]byte will be nil.
-	// The encoded result, if any, will be cached and returned next time Encoded is called.
-	Encoded() ([][]byte, error)
-
-	// Decoded decodes a slice of Arrow encoded frames to data.Frames ([]*data.Frame).
-	// If an error occurs Frames will be nil.
-	// The decoded result, if any, will be cached and returned next time Decoded is called.
-	Decoded() (data.Frames, error)
-}
-
-type dataFrames struct {
-	decoded data.Frames
-	encoded [][]byte
-}
-
-// NewDecodedDataFrames instantiates DataFrames from decoded frames.
-//
-// This should be the primary function for creating DataFrames if you're implementing a plugin.
-// In a Grafana alerting scenario it needs to operate on decoded frames, which is why this function is
-// preferrable. When encoded data frames are needed, e.g. returned from Grafana HTTP API, it will
-// happen automatically when MarshalJSON() is called.
-func NewDecodedDataFrames(decodedFrames data.Frames) DataFrames {
-	return &dataFrames{
-		decoded: decodedFrames,
-	}
-}
-
-// NewEncodedDataFrames instantiates DataFrames from encoded frames.
-//
-// This one is primarily used for creating DataFrames when receiving encoded data frames from an external
-// plugin or similar. This may allow the encoded data frames to be returned to Grafana UI without any additional
-// decoding/encoding required. In Grafana alerting scenario it needs to operate on decoded data frames why encoded
-// frames needs to be decoded before usage.
-func NewEncodedDataFrames(encodedFrames [][]byte) DataFrames {
-	return &dataFrames{
-		encoded: encodedFrames,
-	}
-}
-
-func (df *dataFrames) Encoded() ([][]byte, error) {
-	if df.encoded == nil {
-		encoded, err := df.decoded.MarshalArrow()
-		if err != nil {
-			return nil, err
-		}
-		df.encoded = encoded
-	}
-
-	return df.encoded, nil
-}
-
-func (df *dataFrames) Decoded() (data.Frames, error) {
-	if df.decoded == nil {
-		decoded, err := data.UnmarshalArrowFrames(df.encoded)
-		if err != nil {
-			return nil, err
-		}
-		df.decoded = decoded
-	}
-
-	return df.decoded, nil
-}
-
-func (df *dataFrames) MarshalJSON() ([]byte, error) {
-	encoded, err := df.Encoded()
-	if err != nil {
-		return nil, err
-	}
-
-	// Use a configuration that's compatible with the standard library
-	// to minimize the risk of introducing bugs. This will make sure
-	// that map keys is ordered.
-	jsonCfg := jsoniter.ConfigCompatibleWithStandardLibrary
-	return jsonCfg.Marshal(encoded)
-}
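Everything deleted here was moved rather than dropped: the hunks above reference `plugins.DataQuery`, `plugins.DataQueryResult`, `plugins.DataTimeSeries`, `plugins.NewDecodedDataFrames` and friends, i.e. the same types under `Data`-prefixed names in `pkg/plugins`. Assuming the `DataFrames` contract carried over unchanged, the lazy, cached Arrow round trip still works like this (sketch; the package name is illustrative):

```go
package example

import (
	"github.com/grafana/grafana-plugin-sdk-go/data"
	"github.com/grafana/grafana/pkg/plugins"
)

// encodeOnce sketches the cached Arrow encoding the deleted dataFrames
// implementation provided, assuming the same contract now in plugins.
func encodeOnce() ([][]byte, error) {
	df := plugins.NewDecodedDataFrames(data.Frames{data.NewFrame("cpu")})
	if _, err := df.Encoded(); err != nil { // encodes and caches
		return nil, err
	}
	return df.Encoded() // second call returns the cached bytes
}
```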
diff --git a/pkg/tsdb/mssql/macros.go b/pkg/tsdb/mssql/macros.go
index 777d41cd5c1..33cb6e79ba4 100644
--- a/pkg/tsdb/mssql/macros.go
+++ b/pkg/tsdb/mssql/macros.go
@@ -7,26 +7,28 @@ import (
 	"time"
 
 	"github.com/grafana/grafana/pkg/components/gtime"
-	"github.com/grafana/grafana/pkg/tsdb"
+	"github.com/grafana/grafana/pkg/plugins"
 	"github.com/grafana/grafana/pkg/tsdb/sqleng"
 )
 
 const rsIdentifier = `([_a-zA-Z0-9]+)`
 const sExpr = `\$` + rsIdentifier + `\(([^\)]*)\)`
 
-type msSqlMacroEngine struct {
-	*sqleng.SqlMacroEngineBase
-	timeRange *tsdb.TimeRange
-	query     *tsdb.Query
+type msSQLMacroEngine struct {
+	*sqleng.SQLMacroEngineBase
+	timeRange plugins.DataTimeRange
+	query     plugins.DataSubQuery
 }
 
-func newMssqlMacroEngine() sqleng.SqlMacroEngine {
-	return &msSqlMacroEngine{SqlMacroEngineBase: sqleng.NewSqlMacroEngineBase()}
+func newMssqlMacroEngine() sqleng.SQLMacroEngine {
+	return &msSQLMacroEngine{SQLMacroEngineBase: sqleng.NewSQLMacroEngineBase()}
 }
 
-func (m *msSqlMacroEngine) Interpolate(query *tsdb.Query, timeRange *tsdb.TimeRange, sql string) (string, error) {
+func (m *msSQLMacroEngine) Interpolate(query plugins.DataSubQuery, timeRange plugins.DataTimeRange,
+	sql string) (string, error) {
 	m.timeRange = timeRange
 	m.query = query
+	// TODO: Return any error
 	rExp, _ := regexp.Compile(sExpr)
 	var macroError error
 
@@ -50,7 +52,7 @@ func (m *msSqlMacroEngine) Interpolate(query *tsdb.Query, timeRange *tsdb.TimeRa
 	return sql, nil
 }
 
-func (m *msSqlMacroEngine) evaluateMacro(name string, args []string) (string, error) {
+func (m *msSQLMacroEngine) evaluateMacro(name string, args []string) (string, error) {
 	switch name {
 	case "__time":
 		if len(args) == 0 {
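With `Interpolate` now taking a `plugins.DataTimeRange` by value, callers pass a zero value instead of `nil` when the range is irrelevant (see `dfltTimeRange` in the test below). A sketch of a full interpolation call; the epoch-millisecond range values are illustrative:

```go
package mssql

import (
	"github.com/grafana/grafana/pkg/components/simplejson"
	"github.com/grafana/grafana/pkg/plugins"
)

// exampleInterpolate sketches a $__timeFilter expansion through the
// engine above.
func exampleInterpolate() (string, error) {
	engine := newMssqlMacroEngine()
	timeRange := plugins.NewDataTimeRange("1521117000000", "1521122100000")
	query := plugins.DataSubQuery{Model: simplejson.New()}
	return engine.Interpolate(query, timeRange, "WHERE $__timeFilter(time_column)")
}
```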
"github.com/smartystreets/goconvey/convey" ) func TestMacroEngine(t *testing.T) { Convey("MacroEngine", t, func() { - engine := &msSqlMacroEngine{} - query := &tsdb.Query{ + engine := &msSQLMacroEngine{} + query := plugins.DataSubQuery{ Model: simplejson.New(), } + dfltTimeRange := plugins.DataTimeRange{} + Convey("Given a time range between 2018-04-12 00:00 and 2018-04-12 00:05", func() { from := time.Date(2018, 4, 12, 18, 0, 0, 0, time.UTC) to := from.Add(5 * time.Minute) - timeRange := tsdb.NewFakeTimeRange("5m", "now", to) + timeRange := plugins.DataTimeRange{From: "5m", Now: to, To: "now"} Convey("interpolate __time function", func() { - sql, err := engine.Interpolate(query, nil, "select $__time(time_column)") + sql, err := engine.Interpolate(query, dfltTimeRange, "select $__time(time_column)") So(err, ShouldBeNil) So(sql, ShouldEqual, "select time_column AS time") }) Convey("interpolate __timeEpoch function", func() { - sql, err := engine.Interpolate(query, nil, "select $__timeEpoch(time_column)") + sql, err := engine.Interpolate(query, dfltTimeRange, "select $__timeEpoch(time_column)") So(err, ShouldBeNil) So(sql, ShouldEqual, "select DATEDIFF(second, '1970-01-01', time_column) AS time") }) Convey("interpolate __timeEpoch function wrapped in aggregation", func() { - sql, err := engine.Interpolate(query, nil, "select min($__timeEpoch(time_column))") + sql, err := engine.Interpolate(query, dfltTimeRange, "select min($__timeEpoch(time_column))") So(err, ShouldBeNil) So(sql, ShouldEqual, "select min(DATEDIFF(second, '1970-01-01', time_column) AS time)") @@ -166,7 +168,9 @@ func TestMacroEngine(t *testing.T) { Convey("Given a time range between 1960-02-01 07:00 and 1965-02-03 08:00", func() { from := time.Date(1960, 2, 1, 7, 0, 0, 0, time.UTC) to := time.Date(1965, 2, 3, 8, 0, 0, 0, time.UTC) - timeRange := tsdb.NewTimeRange(strconv.FormatInt(from.UnixNano()/int64(time.Millisecond), 10), strconv.FormatInt(to.UnixNano()/int64(time.Millisecond), 10)) + timeRange := plugins.NewDataTimeRange( + strconv.FormatInt(from.UnixNano()/int64(time.Millisecond), 10), + strconv.FormatInt(to.UnixNano()/int64(time.Millisecond), 10)) Convey("interpolate __timeFilter function", func() { sql, err := engine.Interpolate(query, timeRange, "WHERE $__timeFilter(time_column)") @@ -193,7 +197,9 @@ func TestMacroEngine(t *testing.T) { Convey("Given a time range between 1960-02-01 07:00 and 1980-02-03 08:00", func() { from := time.Date(1960, 2, 1, 7, 0, 0, 0, time.UTC) to := time.Date(1980, 2, 3, 8, 0, 0, 0, time.UTC) - timeRange := tsdb.NewTimeRange(strconv.FormatInt(from.UnixNano()/int64(time.Millisecond), 10), strconv.FormatInt(to.UnixNano()/int64(time.Millisecond), 10)) + timeRange := plugins.NewDataTimeRange( + strconv.FormatInt(from.UnixNano()/int64(time.Millisecond), 10), + strconv.FormatInt(to.UnixNano()/int64(time.Millisecond), 10)) Convey("interpolate __timeFilter function", func() { sql, err := engine.Interpolate(query, timeRange, "WHERE $__timeFilter(time_column)") diff --git a/pkg/tsdb/mssql/mssql.go b/pkg/tsdb/mssql/mssql.go index 5950a847c65..c45a3fe5480 100644 --- a/pkg/tsdb/mssql/mssql.go +++ b/pkg/tsdb/mssql/mssql.go @@ -13,27 +13,24 @@ import ( mssql "github.com/denisenkom/go-mssqldb" "github.com/grafana/grafana/pkg/infra/log" "github.com/grafana/grafana/pkg/models" - "github.com/grafana/grafana/pkg/tsdb" + "github.com/grafana/grafana/pkg/plugins" "github.com/grafana/grafana/pkg/tsdb/sqleng" "xorm.io/core" ) -func init() { - tsdb.RegisterTsdbQueryEndpoint("mssql", newMssqlQueryEndpoint) -} - var 
logger = log.New("tsdb.mssql") -func newMssqlQueryEndpoint(datasource *models.DataSource) (tsdb.TsdbQueryEndpoint, error) { +func NewExecutor(datasource *models.DataSource) (plugins.DataPlugin, error) { cnnstr, err := generateConnectionString(datasource) if err != nil { return nil, err } + // TODO: Don't use global if setting.Env == setting.Dev { logger.Debug("getEngine", "connection", cnnstr) } - config := sqleng.SqlQueryEndpointConfiguration{ + config := sqleng.DataPluginConfiguration{ DriverName: "mssql", ConnectionString: cnnstr, Datasource: datasource, @@ -44,7 +41,7 @@ func newMssqlQueryEndpoint(datasource *models.DataSource) (tsdb.TsdbQueryEndpoin log: logger, } - return sqleng.NewSqlQueryEndpoint(&config, &queryResultTransformer, newMssqlMacroEngine(), logger) + return sqleng.NewDataPlugin(config, &queryResultTransformer, newMssqlMacroEngine(), logger) } // ParseURL tries to parse an MSSQL URL string into a URL object. @@ -105,7 +102,8 @@ type mssqlQueryResultTransformer struct { log log.Logger } -func (t *mssqlQueryResultTransformer) TransformQueryResult(columnTypes []*sql.ColumnType, rows *core.Rows) (tsdb.RowValues, error) { +func (t *mssqlQueryResultTransformer) TransformQueryResult(columnTypes []*sql.ColumnType, rows *core.Rows) ( + plugins.DataRowValues, error) { values := make([]interface{}, len(columnTypes)) valuePtrs := make([]interface{}, len(columnTypes)) diff --git a/pkg/tsdb/mssql/mssql_test.go b/pkg/tsdb/mssql/mssql_test.go index eaf02b512fa..ec2688d875f 100644 --- a/pkg/tsdb/mssql/mssql_test.go +++ b/pkg/tsdb/mssql/mssql_test.go @@ -11,8 +11,8 @@ import ( "github.com/grafana/grafana/pkg/components/securejsondata" "github.com/grafana/grafana/pkg/components/simplejson" "github.com/grafana/grafana/pkg/models" + "github.com/grafana/grafana/pkg/plugins" "github.com/grafana/grafana/pkg/services/sqlstore/sqlutil" - "github.com/grafana/grafana/pkg/tsdb" "github.com/grafana/grafana/pkg/tsdb/sqleng" . 
"github.com/smartystreets/goconvey/convey" "xorm.io/xorm" @@ -38,11 +38,11 @@ func TestMSSQL(t *testing.T) { } origInterpolate := sqleng.Interpolate - sqleng.Interpolate = func(query *tsdb.Query, timeRange *tsdb.TimeRange, sql string) (string, error) { + sqleng.Interpolate = func(query plugins.DataSubQuery, timeRange plugins.DataTimeRange, sql string) (string, error) { return sql, nil } - endpoint, err := newMssqlQueryEndpoint(&models.DataSource{ + endpoint, err := NewExecutor(&models.DataSource{ JsonData: simplejson.New(), SecureJsonData: securejsondata.SecureJsonData{}, }) @@ -122,19 +122,19 @@ func TestMSSQL(t *testing.T) { So(err, ShouldBeNil) Convey("When doing a table query should map MSSQL column types to Go types", func() { - query := &tsdb.TsdbQuery{ - Queries: []*tsdb.Query{ + query := plugins.DataQuery{ + Queries: []plugins.DataSubQuery{ { Model: simplejson.NewFromAny(map[string]interface{}{ "rawSql": "SELECT * FROM mssql_types", "format": "table", }), - RefId: "A", + RefID: "A", }, }, } - resp, err := endpoint.Query(context.Background(), nil, query) + resp, err := endpoint.DataQuery(context.Background(), nil, query) queryResult := resp.Results["A"] So(err, ShouldBeNil) @@ -214,19 +214,19 @@ func TestMSSQL(t *testing.T) { So(err, ShouldBeNil) Convey("When doing a metric query using timeGroup", func() { - query := &tsdb.TsdbQuery{ - Queries: []*tsdb.Query{ + query := plugins.DataQuery{ + Queries: []plugins.DataSubQuery{ { Model: simplejson.NewFromAny(map[string]interface{}{ "rawSql": "SELECT $__timeGroup(time, '5m') AS time, avg(value) as value FROM metric GROUP BY $__timeGroup(time, '5m') ORDER BY 1", "format": "time_series", }), - RefId: "A", + RefID: "A", }, }, } - resp, err := endpoint.Query(context.Background(), nil, query) + resp, err := endpoint.DataQuery(context.Background(), nil, query) So(err, ShouldBeNil) queryResult := resp.Results["A"] So(queryResult.Error, ShouldBeNil) @@ -257,23 +257,23 @@ func TestMSSQL(t *testing.T) { }) Convey("When doing a metric query using timeGroup with NULL fill enabled", func() { - query := &tsdb.TsdbQuery{ - Queries: []*tsdb.Query{ + query := plugins.DataQuery{ + Queries: []plugins.DataSubQuery{ { Model: simplejson.NewFromAny(map[string]interface{}{ "rawSql": "SELECT $__timeGroup(time, '5m', NULL) AS time, avg(value) as value FROM metric GROUP BY $__timeGroup(time, '5m') ORDER BY 1", "format": "time_series", }), - RefId: "A", + RefID: "A", }, }, - TimeRange: &tsdb.TimeRange{ + TimeRange: &plugins.DataTimeRange{ From: fmt.Sprintf("%v", fromStart.Unix()*1000), To: fmt.Sprintf("%v", fromStart.Add(34*time.Minute).Unix()*1000), }, } - resp, err := endpoint.Query(context.Background(), nil, query) + resp, err := endpoint.DataQuery(context.Background(), nil, query) So(err, ShouldBeNil) queryResult := resp.Results["A"] So(queryResult.Error, ShouldBeNil) @@ -317,24 +317,24 @@ func TestMSSQL(t *testing.T) { }) Convey("Should replace $__interval", func() { - query := &tsdb.TsdbQuery{ - Queries: []*tsdb.Query{ + query := plugins.DataQuery{ + Queries: []plugins.DataSubQuery{ { DataSource: &models.DataSource{}, Model: simplejson.NewFromAny(map[string]interface{}{ "rawSql": "SELECT $__timeGroup(time, $__interval) AS time, avg(value) as value FROM metric GROUP BY $__timeGroup(time, $__interval) ORDER BY 1", "format": "time_series", }), - RefId: "A", + RefID: "A", }, }, - TimeRange: &tsdb.TimeRange{ + TimeRange: &plugins.DataTimeRange{ From: fmt.Sprintf("%v", fromStart.Unix()*1000), To: fmt.Sprintf("%v", fromStart.Add(30*time.Minute).Unix()*1000), }, } - 
resp, err := endpoint.Query(context.Background(), nil, query) + resp, err := endpoint.DataQuery(context.Background(), nil, query) So(err, ShouldBeNil) queryResult := resp.Results["A"] So(queryResult.Error, ShouldBeNil) @@ -343,23 +343,23 @@ func TestMSSQL(t *testing.T) { }) Convey("When doing a metric query using timeGroup with float fill enabled", func() { - query := &tsdb.TsdbQuery{ - Queries: []*tsdb.Query{ + query := plugins.DataQuery{ + Queries: []plugins.DataSubQuery{ { Model: simplejson.NewFromAny(map[string]interface{}{ "rawSql": "SELECT $__timeGroup(time, '5m', 1.5) AS time, avg(value) as value FROM metric GROUP BY $__timeGroup(time, '5m') ORDER BY 1", "format": "time_series", }), - RefId: "A", + RefID: "A", }, }, - TimeRange: &tsdb.TimeRange{ + TimeRange: &plugins.DataTimeRange{ From: fmt.Sprintf("%v", fromStart.Unix()*1000), To: fmt.Sprintf("%v", fromStart.Add(34*time.Minute).Unix()*1000), }, } - resp, err := endpoint.Query(context.Background(), nil, query) + resp, err := endpoint.DataQuery(context.Background(), nil, query) So(err, ShouldBeNil) queryResult := resp.Results["A"] So(queryResult.Error, ShouldBeNil) @@ -437,19 +437,19 @@ func TestMSSQL(t *testing.T) { So(err, ShouldBeNil) Convey("When doing a metric query using epoch (int64) as time column and value column (int64) should return metric with time in milliseconds", func() { - query := &tsdb.TsdbQuery{ - Queries: []*tsdb.Query{ + query := plugins.DataQuery{ + Queries: []plugins.DataSubQuery{ { Model: simplejson.NewFromAny(map[string]interface{}{ "rawSql": `SELECT TOP 1 timeInt64 as time, timeInt64 FROM metric_values ORDER BY time`, "format": "time_series", }), - RefId: "A", + RefID: "A", }, }, } - resp, err := endpoint.Query(context.Background(), nil, query) + resp, err := endpoint.DataQuery(context.Background(), nil, query) So(err, ShouldBeNil) queryResult := resp.Results["A"] So(queryResult.Error, ShouldBeNil) @@ -459,19 +459,19 @@ func TestMSSQL(t *testing.T) { }) Convey("When doing a metric query using epoch (int64 nullable) as time column and value column (int64 nullable) should return metric with time in milliseconds", func() { - query := &tsdb.TsdbQuery{ - Queries: []*tsdb.Query{ + query := plugins.DataQuery{ + Queries: []plugins.DataSubQuery{ { Model: simplejson.NewFromAny(map[string]interface{}{ "rawSql": `SELECT TOP 1 timeInt64Nullable as time, timeInt64Nullable FROM metric_values ORDER BY time`, "format": "time_series", }), - RefId: "A", + RefID: "A", }, }, } - resp, err := endpoint.Query(context.Background(), nil, query) + resp, err := endpoint.DataQuery(context.Background(), nil, query) So(err, ShouldBeNil) queryResult := resp.Results["A"] So(queryResult.Error, ShouldBeNil) @@ -481,19 +481,19 @@ func TestMSSQL(t *testing.T) { }) Convey("When doing a metric query using epoch (float64) as time column and value column (float64) should return metric with time in milliseconds", func() { - query := &tsdb.TsdbQuery{ - Queries: []*tsdb.Query{ + query := plugins.DataQuery{ + Queries: []plugins.DataSubQuery{ { Model: simplejson.NewFromAny(map[string]interface{}{ "rawSql": `SELECT TOP 1 timeFloat64 as time, timeFloat64 FROM metric_values ORDER BY time`, "format": "time_series", }), - RefId: "A", + RefID: "A", }, }, } - resp, err := endpoint.Query(context.Background(), nil, query) + resp, err := endpoint.DataQuery(context.Background(), nil, query) So(err, ShouldBeNil) queryResult := resp.Results["A"] So(queryResult.Error, ShouldBeNil) @@ -503,19 +503,19 @@ func TestMSSQL(t *testing.T) { }) Convey("When doing a metric 
query using epoch (float64 nullable) as time column and value column (float64 nullable) should return metric with time in milliseconds", func() { - query := &tsdb.TsdbQuery{ - Queries: []*tsdb.Query{ + query := plugins.DataQuery{ + Queries: []plugins.DataSubQuery{ { Model: simplejson.NewFromAny(map[string]interface{}{ "rawSql": `SELECT TOP 1 timeFloat64Nullable as time, timeFloat64Nullable FROM metric_values ORDER BY time`, "format": "time_series", }), - RefId: "A", + RefID: "A", }, }, } - resp, err := endpoint.Query(context.Background(), nil, query) + resp, err := endpoint.DataQuery(context.Background(), nil, query) So(err, ShouldBeNil) queryResult := resp.Results["A"] So(queryResult.Error, ShouldBeNil) @@ -525,19 +525,19 @@ func TestMSSQL(t *testing.T) { }) Convey("When doing a metric query using epoch (int32) as time column and value column (int32) should return metric with time in milliseconds", func() { - query := &tsdb.TsdbQuery{ - Queries: []*tsdb.Query{ + query := plugins.DataQuery{ + Queries: []plugins.DataSubQuery{ { Model: simplejson.NewFromAny(map[string]interface{}{ "rawSql": `SELECT TOP 1 timeInt32 as time, timeInt32 FROM metric_values ORDER BY time`, "format": "time_series", }), - RefId: "A", + RefID: "A", }, }, } - resp, err := endpoint.Query(context.Background(), nil, query) + resp, err := endpoint.DataQuery(context.Background(), nil, query) So(err, ShouldBeNil) queryResult := resp.Results["A"] So(queryResult.Error, ShouldBeNil) @@ -547,19 +547,19 @@ func TestMSSQL(t *testing.T) { }) Convey("When doing a metric query using epoch (int32 nullable) as time column and value column (int32 nullable) should return metric with time in milliseconds", func() { - query := &tsdb.TsdbQuery{ - Queries: []*tsdb.Query{ + query := plugins.DataQuery{ + Queries: []plugins.DataSubQuery{ { Model: simplejson.NewFromAny(map[string]interface{}{ "rawSql": `SELECT TOP 1 timeInt32Nullable as time, timeInt32Nullable FROM metric_values ORDER BY time`, "format": "time_series", }), - RefId: "A", + RefID: "A", }, }, } - resp, err := endpoint.Query(context.Background(), nil, query) + resp, err := endpoint.DataQuery(context.Background(), nil, query) So(err, ShouldBeNil) queryResult := resp.Results["A"] So(queryResult.Error, ShouldBeNil) @@ -569,19 +569,19 @@ func TestMSSQL(t *testing.T) { }) Convey("When doing a metric query using epoch (float32) as time column and value column (float32) should return metric with time in milliseconds", func() { - query := &tsdb.TsdbQuery{ - Queries: []*tsdb.Query{ + query := plugins.DataQuery{ + Queries: []plugins.DataSubQuery{ { Model: simplejson.NewFromAny(map[string]interface{}{ "rawSql": `SELECT TOP 1 timeFloat32 as time, timeFloat32 FROM metric_values ORDER BY time`, "format": "time_series", }), - RefId: "A", + RefID: "A", }, }, } - resp, err := endpoint.Query(context.Background(), nil, query) + resp, err := endpoint.DataQuery(context.Background(), nil, query) So(err, ShouldBeNil) queryResult := resp.Results["A"] So(queryResult.Error, ShouldBeNil) @@ -591,19 +591,19 @@ func TestMSSQL(t *testing.T) { }) Convey("When doing a metric query using epoch (float32 nullable) as time column and value column (float32 nullable) should return metric with time in milliseconds", func() { - query := &tsdb.TsdbQuery{ - Queries: []*tsdb.Query{ + query := plugins.DataQuery{ + Queries: []plugins.DataSubQuery{ { Model: simplejson.NewFromAny(map[string]interface{}{ "rawSql": `SELECT TOP 1 timeFloat32Nullable as time, timeFloat32Nullable FROM metric_values ORDER BY time`, "format": 
"time_series", }), - RefId: "A", + RefID: "A", }, }, } - resp, err := endpoint.Query(context.Background(), nil, query) + resp, err := endpoint.DataQuery(context.Background(), nil, query) So(err, ShouldBeNil) queryResult := resp.Results["A"] So(queryResult.Error, ShouldBeNil) @@ -613,19 +613,19 @@ func TestMSSQL(t *testing.T) { }) Convey("When doing a metric query grouping by time and select metric column should return correct series", func() { - query := &tsdb.TsdbQuery{ - Queries: []*tsdb.Query{ + query := plugins.DataQuery{ + Queries: []plugins.DataSubQuery{ { Model: simplejson.NewFromAny(map[string]interface{}{ "rawSql": "SELECT $__timeEpoch(time), measurement + ' - value one' as metric, valueOne FROM metric_values ORDER BY 1", "format": "time_series", }), - RefId: "A", + RefID: "A", }, }, } - resp, err := endpoint.Query(context.Background(), nil, query) + resp, err := endpoint.DataQuery(context.Background(), nil, query) So(err, ShouldBeNil) queryResult := resp.Results["A"] So(queryResult.Error, ShouldBeNil) @@ -636,19 +636,19 @@ func TestMSSQL(t *testing.T) { }) Convey("When doing a metric query grouping by time should return correct series", func() { - query := &tsdb.TsdbQuery{ - Queries: []*tsdb.Query{ + query := plugins.DataQuery{ + Queries: []plugins.DataSubQuery{ { Model: simplejson.NewFromAny(map[string]interface{}{ "rawSql": "SELECT $__timeEpoch(time), valueOne, valueTwo FROM metric_values ORDER BY 1", "format": "time_series", }), - RefId: "A", + RefID: "A", }, }, } - resp, err := endpoint.Query(context.Background(), nil, query) + resp, err := endpoint.DataQuery(context.Background(), nil, query) So(err, ShouldBeNil) queryResult := resp.Results["A"] So(queryResult.Error, ShouldBeNil) @@ -659,19 +659,19 @@ func TestMSSQL(t *testing.T) { }) Convey("When doing a metric query with metric column and multiple value columns", func() { - query := &tsdb.TsdbQuery{ - Queries: []*tsdb.Query{ + query := plugins.DataQuery{ + Queries: []plugins.DataSubQuery{ { Model: simplejson.NewFromAny(map[string]interface{}{ "rawSql": "SELECT $__timeEpoch(time), measurement, valueOne, valueTwo FROM metric_values ORDER BY 1", "format": "time_series", }), - RefId: "A", + RefID: "A", }, }, } - resp, err := endpoint.Query(context.Background(), nil, query) + resp, err := endpoint.DataQuery(context.Background(), nil, query) So(err, ShouldBeNil) queryResult := resp.Results["A"] So(queryResult.Error, ShouldBeNil) @@ -685,21 +685,22 @@ func TestMSSQL(t *testing.T) { Convey("When doing a query with timeFrom,timeTo,unixEpochFrom,unixEpochTo macros", func() { sqleng.Interpolate = origInterpolate - query := &tsdb.TsdbQuery{ - TimeRange: tsdb.NewFakeTimeRange("5m", "now", fromStart), - Queries: []*tsdb.Query{ + timeRange := plugins.DataTimeRange{From: "5m", To: "now", Now: fromStart} + query := plugins.DataQuery{ + TimeRange: &timeRange, + Queries: []plugins.DataSubQuery{ { DataSource: &models.DataSource{JsonData: simplejson.New()}, Model: simplejson.NewFromAny(map[string]interface{}{ "rawSql": `SELECT time FROM metric_values WHERE time > $__timeFrom() OR time < $__timeFrom() OR 1 < $__unixEpochFrom() OR $__unixEpochTo() > 1 ORDER BY 1`, "format": "time_series", }), - RefId: "A", + RefID: "A", }, }, } - resp, err := endpoint.Query(context.Background(), nil, query) + resp, err := endpoint.DataQuery(context.Background(), nil, query) So(err, ShouldBeNil) queryResult := resp.Results["A"] So(queryResult.Error, ShouldBeNil) @@ -751,8 +752,8 @@ func TestMSSQL(t *testing.T) { Convey("When doing a metric query using stored 
procedure should return correct result", func() { sqleng.Interpolate = origInterpolate - query := &tsdb.TsdbQuery{ - Queries: []*tsdb.Query{ + query := plugins.DataQuery{ + Queries: []plugins.DataSubQuery{ { DataSource: &models.DataSource{JsonData: simplejson.New()}, Model: simplejson.NewFromAny(map[string]interface{}{ @@ -763,16 +764,16 @@ func TestMSSQL(t *testing.T) { EXEC dbo.sp_test_epoch @from, @to`, "format": "time_series", }), - RefId: "A", + RefID: "A", }, }, - TimeRange: &tsdb.TimeRange{ + TimeRange: &plugins.DataTimeRange{ From: "1521117000000", To: "1521122100000", }, } - resp, err := endpoint.Query(context.Background(), nil, query) + resp, err := endpoint.DataQuery(context.Background(), nil, query) queryResult := resp.Results["A"] So(err, ShouldBeNil) So(queryResult.Error, ShouldBeNil) @@ -830,8 +831,8 @@ func TestMSSQL(t *testing.T) { Convey("When doing a metric query using stored procedure should return correct result", func() { sqleng.Interpolate = origInterpolate - query := &tsdb.TsdbQuery{ - Queries: []*tsdb.Query{ + query := plugins.DataQuery{ + Queries: []plugins.DataSubQuery{ { DataSource: &models.DataSource{JsonData: simplejson.New()}, Model: simplejson.NewFromAny(map[string]interface{}{ @@ -842,16 +843,16 @@ func TestMSSQL(t *testing.T) { EXEC dbo.sp_test_epoch @from, @to`, "format": "time_series", }), - RefId: "A", + RefID: "A", }, }, - TimeRange: &tsdb.TimeRange{ + TimeRange: &plugins.DataTimeRange{ From: "1521117000000", To: "1521122100000", }, } - resp, err := endpoint.Query(context.Background(), nil, query) + resp, err := endpoint.DataQuery(context.Background(), nil, query) queryResult := resp.Results["A"] So(err, ShouldBeNil) So(queryResult.Error, ShouldBeNil) @@ -911,46 +912,46 @@ func TestMSSQL(t *testing.T) { } Convey("When doing an annotation query of deploy events should return expected result", func() { - query := &tsdb.TsdbQuery{ - Queries: []*tsdb.Query{ + query := plugins.DataQuery{ + Queries: []plugins.DataSubQuery{ { Model: simplejson.NewFromAny(map[string]interface{}{ "rawSql": "SELECT time_sec as time, description as [text], tags FROM [event] WHERE $__unixEpochFilter(time_sec) AND tags='deploy' ORDER BY 1 ASC", "format": "table", }), - RefId: "Deploys", + RefID: "Deploys", }, }, - TimeRange: &tsdb.TimeRange{ + TimeRange: &plugins.DataTimeRange{ From: fmt.Sprintf("%v", fromStart.Add(-20*time.Minute).Unix()*1000), To: fmt.Sprintf("%v", fromStart.Add(40*time.Minute).Unix()*1000), }, } - resp, err := endpoint.Query(context.Background(), nil, query) + resp, err := endpoint.DataQuery(context.Background(), nil, query) queryResult := resp.Results["Deploys"] So(err, ShouldBeNil) So(len(queryResult.Tables[0].Rows), ShouldEqual, 3) }) Convey("When doing an annotation query of ticket events should return expected result", func() { - query := &tsdb.TsdbQuery{ - Queries: []*tsdb.Query{ + query := plugins.DataQuery{ + Queries: []plugins.DataSubQuery{ { Model: simplejson.NewFromAny(map[string]interface{}{ "rawSql": "SELECT time_sec as time, description as [text], tags FROM [event] WHERE $__unixEpochFilter(time_sec) AND tags='ticket' ORDER BY 1 ASC", "format": "table", }), - RefId: "Tickets", + RefID: "Tickets", }, }, - TimeRange: &tsdb.TimeRange{ + TimeRange: &plugins.DataTimeRange{ From: fmt.Sprintf("%v", fromStart.Add(-20*time.Minute).Unix()*1000), To: fmt.Sprintf("%v", fromStart.Add(40*time.Minute).Unix()*1000), }, } - resp, err := endpoint.Query(context.Background(), nil, query) + resp, err := endpoint.DataQuery(context.Background(), nil, query) queryResult := 
resp.Results["Tickets"] So(err, ShouldBeNil) So(len(queryResult.Tables[0].Rows), ShouldEqual, 3) @@ -960,8 +961,8 @@ func TestMSSQL(t *testing.T) { dt := time.Date(2018, 3, 14, 21, 20, 6, 527e6, time.UTC) dtFormat := "2006-01-02 15:04:05.999999999" - query := &tsdb.TsdbQuery{ - Queries: []*tsdb.Query{ + query := plugins.DataQuery{ + Queries: []plugins.DataSubQuery{ { Model: simplejson.NewFromAny(map[string]interface{}{ "rawSql": fmt.Sprintf(`SELECT @@ -971,12 +972,12 @@ func TestMSSQL(t *testing.T) { `, dt.Format(dtFormat)), "format": "table", }), - RefId: "A", + RefID: "A", }, }, } - resp, err := endpoint.Query(context.Background(), nil, query) + resp, err := endpoint.DataQuery(context.Background(), nil, query) So(err, ShouldBeNil) queryResult := resp.Results["A"] So(queryResult.Error, ShouldBeNil) @@ -990,8 +991,8 @@ func TestMSSQL(t *testing.T) { Convey("When doing an annotation query with a time column in epoch second format should return ms", func() { dt := time.Date(2018, 3, 14, 21, 20, 6, 527e6, time.UTC) - query := &tsdb.TsdbQuery{ - Queries: []*tsdb.Query{ + query := plugins.DataQuery{ + Queries: []plugins.DataSubQuery{ { Model: simplejson.NewFromAny(map[string]interface{}{ "rawSql": fmt.Sprintf(`SELECT @@ -1001,12 +1002,12 @@ func TestMSSQL(t *testing.T) { `, dt.Unix()), "format": "table", }), - RefId: "A", + RefID: "A", }, }, } - resp, err := endpoint.Query(context.Background(), nil, query) + resp, err := endpoint.DataQuery(context.Background(), nil, query) So(err, ShouldBeNil) queryResult := resp.Results["A"] So(queryResult.Error, ShouldBeNil) @@ -1020,8 +1021,8 @@ func TestMSSQL(t *testing.T) { Convey("When doing an annotation query with a time column in epoch second format (int) should return ms", func() { dt := time.Date(2018, 3, 14, 21, 20, 6, 527e6, time.UTC) - query := &tsdb.TsdbQuery{ - Queries: []*tsdb.Query{ + query := plugins.DataQuery{ + Queries: []plugins.DataSubQuery{ { Model: simplejson.NewFromAny(map[string]interface{}{ "rawSql": fmt.Sprintf(`SELECT @@ -1031,12 +1032,12 @@ func TestMSSQL(t *testing.T) { `, dt.Unix()), "format": "table", }), - RefId: "A", + RefID: "A", }, }, } - resp, err := endpoint.Query(context.Background(), nil, query) + resp, err := endpoint.DataQuery(context.Background(), nil, query) So(err, ShouldBeNil) queryResult := resp.Results["A"] So(queryResult.Error, ShouldBeNil) @@ -1050,8 +1051,8 @@ func TestMSSQL(t *testing.T) { Convey("When doing an annotation query with a time column in epoch millisecond format should return ms", func() { dt := time.Date(2018, 3, 14, 21, 20, 6, 527e6, time.UTC) - query := &tsdb.TsdbQuery{ - Queries: []*tsdb.Query{ + query := plugins.DataQuery{ + Queries: []plugins.DataSubQuery{ { Model: simplejson.NewFromAny(map[string]interface{}{ "rawSql": fmt.Sprintf(`SELECT @@ -1061,12 +1062,12 @@ func TestMSSQL(t *testing.T) { `, dt.Unix()*1000), "format": "table", }), - RefId: "A", + RefID: "A", }, }, } - resp, err := endpoint.Query(context.Background(), nil, query) + resp, err := endpoint.DataQuery(context.Background(), nil, query) So(err, ShouldBeNil) queryResult := resp.Results["A"] So(queryResult.Error, ShouldBeNil) @@ -1078,8 +1079,8 @@ func TestMSSQL(t *testing.T) { }) Convey("When doing an annotation query with a time column holding a bigint null value should return nil", func() { - query := &tsdb.TsdbQuery{ - Queries: []*tsdb.Query{ + query := plugins.DataQuery{ + Queries: []plugins.DataSubQuery{ { Model: simplejson.NewFromAny(map[string]interface{}{ "rawSql": `SELECT @@ -1089,12 +1090,12 @@ func TestMSSQL(t 
*testing.T) { `, "format": "table", }), - RefId: "A", + RefID: "A", }, }, } - resp, err := endpoint.Query(context.Background(), nil, query) + resp, err := endpoint.DataQuery(context.Background(), nil, query) So(err, ShouldBeNil) queryResult := resp.Results["A"] So(queryResult.Error, ShouldBeNil) @@ -1106,8 +1107,8 @@ func TestMSSQL(t *testing.T) { }) Convey("When doing an annotation query with a time column holding a datetime null value should return nil", func() { - query := &tsdb.TsdbQuery{ - Queries: []*tsdb.Query{ + query := plugins.DataQuery{ + Queries: []plugins.DataSubQuery{ { Model: simplejson.NewFromAny(map[string]interface{}{ "rawSql": `SELECT @@ -1117,12 +1118,12 @@ func TestMSSQL(t *testing.T) { `, "format": "table", }), - RefId: "A", + RefID: "A", }, }, } - resp, err := endpoint.Query(context.Background(), nil, query) + resp, err := endpoint.DataQuery(context.Background(), nil, query) So(err, ShouldBeNil) queryResult := resp.Results["A"] So(queryResult.Error, ShouldBeNil) diff --git a/pkg/tsdb/mysql/macros.go b/pkg/tsdb/mysql/macros.go index 624309a6258..9a21885d588 100644 --- a/pkg/tsdb/mysql/macros.go +++ b/pkg/tsdb/mysql/macros.go @@ -8,7 +8,7 @@ import ( "github.com/grafana/grafana/pkg/components/gtime" "github.com/grafana/grafana/pkg/infra/log" - "github.com/grafana/grafana/pkg/tsdb" + "github.com/grafana/grafana/pkg/plugins" "github.com/grafana/grafana/pkg/tsdb/sqleng" ) @@ -17,18 +17,18 @@ const sExpr = `\$` + rsIdentifier + `\(([^\)]*)\)` var restrictedRegExp = regexp.MustCompile(`(?im)([\s]*show[\s]+grants|[\s,]session_user\([^\)]*\)|[\s,]current_user(\([^\)]*\))?|[\s,]system_user\([^\)]*\)|[\s,]user\([^\)]*\))([\s,;]|$)`) -type mySqlMacroEngine struct { - *sqleng.SqlMacroEngineBase - timeRange *tsdb.TimeRange - query *tsdb.Query +type mySQLMacroEngine struct { + *sqleng.SQLMacroEngineBase + timeRange plugins.DataTimeRange + query plugins.DataSubQuery logger log.Logger } -func newMysqlMacroEngine(logger log.Logger) sqleng.SqlMacroEngine { - return &mySqlMacroEngine{SqlMacroEngineBase: sqleng.NewSqlMacroEngineBase(), logger: logger} +func newMysqlMacroEngine(logger log.Logger) sqleng.SQLMacroEngine { + return &mySQLMacroEngine{SQLMacroEngineBase: sqleng.NewSQLMacroEngineBase(), logger: logger} } -func (m *mySqlMacroEngine) Interpolate(query *tsdb.Query, timeRange *tsdb.TimeRange, sql string) (string, error) { +func (m *mySQLMacroEngine) Interpolate(query plugins.DataSubQuery, timeRange plugins.DataTimeRange, sql string) (string, error) { m.timeRange = timeRange m.query = query @@ -38,6 +38,7 @@ func (m *mySqlMacroEngine) Interpolate(query *tsdb.Query, timeRange *tsdb.TimeRa return "", errors.New("invalid query - inspect Grafana server log for details") } + // TODO: Handle error rExp, _ := regexp.Compile(sExpr) var macroError error @@ -61,7 +62,7 @@ func (m *mySqlMacroEngine) Interpolate(query *tsdb.Query, timeRange *tsdb.TimeRa return sql, nil } -func (m *mySqlMacroEngine) evaluateMacro(name string, args []string) (string, error) { +func (m *mySQLMacroEngine) evaluateMacro(name string, args []string) (string, error) { switch name { case "__timeEpoch", "__time": if len(args) == 0 { diff --git a/pkg/tsdb/mysql/macros_test.go b/pkg/tsdb/mysql/macros_test.go index 6dc7472e497..1fa57cfd115 100644 --- a/pkg/tsdb/mysql/macros_test.go +++ b/pkg/tsdb/mysql/macros_test.go @@ -7,21 +7,21 @@ import ( "time" "github.com/grafana/grafana/pkg/infra/log" - "github.com/grafana/grafana/pkg/tsdb" + "github.com/grafana/grafana/pkg/plugins" . 
"github.com/smartystreets/goconvey/convey" ) func TestMacroEngine(t *testing.T) { Convey("MacroEngine", t, func() { - engine := &mySqlMacroEngine{ + engine := &mySQLMacroEngine{ logger: log.New("test"), } - query := &tsdb.Query{} + query := plugins.DataSubQuery{} Convey("Given a time range between 2018-04-12 00:00 and 2018-04-12 00:05", func() { from := time.Date(2018, 4, 12, 18, 0, 0, 0, time.UTC) to := from.Add(5 * time.Minute) - timeRange := tsdb.NewFakeTimeRange("5m", "now", to) + timeRange := plugins.DataTimeRange{From: "5m", Now: to, To: "now"} Convey("interpolate __time function", func() { sql, err := engine.Interpolate(query, timeRange, "select $__time(time_column)") @@ -120,7 +120,8 @@ func TestMacroEngine(t *testing.T) { Convey("Given a time range between 1960-02-01 07:00 and 1965-02-03 08:00", func() { from := time.Date(1960, 2, 1, 7, 0, 0, 0, time.UTC) to := time.Date(1965, 2, 3, 8, 0, 0, 0, time.UTC) - timeRange := tsdb.NewTimeRange(strconv.FormatInt(from.UnixNano()/int64(time.Millisecond), 10), strconv.FormatInt(to.UnixNano()/int64(time.Millisecond), 10)) + timeRange := plugins.NewDataTimeRange( + strconv.FormatInt(from.UnixNano()/int64(time.Millisecond), 10), strconv.FormatInt(to.UnixNano()/int64(time.Millisecond), 10)) Convey("interpolate __timeFilter function", func() { sql, err := engine.Interpolate(query, timeRange, "WHERE $__timeFilter(time_column)") @@ -140,7 +141,8 @@ func TestMacroEngine(t *testing.T) { Convey("Given a time range between 1960-02-01 07:00 and 1980-02-03 08:00", func() { from := time.Date(1960, 2, 1, 7, 0, 0, 0, time.UTC) to := time.Date(1980, 2, 3, 8, 0, 0, 0, time.UTC) - timeRange := tsdb.NewTimeRange(strconv.FormatInt(from.UnixNano()/int64(time.Millisecond), 10), strconv.FormatInt(to.UnixNano()/int64(time.Millisecond), 10)) + timeRange := plugins.NewDataTimeRange( + strconv.FormatInt(from.UnixNano()/int64(time.Millisecond), 10), strconv.FormatInt(to.UnixNano()/int64(time.Millisecond), 10)) Convey("interpolate __timeFilter function", func() { sql, err := engine.Interpolate(query, timeRange, "WHERE $__timeFilter(time_column)") @@ -180,7 +182,7 @@ func TestMacroEngine(t *testing.T) { } for _, tc := range tcs { - _, err := engine.Interpolate(nil, nil, tc) + _, err := engine.Interpolate(plugins.DataSubQuery{}, plugins.DataTimeRange{}, tc) So(err.Error(), ShouldEqual, "invalid query - inspect Grafana server log for details") } }) diff --git a/pkg/tsdb/mysql/mysql.go b/pkg/tsdb/mysql/mysql.go index e395ec3bdb0..e9281777db8 100644 --- a/pkg/tsdb/mysql/mysql.go +++ b/pkg/tsdb/mysql/mysql.go @@ -15,20 +15,16 @@ import ( "github.com/go-sql-driver/mysql" "github.com/grafana/grafana/pkg/infra/log" "github.com/grafana/grafana/pkg/models" - "github.com/grafana/grafana/pkg/tsdb" + "github.com/grafana/grafana/pkg/plugins" "github.com/grafana/grafana/pkg/tsdb/sqleng" "xorm.io/core" ) -func init() { - tsdb.RegisterTsdbQueryEndpoint("mysql", newMysqlQueryEndpoint) -} - func characterEscape(s string, escapeChar string) string { return strings.ReplaceAll(s, escapeChar, url.QueryEscape(escapeChar)) } -func newMysqlQueryEndpoint(datasource *models.DataSource) (tsdb.TsdbQueryEndpoint, error) { +func NewExecutor(datasource *models.DataSource) (plugins.DataPlugin, error) { logger := log.New("tsdb.mysql") protocol := "tcp" @@ -61,7 +57,7 @@ func newMysqlQueryEndpoint(datasource *models.DataSource) (tsdb.TsdbQueryEndpoin logger.Debug("getEngine", "connection", cnnstr) } - config := sqleng.SqlQueryEndpointConfiguration{ + config := sqleng.DataPluginConfiguration{ DriverName: 
"mysql", ConnectionString: cnnstr, Datasource: datasource, @@ -73,14 +69,15 @@ func newMysqlQueryEndpoint(datasource *models.DataSource) (tsdb.TsdbQueryEndpoin log: logger, } - return sqleng.NewSqlQueryEndpoint(&config, &rowTransformer, newMysqlMacroEngine(logger), logger) + return sqleng.NewDataPlugin(config, &rowTransformer, newMysqlMacroEngine(logger), logger) } type mysqlQueryResultTransformer struct { log log.Logger } -func (t *mysqlQueryResultTransformer) TransformQueryResult(columnTypes []*sql.ColumnType, rows *core.Rows) (tsdb.RowValues, error) { +func (t *mysqlQueryResultTransformer) TransformQueryResult(columnTypes []*sql.ColumnType, rows *core.Rows) ( + plugins.DataRowValues, error) { values := make([]interface{}, len(columnTypes)) for i := range values { diff --git a/pkg/tsdb/mysql/mysql_test.go b/pkg/tsdb/mysql/mysql_test.go index 84296a8005a..1a65db49d72 100644 --- a/pkg/tsdb/mysql/mysql_test.go +++ b/pkg/tsdb/mysql/mysql_test.go @@ -13,9 +13,9 @@ import ( "github.com/grafana/grafana/pkg/components/securejsondata" "github.com/grafana/grafana/pkg/components/simplejson" "github.com/grafana/grafana/pkg/models" + "github.com/grafana/grafana/pkg/plugins" "github.com/grafana/grafana/pkg/services/sqlstore" "github.com/grafana/grafana/pkg/services/sqlstore/sqlutil" - "github.com/grafana/grafana/pkg/tsdb" "github.com/grafana/grafana/pkg/tsdb/sqleng" "xorm.io/xorm" @@ -48,11 +48,11 @@ func TestMySQL(t *testing.T) { } origInterpolate := sqleng.Interpolate - sqleng.Interpolate = func(query *tsdb.Query, timeRange *tsdb.TimeRange, sql string) (string, error) { + sqleng.Interpolate = func(query plugins.DataSubQuery, timeRange plugins.DataTimeRange, sql string) (string, error) { return sql, nil } - endpoint, err := newMysqlQueryEndpoint(&models.DataSource{ + exe, err := NewExecutor(&models.DataSource{ JsonData: simplejson.New(), SecureJsonData: securejsondata.SecureJsonData{}, }) @@ -123,19 +123,19 @@ func TestMySQL(t *testing.T) { So(err, ShouldBeNil) Convey("Query with Table format should map MySQL column types to Go types", func() { - query := &tsdb.TsdbQuery{ - Queries: []*tsdb.Query{ + query := plugins.DataQuery{ + Queries: []plugins.DataSubQuery{ { Model: simplejson.NewFromAny(map[string]interface{}{ "rawSql": "SELECT * FROM mysql_types", "format": "table", }), - RefId: "A", + RefID: "A", }, }, } - resp, err := endpoint.Query(context.Background(), nil, query) + resp, err := exe.DataQuery(context.Background(), nil, query) So(err, ShouldBeNil) queryResult := resp.Results["A"] So(queryResult.Error, ShouldBeNil) @@ -212,19 +212,19 @@ func TestMySQL(t *testing.T) { So(err, ShouldBeNil) Convey("When doing a metric query using timeGroup", func() { - query := &tsdb.TsdbQuery{ - Queries: []*tsdb.Query{ + query := plugins.DataQuery{ + Queries: []plugins.DataSubQuery{ { Model: simplejson.NewFromAny(map[string]interface{}{ "rawSql": "SELECT $__timeGroup(time, '5m') as time_sec, avg(value) as value FROM metric GROUP BY 1 ORDER BY 1", "format": "time_series", }), - RefId: "A", + RefID: "A", }, }, } - resp, err := endpoint.Query(context.Background(), nil, query) + resp, err := exe.DataQuery(context.Background(), nil, query) So(err, ShouldBeNil) queryResult := resp.Results["A"] So(queryResult.Error, ShouldBeNil) @@ -255,23 +255,23 @@ func TestMySQL(t *testing.T) { }) Convey("When doing a metric query using timeGroup with NULL fill enabled", func() { - query := &tsdb.TsdbQuery{ - Queries: []*tsdb.Query{ + query := plugins.DataQuery{ + Queries: []plugins.DataSubQuery{ { Model: 
simplejson.NewFromAny(map[string]interface{}{ "rawSql": "SELECT $__timeGroup(time, '5m', NULL) as time_sec, avg(value) as value FROM metric GROUP BY 1 ORDER BY 1", "format": "time_series", }), - RefId: "A", + RefID: "A", }, }, - TimeRange: &tsdb.TimeRange{ + TimeRange: &plugins.DataTimeRange{ From: fmt.Sprintf("%v", fromStart.Unix()*1000), To: fmt.Sprintf("%v", fromStart.Add(34*time.Minute).Unix()*1000), }, } - resp, err := endpoint.Query(context.Background(), nil, query) + resp, err := exe.DataQuery(context.Background(), nil, query) So(err, ShouldBeNil) queryResult := resp.Results["A"] So(queryResult.Error, ShouldBeNil) @@ -316,24 +316,24 @@ func TestMySQL(t *testing.T) { }) Convey("Should replace $__interval", func() { - query := &tsdb.TsdbQuery{ - Queries: []*tsdb.Query{ + query := plugins.DataQuery{ + Queries: []plugins.DataSubQuery{ { DataSource: &models.DataSource{JsonData: simplejson.New()}, Model: simplejson.NewFromAny(map[string]interface{}{ "rawSql": "SELECT $__timeGroup(time, $__interval) AS time, avg(value) as value FROM metric GROUP BY 1 ORDER BY 1", "format": "time_series", }), - RefId: "A", + RefID: "A", }, }, - TimeRange: &tsdb.TimeRange{ + TimeRange: &plugins.DataTimeRange{ From: fmt.Sprintf("%v", fromStart.Unix()*1000), To: fmt.Sprintf("%v", fromStart.Add(30*time.Minute).Unix()*1000), }, } - resp, err := endpoint.Query(context.Background(), nil, query) + resp, err := exe.DataQuery(context.Background(), nil, query) So(err, ShouldBeNil) queryResult := resp.Results["A"] So(queryResult.Error, ShouldBeNil) @@ -342,23 +342,23 @@ func TestMySQL(t *testing.T) { }) Convey("When doing a metric query using timeGroup with value fill enabled", func() { - query := &tsdb.TsdbQuery{ - Queries: []*tsdb.Query{ + query := plugins.DataQuery{ + Queries: []plugins.DataSubQuery{ { Model: simplejson.NewFromAny(map[string]interface{}{ "rawSql": "SELECT $__timeGroup(time, '5m', 1.5) as time_sec, avg(value) as value FROM metric GROUP BY 1 ORDER BY 1", "format": "time_series", }), - RefId: "A", + RefID: "A", }, }, - TimeRange: &tsdb.TimeRange{ + TimeRange: &plugins.DataTimeRange{ From: fmt.Sprintf("%v", fromStart.Unix()*1000), To: fmt.Sprintf("%v", fromStart.Add(34*time.Minute).Unix()*1000), }, } - resp, err := endpoint.Query(context.Background(), nil, query) + resp, err := exe.DataQuery(context.Background(), nil, query) So(err, ShouldBeNil) queryResult := resp.Results["A"] So(queryResult.Error, ShouldBeNil) @@ -368,23 +368,23 @@ func TestMySQL(t *testing.T) { }) Convey("When doing a metric query using timeGroup with previous fill enabled", func() { - query := &tsdb.TsdbQuery{ - Queries: []*tsdb.Query{ + query := plugins.DataQuery{ + Queries: []plugins.DataSubQuery{ { Model: simplejson.NewFromAny(map[string]interface{}{ "rawSql": "SELECT $__timeGroup(time, '5m', previous) as time_sec, avg(value) as value FROM metric GROUP BY 1 ORDER BY 1", "format": "time_series", }), - RefId: "A", + RefID: "A", }, }, - TimeRange: &tsdb.TimeRange{ + TimeRange: &plugins.DataTimeRange{ From: fmt.Sprintf("%v", fromStart.Unix()*1000), To: fmt.Sprintf("%v", fromStart.Add(34*time.Minute).Unix()*1000), }, } - resp, err := endpoint.Query(context.Background(), nil, query) + resp, err := exe.DataQuery(context.Background(), nil, query) So(err, ShouldBeNil) queryResult := resp.Results["A"] So(queryResult.Error, ShouldBeNil) @@ -467,19 +467,19 @@ func TestMySQL(t *testing.T) { So(err, ShouldBeNil) Convey("When doing a metric query using time as time column should return metric with time in milliseconds", func() { - query := 
&tsdb.TsdbQuery{ - Queries: []*tsdb.Query{ + query := plugins.DataQuery{ + Queries: []plugins.DataSubQuery{ { Model: simplejson.NewFromAny(map[string]interface{}{ "rawSql": `SELECT time, valueOne FROM metric_values ORDER BY time LIMIT 1`, "format": "time_series", }), - RefId: "A", + RefID: "A", }, }, } - resp, err := endpoint.Query(context.Background(), nil, query) + resp, err := exe.DataQuery(context.Background(), nil, query) So(err, ShouldBeNil) queryResult := resp.Results["A"] So(queryResult.Error, ShouldBeNil) @@ -489,19 +489,19 @@ func TestMySQL(t *testing.T) { }) Convey("When doing a metric query using time (nullable) as time column should return metric with time in milliseconds", func() { - query := &tsdb.TsdbQuery{ - Queries: []*tsdb.Query{ + query := plugins.DataQuery{ + Queries: []plugins.DataSubQuery{ { Model: simplejson.NewFromAny(map[string]interface{}{ "rawSql": `SELECT timeNullable as time, valueOne FROM metric_values ORDER BY time LIMIT 1`, "format": "time_series", }), - RefId: "A", + RefID: "A", }, }, } - resp, err := endpoint.Query(context.Background(), nil, query) + resp, err := exe.DataQuery(context.Background(), nil, query) So(err, ShouldBeNil) queryResult := resp.Results["A"] So(queryResult.Error, ShouldBeNil) @@ -511,19 +511,19 @@ func TestMySQL(t *testing.T) { }) Convey("When doing a metric query using epoch (int64) as time column and value column (int64) should return metric with time in milliseconds", func() { - query := &tsdb.TsdbQuery{ - Queries: []*tsdb.Query{ + query := plugins.DataQuery{ + Queries: []plugins.DataSubQuery{ { Model: simplejson.NewFromAny(map[string]interface{}{ "rawSql": `SELECT timeInt64 as time, timeInt64 FROM metric_values ORDER BY time LIMIT 1`, "format": "time_series", }), - RefId: "A", + RefID: "A", }, }, } - resp, err := endpoint.Query(context.Background(), nil, query) + resp, err := exe.DataQuery(context.Background(), nil, query) So(err, ShouldBeNil) queryResult := resp.Results["A"] So(queryResult.Error, ShouldBeNil) @@ -533,19 +533,19 @@ func TestMySQL(t *testing.T) { }) Convey("When doing a metric query using epoch (int64 nullable) as time column and value column (int64 nullable) should return metric with time in milliseconds", func() { - query := &tsdb.TsdbQuery{ - Queries: []*tsdb.Query{ + query := plugins.DataQuery{ + Queries: []plugins.DataSubQuery{ { Model: simplejson.NewFromAny(map[string]interface{}{ "rawSql": `SELECT timeInt64Nullable as time, timeInt64Nullable FROM metric_values ORDER BY time LIMIT 1`, "format": "time_series", }), - RefId: "A", + RefID: "A", }, }, } - resp, err := endpoint.Query(context.Background(), nil, query) + resp, err := exe.DataQuery(context.Background(), nil, query) So(err, ShouldBeNil) queryResult := resp.Results["A"] So(queryResult.Error, ShouldBeNil) @@ -555,19 +555,19 @@ func TestMySQL(t *testing.T) { }) Convey("When doing a metric query using epoch (float64) as time column and value column (float64) should return metric with time in milliseconds", func() { - query := &tsdb.TsdbQuery{ - Queries: []*tsdb.Query{ + query := plugins.DataQuery{ + Queries: []plugins.DataSubQuery{ { Model: simplejson.NewFromAny(map[string]interface{}{ "rawSql": `SELECT timeFloat64 as time, timeFloat64 FROM metric_values ORDER BY time LIMIT 1`, "format": "time_series", }), - RefId: "A", + RefID: "A", }, }, } - resp, err := endpoint.Query(context.Background(), nil, query) + resp, err := exe.DataQuery(context.Background(), nil, query) So(err, ShouldBeNil) queryResult := resp.Results["A"] So(queryResult.Error, ShouldBeNil) 
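
The hunks in these test files repeat one mechanical rewrite: the pointer types *tsdb.TsdbQuery and *tsdb.Query become the value types plugins.DataQuery and plugins.DataSubQuery, RefId becomes RefID, and endpoint.Query(...) becomes exe.DataQuery(...). A minimal Go sketch of the migrated calling convention, assuming only the types and signatures visible in this diff; the executor value exe and the raw SQL are placeholders, not a definitive API:

// Sketch only: mirrors the pattern these hunks apply throughout the tests.
package main

import (
	"context"

	"github.com/grafana/grafana/pkg/components/simplejson"
	"github.com/grafana/grafana/pkg/plugins"
)

// runQuery builds the new-style query value and executes it. exe is any
// executor returned by one of the NewExecutor constructors in this diff.
func runQuery(ctx context.Context, exe plugins.DataPlugin) error {
	// Value type with From/To/Now fields, replacing *tsdb.TimeRange.
	timeRange := plugins.DataTimeRange{From: "5m", To: "now"}
	query := plugins.DataQuery{
		TimeRange: &timeRange,
		Queries: []plugins.DataSubQuery{
			{
				RefID: "A", // field renamed from RefId
				Model: simplejson.NewFromAny(map[string]interface{}{
					"rawSql": "SELECT time, valueOne FROM metric_values ORDER BY time LIMIT 1",
					"format": "time_series",
				}),
			},
		},
	}

	// DataQuery replaces the old Query method; the *models.DataSource
	// argument is nil here, as in the tests above.
	resp, err := exe.DataQuery(ctx, nil, query)
	if err != nil {
		return err
	}
	return resp.Results["A"].Error
}
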
@@ -577,19 +577,19 @@ func TestMySQL(t *testing.T) { }) Convey("When doing a metric query using epoch (float64 nullable) as time column and value column (float64 nullable) should return metric with time in milliseconds", func() { - query := &tsdb.TsdbQuery{ - Queries: []*tsdb.Query{ + query := plugins.DataQuery{ + Queries: []plugins.DataSubQuery{ { Model: simplejson.NewFromAny(map[string]interface{}{ "rawSql": `SELECT timeFloat64Nullable as time, timeFloat64Nullable FROM metric_values ORDER BY time LIMIT 1`, "format": "time_series", }), - RefId: "A", + RefID: "A", }, }, } - resp, err := endpoint.Query(context.Background(), nil, query) + resp, err := exe.DataQuery(context.Background(), nil, query) So(err, ShouldBeNil) queryResult := resp.Results["A"] So(queryResult.Error, ShouldBeNil) @@ -599,19 +599,19 @@ func TestMySQL(t *testing.T) { }) Convey("When doing a metric query using epoch (int32) as time column and value column (int32) should return metric with time in milliseconds", func() { - query := &tsdb.TsdbQuery{ - Queries: []*tsdb.Query{ + query := plugins.DataQuery{ + Queries: []plugins.DataSubQuery{ { Model: simplejson.NewFromAny(map[string]interface{}{ "rawSql": `SELECT timeInt32 as time, timeInt32 FROM metric_values ORDER BY time LIMIT 1`, "format": "time_series", }), - RefId: "A", + RefID: "A", }, }, } - resp, err := endpoint.Query(context.Background(), nil, query) + resp, err := exe.DataQuery(context.Background(), nil, query) So(err, ShouldBeNil) queryResult := resp.Results["A"] So(queryResult.Error, ShouldBeNil) @@ -621,19 +621,19 @@ func TestMySQL(t *testing.T) { }) Convey("When doing a metric query using epoch (int32 nullable) as time column and value column (int32 nullable) should return metric with time in milliseconds", func() { - query := &tsdb.TsdbQuery{ - Queries: []*tsdb.Query{ + query := plugins.DataQuery{ + Queries: []plugins.DataSubQuery{ { Model: simplejson.NewFromAny(map[string]interface{}{ "rawSql": `SELECT timeInt32Nullable as time, timeInt32Nullable FROM metric_values ORDER BY time LIMIT 1`, "format": "time_series", }), - RefId: "A", + RefID: "A", }, }, } - resp, err := endpoint.Query(context.Background(), nil, query) + resp, err := exe.DataQuery(context.Background(), nil, query) So(err, ShouldBeNil) queryResult := resp.Results["A"] So(queryResult.Error, ShouldBeNil) @@ -643,19 +643,19 @@ func TestMySQL(t *testing.T) { }) Convey("When doing a metric query using epoch (float32) as time column and value column (float32) should return metric with time in milliseconds", func() { - query := &tsdb.TsdbQuery{ - Queries: []*tsdb.Query{ + query := plugins.DataQuery{ + Queries: []plugins.DataSubQuery{ { Model: simplejson.NewFromAny(map[string]interface{}{ "rawSql": `SELECT timeFloat32 as time, timeFloat32 FROM metric_values ORDER BY time LIMIT 1`, "format": "time_series", }), - RefId: "A", + RefID: "A", }, }, } - resp, err := endpoint.Query(context.Background(), nil, query) + resp, err := exe.DataQuery(context.Background(), nil, query) So(err, ShouldBeNil) queryResult := resp.Results["A"] So(queryResult.Error, ShouldBeNil) @@ -665,19 +665,19 @@ func TestMySQL(t *testing.T) { }) Convey("When doing a metric query using epoch (float32 nullable) as time column and value column (float32 nullable) should return metric with time in milliseconds", func() { - query := &tsdb.TsdbQuery{ - Queries: []*tsdb.Query{ + query := plugins.DataQuery{ + Queries: []plugins.DataSubQuery{ { Model: simplejson.NewFromAny(map[string]interface{}{ "rawSql": `SELECT timeFloat32Nullable as time, 
timeFloat32Nullable FROM metric_values ORDER BY time LIMIT 1`, "format": "time_series", }), - RefId: "A", + RefID: "A", }, }, } - resp, err := endpoint.Query(context.Background(), nil, query) + resp, err := exe.DataQuery(context.Background(), nil, query) So(err, ShouldBeNil) queryResult := resp.Results["A"] So(queryResult.Error, ShouldBeNil) @@ -687,19 +687,19 @@ func TestMySQL(t *testing.T) { }) Convey("When doing a metric query grouping by time and select metric column should return correct series", func() { - query := &tsdb.TsdbQuery{ - Queries: []*tsdb.Query{ + query := plugins.DataQuery{ + Queries: []plugins.DataSubQuery{ { Model: simplejson.NewFromAny(map[string]interface{}{ "rawSql": `SELECT $__time(time), CONCAT(measurement, ' - value one') as metric, valueOne FROM metric_values ORDER BY 1,2`, "format": "time_series", }), - RefId: "A", + RefID: "A", }, }, } - resp, err := endpoint.Query(context.Background(), nil, query) + resp, err := exe.DataQuery(context.Background(), nil, query) So(err, ShouldBeNil) queryResult := resp.Results["A"] So(queryResult.Error, ShouldBeNil) @@ -710,19 +710,19 @@ func TestMySQL(t *testing.T) { }) Convey("When doing a metric query with metric column and multiple value columns", func() { - query := &tsdb.TsdbQuery{ - Queries: []*tsdb.Query{ + query := plugins.DataQuery{ + Queries: []plugins.DataSubQuery{ { Model: simplejson.NewFromAny(map[string]interface{}{ "rawSql": `SELECT $__time(time), measurement as metric, valueOne, valueTwo FROM metric_values ORDER BY 1,2`, "format": "time_series", }), - RefId: "A", + RefID: "A", }, }, } - resp, err := endpoint.Query(context.Background(), nil, query) + resp, err := exe.DataQuery(context.Background(), nil, query) So(err, ShouldBeNil) queryResult := resp.Results["A"] So(queryResult.Error, ShouldBeNil) @@ -735,19 +735,19 @@ func TestMySQL(t *testing.T) { }) Convey("When doing a metric query grouping by time should return correct series", func() { - query := &tsdb.TsdbQuery{ - Queries: []*tsdb.Query{ + query := plugins.DataQuery{ + Queries: []plugins.DataSubQuery{ { Model: simplejson.NewFromAny(map[string]interface{}{ "rawSql": `SELECT $__time(time), valueOne, valueTwo FROM metric_values ORDER BY 1`, "format": "time_series", }), - RefId: "A", + RefID: "A", }, }, } - resp, err := endpoint.Query(context.Background(), nil, query) + resp, err := exe.DataQuery(context.Background(), nil, query) So(err, ShouldBeNil) queryResult := resp.Results["A"] So(queryResult.Error, ShouldBeNil) @@ -760,21 +760,21 @@ func TestMySQL(t *testing.T) { Convey("When doing a query with timeFrom,timeTo,unixEpochFrom,unixEpochTo macros", func() { sqleng.Interpolate = origInterpolate - query := &tsdb.TsdbQuery{ - TimeRange: tsdb.NewFakeTimeRange("5m", "now", fromStart), - Queries: []*tsdb.Query{ + query := plugins.DataQuery{ + TimeRange: &plugins.DataTimeRange{From: "5m", To: "now", Now: fromStart}, + Queries: []plugins.DataSubQuery{ { DataSource: &models.DataSource{JsonData: simplejson.New()}, Model: simplejson.NewFromAny(map[string]interface{}{ "rawSql": `SELECT time FROM metric_values WHERE time > $__timeFrom() OR time < $__timeTo() OR 1 < $__unixEpochFrom() OR $__unixEpochTo() > 1 ORDER BY 1`, "format": "time_series", }), - RefId: "A", + RefID: "A", }, }, } - resp, err := endpoint.Query(context.Background(), nil, query) + resp, err := exe.DataQuery(context.Background(), nil, query) So(err, ShouldBeNil) queryResult := resp.Results["A"] So(queryResult.Error, ShouldBeNil) @@ -816,46 +816,46 @@ func TestMySQL(t *testing.T) { } Convey("When doing an 
annotation query of deploy events should return expected result", func() { - query := &tsdb.TsdbQuery{ - Queries: []*tsdb.Query{ + query := plugins.DataQuery{ + Queries: []plugins.DataSubQuery{ { Model: simplejson.NewFromAny(map[string]interface{}{ "rawSql": `SELECT time_sec, description as text, tags FROM event WHERE $__unixEpochFilter(time_sec) AND tags='deploy' ORDER BY 1 ASC`, "format": "table", }), - RefId: "Deploys", + RefID: "Deploys", }, }, - TimeRange: &tsdb.TimeRange{ + TimeRange: &plugins.DataTimeRange{ From: fmt.Sprintf("%v", fromStart.Add(-20*time.Minute).Unix()*1000), To: fmt.Sprintf("%v", fromStart.Add(40*time.Minute).Unix()*1000), }, } - resp, err := endpoint.Query(context.Background(), nil, query) + resp, err := exe.DataQuery(context.Background(), nil, query) queryResult := resp.Results["Deploys"] So(err, ShouldBeNil) So(len(queryResult.Tables[0].Rows), ShouldEqual, 3) }) Convey("When doing an annotation query of ticket events should return expected result", func() { - query := &tsdb.TsdbQuery{ - Queries: []*tsdb.Query{ + query := plugins.DataQuery{ + Queries: []plugins.DataSubQuery{ { Model: simplejson.NewFromAny(map[string]interface{}{ "rawSql": `SELECT time_sec, description as text, tags FROM event WHERE $__unixEpochFilter(time_sec) AND tags='ticket' ORDER BY 1 ASC`, "format": "table", }), - RefId: "Tickets", + RefID: "Tickets", }, }, - TimeRange: &tsdb.TimeRange{ + TimeRange: &plugins.DataTimeRange{ From: fmt.Sprintf("%v", fromStart.Add(-20*time.Minute).Unix()*1000), To: fmt.Sprintf("%v", fromStart.Add(40*time.Minute).Unix()*1000), }, } - resp, err := endpoint.Query(context.Background(), nil, query) + resp, err := exe.DataQuery(context.Background(), nil, query) queryResult := resp.Results["Tickets"] So(err, ShouldBeNil) So(len(queryResult.Tables[0].Rows), ShouldEqual, 3) @@ -865,8 +865,8 @@ func TestMySQL(t *testing.T) { dt := time.Date(2018, 3, 14, 21, 20, 6, 0, time.UTC) dtFormat := "2006-01-02 15:04:05.999999999" - query := &tsdb.TsdbQuery{ - Queries: []*tsdb.Query{ + query := plugins.DataQuery{ + Queries: []plugins.DataSubQuery{ { Model: simplejson.NewFromAny(map[string]interface{}{ "rawSql": fmt.Sprintf(`SELECT @@ -876,12 +876,12 @@ func TestMySQL(t *testing.T) { `, dt.Format(dtFormat)), "format": "table", }), - RefId: "A", + RefID: "A", }, }, } - resp, err := endpoint.Query(context.Background(), nil, query) + resp, err := exe.DataQuery(context.Background(), nil, query) So(err, ShouldBeNil) queryResult := resp.Results["A"] So(queryResult.Error, ShouldBeNil) @@ -895,8 +895,8 @@ func TestMySQL(t *testing.T) { Convey("When doing an annotation query with a time column in epoch second format should return ms", func() { dt := time.Date(2018, 3, 14, 21, 20, 6, 527e6, time.UTC) - query := &tsdb.TsdbQuery{ - Queries: []*tsdb.Query{ + query := plugins.DataQuery{ + Queries: []plugins.DataSubQuery{ { Model: simplejson.NewFromAny(map[string]interface{}{ "rawSql": fmt.Sprintf(`SELECT @@ -906,12 +906,12 @@ func TestMySQL(t *testing.T) { `, dt.Unix()), "format": "table", }), - RefId: "A", + RefID: "A", }, }, } - resp, err := endpoint.Query(context.Background(), nil, query) + resp, err := exe.DataQuery(context.Background(), nil, query) So(err, ShouldBeNil) queryResult := resp.Results["A"] So(queryResult.Error, ShouldBeNil) @@ -925,8 +925,8 @@ func TestMySQL(t *testing.T) { Convey("When doing an annotation query with a time column in epoch second format (signed integer) should return ms", func() { dt := time.Date(2018, 3, 14, 21, 20, 6, 0, time.Local) - query := &tsdb.TsdbQuery{ - 
Queries: []*tsdb.Query{ + query := plugins.DataQuery{ + Queries: []plugins.DataSubQuery{ { Model: simplejson.NewFromAny(map[string]interface{}{ "rawSql": fmt.Sprintf(`SELECT @@ -936,12 +936,12 @@ func TestMySQL(t *testing.T) { `, dt.Unix()), "format": "table", }), - RefId: "A", + RefID: "A", }, }, } - resp, err := endpoint.Query(context.Background(), nil, query) + resp, err := exe.DataQuery(context.Background(), nil, query) So(err, ShouldBeNil) queryResult := resp.Results["A"] So(queryResult.Error, ShouldBeNil) @@ -955,8 +955,8 @@ func TestMySQL(t *testing.T) { Convey("When doing an annotation query with a time column in epoch millisecond format should return ms", func() { dt := time.Date(2018, 3, 14, 21, 20, 6, 527e6, time.UTC) - query := &tsdb.TsdbQuery{ - Queries: []*tsdb.Query{ + query := plugins.DataQuery{ + Queries: []plugins.DataSubQuery{ { Model: simplejson.NewFromAny(map[string]interface{}{ "rawSql": fmt.Sprintf(`SELECT @@ -966,12 +966,12 @@ func TestMySQL(t *testing.T) { `, dt.Unix()*1000), "format": "table", }), - RefId: "A", + RefID: "A", }, }, } - resp, err := endpoint.Query(context.Background(), nil, query) + resp, err := exe.DataQuery(context.Background(), nil, query) So(err, ShouldBeNil) queryResult := resp.Results["A"] So(queryResult.Error, ShouldBeNil) @@ -983,8 +983,8 @@ func TestMySQL(t *testing.T) { }) Convey("When doing an annotation query with a time column holding a unsigned integer null value should return nil", func() { - query := &tsdb.TsdbQuery{ - Queries: []*tsdb.Query{ + query := plugins.DataQuery{ + Queries: []plugins.DataSubQuery{ { Model: simplejson.NewFromAny(map[string]interface{}{ "rawSql": `SELECT @@ -994,12 +994,12 @@ func TestMySQL(t *testing.T) { `, "format": "table", }), - RefId: "A", + RefID: "A", }, }, } - resp, err := endpoint.Query(context.Background(), nil, query) + resp, err := exe.DataQuery(context.Background(), nil, query) So(err, ShouldBeNil) queryResult := resp.Results["A"] So(queryResult.Error, ShouldBeNil) @@ -1011,8 +1011,8 @@ func TestMySQL(t *testing.T) { }) Convey("When doing an annotation query with a time column holding a DATETIME null value should return nil", func() { - query := &tsdb.TsdbQuery{ - Queries: []*tsdb.Query{ + query := plugins.DataQuery{ + Queries: []plugins.DataSubQuery{ { Model: simplejson.NewFromAny(map[string]interface{}{ "rawSql": `SELECT @@ -1022,12 +1022,12 @@ func TestMySQL(t *testing.T) { `, "format": "table", }), - RefId: "A", + RefID: "A", }, }, } - resp, err := endpoint.Query(context.Background(), nil, query) + resp, err := exe.DataQuery(context.Background(), nil, query) So(err, ShouldBeNil) queryResult := resp.Results["A"] So(queryResult.Error, ShouldBeNil) diff --git a/pkg/tsdb/opentsdb/opentsdb.go b/pkg/tsdb/opentsdb/opentsdb.go index 2edd0084ce9..7674ebf69f4 100644 --- a/pkg/tsdb/opentsdb/opentsdb.go +++ b/pkg/tsdb/opentsdb/opentsdb.go @@ -17,29 +17,23 @@ import ( "github.com/grafana/grafana/pkg/components/null" "github.com/grafana/grafana/pkg/infra/log" "github.com/grafana/grafana/pkg/models" + "github.com/grafana/grafana/pkg/plugins" "github.com/grafana/grafana/pkg/setting" - "github.com/grafana/grafana/pkg/tsdb" ) type OpenTsdbExecutor struct { } -func NewOpenTsdbExecutor(datasource *models.DataSource) (tsdb.TsdbQueryEndpoint, error) { +func NewExecutor(*models.DataSource) (plugins.DataPlugin, error) { return &OpenTsdbExecutor{}, nil } var ( - plog log.Logger + plog = log.New("tsdb.opentsdb") ) -func init() { - plog = log.New("tsdb.opentsdb") - tsdb.RegisterTsdbQueryEndpoint("opentsdb", 
NewOpenTsdbExecutor) -} - -func (e *OpenTsdbExecutor) Query(ctx context.Context, dsInfo *models.DataSource, queryContext *tsdb.TsdbQuery) (*tsdb.Response, error) { - result := &tsdb.Response{} - +func (e *OpenTsdbExecutor) DataQuery(ctx context.Context, dsInfo *models.DataSource, + queryContext plugins.DataQuery) (plugins.DataResponse, error) { var tsdbQuery OpenTsdbQuery tsdbQuery.Start = queryContext.TimeRange.GetFromAsMsEpoch() @@ -50,32 +44,34 @@ func (e *OpenTsdbExecutor) Query(ctx context.Context, dsInfo *models.DataSource, tsdbQuery.Queries = append(tsdbQuery.Queries, metric) } + // TODO: Don't use global variable if setting.Env == setting.Dev { plog.Debug("OpenTsdb request", "params", tsdbQuery) } req, err := e.createRequest(dsInfo, tsdbQuery) if err != nil { - return nil, err + return plugins.DataResponse{}, err } httpClient, err := dsInfo.GetHttpClient() if err != nil { - return nil, err + return plugins.DataResponse{}, err } res, err := ctxhttp.Do(ctx, httpClient, req) if err != nil { - return nil, err + return plugins.DataResponse{}, err } queryResult, err := e.parseResponse(tsdbQuery, res) if err != nil { - return nil, err + return plugins.DataResponse{}, err } - result.Results = queryResult - return result, nil + return plugins.DataResponse{ + Results: queryResult, + }, nil } func (e *OpenTsdbExecutor) createRequest(dsInfo *models.DataSource, data OpenTsdbQuery) (*http.Request, error) { @@ -102,12 +98,12 @@ func (e *OpenTsdbExecutor) createRequest(dsInfo *models.DataSource, data OpenTsd req.SetBasicAuth(dsInfo.BasicAuthUser, dsInfo.DecryptedBasicAuthPassword()) } - return req, err + return req, nil } -func (e *OpenTsdbExecutor) parseResponse(query OpenTsdbQuery, res *http.Response) (map[string]*tsdb.QueryResult, error) { - queryResults := make(map[string]*tsdb.QueryResult) - queryRes := tsdb.NewQueryResult() +func (e *OpenTsdbExecutor) parseResponse(query OpenTsdbQuery, res *http.Response) (map[string]plugins.DataQueryResult, error) { + queryResults := make(map[string]plugins.DataQueryResult) + queryRes := plugins.DataQueryResult{} body, err := ioutil.ReadAll(res.Body) if err != nil { @@ -132,7 +128,7 @@ func (e *OpenTsdbExecutor) parseResponse(query OpenTsdbQuery, res *http.Response } for _, val := range data { - series := tsdb.TimeSeries{ + series := plugins.DataTimeSeries{ Name: val.Metric, } @@ -142,17 +138,19 @@ func (e *OpenTsdbExecutor) parseResponse(query OpenTsdbQuery, res *http.Response plog.Info("Failed to unmarshal opentsdb timestamp", "timestamp", timeString) return nil, err } - series.Points = append(series.Points, tsdb.NewTimePoint(null.FloatFrom(value), timestamp)) + series.Points = append(series.Points, plugins.DataTimePoint{ + null.FloatFrom(value), null.FloatFrom(timestamp), + }) } - queryRes.Series = append(queryRes.Series, &series) + queryRes.Series = append(queryRes.Series, series) } queryResults["A"] = queryRes return queryResults, nil } -func (e *OpenTsdbExecutor) buildMetric(query *tsdb.Query) map[string]interface{} { +func (e *OpenTsdbExecutor) buildMetric(query plugins.DataSubQuery) map[string]interface{} { metric := make(map[string]interface{}) // Setting metric and aggregator diff --git a/pkg/tsdb/opentsdb/opentsdb_test.go b/pkg/tsdb/opentsdb/opentsdb_test.go index 053bb337aa0..2c1f578640d 100644 --- a/pkg/tsdb/opentsdb/opentsdb_test.go +++ b/pkg/tsdb/opentsdb/opentsdb_test.go @@ -4,7 +4,7 @@ import ( "testing" "github.com/grafana/grafana/pkg/components/simplejson" - "github.com/grafana/grafana/pkg/tsdb" + "github.com/grafana/grafana/pkg/plugins" 
"github.com/stretchr/testify/require" ) @@ -12,7 +12,7 @@ func TestOpenTsdbExecutor(t *testing.T) { exec := &OpenTsdbExecutor{} t.Run("Build metric with downsampling enabled", func(t *testing.T) { - query := &tsdb.Query{ + query := plugins.DataSubQuery{ Model: simplejson.New(), } @@ -32,7 +32,7 @@ func TestOpenTsdbExecutor(t *testing.T) { }) t.Run("Build metric with downsampling disabled", func(t *testing.T) { - query := &tsdb.Query{ + query := plugins.DataSubQuery{ Model: simplejson.New(), } @@ -51,7 +51,7 @@ func TestOpenTsdbExecutor(t *testing.T) { }) t.Run("Build metric with downsampling enabled with params", func(t *testing.T) { - query := &tsdb.Query{ + query := plugins.DataSubQuery{ Model: simplejson.New(), } @@ -71,7 +71,7 @@ func TestOpenTsdbExecutor(t *testing.T) { }) t.Run("Build metric with tags with downsampling disabled", func(t *testing.T) { - query := &tsdb.Query{ + query := plugins.DataSubQuery{ Model: simplejson.New(), } @@ -102,7 +102,7 @@ func TestOpenTsdbExecutor(t *testing.T) { }) t.Run("Build metric with rate enabled but counter disabled", func(t *testing.T) { - query := &tsdb.Query{ + query := plugins.DataSubQuery{ Model: simplejson.New(), } @@ -134,7 +134,7 @@ func TestOpenTsdbExecutor(t *testing.T) { }) t.Run("Build metric with rate and counter enabled", func(t *testing.T) { - query := &tsdb.Query{ + query := plugins.DataSubQuery{ Model: simplejson.New(), } diff --git a/pkg/tsdb/postgres/macros.go b/pkg/tsdb/postgres/macros.go index d11f3e19cd4..3ada39a9919 100644 --- a/pkg/tsdb/postgres/macros.go +++ b/pkg/tsdb/postgres/macros.go @@ -7,7 +7,7 @@ import ( "time" "github.com/grafana/grafana/pkg/components/gtime" - "github.com/grafana/grafana/pkg/tsdb" + "github.com/grafana/grafana/pkg/plugins" "github.com/grafana/grafana/pkg/tsdb/sqleng" ) @@ -15,22 +15,24 @@ const rsIdentifier = `([_a-zA-Z0-9]+)` const sExpr = `\$` + rsIdentifier + `\(([^\)]*)\)` type postgresMacroEngine struct { - *sqleng.SqlMacroEngineBase - timeRange *tsdb.TimeRange - query *tsdb.Query + *sqleng.SQLMacroEngineBase + timeRange plugins.DataTimeRange + query plugins.DataSubQuery timescaledb bool } -func newPostgresMacroEngine(timescaledb bool) sqleng.SqlMacroEngine { +func newPostgresMacroEngine(timescaledb bool) sqleng.SQLMacroEngine { return &postgresMacroEngine{ - SqlMacroEngineBase: sqleng.NewSqlMacroEngineBase(), + SQLMacroEngineBase: sqleng.NewSQLMacroEngineBase(), timescaledb: timescaledb, } } -func (m *postgresMacroEngine) Interpolate(query *tsdb.Query, timeRange *tsdb.TimeRange, sql string) (string, error) { +func (m *postgresMacroEngine) Interpolate(query plugins.DataSubQuery, timeRange plugins.DataTimeRange, + sql string) (string, error) { m.timeRange = timeRange m.query = query + // TODO: Handle error rExp, _ := regexp.Compile(sExpr) var macroError error diff --git a/pkg/tsdb/postgres/macros_test.go b/pkg/tsdb/postgres/macros_test.go index 106d71effa0..0d177b9e889 100644 --- a/pkg/tsdb/postgres/macros_test.go +++ b/pkg/tsdb/postgres/macros_test.go @@ -6,7 +6,7 @@ import ( "testing" "time" - "github.com/grafana/grafana/pkg/tsdb" + "github.com/grafana/grafana/pkg/plugins" . 
"github.com/smartystreets/goconvey/convey" ) @@ -16,12 +16,12 @@ func TestMacroEngine(t *testing.T) { engine := newPostgresMacroEngine(timescaledbEnabled) timescaledbEnabled = true engineTS := newPostgresMacroEngine(timescaledbEnabled) - query := &tsdb.Query{} + query := plugins.DataSubQuery{} Convey("Given a time range between 2018-04-12 00:00 and 2018-04-12 00:05", func() { from := time.Date(2018, 4, 12, 18, 0, 0, 0, time.UTC) to := from.Add(5 * time.Minute) - timeRange := tsdb.NewFakeTimeRange("5m", "now", to) + timeRange := plugins.DataTimeRange{From: "5m", To: "now", Now: to} Convey("interpolate __time function", func() { sql, err := engine.Interpolate(query, timeRange, "select $__time(time_column)") @@ -151,7 +151,9 @@ func TestMacroEngine(t *testing.T) { Convey("Given a time range between 1960-02-01 07:00 and 1965-02-03 08:00", func() { from := time.Date(1960, 2, 1, 7, 0, 0, 0, time.UTC) to := time.Date(1965, 2, 3, 8, 0, 0, 0, time.UTC) - timeRange := tsdb.NewTimeRange(strconv.FormatInt(from.UnixNano()/int64(time.Millisecond), 10), strconv.FormatInt(to.UnixNano()/int64(time.Millisecond), 10)) + timeRange := plugins.NewDataTimeRange( + strconv.FormatInt(from.UnixNano()/int64(time.Millisecond), 10), + strconv.FormatInt(to.UnixNano()/int64(time.Millisecond), 10)) Convey("interpolate __timeFilter function", func() { sql, err := engine.Interpolate(query, timeRange, "WHERE $__timeFilter(time_column)") @@ -177,7 +179,9 @@ func TestMacroEngine(t *testing.T) { Convey("Given a time range between 1960-02-01 07:00 and 1980-02-03 08:00", func() { from := time.Date(1960, 2, 1, 7, 0, 0, 0, time.UTC) to := time.Date(1980, 2, 3, 8, 0, 0, 0, time.UTC) - timeRange := tsdb.NewTimeRange(strconv.FormatInt(from.UnixNano()/int64(time.Millisecond), 10), strconv.FormatInt(to.UnixNano()/int64(time.Millisecond), 10)) + timeRange := plugins.NewDataTimeRange( + strconv.FormatInt(from.UnixNano()/int64(time.Millisecond), 10), + strconv.FormatInt(to.UnixNano()/int64(time.Millisecond), 10)) Convey("interpolate __timeFilter function", func() { sql, err := engine.Interpolate(query, timeRange, "WHERE $__timeFilter(time_column)") @@ -203,7 +207,8 @@ func TestMacroEngine(t *testing.T) { Convey("Given a time range between 1960-02-01 07:00:00.5 and 1980-02-03 08:00:00.5", func() { from := time.Date(1960, 2, 1, 7, 0, 0, 500e6, time.UTC) to := time.Date(1980, 2, 3, 8, 0, 0, 500e6, time.UTC) - timeRange := tsdb.NewTimeRange(strconv.FormatInt(from.UnixNano()/int64(time.Millisecond), 10), strconv.FormatInt(to.UnixNano()/int64(time.Millisecond), 10)) + timeRange := plugins.NewDataTimeRange( + strconv.FormatInt(from.UnixNano()/int64(time.Millisecond), 10), strconv.FormatInt(to.UnixNano()/int64(time.Millisecond), 10)) So(from.Format(time.RFC3339Nano), ShouldEqual, "1960-02-01T07:00:00.5Z") So(to.Format(time.RFC3339Nano), ShouldEqual, "1980-02-03T08:00:00.5Z") diff --git a/pkg/tsdb/postgres/postgres.go b/pkg/tsdb/postgres/postgres.go index 504fc18e595..4484f450097 100644 --- a/pkg/tsdb/postgres/postgres.go +++ b/pkg/tsdb/postgres/postgres.go @@ -12,9 +12,8 @@ import ( "github.com/grafana/grafana/pkg/infra/log" "github.com/grafana/grafana/pkg/models" - "github.com/grafana/grafana/pkg/tsdb" + "github.com/grafana/grafana/pkg/plugins" "github.com/grafana/grafana/pkg/tsdb/sqleng" - "xorm.io/core" ) @@ -22,26 +21,23 @@ func init() { registry.Register(®istry.Descriptor{ Name: "PostgresService", InitPriority: registry.Low, - Instance: &postgresService{}, + Instance: &PostgresService{}, }) } -type postgresService struct { +type 
PostgresService struct { Cfg *setting.Cfg `inject:""` logger log.Logger tlsManager tlsSettingsProvider } -func (s *postgresService) Init() error { +func (s *PostgresService) Init() error { s.logger = log.New("tsdb.postgres") s.tlsManager = newTLSManager(s.logger, s.Cfg.DataPath) - tsdb.RegisterTsdbQueryEndpoint("postgres", func(ds *models.DataSource) (tsdb.TsdbQueryEndpoint, error) { - return s.newPostgresQueryEndpoint(ds) - }) return nil } -func (s *postgresService) newPostgresQueryEndpoint(datasource *models.DataSource) (tsdb.TsdbQueryEndpoint, error) { +func (s *PostgresService) NewExecutor(datasource *models.DataSource) (plugins.DataPlugin, error) { s.logger.Debug("Creating Postgres query endpoint") cnnstr, err := s.generateConnectionString(datasource) @@ -53,7 +49,7 @@ func (s *postgresService) newPostgresQueryEndpoint(datasource *models.DataSource s.logger.Debug("getEngine", "connection", cnnstr) } - config := sqleng.SqlQueryEndpointConfiguration{ + config := sqleng.DataPluginConfiguration{ DriverName: "postgres", ConnectionString: cnnstr, Datasource: datasource, @@ -66,7 +62,7 @@ func (s *postgresService) newPostgresQueryEndpoint(datasource *models.DataSource timescaledb := datasource.JsonData.Get("timescaledb").MustBool(false) - endpoint, err := sqleng.NewSqlQueryEndpoint(&config, &queryResultTransformer, newPostgresMacroEngine(timescaledb), + plugin, err := sqleng.NewDataPlugin(config, &queryResultTransformer, newPostgresMacroEngine(timescaledb), s.logger) if err != nil { s.logger.Error("Failed connecting to Postgres", "err", err) @@ -74,7 +70,7 @@ func (s *postgresService) newPostgresQueryEndpoint(datasource *models.DataSource } s.logger.Debug("Successfully connected to Postgres") - return endpoint, err + return plugin, nil } // escape single quotes and backslashes in Postgres connection string parameters. 
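
With the init()-time tsdb.RegisterTsdbQueryEndpoint registrations deleted (mysql and opentsdb above, postgres here), constructing a data plugin is now an explicit call on the service or package. A minimal sketch of the Postgres path, assuming the registry still invokes Init as the registration above suggests; the data path is a placeholder, and this is an illustration of the wiring rather than the definitive bootstrap code:

// Sketch only: explicit construction replacing init()-time registration.
package main

import (
	"github.com/grafana/grafana/pkg/components/simplejson"
	"github.com/grafana/grafana/pkg/models"
	"github.com/grafana/grafana/pkg/plugins"
	"github.com/grafana/grafana/pkg/setting"
	"github.com/grafana/grafana/pkg/tsdb/postgres"
)

func newPostgresPlugin() (plugins.DataPlugin, error) {
	cfg := setting.NewCfg()
	cfg.DataPath = "/var/lib/grafana" // placeholder; the tests use t.TempDir()

	// The registry normally constructs and Init()s the service; done by hand here.
	svc := &postgres.PostgresService{Cfg: cfg}
	if err := svc.Init(); err != nil {
		return nil, err
	}

	// NewExecutor replaces the unexported newPostgresQueryEndpoint and is no
	// longer registered via tsdb.RegisterTsdbQueryEndpoint.
	return svc.NewExecutor(&models.DataSource{JsonData: simplejson.New()})
}
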
@@ -82,10 +78,9 @@ func escape(input string) string { return strings.ReplaceAll(strings.ReplaceAll(input, `\`, `\\`), "'", `\'`) } -func (s *postgresService) generateConnectionString(datasource *models.DataSource) (string, error) { +func (s *PostgresService) generateConnectionString(datasource *models.DataSource) (string, error) { var host string var port int - var err error if strings.HasPrefix(datasource.Url, "/") { host = datasource.Url s.logger.Debug("Generating connection string with Unix socket specifier", "socket", host) @@ -141,7 +136,8 @@ type postgresQueryResultTransformer struct { log log.Logger } -func (t *postgresQueryResultTransformer) TransformQueryResult(columnTypes []*sql.ColumnType, rows *core.Rows) (tsdb.RowValues, error) { +func (t *postgresQueryResultTransformer) TransformQueryResult(columnTypes []*sql.ColumnType, rows *core.Rows) ( + plugins.DataRowValues, error) { values := make([]interface{}, len(columnTypes)) valuePtrs := make([]interface{}, len(columnTypes)) diff --git a/pkg/tsdb/postgres/postgres_test.go b/pkg/tsdb/postgres/postgres_test.go index 66e7d448e1f..442a2e35311 100644 --- a/pkg/tsdb/postgres/postgres_test.go +++ b/pkg/tsdb/postgres/postgres_test.go @@ -14,10 +14,10 @@ import ( "github.com/grafana/grafana/pkg/components/simplejson" "github.com/grafana/grafana/pkg/infra/log" "github.com/grafana/grafana/pkg/models" + "github.com/grafana/grafana/pkg/plugins" "github.com/grafana/grafana/pkg/services/sqlstore" "github.com/grafana/grafana/pkg/services/sqlstore/sqlutil" "github.com/grafana/grafana/pkg/setting" - "github.com/grafana/grafana/pkg/tsdb" "github.com/grafana/grafana/pkg/tsdb/sqleng" "github.com/stretchr/testify/assert" "github.com/stretchr/testify/require" @@ -113,7 +113,7 @@ func TestGenerateConnectionString(t *testing.T) { } for _, tt := range testCases { t.Run(tt.desc, func(t *testing.T) { - svc := postgresService{ + svc := PostgresService{ Cfg: cfg, logger: log.New("tsdb.postgres"), tlsManager: &tlsTestManager{settings: tt.tlsSettings}, @@ -169,19 +169,19 @@ func TestPostgres(t *testing.T) { sqleng.NewXormEngine = func(d, c string) (*xorm.Engine, error) { return x, nil } - sqleng.Interpolate = func(query *tsdb.Query, timeRange *tsdb.TimeRange, sql string) (string, error) { + sqleng.Interpolate = func(query plugins.DataSubQuery, timeRange plugins.DataTimeRange, sql string) (string, error) { return sql, nil } cfg := setting.NewCfg() cfg.DataPath = t.TempDir() - svc := postgresService{ + svc := PostgresService{ Cfg: cfg, logger: log.New("tsdb.postgres"), tlsManager: &tlsTestManager{settings: tlsSettings{Mode: "disable"}}, } - endpoint, err := svc.newPostgresQueryEndpoint(&models.DataSource{ + exe, err := svc.NewExecutor(&models.DataSource{ JsonData: simplejson.New(), SecureJsonData: securejsondata.SecureJsonData{}, }) @@ -233,19 +233,19 @@ func TestPostgres(t *testing.T) { require.NoError(t, err) t.Run("When doing a table query should map Postgres column types to Go types", func(t *testing.T) { - query := &tsdb.TsdbQuery{ - Queries: []*tsdb.Query{ + query := plugins.DataQuery{ + Queries: []plugins.DataSubQuery{ { Model: simplejson.NewFromAny(map[string]interface{}{ "rawSql": "SELECT * FROM postgres_types", "format": "table", }), - RefId: "A", + RefID: "A", }, }, } - resp, err := endpoint.Query(context.Background(), nil, query) + resp, err := exe.DataQuery(context.Background(), nil, query) require.NoError(t, err) queryResult := resp.Results["A"] require.NoError(t, queryResult.Error) @@ -318,19 +318,19 @@ func TestPostgres(t *testing.T) { 
require.NoError(t, err) t.Run("When doing a metric query using timeGroup", func(t *testing.T) { - query := &tsdb.TsdbQuery{ - Queries: []*tsdb.Query{ + query := plugins.DataQuery{ + Queries: []plugins.DataSubQuery{ { Model: simplejson.NewFromAny(map[string]interface{}{ "rawSql": "SELECT $__timeGroup(time, '5m') AS time, avg(value) as value FROM metric GROUP BY 1 ORDER BY 1", "format": "time_series", }), - RefId: "A", + RefID: "A", }, }, } - resp, err := endpoint.Query(context.Background(), nil, query) + resp, err := exe.DataQuery(context.Background(), nil, query) require.NoError(t, err) queryResult := resp.Results["A"] require.NoError(t, queryResult.Error) @@ -368,24 +368,24 @@ func TestPostgres(t *testing.T) { sqleng.Interpolate = mockInterpolate }) - query := &tsdb.TsdbQuery{ - Queries: []*tsdb.Query{ + query := plugins.DataQuery{ + Queries: []plugins.DataSubQuery{ { DataSource: &models.DataSource{}, Model: simplejson.NewFromAny(map[string]interface{}{ "rawSql": "SELECT $__timeGroup(time, $__interval) AS time, avg(value) as value FROM metric GROUP BY 1 ORDER BY 1", "format": "time_series", }), - RefId: "A", + RefID: "A", }, }, - TimeRange: &tsdb.TimeRange{ + TimeRange: &plugins.DataTimeRange{ From: fmt.Sprintf("%v", fromStart.Unix()*1000), To: fmt.Sprintf("%v", fromStart.Add(30*time.Minute).Unix()*1000), }, } - resp, err := endpoint.Query(context.Background(), nil, query) + resp, err := exe.DataQuery(context.Background(), nil, query) require.NoError(t, err) queryResult := resp.Results["A"] require.NoError(t, queryResult.Error) @@ -395,23 +395,23 @@ func TestPostgres(t *testing.T) { }) t.Run("When doing a metric query using timeGroup with NULL fill enabled", func(t *testing.T) { - query := &tsdb.TsdbQuery{ - Queries: []*tsdb.Query{ + query := plugins.DataQuery{ + Queries: []plugins.DataSubQuery{ { Model: simplejson.NewFromAny(map[string]interface{}{ "rawSql": "SELECT $__timeGroup(time, '5m', NULL) AS time, avg(value) as value FROM metric GROUP BY 1 ORDER BY 1", "format": "time_series", }), - RefId: "A", + RefID: "A", }, }, - TimeRange: &tsdb.TimeRange{ + TimeRange: &plugins.DataTimeRange{ From: fmt.Sprintf("%v", fromStart.Unix()*1000), To: fmt.Sprintf("%v", fromStart.Add(34*time.Minute).Unix()*1000), }, } - resp, err := endpoint.Query(context.Background(), nil, query) + resp, err := exe.DataQuery(context.Background(), nil, query) require.NoError(t, err) queryResult := resp.Results["A"] require.NoError(t, queryResult.Error) @@ -448,23 +448,23 @@ func TestPostgres(t *testing.T) { }) t.Run("When doing a metric query using timeGroup with value fill enabled", func(t *testing.T) { - query := &tsdb.TsdbQuery{ - Queries: []*tsdb.Query{ + query := plugins.DataQuery{ + Queries: []plugins.DataSubQuery{ { Model: simplejson.NewFromAny(map[string]interface{}{ "rawSql": "SELECT $__timeGroup(time, '5m', 1.5) AS time, avg(value) as value FROM metric GROUP BY 1 ORDER BY 1", "format": "time_series", }), - RefId: "A", + RefID: "A", }, }, - TimeRange: &tsdb.TimeRange{ + TimeRange: &plugins.DataTimeRange{ From: fmt.Sprintf("%v", fromStart.Unix()*1000), To: fmt.Sprintf("%v", fromStart.Add(34*time.Minute).Unix()*1000), }, } - resp, err := endpoint.Query(context.Background(), nil, query) + resp, err := exe.DataQuery(context.Background(), nil, query) require.NoError(t, err) queryResult := resp.Results["A"] require.NoError(t, queryResult.Error) @@ -475,23 +475,23 @@ func TestPostgres(t *testing.T) { }) t.Run("When doing a metric query using timeGroup with previous fill enabled", func(t *testing.T) { - query := 
&tsdb.TsdbQuery{ - Queries: []*tsdb.Query{ + query := plugins.DataQuery{ + Queries: []plugins.DataSubQuery{ { Model: simplejson.NewFromAny(map[string]interface{}{ "rawSql": "SELECT $__timeGroup(time, '5m', previous), avg(value) as value FROM metric GROUP BY 1 ORDER BY 1", "format": "time_series", }), - RefId: "A", + RefID: "A", }, }, - TimeRange: &tsdb.TimeRange{ + TimeRange: &plugins.DataTimeRange{ From: fmt.Sprintf("%v", fromStart.Unix()*1000), To: fmt.Sprintf("%v", fromStart.Add(34*time.Minute).Unix()*1000), }, } - resp, err := endpoint.Query(context.Background(), nil, query) + resp, err := exe.DataQuery(context.Background(), nil, query) require.NoError(t, err) queryResult := resp.Results["A"] require.NoError(t, queryResult.Error) @@ -572,19 +572,19 @@ func TestPostgres(t *testing.T) { t.Run( "When doing a metric query using epoch (int64) as time column and value column (int64) should return metric with time in milliseconds", func(t *testing.T) { - query := &tsdb.TsdbQuery{ - Queries: []*tsdb.Query{ + query := plugins.DataQuery{ + Queries: []plugins.DataSubQuery{ { Model: simplejson.NewFromAny(map[string]interface{}{ "rawSql": `SELECT "timeInt64" as time, "timeInt64" FROM metric_values ORDER BY time LIMIT 1`, "format": "time_series", }), - RefId: "A", + RefID: "A", }, }, } - resp, err := endpoint.Query(context.Background(), nil, query) + resp, err := exe.DataQuery(context.Background(), nil, query) require.NoError(t, err) queryResult := resp.Results["A"] require.NoError(t, queryResult.Error) @@ -595,19 +595,19 @@ func TestPostgres(t *testing.T) { t.Run("When doing a metric query using epoch (int64 nullable) as time column and value column (int64 nullable,) should return metric with time in milliseconds", func(t *testing.T) { - query := &tsdb.TsdbQuery{ - Queries: []*tsdb.Query{ + query := plugins.DataQuery{ + Queries: []plugins.DataSubQuery{ { Model: simplejson.NewFromAny(map[string]interface{}{ "rawSql": `SELECT "timeInt64Nullable" as time, "timeInt64Nullable" FROM metric_values ORDER BY time LIMIT 1`, "format": "time_series", }), - RefId: "A", + RefID: "A", }, }, } - resp, err := endpoint.Query(context.Background(), nil, query) + resp, err := exe.DataQuery(context.Background(), nil, query) require.NoError(t, err) queryResult := resp.Results["A"] require.NoError(t, queryResult.Error) @@ -618,19 +618,19 @@ func TestPostgres(t *testing.T) { t.Run("When doing a metric query using epoch (float64) as time column and value column (float64), should return metric with time in milliseconds", func(t *testing.T) { - query := &tsdb.TsdbQuery{ - Queries: []*tsdb.Query{ + query := plugins.DataQuery{ + Queries: []plugins.DataSubQuery{ { Model: simplejson.NewFromAny(map[string]interface{}{ "rawSql": `SELECT "timeFloat64" as time, "timeFloat64" FROM metric_values ORDER BY time LIMIT 1`, "format": "time_series", }), - RefId: "A", + RefID: "A", }, }, } - resp, err := endpoint.Query(context.Background(), nil, query) + resp, err := exe.DataQuery(context.Background(), nil, query) require.NoError(t, err) queryResult := resp.Results["A"] require.NoError(t, queryResult.Error) @@ -641,19 +641,19 @@ func TestPostgres(t *testing.T) { t.Run("When doing a metric query using epoch (float64 nullable) as time column and value column (float64 nullable), should return metric with time in milliseconds", func(t *testing.T) { - query := &tsdb.TsdbQuery{ - Queries: []*tsdb.Query{ + query := plugins.DataQuery{ + Queries: []plugins.DataSubQuery{ { Model: simplejson.NewFromAny(map[string]interface{}{ "rawSql": `SELECT 
"timeFloat64Nullable" as time, "timeFloat64Nullable" FROM metric_values ORDER BY time LIMIT 1`, "format": "time_series", }), - RefId: "A", + RefID: "A", }, }, } - resp, err := endpoint.Query(context.Background(), nil, query) + resp, err := exe.DataQuery(context.Background(), nil, query) require.NoError(t, err) queryResult := resp.Results["A"] require.NoError(t, queryResult.Error) @@ -664,19 +664,19 @@ func TestPostgres(t *testing.T) { t.Run("When doing a metric query using epoch (int32) as time column and value column (int32), should return metric with time in milliseconds", func(t *testing.T) { - query := &tsdb.TsdbQuery{ - Queries: []*tsdb.Query{ + query := plugins.DataQuery{ + Queries: []plugins.DataSubQuery{ { Model: simplejson.NewFromAny(map[string]interface{}{ "rawSql": `SELECT "timeInt32" as time, "timeInt32" FROM metric_values ORDER BY time LIMIT 1`, "format": "time_series", }), - RefId: "A", + RefID: "A", }, }, } - resp, err := endpoint.Query(context.Background(), nil, query) + resp, err := exe.DataQuery(context.Background(), nil, query) require.NoError(t, err) queryResult := resp.Results["A"] require.NoError(t, queryResult.Error) @@ -687,19 +687,19 @@ func TestPostgres(t *testing.T) { t.Run("When doing a metric query using epoch (int32 nullable) as time column and value column (int32 nullable), should return metric with time in milliseconds", func(t *testing.T) { - query := &tsdb.TsdbQuery{ - Queries: []*tsdb.Query{ + query := plugins.DataQuery{ + Queries: []plugins.DataSubQuery{ { Model: simplejson.NewFromAny(map[string]interface{}{ "rawSql": `SELECT "timeInt32Nullable" as time, "timeInt32Nullable" FROM metric_values ORDER BY time LIMIT 1`, "format": "time_series", }), - RefId: "A", + RefID: "A", }, }, } - resp, err := endpoint.Query(context.Background(), nil, query) + resp, err := exe.DataQuery(context.Background(), nil, query) require.NoError(t, err) queryResult := resp.Results["A"] require.NoError(t, queryResult.Error) @@ -710,19 +710,19 @@ func TestPostgres(t *testing.T) { t.Run("When doing a metric query using epoch (float32) as time column and value column (float32), should return metric with time in milliseconds", func(t *testing.T) { - query := &tsdb.TsdbQuery{ - Queries: []*tsdb.Query{ + query := plugins.DataQuery{ + Queries: []plugins.DataSubQuery{ { Model: simplejson.NewFromAny(map[string]interface{}{ "rawSql": `SELECT "timeFloat32" as time, "timeFloat32" FROM metric_values ORDER BY time LIMIT 1`, "format": "time_series", }), - RefId: "A", + RefID: "A", }, }, } - resp, err := endpoint.Query(context.Background(), nil, query) + resp, err := exe.DataQuery(context.Background(), nil, query) require.NoError(t, err) queryResult := resp.Results["A"] require.NoError(t, queryResult.Error) @@ -733,19 +733,19 @@ func TestPostgres(t *testing.T) { t.Run("When doing a metric query using epoch (float32 nullable) as time column and value column (float32 nullable), should return metric with time in milliseconds", func(t *testing.T) { - query := &tsdb.TsdbQuery{ - Queries: []*tsdb.Query{ + query := plugins.DataQuery{ + Queries: []plugins.DataSubQuery{ { Model: simplejson.NewFromAny(map[string]interface{}{ "rawSql": `SELECT "timeFloat32Nullable" as time, "timeFloat32Nullable" FROM metric_values ORDER BY time LIMIT 1`, "format": "time_series", }), - RefId: "A", + RefID: "A", }, }, } - resp, err := endpoint.Query(context.Background(), nil, query) + resp, err := exe.DataQuery(context.Background(), nil, query) require.NoError(t, err) queryResult := resp.Results["A"] require.NoError(t, 
queryResult.Error) @@ -755,19 +755,19 @@ func TestPostgres(t *testing.T) { }) t.Run("When doing a metric query grouping by time and select metric column should return correct series", func(t *testing.T) { - query := &tsdb.TsdbQuery{ - Queries: []*tsdb.Query{ + query := plugins.DataQuery{ + Queries: []plugins.DataSubQuery{ { Model: simplejson.NewFromAny(map[string]interface{}{ "rawSql": `SELECT $__timeEpoch(time), measurement || ' - value one' as metric, "valueOne" FROM metric_values ORDER BY 1`, "format": "time_series", }), - RefId: "A", + RefID: "A", }, }, } - resp, err := endpoint.Query(context.Background(), nil, query) + resp, err := exe.DataQuery(context.Background(), nil, query) require.NoError(t, err) queryResult := resp.Results["A"] require.NoError(t, queryResult.Error) @@ -778,19 +778,19 @@ func TestPostgres(t *testing.T) { }) t.Run("When doing a metric query with metric column and multiple value columns", func(t *testing.T) { - query := &tsdb.TsdbQuery{ - Queries: []*tsdb.Query{ + query := plugins.DataQuery{ + Queries: []plugins.DataSubQuery{ { Model: simplejson.NewFromAny(map[string]interface{}{ "rawSql": `SELECT $__timeEpoch(time), measurement as metric, "valueOne", "valueTwo" FROM metric_values ORDER BY 1`, "format": "time_series", }), - RefId: "A", + RefID: "A", }, }, } - resp, err := endpoint.Query(context.Background(), nil, query) + resp, err := exe.DataQuery(context.Background(), nil, query) require.NoError(t, err) queryResult := resp.Results["A"] require.NoError(t, queryResult.Error) @@ -803,19 +803,19 @@ func TestPostgres(t *testing.T) { }) t.Run("When doing a metric query grouping by time should return correct series", func(t *testing.T) { - query := &tsdb.TsdbQuery{ - Queries: []*tsdb.Query{ + query := plugins.DataQuery{ + Queries: []plugins.DataSubQuery{ { Model: simplejson.NewFromAny(map[string]interface{}{ "rawSql": `SELECT $__timeEpoch(time), "valueOne", "valueTwo" FROM metric_values ORDER BY 1`, "format": "time_series", }), - RefId: "A", + RefID: "A", }, }, } - resp, err := endpoint.Query(context.Background(), nil, query) + resp, err := exe.DataQuery(context.Background(), nil, query) require.NoError(t, err) queryResult := resp.Results["A"] require.NoError(t, queryResult.Error) @@ -832,21 +832,21 @@ func TestPostgres(t *testing.T) { }) sqleng.Interpolate = origInterpolate - query := &tsdb.TsdbQuery{ - TimeRange: tsdb.NewFakeTimeRange("5m", "now", fromStart), - Queries: []*tsdb.Query{ + query := plugins.DataQuery{ + TimeRange: &plugins.DataTimeRange{From: "5m", To: "now", Now: fromStart}, + Queries: []plugins.DataSubQuery{ { DataSource: &models.DataSource{JsonData: simplejson.New()}, Model: simplejson.NewFromAny(map[string]interface{}{ "rawSql": `SELECT time FROM metric_values WHERE time > $__timeFrom() OR time < $__timeFrom() OR 1 < $__unixEpochFrom() OR $__unixEpochTo() > 1 ORDER BY 1`, "format": "time_series", }), - RefId: "A", + RefID: "A", }, }, } - resp, err := endpoint.Query(context.Background(), nil, query) + resp, err := exe.DataQuery(context.Background(), nil, query) require.NoError(t, err) queryResult := resp.Results["A"] require.NoError(t, queryResult.Error) @@ -891,46 +891,46 @@ func TestPostgres(t *testing.T) { } t.Run("When doing an annotation query of deploy events should return expected result", func(t *testing.T) { - query := &tsdb.TsdbQuery{ - Queries: []*tsdb.Query{ + query := plugins.DataQuery{ + Queries: []plugins.DataSubQuery{ { Model: simplejson.NewFromAny(map[string]interface{}{ "rawSql": `SELECT "time_sec" as time, description as text, tags 
FROM event WHERE $__unixEpochFilter(time_sec) AND tags='deploy' ORDER BY 1 ASC`,
 						"format": "table",
 					}),
-					RefId: "Deploys",
+					RefID: "Deploys",
 				},
 			},
-			TimeRange: &tsdb.TimeRange{
+			TimeRange: &plugins.DataTimeRange{
 				From: fmt.Sprintf("%v", fromStart.Add(-20*time.Minute).Unix()*1000),
 				To:   fmt.Sprintf("%v", fromStart.Add(40*time.Minute).Unix()*1000),
 			},
 		}
 
-		resp, err := endpoint.Query(context.Background(), nil, query)
+		resp, err := exe.DataQuery(context.Background(), nil, query)
 		queryResult := resp.Results["Deploys"]
 		require.NoError(t, err)
 		require.Len(t, queryResult.Tables[0].Rows, 3)
 	})
 
 	t.Run("When doing an annotation query of ticket events should return expected result", func(t *testing.T) {
-		query := &tsdb.TsdbQuery{
-			Queries: []*tsdb.Query{
+		query := plugins.DataQuery{
+			Queries: []plugins.DataSubQuery{
 				{
 					Model: simplejson.NewFromAny(map[string]interface{}{
 						"rawSql": `SELECT "time_sec" as time, description as text, tags FROM event WHERE $__unixEpochFilter(time_sec) AND tags='ticket' ORDER BY 1 ASC`,
 						"format": "table",
 					}),
-					RefId: "Tickets",
+					RefID: "Tickets",
 				},
 			},
-			TimeRange: &tsdb.TimeRange{
+			TimeRange: &plugins.DataTimeRange{
 				From: fmt.Sprintf("%v", fromStart.Add(-20*time.Minute).Unix()*1000),
 				To:   fmt.Sprintf("%v", fromStart.Add(40*time.Minute).Unix()*1000),
 			},
 		}
 
-		resp, err := endpoint.Query(context.Background(), nil, query)
+		resp, err := exe.DataQuery(context.Background(), nil, query)
 		queryResult := resp.Results["Tickets"]
 		require.NoError(t, err)
 		require.Len(t, queryResult.Tables[0].Rows, 3)
@@ -940,8 +940,8 @@ func TestPostgres(t *testing.T) {
 		dt := time.Date(2018, 3, 14, 21, 20, 6, 527e6, time.UTC)
 		dtFormat := "2006-01-02 15:04:05.999999999"
 
-		query := &tsdb.TsdbQuery{
-			Queries: []*tsdb.Query{
+		query := plugins.DataQuery{
+			Queries: []plugins.DataSubQuery{
 				{
 					Model: simplejson.NewFromAny(map[string]interface{}{
 						"rawSql": fmt.Sprintf(`SELECT
@@ -951,12 +951,12 @@ func TestPostgres(t *testing.T) {
 						`, dt.Format(dtFormat)),
 						"format": "table",
 					}),
-					RefId: "A",
+					RefID: "A",
 				},
 			},
 		}
 
-		resp, err := endpoint.Query(context.Background(), nil, query)
+		resp, err := exe.DataQuery(context.Background(), nil, query)
 		require.NoError(t, err)
 		queryResult := resp.Results["A"]
 		require.NoError(t, queryResult.Error)
@@ -970,8 +970,8 @@ func TestPostgres(t *testing.T) {
 	t.Run("When doing an annotation query with a time column in epoch second format should return ms", func(t *testing.T) {
 		dt := time.Date(2018, 3, 14, 21, 20, 6, 527e6, time.UTC)
 
-		query := &tsdb.TsdbQuery{
-			Queries: []*tsdb.Query{
+		query := plugins.DataQuery{
+			Queries: []plugins.DataSubQuery{
 				{
 					Model: simplejson.NewFromAny(map[string]interface{}{
 						"rawSql": fmt.Sprintf(`SELECT
@@ -981,12 +981,12 @@ func TestPostgres(t *testing.T) {
 						`, dt.Unix()),
 						"format": "table",
 					}),
-					RefId: "A",
+					RefID: "A",
 				},
 			},
 		}
 
-		resp, err := endpoint.Query(context.Background(), nil, query)
+		resp, err := exe.DataQuery(context.Background(), nil, query)
 		require.NoError(t, err)
 		queryResult := resp.Results["A"]
 		require.NoError(t, queryResult.Error)
@@ -1000,8 +1000,8 @@ func TestPostgres(t *testing.T) {
 	t.Run("When doing an annotation query with a time column in epoch second format (int) should return ms", func(t *testing.T) {
 		dt := time.Date(2018, 3, 14, 21, 20, 6, 527e6, time.UTC)
 
-		query := &tsdb.TsdbQuery{
-			Queries: []*tsdb.Query{
+		query := plugins.DataQuery{
+			Queries: []plugins.DataSubQuery{
 				{
 					Model: simplejson.NewFromAny(map[string]interface{}{
 						"rawSql": fmt.Sprintf(`SELECT
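These annotation tests pin down how epoch time columns get normalized to milliseconds. The heuristic lives in the epochPrecisionToMS helper added to sqleng later in this diff: a float whose scientific-notation exponent is e+09 is treated as epoch seconds and multiplied by 1e3, e+18 as nanoseconds and divided down, and anything else passes through as milliseconds. The same logic in isolation (epochToMS is an illustrative name; the body mirrors the helper below):

package example

import (
	"strconv"
	"strings"
	"time"
)

// epochToMS mirrors sqleng's seconds/nanoseconds heuristic: the exponent of the
// value in scientific notation tells us which epoch precision we are looking at.
func epochToMS(value float64) float64 {
	s := strconv.FormatFloat(value, 'e', -1, 64)
	if strings.HasSuffix(s, "e+09") {
		return value * 1e3 // epoch seconds -> ms
	}
	if strings.HasSuffix(s, "e+18") {
		return value / float64(time.Millisecond) // epoch nanoseconds -> ms
	}
	return value // already ms
}

@@ -1011,12 +1011,12 @@ func TestPostgres(t *testing.T)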
{ `, dt.Unix()), "format": "table", }), - RefId: "A", + RefID: "A", }, }, } - resp, err := endpoint.Query(context.Background(), nil, query) + resp, err := exe.DataQuery(context.Background(), nil, query) require.NoError(t, err) queryResult := resp.Results["A"] require.NoError(t, queryResult.Error) @@ -1030,8 +1030,8 @@ func TestPostgres(t *testing.T) { t.Run("When doing an annotation query with a time column in epoch millisecond format should return ms", func(t *testing.T) { dt := time.Date(2018, 3, 14, 21, 20, 6, 527e6, time.UTC) - query := &tsdb.TsdbQuery{ - Queries: []*tsdb.Query{ + query := plugins.DataQuery{ + Queries: []plugins.DataSubQuery{ { Model: simplejson.NewFromAny(map[string]interface{}{ "rawSql": fmt.Sprintf(`SELECT @@ -1041,12 +1041,12 @@ func TestPostgres(t *testing.T) { `, dt.Unix()*1000), "format": "table", }), - RefId: "A", + RefID: "A", }, }, } - resp, err := endpoint.Query(context.Background(), nil, query) + resp, err := exe.DataQuery(context.Background(), nil, query) require.NoError(t, err) queryResult := resp.Results["A"] require.NoError(t, queryResult.Error) @@ -1058,8 +1058,8 @@ func TestPostgres(t *testing.T) { }) t.Run("When doing an annotation query with a time column holding a bigint null value should return nil", func(t *testing.T) { - query := &tsdb.TsdbQuery{ - Queries: []*tsdb.Query{ + query := plugins.DataQuery{ + Queries: []plugins.DataSubQuery{ { Model: simplejson.NewFromAny(map[string]interface{}{ "rawSql": `SELECT @@ -1069,12 +1069,12 @@ func TestPostgres(t *testing.T) { `, "format": "table", }), - RefId: "A", + RefID: "A", }, }, } - resp, err := endpoint.Query(context.Background(), nil, query) + resp, err := exe.DataQuery(context.Background(), nil, query) require.NoError(t, err) queryResult := resp.Results["A"] require.NoError(t, queryResult.Error) @@ -1086,8 +1086,8 @@ func TestPostgres(t *testing.T) { }) t.Run("When doing an annotation query with a time column holding a timestamp null value should return nil", func(t *testing.T) { - query := &tsdb.TsdbQuery{ - Queries: []*tsdb.Query{ + query := plugins.DataQuery{ + Queries: []plugins.DataSubQuery{ { Model: simplejson.NewFromAny(map[string]interface{}{ "rawSql": `SELECT @@ -1097,12 +1097,12 @@ func TestPostgres(t *testing.T) { `, "format": "table", }), - RefId: "A", + RefID: "A", }, }, } - resp, err := endpoint.Query(context.Background(), nil, query) + resp, err := exe.DataQuery(context.Background(), nil, query) require.NoError(t, err) queryResult := resp.Results["A"] require.NoError(t, queryResult.Error) diff --git a/pkg/tsdb/prometheus/prometheus.go b/pkg/tsdb/prometheus/prometheus.go index 1eb1da6e8cf..312d7f83e91 100644 --- a/pkg/tsdb/prometheus/prometheus.go +++ b/pkg/tsdb/prometheus/prometheus.go @@ -15,7 +15,8 @@ import ( "github.com/grafana/grafana/pkg/components/null" "github.com/grafana/grafana/pkg/infra/log" "github.com/grafana/grafana/pkg/models" - "github.com/grafana/grafana/pkg/tsdb" + "github.com/grafana/grafana/pkg/plugins" + "github.com/grafana/grafana/pkg/tsdb/interval" "github.com/prometheus/client_golang/api" apiv1 "github.com/prometheus/client_golang/api/prometheus/v1" "github.com/prometheus/common/model" @@ -23,6 +24,8 @@ import ( type PrometheusExecutor struct { Transport http.RoundTripper + + intervalCalculator interval.Calculator } type basicAuthTransport struct { @@ -37,28 +40,25 @@ func (bat basicAuthTransport) RoundTrip(req *http.Request) (*http.Response, erro return bat.Transport.RoundTrip(req) } -func NewPrometheusExecutor(dsInfo *models.DataSource) 
(tsdb.TsdbQueryEndpoint, error) { +func NewExecutor(dsInfo *models.DataSource) (plugins.DataPlugin, error) { transport, err := dsInfo.GetHttpTransport() if err != nil { return nil, err } return &PrometheusExecutor{ - Transport: transport, + Transport: transport, + intervalCalculator: interval.NewCalculator(interval.CalculatorOptions{MinInterval: time.Second * 1}), }, nil } var ( - plog log.Logger - legendFormat *regexp.Regexp - intervalCalculator tsdb.IntervalCalculator + plog log.Logger + legendFormat *regexp.Regexp = regexp.MustCompile(`\{\{\s*(.+?)\s*\}\}`) ) func init() { plog = log.New("tsdb.prometheus") - tsdb.RegisterTsdbQueryEndpoint("prometheus", NewPrometheusExecutor) - legendFormat = regexp.MustCompile(`\{\{\s*(.+?)\s*\}\}`) - intervalCalculator = tsdb.NewIntervalCalculator(&tsdb.IntervalOptions{MinInterval: time.Second * 1}) } func (e *PrometheusExecutor) getClient(dsInfo *models.DataSource) (apiv1.API, error) { @@ -83,19 +83,20 @@ func (e *PrometheusExecutor) getClient(dsInfo *models.DataSource) (apiv1.API, er return apiv1.NewAPI(client), nil } -func (e *PrometheusExecutor) Query(ctx context.Context, dsInfo *models.DataSource, tsdbQuery *tsdb.TsdbQuery) (*tsdb.Response, error) { - result := &tsdb.Response{ - Results: map[string]*tsdb.QueryResult{}, +func (e *PrometheusExecutor) DataQuery(ctx context.Context, dsInfo *models.DataSource, + tsdbQuery plugins.DataQuery) (plugins.DataResponse, error) { + result := plugins.DataResponse{ + Results: map[string]plugins.DataQueryResult{}, } client, err := e.getClient(dsInfo) if err != nil { - return nil, err + return result, err } - queries, err := parseQuery(dsInfo, tsdbQuery.Queries, tsdbQuery) + queries, err := e.parseQuery(dsInfo, tsdbQuery) if err != nil { - return nil, err + return result, err } for _, query := range queries { @@ -116,12 +117,12 @@ func (e *PrometheusExecutor) Query(ctx context.Context, dsInfo *models.DataSourc value, _, err := client.QueryRange(ctx, query.Expr, timeRange) if err != nil { - return nil, err + return result, err } queryResult, err := parseResponse(value, query) if err != nil { - return nil, err + return result, err } result.Results[query.RefId] = queryResult } @@ -147,9 +148,10 @@ func formatLegend(metric model.Metric, query *PrometheusQuery) string { return string(result) } -func parseQuery(dsInfo *models.DataSource, queries []*tsdb.Query, queryContext *tsdb.TsdbQuery) ([]*PrometheusQuery, error) { +func (e *PrometheusExecutor) parseQuery(dsInfo *models.DataSource, query plugins.DataQuery) ( + []*PrometheusQuery, error) { qs := []*PrometheusQuery{} - for _, queryModel := range queries { + for _, queryModel := range query.Queries { expr, err := queryModel.Model.Get("expr").String() if err != nil { return nil, err @@ -157,23 +159,23 @@ func parseQuery(dsInfo *models.DataSource, queries []*tsdb.Query, queryContext * format := queryModel.Model.Get("legendFormat").MustString("") - start, err := queryContext.TimeRange.ParseFrom() + start, err := query.TimeRange.ParseFrom() if err != nil { return nil, err } - end, err := queryContext.TimeRange.ParseTo() + end, err := query.TimeRange.ParseTo() if err != nil { return nil, err } - dsInterval, err := tsdb.GetIntervalFrom(dsInfo, queryModel.Model, time.Second*15) + dsInterval, err := interval.GetIntervalFrom(dsInfo, queryModel.Model, time.Second*15) if err != nil { return nil, err } intervalFactor := queryModel.Model.Get("intervalFactor").MustInt64(1) - interval := intervalCalculator.Calculate(queryContext.TimeRange, dsInterval) + interval := 
e.intervalCalculator.Calculate(*query.TimeRange, dsInterval) step := time.Duration(int64(interval.Value) * intervalFactor) qs = append(qs, &PrometheusQuery{ @@ -182,15 +184,15 @@ func parseQuery(dsInfo *models.DataSource, queries []*tsdb.Query, queryContext * LegendFormat: format, Start: start, End: end, - RefId: queryModel.RefId, + RefId: queryModel.RefID, }) } return qs, nil } -func parseResponse(value model.Value, query *PrometheusQuery) (*tsdb.QueryResult, error) { - queryRes := tsdb.NewQueryResult() +func parseResponse(value model.Value, query *PrometheusQuery) (plugins.DataQueryResult, error) { + var queryRes plugins.DataQueryResult data, ok := value.(model.Matrix) if !ok { @@ -198,10 +200,10 @@ func parseResponse(value model.Value, query *PrometheusQuery) (*tsdb.QueryResult } for _, v := range data { - series := tsdb.TimeSeries{ + series := plugins.DataTimeSeries{ Name: formatLegend(v.Metric, query), Tags: make(map[string]string, len(v.Metric)), - Points: make([]tsdb.TimePoint, 0, len(v.Values)), + Points: make([]plugins.DataTimePoint, 0, len(v.Values)), } for k, v := range v.Metric { @@ -209,10 +211,13 @@ func parseResponse(value model.Value, query *PrometheusQuery) (*tsdb.QueryResult } for _, k := range v.Values { - series.Points = append(series.Points, tsdb.NewTimePoint(null.FloatFrom(float64(k.Value)), float64(k.Timestamp.Unix()*1000))) + series.Points = append(series.Points, plugins.DataTimePoint{ + null.FloatFrom(float64(k.Value)), + null.FloatFrom(float64(k.Timestamp.Unix() * 1000)), + }) } - queryRes.Series = append(queryRes.Series, &series) + queryRes.Series = append(queryRes.Series, series) } return queryRes, nil diff --git a/pkg/tsdb/prometheus/prometheus_test.go b/pkg/tsdb/prometheus/prometheus_test.go index 43338258781..b15edc9dcbb 100644 --- a/pkg/tsdb/prometheus/prometheus_test.go +++ b/pkg/tsdb/prometheus/prometheus_test.go @@ -6,7 +6,7 @@ import ( "github.com/grafana/grafana/pkg/components/simplejson" "github.com/grafana/grafana/pkg/models" - "github.com/grafana/grafana/pkg/tsdb" + "github.com/grafana/grafana/pkg/plugins" p "github.com/prometheus/common/model" "github.com/stretchr/testify/require" ) @@ -15,6 +15,9 @@ func TestPrometheus(t *testing.T) { dsInfo := &models.DataSource{ JsonData: simplejson.New(), } + plug, err := NewExecutor(dsInfo) + executor := plug.(*PrometheusExecutor) + require.NoError(t, err) t.Run("converting metric name", func(t *testing.T) { metric := map[p.LabelName]p.LabelValue{ @@ -50,14 +53,17 @@ func TestPrometheus(t *testing.T) { "refId": "A" }` jsonModel, _ := simplejson.NewJson([]byte(json)) - queryContext := &tsdb.TsdbQuery{} - queryModels := []*tsdb.Query{ + queryModels := []plugins.DataSubQuery{ {Model: jsonModel}, } - queryContext.TimeRange = tsdb.NewTimeRange("12h", "now") + timeRange := plugins.NewDataTimeRange("12h", "now") + queryContext := plugins.DataQuery{ + Queries: queryModels, + TimeRange: &timeRange, + } - models, err := parseQuery(dsInfo, queryModels, queryContext) + models, err := executor.parseQuery(dsInfo, queryContext) require.NoError(t, err) require.Equal(t, time.Second*30, models[0].Step) }) @@ -70,18 +76,22 @@ func TestPrometheus(t *testing.T) { "refId": "A" }` jsonModel, _ := simplejson.NewJson([]byte(json)) - queryContext := &tsdb.TsdbQuery{} - queryModels := []*tsdb.Query{ + queryModels := []plugins.DataSubQuery{ {Model: jsonModel}, } - queryContext.TimeRange = tsdb.NewTimeRange("48h", "now") - models, err := parseQuery(dsInfo, queryModels, queryContext) + timeRange := plugins.NewDataTimeRange("48h", "now") + 
queryContext := plugins.DataQuery{ + Queries: queryModels, + TimeRange: &timeRange, + } + models, err := executor.parseQuery(dsInfo, queryContext) require.NoError(t, err) require.Equal(t, time.Minute*2, models[0].Step) - queryContext.TimeRange = tsdb.NewTimeRange("1h", "now") - models, err = parseQuery(dsInfo, queryModels, queryContext) + timeRange = plugins.NewDataTimeRange("1h", "now") + queryContext.TimeRange = &timeRange + models, err = executor.parseQuery(dsInfo, queryContext) require.NoError(t, err) require.Equal(t, time.Second*15, models[0].Step) }) @@ -94,14 +104,17 @@ func TestPrometheus(t *testing.T) { "refId": "A" }` jsonModel, _ := simplejson.NewJson([]byte(json)) - queryContext := &tsdb.TsdbQuery{} - queryModels := []*tsdb.Query{ + queryModels := []plugins.DataSubQuery{ {Model: jsonModel}, } - queryContext.TimeRange = tsdb.NewTimeRange("48h", "now") + timeRange := plugins.NewDataTimeRange("48h", "now") + queryContext := plugins.DataQuery{ + TimeRange: &timeRange, + Queries: queryModels, + } - models, err := parseQuery(dsInfo, queryModels, queryContext) + models, err := executor.parseQuery(dsInfo, queryContext) require.NoError(t, err) require.Equal(t, time.Minute*20, models[0].Step) }) @@ -114,14 +127,17 @@ func TestPrometheus(t *testing.T) { "refId": "A" }` jsonModel, _ := simplejson.NewJson([]byte(json)) - queryContext := &tsdb.TsdbQuery{} - queryModels := []*tsdb.Query{ + queryModels := []plugins.DataSubQuery{ {Model: jsonModel}, } - queryContext.TimeRange = tsdb.NewTimeRange("48h", "now") + timeRange := plugins.NewDataTimeRange("48h", "now") + queryContext := plugins.DataQuery{ + TimeRange: &timeRange, + Queries: queryModels, + } - models, err := parseQuery(dsInfo, queryModels, queryContext) + models, err := executor.parseQuery(dsInfo, queryContext) require.NoError(t, err) require.Equal(t, time.Minute*2, models[0].Step) }) diff --git a/pkg/tsdb/query_endpoint.go b/pkg/tsdb/query_endpoint.go deleted file mode 100644 index 856b7066832..00000000000 --- a/pkg/tsdb/query_endpoint.go +++ /dev/null @@ -1,23 +0,0 @@ -package tsdb - -import ( - "context" - - "github.com/grafana/grafana/pkg/models" -) - -type TsdbQueryEndpoint interface { - Query(ctx context.Context, ds *models.DataSource, query *TsdbQuery) (*Response, error) -} - -var registry map[string]GetTsdbQueryEndpointFn - -type GetTsdbQueryEndpointFn func(dsInfo *models.DataSource) (TsdbQueryEndpoint, error) - -func init() { - registry = make(map[string]GetTsdbQueryEndpointFn) -} - -func RegisterTsdbQueryEndpoint(pluginId string, fn GetTsdbQueryEndpointFn) { - registry[pluginId] = fn -} diff --git a/pkg/tsdb/request.go b/pkg/tsdb/request.go deleted file mode 100644 index 6d751374174..00000000000 --- a/pkg/tsdb/request.go +++ /dev/null @@ -1,26 +0,0 @@ -package tsdb - -import ( - "context" - "fmt" - - "github.com/grafana/grafana/pkg/models" -) - -type HandleRequestFunc func(ctx context.Context, dsInfo *models.DataSource, req *TsdbQuery) (*Response, error) - -func HandleRequest(ctx context.Context, dsInfo *models.DataSource, req *TsdbQuery) (*Response, error) { - var endpoint TsdbQueryEndpoint - fn, exists := registry[dsInfo.Type] - if !exists { - return nil, fmt.Errorf("could not find executor for data source type: %s", dsInfo.Type) - } - - var err error - endpoint, err = fn(dsInfo) - if err != nil { - return nil, err - } - - return endpoint.Query(ctx, dsInfo, req) -} diff --git a/pkg/tsdb/request_test.go b/pkg/tsdb/request_test.go index 97a5895347e..67ab0b6e377 100644 --- a/pkg/tsdb/request_test.go +++ 
b/pkg/tsdb/request_test.go @@ -5,39 +5,42 @@ import ( "testing" "github.com/grafana/grafana/pkg/models" + "github.com/grafana/grafana/pkg/plugins" + "github.com/grafana/grafana/pkg/plugins/backendplugin" + "github.com/grafana/grafana/pkg/plugins/manager" "github.com/stretchr/testify/require" ) func TestHandleRequest(t *testing.T) { t.Run("Should return query result when handling request for query", func(t *testing.T) { - req := &TsdbQuery{ - Queries: []*Query{ - {RefId: "A", DataSource: &models.DataSource{Id: 1, Type: "test"}}, + req := plugins.DataQuery{ + Queries: []plugins.DataSubQuery{ + {RefID: "A", DataSource: &models.DataSource{Id: 1, Type: "test"}}, }, } - fakeExecutor := registerFakeExecutor() - fakeExecutor.Return("A", TimeSeriesSlice{&TimeSeries{Name: "argh"}}) + svc, exe := createService() + exe.Return("A", plugins.DataTimeSeriesSlice{plugins.DataTimeSeries{Name: "argh"}}) - res, err := HandleRequest(context.TODO(), &models.DataSource{Id: 1, Type: "test"}, req) + res, err := svc.HandleRequest(context.TODO(), &models.DataSource{Id: 1, Type: "test"}, req) require.NoError(t, err) require.NotEmpty(t, res.Results["A"].Series) require.Equal(t, "argh", res.Results["A"].Series[0].Name) }) t.Run("Should return query results when handling request for two queries with same data source", func(t *testing.T) { - req := &TsdbQuery{ - Queries: []*Query{ - {RefId: "A", DataSource: &models.DataSource{Id: 1, Type: "test"}}, - {RefId: "B", DataSource: &models.DataSource{Id: 1, Type: "test"}}, + req := plugins.DataQuery{ + Queries: []plugins.DataSubQuery{ + {RefID: "A", DataSource: &models.DataSource{Id: 1, Type: "test"}}, + {RefID: "B", DataSource: &models.DataSource{Id: 1, Type: "test"}}, }, } - fakeExecutor := registerFakeExecutor() - fakeExecutor.Return("A", TimeSeriesSlice{&TimeSeries{Name: "argh"}}) - fakeExecutor.Return("B", TimeSeriesSlice{&TimeSeries{Name: "barg"}}) + svc, exe := createService() + exe.Return("A", plugins.DataTimeSeriesSlice{plugins.DataTimeSeries{Name: "argh"}}) + exe.Return("B", plugins.DataTimeSeriesSlice{plugins.DataTimeSeries{Name: "barg"}}) - res, err := HandleRequest(context.TODO(), &models.DataSource{Id: 1, Type: "test"}, req) + res, err := svc.HandleRequest(context.TODO(), &models.DataSource{Id: 1, Type: "test"}, req) require.NoError(t, err) require.Len(t, res.Results, 2) @@ -46,22 +49,70 @@ func TestHandleRequest(t *testing.T) { }) t.Run("Should return error when handling request for query with unknown type", func(t *testing.T) { - req := &TsdbQuery{ - Queries: []*Query{ - {RefId: "A", DataSource: &models.DataSource{Id: 1, Type: "asdasdas"}}, + svc, _ := createService() + + req := plugins.DataQuery{ + Queries: []plugins.DataSubQuery{ + {RefID: "A", DataSource: &models.DataSource{Id: 1, Type: "asdasdas"}}, }, } - - _, err := HandleRequest(context.TODO(), &models.DataSource{Id: 12, Type: "testjughjgjg"}, req) + _, err := svc.HandleRequest(context.TODO(), &models.DataSource{Id: 12, Type: "testjughjgjg"}, req) require.Error(t, err) }) } -func registerFakeExecutor() *FakeExecutor { - executor, _ := NewFakeExecutor(nil) - RegisterTsdbQueryEndpoint("test", func(dsInfo *models.DataSource) (TsdbQueryEndpoint, error) { - return executor, nil - }) +type resultsFn func(context plugins.DataQuery) plugins.DataQueryResult - return executor +type fakeExecutor struct { + results map[string]plugins.DataQueryResult + resultsFn map[string]resultsFn +} + +func (e *fakeExecutor) DataQuery(ctx context.Context, dsInfo *models.DataSource, context plugins.DataQuery) ( + 
plugins.DataResponse, error) {
+	result := plugins.DataResponse{Results: make(map[string]plugins.DataQueryResult)}
+	for _, query := range context.Queries {
+		if results, has := e.results[query.RefID]; has {
+			result.Results[query.RefID] = results
+		}
+		if testFunc, has := e.resultsFn[query.RefID]; has {
+			result.Results[query.RefID] = testFunc(context)
+		}
+	}
+
+	return result, nil
+}
+
+func (e *fakeExecutor) Return(refID string, series plugins.DataTimeSeriesSlice) {
+	e.results[refID] = plugins.DataQueryResult{
+		RefID: refID, Series: series,
+	}
+}
+
+func (e *fakeExecutor) HandleQuery(refId string, fn resultsFn) {
+	e.resultsFn[refId] = fn
+}
+
+type fakeBackendPM struct {
+	backendplugin.Manager
+}
+
+func (pm fakeBackendPM) GetDataPlugin(string) interface{} {
+	return nil
+}
+
+func createService() (Service, *fakeExecutor) {
+	s := NewService()
+	s.PluginManager = &manager.PluginManager{
+		BackendPluginManager: fakeBackendPM{},
+	}
+	e := &fakeExecutor{
+		results:   make(map[string]plugins.DataQueryResult),
+		resultsFn: make(map[string]resultsFn),
+	}
+	s.registry["test"] = func(*models.DataSource) (plugins.DataPlugin, error) {
+		return e, nil
+	}
+
+	return s, e
+}
diff --git a/pkg/tsdb/service.go b/pkg/tsdb/service.go
new file mode 100644
index 00000000000..4195def1fd3
--- /dev/null
+++ b/pkg/tsdb/service.go
@@ -0,0 +1,97 @@
+package tsdb
+
+import (
+	"context"
+	"fmt"
+
+	"github.com/grafana/grafana/pkg/models"
+	"github.com/grafana/grafana/pkg/plugins"
+	"github.com/grafana/grafana/pkg/plugins/manager"
+	"github.com/grafana/grafana/pkg/registry"
+	"github.com/grafana/grafana/pkg/setting"
+	"github.com/grafana/grafana/pkg/tsdb/azuremonitor"
+	"github.com/grafana/grafana/pkg/tsdb/cloudmonitoring"
+	"github.com/grafana/grafana/pkg/tsdb/cloudwatch"
+	"github.com/grafana/grafana/pkg/tsdb/elasticsearch"
+	"github.com/grafana/grafana/pkg/tsdb/graphite"
+	"github.com/grafana/grafana/pkg/tsdb/influxdb"
+	"github.com/grafana/grafana/pkg/tsdb/loki"
+	"github.com/grafana/grafana/pkg/tsdb/mssql"
+	"github.com/grafana/grafana/pkg/tsdb/mysql"
+	"github.com/grafana/grafana/pkg/tsdb/opentsdb"
+	"github.com/grafana/grafana/pkg/tsdb/postgres"
+	"github.com/grafana/grafana/pkg/tsdb/prometheus"
+	"github.com/grafana/grafana/pkg/tsdb/tempo"
+)
+
+// NewService returns a new Service.
+func NewService() Service {
+	return Service{
+		registry: map[string]func(*models.DataSource) (plugins.DataPlugin, error){},
+	}
+}
+
+func init() {
+	svc := NewService()
+	registry.Register(&registry.Descriptor{
+		Name:     "DataService",
+		Instance: &svc,
+	})
+}
+
+// Service handles data requests to data sources.
+type Service struct {
+	Cfg                    *setting.Cfg                  `inject:""`
+	CloudWatchService      *cloudwatch.CloudWatchService `inject:""`
+	PostgresService        *postgres.PostgresService     `inject:""`
+	CloudMonitoringService *cloudmonitoring.Service      `inject:""`
+	AzureMonitorService    *azuremonitor.Service         `inject:""`
+	PluginManager          *manager.PluginManager        `inject:""`
+
+	registry map[string]func(*models.DataSource) (plugins.DataPlugin, error)
+}
+
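Worth pausing on the struct above: stateful executors (SQL, cloud data sources) arrive through Grafana's inject-based registry, while stateless ones are plain factory functions registered in Init below, and HandleRequest resolves a data source type first against backend plugins and only then against this registry. A sketch of hooking a custom type in through the test-only RegisterQueryHandler hook; echoPlugin and runEcho are illustrative names, the DataPlugin interface is assumed to be exactly the DataQuery method shown in this diff, and svc is assumed to be fully wired (PluginManager populated, as in createService above):

package tsdb_test

import (
	"context"

	"github.com/grafana/grafana/pkg/models"
	"github.com/grafana/grafana/pkg/plugins"
	"github.com/grafana/grafana/pkg/tsdb"
)

// echoPlugin is a stand-in plugins.DataPlugin that answers every sub-query
// with an empty result carrying the matching RefID.
type echoPlugin struct{}

func (echoPlugin) DataQuery(_ context.Context, _ *models.DataSource,
	query plugins.DataQuery) (plugins.DataResponse, error) {
	res := plugins.DataResponse{Results: map[string]plugins.DataQueryResult{}}
	for _, q := range query.Queries {
		res.Results[q.RefID] = plugins.DataQueryResult{RefID: q.RefID}
	}
	return res, nil
}

// runEcho registers a factory for a hypothetical "echo" data source type and
// dispatches one query through HandleRequest, which falls back to the registry
// when no backend plugin claims the type.
func runEcho(ctx context.Context, svc *tsdb.Service) error {
	svc.RegisterQueryHandler("echo", func(*models.DataSource) (plugins.DataPlugin, error) {
		return echoPlugin{}, nil
	})
	_, err := svc.HandleRequest(ctx, &models.DataSource{Type: "echo"},
		plugins.DataQuery{Queries: []plugins.DataSubQuery{{RefID: "A"}}})
	return err
}

+// Init initialises the service.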
+func (s *Service) Init() error { + s.registry["graphite"] = graphite.NewExecutor + s.registry["opentsdb"] = opentsdb.NewExecutor + s.registry["prometheus"] = prometheus.NewExecutor + s.registry["influxdb"] = influxdb.NewExecutor + s.registry["mssql"] = mssql.NewExecutor + s.registry["postgres"] = s.PostgresService.NewExecutor + s.registry["mysql"] = mysql.NewExecutor + s.registry["elasticsearch"] = elasticsearch.NewExecutor + s.registry["cloudwatch"] = s.CloudWatchService.NewExecutor + s.registry["stackdriver"] = s.CloudMonitoringService.NewExecutor + s.registry["grafana-azure-monitor-datasource"] = s.AzureMonitorService.NewExecutor + s.registry["loki"] = loki.NewExecutor + s.registry["tempo"] = tempo.NewExecutor + return nil +} + +func (s *Service) HandleRequest(ctx context.Context, ds *models.DataSource, query plugins.DataQuery) ( + plugins.DataResponse, error) { + plugin := s.PluginManager.GetDataPlugin(ds.Type) + if plugin == nil { + factory, exists := s.registry[ds.Type] + if !exists { + return plugins.DataResponse{}, fmt.Errorf( + "could not find plugin corresponding to data source type: %q", ds.Type) + } + + var err error + plugin, err = factory(ds) + if err != nil { + return plugins.DataResponse{}, fmt.Errorf("could not instantiate endpoint for data plugin %q: %w", + ds.Type, err) + } + } + + return plugin.DataQuery(ctx, ds, query) +} + +// RegisterQueryHandler registers a query handler factory. +// This is only exposed for tests! +func (s *Service) RegisterQueryHandler(name string, factory func(*models.DataSource) (plugins.DataPlugin, error)) { + s.registry[name] = factory +} diff --git a/pkg/tsdb/sqleng/sql_engine.go b/pkg/tsdb/sqleng/sql_engine.go index 7141e6f88a6..386b9ef7e92 100644 --- a/pkg/tsdb/sqleng/sql_engine.go +++ b/pkg/tsdb/sqleng/sql_engine.go @@ -12,10 +12,11 @@ import ( "sync" "time" + "github.com/grafana/grafana/pkg/plugins" "github.com/grafana/grafana/pkg/setting" + "github.com/grafana/grafana/pkg/tsdb/interval" "github.com/grafana/grafana/pkg/infra/log" - "github.com/grafana/grafana/pkg/tsdb" "github.com/grafana/grafana/pkg/components/null" @@ -28,16 +29,16 @@ import ( // MetaKeyExecutedQueryString is the key where the executed query should get stored const MetaKeyExecutedQueryString = "executedQueryString" -// SqlMacroEngine interpolates macros into sql. It takes in the Query to have access to query context and +// SQLMacroEngine interpolates macros into sql. It takes in the Query to have access to query context and // timeRange to be able to generate queries that use from and to. -type SqlMacroEngine interface { - Interpolate(query *tsdb.Query, timeRange *tsdb.TimeRange, sql string) (string, error) +type SQLMacroEngine interface { + Interpolate(query plugins.DataSubQuery, timeRange plugins.DataTimeRange, sql string) (string, error) } // SqlQueryResultTransformer transforms a query result row to RowValues with proper types. type SqlQueryResultTransformer interface { // TransformQueryResult transforms a query result row to RowValues with proper types. - TransformQueryResult(columnTypes []*sql.ColumnType, rows *core.Rows) (tsdb.RowValues, error) + TransformQueryResult(columnTypes []*sql.ColumnType, rows *core.Rows) (plugins.DataRowValues, error) // TransformQueryError transforms a query error. 
TransformQueryError(err error) error } @@ -53,7 +54,7 @@ var engineCache = engineCacheType{ versions: make(map[int64]int), } -var sqlIntervalCalculator = tsdb.NewIntervalCalculator(nil) +var sqlIntervalCalculator = interval.NewCalculator() // NewXormEngine is an xorm.Engine factory, that can be stubbed by tests. //nolint:gocritic @@ -63,8 +64,8 @@ var NewXormEngine = func(driverName string, connectionString string) (*xorm.Engi const timeEndColumnName = "timeend" -type sqlQueryEndpoint struct { - macroEngine SqlMacroEngine +type dataPlugin struct { + macroEngine SQLMacroEngine queryResultTransformer SqlQueryResultTransformer engine *xorm.Engine timeColumnNames []string @@ -72,7 +73,7 @@ type sqlQueryEndpoint struct { log log.Logger } -type SqlQueryEndpointConfiguration struct { +type DataPluginConfiguration struct { DriverName string Datasource *models.DataSource ConnectionString string @@ -80,8 +81,10 @@ type SqlQueryEndpointConfiguration struct { MetricColumnTypes []string } -var NewSqlQueryEndpoint = func(config *SqlQueryEndpointConfiguration, queryResultTransformer SqlQueryResultTransformer, macroEngine SqlMacroEngine, log log.Logger) (tsdb.TsdbQueryEndpoint, error) { - queryEndpoint := sqlQueryEndpoint{ +// NewDataPlugin returns a new plugins.DataPlugin +func NewDataPlugin(config DataPluginConfiguration, queryResultTransformer SqlQueryResultTransformer, + macroEngine SQLMacroEngine, log log.Logger) (plugins.DataPlugin, error) { + plugin := dataPlugin{ queryResultTransformer: queryResultTransformer, macroEngine: macroEngine, timeColumnNames: []string{"time"}, @@ -89,11 +92,11 @@ var NewSqlQueryEndpoint = func(config *SqlQueryEndpointConfiguration, queryResul } if len(config.TimeColumnNames) > 0 { - queryEndpoint.timeColumnNames = config.TimeColumnNames + plugin.timeColumnNames = config.TimeColumnNames } if len(config.MetricColumnTypes) > 0 { - queryEndpoint.metricColumnTypes = config.MetricColumnTypes + plugin.metricColumnTypes = config.MetricColumnTypes } engineCache.Lock() @@ -101,8 +104,8 @@ var NewSqlQueryEndpoint = func(config *SqlQueryEndpointConfiguration, queryResul if engine, present := engineCache.cache[config.Datasource.Id]; present { if version := engineCache.versions[config.Datasource.Id]; version == config.Datasource.Version { - queryEndpoint.engine = engine - return &queryEndpoint, nil + plugin.engine = engine + return &plugin, nil } } @@ -120,50 +123,61 @@ var NewSqlQueryEndpoint = func(config *SqlQueryEndpointConfiguration, queryResul engineCache.versions[config.Datasource.Id] = config.Datasource.Version engineCache.cache[config.Datasource.Id] = engine - queryEndpoint.engine = engine + plugin.engine = engine - return &queryEndpoint, nil + return &plugin, nil } const rowLimit = 1000000 // Query is the main function for the SqlQueryEndpoint -func (e *sqlQueryEndpoint) Query(ctx context.Context, dsInfo *models.DataSource, tsdbQuery *tsdb.TsdbQuery) (*tsdb.Response, error) { - result := &tsdb.Response{ - Results: make(map[string]*tsdb.QueryResult), +func (e *dataPlugin) DataQuery(ctx context.Context, dsInfo *models.DataSource, + queryContext plugins.DataQuery) (plugins.DataResponse, error) { + var timeRange plugins.DataTimeRange + if queryContext.TimeRange != nil { + timeRange = *queryContext.TimeRange } - + ch := make(chan plugins.DataQueryResult, len(queryContext.Queries)) var wg sync.WaitGroup - - for _, query := range tsdbQuery.Queries { - rawSQL := query.Model.Get("rawSql").MustString() - if rawSQL == "" { + // Execute each query in a goroutine and wait for them to 
finish afterwards
+	for _, query := range queryContext.Queries {
+		if query.Model.Get("rawSql").MustString() == "" {
 			continue
 		}
 
-		queryResult := &tsdb.QueryResult{Meta: simplejson.New(), RefId: query.RefId}
-		result.Results[query.RefId] = queryResult
-
-		// global substitutions
-		rawSQL, err := Interpolate(query, tsdbQuery.TimeRange, rawSQL)
-		if err != nil {
-			queryResult.Error = err
-			continue
-		}
-
-		// datasource specific substitutions
-		rawSQL, err = e.macroEngine.Interpolate(query, tsdbQuery.TimeRange, rawSQL)
-		if err != nil {
-			queryResult.Error = err
-			continue
-		}
-
-		queryResult.Meta.Set(MetaKeyExecutedQueryString, rawSQL)
-
 		wg.Add(1)
-		go func(rawSQL string, query *tsdb.Query, queryResult *tsdb.QueryResult) {
+		go func(query plugins.DataSubQuery) {
 			defer wg.Done()
+
+			queryResult := plugins.DataQueryResult{
+				Meta:  simplejson.New(),
+				RefID: query.RefID,
+			}
+
+			rawSQL := query.Model.Get("rawSql").MustString()
+			if rawSQL == "" {
+				panic("Query model property rawSql should not be empty at this point")
+			}
+
+			// global substitutions
+			rawSQL, err := Interpolate(query, timeRange, rawSQL)
+			if err != nil {
+				queryResult.Error = err
+				ch <- queryResult
+				return
+			}
+
+			// datasource specific substitutions
+			rawSQL, err = e.macroEngine.Interpolate(query, timeRange, rawSQL)
+			if err != nil {
+				queryResult.Error = err
+				ch <- queryResult
+				return
+			}
+
+			queryResult.Meta.Set(MetaKeyExecutedQueryString, rawSQL)
+
 			session := e.engine.NewSession()
 			defer session.Close()
 			db := session.DB()
@@ -183,28 +197,40 @@ func (e *sqlQueryEndpoint) Query(ctx context.Context, dsInfo *models.DataSource,
 			switch format {
 			case "time_series":
-				err := e.transformToTimeSeries(query, rows, queryResult, tsdbQuery)
+				err := e.transformToTimeSeries(query, rows, &queryResult, queryContext)
 				if err != nil {
 					queryResult.Error = err
 					return
 				}
 			case "table":
-				err := e.transformToTable(query, rows, queryResult, tsdbQuery)
+				err := e.transformToTable(query, rows, &queryResult, queryContext)
 				if err != nil {
 					queryResult.Error = err
 					return
 				}
 			}
-		}(rawSQL, query, queryResult)
+
+			ch <- queryResult
+		}(query)
 	}
+
 	wg.Wait()
+
+	// Read results from channels
+	close(ch)
+	result := plugins.DataResponse{
+		Results: make(map[string]plugins.DataQueryResult),
+	}
+	for queryResult := range ch {
+		result.Results[queryResult.RefID] = queryResult
+	}
+
 	return result, nil
 }
 
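The DataQuery rewrite above replaces the old shared-pointer bookkeeping with a fan-out/fan-in: each sub-query's goroutine owns its DataQueryResult and sends it over a channel buffered to len(queryContext.Queries), so no send can block, and wg.Wait followed by close(ch) lets the collection loop drain deterministically. The same pattern in isolation (a sketch; fanOut and result are illustrative names, not part of the change):

package example

import "sync"

type result struct {
	refID string
	err   error
}

// fanOut runs one worker per query and gathers results the way the engine does:
// the buffered channel guarantees goroutines never block on send, and wg.Wait
// plus close(ch) makes the final range loop terminate once all sends are done.
func fanOut(refIDs []string, work func(string) error) map[string]error {
	ch := make(chan result, len(refIDs))
	var wg sync.WaitGroup
	for _, id := range refIDs {
		wg.Add(1)
		go func(id string) {
			defer wg.Done()
			ch <- result{refID: id, err: work(id)}
		}(id)
	}
	wg.Wait()
	close(ch)

	out := make(map[string]error, len(refIDs))
	for r := range ch {
		out[r.refID] = r.err
	}
	return out
}

 // Interpolate provides global macros/substitutions for all sql datasources.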
-var Interpolate = func(query *tsdb.Query, timeRange *tsdb.TimeRange, sql string) (string, error) { - minInterval, err := tsdb.GetIntervalFrom(query.DataSource, query.Model, time.Second*60) +var Interpolate = func(query plugins.DataSubQuery, timeRange plugins.DataTimeRange, sql string) (string, error) { + minInterval, err := interval.GetIntervalFrom(query.DataSource, query.Model, time.Second*60) if err != nil { return sql, nil } @@ -218,21 +244,22 @@ var Interpolate = func(query *tsdb.Query, timeRange *tsdb.TimeRange, sql string) return sql, nil } -func (e *sqlQueryEndpoint) transformToTable(query *tsdb.Query, rows *core.Rows, result *tsdb.QueryResult, tsdbQuery *tsdb.TsdbQuery) error { +func (e *dataPlugin) transformToTable(query plugins.DataSubQuery, rows *core.Rows, + result *plugins.DataQueryResult, queryContext plugins.DataQuery) error { columnNames, err := rows.Columns() - columnCount := len(columnNames) - if err != nil { return err } + columnCount := len(columnNames) + rowCount := 0 timeIndex := -1 timeEndIndex := -1 - table := &tsdb.Table{ - Columns: make([]tsdb.TableColumn, columnCount), - Rows: make([]tsdb.RowValues, 0), + table := plugins.DataTable{ + Columns: make([]plugins.DataTableColumn, columnCount), + Rows: make([]plugins.DataRowValues, 0), } for i, name := range columnNames { @@ -279,7 +306,7 @@ func (e *sqlQueryEndpoint) transformToTable(query *tsdb.Query, rows *core.Rows, return nil } -func newProcessCfg(query *tsdb.Query, tsdbQuery *tsdb.TsdbQuery, rows *core.Rows) (*processCfg, error) { +func newProcessCfg(query plugins.DataSubQuery, queryContext plugins.DataQuery, rows *core.Rows) (*processCfg, error) { columnNames, err := rows.Columns() if err != nil { return nil, err @@ -301,15 +328,15 @@ func newProcessCfg(query *tsdb.Query, tsdbQuery *tsdb.TsdbQuery, rows *core.Rows metricPrefix: false, fillMissing: fillMissing, seriesByQueryOrder: list.New(), - pointsBySeries: make(map[string]*tsdb.TimeSeries), - tsdbQuery: tsdbQuery, + pointsBySeries: make(map[string]*plugins.DataTimeSeries), + queryContext: queryContext, } return cfg, nil } -func (e *sqlQueryEndpoint) transformToTimeSeries(query *tsdb.Query, rows *core.Rows, result *tsdb.QueryResult, - tsdbQuery *tsdb.TsdbQuery) error { - cfg, err := newProcessCfg(query, tsdbQuery, rows) +func (e *dataPlugin) transformToTimeSeries(query plugins.DataSubQuery, rows *core.Rows, + result *plugins.DataQueryResult, queryContext plugins.DataQuery) error { + cfg, err := newProcessCfg(query, queryContext, rows) if err != nil { return err } @@ -369,15 +396,15 @@ func (e *sqlQueryEndpoint) transformToTimeSeries(query *tsdb.Query, rows *core.R for elem := cfg.seriesByQueryOrder.Front(); elem != nil; elem = elem.Next() { key := elem.Value.(string) - result.Series = append(result.Series, cfg.pointsBySeries[key]) if !cfg.fillMissing { + result.Series = append(result.Series, *cfg.pointsBySeries[key]) continue } series := cfg.pointsBySeries[key] // fill in values from last fetched value till interval end intervalStart := series.Points[len(series.Points)-1][1].Float64 - intervalEnd := float64(tsdbQuery.TimeRange.MustGetTo().UnixNano() / 1e6) + intervalEnd := float64(queryContext.TimeRange.MustGetTo().UnixNano() / 1e6) if cfg.fillPrevious { if len(series.Points) > 0 { @@ -390,9 +417,11 @@ func (e *sqlQueryEndpoint) transformToTimeSeries(query *tsdb.Query, rows *core.R // align interval start intervalStart = math.Floor(intervalStart/cfg.fillInterval) * cfg.fillInterval for i := intervalStart + cfg.fillInterval; i < intervalEnd; i += 
cfg.fillInterval { - series.Points = append(series.Points, tsdb.TimePoint{cfg.fillValue, null.FloatFrom(i)}) + series.Points = append(series.Points, plugins.DataTimePoint{cfg.fillValue, null.FloatFrom(i)}) cfg.rowCount++ } + + result.Series = append(result.Series, *series) } result.Meta.Set("rowCount", cfg.rowCount) @@ -409,15 +438,15 @@ type processCfg struct { metricPrefix bool metricPrefixValue string fillMissing bool - pointsBySeries map[string]*tsdb.TimeSeries + pointsBySeries map[string]*plugins.DataTimeSeries seriesByQueryOrder *list.List fillValue null.Float - tsdbQuery *tsdb.TsdbQuery + queryContext plugins.DataQuery fillInterval float64 fillPrevious bool } -func (e *sqlQueryEndpoint) processRow(cfg *processCfg) error { +func (e *dataPlugin) processRow(cfg *processCfg) error { var timestamp float64 var value null.Float var metric string @@ -447,17 +476,18 @@ func (e *sqlQueryEndpoint) processRow(cfg *processCfg) error { } if cfg.metricIndex >= 0 { - if columnValue, ok := values[cfg.metricIndex].(string); ok { - if cfg.metricPrefix { - cfg.metricPrefixValue = columnValue - } else { - metric = columnValue - } - } else { + columnValue, ok := values[cfg.metricIndex].(string) + if !ok { return fmt.Errorf("column metric must be of type %s. metric column name: %s type: %s but datatype is %T", strings.Join(e.metricColumnTypes, ", "), cfg.columnNames[cfg.metricIndex], cfg.columnTypes[cfg.metricIndex].DatabaseTypeName(), values[cfg.metricIndex]) } + + if cfg.metricPrefix { + cfg.metricPrefixValue = columnValue + } else { + metric = columnValue + } } for i, col := range cfg.columnNames { @@ -475,17 +505,17 @@ func (e *sqlQueryEndpoint) processRow(cfg *processCfg) error { metric = cfg.metricPrefixValue + " " + col } - series, exist := cfg.pointsBySeries[metric] - if !exist { - series = &tsdb.TimeSeries{Name: metric} + series, exists := cfg.pointsBySeries[metric] + if !exists { + series = &plugins.DataTimeSeries{Name: metric} cfg.pointsBySeries[metric] = series cfg.seriesByQueryOrder.PushBack(metric) } if cfg.fillMissing { var intervalStart float64 - if !exist { - intervalStart = float64(cfg.tsdbQuery.TimeRange.MustGetFrom().UnixNano() / 1e6) + if !exists { + intervalStart = float64(cfg.queryContext.TimeRange.MustGetFrom().UnixNano() / 1e6) } else { intervalStart = series.Points[len(series.Points)-1][1].Float64 + cfg.fillInterval } @@ -502,13 +532,15 @@ func (e *sqlQueryEndpoint) processRow(cfg *processCfg) error { intervalStart = math.Floor(intervalStart/cfg.fillInterval) * cfg.fillInterval for i := intervalStart; i < timestamp; i += cfg.fillInterval { - series.Points = append(series.Points, tsdb.TimePoint{cfg.fillValue, null.FloatFrom(i)}) + series.Points = append(series.Points, plugins.DataTimePoint{cfg.fillValue, null.FloatFrom(i)}) cfg.rowCount++ } } - series.Points = append(series.Points, tsdb.TimePoint{value, null.FloatFrom(timestamp)}) + series.Points = append(series.Points, plugins.DataTimePoint{value, null.FloatFrom(timestamp)}) + cfg.pointsBySeries[metric] = series + // TODO: Make non-global if setting.Env == setting.Dev { e.log.Debug("Rows", "metric", metric, "time", timestamp, "value", value) } @@ -519,7 +551,7 @@ func (e *sqlQueryEndpoint) processRow(cfg *processCfg) error { // ConvertSqlTimeColumnToEpochMs converts column named time to unix timestamp in milliseconds // to make native datetime types and epoch dates work in annotation and table queries. 
-func ConvertSqlTimeColumnToEpochMs(values tsdb.RowValues, timeIndex int) { +func ConvertSqlTimeColumnToEpochMs(values plugins.DataRowValues, timeIndex int) { if timeIndex >= 0 { switch value := values[timeIndex].(type) { case time.Time: @@ -529,40 +561,40 @@ func ConvertSqlTimeColumnToEpochMs(values tsdb.RowValues, timeIndex int) { values[timeIndex] = float64(value.UnixNano()) / float64(time.Millisecond) } case int64: - values[timeIndex] = int64(tsdb.EpochPrecisionToMs(float64(value))) + values[timeIndex] = int64(epochPrecisionToMS(float64(value))) case *int64: if value != nil { - values[timeIndex] = int64(tsdb.EpochPrecisionToMs(float64(*value))) + values[timeIndex] = int64(epochPrecisionToMS(float64(*value))) } case uint64: - values[timeIndex] = int64(tsdb.EpochPrecisionToMs(float64(value))) + values[timeIndex] = int64(epochPrecisionToMS(float64(value))) case *uint64: if value != nil { - values[timeIndex] = int64(tsdb.EpochPrecisionToMs(float64(*value))) + values[timeIndex] = int64(epochPrecisionToMS(float64(*value))) } case int32: - values[timeIndex] = int64(tsdb.EpochPrecisionToMs(float64(value))) + values[timeIndex] = int64(epochPrecisionToMS(float64(value))) case *int32: if value != nil { - values[timeIndex] = int64(tsdb.EpochPrecisionToMs(float64(*value))) + values[timeIndex] = int64(epochPrecisionToMS(float64(*value))) } case uint32: - values[timeIndex] = int64(tsdb.EpochPrecisionToMs(float64(value))) + values[timeIndex] = int64(epochPrecisionToMS(float64(value))) case *uint32: if value != nil { - values[timeIndex] = int64(tsdb.EpochPrecisionToMs(float64(*value))) + values[timeIndex] = int64(epochPrecisionToMS(float64(*value))) } case float64: - values[timeIndex] = tsdb.EpochPrecisionToMs(value) + values[timeIndex] = epochPrecisionToMS(value) case *float64: if value != nil { - values[timeIndex] = tsdb.EpochPrecisionToMs(*value) + values[timeIndex] = epochPrecisionToMS(*value) } case float32: - values[timeIndex] = tsdb.EpochPrecisionToMs(float64(value)) + values[timeIndex] = epochPrecisionToMS(float64(value)) case *float32: if value != nil { - values[timeIndex] = tsdb.EpochPrecisionToMs(float64(*value)) + values[timeIndex] = epochPrecisionToMS(float64(*value)) } } } @@ -678,7 +710,7 @@ func ConvertSqlValueColumnToFloat(columnName string, columnValue interface{}) (n return value, nil } -func SetupFillmode(query *tsdb.Query, interval time.Duration, fillmode string) error { +func SetupFillmode(query plugins.DataSubQuery, interval time.Duration, fillmode string) error { query.Model.Set("fill", true) query.Model.Set("fillInterval", interval.Seconds()) switch fillmode { @@ -698,13 +730,13 @@ func SetupFillmode(query *tsdb.Query, interval time.Duration, fillmode string) e return nil } -type SqlMacroEngineBase struct{} +type SQLMacroEngineBase struct{} -func NewSqlMacroEngineBase() *SqlMacroEngineBase { - return &SqlMacroEngineBase{} +func NewSQLMacroEngineBase() *SQLMacroEngineBase { + return &SQLMacroEngineBase{} } -func (m *SqlMacroEngineBase) ReplaceAllStringSubmatchFunc(re *regexp.Regexp, str string, repl func([]string) string) string { +func (m *SQLMacroEngineBase) ReplaceAllStringSubmatchFunc(re *regexp.Regexp, str string, repl func([]string) string) string { result := "" lastIndex := 0 @@ -720,3 +752,18 @@ func (m *SqlMacroEngineBase) ReplaceAllStringSubmatchFunc(re *regexp.Regexp, str return result + str[lastIndex:] } + +// epochPrecisionToMS converts epoch precision to millisecond, if needed. 
+// Only seconds to milliseconds supported right now
+func epochPrecisionToMS(value float64) float64 {
+	s := strconv.FormatFloat(value, 'e', -1, 64)
+	if strings.HasSuffix(s, "e+09") {
+		return value * float64(1e3)
+	}
+
+	if strings.HasSuffix(s, "e+18") {
+		return value / float64(time.Millisecond)
+	}
+
+	return value
+}
diff --git a/pkg/tsdb/sqleng/sql_engine_test.go b/pkg/tsdb/sqleng/sql_engine_test.go
index 06b63fb8faa..43768683ca5 100644
--- a/pkg/tsdb/sqleng/sql_engine_test.go
+++ b/pkg/tsdb/sqleng/sql_engine_test.go
@@ -8,19 +8,19 @@ import (
 	"github.com/grafana/grafana/pkg/components/null"
 	"github.com/grafana/grafana/pkg/components/simplejson"
 	"github.com/grafana/grafana/pkg/models"
-	"github.com/grafana/grafana/pkg/tsdb"
+	"github.com/grafana/grafana/pkg/plugins"
 	"github.com/stretchr/testify/require"
 )
 
-func TestSqlEngine(t *testing.T) {
+func TestSQLEngine(t *testing.T) {
 	dt := time.Date(2018, 3, 14, 21, 20, 6, int(527345*time.Microsecond), time.UTC)
 	earlyDt := time.Date(1970, 3, 14, 21, 20, 6, int(527345*time.Microsecond), time.UTC)
 
 	t.Run("Given a time range between 2018-04-12 00:00 and 2018-04-12 00:05", func(t *testing.T) {
 		from := time.Date(2018, 4, 12, 18, 0, 0, 0, time.UTC)
 		to := from.Add(5 * time.Minute)
-		timeRange := tsdb.NewFakeTimeRange("5m", "now", to)
-		query := &tsdb.Query{DataSource: &models.DataSource{}, Model: simplejson.New()}
+		timeRange := plugins.DataTimeRange{From: "5m", To: "now", Now: to}
+		query := plugins.DataSubQuery{DataSource: &models.DataSource{}, Model: simplejson.New()}
 
 		t.Run("interpolate $__interval", func(t *testing.T) {
 			sql, err := Interpolate(query, timeRange, "select $__interval ")
diff --git a/pkg/tsdb/tempo/tempo.go b/pkg/tsdb/tempo/tempo.go
index e1952984715..cf3fd5692b0 100644
--- a/pkg/tsdb/tempo/tempo.go
+++ b/pkg/tsdb/tempo/tempo.go
@@ -10,7 +10,7 @@ import (
 	"github.com/grafana/grafana-plugin-sdk-go/data"
 	"github.com/grafana/grafana/pkg/infra/log"
 	"github.com/grafana/grafana/pkg/models"
-	"github.com/grafana/grafana/pkg/tsdb"
+	"github.com/grafana/grafana/pkg/plugins"
 
 	jaeger "github.com/jaegertracing/jaeger/model"
 	jaeger_json "github.com/jaegertracing/jaeger/model/converter/json"
@@ -23,7 +23,7 @@ type tempoExecutor struct {
 	httpClient *http.Client
 }
 
-func newTempoExecutor(dsInfo *models.DataSource) (tsdb.TsdbQueryEndpoint, error) {
+func NewExecutor(dsInfo *models.DataSource) (plugins.DataPlugin, error) {
 	httpClient, err := dsInfo.GetHttpClient()
 	if err != nil {
 		return nil, err
@@ -35,29 +35,21 @@ func newTempoExecutor(dsInfo *models.DataSource) (tsdb.TsdbQueryEndpoint, error)
 }
 
 var (
-	tlog log.Logger
+	tlog = log.New("tsdb.tempo")
 )
 
-func init() {
-	tlog = log.New("tsdb.tempo")
-	tsdb.RegisterTsdbQueryEndpoint("tempo", newTempoExecutor)
-}
+func (e *tempoExecutor) DataQuery(ctx context.Context, dsInfo *models.DataSource,
+	queryContext plugins.DataQuery) (plugins.DataResponse, error) {
+	refID := queryContext.Queries[0].RefID
+	queryResult := plugins.DataQueryResult{}
 
-func (e *tempoExecutor) Query(ctx context.Context, dsInfo *models.DataSource, tsdbQuery *tsdb.TsdbQuery) (*tsdb.Response, error) {
-	result := &tsdb.Response{
-		Results: map[string]*tsdb.QueryResult{},
-	}
-	refID := tsdbQuery.Queries[0].RefId
-	queryResult := &tsdb.QueryResult{}
-	result.Results[refID] = queryResult
-
-	traceID := tsdbQuery.Queries[0].Model.Get("query").MustString("")
+	traceID := queryContext.Queries[0].Model.Get("query").MustString("")
 
 	tlog.Debug("Querying tempo with traceID", "traceID", traceID)
 
 	req, err := http.NewRequestWithContext(ctx, "GET", dsInfo.Url+"/api/traces/"+traceID, nil)
 	if err != nil {
-		return nil, err
+		return plugins.DataResponse{}, err
 	}
 
 	if dsInfo.BasicAuth {
@@ -68,7 +60,7 @@ func (e *tempoExecutor) Query(ctx context.Context, dsInfo *models.DataSource, ts
 
 	resp, err := e.httpClient.Do(req)
 	if err != nil {
-		return nil, fmt.Errorf("failed get to tempo: %w", err)
+		return plugins.DataResponse{}, fmt.Errorf("failed get to tempo: %w", err)
 	}
 
 	defer func() {
@@ -79,24 +71,28 @@ func (e *tempoExecutor) Query(ctx context.Context, dsInfo *models.DataSource, ts
 
 	body, err := ioutil.ReadAll(resp.Body)
 	if err != nil {
-		return nil, err
+		return plugins.DataResponse{}, err
 	}
 
 	if resp.StatusCode != http.StatusOK {
 		queryResult.Error = fmt.Errorf("failed to get trace: %s", traceID)
 		tlog.Error("Request to tempo failed", "Status", resp.Status, "Body", string(body))
-		return result, nil
+		return plugins.DataResponse{
+			Results: map[string]plugins.DataQueryResult{
+				refID: queryResult,
+			},
+		}, nil
 	}
 
 	otTrace := ot_pdata.NewTraces()
 	err = otTrace.FromOtlpProtoBytes(body)
 	if err != nil {
-		return nil, fmt.Errorf("failed to convert tempo response to Otlp: %w", err)
+		return plugins.DataResponse{}, fmt.Errorf("failed to convert tempo response to Otlp: %w", err)
 	}
 
 	jaegerBatches, err := ot_jaeger.InternalTracesToJaegerProto(otTrace)
 	if err != nil {
-		return nil, fmt.Errorf("failed to translate to jaegerBatches %v: %w", traceID, err)
+		return plugins.DataResponse{}, fmt.Errorf("failed to translate to jaegerBatches %v: %w", traceID, err)
 	}
 
 	jaegerTrace := &jaeger.Trace{
@@ -120,13 +116,17 @@ func (e *tempoExecutor) Query(ctx context.Context, dsInfo *models.DataSource, ts
 
 	traceBytes, err := json.Marshal(jsonTrace)
 	if err != nil {
-		return nil, fmt.Errorf("failed to json.Marshal trace \"%s\" :%w", traceID, err)
+		return plugins.DataResponse{}, fmt.Errorf("failed to json.Marshal trace \"%s\" :%w", traceID, err)
 	}
 
 	frames := []*data.Frame{
 		{Name: "Traces", RefID: refID, Fields: []*data.Field{data.NewField("trace", nil, []string{string(traceBytes)})}},
 	}
-	queryResult.Dataframes = tsdb.NewDecodedDataFrames(frames)
+	queryResult.Dataframes = plugins.NewDecodedDataFrames(frames)
 
-	return result, nil
+	return plugins.DataResponse{
+		Results: map[string]plugins.DataQueryResult{
+			refID: queryResult,
+		},
+	}, nil
 }
diff --git a/pkg/tsdb/testdatasource/scenarios_test.go b/pkg/tsdb/testdatasource/scenarios_test.go
index b4da6905c6e..816f695dc7d 100644
--- a/pkg/tsdb/testdatasource/scenarios_test.go
+++ b/pkg/tsdb/testdatasource/scenarios_test.go
@@ -9,7 +9,7 @@ import (
 	"github.com/grafana/grafana-plugin-sdk-go/backend"
 	"github.com/grafana/grafana-plugin-sdk-go/data"
 	"github.com/grafana/grafana/pkg/components/simplejson"
-	"github.com/grafana/grafana/pkg/tsdb"
+	"github.com/grafana/grafana/pkg/plugins"
 	"github.com/stretchr/testify/assert"
 	"github.com/stretchr/testify/require"
 )
@@ -19,7 +19,7 @@ func TestTestdataScenarios(t *testing.T) {
 	t.Run("random walk ", func(t *testing.T) {
 		t.Run("Should start at the requested value", func(t *testing.T) {
-			timeRange := tsdb.NewFakeTimeRange("5m", "now", time.Now())
+			timeRange := plugins.DataTimeRange{From: "5m", To: "now", Now: time.Now()}
 			model := simplejson.New()
 			model.Set("startValue", 1.234)
@@ -63,7 +63,7 @@ func TestTestdataScenarios(t *testing.T) {
 	})
 
 	t.Run("random walk table", func(t *testing.T) {
 		t.Run("Should return a table that looks like value/min/max", func(t *testing.T) {
-			timeRange := tsdb.NewFakeTimeRange("5m", "now", time.Now())
+			timeRange := plugins.DataTimeRange{From: "5m", To: "now", Now: time.Now()}
 			model := simplejson.New()
 
 			modelBytes, err := model.MarshalJSON()
@@ -117,7 +117,7 @@ func TestTestdataScenarios(t *testing.T) {
 	})
 
 	t.Run("Should return a table with some nil values", func(t *testing.T) {
-		timeRange := tsdb.NewFakeTimeRange("5m", "now", time.Now())
+		timeRange := plugins.DataTimeRange{From: "5m", To: "now", Now: time.Now()}
 		model := simplejson.New()
 		model.Set("withNil", true)
diff --git a/pkg/tsdb/time_range.go b/pkg/tsdb/time_range.go
index eb855c0976b..8b99bd3e933 100644
--- a/pkg/tsdb/time_range.go
+++ b/pkg/tsdb/time_range.go
@@ -2,7 +2,6 @@ package tsdb
 
 import (
 	"strconv"
-	"strings"
 	"time"
 
 	"github.com/timberio/go-datemath"
@@ -115,18 +114,3 @@ func parse(s string, now time.Time, withRoundUp bool, location *time.Location) (
 
 	return now.Add(diff), nil
 }
-
-// EpochPrecisionToMs converts epoch precision to millisecond, if needed.
-// Only seconds to milliseconds supported right now
-func EpochPrecisionToMs(value float64) float64 {
-	s := strconv.FormatFloat(value, 'e', -1, 64)
-	if strings.HasSuffix(s, "e+09") {
-		return value * float64(1e3)
-	}
-
-	if strings.HasSuffix(s, "e+18") {
-		return value / float64(time.Millisecond)
-	}
-
-	return value
-}
diff --git a/pkg/tsdb/tsdbifaces/ifaces.go b/pkg/tsdb/tsdbifaces/ifaces.go
new file mode 100644
index 00000000000..3c75f86d255
--- /dev/null
+++ b/pkg/tsdb/tsdbifaces/ifaces.go
@@ -0,0 +1,13 @@
+package tsdbifaces
+
+import (
+	"context"
+
+	"github.com/grafana/grafana/pkg/models"
+	"github.com/grafana/grafana/pkg/plugins"
+)
+
+// RequestHandler is a data request handler interface.
+type RequestHandler interface {
+	HandleRequest(context.Context, *models.DataSource, plugins.DataQuery) (plugins.DataResponse, error)
+}
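For reference, a minimal sketch (not part of this patch) of a type satisfying the new tsdbifaces.RequestHandler interface; the fakeHandler type, the _test package, and its canned empty results are illustrative assumptions, e.g. for use as a test double where no real data source is available:

package tsdbifaces_test

import (
	"context"

	"github.com/grafana/grafana/pkg/models"
	"github.com/grafana/grafana/pkg/plugins"
	"github.com/grafana/grafana/pkg/tsdb/tsdbifaces"
)

// fakeHandler is a hypothetical test double: instead of querying a real data
// source, it returns an empty DataQueryResult keyed by each sub-query's RefID.
type fakeHandler struct{}

func (fakeHandler) HandleRequest(_ context.Context, _ *models.DataSource,
	query plugins.DataQuery) (plugins.DataResponse, error) {
	results := make(map[string]plugins.DataQueryResult, len(query.Queries))
	for _, q := range query.Queries {
		results[q.RefID] = plugins.DataQueryResult{}
	}
	return plugins.DataResponse{Results: results}, nil
}

// Compile-time check that fakeHandler implements the interface.
var _ tsdbifaces.RequestHandler = fakeHandler{}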