Introduce TSDB service (#31520)

* Introduce TSDB service

Signed-off-by: Arve Knudsen <arve.knudsen@gmail.com>

Co-authored-by: Erik Sundell <erik.sundell87@gmail.com>
Co-authored-by: Will Browne <will.browne@grafana.com>
Co-authored-by: Torkel Ödegaard <torkel@grafana.org>
Co-authored-by: Will Browne <wbrowne@users.noreply.github.com>
Co-authored-by: Zoltán Bedi <zoltan.bedi@gmail.com>
Committed by: Arve Knudsen, 2021-03-08 07:02:49 +01:00 (via GitHub)
commit b79e61656a (parent c899bf3592)
203 changed files with 5270 additions and 4777 deletions
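
At the core of this change, visible throughout the diffs below, the package-level tsdb functions and registries become an injectable *tsdb.Service: HTTPServer and the expression service receive it through `inject:""` tags, and datasource query handlers are registered on the service instance instead of a global table. A minimal, hedged sketch of the registration side, limited to names that appear in this commit's own test code (the plugin-manager wiring is omitted, and myTestPlugin is a stand-in):

	dataSvc := tsdb.NewService()

	// A plugins.DataPlugin exposes DataQuery(ctx, ds, query) (plugins.DataResponse, error).
	// Handlers are registered per service instance; the package-level
	// tsdb.RegisterTsdbQueryEndpoint call is what this commit removes from the call sites below.
	dataSvc.RegisterQueryHandler("test", func(ds *models.DataSource) (plugins.DataPlugin, error) {
		return myTestPlugin, nil // myTestPlugin: a stand-in implementation of plugins.DataPlugin
	})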

@@ -128,19 +128,13 @@ func GetAlerts(c *models.ReqContext) response.Response {
 }
 
 // POST /api/alerts/test
-func AlertTest(c *models.ReqContext, dto dtos.AlertTestCommand) response.Response {
+func (hs *HTTPServer) AlertTest(c *models.ReqContext, dto dtos.AlertTestCommand) response.Response {
 	if _, idErr := dto.Dashboard.Get("id").Int64(); idErr != nil {
 		return response.Error(400, "The dashboard needs to be saved at least once before you can test an alert rule", nil)
 	}
 
-	backendCmd := alerting.AlertTestCommand{
-		OrgID:     c.OrgId,
-		Dashboard: dto.Dashboard,
-		PanelID:   dto.PanelId,
-		User:      c.SignedInUser,
-	}
-
-	if err := bus.Dispatch(&backendCmd); err != nil {
+	res, err := hs.AlertEngine.AlertTest(c.OrgId, dto.Dashboard, dto.PanelId, c.SignedInUser)
+	if err != nil {
 		var validationErr alerting.ValidationError
 		if errors.As(err, &validationErr) {
 			return response.Error(422, validationErr.Error(), nil)
@@ -151,7 +145,6 @@ func AlertTest(c *models.ReqContext, dto dtos.AlertTestCommand) response.Respons
 		return response.Error(500, "Failed to test rule", err)
 	}
 
-	res := backendCmd.Result
 	dtoRes := &dtos.AlertTestResult{
 		Firing:         res.Firing,
 		ConditionEvals: res.ConditionEvals,

@@ -266,14 +266,14 @@ func (hs *HTTPServer) registerRoutes() {
 		apiRoute.Get("/plugins", routing.Wrap(hs.GetPluginList))
 		apiRoute.Get("/plugins/:pluginId/settings", routing.Wrap(GetPluginSettingByID))
-		apiRoute.Get("/plugins/:pluginId/markdown/:name", routing.Wrap(GetPluginMarkdown))
+		apiRoute.Get("/plugins/:pluginId/markdown/:name", routing.Wrap(hs.GetPluginMarkdown))
 		apiRoute.Get("/plugins/:pluginId/health", routing.Wrap(hs.CheckHealth))
 		apiRoute.Any("/plugins/:pluginId/resources", hs.CallResource)
 		apiRoute.Any("/plugins/:pluginId/resources/*", hs.CallResource)
 		apiRoute.Any("/plugins/errors", routing.Wrap(hs.GetPluginErrorsList))
 
 		apiRoute.Group("/plugins", func(pluginRoute routing.RouteRegister) {
-			pluginRoute.Get("/:pluginId/dashboards/", routing.Wrap(GetPluginDashboards))
+			pluginRoute.Get("/:pluginId/dashboards/", routing.Wrap(hs.GetPluginDashboards))
 			pluginRoute.Post("/:pluginId/settings", bind(models.UpdatePluginSettingCmd{}), routing.Wrap(UpdatePluginSetting))
 			pluginRoute.Get("/:pluginId/metrics", routing.Wrap(hs.CollectPluginMetrics))
 		}, reqOrgAdmin)
@@ -316,7 +316,7 @@ func (hs *HTTPServer) registerRoutes() {
 			dashboardRoute.Post("/db", bind(models.SaveDashboardCommand{}), routing.Wrap(hs.PostDashboard))
 			dashboardRoute.Get("/home", routing.Wrap(hs.GetHomeDashboard))
 			dashboardRoute.Get("/tags", GetDashboardTags)
-			dashboardRoute.Post("/import", bind(dtos.ImportDashboardCommand{}), routing.Wrap(ImportDashboard))
+			dashboardRoute.Post("/import", bind(dtos.ImportDashboardCommand{}), routing.Wrap(hs.ImportDashboard))
 
 			dashboardRoute.Group("/id/:dashboardId", func(dashIdRoute routing.RouteRegister) {
 				dashIdRoute.Get("/versions", routing.Wrap(GetDashboardVersions))
@@ -353,13 +353,13 @@ func (hs *HTTPServer) registerRoutes() {
 		// metrics
 		apiRoute.Post("/tsdb/query", bind(dtos.MetricRequest{}), routing.Wrap(hs.QueryMetrics))
 		apiRoute.Get("/tsdb/testdata/gensql", reqGrafanaAdmin, routing.Wrap(GenerateSQLTestData))
-		apiRoute.Get("/tsdb/testdata/random-walk", routing.Wrap(GetTestDataRandomWalk))
+		apiRoute.Get("/tsdb/testdata/random-walk", routing.Wrap(hs.GetTestDataRandomWalk))
 
 		// DataSource w/ expressions
 		apiRoute.Post("/ds/query", bind(dtos.MetricRequest{}), routing.Wrap(hs.QueryMetricsV2))
 
 		apiRoute.Group("/alerts", func(alertsRoute routing.RouteRegister) {
-			alertsRoute.Post("/test", bind(dtos.AlertTestCommand{}), routing.Wrap(AlertTest))
+			alertsRoute.Post("/test", bind(dtos.AlertTestCommand{}), routing.Wrap(hs.AlertTest))
 			alertsRoute.Post("/:alertId/pause", reqEditorRole, bind(dtos.PauseAlertCommand{}), routing.Wrap(PauseAlert))
 			alertsRoute.Get("/:alertId", ValidateOrgAlert, routing.Wrap(GetAlert))
 			alertsRoute.Get("/", routing.Wrap(GetAlerts))

@@ -11,6 +11,7 @@ import (
 	"github.com/grafana/grafana/pkg/middleware"
 	"github.com/grafana/grafana/pkg/models"
 	"github.com/grafana/grafana/pkg/plugins"
+	"github.com/grafana/grafana/pkg/plugins/manager"
 	"github.com/grafana/grafana/pkg/util"
 	macaron "gopkg.in/macaron.v1"
 )
@@ -31,7 +32,7 @@ func (hs *HTTPServer) initAppPluginRoutes(r *macaron.Macaron) {
 		TLSHandshakeTimeout: 10 * time.Second,
 	}
 
-	for _, plugin := range plugins.Apps {
+	for _, plugin := range manager.Apps {
 		for _, route := range plugin.Routes {
 			url := util.JoinURLFragments("/api/plugin-proxy/"+plugin.Id, route.Path)
 			handlers := make([]macaron.Handler, 0)

@@ -8,6 +8,7 @@ import (
 	"path/filepath"
 
 	"github.com/grafana/grafana/pkg/models"
+	"github.com/grafana/grafana/pkg/plugins/manager"
 	"github.com/grafana/grafana/pkg/services/alerting"
 	"github.com/grafana/grafana/pkg/services/dashboards"
@@ -17,7 +18,6 @@ import (
 	"github.com/grafana/grafana/pkg/components/dashdiffs"
 	"github.com/grafana/grafana/pkg/components/simplejson"
 	"github.com/grafana/grafana/pkg/infra/metrics"
-	"github.com/grafana/grafana/pkg/plugins"
 	"github.com/grafana/grafana/pkg/services/guardian"
 	"github.com/grafana/grafana/pkg/util"
 )
@@ -226,7 +226,7 @@ func (hs *HTTPServer) deleteDashboard(c *models.ReqContext) response.Response {
 		}
 	}
 
-	err := dashboards.NewService().DeleteDashboard(dash.Id, c.OrgId)
+	err := dashboards.NewService(hs.DataService).DeleteDashboard(dash.Id, c.OrgId)
 	if err != nil {
 		var dashboardErr models.DashboardErr
 		if ok := errors.As(err, &dashboardErr); ok {
@@ -288,7 +288,7 @@ func (hs *HTTPServer) PostDashboard(c *models.ReqContext, cmd models.SaveDashboa
 		Overwrite: cmd.Overwrite,
 	}
 
-	dashboard, err := dashboards.NewService().SaveDashboard(dashItem, allowUiUpdate)
+	dashboard, err := dashboards.NewService(hs.DataService).SaveDashboard(dashItem, allowUiUpdate)
 	if err != nil {
 		return dashboardSaveErrorToApiResponse(err)
 	}
@@ -356,7 +356,7 @@ func dashboardSaveErrorToApiResponse(err error) response.Response {
 	if ok := errors.As(err, &pluginErr); ok {
 		message := fmt.Sprintf("The dashboard belongs to plugin %s.", pluginErr.PluginId)
 		// look up plugin name
-		if pluginDef, exist := plugins.Plugins[pluginErr.PluginId]; exist {
+		if pluginDef, exist := manager.Plugins[pluginErr.PluginId]; exist {
 			message = fmt.Sprintf("The dashboard belongs to plugin %s.", pluginDef.Name)
 		}
 		return response.JSON(412, util.DynMap{"status": "plugin-dashboard", "message": message})

@@ -9,7 +9,7 @@ import (
 	"github.com/grafana/grafana/pkg/api/pluginproxy"
 	"github.com/grafana/grafana/pkg/infra/metrics"
 	"github.com/grafana/grafana/pkg/models"
-	"github.com/grafana/grafana/pkg/plugins"
+	"github.com/grafana/grafana/pkg/plugins/manager"
 )
 
 // ProxyDataSourceRequest proxies datasource requests
@@ -34,7 +34,7 @@ func (hs *HTTPServer) ProxyDataSourceRequest(c *models.ReqContext) {
 	}
 
 	// find plugin
-	plugin, ok := plugins.DataSources[ds.Type]
+	plugin, ok := manager.DataSources[ds.Type]
 	if !ok {
 		c.JsonApiErr(http.StatusInternalServerError, "Unable to find datasource plugin", err)
 		return

@@ -13,8 +13,8 @@ import (
 	"github.com/grafana/grafana/pkg/bus"
 	"github.com/grafana/grafana/pkg/infra/log"
 	"github.com/grafana/grafana/pkg/models"
-	"github.com/grafana/grafana/pkg/plugins"
-	"github.com/grafana/grafana/pkg/plugins/datasource/wrapper"
+	"github.com/grafana/grafana/pkg/plugins/adapters"
+	"github.com/grafana/grafana/pkg/plugins/manager"
 	"github.com/grafana/grafana/pkg/util"
 )
 
@@ -47,7 +47,7 @@ func (hs *HTTPServer) GetDataSources(c *models.ReqContext) response.Response {
 			ReadOnly:  ds.ReadOnly,
 		}
 
-		if plugin, exists := plugins.DataSources[ds.Type]; exists {
+		if plugin, exists := manager.DataSources[ds.Type]; exists {
 			dsItem.TypeLogoUrl = plugin.Info.Logos.Small
 			dsItem.TypeName = plugin.Name
 		} else {
@@ -363,19 +363,19 @@ func (hs *HTTPServer) CallDatasourceResource(c *models.ReqContext) {
 	}
 
 	// find plugin
-	plugin, ok := plugins.DataSources[ds.Type]
+	plugin, ok := manager.DataSources[ds.Type]
 	if !ok {
 		c.JsonApiErr(500, "Unable to find datasource plugin", err)
 		return
 	}
 
-	dsInstanceSettings, err := wrapper.ModelToInstanceSettings(ds)
+	dsInstanceSettings, err := adapters.ModelToInstanceSettings(ds)
 	if err != nil {
 		c.JsonApiErr(500, "Unable to process datasource instance model", err)
 	}
 
 	pCtx := backend.PluginContext{
-		User:                       wrapper.BackendUserFromSignedInUser(c.SignedInUser),
+		User:                       adapters.BackendUserFromSignedInUser(c.SignedInUser),
 		OrgID:                      c.OrgId,
 		PluginID:                   plugin.Id,
 		DataSourceInstanceSettings: dsInstanceSettings,
@@ -433,12 +433,12 @@ func (hs *HTTPServer) CheckDatasourceHealth(c *models.ReqContext) response.Respo
 		return response.Error(500, "Unable to find datasource plugin", err)
 	}
 
-	dsInstanceSettings, err := wrapper.ModelToInstanceSettings(ds)
+	dsInstanceSettings, err := adapters.ModelToInstanceSettings(ds)
 	if err != nil {
 		return response.Error(500, "Unable to get datasource model", err)
 	}
 	pCtx := backend.PluginContext{
-		User:                       wrapper.BackendUserFromSignedInUser(c.SignedInUser),
+		User:                       adapters.BackendUserFromSignedInUser(c.SignedInUser),
 		OrgID:                      c.OrgId,
 		PluginID:                   plugin.Id,
 		DataSourceInstanceSettings: dsInstanceSettings,

@@ -3,6 +3,7 @@ package dtos
 import (
 	"github.com/grafana/grafana/pkg/components/simplejson"
 	"github.com/grafana/grafana/pkg/plugins"
+	"github.com/grafana/grafana/pkg/plugins/manager"
 )
 
 type PluginSetting struct {
@@ -63,6 +64,6 @@ type ImportDashboardCommand struct {
 	Path      string                         `json:"path"`
 	Overwrite bool                           `json:"overwrite"`
 	Dashboard *simplejson.Json               `json:"dashboard"`
-	Inputs    []plugins.ImportDashboardInput `json:"inputs"`
+	Inputs    []manager.ImportDashboardInput `json:"inputs"`
 	FolderId  int64                          `json:"folderId"`
 }

@@ -16,6 +16,7 @@ import (
 	"github.com/grafana/grafana/pkg/api/routing"
 	"github.com/grafana/grafana/pkg/models"
 	"github.com/grafana/grafana/pkg/plugins"
+	"github.com/grafana/grafana/pkg/plugins/manager"
 	"github.com/grafana/grafana/pkg/setting"
 
 	log "github.com/inconshreveable/log15"
@@ -90,7 +91,7 @@ func TestFrontendLoggingEndpoint(t *testing.T) {
 		require.NoError(t, err)
 
 		// fake plugin route so we will try to find a source map there. I can't believe I can do this
-		plugins.StaticRoutes = append(plugins.StaticRoutes, &plugins.PluginStaticRoute{
+		manager.StaticRoutes = append(manager.StaticRoutes, &plugins.PluginStaticRoute{
 			Directory: "/usr/local/telepathic-panel",
 			PluginId:  "telepathic",
 		})

@@ -12,7 +12,7 @@ import (
 	sourcemap "github.com/go-sourcemap/sourcemap"
 
 	"github.com/getsentry/sentry-go"
-	"github.com/grafana/grafana/pkg/plugins"
+	"github.com/grafana/grafana/pkg/plugins/manager"
 	"github.com/grafana/grafana/pkg/setting"
 )
@@ -80,7 +80,7 @@ func (store *SourceMapStore) guessSourceMapLocation(sourceURL string) (*sourceMa
 		}
 		// if source comes from a plugin, look in plugin dir
 	} else if strings.HasPrefix(u.Path, "/public/plugins/") {
-		for _, route := range plugins.StaticRoutes {
+		for _, route := range manager.StaticRoutes {
 			pluginPrefix := filepath.Join("/public/plugins/", route.PluginId)
 			if strings.HasPrefix(u.Path, pluginPrefix) {
 				return &sourceMapLocation{

@@ -5,6 +5,7 @@ import (
 	"strconv"
 
 	"github.com/grafana/grafana/pkg/models"
+	"github.com/grafana/grafana/pkg/plugins/manager"
 
 	"github.com/grafana/grafana/pkg/components/simplejson"
 	"github.com/grafana/grafana/pkg/util"
@@ -109,12 +110,12 @@ func (hs *HTTPServer) getFSDataSources(c *models.ReqContext, enabledPlugins *plu
 
 	// add data sources that are built in (meaning they are not added via data sources page, nor have any entry in
 	// the datasource table)
-	for _, ds := range plugins.DataSources {
+	for _, ds := range manager.DataSources {
 		if ds.BuiltIn {
 			dataSources[ds.Name] = map[string]interface{}{
 				"type": ds.Type,
 				"name": ds.Name,
-				"meta": plugins.DataSources[ds.Id],
+				"meta": manager.DataSources[ds.Id],
 			}
 		}
 	}
@@ -124,7 +125,7 @@ func (hs *HTTPServer) getFSDataSources(c *models.ReqContext, enabledPlugins *plu
 
 // getFrontendSettingsMap returns a json object with all the settings needed for front end initialisation.
 func (hs *HTTPServer) getFrontendSettingsMap(c *models.ReqContext) (map[string]interface{}, error) {
-	enabledPlugins, err := plugins.GetEnabledPlugins(c.OrgId)
+	enabledPlugins, err := hs.PluginManager.GetEnabledPlugins(c.OrgId)
 	if err != nil {
 		return nil, err
 	}

@@ -11,7 +11,7 @@ import (
 	"github.com/stretchr/testify/require"
 
-	"github.com/grafana/grafana/pkg/plugins"
+	"github.com/grafana/grafana/pkg/plugins/manager"
 	"github.com/grafana/grafana/pkg/services/rendering"
 
 	"github.com/grafana/grafana/pkg/services/licensing"
@@ -50,7 +50,7 @@ func setupTestEnvironment(t *testing.T, cfg *setting.Cfg) (*macaron.Macaron, *HT
 		Bus:           bus.GetBus(),
 		License:       &licensing.OSSLicensingService{Cfg: cfg},
 		RenderService: r,
-		PluginManager: &plugins.PluginManager{Cfg: cfg},
+		PluginManager: &manager.PluginManager{Cfg: cfg},
 	}
 
 	m := macaron.New()

@@ -13,12 +13,12 @@ import (
 	"strings"
 	"sync"
 
+	"github.com/grafana/grafana/pkg/services/alerting"
 	"github.com/grafana/grafana/pkg/services/live"
 	"github.com/grafana/grafana/pkg/services/search"
 	"github.com/grafana/grafana/pkg/services/shorturls"
 	"github.com/grafana/grafana/pkg/services/sqlstore"
+	"github.com/grafana/grafana/pkg/tsdb"
 
-	"github.com/grafana/grafana/pkg/plugins/backendplugin"
-
 	"github.com/grafana/grafana/pkg/api/routing"
 	httpstatic "github.com/grafana/grafana/pkg/api/static"
@@ -29,7 +29,10 @@ import (
 	"github.com/grafana/grafana/pkg/infra/remotecache"
 	"github.com/grafana/grafana/pkg/middleware"
 	"github.com/grafana/grafana/pkg/models"
-	"github.com/grafana/grafana/pkg/plugins"
+	"github.com/grafana/grafana/pkg/plugins/backendplugin"
+	_ "github.com/grafana/grafana/pkg/plugins/backendplugin/manager"
+	"github.com/grafana/grafana/pkg/plugins/manager"
+	"github.com/grafana/grafana/pkg/plugins/plugindashboards"
 	"github.com/grafana/grafana/pkg/registry"
 	"github.com/grafana/grafana/pkg/services/contexthandler"
 	"github.com/grafana/grafana/pkg/services/datasources"
@@ -76,13 +79,16 @@ type HTTPServer struct {
 	License                models.Licensing                   `inject:""`
 	BackendPluginManager   backendplugin.Manager              `inject:""`
 	PluginRequestValidator models.PluginRequestValidator      `inject:""`
-	PluginManager          *plugins.PluginManager             `inject:""`
+	PluginManager          *manager.PluginManager             `inject:""`
 	SearchService          *search.SearchService              `inject:""`
 	ShortURLService        *shorturls.ShortURLService         `inject:""`
 	Live                   *live.GrafanaLive                  `inject:""`
 	ContextHandler         *contexthandler.ContextHandler     `inject:""`
 	SQLStore               *sqlstore.SQLStore                 `inject:""`
 	LibraryPanelService    *librarypanels.LibraryPanelService `inject:""`
+	DataService            *tsdb.Service                      `inject:""`
+	PluginDashboardService *plugindashboards.Service          `inject:""`
+	AlertEngine            *alerting.AlertEngine              `inject:""`
 	Listener               net.Listener
 }
@@ -312,7 +318,7 @@ func (hs *HTTPServer) addMiddlewaresAndStaticRoutes() {
 	m.Use(middleware.Recovery(hs.Cfg))
 
-	for _, route := range plugins.StaticRoutes {
+	for _, route := range manager.StaticRoutes {
 		pluginRoute := path.Join("/public/plugins/", route.PluginId)
 		hs.log.Debug("Plugins: Adding route", "route", pluginRoute, "dir", route.Directory)
 		hs.mapStatic(m, route.Directory, "", pluginRoute)

@@ -8,7 +8,6 @@ import (
 	"github.com/grafana/grafana/pkg/api/dtos"
 	"github.com/grafana/grafana/pkg/bus"
 	"github.com/grafana/grafana/pkg/models"
-	"github.com/grafana/grafana/pkg/plugins"
 	"github.com/grafana/grafana/pkg/setting"
 )
 
@@ -63,8 +62,8 @@ func getProfileNode(c *models.ReqContext) *dtos.NavLink {
 	}
 }
 
-func getAppLinks(c *models.ReqContext) ([]*dtos.NavLink, error) {
-	enabledPlugins, err := plugins.GetEnabledPlugins(c.OrgId)
+func (hs *HTTPServer) getAppLinks(c *models.ReqContext) ([]*dtos.NavLink, error) {
+	enabledPlugins, err := hs.PluginManager.GetEnabledPlugins(c.OrgId)
 	if err != nil {
 		return nil, err
 	}
@@ -213,7 +212,7 @@ func (hs *HTTPServer) getNavTree(c *models.ReqContext, hasEditPerm bool) ([]*dto
 		})
 	}
 
-	appLinks, err := getAppLinks(c)
+	appLinks, err := hs.getAppLinks(c)
 	if err != nil {
 		return nil, err
 	}

@@ -7,12 +7,12 @@ import (
 	"github.com/grafana/grafana/pkg/expr"
 	"github.com/grafana/grafana/pkg/models"
+	"github.com/grafana/grafana/pkg/plugins"
 
 	"github.com/grafana/grafana/pkg/api/dtos"
 	"github.com/grafana/grafana/pkg/api/response"
 	"github.com/grafana/grafana/pkg/bus"
 	"github.com/grafana/grafana/pkg/components/simplejson"
-	"github.com/grafana/grafana/pkg/tsdb"
 	"github.com/grafana/grafana/pkg/util"
 )
@@ -23,11 +23,12 @@ func (hs *HTTPServer) QueryMetricsV2(c *models.ReqContext, reqDTO dtos.MetricReq
 		return response.Error(http.StatusBadRequest, "No queries found in query", nil)
 	}
 
-	request := &tsdb.TsdbQuery{
-		TimeRange: tsdb.NewTimeRange(reqDTO.From, reqDTO.To),
+	timeRange := plugins.NewDataTimeRange(reqDTO.From, reqDTO.To)
+	request := plugins.DataQuery{
+		TimeRange: &timeRange,
 		Debug:     reqDTO.Debug,
 		User:      c.SignedInUser,
-		Queries:   make([]*tsdb.Query, 0, len(reqDTO.Queries)),
+		Queries:   make([]plugins.DataSubQuery, 0, len(reqDTO.Queries)),
 	}
 
 	// Loop to see if we have an expression.
@@ -57,10 +58,10 @@
 			}
 		}
 
-		request.Queries = append(request.Queries, &tsdb.Query{
-			RefId:         query.Get("refId").MustString("A"),
+		request.Queries = append(request.Queries, plugins.DataSubQuery{
+			RefID:         query.Get("refId").MustString("A"),
 			MaxDataPoints: query.Get("maxDataPoints").MustInt64(100),
-			IntervalMs:    query.Get("intervalMs").MustInt64(1000),
+			IntervalMS:    query.Get("intervalMs").MustInt64(1000),
 			QueryType:     query.Get("queryType").MustString(""),
 			Model:         query,
 			DataSource:    ds,
@@ -72,7 +73,7 @@
 		return response.Error(http.StatusForbidden, "Access denied", err)
 	}
 
-	resp, err := tsdb.HandleRequest(c.Req.Context(), ds, request)
+	resp, err := hs.DataService.HandleRequest(c.Req.Context(), ds, request)
 	if err != nil {
 		return response.Error(http.StatusInternalServerError, "Metric request error", err)
 	}
@@ -91,11 +92,12 @@
 
 // handleExpressions handles POST /api/ds/query when there is an expression.
 func (hs *HTTPServer) handleExpressions(c *models.ReqContext, reqDTO dtos.MetricRequest) response.Response {
-	request := &tsdb.TsdbQuery{
-		TimeRange: tsdb.NewTimeRange(reqDTO.From, reqDTO.To),
+	timeRange := plugins.NewDataTimeRange(reqDTO.From, reqDTO.To)
+	request := plugins.DataQuery{
+		TimeRange: &timeRange,
 		Debug:     reqDTO.Debug,
 		User:      c.SignedInUser,
-		Queries:   make([]*tsdb.Query, 0, len(reqDTO.Queries)),
+		Queries:   make([]plugins.DataSubQuery, 0, len(reqDTO.Queries)),
 	}
 
 	for _, query := range reqDTO.Queries {
@@ -116,16 +118,19 @@
 			}
 		}
 
-		request.Queries = append(request.Queries, &tsdb.Query{
-			RefId:         query.Get("refId").MustString("A"),
+		request.Queries = append(request.Queries, plugins.DataSubQuery{
+			RefID:         query.Get("refId").MustString("A"),
 			MaxDataPoints: query.Get("maxDataPoints").MustInt64(100),
-			IntervalMs:    query.Get("intervalMs").MustInt64(1000),
+			IntervalMS:    query.Get("intervalMs").MustInt64(1000),
 			QueryType:     query.Get("queryType").MustString(""),
 			Model:         query,
 		})
 	}
 
-	exprService := expr.Service{Cfg: hs.Cfg}
+	exprService := expr.Service{
+		Cfg:         hs.Cfg,
+		DataService: hs.DataService,
+	}
 	resp, err := exprService.WrapTransformData(c.Req.Context(), request)
 	if err != nil {
 		return response.Error(500, "expression request error", err)
@@ -157,8 +162,6 @@ func (hs *HTTPServer) handleGetDataSourceError(err error, datasourceID int64) *r
 // QueryMetrics returns query metrics
 // POST /api/tsdb/query
 func (hs *HTTPServer) QueryMetrics(c *models.ReqContext, reqDto dtos.MetricRequest) response.Response {
-	timeRange := tsdb.NewTimeRange(reqDto.From, reqDto.To)
-
 	if len(reqDto.Queries) == 0 {
 		return response.Error(http.StatusBadRequest, "No queries found in query", nil)
 	}
@@ -178,23 +181,24 @@
 		return response.Error(http.StatusForbidden, "Access denied", err)
 	}
 
-	request := &tsdb.TsdbQuery{
-		TimeRange: timeRange,
+	timeRange := plugins.NewDataTimeRange(reqDto.From, reqDto.To)
+	request := plugins.DataQuery{
+		TimeRange: &timeRange,
 		Debug:     reqDto.Debug,
 		User:      c.SignedInUser,
 	}
 
 	for _, query := range reqDto.Queries {
-		request.Queries = append(request.Queries, &tsdb.Query{
-			RefId:         query.Get("refId").MustString("A"),
+		request.Queries = append(request.Queries, plugins.DataSubQuery{
+			RefID:         query.Get("refId").MustString("A"),
 			MaxDataPoints: query.Get("maxDataPoints").MustInt64(100),
-			IntervalMs:    query.Get("intervalMs").MustInt64(1000),
+			IntervalMS:    query.Get("intervalMs").MustInt64(1000),
 			Model:         query,
 			DataSource:    ds,
 		})
 	}
 
-	resp, err := tsdb.HandleRequest(c.Req.Context(), ds, request)
+	resp, err := hs.DataService.HandleRequest(c.Req.Context(), ds, request)
 	if err != nil {
 		return response.Error(http.StatusInternalServerError, "Metric request error", err)
 	}
@@ -221,28 +225,28 @@ func GenerateSQLTestData(c *models.ReqContext) response.Response {
 }
 
 // GET /api/tsdb/testdata/random-walk
-func GetTestDataRandomWalk(c *models.ReqContext) response.Response {
+func (hs *HTTPServer) GetTestDataRandomWalk(c *models.ReqContext) response.Response {
 	from := c.Query("from")
 	to := c.Query("to")
-	intervalMs := c.QueryInt64("intervalMs")
+	intervalMS := c.QueryInt64("intervalMs")
 
-	timeRange := tsdb.NewTimeRange(from, to)
-	request := &tsdb.TsdbQuery{TimeRange: timeRange}
+	timeRange := plugins.NewDataTimeRange(from, to)
+	request := plugins.DataQuery{TimeRange: &timeRange}
 
 	dsInfo := &models.DataSource{
 		Type:     "testdata",
 		JsonData: simplejson.New(),
 	}
 
-	request.Queries = append(request.Queries, &tsdb.Query{
-		RefId:      "A",
-		IntervalMs: intervalMs,
+	request.Queries = append(request.Queries, plugins.DataSubQuery{
+		RefID:      "A",
+		IntervalMS: intervalMS,
 		Model: simplejson.NewFromAny(&util.DynMap{
 			"scenario": "random_walk",
 		}),
 		DataSource: dsInfo,
 	})
 
-	resp, err := tsdb.HandleRequest(context.Background(), dsInfo, request)
+	resp, err := hs.DataService.HandleRequest(context.Background(), dsInfo, request)
 	if err != nil {
 		return response.Error(500, "Metric request error", err)
 	}
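
The same conversion recurs in every handler in this file: the old *tsdb.TsdbQuery built around tsdb.NewTimeRange and sent through the package-level tsdb.HandleRequest becomes a plugins.DataQuery handed to the injected service. A condensed, hedged sketch of the new calling convention, using only names from the hunks above (queryRandomWalk itself is a hypothetical helper; from and to are the raw time-range strings):

	// Hypothetical helper mirroring GetTestDataRandomWalk above.
	func queryRandomWalk(c *models.ReqContext, hs *HTTPServer, dsInfo *models.DataSource, from, to string) response.Response {
		timeRange := plugins.NewDataTimeRange(from, to)
		request := plugins.DataQuery{
			TimeRange: &timeRange,
			User:      c.SignedInUser,
			Queries: []plugins.DataSubQuery{{
				RefID:         "A",
				IntervalMS:    1000,
				MaxDataPoints: 100,
				Model:         simplejson.NewFromAny(&util.DynMap{"scenario": "random_walk"}),
				DataSource:    dsInfo, // *models.DataSource resolved by the caller
			}},
		}

		// The request now goes through the injected service, not a package function.
		resp, err := hs.DataService.HandleRequest(c.Req.Context(), dsInfo, request)
		if err != nil {
			return response.Error(500, "Metric request error", err)
		}
		return response.JSON(200, resp)
	}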

@@ -14,7 +14,8 @@ import (
 )
 
 // ApplyRoute should use the plugin route data to set auth headers and custom headers.
-func ApplyRoute(ctx context.Context, req *http.Request, proxyPath string, route *plugins.AppPluginRoute, ds *models.DataSource) {
+func ApplyRoute(ctx context.Context, req *http.Request, proxyPath string, route *plugins.AppPluginRoute,
+	ds *models.DataSource) {
 	proxyPath = strings.TrimPrefix(proxyPath, route.Path)
 
 	data := templateData{

@@ -14,6 +14,7 @@ import (
 	"github.com/grafana/grafana/pkg/api/datasource"
 	"github.com/grafana/grafana/pkg/components/securejsondata"
 	"github.com/grafana/grafana/pkg/models"
+	"github.com/grafana/grafana/pkg/plugins"
 
 	"github.com/stretchr/testify/assert"
 	"github.com/stretchr/testify/require"
@@ -23,7 +24,6 @@ import (
 	"github.com/grafana/grafana/pkg/bus"
 	"github.com/grafana/grafana/pkg/components/simplejson"
 	"github.com/grafana/grafana/pkg/login/social"
-	"github.com/grafana/grafana/pkg/plugins"
 	"github.com/grafana/grafana/pkg/setting"
 	"github.com/grafana/grafana/pkg/util"
 )

@@ -20,7 +20,8 @@ type templateData struct {
 }
 
 // NewApiPluginProxy create a plugin proxy
-func NewApiPluginProxy(ctx *models.ReqContext, proxyPath string, route *plugins.AppPluginRoute, appID string, cfg *setting.Cfg) *httputil.ReverseProxy {
+func NewApiPluginProxy(ctx *models.ReqContext, proxyPath string, route *plugins.AppPluginRoute,
+	appID string, cfg *setting.Cfg) *httputil.ReverseProxy {
 	director := func(req *http.Request) {
 		query := models.GetPluginSettingByIdQuery{OrgId: ctx.OrgId, PluginId: appID}
 		if err := bus.Dispatch(&query); err != nil {

@@ -14,8 +14,9 @@ import (
 	"github.com/grafana/grafana/pkg/bus"
 	"github.com/grafana/grafana/pkg/models"
 	"github.com/grafana/grafana/pkg/plugins"
+	"github.com/grafana/grafana/pkg/plugins/adapters"
 	"github.com/grafana/grafana/pkg/plugins/backendplugin"
-	"github.com/grafana/grafana/pkg/plugins/datasource/wrapper"
+	"github.com/grafana/grafana/pkg/plugins/manager"
 	"github.com/grafana/grafana/pkg/setting"
 	"github.com/grafana/grafana/pkg/util/errutil"
 )
@@ -25,7 +26,7 @@ var ErrPluginNotFound error = errors.New("plugin not found, no installed plugin
 
 func (hs *HTTPServer) getPluginContext(pluginID string, user *models.SignedInUser) (backend.PluginContext, error) {
 	pc := backend.PluginContext{}
-	plugin, exists := plugins.Plugins[pluginID]
+	plugin, exists := manager.Plugins[pluginID]
 	if !exists {
 		return pc, ErrPluginNotFound
 	}
@@ -53,7 +54,7 @@ func (hs *HTTPServer) getPluginContext(pluginID string, user *models.SignedInUse
 	return backend.PluginContext{
 		OrgID:    user.OrgId,
 		PluginID: plugin.Id,
-		User:     wrapper.BackendUserFromSignedInUser(user),
+		User:     adapters.BackendUserFromSignedInUser(user),
 		AppInstanceSettings: &backend.AppInstanceSettings{
 			JSONData:                jsonData,
 			DecryptedSecureJSONData: decryptedSecureJSONData,
@@ -73,14 +74,14 @@ func (hs *HTTPServer) GetPluginList(c *models.ReqContext) response.Response {
 		coreFilter = "1"
 	}
 
-	pluginSettingsMap, err := plugins.GetPluginSettings(c.OrgId)
+	pluginSettingsMap, err := hs.PluginManager.GetPluginSettings(c.OrgId)
 	if err != nil {
 		return response.Error(500, "Failed to get list of plugins", err)
 	}
 
 	result := make(dtos.PluginList, 0)
-	for _, pluginDef := range plugins.Plugins {
+	for _, pluginDef := range manager.Plugins {
 		// filter out app sub plugins
 		if embeddedFilter == "0" && pluginDef.IncludedInAppId != "" {
 			continue
@@ -130,7 +131,7 @@ func (hs *HTTPServer) GetPluginList(c *models.ReqContext) response.Response {
 		}
 
 		// filter out built in data sources
-		if ds, exists := plugins.DataSources[pluginDef.Id]; exists {
+		if ds, exists := manager.DataSources[pluginDef.Id]; exists {
 			if ds.BuiltIn {
 				continue
 			}
@@ -146,7 +147,7 @@ func (hs *HTTPServer) GetPluginList(c *models.ReqContext) response.Response {
 func GetPluginSettingByID(c *models.ReqContext) response.Response {
 	pluginID := c.Params(":pluginId")
 
-	def, exists := plugins.Plugins[pluginID]
+	def, exists := manager.Plugins[pluginID]
 	if !exists {
 		return response.Error(404, "Plugin not found, no installed plugin with that id", nil)
 	}
@@ -169,7 +170,7 @@ func GetPluginSettingByID(c *models.ReqContext) response.Response {
 		SignatureOrg:  def.SignatureOrg,
 	}
 
-	if app, ok := plugins.Apps[def.Id]; ok {
+	if app, ok := manager.Apps[def.Id]; ok {
 		dto.Enabled = app.AutoEnabled
 		dto.Pinned = app.AutoEnabled
 	}
@@ -194,7 +195,7 @@ func UpdatePluginSetting(c *models.ReqContext, cmd models.UpdatePluginSettingCmd
 	cmd.OrgId = c.OrgId
 	cmd.PluginId = pluginID
 
-	if _, ok := plugins.Apps[cmd.PluginId]; !ok {
+	if _, ok := manager.Apps[cmd.PluginId]; !ok {
 		return response.Error(404, "Plugin not installed.", nil)
 	}
 
@@ -205,10 +206,10 @@
 	return response.Success("Plugin settings updated")
 }
 
-func GetPluginDashboards(c *models.ReqContext) response.Response {
+func (hs *HTTPServer) GetPluginDashboards(c *models.ReqContext) response.Response {
 	pluginID := c.Params(":pluginId")
 
-	list, err := plugins.GetPluginDashboards(c.OrgId, pluginID)
+	list, err := hs.PluginManager.GetPluginDashboards(c.OrgId, pluginID)
 	if err != nil {
 		var notFound plugins.PluginNotFoundError
 		if errors.As(err, &notFound) {
@@ -221,11 +222,11 @@
 	return response.JSON(200, list)
 }
 
-func GetPluginMarkdown(c *models.ReqContext) response.Response {
+func (hs *HTTPServer) GetPluginMarkdown(c *models.ReqContext) response.Response {
 	pluginID := c.Params(":pluginId")
 	name := c.Params(":name")
 
-	content, err := plugins.GetPluginMarkdown(pluginID, name)
+	content, err := hs.PluginManager.GetPluginMarkdown(pluginID, name)
 	if err != nil {
 		var notFound plugins.PluginNotFoundError
 		if errors.As(err, &notFound) {
@@ -237,7 +238,7 @@
 
 	// fallback try readme
 	if len(content) == 0 {
-		content, err = plugins.GetPluginMarkdown(pluginID, "readme")
+		content, err = hs.PluginManager.GetPluginMarkdown(pluginID, "readme")
 		if err != nil {
 			return response.Error(501, "Could not get markdown file", err)
 		}
@@ -248,27 +249,18 @@
 	return resp
 }
 
-func ImportDashboard(c *models.ReqContext, apiCmd dtos.ImportDashboardCommand) response.Response {
+func (hs *HTTPServer) ImportDashboard(c *models.ReqContext, apiCmd dtos.ImportDashboardCommand) response.Response {
 	if apiCmd.PluginId == "" && apiCmd.Dashboard == nil {
 		return response.Error(422, "Dashboard must be set", nil)
 	}
 
-	cmd := plugins.ImportDashboardCommand{
-		OrgId:     c.OrgId,
-		User:      c.SignedInUser,
-		PluginId:  apiCmd.PluginId,
-		Path:      apiCmd.Path,
-		Inputs:    apiCmd.Inputs,
-		Overwrite: apiCmd.Overwrite,
-		FolderId:  apiCmd.FolderId,
-		Dashboard: apiCmd.Dashboard,
-	}
-
-	if err := bus.Dispatch(&cmd); err != nil {
+	dashInfo, err := hs.PluginManager.ImportDashboard(apiCmd.PluginId, apiCmd.Path, c.OrgId, apiCmd.FolderId,
+		apiCmd.Dashboard, apiCmd.Overwrite, apiCmd.Inputs, c.SignedInUser, hs.DataService)
+	if err != nil {
 		return dashboardSaveErrorToApiResponse(err)
 	}
 
-	return response.JSON(200, cmd.Result)
+	return response.JSON(200, dashInfo)
 }
 
 // CollectPluginMetrics collect metrics from a plugin.
@@ -276,7 +268,7 @@ func ImportDashboard(c *models.ReqContext, apiCmd dtos.ImportDashboardCommand) r
 // /api/plugins/:pluginId/metrics
 func (hs *HTTPServer) CollectPluginMetrics(c *models.ReqContext) response.Response {
 	pluginID := c.Params("pluginId")
-	plugin, exists := plugins.Plugins[pluginID]
+	plugin, exists := manager.Plugins[pluginID]
 	if !exists {
 		return response.Error(404, "Plugin not found", nil)
 	}
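
Two access patterns coexist after this refactor, and the hunks above mix them: plugin metadata still lives in package-level registries, which simply moved from pkg/plugins to pkg/plugins/manager, while per-org operations go through the injected *manager.PluginManager. A hedged sketch that separates the two (pluginSummary and dashList are illustrative names, not taken from the commit):

	// pluginSummary is a hypothetical handler combining both access patterns.
	func (hs *HTTPServer) pluginSummary(c *models.ReqContext, pluginID string) response.Response {
		// Static registry lookup: plugins.Plugins became manager.Plugins.
		def, exists := manager.Plugins[pluginID]
		if !exists {
			return response.Error(404, "Plugin not found", nil)
		}

		// Per-org state now comes from the injected manager rather than package functions.
		dashList, err := hs.PluginManager.GetPluginDashboards(c.OrgId, pluginID)
		if err != nil {
			return response.Error(500, "Failed to get plugin dashboards", err)
		}

		return response.JSON(200, util.DynMap{"name": def.Name, "dashboards": dashList})
	}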

@@ -28,7 +28,7 @@ type Node interface {
 	ID() int64 // ID() allows the gonum graph node interface to be fulfilled
 	NodeType() NodeType
 	RefID() string
-	Execute(c context.Context, vars mathexp.Vars) (mathexp.Results, error)
+	Execute(c context.Context, vars mathexp.Vars, s *Service) (mathexp.Results, error)
 	String() string
 }
 
@@ -37,10 +37,10 @@ type DataPipeline []Node
 
 // execute runs all the command/datasource requests in the pipeline return a
 // map of the refId of the of each command
-func (dp *DataPipeline) execute(c context.Context) (mathexp.Vars, error) {
+func (dp *DataPipeline) execute(c context.Context, s *Service) (mathexp.Vars, error) {
 	vars := make(mathexp.Vars)
 	for _, node := range *dp {
-		res, err := node.Execute(c, vars)
+		res, err := node.Execute(c, vars, s)
 		if err != nil {
 			return nil, err
 		}
@@ -52,8 +52,8 @@ func (dp *DataPipeline) execute(c context.Context) (mathexp.Vars, error) {
 
 // BuildPipeline builds a graph of the nodes, and returns the nodes in an
 // executable order.
-func buildPipeline(req *backend.QueryDataRequest) (DataPipeline, error) {
-	graph, err := buildDependencyGraph(req)
+func (s *Service) buildPipeline(req *backend.QueryDataRequest) (DataPipeline, error) {
+	graph, err := s.buildDependencyGraph(req)
 	if err != nil {
 		return nil, err
 	}
@@ -67,8 +67,8 @@ func buildPipeline(req *backend.QueryDataRequest) (DataPipeline, error) {
 }
 
 // buildDependencyGraph returns a dependency graph for a set of queries.
-func buildDependencyGraph(req *backend.QueryDataRequest) (*simple.DirectedGraph, error) {
-	graph, err := buildGraph(req)
+func (s *Service) buildDependencyGraph(req *backend.QueryDataRequest) (*simple.DirectedGraph, error) {
+	graph, err := s.buildGraph(req)
 	if err != nil {
 		return nil, err
 	}
@@ -113,7 +113,7 @@ func buildNodeRegistry(g *simple.DirectedGraph) map[string]Node {
 }
 
 // buildGraph creates a new graph populated with nodes for every query.
-func buildGraph(req *backend.QueryDataRequest) (*simple.DirectedGraph, error) {
+func (s *Service) buildGraph(req *backend.QueryDataRequest) (*simple.DirectedGraph, error) {
 	dp := simple.NewDirectedGraph()
 
 	for _, query := range req.Queries {
@@ -139,7 +139,7 @@ func buildGraph(req *backend.QueryDataRequest) (*simple.DirectedGraph, error) {
 		case DatasourceName:
 			node, err = buildCMDNode(dp, rn)
 		default: // If it's not an expression query, it's a data source query.
-			node, err = buildDSNode(dp, rn, req.PluginContext.OrgID)
+			node, err = s.buildDSNode(dp, rn, req.PluginContext.OrgID)
 		}
 		if err != nil {
 			return nil, err

@@ -82,7 +82,7 @@ func (gn *CMDNode) NodeType() NodeType {
 // Execute runs the node and adds the results to vars. If the node requires
 // other nodes they must have already been executed and their results must
 // already by in vars.
-func (gn *CMDNode) Execute(ctx context.Context, vars mathexp.Vars) (mathexp.Results, error) {
+func (gn *CMDNode) Execute(ctx context.Context, vars mathexp.Vars, s *Service) (mathexp.Results, error) {
 	return gn.Command.Execute(ctx, vars)
 }
 
@@ -142,7 +142,7 @@ func (dn *DSNode) NodeType() NodeType {
 	return TypeDatasourceNode
 }
 
-func buildDSNode(dp *simple.DirectedGraph, rn *rawNode, orgID int64) (*DSNode, error) {
+func (s *Service) buildDSNode(dp *simple.DirectedGraph, rn *rawNode, orgID int64) (*DSNode, error) {
 	encodedQuery, err := json.Marshal(rn.Query)
 	if err != nil {
 		return nil, err
@@ -203,7 +203,7 @@ func buildDSNode(dp *simple.DirectedGraph, rn *rawNode, orgID int64) (*DSNode, e
 // Execute runs the node and adds the results to vars. If the node requires
 // other nodes they must have already been executed and their results must
 // already by in vars.
-func (dn *DSNode) Execute(ctx context.Context, vars mathexp.Vars) (mathexp.Results, error) {
+func (dn *DSNode) Execute(ctx context.Context, vars mathexp.Vars, s *Service) (mathexp.Results, error) {
 	pc := backend.PluginContext{
 		OrgID: dn.orgID,
 		DataSourceInstanceSettings: &backend.DataSourceInstanceSettings{
@@ -223,7 +223,7 @@ func (dn *DSNode) Execute(ctx context.Context, vars mathexp.Vars) (mathexp.Resul
 		},
 	}
 
-	resp, err := QueryData(ctx, &backend.QueryDataRequest{
+	resp, err := s.queryData(ctx, &backend.QueryDataRequest{
 		PluginContext: pc,
 		Queries:       q,
 	})

@@ -5,6 +5,7 @@ import (
 	"github.com/grafana/grafana-plugin-sdk-go/backend"
 
 	"github.com/grafana/grafana/pkg/setting"
+	"github.com/grafana/grafana/pkg/tsdb"
 )
 
@@ -21,7 +22,8 @@ const DatasourceUID = "-100"
 
 // Service is service representation for expression handling.
 type Service struct {
 	Cfg         *setting.Cfg
+	DataService *tsdb.Service
 }
 
 func (s *Service) isDisabled() bool {
@@ -33,13 +35,13 @@ func (s *Service) isDisabled() bool {
 
 // BuildPipeline builds a pipeline from a request.
 func (s *Service) BuildPipeline(req *backend.QueryDataRequest) (DataPipeline, error) {
-	return buildPipeline(req)
+	return s.buildPipeline(req)
}
 
 // ExecutePipeline executes an expression pipeline and returns all the results.
 func (s *Service) ExecutePipeline(ctx context.Context, pipeline DataPipeline) (*backend.QueryDataResponse, error) {
 	res := backend.NewQueryDataResponse()
-	vars, err := pipeline.execute(ctx)
+	vars, err := pipeline.execute(ctx, s)
 	if err != nil {
 		return nil, err
 	}
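
The expression service now carries the tsdb service explicitly, which is why the pipeline, graph-building, and node Execute signatures above all gained a *Service parameter: data-source nodes reach the datasource layer through s.queryData and s.DataService instead of package-level functions. A hedged sketch of the exported surface as a caller would use it (runExpressions is a hypothetical helper; the /api/ds/query handler in this commit instead calls WrapTransformData on the same struct):

	// runExpressions shows the BuildPipeline/ExecutePipeline pair end to end.
	func runExpressions(ctx context.Context, hs *HTTPServer, req *backend.QueryDataRequest) (*backend.QueryDataResponse, error) {
		exprService := expr.Service{
			Cfg:         hs.Cfg,
			DataService: hs.DataService, // the injected *tsdb.Service introduced by this commit
		}

		pipeline, err := exprService.BuildPipeline(req)
		if err != nil {
			return nil, err
		}

		// ExecutePipeline walks the dependency graph; datasource nodes are served via DataService.
		return exprService.ExecutePipeline(ctx, pipeline)
	}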

@@ -12,6 +12,9 @@ import (
 	"github.com/grafana/grafana-plugin-sdk-go/data"
 	"github.com/grafana/grafana/pkg/bus"
 	"github.com/grafana/grafana/pkg/models"
+	"github.com/grafana/grafana/pkg/plugins"
+	"github.com/grafana/grafana/pkg/plugins/backendplugin"
+	"github.com/grafana/grafana/pkg/plugins/manager"
 	"github.com/grafana/grafana/pkg/tsdb"
 	"github.com/stretchr/testify/require"
 )
@@ -21,9 +24,21 @@ func TestService(t *testing.T) {
 		data.NewField("time", nil, []*time.Time{utp(1)}),
 		data.NewField("value", nil, []*float64{fp(2)}))
 
-	registerEndPoint(dsDF)
-
-	s := Service{}
+	dataSvc := tsdb.NewService()
+	dataSvc.PluginManager = &manager.PluginManager{
+		BackendPluginManager: fakeBackendPM{},
+	}
+	s := Service{DataService: &dataSvc}
+	me := &mockEndpoint{
+		Frames: []*data.Frame{dsDF},
+	}
+	s.DataService.RegisterQueryHandler("test", func(*models.DataSource) (plugins.DataPlugin, error) {
+		return me, nil
+	})
+	bus.AddHandler("test", func(query *models.GetDataSourceQuery) error {
+		query.Result = &models.DataSource{Id: 1, OrgId: 1, Type: "test"}
+		return nil
+	})
 
 	queries := []backend.DataQuery{
 		{
@@ -87,27 +102,21 @@ type mockEndpoint struct {
 	Frames data.Frames
 }
 
-func (me *mockEndpoint) Query(ctx context.Context, ds *models.DataSource, query *tsdb.TsdbQuery) (*tsdb.Response, error) {
-	return &tsdb.Response{
-		Results: map[string]*tsdb.QueryResult{
+func (me *mockEndpoint) DataQuery(ctx context.Context, ds *models.DataSource, query plugins.DataQuery) (
+	plugins.DataResponse, error) {
+	return plugins.DataResponse{
+		Results: map[string]plugins.DataQueryResult{
 			"A": {
-				Dataframes: tsdb.NewDecodedDataFrames(me.Frames),
+				Dataframes: plugins.NewDecodedDataFrames(me.Frames),
 			},
 		},
 	}, nil
 }
 
-func registerEndPoint(df ...*data.Frame) {
-	me := &mockEndpoint{
-		Frames: df,
-	}
-
-	endpoint := func(dsInfo *models.DataSource) (tsdb.TsdbQueryEndpoint, error) {
-		return me, nil
-	}
-	tsdb.RegisterTsdbQueryEndpoint("test", endpoint)
-
-	bus.AddHandler("test", func(query *models.GetDataSourceQuery) error {
-		query.Result = &models.DataSource{Id: 1, OrgId: 1, Type: "test"}
-		return nil
-	})
+type fakeBackendPM struct {
+	backendplugin.Manager
+}
+
+func (pm fakeBackendPM) GetDataPlugin(string) interface{} {
+	return nil
 }

View File

@ -10,7 +10,7 @@ import (
"github.com/grafana/grafana/pkg/bus" "github.com/grafana/grafana/pkg/bus"
"github.com/grafana/grafana/pkg/components/simplejson" "github.com/grafana/grafana/pkg/components/simplejson"
"github.com/grafana/grafana/pkg/models" "github.com/grafana/grafana/pkg/models"
"github.com/grafana/grafana/pkg/tsdb" "github.com/grafana/grafana/pkg/plugins"
"github.com/prometheus/client_golang/prometheus" "github.com/prometheus/client_golang/prometheus"
"golang.org/x/net/context" "golang.org/x/net/context"
"google.golang.org/grpc/codes" "google.golang.org/grpc/codes"
@ -35,7 +35,7 @@ func init() {
} }
// WrapTransformData creates and executes transform requests // WrapTransformData creates and executes transform requests
func (s *Service) WrapTransformData(ctx context.Context, query *tsdb.TsdbQuery) (*tsdb.Response, error) { func (s *Service) WrapTransformData(ctx context.Context, query plugins.DataQuery) (plugins.DataResponse, error) {
sdkReq := &backend.QueryDataRequest{ sdkReq := &backend.QueryDataRequest{
PluginContext: backend.PluginContext{ PluginContext: backend.PluginContext{
OrgID: query.User.OrgId, OrgID: query.User.OrgId,
@ -46,12 +46,12 @@ func (s *Service) WrapTransformData(ctx context.Context, query *tsdb.TsdbQuery)
for _, q := range query.Queries { for _, q := range query.Queries {
modelJSON, err := q.Model.MarshalJSON() modelJSON, err := q.Model.MarshalJSON()
if err != nil { if err != nil {
return nil, err return plugins.DataResponse{}, err
} }
sdkReq.Queries = append(sdkReq.Queries, backend.DataQuery{ sdkReq.Queries = append(sdkReq.Queries, backend.DataQuery{
JSON: modelJSON, JSON: modelJSON,
Interval: time.Duration(q.IntervalMs) * time.Millisecond, Interval: time.Duration(q.IntervalMS) * time.Millisecond,
RefID: q.RefId, RefID: q.RefID,
MaxDataPoints: q.MaxDataPoints, MaxDataPoints: q.MaxDataPoints,
QueryType: q.QueryType, QueryType: q.QueryType,
TimeRange: backend.TimeRange{ TimeRange: backend.TimeRange{
@ -62,16 +62,16 @@ func (s *Service) WrapTransformData(ctx context.Context, query *tsdb.TsdbQuery)
} }
pbRes, err := s.TransformData(ctx, sdkReq) pbRes, err := s.TransformData(ctx, sdkReq)
if err != nil { if err != nil {
return nil, err return plugins.DataResponse{}, err
} }
tR := &tsdb.Response{ tR := plugins.DataResponse{
Results: make(map[string]*tsdb.QueryResult, len(pbRes.Responses)), Results: make(map[string]plugins.DataQueryResult, len(pbRes.Responses)),
} }
for refID, res := range pbRes.Responses { for refID, res := range pbRes.Responses {
tRes := &tsdb.QueryResult{ tRes := plugins.DataQueryResult{
RefId: refID, RefID: refID,
Dataframes: tsdb.NewDecodedDataFrames(res.Frames), Dataframes: plugins.NewDecodedDataFrames(res.Frames),
} }
// if len(res.JsonMeta) != 0 { // if len(res.JsonMeta) != 0 {
// tRes.Meta = simplejson.NewFromAny(res.JsonMeta) // tRes.Meta = simplejson.NewFromAny(res.JsonMeta)
@ -158,9 +158,9 @@ func hiddenRefIDs(queries []backend.DataQuery) (map[string]struct{}, error) {
return hidden, nil return hidden, nil
} }
// QueryData is called used to query datasources that are not expression commands, but are used // queryData is called used to query datasources that are not expression commands, but are used
// alongside expressions and/or are the input of an expression command. // alongside expressions and/or are the input of an expression command.
func QueryData(ctx context.Context, req *backend.QueryDataRequest) (*backend.QueryDataResponse, error) { func (s *Service) queryData(ctx context.Context, req *backend.QueryDataRequest) (*backend.QueryDataResponse, error) {
if len(req.Queries) == 0 { if len(req.Queries) == 0 {
return nil, fmt.Errorf("zero queries found in datasource request") return nil, fmt.Errorf("zero queries found in datasource request")
} }
@ -184,15 +184,15 @@ func QueryData(ctx context.Context, req *backend.QueryDataRequest) (*backend.Que
} }
// Convert plugin-model (datasource) queries to tsdb queries // Convert plugin-model (datasource) queries to tsdb queries
queries := make([]*tsdb.Query, len(req.Queries)) queries := make([]plugins.DataSubQuery, len(req.Queries))
for i, query := range req.Queries { for i, query := range req.Queries {
sj, err := simplejson.NewJson(query.JSON) sj, err := simplejson.NewJson(query.JSON)
if err != nil { if err != nil {
return nil, err return nil, err
} }
queries[i] = &tsdb.Query{ queries[i] = plugins.DataSubQuery{
RefId: query.RefID, RefID: query.RefID,
IntervalMs: query.Interval.Milliseconds(), IntervalMS: query.Interval.Milliseconds(),
MaxDataPoints: query.MaxDataPoints, MaxDataPoints: query.MaxDataPoints,
QueryType: query.QueryType, QueryType: query.QueryType,
DataSource: getDsInfo.Result, DataSource: getDsInfo.Result,
@ -201,20 +201,21 @@ func QueryData(ctx context.Context, req *backend.QueryDataRequest) (*backend.Que
} }
// For now take Time Range from first query. // For now take Time Range from first query.
timeRange := tsdb.NewTimeRange(strconv.FormatInt(req.Queries[0].TimeRange.From.Unix()*1000, 10), strconv.FormatInt(req.Queries[0].TimeRange.To.Unix()*1000, 10)) timeRange := plugins.NewDataTimeRange(strconv.FormatInt(req.Queries[0].TimeRange.From.Unix()*1000, 10),
strconv.FormatInt(req.Queries[0].TimeRange.To.Unix()*1000, 10))
tQ := &tsdb.TsdbQuery{ tQ := plugins.DataQuery{
TimeRange: timeRange, TimeRange: &timeRange,
Queries: queries, Queries: queries,
} }
// Execute the converted queries // Execute the converted queries
tsdbRes, err := tsdb.HandleRequest(ctx, getDsInfo.Result, tQ) tsdbRes, err := s.DataService.HandleRequest(ctx, getDsInfo.Result, tQ)
if err != nil { if err != nil {
return nil, err return nil, err
} }
// Convert tsdb results (map) to plugin-model/datasource (slice) results. // Convert tsdb results (map) to plugin-model/datasource (slice) results.
// Only error, tsdb.Series, and encoded Dataframes responses are mapped. // Only error, Series, and encoded Dataframes responses are mapped.
responses := make(map[string]backend.DataResponse, len(tsdbRes.Results)) responses := make(map[string]backend.DataResponse, len(tsdbRes.Results))
for refID, res := range tsdbRes.Results { for refID, res := range tsdbRes.Results {
pRes := backend.DataResponse{} pRes := backend.DataResponse{}
@ -233,7 +234,7 @@ func QueryData(ctx context.Context, req *backend.QueryDataRequest) (*backend.Que
} }
for _, series := range res.Series { for _, series := range res.Series {
frame, err := tsdb.SeriesToFrame(series) frame, err := plugins.SeriesToFrame(series)
frame.RefID = refID frame.RefID = refID
if err != nil { if err != nil {
return nil, err return nil, err
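
For orientation, a minimal sketch (not part of this diff) of building the plugins.DataQuery value that the refactored expression service and data plugins consume; newDataQuery is a hypothetical helper and the refID/interval/max-data-points literals are placeholders, while the field names follow the conversions shown above.

package example

import (
    "strconv"
    "time"

    "github.com/grafana/grafana/pkg/components/simplejson"
    "github.com/grafana/grafana/pkg/models"
    "github.com/grafana/grafana/pkg/plugins"
)

// newDataQuery assembles a plugins.DataQuery with a single sub-query,
// mirroring the field mapping performed by the expression service above.
func newDataQuery(ds *models.DataSource, user *models.SignedInUser, model *simplejson.Json,
    from, to time.Time) plugins.DataQuery {
    // NewDataTimeRange takes epoch-millisecond strings, as in the code above.
    timeRange := plugins.NewDataTimeRange(
        strconv.FormatInt(from.Unix()*1000, 10),
        strconv.FormatInt(to.Unix()*1000, 10),
    )
    return plugins.DataQuery{
        TimeRange: &timeRange,
        User:      user,
        Queries: []plugins.DataSubQuery{
            {
                RefID:         "A",
                IntervalMS:    1000,
                MaxDataPoints: 500,
                DataSource:    ds,
                Model:         model,
            },
        },
    }
}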

View File

@ -12,7 +12,7 @@ import (
"github.com/grafana/grafana/pkg/infra/metrics" "github.com/grafana/grafana/pkg/infra/metrics"
"github.com/grafana/grafana/pkg/models" "github.com/grafana/grafana/pkg/models"
"github.com/grafana/grafana/pkg/plugins" "github.com/grafana/grafana/pkg/plugins/manager"
"github.com/grafana/grafana/pkg/setting" "github.com/grafana/grafana/pkg/setting"
) )
@ -53,9 +53,9 @@ func (uss *UsageStatsService) GetUsageReport(ctx context.Context) (UsageReport,
metrics["stats.users.count"] = statsQuery.Result.Users metrics["stats.users.count"] = statsQuery.Result.Users
metrics["stats.orgs.count"] = statsQuery.Result.Orgs metrics["stats.orgs.count"] = statsQuery.Result.Orgs
metrics["stats.playlist.count"] = statsQuery.Result.Playlists metrics["stats.playlist.count"] = statsQuery.Result.Playlists
metrics["stats.plugins.apps.count"] = len(plugins.Apps) metrics["stats.plugins.apps.count"] = len(manager.Apps)
metrics["stats.plugins.panels.count"] = len(plugins.Panels) metrics["stats.plugins.panels.count"] = len(manager.Panels)
metrics["stats.plugins.datasources.count"] = len(plugins.DataSources) metrics["stats.plugins.datasources.count"] = len(manager.DataSources)
metrics["stats.alerts.count"] = statsQuery.Result.Alerts metrics["stats.alerts.count"] = statsQuery.Result.Alerts
metrics["stats.active_users.count"] = statsQuery.Result.ActiveUsers metrics["stats.active_users.count"] = statsQuery.Result.ActiveUsers
metrics["stats.datasources.count"] = statsQuery.Result.Datasources metrics["stats.datasources.count"] = statsQuery.Result.Datasources
@ -291,7 +291,7 @@ func (uss *UsageStatsService) updateTotalStats() {
} }
func (uss *UsageStatsService) shouldBeReported(dsType string) bool { func (uss *UsageStatsService) shouldBeReported(dsType string) bool {
ds, ok := plugins.DataSources[dsType] ds, ok := manager.DataSources[dsType]
if !ok { if !ok {
return false return false
} }

View File

@ -10,6 +10,8 @@ import (
"testing" "testing"
"time" "time"
"github.com/grafana/grafana/pkg/plugins"
"github.com/grafana/grafana/pkg/plugins/manager"
"github.com/grafana/grafana/pkg/services/alerting" "github.com/grafana/grafana/pkg/services/alerting"
"github.com/grafana/grafana/pkg/services/licensing" "github.com/grafana/grafana/pkg/services/licensing"
"github.com/stretchr/testify/require" "github.com/stretchr/testify/require"
@ -20,7 +22,6 @@ import (
"github.com/grafana/grafana/pkg/bus" "github.com/grafana/grafana/pkg/bus"
"github.com/grafana/grafana/pkg/components/simplejson" "github.com/grafana/grafana/pkg/components/simplejson"
"github.com/grafana/grafana/pkg/models" "github.com/grafana/grafana/pkg/models"
"github.com/grafana/grafana/pkg/plugins"
"github.com/grafana/grafana/pkg/services/sqlstore" "github.com/grafana/grafana/pkg/services/sqlstore"
"github.com/grafana/grafana/pkg/setting" "github.com/grafana/grafana/pkg/setting"
"github.com/stretchr/testify/assert" "github.com/stretchr/testify/assert"
@ -248,9 +249,9 @@ func TestMetrics(t *testing.T) {
assert.Equal(t, getSystemStatsQuery.Result.Users, metrics.Get("stats.users.count").MustInt64()) assert.Equal(t, getSystemStatsQuery.Result.Users, metrics.Get("stats.users.count").MustInt64())
assert.Equal(t, getSystemStatsQuery.Result.Orgs, metrics.Get("stats.orgs.count").MustInt64()) assert.Equal(t, getSystemStatsQuery.Result.Orgs, metrics.Get("stats.orgs.count").MustInt64())
assert.Equal(t, getSystemStatsQuery.Result.Playlists, metrics.Get("stats.playlist.count").MustInt64()) assert.Equal(t, getSystemStatsQuery.Result.Playlists, metrics.Get("stats.playlist.count").MustInt64())
assert.Equal(t, len(plugins.Apps), metrics.Get("stats.plugins.apps.count").MustInt()) assert.Equal(t, len(manager.Apps), metrics.Get("stats.plugins.apps.count").MustInt())
assert.Equal(t, len(plugins.Panels), metrics.Get("stats.plugins.panels.count").MustInt()) assert.Equal(t, len(manager.Panels), metrics.Get("stats.plugins.panels.count").MustInt())
assert.Equal(t, len(plugins.DataSources), metrics.Get("stats.plugins.datasources.count").MustInt()) assert.Equal(t, len(manager.DataSources), metrics.Get("stats.plugins.datasources.count").MustInt())
assert.Equal(t, getSystemStatsQuery.Result.Alerts, metrics.Get("stats.alerts.count").MustInt64()) assert.Equal(t, getSystemStatsQuery.Result.Alerts, metrics.Get("stats.alerts.count").MustInt64())
assert.Equal(t, getSystemStatsQuery.Result.ActiveUsers, metrics.Get("stats.active_users.count").MustInt64()) assert.Equal(t, getSystemStatsQuery.Result.ActiveUsers, metrics.Get("stats.active_users.count").MustInt64())
assert.Equal(t, getSystemStatsQuery.Result.Datasources, metrics.Get("stats.datasources.count").MustInt64()) assert.Equal(t, getSystemStatsQuery.Result.Datasources, metrics.Get("stats.datasources.count").MustInt64())
@ -530,19 +531,19 @@ func (aum *alertingUsageMock) QueryUsageStats() (*alerting.UsageStats, error) {
} }
func setupSomeDataSourcePlugins(t *testing.T) { func setupSomeDataSourcePlugins(t *testing.T) {
originalDataSources := plugins.DataSources originalDataSources := manager.DataSources
t.Cleanup(func() { plugins.DataSources = originalDataSources }) t.Cleanup(func() { manager.DataSources = originalDataSources })
plugins.DataSources = make(map[string]*plugins.DataSourcePlugin) manager.DataSources = make(map[string]*plugins.DataSourcePlugin)
plugins.DataSources[models.DS_ES] = &plugins.DataSourcePlugin{ manager.DataSources[models.DS_ES] = &plugins.DataSourcePlugin{
FrontendPluginBase: plugins.FrontendPluginBase{ FrontendPluginBase: plugins.FrontendPluginBase{
PluginBase: plugins.PluginBase{ PluginBase: plugins.PluginBase{
Signature: "internal", Signature: "internal",
}, },
}, },
} }
plugins.DataSources[models.DS_PROMETHEUS] = &plugins.DataSourcePlugin{ manager.DataSources[models.DS_PROMETHEUS] = &plugins.DataSourcePlugin{
FrontendPluginBase: plugins.FrontendPluginBase{ FrontendPluginBase: plugins.FrontendPluginBase{
PluginBase: plugins.PluginBase{ PluginBase: plugins.PluginBase{
Signature: "internal", Signature: "internal",
@ -550,7 +551,7 @@ func setupSomeDataSourcePlugins(t *testing.T) {
}, },
} }
plugins.DataSources[models.DS_GRAPHITE] = &plugins.DataSourcePlugin{ manager.DataSources[models.DS_GRAPHITE] = &plugins.DataSourcePlugin{
FrontendPluginBase: plugins.FrontendPluginBase{ FrontendPluginBase: plugins.FrontendPluginBase{
PluginBase: plugins.PluginBase{ PluginBase: plugins.PluginBase{
Signature: "internal", Signature: "internal",
@ -558,7 +559,7 @@ func setupSomeDataSourcePlugins(t *testing.T) {
}, },
} }
plugins.DataSources[models.DS_MYSQL] = &plugins.DataSourcePlugin{ manager.DataSources[models.DS_MYSQL] = &plugins.DataSourcePlugin{
FrontendPluginBase: plugins.FrontendPluginBase{ FrontendPluginBase: plugins.FrontendPluginBase{
PluginBase: plugins.PluginBase{ PluginBase: plugins.PluginBase{
Signature: "internal", Signature: "internal",

View File

@ -0,0 +1,42 @@
// Package adapters contains plugin SDK adapters.
package adapters
import (
"github.com/grafana/grafana-plugin-sdk-go/backend"
"github.com/grafana/grafana/pkg/models"
)
// ModelToInstanceSettings converts a models.DataSource to a backend.DataSourceInstanceSettings.
func ModelToInstanceSettings(ds *models.DataSource) (*backend.DataSourceInstanceSettings, error) {
jsonDataBytes, err := ds.JsonData.MarshalJSON()
if err != nil {
return nil, err
}
return &backend.DataSourceInstanceSettings{
ID: ds.Id,
Name: ds.Name,
URL: ds.Url,
Database: ds.Database,
User: ds.User,
BasicAuthEnabled: ds.BasicAuth,
BasicAuthUser: ds.BasicAuthUser,
JSONData: jsonDataBytes,
DecryptedSecureJSONData: ds.DecryptedValues(),
Updated: ds.Updated,
}, nil
}
// BackendUserFromSignedInUser converts Grafana's SignedInUser model
// to the backend plugin's model.
func BackendUserFromSignedInUser(su *models.SignedInUser) *backend.User {
if su == nil {
return nil
}
return &backend.User{
Login: su.Login,
Name: su.Name,
Email: su.Email,
Role: string(su.OrgRole),
}
}
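
A minimal sketch of how the two adapters are meant to be combined when preparing an SDK request context, matching the plugin-context construction elsewhere in this change; buildPluginContext is a hypothetical helper, not part of the diff.

package example

import (
    "github.com/grafana/grafana-plugin-sdk-go/backend"

    "github.com/grafana/grafana/pkg/models"
    "github.com/grafana/grafana/pkg/plugins/adapters"
)

// buildPluginContext converts Grafana's datasource and user models into the
// backend SDK's PluginContext using the adapters package.
func buildPluginContext(pluginID string, ds *models.DataSource,
    user *models.SignedInUser) (backend.PluginContext, error) {
    settings, err := adapters.ModelToInstanceSettings(ds)
    if err != nil {
        return backend.PluginContext{}, err
    }
    return backend.PluginContext{
        OrgID:                      ds.OrgId,
        PluginID:                   pluginID,
        User:                       adapters.BackendUserFromSignedInUser(user),
        DataSourceInstanceSettings: settings,
    }, nil
}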

View File

@ -59,33 +59,29 @@ type JwtTokenAuth struct {
Params map[string]string `json:"params"` Params map[string]string `json:"params"`
} }
func (app *AppPlugin) Load(decoder *json.Decoder, base *PluginBase, backendPluginManager backendplugin.Manager) error { func (app *AppPlugin) Load(decoder *json.Decoder, base *PluginBase, backendPluginManager backendplugin.Manager) (
interface{}, error) {
if err := decoder.Decode(app); err != nil { if err := decoder.Decode(app); err != nil {
return err return nil, err
}
if err := app.registerPlugin(base); err != nil {
return err
} }
if app.Backend { if app.Backend {
cmd := ComposePluginStartCommand(app.Executable) cmd := ComposePluginStartCommand(app.Executable)
fullpath := filepath.Join(app.PluginDir, cmd) fullpath := filepath.Join(base.PluginDir, cmd)
factory := grpcplugin.NewBackendPlugin(app.Id, fullpath, grpcplugin.PluginStartFuncs{}) factory := grpcplugin.NewBackendPlugin(app.Id, fullpath, grpcplugin.PluginStartFuncs{})
if err := backendPluginManager.Register(app.Id, factory); err != nil { if err := backendPluginManager.Register(app.Id, factory); err != nil {
return errutil.Wrapf(err, "failed to register backend plugin") return nil, errutil.Wrapf(err, "failed to register backend plugin")
} }
} }
Apps[app.Id] = app return app, nil
return nil
} }
func (app *AppPlugin) initApp() { func (app *AppPlugin) InitApp(panels map[string]*PanelPlugin, dataSources map[string]*DataSourcePlugin) []*PluginStaticRoute {
app.initFrontendPlugin() staticRoutes := app.InitFrontendPlugin()
// check if we have child panels // check if we have child panels
for _, panel := range Panels { for _, panel := range panels {
if strings.HasPrefix(panel.PluginDir, app.PluginDir) { if strings.HasPrefix(panel.PluginDir, app.PluginDir) {
panel.setPathsBasedOnApp(app) panel.setPathsBasedOnApp(app)
app.FoundChildPlugins = append(app.FoundChildPlugins, &PluginInclude{ app.FoundChildPlugins = append(app.FoundChildPlugins, &PluginInclude{
@ -97,7 +93,7 @@ func (app *AppPlugin) initApp() {
} }
// check if we have child datasources // check if we have child datasources
for _, ds := range DataSources { for _, ds := range dataSources {
if strings.HasPrefix(ds.PluginDir, app.PluginDir) { if strings.HasPrefix(ds.PluginDir, app.PluginDir) {
ds.setPathsBasedOnApp(app) ds.setPathsBasedOnApp(app)
app.FoundChildPlugins = append(app.FoundChildPlugins, &PluginInclude{ app.FoundChildPlugins = append(app.FoundChildPlugins, &PluginInclude{
@ -120,4 +116,6 @@ func (app *AppPlugin) initApp() {
app.DefaultNavUrl = setting.AppSubUrl + "/dashboard/db/" + include.Slug app.DefaultNavUrl = setting.AppSubUrl + "/dashboard/db/" + include.Slug
} }
} }
return staticRoutes
} }

View File

@ -0,0 +1,9 @@
// Package backendplugin contains backend plugin related logic.
package backendplugin
import (
"github.com/grafana/grafana/pkg/infra/log"
)
// PluginFactoryFunc is a function type for creating a Plugin.
type PluginFactoryFunc func(pluginID string, logger log.Logger, env []string) (Plugin, error)

View File

@ -6,14 +6,16 @@ import (
"github.com/grafana/grafana-plugin-sdk-go/backend" "github.com/grafana/grafana-plugin-sdk-go/backend"
"github.com/grafana/grafana/pkg/infra/log" "github.com/grafana/grafana/pkg/infra/log"
"github.com/grafana/grafana/pkg/models" "github.com/grafana/grafana/pkg/models"
"github.com/grafana/grafana/pkg/plugins"
"github.com/grafana/grafana/pkg/plugins/backendplugin" "github.com/grafana/grafana/pkg/plugins/backendplugin"
"github.com/grafana/grafana/pkg/tsdb" "github.com/grafana/grafana/pkg/plugins/backendplugin/instrumentation"
) )
// corePlugin represents a plugin that's part of Grafana core. // corePlugin represents a plugin that's part of Grafana core.
type corePlugin struct { type corePlugin struct {
pluginID string isDataPlugin bool
logger log.Logger pluginID string
logger log.Logger
backend.CheckHealthHandler backend.CheckHealthHandler
backend.CallResourceHandler backend.CallResourceHandler
backend.QueryDataHandler backend.QueryDataHandler
@ -21,7 +23,7 @@ type corePlugin struct {
// New returns a new backendplugin.PluginFactoryFunc for creating a core (built-in) backendplugin.Plugin. // New returns a new backendplugin.PluginFactoryFunc for creating a core (built-in) backendplugin.Plugin.
func New(opts backend.ServeOpts) backendplugin.PluginFactoryFunc { func New(opts backend.ServeOpts) backendplugin.PluginFactoryFunc {
return backendplugin.PluginFactoryFunc(func(pluginID string, logger log.Logger, env []string) (backendplugin.Plugin, error) { return func(pluginID string, logger log.Logger, env []string) (backendplugin.Plugin, error) {
return &corePlugin{ return &corePlugin{
pluginID: pluginID, pluginID: pluginID,
logger: logger, logger: logger,
@ -29,7 +31,7 @@ func New(opts backend.ServeOpts) backendplugin.PluginFactoryFunc {
CallResourceHandler: opts.CallResourceHandler, CallResourceHandler: opts.CallResourceHandler,
QueryDataHandler: opts.QueryDataHandler, QueryDataHandler: opts.QueryDataHandler,
}, nil }, nil
}) }
} }
func (cp *corePlugin) PluginID() string { func (cp *corePlugin) PluginID() string {
@ -40,11 +42,21 @@ func (cp *corePlugin) Logger() log.Logger {
return cp.logger return cp.logger
} }
func (cp *corePlugin) CanHandleDataQueries() bool {
return cp.isDataPlugin
}
func (cp *corePlugin) DataQuery(ctx context.Context, dsInfo *models.DataSource,
tsdbQuery plugins.DataQuery) (plugins.DataResponse, error) {
// TODO: Inline the adapter, since it shouldn't be necessary
adapter := newQueryEndpointAdapter(cp.pluginID, cp.logger, instrumentation.InstrumentQueryDataHandler(
cp.QueryDataHandler))
return adapter.DataQuery(ctx, dsInfo, tsdbQuery)
}
func (cp *corePlugin) Start(ctx context.Context) error { func (cp *corePlugin) Start(ctx context.Context) error {
if cp.QueryDataHandler != nil { if cp.QueryDataHandler != nil {
tsdb.RegisterTsdbQueryEndpoint(cp.pluginID, func(dsInfo *models.DataSource) (tsdb.TsdbQueryEndpoint, error) { cp.isDataPlugin = true
return newQueryEndpointAdapter(cp.pluginID, cp.logger, backendplugin.InstrumentQueryDataHandler(cp.QueryDataHandler)), nil
})
} }
return nil return nil
} }
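
A minimal sketch of how a core data-source plugin is wired up after this change: the factory produced by coreplugin.New is handed to the backend plugin manager, and the resulting plugin reports CanHandleDataQueries once started instead of registering a tsdb query endpoint. registerCoreDataSource is a hypothetical helper and the coreplugin import path is assumed.

package example

import (
    "github.com/grafana/grafana-plugin-sdk-go/backend"

    "github.com/grafana/grafana/pkg/plugins/backendplugin"
    "github.com/grafana/grafana/pkg/plugins/backendplugin/coreplugin"
)

// registerCoreDataSource registers a core (built-in) data source that serves
// queries through the given SDK QueryDataHandler.
func registerCoreDataSource(manager backendplugin.Manager, pluginID string,
    handler backend.QueryDataHandler) error {
    factory := coreplugin.New(backend.ServeOpts{
        QueryDataHandler: handler,
    })
    return manager.Register(pluginID, factory)
}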

View File

@ -36,11 +36,13 @@ func TestCorePlugin(t *testing.T) {
checkHealthCalled := false checkHealthCalled := false
callResourceCalled := false callResourceCalled := false
factory := coreplugin.New(backend.ServeOpts{ factory := coreplugin.New(backend.ServeOpts{
CheckHealthHandler: backend.CheckHealthHandlerFunc(func(ctx context.Context, req *backend.CheckHealthRequest) (*backend.CheckHealthResult, error) { CheckHealthHandler: backend.CheckHealthHandlerFunc(func(ctx context.Context,
req *backend.CheckHealthRequest) (*backend.CheckHealthResult, error) {
checkHealthCalled = true checkHealthCalled = true
return nil, nil return nil, nil
}), }),
CallResourceHandler: backend.CallResourceHandlerFunc(func(ctx context.Context, req *backend.CallResourceRequest, sender backend.CallResourceResponseSender) error { CallResourceHandler: backend.CallResourceHandlerFunc(func(ctx context.Context,
req *backend.CallResourceRequest, sender backend.CallResourceResponseSender) error {
callResourceCalled = true callResourceCalled = true
return nil return nil
}), }),

View File

@ -7,11 +7,11 @@ import (
"github.com/grafana/grafana-plugin-sdk-go/backend" "github.com/grafana/grafana-plugin-sdk-go/backend"
"github.com/grafana/grafana/pkg/infra/log" "github.com/grafana/grafana/pkg/infra/log"
"github.com/grafana/grafana/pkg/models" "github.com/grafana/grafana/pkg/models"
"github.com/grafana/grafana/pkg/plugins/datasource/wrapper" "github.com/grafana/grafana/pkg/plugins"
"github.com/grafana/grafana/pkg/tsdb" "github.com/grafana/grafana/pkg/plugins/adapters"
) )
func newQueryEndpointAdapter(pluginID string, logger log.Logger, handler backend.QueryDataHandler) tsdb.TsdbQueryEndpoint { func newQueryEndpointAdapter(pluginID string, logger log.Logger, handler backend.QueryDataHandler) plugins.DataPlugin {
return &queryEndpointAdapter{ return &queryEndpointAdapter{
pluginID: pluginID, pluginID: pluginID,
logger: logger, logger: logger,
@ -45,17 +45,18 @@ func modelToInstanceSettings(ds *models.DataSource) (*backend.DataSourceInstance
}, nil }, nil
} }
func (a *queryEndpointAdapter) Query(ctx context.Context, ds *models.DataSource, query *tsdb.TsdbQuery) (*tsdb.Response, error) { func (a *queryEndpointAdapter) DataQuery(ctx context.Context, ds *models.DataSource, query plugins.DataQuery) (
plugins.DataResponse, error) {
instanceSettings, err := modelToInstanceSettings(ds) instanceSettings, err := modelToInstanceSettings(ds)
if err != nil { if err != nil {
return nil, err return plugins.DataResponse{}, err
} }
req := &backend.QueryDataRequest{ req := &backend.QueryDataRequest{
PluginContext: backend.PluginContext{ PluginContext: backend.PluginContext{
OrgID: ds.OrgId, OrgID: ds.OrgId,
PluginID: a.pluginID, PluginID: a.pluginID,
User: wrapper.BackendUserFromSignedInUser(query.User), User: adapters.BackendUserFromSignedInUser(query.User),
DataSourceInstanceSettings: instanceSettings, DataSourceInstanceSettings: instanceSettings,
}, },
Queries: []backend.DataQuery{}, Queries: []backend.DataQuery{},
@ -65,11 +66,11 @@ func (a *queryEndpointAdapter) Query(ctx context.Context, ds *models.DataSource,
for _, q := range query.Queries { for _, q := range query.Queries {
modelJSON, err := q.Model.MarshalJSON() modelJSON, err := q.Model.MarshalJSON()
if err != nil { if err != nil {
return nil, err return plugins.DataResponse{}, err
} }
req.Queries = append(req.Queries, backend.DataQuery{ req.Queries = append(req.Queries, backend.DataQuery{
RefID: q.RefId, RefID: q.RefID,
Interval: time.Duration(q.IntervalMs) * time.Millisecond, Interval: time.Duration(q.IntervalMS) * time.Millisecond,
MaxDataPoints: q.MaxDataPoints, MaxDataPoints: q.MaxDataPoints,
TimeRange: backend.TimeRange{ TimeRange: backend.TimeRange{
From: query.TimeRange.GetFromAsTimeUTC(), From: query.TimeRange.GetFromAsTimeUTC(),
@ -82,16 +83,16 @@ func (a *queryEndpointAdapter) Query(ctx context.Context, ds *models.DataSource,
resp, err := a.handler.QueryData(ctx, req) resp, err := a.handler.QueryData(ctx, req)
if err != nil { if err != nil {
return nil, err return plugins.DataResponse{}, err
} }
tR := &tsdb.Response{ tR := plugins.DataResponse{
Results: make(map[string]*tsdb.QueryResult, len(resp.Responses)), Results: make(map[string]plugins.DataQueryResult, len(resp.Responses)),
} }
for refID, r := range resp.Responses { for refID, r := range resp.Responses {
qr := &tsdb.QueryResult{ qr := plugins.DataQueryResult{
RefId: refID, RefID: refID,
} }
for _, f := range r.Frames { for _, f := range r.Frames {
@ -100,7 +101,7 @@ func (a *queryEndpointAdapter) Query(ctx context.Context, ds *models.DataSource,
} }
} }
qr.Dataframes = tsdb.NewDecodedDataFrames(r.Frames) qr.Dataframes = plugins.NewDecodedDataFrames(r.Frames)
if r.Error != nil { if r.Error != nil {
qr.Error = r.Error qr.Error = r.Error

View File

@ -0,0 +1,14 @@
package backendplugin
import "errors"
var (
// ErrPluginNotRegistered is returned when a plugin is not registered.
ErrPluginNotRegistered = errors.New("plugin not registered")
// ErrHealthCheckFailed is returned when a plugin health check fails.
ErrHealthCheckFailed = errors.New("health check failed")
// ErrPluginUnavailable is returned when a plugin is unavailable.
ErrPluginUnavailable = errors.New("plugin unavailable")
// ErrMethodNotImplemented is returned when a plugin method is not implemented.
ErrMethodNotImplemented = errors.New("method not implemented")
)
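
A minimal sketch of how callers are expected to branch on these sentinel errors with errors.Is; the first two cases mirror handleCallResourceError in the manager below (503 and 404), while statusFor itself is a hypothetical helper.

package example

import (
    "errors"
    "net/http"

    "github.com/grafana/grafana/pkg/plugins/backendplugin"
)

// statusFor maps backend plugin sentinel errors to HTTP status codes.
func statusFor(err error) int {
    switch {
    case errors.Is(err, backendplugin.ErrPluginUnavailable):
        return http.StatusServiceUnavailable
    case errors.Is(err, backendplugin.ErrMethodNotImplemented):
        return http.StatusNotFound
    default:
        return http.StatusInternalServerError
    }
}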

View File

@ -8,6 +8,7 @@ import (
"github.com/grafana/grafana-plugin-sdk-go/backend" "github.com/grafana/grafana-plugin-sdk-go/backend"
"github.com/grafana/grafana/pkg/infra/log" "github.com/grafana/grafana/pkg/infra/log"
"github.com/grafana/grafana/pkg/plugins/backendplugin" "github.com/grafana/grafana/pkg/plugins/backendplugin"
"github.com/grafana/grafana/pkg/plugins/backendplugin/instrumentation"
"github.com/hashicorp/go-plugin" "github.com/hashicorp/go-plugin"
) )
@ -75,7 +76,7 @@ func instrumentDatasourcePluginV1(plugin datasourceV1.DatasourcePlugin) datasour
return datasourceV1QueryFunc(func(ctx context.Context, req *datasourceV1.DatasourceRequest) (*datasourceV1.DatasourceResponse, error) { return datasourceV1QueryFunc(func(ctx context.Context, req *datasourceV1.DatasourceRequest) (*datasourceV1.DatasourceResponse, error) {
var resp *datasourceV1.DatasourceResponse var resp *datasourceV1.DatasourceResponse
err := backendplugin.InstrumentQueryDataRequest(req.Datasource.Type, func() (innerErr error) { err := instrumentation.InstrumentQueryDataRequest(req.Datasource.Type, func() (innerErr error) {
resp, innerErr = plugin.Query(ctx, req) resp, innerErr = plugin.Query(ctx, req)
return return
}) })

View File

@ -10,6 +10,7 @@ import (
"github.com/grafana/grafana-plugin-sdk-go/genproto/pluginv2" "github.com/grafana/grafana-plugin-sdk-go/genproto/pluginv2"
"github.com/grafana/grafana/pkg/infra/log" "github.com/grafana/grafana/pkg/infra/log"
"github.com/grafana/grafana/pkg/plugins/backendplugin" "github.com/grafana/grafana/pkg/plugins/backendplugin"
"github.com/grafana/grafana/pkg/plugins/backendplugin/instrumentation"
"github.com/grafana/grafana/pkg/plugins/backendplugin/pluginextensionv2" "github.com/grafana/grafana/pkg/plugins/backendplugin/pluginextensionv2"
"github.com/grafana/grafana/pkg/util/errutil" "github.com/grafana/grafana/pkg/util/errutil"
"github.com/hashicorp/go-plugin" "github.com/hashicorp/go-plugin"
@ -170,7 +171,7 @@ func instrumentDataClient(plugin grpcplugin.DataClient) grpcplugin.DataClient {
return dataClientQueryDataFunc(func(ctx context.Context, req *pluginv2.QueryDataRequest, opts ...grpc.CallOption) (*pluginv2.QueryDataResponse, error) { return dataClientQueryDataFunc(func(ctx context.Context, req *pluginv2.QueryDataRequest, opts ...grpc.CallOption) (*pluginv2.QueryDataResponse, error) {
var resp *pluginv2.QueryDataResponse var resp *pluginv2.QueryDataResponse
err := backendplugin.InstrumentQueryDataRequest(req.PluginContext.PluginId, func() (innerErr error) { err := instrumentation.InstrumentQueryDataRequest(req.PluginContext.PluginId, func() (innerErr error) {
resp, innerErr = plugin.QueryData(ctx, req) resp, innerErr = plugin.QueryData(ctx, req)
return return
}) })

View File

@ -28,7 +28,7 @@ type grpcPlugin struct {
// newPlugin allocates and returns a new gRPC (external) backendplugin.Plugin. // newPlugin allocates and returns a new gRPC (external) backendplugin.Plugin.
func newPlugin(descriptor PluginDescriptor) backendplugin.PluginFactoryFunc { func newPlugin(descriptor PluginDescriptor) backendplugin.PluginFactoryFunc {
return backendplugin.PluginFactoryFunc(func(pluginID string, logger log.Logger, env []string) (backendplugin.Plugin, error) { return func(pluginID string, logger log.Logger, env []string) (backendplugin.Plugin, error) {
return &grpcPlugin{ return &grpcPlugin{
descriptor: descriptor, descriptor: descriptor,
logger: logger, logger: logger,
@ -36,7 +36,11 @@ func newPlugin(descriptor PluginDescriptor) backendplugin.PluginFactoryFunc {
return plugin.NewClient(newClientConfig(descriptor.executablePath, env, logger, descriptor.versionedPlugins)) return plugin.NewClient(newClientConfig(descriptor.executablePath, env, logger, descriptor.versionedPlugins))
}, },
}, nil }, nil
}) }
}
func (p *grpcPlugin) CanHandleDataQueries() bool {
return false
} }
func (p *grpcPlugin) PluginID() string { func (p *grpcPlugin) PluginID() string {

View File

@ -0,0 +1,40 @@
package backendplugin
import (
"context"
"github.com/grafana/grafana-plugin-sdk-go/backend"
"github.com/grafana/grafana/pkg/infra/log"
"github.com/grafana/grafana/pkg/models"
)
// Manager manages backend plugins.
type Manager interface {
// Register registers a backend plugin
Register(pluginID string, factory PluginFactoryFunc) error
// StartPlugin starts a non-managed backend plugin
StartPlugin(ctx context.Context, pluginID string) error
// CollectMetrics collects metrics from a registered backend plugin.
CollectMetrics(ctx context.Context, pluginID string) (*backend.CollectMetricsResult, error)
// CheckHealth checks the health of a registered backend plugin.
CheckHealth(ctx context.Context, pCtx backend.PluginContext) (*backend.CheckHealthResult, error)
// CallResource calls a plugin resource.
CallResource(pluginConfig backend.PluginContext, ctx *models.ReqContext, path string)
// GetDataPlugin gets a DataPlugin with a certain ID or nil if it doesn't exist.
// TODO: interface{} is the return type in order to break a dependency cycle. Should be plugins.DataPlugin.
GetDataPlugin(pluginID string) interface{}
}
// Plugin is the backend plugin interface.
type Plugin interface {
PluginID() string
Logger() log.Logger
Start(ctx context.Context) error
Stop(ctx context.Context) error
IsManaged() bool
Exited() bool
CanHandleDataQueries() bool
backend.CollectMetricsHandler
backend.CheckHealthHandler
backend.CallResourceHandler
}
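
A minimal sketch of how a caller resolves a data plugin from the Manager despite the interface{} return type noted in the TODO above; it assumes plugins.DataPlugin declares the DataQuery method with the signature shown on corePlugin, and queryViaManager is a hypothetical helper.

package example

import (
    "context"
    "fmt"

    "github.com/grafana/grafana/pkg/models"
    "github.com/grafana/grafana/pkg/plugins"
    "github.com/grafana/grafana/pkg/plugins/backendplugin"
)

// queryViaManager looks up a registered data plugin and forwards a data query to it.
func queryViaManager(ctx context.Context, m backendplugin.Manager, pluginID string,
    ds *models.DataSource, query plugins.DataQuery) (plugins.DataResponse, error) {
    raw := m.GetDataPlugin(pluginID)
    if raw == nil {
        return plugins.DataResponse{}, backendplugin.ErrPluginNotRegistered
    }
    dataPlugin, ok := raw.(plugins.DataPlugin)
    if !ok {
        return plugins.DataResponse{}, fmt.Errorf("plugin %q cannot handle data queries", pluginID)
    }
    return dataPlugin.DataQuery(ctx, ds, query)
}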

View File

@ -1,4 +1,5 @@
package backendplugin // Package instrumentation contains backend plugin instrumentation logic.
package instrumentation
import ( import (
"context" "context"
@ -48,19 +49,22 @@ func instrumentPluginRequest(pluginID string, endpoint string, fn func() error)
return err return err
} }
func instrumentCollectMetrics(pluginID string, fn func() error) error { // InstrumentCollectMetrics instruments collectMetrics.
func InstrumentCollectMetrics(pluginID string, fn func() error) error {
return instrumentPluginRequest(pluginID, "collectMetrics", fn) return instrumentPluginRequest(pluginID, "collectMetrics", fn)
} }
func instrumentCheckHealthRequest(pluginID string, fn func() error) error { // InstrumentCheckHealthRequest instruments checkHealth.
func InstrumentCheckHealthRequest(pluginID string, fn func() error) error {
return instrumentPluginRequest(pluginID, "checkHealth", fn) return instrumentPluginRequest(pluginID, "checkHealth", fn)
} }
func instrumentCallResourceRequest(pluginID string, fn func() error) error { // InstrumentCallResourceRequest instruments callResource.
func InstrumentCallResourceRequest(pluginID string, fn func() error) error {
return instrumentPluginRequest(pluginID, "callResource", fn) return instrumentPluginRequest(pluginID, "callResource", fn)
} }
// InstrumentQueryDataRequest instruments success rate and latency of query data request. // InstrumentQueryDataRequest instruments success rate and latency of query data requests.
func InstrumentQueryDataRequest(pluginID string, fn func() error) error { func InstrumentQueryDataRequest(pluginID string, fn func() error) error {
return instrumentPluginRequest(pluginID, "queryData", fn) return instrumentPluginRequest(pluginID, "queryData", fn)
} }
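
A minimal sketch of the closure-based usage pattern for the now-exported instrumentation helpers, matching the call sites updated in this change; instrumentedQueryData is a hypothetical wrapper.

package example

import (
    "context"

    "github.com/grafana/grafana-plugin-sdk-go/backend"

    "github.com/grafana/grafana/pkg/plugins/backendplugin/instrumentation"
)

// instrumentedQueryData records success rate and latency for a QueryData call.
func instrumentedQueryData(ctx context.Context, handler backend.QueryDataHandler,
    req *backend.QueryDataRequest) (*backend.QueryDataResponse, error) {
    var resp *backend.QueryDataResponse
    err := instrumentation.InstrumentQueryDataRequest(req.PluginContext.PluginID, func() (innerErr error) {
        resp, innerErr = handler.QueryData(ctx, req)
        return
    })
    return resp, err
}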

View File

@ -1,4 +1,4 @@
package backendplugin package manager
import ( import (
"context" "context"
@ -15,53 +15,31 @@ import (
"github.com/grafana/grafana-plugin-sdk-go/backend" "github.com/grafana/grafana-plugin-sdk-go/backend"
"github.com/grafana/grafana/pkg/infra/log" "github.com/grafana/grafana/pkg/infra/log"
"github.com/grafana/grafana/pkg/models" "github.com/grafana/grafana/pkg/models"
"github.com/grafana/grafana/pkg/plugins"
"github.com/grafana/grafana/pkg/plugins/backendplugin"
"github.com/grafana/grafana/pkg/plugins/backendplugin/instrumentation"
"github.com/grafana/grafana/pkg/registry" "github.com/grafana/grafana/pkg/registry"
"github.com/grafana/grafana/pkg/setting" "github.com/grafana/grafana/pkg/setting"
"github.com/grafana/grafana/pkg/util/errutil" "github.com/grafana/grafana/pkg/util/errutil"
"github.com/grafana/grafana/pkg/util/proxyutil" "github.com/grafana/grafana/pkg/util/proxyutil"
) )
var (
// ErrPluginNotRegistered error returned when plugin not registered.
ErrPluginNotRegistered = errors.New("plugin not registered")
// ErrHealthCheckFailed error returned when health check failed.
ErrHealthCheckFailed = errors.New("health check failed")
// ErrPluginUnavailable error returned when plugin is unavailable.
ErrPluginUnavailable = errors.New("plugin unavailable")
// ErrMethodNotImplemented error returned when plugin method not implemented.
ErrMethodNotImplemented = errors.New("method not implemented")
)
func init() { func init() {
registry.RegisterServiceWithPriority(&manager{}, registry.MediumHigh) registry.RegisterServiceWithPriority(&manager{}, registry.MediumHigh)
} }
// Manager manages backend plugins.
type Manager interface {
// Register registers a backend plugin
Register(pluginID string, factory PluginFactoryFunc) error
// StartPlugin starts a non-managed backend plugin
StartPlugin(ctx context.Context, pluginID string) error
// CollectMetrics collects metrics from a registered backend plugin.
CollectMetrics(ctx context.Context, pluginID string) (*backend.CollectMetricsResult, error)
// CheckHealth checks the health of a registered backend plugin.
CheckHealth(ctx context.Context, pCtx backend.PluginContext) (*backend.CheckHealthResult, error)
// CallResource calls a plugin resource.
CallResource(pluginConfig backend.PluginContext, ctx *models.ReqContext, path string)
}
type manager struct { type manager struct {
Cfg *setting.Cfg `inject:""` Cfg *setting.Cfg `inject:""`
License models.Licensing `inject:""` License models.Licensing `inject:""`
PluginRequestValidator models.PluginRequestValidator `inject:""` PluginRequestValidator models.PluginRequestValidator `inject:""`
pluginsMu sync.RWMutex pluginsMu sync.RWMutex
plugins map[string]Plugin plugins map[string]backendplugin.Plugin
logger log.Logger logger log.Logger
pluginSettings map[string]pluginSettings pluginSettings map[string]pluginSettings
} }
func (m *manager) Init() error { func (m *manager) Init() error {
m.plugins = make(map[string]Plugin) m.plugins = make(map[string]backendplugin.Plugin)
m.logger = log.New("plugins.backend") m.logger = log.New("plugins.backend")
m.pluginSettings = extractPluginSettings(m.Cfg) m.pluginSettings = extractPluginSettings(m.Cfg)
@ -76,7 +54,7 @@ func (m *manager) Run(ctx context.Context) error {
} }
// Register registers a backend plugin // Register registers a backend plugin
func (m *manager) Register(pluginID string, factory PluginFactoryFunc) error { func (m *manager) Register(pluginID string, factory backendplugin.PluginFactoryFunc) error {
m.logger.Debug("Registering backend plugin", "pluginId", pluginID) m.logger.Debug("Registering backend plugin", "pluginId", pluginID)
m.pluginsMu.Lock() m.pluginsMu.Lock()
defer m.pluginsMu.Unlock() defer m.pluginsMu.Unlock()
@ -121,6 +99,19 @@ func (m *manager) Register(pluginID string, factory PluginFactoryFunc) error {
return nil return nil
} }
func (m *manager) GetDataPlugin(pluginID string) interface{} {
plugin := m.plugins[pluginID]
if plugin == nil || !plugin.CanHandleDataQueries() {
return nil
}
if dataPlugin, ok := plugin.(plugins.DataPlugin); ok {
return dataPlugin
}
return nil
}
// start starts all managed backend plugins // start starts all managed backend plugins
func (m *manager) start(ctx context.Context) { func (m *manager) start(ctx context.Context) {
m.pluginsMu.RLock() m.pluginsMu.RLock()
@ -143,7 +134,7 @@ func (m *manager) StartPlugin(ctx context.Context, pluginID string) error {
p, registered := m.plugins[pluginID] p, registered := m.plugins[pluginID]
m.pluginsMu.RUnlock() m.pluginsMu.RUnlock()
if !registered { if !registered {
return ErrPluginNotRegistered return backendplugin.ErrPluginNotRegistered
} }
if p.IsManaged() { if p.IsManaged() {
@ -160,7 +151,7 @@ func (m *manager) stop(ctx context.Context) {
var wg sync.WaitGroup var wg sync.WaitGroup
for _, p := range m.plugins { for _, p := range m.plugins {
wg.Add(1) wg.Add(1)
go func(p Plugin, ctx context.Context) { go func(p backendplugin.Plugin, ctx context.Context) {
defer wg.Done() defer wg.Done()
p.Logger().Debug("Stopping plugin") p.Logger().Debug("Stopping plugin")
if err := p.Stop(ctx); err != nil { if err := p.Stop(ctx); err != nil {
@ -179,11 +170,11 @@ func (m *manager) CollectMetrics(ctx context.Context, pluginID string) (*backend
m.pluginsMu.RUnlock() m.pluginsMu.RUnlock()
if !registered { if !registered {
return nil, ErrPluginNotRegistered return nil, backendplugin.ErrPluginNotRegistered
} }
var resp *backend.CollectMetricsResult var resp *backend.CollectMetricsResult
err := instrumentCollectMetrics(p.PluginID(), func() (innerErr error) { err := instrumentation.InstrumentCollectMetrics(p.PluginID(), func() (innerErr error) {
resp, innerErr = p.CollectMetrics(ctx) resp, innerErr = p.CollectMetrics(ctx)
return return
}) })
@ -214,25 +205,25 @@ func (m *manager) CheckHealth(ctx context.Context, pluginContext backend.PluginC
m.pluginsMu.RUnlock() m.pluginsMu.RUnlock()
if !registered { if !registered {
return nil, ErrPluginNotRegistered return nil, backendplugin.ErrPluginNotRegistered
} }
var resp *backend.CheckHealthResult var resp *backend.CheckHealthResult
err = instrumentCheckHealthRequest(p.PluginID(), func() (innerErr error) { err = instrumentation.InstrumentCheckHealthRequest(p.PluginID(), func() (innerErr error) {
resp, innerErr = p.CheckHealth(ctx, &backend.CheckHealthRequest{PluginContext: pluginContext}) resp, innerErr = p.CheckHealth(ctx, &backend.CheckHealthRequest{PluginContext: pluginContext})
return return
}) })
if err != nil { if err != nil {
if errors.Is(err, ErrMethodNotImplemented) { if errors.Is(err, backendplugin.ErrMethodNotImplemented) {
return nil, err return nil, err
} }
if errors.Is(err, ErrPluginUnavailable) { if errors.Is(err, backendplugin.ErrPluginUnavailable) {
return nil, err return nil, err
} }
return nil, errutil.Wrap("failed to check plugin health", ErrHealthCheckFailed) return nil, errutil.Wrap("failed to check plugin health", backendplugin.ErrHealthCheckFailed)
} }
return resp, nil return resp, nil
@ -248,7 +239,7 @@ func (m *manager) callResourceInternal(w http.ResponseWriter, req *http.Request,
m.pluginsMu.RUnlock() m.pluginsMu.RUnlock()
if !registered { if !registered {
return ErrPluginNotRegistered return backendplugin.ErrPluginNotRegistered
} }
keepCookieModel := keepCookiesJSONModel{} keepCookieModel := keepCookiesJSONModel{}
@ -276,7 +267,7 @@ func (m *manager) callResourceInternal(w http.ResponseWriter, req *http.Request,
Body: body, Body: body,
} }
return instrumentCallResourceRequest(p.PluginID(), func() error { return instrumentation.InstrumentCallResourceRequest(p.PluginID(), func() error {
childCtx, cancel := context.WithCancel(req.Context()) childCtx, cancel := context.WithCancel(req.Context())
defer cancel() defer cancel()
stream := newCallResourceResponseStream(childCtx) stream := newCallResourceResponseStream(childCtx)
@ -336,12 +327,12 @@ func (m *manager) CallResource(pCtx backend.PluginContext, reqCtx *models.ReqCon
} }
func handleCallResourceError(err error, reqCtx *models.ReqContext) { func handleCallResourceError(err error, reqCtx *models.ReqContext) {
if errors.Is(err, ErrPluginUnavailable) { if errors.Is(err, backendplugin.ErrPluginUnavailable) {
reqCtx.JsonApiErr(503, "Plugin unavailable", err) reqCtx.JsonApiErr(503, "Plugin unavailable", err)
return return
} }
if errors.Is(err, ErrMethodNotImplemented) { if errors.Is(err, backendplugin.ErrMethodNotImplemented) {
reqCtx.JsonApiErr(404, "Not found", err) reqCtx.JsonApiErr(404, "Not found", err)
return return
} }
@ -349,7 +340,7 @@ func handleCallResourceError(err error, reqCtx *models.ReqContext) {
reqCtx.JsonApiErr(500, "Failed to call resource", err) reqCtx.JsonApiErr(500, "Failed to call resource", err)
} }
func flushStream(plugin Plugin, stream CallResourceClientResponseStream, w http.ResponseWriter) error { func flushStream(plugin backendplugin.Plugin, stream callResourceClientResponseStream, w http.ResponseWriter) error {
processedStreams := 0 processedStreams := 0
for { for {
@ -404,12 +395,12 @@ func flushStream(plugin Plugin, stream CallResourceClientResponseStream, w http.
} }
} }
func startPluginAndRestartKilledProcesses(ctx context.Context, p Plugin) error { func startPluginAndRestartKilledProcesses(ctx context.Context, p backendplugin.Plugin) error {
if err := p.Start(ctx); err != nil { if err := p.Start(ctx); err != nil {
return err return err
} }
go func(ctx context.Context, p Plugin) { go func(ctx context.Context, p backendplugin.Plugin) {
if err := restartKilledProcess(ctx, p); err != nil { if err := restartKilledProcess(ctx, p); err != nil {
p.Logger().Error("Attempt to restart killed plugin process failed", "error", err) p.Logger().Error("Attempt to restart killed plugin process failed", "error", err)
} }
@ -418,7 +409,7 @@ func startPluginAndRestartKilledProcesses(ctx context.Context, p Plugin) error {
return nil return nil
} }
func restartKilledProcess(ctx context.Context, p Plugin) error { func restartKilledProcess(ctx context.Context, p backendplugin.Plugin) error {
ticker := time.NewTicker(time.Second * 1) ticker := time.NewTicker(time.Second * 1)
for { for {
@ -442,3 +433,9 @@ func restartKilledProcess(ctx context.Context, p Plugin) error {
} }
} }
} }
// callResourceClientResponseStream is used for receiving resource call responses.
type callResourceClientResponseStream interface {
Recv() (*backend.CallResourceResponse, error)
Close() error
}

View File

@ -1,4 +1,4 @@
package backendplugin package manager
import ( import (
"bytes" "bytes"
@ -12,6 +12,7 @@ import (
"github.com/grafana/grafana-plugin-sdk-go/backend" "github.com/grafana/grafana-plugin-sdk-go/backend"
"github.com/grafana/grafana/pkg/infra/log" "github.com/grafana/grafana/pkg/infra/log"
"github.com/grafana/grafana/pkg/models" "github.com/grafana/grafana/pkg/models"
"github.com/grafana/grafana/pkg/plugins/backendplugin"
"github.com/grafana/grafana/pkg/setting" "github.com/grafana/grafana/pkg/setting"
"github.com/stretchr/testify/require" "github.com/stretchr/testify/require"
) )
@ -22,19 +23,19 @@ func TestManager(t *testing.T) {
newManagerScenario(t, false, func(t *testing.T, ctx *managerScenarioCtx) { newManagerScenario(t, false, func(t *testing.T, ctx *managerScenarioCtx) {
t.Run("Unregistered plugin scenario", func(t *testing.T) { t.Run("Unregistered plugin scenario", func(t *testing.T) {
err := ctx.manager.StartPlugin(context.Background(), testPluginID) err := ctx.manager.StartPlugin(context.Background(), testPluginID)
require.Equal(t, ErrPluginNotRegistered, err) require.Equal(t, backendplugin.ErrPluginNotRegistered, err)
_, err = ctx.manager.CollectMetrics(context.Background(), testPluginID) _, err = ctx.manager.CollectMetrics(context.Background(), testPluginID)
require.Equal(t, ErrPluginNotRegistered, err) require.Equal(t, backendplugin.ErrPluginNotRegistered, err)
_, err = ctx.manager.CheckHealth(context.Background(), backend.PluginContext{PluginID: testPluginID}) _, err = ctx.manager.CheckHealth(context.Background(), backend.PluginContext{PluginID: testPluginID})
require.Equal(t, ErrPluginNotRegistered, err) require.Equal(t, backendplugin.ErrPluginNotRegistered, err)
req, err := http.NewRequest(http.MethodGet, "/test", nil) req, err := http.NewRequest(http.MethodGet, "/test", nil)
require.NoError(t, err) require.NoError(t, err)
w := httptest.NewRecorder() w := httptest.NewRecorder()
err = ctx.manager.callResourceInternal(w, req, backend.PluginContext{PluginID: testPluginID}) err = ctx.manager.callResourceInternal(w, req, backend.PluginContext{PluginID: testPluginID})
require.Equal(t, ErrPluginNotRegistered, err) require.Equal(t, backendplugin.ErrPluginNotRegistered, err)
}) })
}) })
@ -121,12 +122,12 @@ func TestManager(t *testing.T) {
t.Run("Unimplemented handlers", func(t *testing.T) { t.Run("Unimplemented handlers", func(t *testing.T) {
t.Run("Collect metrics should return method not implemented error", func(t *testing.T) { t.Run("Collect metrics should return method not implemented error", func(t *testing.T) {
_, err = ctx.manager.CollectMetrics(context.Background(), testPluginID) _, err = ctx.manager.CollectMetrics(context.Background(), testPluginID)
require.Equal(t, ErrMethodNotImplemented, err) require.Equal(t, backendplugin.ErrMethodNotImplemented, err)
}) })
t.Run("Check health should return method not implemented error", func(t *testing.T) { t.Run("Check health should return method not implemented error", func(t *testing.T) {
_, err = ctx.manager.CheckHealth(context.Background(), backend.PluginContext{PluginID: testPluginID}) _, err = ctx.manager.CheckHealth(context.Background(), backend.PluginContext{PluginID: testPluginID})
require.Equal(t, ErrMethodNotImplemented, err) require.Equal(t, backendplugin.ErrMethodNotImplemented, err)
}) })
t.Run("Call resource should return method not implemented error", func(t *testing.T) { t.Run("Call resource should return method not implemented error", func(t *testing.T) {
@ -134,17 +135,17 @@ func TestManager(t *testing.T) {
require.NoError(t, err) require.NoError(t, err)
w := httptest.NewRecorder() w := httptest.NewRecorder()
err = ctx.manager.callResourceInternal(w, req, backend.PluginContext{PluginID: testPluginID}) err = ctx.manager.callResourceInternal(w, req, backend.PluginContext{PluginID: testPluginID})
require.Equal(t, ErrMethodNotImplemented, err) require.Equal(t, backendplugin.ErrMethodNotImplemented, err)
}) })
}) })
t.Run("Implemented handlers", func(t *testing.T) { t.Run("Implemented handlers", func(t *testing.T) {
t.Run("Collect metrics should return expected result", func(t *testing.T) { t.Run("Collect metrics should return expected result", func(t *testing.T) {
ctx.plugin.CollectMetricsHandlerFunc = backend.CollectMetricsHandlerFunc(func(ctx context.Context) (*backend.CollectMetricsResult, error) { ctx.plugin.CollectMetricsHandlerFunc = func(ctx context.Context) (*backend.CollectMetricsResult, error) {
return &backend.CollectMetricsResult{ return &backend.CollectMetricsResult{
PrometheusMetrics: []byte("hello"), PrometheusMetrics: []byte("hello"),
}, nil }, nil
}) }
res, err := ctx.manager.CollectMetrics(context.Background(), testPluginID) res, err := ctx.manager.CollectMetrics(context.Background(), testPluginID)
require.NoError(t, err) require.NoError(t, err)
@ -156,13 +157,13 @@ func TestManager(t *testing.T) {
json := []byte(`{ json := []byte(`{
"key": "value" "key": "value"
}`) }`)
ctx.plugin.CheckHealthHandlerFunc = backend.CheckHealthHandlerFunc(func(ctx context.Context, req *backend.CheckHealthRequest) (*backend.CheckHealthResult, error) { ctx.plugin.CheckHealthHandlerFunc = func(ctx context.Context, req *backend.CheckHealthRequest) (*backend.CheckHealthResult, error) {
return &backend.CheckHealthResult{ return &backend.CheckHealthResult{
Status: backend.HealthStatusOk, Status: backend.HealthStatusOk,
Message: "All good", Message: "All good",
JSONDetails: json, JSONDetails: json,
}, nil }, nil
}) }
res, err := ctx.manager.CheckHealth(context.Background(), backend.PluginContext{PluginID: testPluginID}) res, err := ctx.manager.CheckHealth(context.Background(), backend.PluginContext{PluginID: testPluginID})
require.NoError(t, err) require.NoError(t, err)
@ -173,11 +174,12 @@ func TestManager(t *testing.T) {
}) })
t.Run("Call resource should return expected response", func(t *testing.T) { t.Run("Call resource should return expected response", func(t *testing.T) {
ctx.plugin.CallResourceHandlerFunc = backend.CallResourceHandlerFunc(func(ctx context.Context, req *backend.CallResourceRequest, sender backend.CallResourceResponseSender) error { ctx.plugin.CallResourceHandlerFunc = func(ctx context.Context,
req *backend.CallResourceRequest, sender backend.CallResourceResponseSender) error {
return sender.Send(&backend.CallResourceResponse{ return sender.Send(&backend.CallResourceResponse{
Status: http.StatusOK, Status: http.StatusOK,
}) })
}) }
req, err := http.NewRequest(http.MethodGet, "/test", bytes.NewReader([]byte{})) req, err := http.NewRequest(http.MethodGet, "/test", bytes.NewReader([]byte{}))
require.NoError(t, err) require.NoError(t, err)
@ -270,7 +272,7 @@ type managerScenarioCtx struct {
cfg *setting.Cfg cfg *setting.Cfg
license *testLicensingService license *testLicensingService
manager *manager manager *manager
factory PluginFactoryFunc factory backendplugin.PluginFactoryFunc
plugin *testPlugin plugin *testPlugin
env []string env []string
} }
@ -293,7 +295,7 @@ func newManagerScenario(t *testing.T, managed bool, fn func(t *testing.T, ctx *m
err := ctx.manager.Init() err := ctx.manager.Init()
require.NoError(t, err) require.NoError(t, err)
ctx.factory = PluginFactoryFunc(func(pluginID string, logger log.Logger, env []string) (Plugin, error) { ctx.factory = func(pluginID string, logger log.Logger, env []string) (backendplugin.Plugin, error) {
ctx.plugin = &testPlugin{ ctx.plugin = &testPlugin{
pluginID: pluginID, pluginID: pluginID,
logger: logger, logger: logger,
@ -302,7 +304,7 @@ func newManagerScenario(t *testing.T, managed bool, fn func(t *testing.T, ctx *m
ctx.env = env ctx.env = env
return ctx.plugin, nil return ctx.plugin, nil
}) }
fn(t, ctx) fn(t, ctx)
} }
@ -328,6 +330,10 @@ func (tp *testPlugin) Logger() log.Logger {
return tp.logger return tp.logger
} }
func (tp *testPlugin) CanHandleDataQueries() bool {
return false
}
func (tp *testPlugin) Start(ctx context.Context) error { func (tp *testPlugin) Start(ctx context.Context) error {
tp.mutex.Lock() tp.mutex.Lock()
defer tp.mutex.Unlock() defer tp.mutex.Unlock()
@ -364,7 +370,7 @@ func (tp *testPlugin) CollectMetrics(ctx context.Context) (*backend.CollectMetri
return tp.CollectMetricsHandlerFunc(ctx) return tp.CollectMetricsHandlerFunc(ctx)
} }
return nil, ErrMethodNotImplemented return nil, backendplugin.ErrMethodNotImplemented
} }
func (tp *testPlugin) CheckHealth(ctx context.Context, req *backend.CheckHealthRequest) (*backend.CheckHealthResult, error) { func (tp *testPlugin) CheckHealth(ctx context.Context, req *backend.CheckHealthRequest) (*backend.CheckHealthResult, error) {
@ -372,7 +378,7 @@ func (tp *testPlugin) CheckHealth(ctx context.Context, req *backend.CheckHealthR
return tp.CheckHealthHandlerFunc(ctx, req) return tp.CheckHealthHandlerFunc(ctx, req)
} }
return nil, ErrMethodNotImplemented return nil, backendplugin.ErrMethodNotImplemented
} }
func (tp *testPlugin) CallResource(ctx context.Context, req *backend.CallResourceRequest, sender backend.CallResourceResponseSender) error { func (tp *testPlugin) CallResource(ctx context.Context, req *backend.CallResourceRequest, sender backend.CallResourceResponseSender) error {
@ -380,7 +386,7 @@ func (tp *testPlugin) CallResource(ctx context.Context, req *backend.CallResourc
return tp.CallResourceHandlerFunc(ctx, req, sender) return tp.CallResourceHandlerFunc(ctx, req, sender)
} }
return ErrMethodNotImplemented return backendplugin.ErrMethodNotImplemented
} }
type testLicensingService struct { type testLicensingService struct {

View File

@ -1,4 +1,4 @@
package backendplugin package manager
import ( import (
"fmt" "fmt"

View File

@ -1,4 +1,4 @@
package backendplugin package manager
import ( import (
"sort" "sort"

View File

@ -1,4 +1,4 @@
package backendplugin package manager
import ( import (
"context" "context"

View File

@ -1,30 +0,0 @@
package backendplugin
import (
"context"
"github.com/grafana/grafana-plugin-sdk-go/backend"
"github.com/grafana/grafana/pkg/infra/log"
)
// Plugin backend plugin interface.
type Plugin interface {
PluginID() string
Logger() log.Logger
Start(ctx context.Context) error
Stop(ctx context.Context) error
IsManaged() bool
Exited() bool
backend.CollectMetricsHandler
backend.CheckHealthHandler
backend.CallResourceHandler
}
// PluginFactoryFunc factory for creating a Plugin.
type PluginFactoryFunc func(pluginID string, logger log.Logger, env []string) (Plugin, error)
// CallResourceClientResponseStream is used for receiving resource call responses.
type CallResourceClientResponseStream interface {
Recv() (*backend.CallResourceResponse, error)
Close() error
}

View File

@ -1,103 +0,0 @@
package plugins
import (
"io/ioutil"
"testing"
"github.com/grafana/grafana/pkg/components/simplejson"
"github.com/grafana/grafana/pkg/models"
"github.com/grafana/grafana/pkg/services/dashboards"
"github.com/grafana/grafana/pkg/setting"
. "github.com/smartystreets/goconvey/convey"
)
func TestDashboardImport(t *testing.T) {
pluginScenario("When importing a plugin dashboard", t, func() {
origNewDashboardService := dashboards.NewService
mock := &dashboards.FakeDashboardService{}
dashboards.MockDashboardService(mock)
cmd := ImportDashboardCommand{
PluginId: "test-app",
Path: "dashboards/connections.json",
OrgId: 1,
User: &models.SignedInUser{UserId: 1, OrgRole: models.ROLE_ADMIN},
Inputs: []ImportDashboardInput{
{Name: "*", Type: "datasource", Value: "graphite"},
},
}
err := ImportDashboard(&cmd)
So(err, ShouldBeNil)
Convey("should install dashboard", func() {
So(cmd.Result, ShouldNotBeNil)
resultStr, _ := mock.SavedDashboards[0].Dashboard.Data.EncodePretty()
expectedBytes, _ := ioutil.ReadFile("testdata/test-app/dashboards/connections_result.json")
expectedJson, _ := simplejson.NewJson(expectedBytes)
expectedStr, _ := expectedJson.EncodePretty()
So(string(resultStr), ShouldEqual, string(expectedStr))
panel := mock.SavedDashboards[0].Dashboard.Data.Get("rows").GetIndex(0).Get("panels").GetIndex(0)
So(panel.Get("datasource").MustString(), ShouldEqual, "graphite")
})
Reset(func() {
dashboards.NewService = origNewDashboardService
})
})
Convey("When evaling dashboard template", t, func() {
template, _ := simplejson.NewJson([]byte(`{
"__inputs": [
{
"name": "DS_NAME",
"type": "datasource"
}
],
"test": {
"prop": "${DS_NAME}_${DS_NAME}"
}
}`))
evaluator := &DashTemplateEvaluator{
template: template,
inputs: []ImportDashboardInput{
{Name: "*", Type: "datasource", Value: "my-server"},
},
}
res, err := evaluator.Eval()
So(err, ShouldBeNil)
Convey("should render template", func() {
So(res.GetPath("test", "prop").MustString(), ShouldEqual, "my-server_my-server")
})
Convey("should not include inputs in output", func() {
inputs := res.Get("__inputs")
So(inputs.Interface(), ShouldBeNil)
})
})
}
func pluginScenario(desc string, t *testing.T, fn func()) {
Convey("Given a plugin", t, func() {
pm := &PluginManager{
Cfg: &setting.Cfg{
FeatureToggles: map[string]bool{},
PluginSettings: setting.PluginSettings{
"test-app": map[string]string{
"path": "testdata/test-app",
},
},
},
}
err := pm.Init()
So(err, ShouldBeNil)
Convey(desc, fn)
})
}

View File

@ -1,14 +1,5 @@
package plugins package plugins
import (
"os"
"path/filepath"
"github.com/grafana/grafana/pkg/bus"
"github.com/grafana/grafana/pkg/components/simplejson"
"github.com/grafana/grafana/pkg/models"
)
type PluginDashboardInfoDTO struct { type PluginDashboardInfoDTO struct {
PluginId string `json:"pluginId"` PluginId string `json:"pluginId"`
Title string `json:"title"` Title string `json:"title"`
@ -24,97 +15,3 @@ type PluginDashboardInfoDTO struct {
Path string `json:"path"` Path string `json:"path"`
Removed bool `json:"removed"` Removed bool `json:"removed"`
} }
func GetPluginDashboards(orgId int64, pluginId string) ([]*PluginDashboardInfoDTO, error) {
plugin, exists := Plugins[pluginId]
if !exists {
return nil, PluginNotFoundError{pluginId}
}
result := make([]*PluginDashboardInfoDTO, 0)
// load current dashboards
query := models.GetDashboardsByPluginIdQuery{OrgId: orgId, PluginId: pluginId}
if err := bus.Dispatch(&query); err != nil {
return nil, err
}
existingMatches := make(map[int64]bool)
for _, include := range plugin.Includes {
if include.Type != PluginTypeDashboard {
continue
}
res := &PluginDashboardInfoDTO{}
var dashboard *models.Dashboard
var err error
if dashboard, err = loadPluginDashboard(plugin.Id, include.Path); err != nil {
return nil, err
}
res.Path = include.Path
res.PluginId = plugin.Id
res.Title = dashboard.Title
res.Revision = dashboard.Data.Get("revision").MustInt64(1)
// find existing dashboard
for _, existingDash := range query.Result {
if existingDash.Slug == dashboard.Slug {
res.DashboardId = existingDash.Id
res.Imported = true
res.ImportedUri = "db/" + existingDash.Slug
res.ImportedUrl = existingDash.GetUrl()
res.ImportedRevision = existingDash.Data.Get("revision").MustInt64(1)
existingMatches[existingDash.Id] = true
}
}
result = append(result, res)
}
// find deleted dashboards
for _, dash := range query.Result {
if _, exists := existingMatches[dash.Id]; !exists {
result = append(result, &PluginDashboardInfoDTO{
Slug: dash.Slug,
DashboardId: dash.Id,
Removed: true,
})
}
}
return result, nil
}
func loadPluginDashboard(pluginId, path string) (*models.Dashboard, error) {
plugin, exists := Plugins[pluginId]
if !exists {
return nil, PluginNotFoundError{pluginId}
}
dashboardFilePath := filepath.Join(plugin.PluginDir, path)
// nolint:gosec
// We can ignore the gosec G304 warning on this one because `plugin.PluginDir` is based
// on plugin folder structure on disk and not user input. `path` comes from the
// `plugin.json` configuration file for the loaded plugin
reader, err := os.Open(dashboardFilePath)
if err != nil {
return nil, err
}
defer func() {
if err := reader.Close(); err != nil {
plog.Warn("Failed to close file", "path", dashboardFilePath, "err", err)
}
}()
data, err := simplejson.NewFromReader(reader)
if err != nil {
return nil, err
}
return models.NewDashboardFromJson(data), nil
}

View File

@ -1,131 +0,0 @@
package plugins
import (
"github.com/grafana/grafana/pkg/bus"
"github.com/grafana/grafana/pkg/models"
)
func init() {
bus.AddEventListener(handlePluginStateChanged)
}
func (pm *PluginManager) updateAppDashboards() {
pm.log.Debug("Looking for App Dashboard Updates")
query := models.GetPluginSettingsQuery{OrgId: 0}
if err := bus.Dispatch(&query); err != nil {
pm.log.Error("Failed to get all plugin settings", "error", err)
return
}
for _, pluginSetting := range query.Result {
// ignore disabled plugins
if !pluginSetting.Enabled {
continue
}
if pluginDef, exist := Plugins[pluginSetting.PluginId]; exist {
if pluginDef.Info.Version != pluginSetting.PluginVersion {
syncPluginDashboards(pluginDef, pluginSetting.OrgId)
}
}
}
}
func autoUpdateAppDashboard(pluginDashInfo *PluginDashboardInfoDTO, orgId int64) error {
dash, err := loadPluginDashboard(pluginDashInfo.PluginId, pluginDashInfo.Path)
if err != nil {
return err
}
plog.Info("Auto updating App dashboard", "dashboard", dash.Title, "newRev", pluginDashInfo.Revision, "oldRev", pluginDashInfo.ImportedRevision)
updateCmd := ImportDashboardCommand{
OrgId: orgId,
PluginId: pluginDashInfo.PluginId,
Overwrite: true,
Dashboard: dash.Data,
User: &models.SignedInUser{UserId: 0, OrgRole: models.ROLE_ADMIN},
Path: pluginDashInfo.Path,
}
return bus.Dispatch(&updateCmd)
}
func syncPluginDashboards(pluginDef *PluginBase, orgId int64) {
plog.Info("Syncing plugin dashboards to DB", "pluginId", pluginDef.Id)
// Get plugin dashboards
dashboards, err := GetPluginDashboards(orgId, pluginDef.Id)
if err != nil {
plog.Error("Failed to load app dashboards", "error", err)
return
}
// Update dashboards with updated revisions
for _, dash := range dashboards {
// remove removed ones
if dash.Removed {
plog.Info("Deleting plugin dashboard", "pluginId", pluginDef.Id, "dashboard", dash.Slug)
deleteCmd := models.DeleteDashboardCommand{OrgId: orgId, Id: dash.DashboardId}
if err := bus.Dispatch(&deleteCmd); err != nil {
plog.Error("Failed to auto update app dashboard", "pluginId", pluginDef.Id, "error", err)
return
}
continue
}
// update updated ones
if dash.ImportedRevision != dash.Revision {
if err := autoUpdateAppDashboard(dash, orgId); err != nil {
plog.Error("Failed to auto update app dashboard", "pluginId", pluginDef.Id, "error", err)
return
}
}
}
// update version in plugin_setting table to mark that we have processed the update
query := models.GetPluginSettingByIdQuery{PluginId: pluginDef.Id, OrgId: orgId}
if err := bus.Dispatch(&query); err != nil {
plog.Error("Failed to read plugin setting by id", "error", err)
return
}
appSetting := query.Result
cmd := models.UpdatePluginSettingVersionCmd{
OrgId: appSetting.OrgId,
PluginId: appSetting.PluginId,
PluginVersion: pluginDef.Info.Version,
}
if err := bus.Dispatch(&cmd); err != nil {
plog.Error("Failed to update plugin setting version", "error", err)
}
}
func handlePluginStateChanged(event *models.PluginStateChangedEvent) error {
plog.Info("Plugin state changed", "pluginId", event.PluginId, "enabled", event.Enabled)
if event.Enabled {
syncPluginDashboards(Plugins[event.PluginId], event.OrgId)
} else {
query := models.GetDashboardsByPluginIdQuery{PluginId: event.PluginId, OrgId: event.OrgId}
if err := bus.Dispatch(&query); err != nil {
return err
}
for _, dash := range query.Result {
deleteCmd := models.DeleteDashboardCommand{OrgId: dash.OrgId, Id: dash.Id}
plog.Info("Deleting plugin dashboard", "pluginId", event.PluginId, "dashboard", dash.Slug)
if err := bus.Dispatch(&deleteCmd); err != nil {
return err
}
}
}
return nil
}

pkg/plugins/dataframes.go Normal file
View File

@ -0,0 +1,87 @@
package plugins
import (
"github.com/grafana/grafana-plugin-sdk-go/data"
jsoniter "github.com/json-iterator/go"
)
// DataFrames is an interface for retrieving encoded and decoded data frames.
//
// See NewDecodedDataFrames and NewEncodedDataFrames for more information.
type DataFrames interface {
// Encoded encodes Frames into a slice of []byte.
// If an error occurs [][]byte will be nil.
// The encoded result, if any, will be cached and returned next time Encoded is called.
Encoded() ([][]byte, error)
// Decoded decodes a slice of Arrow encoded frames to data.Frames ([]*data.Frame).
// If an error occurs Frames will be nil.
// The decoded result, if any, will be cached and returned next time Decoded is called.
Decoded() (data.Frames, error)
}
type dataFrames struct {
decoded data.Frames
encoded [][]byte
}
// NewDecodedDataFrames instantiates DataFrames from decoded frames.
//
// This should be the primary function for creating DataFrames if you're implementing a plugin.
// In a Grafana alerting scenario the frames must be decoded, which is why this function is
// preferable. When encoded data frames are needed, e.g. when returning data from the Grafana HTTP API,
// the encoding happens automatically when MarshalJSON() is called.
func NewDecodedDataFrames(decodedFrames data.Frames) DataFrames {
return &dataFrames{
decoded: decodedFrames,
}
}
// NewEncodedDataFrames instantiates DataFrames from encoded frames.
//
// This one is primarily used for creating DataFrames when receiving encoded data frames from an external
// plugin or similar. This may allow the encoded data frames to be returned to Grafana UI without any additional
// decoding/encoding required. In a Grafana alerting scenario the data frames must be decoded,
// in which case the encoded frames are decoded on demand via Decoded().
func NewEncodedDataFrames(encodedFrames [][]byte) DataFrames {
return &dataFrames{
encoded: encodedFrames,
}
}
func (df *dataFrames) Encoded() ([][]byte, error) {
if df.encoded == nil {
encoded, err := df.decoded.MarshalArrow()
if err != nil {
return nil, err
}
df.encoded = encoded
}
return df.encoded, nil
}
func (df *dataFrames) Decoded() (data.Frames, error) {
if df.decoded == nil {
decoded, err := data.UnmarshalArrowFrames(df.encoded)
if err != nil {
return nil, err
}
df.decoded = decoded
}
return df.decoded, nil
}
func (df *dataFrames) MarshalJSON() ([]byte, error) {
encoded, err := df.Encoded()
if err != nil {
return nil, err
}
// Use a configuration that's compatible with the standard library
// to minimize the risk of introducing bugs. This will make sure
// that map keys are ordered.
jsonCfg := jsoniter.ConfigCompatibleWithStandardLibrary
return jsonCfg.Marshal(encoded)
}
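
A minimal usage sketch of the DataFrames API above (not part of the diff; the frame name and field values are placeholders): a plugin wraps decoded frames, and the Arrow encoding is produced lazily and cached on first use.

package main

import (
    "fmt"

    "github.com/grafana/grafana-plugin-sdk-go/data"
    "github.com/grafana/grafana/pkg/plugins"
)

func main() {
    // Build a decoded frame; the values are illustrative only.
    frame := data.NewFrame("example",
        data.NewField("time", nil, []int64{1, 2, 3}),
        data.NewField("value", nil, []float64{1.5, 2.5, 3.5}),
    )
    dfs := plugins.NewDecodedDataFrames(data.Frames{frame})

    // The first call marshals to Arrow and caches the result; later calls reuse it.
    encoded, err := dfs.Encoded()
    if err != nil {
        panic(err)
    }
    fmt.Println("encoded frames:", len(encoded))

    // Decoded returns the original frames without any re-decoding.
    decoded, err := dfs.Decoded()
    if err != nil {
        panic(err)
    }
    fmt.Println("first frame:", decoded[0].Name)
}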

View File

@ -1,15 +1,15 @@
package plugins package plugins
import ( import (
"context"
"encoding/json" "encoding/json"
"fmt"
"path/filepath" "path/filepath"
"github.com/grafana/grafana/pkg/infra/log" "github.com/grafana/grafana/pkg/infra/log"
"github.com/grafana/grafana/pkg/models" "github.com/grafana/grafana/pkg/models"
"github.com/grafana/grafana/pkg/plugins/backendplugin" "github.com/grafana/grafana/pkg/plugins/backendplugin"
"github.com/grafana/grafana/pkg/plugins/backendplugin/grpcplugin" "github.com/grafana/grafana/pkg/plugins/backendplugin/grpcplugin"
"github.com/grafana/grafana/pkg/plugins/datasource/wrapper"
"github.com/grafana/grafana/pkg/tsdb"
"github.com/grafana/grafana/pkg/util/errutil" "github.com/grafana/grafana/pkg/util/errutil"
) )
@ -32,47 +32,65 @@ type DataSourcePlugin struct {
Backend bool `json:"backend,omitempty"` Backend bool `json:"backend,omitempty"`
Executable string `json:"executable,omitempty"` Executable string `json:"executable,omitempty"`
SDK bool `json:"sdk,omitempty"` SDK bool `json:"sdk,omitempty"`
client *grpcplugin.Client
legacyClient *grpcplugin.LegacyClient
logger log.Logger
} }
func (p *DataSourcePlugin) Load(decoder *json.Decoder, base *PluginBase, backendPluginManager backendplugin.Manager) error { func (p *DataSourcePlugin) Load(decoder *json.Decoder, base *PluginBase, backendPluginManager backendplugin.Manager) (
interface{}, error) {
if err := decoder.Decode(p); err != nil { if err := decoder.Decode(p); err != nil {
return errutil.Wrapf(err, "Failed to decode datasource plugin") return nil, errutil.Wrapf(err, "Failed to decode datasource plugin")
}
if err := p.registerPlugin(base); err != nil {
return errutil.Wrapf(err, "Failed to register plugin")
} }
if p.Backend { if p.Backend {
cmd := ComposePluginStartCommand(p.Executable) cmd := ComposePluginStartCommand(p.Executable)
fullpath := filepath.Join(p.PluginDir, cmd) fullpath := filepath.Join(base.PluginDir, cmd)
factory := grpcplugin.NewBackendPlugin(p.Id, fullpath, grpcplugin.PluginStartFuncs{ factory := grpcplugin.NewBackendPlugin(p.Id, fullpath, grpcplugin.PluginStartFuncs{
OnLegacyStart: p.onLegacyPluginStart, OnLegacyStart: p.onLegacyPluginStart,
OnStart: p.onPluginStart, OnStart: p.onPluginStart,
}) })
if err := backendPluginManager.Register(p.Id, factory); err != nil { if err := backendPluginManager.Register(p.Id, factory); err != nil {
return errutil.Wrapf(err, "Failed to register backend plugin") return nil, errutil.Wrapf(err, "failed to register backend plugin")
} }
} }
DataSources[p.Id] = p return p, nil
return nil }
func (p *DataSourcePlugin) DataQuery(ctx context.Context, dsInfo *models.DataSource, query DataQuery) (DataResponse, error) {
if !p.CanHandleDataQueries() {
return DataResponse{}, fmt.Errorf("plugin %q can't handle data queries", p.Id)
}
if p.client != nil {
endpoint := newDataSourcePluginWrapperV2(p.logger, p.Id, p.Type, p.client.DataPlugin)
return endpoint.Query(ctx, dsInfo, query)
}
endpoint := newDataSourcePluginWrapper(p.logger, p.legacyClient.DatasourcePlugin)
return endpoint.Query(ctx, dsInfo, query)
} }
func (p *DataSourcePlugin) onLegacyPluginStart(pluginID string, client *grpcplugin.LegacyClient, logger log.Logger) error { func (p *DataSourcePlugin) onLegacyPluginStart(pluginID string, client *grpcplugin.LegacyClient, logger log.Logger) error {
tsdb.RegisterTsdbQueryEndpoint(pluginID, func(dsInfo *models.DataSource) (tsdb.TsdbQueryEndpoint, error) { p.legacyClient = client
return wrapper.NewDatasourcePluginWrapper(logger, client.DatasourcePlugin), nil p.logger = logger
})
return nil return nil
} }
func (p *DataSourcePlugin) CanHandleDataQueries() bool {
return p.client != nil || p.legacyClient != nil
}
func (p *DataSourcePlugin) onPluginStart(pluginID string, client *grpcplugin.Client, logger log.Logger) error { func (p *DataSourcePlugin) onPluginStart(pluginID string, client *grpcplugin.Client, logger log.Logger) error {
if client.DataPlugin != nil { if client.DataPlugin == nil {
tsdb.RegisterTsdbQueryEndpoint(pluginID, func(dsInfo *models.DataSource) (tsdb.TsdbQueryEndpoint, error) { return nil
return wrapper.NewDatasourcePluginWrapperV2(logger, p.Id, p.Type, client.DataPlugin), nil
})
} }
p.client = client
p.logger = logger
return nil return nil
} }
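
A hedged caller-side sketch (not part of the diff) of the new DataSourcePlugin.DataQuery method above: the plugin chooses the v2 or legacy wrapper internally, so a caller only needs to check CanHandleDataQueries first. The helper name runDataQuery and its arguments are illustrative placeholders.

package example

import (
    "context"
    "fmt"

    "github.com/grafana/grafana/pkg/models"
    "github.com/grafana/grafana/pkg/plugins"
)

// runDataQuery is a hypothetical helper showing how DataQuery might be called.
func runDataQuery(ctx context.Context, p *plugins.DataSourcePlugin, dsInfo *models.DataSource,
    query plugins.DataQuery) (plugins.DataResponse, error) {
    // Without a started backend client (v2 or legacy), the plugin can't serve data queries.
    if !p.CanHandleDataQueries() {
        return plugins.DataResponse{}, fmt.Errorf("plugin %q has no data client yet", p.Id)
    }
    // DataQuery dispatches to the v2 wrapper when available, otherwise to the legacy one.
    return p.DataQuery(ctx, dsInfo, query)
}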

View File

@ -1,4 +1,4 @@
package wrapper package plugins
import ( import (
"context" "context"
@ -10,10 +10,9 @@ import (
"github.com/grafana/grafana/pkg/components/simplejson" "github.com/grafana/grafana/pkg/components/simplejson"
"github.com/grafana/grafana/pkg/infra/log" "github.com/grafana/grafana/pkg/infra/log"
"github.com/grafana/grafana/pkg/models" "github.com/grafana/grafana/pkg/models"
"github.com/grafana/grafana/pkg/tsdb"
) )
func NewDatasourcePluginWrapper(log log.Logger, plugin datasource.DatasourcePlugin) *DatasourcePluginWrapper { func newDataSourcePluginWrapper(log log.Logger, plugin datasource.DatasourcePlugin) *DatasourcePluginWrapper {
return &DatasourcePluginWrapper{DatasourcePlugin: plugin, logger: log} return &DatasourcePluginWrapper{DatasourcePlugin: plugin, logger: log}
} }
@ -22,10 +21,10 @@ type DatasourcePluginWrapper struct {
logger log.Logger logger log.Logger
} }
func (tw *DatasourcePluginWrapper) Query(ctx context.Context, ds *models.DataSource, query *tsdb.TsdbQuery) (*tsdb.Response, error) { func (tw *DatasourcePluginWrapper) Query(ctx context.Context, ds *models.DataSource, query DataQuery) (DataResponse, error) {
jsonData, err := ds.JsonData.MarshalJSON() jsonData, err := ds.JsonData.MarshalJSON()
if err != nil { if err != nil {
return nil, err return DataResponse{}, err
} }
pbQuery := &datasource.DatasourceRequest{ pbQuery := &datasource.DatasourceRequest{
@ -48,31 +47,33 @@ func (tw *DatasourcePluginWrapper) Query(ctx context.Context, ds *models.DataSou
} }
for _, q := range query.Queries { for _, q := range query.Queries {
modelJson, _ := q.Model.MarshalJSON() modelJson, err := q.Model.MarshalJSON()
if err != nil {
return DataResponse{}, err
}
pbQuery.Queries = append(pbQuery.Queries, &datasource.Query{ pbQuery.Queries = append(pbQuery.Queries, &datasource.Query{
ModelJson: string(modelJson), ModelJson: string(modelJson),
IntervalMs: q.IntervalMs, IntervalMs: q.IntervalMS,
RefId: q.RefId, RefId: q.RefID,
MaxDataPoints: q.MaxDataPoints, MaxDataPoints: q.MaxDataPoints,
}) })
} }
pbres, err := tw.DatasourcePlugin.Query(ctx, pbQuery) pbres, err := tw.DatasourcePlugin.Query(ctx, pbQuery)
if err != nil { if err != nil {
return nil, err return DataResponse{}, err
} }
res := &tsdb.Response{ res := DataResponse{
Results: map[string]*tsdb.QueryResult{}, Results: map[string]DataQueryResult{},
} }
for _, r := range pbres.Results { for _, r := range pbres.Results {
qr := &tsdb.QueryResult{ qr := DataQueryResult{
RefId: r.RefId, RefID: r.RefId,
Series: []*tsdb.TimeSeries{}, Series: []DataTimeSeries{},
Tables: []*tsdb.Table{}, Tables: []DataTable{},
} }
if r.Error != "" { if r.Error != "" {
@ -89,14 +90,14 @@ func (tw *DatasourcePluginWrapper) Query(ctx context.Context, ds *models.DataSou
} }
for _, s := range r.GetSeries() { for _, s := range r.GetSeries() {
points := tsdb.TimeSeriesPoints{} points := DataTimeSeriesPoints{}
for _, p := range s.Points { for _, p := range s.Points {
po := tsdb.NewTimePoint(null.FloatFrom(p.Value), float64(p.Timestamp)) po := DataTimePoint{null.FloatFrom(p.Value), null.FloatFrom(float64(p.Timestamp))}
points = append(points, po) points = append(points, po)
} }
qr.Series = append(qr.Series, &tsdb.TimeSeries{ qr.Series = append(qr.Series, DataTimeSeries{
Name: s.Name, Name: s.Name,
Tags: s.Tags, Tags: s.Tags,
Points: points, Points: points,
@ -105,7 +106,7 @@ func (tw *DatasourcePluginWrapper) Query(ctx context.Context, ds *models.DataSou
mappedTables, err := tw.mapTables(r) mappedTables, err := tw.mapTables(r)
if err != nil { if err != nil {
return nil, err return DataResponse{}, err
} }
qr.Tables = mappedTables qr.Tables = mappedTables
@ -114,8 +115,9 @@ func (tw *DatasourcePluginWrapper) Query(ctx context.Context, ds *models.DataSou
return res, nil return res, nil
} }
func (tw *DatasourcePluginWrapper) mapTables(r *datasource.QueryResult) ([]*tsdb.Table, error) {
var tables []*tsdb.Table func (tw *DatasourcePluginWrapper) mapTables(r *datasource.QueryResult) ([]DataTable, error) {
var tables []DataTable
for _, t := range r.GetTables() { for _, t := range r.GetTables() {
mappedTable, err := tw.mapTable(t) mappedTable, err := tw.mapTable(t)
if err != nil { if err != nil {
@ -126,21 +128,21 @@ func (tw *DatasourcePluginWrapper) mapTables(r *datasource.QueryResult) ([]*tsdb
return tables, nil return tables, nil
} }
func (tw *DatasourcePluginWrapper) mapTable(t *datasource.Table) (*tsdb.Table, error) { func (tw *DatasourcePluginWrapper) mapTable(t *datasource.Table) (DataTable, error) {
table := &tsdb.Table{} table := DataTable{}
for _, c := range t.GetColumns() { for _, c := range t.GetColumns() {
table.Columns = append(table.Columns, tsdb.TableColumn{ table.Columns = append(table.Columns, DataTableColumn{
Text: c.Name, Text: c.Name,
}) })
} }
table.Rows = make([]tsdb.RowValues, 0) table.Rows = make([]DataRowValues, 0)
for _, r := range t.GetRows() { for _, r := range t.GetRows() {
row := tsdb.RowValues{} row := DataRowValues{}
for _, rv := range r.Values { for _, rv := range r.Values {
mappedRw, err := tw.mapRowValue(rv) mappedRw, err := tw.mapRowValue(rv)
if err != nil { if err != nil {
return nil, err return table, err
} }
row = append(row, mappedRw) row = append(row, mappedRw)

View File

@ -1,16 +1,15 @@
package wrapper package plugins
import ( import (
"testing" "testing"
"github.com/grafana/grafana-plugin-model/go/datasource" "github.com/grafana/grafana-plugin-model/go/datasource"
"github.com/grafana/grafana/pkg/infra/log" "github.com/grafana/grafana/pkg/infra/log"
"github.com/grafana/grafana/pkg/tsdb"
"github.com/stretchr/testify/require" "github.com/stretchr/testify/require"
) )
func TestMapTables(t *testing.T) { func TestMapTables(t *testing.T) {
dpw := NewDatasourcePluginWrapper(log.New("test-logger"), nil) dpw := newDataSourcePluginWrapper(log.New("test-logger"), nil)
var qr = &datasource.QueryResult{} var qr = &datasource.QueryResult{}
qr.Tables = append(qr.Tables, &datasource.Table{ qr.Tables = append(qr.Tables, &datasource.Table{
Columns: []*datasource.TableColumn{}, Columns: []*datasource.TableColumn{},
@ -23,7 +22,7 @@ func TestMapTables(t *testing.T) {
} }
func TestMapTable(t *testing.T) { func TestMapTable(t *testing.T) {
dpw := NewDatasourcePluginWrapper(log.New("test-logger"), nil) dpw := newDataSourcePluginWrapper(log.New("test-logger"), nil)
source := &datasource.Table{ source := &datasource.Table{
Columns: []*datasource.TableColumn{{Name: "column1"}, {Name: "column2"}}, Columns: []*datasource.TableColumn{{Name: "column1"}, {Name: "column2"}},
@ -41,8 +40,8 @@ func TestMapTable(t *testing.T) {
}}, }},
} }
want := &tsdb.Table{ want := DataTable{
Columns: []tsdb.TableColumn{{Text: "column1"}, {Text: "column2"}}, Columns: []DataTableColumn{{Text: "column1"}, {Text: "column2"}},
} }
have, err := dpw.mapTable(source) have, err := dpw.mapTable(source)
require.NoError(t, err) require.NoError(t, err)
@ -53,9 +52,10 @@ func TestMapTable(t *testing.T) {
} }
func TestMappingRowValue(t *testing.T) { func TestMappingRowValue(t *testing.T) {
dpw := NewDatasourcePluginWrapper(log.New("test-logger"), nil) dpw := newDataSourcePluginWrapper(log.New("test-logger"), nil)
boolRowValue, _ := dpw.mapRowValue(&datasource.RowValue{Kind: datasource.RowValue_TYPE_BOOL, BoolValue: true}) boolRowValue, err := dpw.mapRowValue(&datasource.RowValue{Kind: datasource.RowValue_TYPE_BOOL, BoolValue: true})
require.NoError(t, err)
haveBool, ok := boolRowValue.(bool) haveBool, ok := boolRowValue.(bool)
require.True(t, ok) require.True(t, ok)
require.True(t, haveBool) require.True(t, haveBool)

View File

@ -1,4 +1,4 @@
package wrapper package plugins
import ( import (
"context" "context"
@ -10,11 +10,11 @@ import (
"github.com/grafana/grafana/pkg/components/simplejson" "github.com/grafana/grafana/pkg/components/simplejson"
"github.com/grafana/grafana/pkg/infra/log" "github.com/grafana/grafana/pkg/infra/log"
"github.com/grafana/grafana/pkg/models" "github.com/grafana/grafana/pkg/models"
"github.com/grafana/grafana/pkg/plugins/adapters"
"github.com/grafana/grafana/pkg/services/oauthtoken" "github.com/grafana/grafana/pkg/services/oauthtoken"
"github.com/grafana/grafana/pkg/tsdb"
) )
func NewDatasourcePluginWrapperV2(log log.Logger, pluginId, pluginType string, client grpcplugin.DataClient) *DatasourcePluginWrapperV2 { func newDataSourcePluginWrapperV2(log log.Logger, pluginId, pluginType string, client grpcplugin.DataClient) *DatasourcePluginWrapperV2 {
return &DatasourcePluginWrapperV2{DataClient: client, logger: log, pluginId: pluginId, pluginType: pluginType} return &DatasourcePluginWrapperV2{DataClient: client, logger: log, pluginId: pluginId, pluginType: pluginType}
} }
@ -25,30 +25,10 @@ type DatasourcePluginWrapperV2 struct {
pluginType string pluginType string
} }
func ModelToInstanceSettings(ds *models.DataSource) (*backend.DataSourceInstanceSettings, error) { func (tw *DatasourcePluginWrapperV2) Query(ctx context.Context, ds *models.DataSource, query DataQuery) (DataResponse, error) {
jsonDataBytes, err := ds.JsonData.MarshalJSON() instanceSettings, err := adapters.ModelToInstanceSettings(ds)
if err != nil { if err != nil {
return nil, err return DataResponse{}, err
}
return &backend.DataSourceInstanceSettings{
ID: ds.Id,
Name: ds.Name,
URL: ds.Url,
Database: ds.Database,
User: ds.User,
BasicAuthEnabled: ds.BasicAuth,
BasicAuthUser: ds.BasicAuthUser,
JSONData: jsonDataBytes,
DecryptedSecureJSONData: ds.DecryptedValues(),
Updated: ds.Updated,
}, nil
}
func (tw *DatasourcePluginWrapperV2) Query(ctx context.Context, ds *models.DataSource, query *tsdb.TsdbQuery) (*tsdb.Response, error) {
instanceSettings, err := ModelToInstanceSettings(ds)
if err != nil {
return nil, err
} }
if query.Headers == nil { if query.Headers == nil {
@ -66,7 +46,7 @@ func (tw *DatasourcePluginWrapperV2) Query(ctx context.Context, ds *models.DataS
PluginContext: &pluginv2.PluginContext{ PluginContext: &pluginv2.PluginContext{
OrgId: ds.OrgId, OrgId: ds.OrgId,
PluginId: tw.pluginId, PluginId: tw.pluginId,
User: backend.ToProto().User(BackendUserFromSignedInUser(query.User)), User: backend.ToProto().User(adapters.BackendUserFromSignedInUser(query.User)),
DataSourceInstanceSettings: backend.ToProto().DataSourceInstanceSettings(instanceSettings), DataSourceInstanceSettings: backend.ToProto().DataSourceInstanceSettings(instanceSettings),
}, },
Queries: []*pluginv2.DataQuery{}, Queries: []*pluginv2.DataQuery{},
@ -76,12 +56,12 @@ func (tw *DatasourcePluginWrapperV2) Query(ctx context.Context, ds *models.DataS
for _, q := range query.Queries { for _, q := range query.Queries {
modelJSON, err := q.Model.MarshalJSON() modelJSON, err := q.Model.MarshalJSON()
if err != nil { if err != nil {
return nil, err return DataResponse{}, err
} }
pbQuery.Queries = append(pbQuery.Queries, &pluginv2.DataQuery{ pbQuery.Queries = append(pbQuery.Queries, &pluginv2.DataQuery{
Json: modelJSON, Json: modelJSON,
IntervalMS: q.IntervalMs, IntervalMS: q.IntervalMS,
RefId: q.RefId, RefId: q.RefID,
MaxDataPoints: q.MaxDataPoints, MaxDataPoints: q.MaxDataPoints,
TimeRange: &pluginv2.TimeRange{ TimeRange: &pluginv2.TimeRange{
ToEpochMS: query.TimeRange.GetToAsMsEpoch(), ToEpochMS: query.TimeRange.GetToAsMsEpoch(),
@ -93,17 +73,17 @@ func (tw *DatasourcePluginWrapperV2) Query(ctx context.Context, ds *models.DataS
pbRes, err := tw.DataClient.QueryData(ctx, pbQuery) pbRes, err := tw.DataClient.QueryData(ctx, pbQuery)
if err != nil { if err != nil {
return nil, err return DataResponse{}, err
} }
tR := &tsdb.Response{ tR := DataResponse{
Results: make(map[string]*tsdb.QueryResult, len(pbRes.Responses)), Results: make(map[string]DataQueryResult, len(pbRes.Responses)),
} }
for refID, pRes := range pbRes.Responses { for refID, pRes := range pbRes.Responses {
qr := &tsdb.QueryResult{ qr := DataQueryResult{
RefId: refID, RefID: refID,
Dataframes: tsdb.NewEncodedDataFrames(pRes.Frames), Dataframes: NewEncodedDataFrames(pRes.Frames),
} }
if len(pRes.JsonMeta) != 0 { if len(pRes.JsonMeta) != 0 {
qr.Meta = simplejson.NewFromAny(pRes.JsonMeta) qr.Meta = simplejson.NewFromAny(pRes.JsonMeta)
@ -117,17 +97,3 @@ func (tw *DatasourcePluginWrapperV2) Query(ctx context.Context, ds *models.DataS
return tR, nil return tR, nil
} }
// BackendUserFromSignedInUser converts Grafana's SignedInUser model
// to the backend plugin's model.
func BackendUserFromSignedInUser(su *models.SignedInUser) *backend.User {
if su == nil {
return nil
}
return &backend.User{
Login: su.Login,
Name: su.Name,
Email: su.Email,
Role: string(su.OrgRole),
}
}

View File

@ -1,11 +1,5 @@
package plugins package plugins
const (
signatureMissing ErrorCode = "signatureMissing"
signatureModified ErrorCode = "signatureModified"
signatureInvalid ErrorCode = "signatureInvalid"
)
type ErrorCode string type ErrorCode string
type PluginError struct { type PluginError struct {

View File

@ -14,12 +14,15 @@ type FrontendPluginBase struct {
PluginBase PluginBase
} }
func (fp *FrontendPluginBase) initFrontendPlugin() { func (fp *FrontendPluginBase) InitFrontendPlugin() []*PluginStaticRoute {
var staticRoutes []*PluginStaticRoute
if isExternalPlugin(fp.PluginDir) { if isExternalPlugin(fp.PluginDir) {
StaticRoutes = append(StaticRoutes, &PluginStaticRoute{ staticRoutes = []*PluginStaticRoute{
Directory: fp.PluginDir, {
PluginId: fp.Id, Directory: fp.PluginDir,
}) PluginId: fp.Id,
},
}
} }
fp.handleModuleDefaults() fp.handleModuleDefaults()
@ -30,6 +33,8 @@ func (fp *FrontendPluginBase) initFrontendPlugin() {
for i := 0; i < len(fp.Info.Screenshots); i++ { for i := 0; i < len(fp.Info.Screenshots); i++ {
fp.Info.Screenshots[i].Path = evalRelativePluginUrlPath(fp.Info.Screenshots[i].Path, fp.BaseUrl) fp.Info.Screenshots[i].Path = evalRelativePluginUrlPath(fp.Info.Screenshots[i].Path, fp.BaseUrl)
} }
return staticRoutes
} }
func getPluginLogoUrl(pluginType, path, baseUrl string) string { func getPluginLogoUrl(pluginType, path, baseUrl string) string {

View File

@ -1,31 +1,19 @@
package plugins package manager
import ( import (
"encoding/json" "encoding/json"
"fmt" "fmt"
"regexp" "regexp"
"github.com/grafana/grafana/pkg/bus"
"github.com/grafana/grafana/pkg/components/simplejson" "github.com/grafana/grafana/pkg/components/simplejson"
"github.com/grafana/grafana/pkg/models" "github.com/grafana/grafana/pkg/models"
"github.com/grafana/grafana/pkg/plugins"
"github.com/grafana/grafana/pkg/services/dashboards" "github.com/grafana/grafana/pkg/services/dashboards"
"github.com/grafana/grafana/pkg/tsdb/tsdbifaces"
) )
var varRegex = regexp.MustCompile(`(\$\{.+?\})`) var varRegex = regexp.MustCompile(`(\$\{.+?\})`)
type ImportDashboardCommand struct {
Dashboard *simplejson.Json
Path string
Inputs []ImportDashboardInput
Overwrite bool
FolderId int64
OrgId int64
User *models.SignedInUser
PluginId string
Result *PluginDashboardInfoDTO
}
type ImportDashboardInput struct { type ImportDashboardInput struct {
Type string `json:"type"` Type string `json:"type"`
PluginId string `json:"pluginId"` PluginId string `json:"pluginId"`
@ -41,58 +29,54 @@ func (e DashboardInputMissingError) Error() string {
return fmt.Sprintf("Dashboard input variable: %v missing from import command", e.VariableName) return fmt.Sprintf("Dashboard input variable: %v missing from import command", e.VariableName)
} }
func init() { func (pm *PluginManager) ImportDashboard(pluginID, path string, orgID, folderID int64, dashboardModel *simplejson.Json,
bus.AddHandler("plugins", ImportDashboard) overwrite bool, inputs []ImportDashboardInput, user *models.SignedInUser,
} requestHandler tsdbifaces.RequestHandler) (plugins.PluginDashboardInfoDTO, error) {
func ImportDashboard(cmd *ImportDashboardCommand) error {
var dashboard *models.Dashboard var dashboard *models.Dashboard
var err error if pluginID != "" {
var err error
if cmd.PluginId != "" { if dashboard, err = pm.LoadPluginDashboard(pluginID, path); err != nil {
if dashboard, err = loadPluginDashboard(cmd.PluginId, cmd.Path); err != nil { return plugins.PluginDashboardInfoDTO{}, err
return err
} }
} else { } else {
dashboard = models.NewDashboardFromJson(cmd.Dashboard) dashboard = models.NewDashboardFromJson(dashboardModel)
} }
evaluator := &DashTemplateEvaluator{ evaluator := &DashTemplateEvaluator{
template: dashboard.Data, template: dashboard.Data,
inputs: cmd.Inputs, inputs: inputs,
} }
generatedDash, err := evaluator.Eval() generatedDash, err := evaluator.Eval()
if err != nil { if err != nil {
return err return plugins.PluginDashboardInfoDTO{}, err
} }
saveCmd := models.SaveDashboardCommand{ saveCmd := models.SaveDashboardCommand{
Dashboard: generatedDash, Dashboard: generatedDash,
OrgId: cmd.OrgId, OrgId: orgID,
UserId: cmd.User.UserId, UserId: user.UserId,
Overwrite: cmd.Overwrite, Overwrite: overwrite,
PluginId: cmd.PluginId, PluginId: pluginID,
FolderId: cmd.FolderId, FolderId: folderID,
} }
dto := &dashboards.SaveDashboardDTO{ dto := &dashboards.SaveDashboardDTO{
OrgId: cmd.OrgId, OrgId: orgID,
Dashboard: saveCmd.GetDashboardModel(), Dashboard: saveCmd.GetDashboardModel(),
Overwrite: saveCmd.Overwrite, Overwrite: saveCmd.Overwrite,
User: cmd.User, User: user,
} }
savedDash, err := dashboards.NewService().ImportDashboard(dto) savedDash, err := dashboards.NewService(requestHandler).ImportDashboard(dto)
if err != nil { if err != nil {
return err return plugins.PluginDashboardInfoDTO{}, err
} }
cmd.Result = &PluginDashboardInfoDTO{ return plugins.PluginDashboardInfoDTO{
PluginId: cmd.PluginId, PluginId: pluginID,
Title: savedDash.Title, Title: savedDash.Title,
Path: cmd.Path, Path: path,
Revision: savedDash.Data.Get("revision").MustInt64(1), Revision: savedDash.Data.Get("revision").MustInt64(1),
FolderId: savedDash.FolderId, FolderId: savedDash.FolderId,
ImportedUri: "db/" + savedDash.Slug, ImportedUri: "db/" + savedDash.Slug,
@ -101,9 +85,7 @@ func ImportDashboard(cmd *ImportDashboardCommand) error {
Imported: true, Imported: true,
DashboardId: savedDash.Id, DashboardId: savedDash.Id,
Slug: savedDash.Slug, Slug: savedDash.Slug,
} }, nil
return nil
} }
type DashTemplateEvaluator struct { type DashTemplateEvaluator struct {

View File

@ -0,0 +1,97 @@
package manager
import (
"io/ioutil"
"testing"
"github.com/grafana/grafana/pkg/components/simplejson"
"github.com/grafana/grafana/pkg/models"
"github.com/grafana/grafana/pkg/services/dashboards"
"github.com/grafana/grafana/pkg/setting"
"github.com/stretchr/testify/require"
)
func TestDashboardImport(t *testing.T) {
pluginScenario(t, "When importing a plugin dashboard", func(t *testing.T, pm *PluginManager) {
origNewDashboardService := dashboards.NewService
t.Cleanup(func() {
dashboards.NewService = origNewDashboardService
})
mock := &dashboards.FakeDashboardService{}
dashboards.MockDashboardService(mock)
info, err := pm.ImportDashboard("test-app", "dashboards/connections.json", 1, 0, nil, false,
[]ImportDashboardInput{
{Name: "*", Type: "datasource", Value: "graphite"},
}, &models.SignedInUser{UserId: 1, OrgRole: models.ROLE_ADMIN}, nil)
require.NoError(t, err)
require.NotNil(t, info)
resultStr, err := mock.SavedDashboards[0].Dashboard.Data.EncodePretty()
require.NoError(t, err)
expectedBytes, err := ioutil.ReadFile("testdata/test-app/dashboards/connections_result.json")
require.NoError(t, err)
expectedJson, err := simplejson.NewJson(expectedBytes)
require.NoError(t, err)
expectedStr, err := expectedJson.EncodePretty()
require.NoError(t, err)
require.Equal(t, expectedStr, resultStr)
panel := mock.SavedDashboards[0].Dashboard.Data.Get("rows").GetIndex(0).Get("panels").GetIndex(0)
require.Equal(t, "graphite", panel.Get("datasource").MustString())
})
t.Run("When evaling dashboard template", func(t *testing.T) {
template, err := simplejson.NewJson([]byte(`{
"__inputs": [
{
"name": "DS_NAME",
"type": "datasource"
}
],
"test": {
"prop": "${DS_NAME}_${DS_NAME}"
}
}`))
require.NoError(t, err)
evaluator := &DashTemplateEvaluator{
template: template,
inputs: []ImportDashboardInput{
{Name: "*", Type: "datasource", Value: "my-server"},
},
}
res, err := evaluator.Eval()
require.NoError(t, err)
require.Equal(t, "my-server_my-server", res.GetPath("test", "prop").MustString())
inputs := res.Get("__inputs")
require.Nil(t, inputs.Interface())
})
}
func pluginScenario(t *testing.T, desc string, fn func(*testing.T, *PluginManager)) {
t.Helper()
t.Run("Given a plugin", func(t *testing.T) {
pm := &PluginManager{
Cfg: &setting.Cfg{
FeatureToggles: map[string]bool{},
PluginSettings: setting.PluginSettings{
"test-app": map[string]string{
"path": "testdata/test-app",
},
},
},
}
err := pm.Init()
require.NoError(t, err)
t.Run(desc, func(t *testing.T) {
fn(t, pm)
})
})
}

View File

@ -0,0 +1,101 @@
package manager
import (
"os"
"path/filepath"
"github.com/grafana/grafana/pkg/bus"
"github.com/grafana/grafana/pkg/components/simplejson"
"github.com/grafana/grafana/pkg/models"
"github.com/grafana/grafana/pkg/plugins"
)
func (pm *PluginManager) GetPluginDashboards(orgId int64, pluginId string) ([]*plugins.PluginDashboardInfoDTO, error) {
plugin, exists := Plugins[pluginId]
if !exists {
return nil, plugins.PluginNotFoundError{PluginID: pluginId}
}
result := make([]*plugins.PluginDashboardInfoDTO, 0)
// load current dashboards
query := models.GetDashboardsByPluginIdQuery{OrgId: orgId, PluginId: pluginId}
if err := bus.Dispatch(&query); err != nil {
return nil, err
}
existingMatches := make(map[int64]bool)
for _, include := range plugin.Includes {
if include.Type != plugins.PluginTypeDashboard {
continue
}
dashboard, err := pm.LoadPluginDashboard(plugin.Id, include.Path)
if err != nil {
return nil, err
}
res := &plugins.PluginDashboardInfoDTO{}
res.Path = include.Path
res.PluginId = plugin.Id
res.Title = dashboard.Title
res.Revision = dashboard.Data.Get("revision").MustInt64(1)
// find existing dashboard
for _, existingDash := range query.Result {
if existingDash.Slug == dashboard.Slug {
res.DashboardId = existingDash.Id
res.Imported = true
res.ImportedUri = "db/" + existingDash.Slug
res.ImportedUrl = existingDash.GetUrl()
res.ImportedRevision = existingDash.Data.Get("revision").MustInt64(1)
existingMatches[existingDash.Id] = true
}
}
result = append(result, res)
}
// find deleted dashboards
for _, dash := range query.Result {
if _, exists := existingMatches[dash.Id]; !exists {
result = append(result, &plugins.PluginDashboardInfoDTO{
Slug: dash.Slug,
DashboardId: dash.Id,
Removed: true,
})
}
}
return result, nil
}
func (pm *PluginManager) LoadPluginDashboard(pluginId, path string) (*models.Dashboard, error) {
plugin, exists := Plugins[pluginId]
if !exists {
return nil, plugins.PluginNotFoundError{PluginID: pluginId}
}
dashboardFilePath := filepath.Join(plugin.PluginDir, path)
// nolint:gosec
// We can ignore the gosec G304 warning on this one because `plugin.PluginDir` is based
// on plugin folder structure on disk and not user input. `path` comes from the
// `plugin.json` configuration file for the loaded plugin
reader, err := os.Open(dashboardFilePath)
if err != nil {
return nil, err
}
defer func() {
if err := reader.Close(); err != nil {
plog.Warn("Failed to close file", "path", dashboardFilePath, "err", err)
}
}()
data, err := simplejson.NewFromReader(reader)
if err != nil {
return nil, err
}
return models.NewDashboardFromJson(data), nil
}
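
A brief sketch (not part of the diff) of consuming the dashboard info returned by GetPluginDashboards above; the package path, helper name, org ID and plugin ID are assumptions used for illustration.

package example

import (
    "github.com/grafana/grafana/pkg/plugins"
    "github.com/grafana/grafana/pkg/plugins/manager"
)

// dashboardsNeedingAttention is a hypothetical helper that filters the returned DTOs.
func dashboardsNeedingAttention(pm *manager.PluginManager, orgID int64, pluginID string) ([]*plugins.PluginDashboardInfoDTO, error) {
    infos, err := pm.GetPluginDashboards(orgID, pluginID)
    if err != nil {
        return nil, err
    }
    var out []*plugins.PluginDashboardInfoDTO
    for _, info := range infos {
        switch {
        case info.Removed:
            // The dashboard was removed from the plugin but still exists in the DB.
            out = append(out, info)
        case info.Imported && info.ImportedRevision != info.Revision:
            // The plugin ships a newer revision than the one currently imported.
            out = append(out, info)
        }
    }
    return out, nil
}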

View File

@ -1,4 +1,4 @@
package plugins package manager
import ( import (
"testing" "testing"
@ -11,7 +11,7 @@ import (
) )
func TestPluginDashboards(t *testing.T) { func TestPluginDashboards(t *testing.T) {
Convey("When asking plugin dashboard info", t, func() { Convey("When asking for plugin dashboard info", t, func() {
pm := &PluginManager{ pm := &PluginManager{
Cfg: &setting.Cfg{ Cfg: &setting.Cfg{
FeatureToggles: map[string]bool{}, FeatureToggles: map[string]bool{},
@ -47,8 +47,7 @@ func TestPluginDashboards(t *testing.T) {
return nil return nil
}) })
dashboards, err := GetPluginDashboards(1, "test-app") dashboards, err := pm.GetPluginDashboards(1, "test-app")
So(err, ShouldBeNil) So(err, ShouldBeNil)
Convey("should return 2 dashboards", func() { Convey("should return 2 dashboards", func() {

View File

@ -0,0 +1,11 @@
package manager
import (
"github.com/grafana/grafana/pkg/plugins"
)
const (
signatureMissing plugins.ErrorCode = "signatureMissing"
signatureModified plugins.ErrorCode = "signatureModified"
signatureInvalid plugins.ErrorCode = "signatureInvalid"
)

View File

@ -0,0 +1,616 @@
// Package manager contains plugin manager logic.
package manager
import (
"context"
"encoding/json"
"errors"
"fmt"
"io/ioutil"
"os"
"path/filepath"
"reflect"
"runtime"
"strings"
"time"
"github.com/grafana/grafana/pkg/infra/fs"
"github.com/grafana/grafana/pkg/infra/log"
"github.com/grafana/grafana/pkg/infra/metrics"
"github.com/grafana/grafana/pkg/models"
"github.com/grafana/grafana/pkg/plugins"
"github.com/grafana/grafana/pkg/plugins/backendplugin"
"github.com/grafana/grafana/pkg/registry"
"github.com/grafana/grafana/pkg/setting"
"github.com/grafana/grafana/pkg/util"
"github.com/grafana/grafana/pkg/util/errutil"
)
var (
DataSources map[string]*plugins.DataSourcePlugin
Panels map[string]*plugins.PanelPlugin
StaticRoutes []*plugins.PluginStaticRoute
Apps map[string]*plugins.AppPlugin
Plugins map[string]*plugins.PluginBase
PluginTypes map[string]interface{}
Renderer *plugins.RendererPlugin
plog log.Logger
)
type unsignedPluginConditionFunc = func(plugin *plugins.PluginBase) bool
type PluginScanner struct {
pluginPath string
errors []error
backendPluginManager backendplugin.Manager
cfg *setting.Cfg
requireSigned bool
log log.Logger
plugins map[string]*plugins.PluginBase
allowUnsignedPluginsCondition unsignedPluginConditionFunc
}
type PluginManager struct {
BackendPluginManager backendplugin.Manager `inject:""`
Cfg *setting.Cfg `inject:""`
log log.Logger
scanningErrors []error
// AllowUnsignedPluginsCondition changes the policy for allowing unsigned plugins. Signature validation only runs when plugins start,
// and already-running plugins will not be terminated if they violate the new policy.
AllowUnsignedPluginsCondition unsignedPluginConditionFunc
GrafanaLatestVersion string
GrafanaHasUpdate bool
pluginScanningErrors map[string]plugins.PluginError
}
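// Example (illustrative only; the plugin ID is hypothetical): the unsigned-plugin
// policy can be relaxed for a specific plugin by setting AllowUnsignedPluginsCondition
// before Init runs, e.g. in tests or custom wiring:
//
//	pm := &PluginManager{
//		Cfg:                  cfg,
//		BackendPluginManager: backendPluginManager,
//		AllowUnsignedPluginsCondition: func(p *plugins.PluginBase) bool {
//			return p.Id == "my-internal-plugin"
//		},
//	}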
func init() {
registry.RegisterService(&PluginManager{})
}
func (pm *PluginManager) Init() error {
pm.log = log.New("plugins")
plog = log.New("plugins")
DataSources = map[string]*plugins.DataSourcePlugin{}
StaticRoutes = []*plugins.PluginStaticRoute{}
Panels = map[string]*plugins.PanelPlugin{}
Apps = map[string]*plugins.AppPlugin{}
Plugins = map[string]*plugins.PluginBase{}
PluginTypes = map[string]interface{}{
"panel": plugins.PanelPlugin{},
"datasource": plugins.DataSourcePlugin{},
"app": plugins.AppPlugin{},
"renderer": plugins.RendererPlugin{},
}
pm.pluginScanningErrors = map[string]plugins.PluginError{}
pm.log.Info("Starting plugin search")
plugDir := filepath.Join(pm.Cfg.StaticRootPath, "app/plugins")
pm.log.Debug("Scanning core plugin directory", "dir", plugDir)
if err := pm.scan(plugDir, false); err != nil {
return errutil.Wrapf(err, "failed to scan core plugin directory '%s'", plugDir)
}
plugDir = pm.Cfg.BundledPluginsPath
pm.log.Debug("Scanning bundled plugins directory", "dir", plugDir)
exists, err := fs.Exists(plugDir)
if err != nil {
return err
}
if exists {
if err := pm.scan(plugDir, false); err != nil {
return errutil.Wrapf(err, "failed to scan bundled plugins directory '%s'", plugDir)
}
}
// check if plugins dir exists
exists, err = fs.Exists(pm.Cfg.PluginsPath)
if err != nil {
return err
}
if !exists {
if err = os.MkdirAll(pm.Cfg.PluginsPath, os.ModePerm); err != nil {
pm.log.Error("failed to create external plugins directory", "dir", pm.Cfg.PluginsPath, "error", err)
} else {
pm.log.Info("External plugins directory created", "directory", pm.Cfg.PluginsPath)
}
} else {
pm.log.Debug("Scanning external plugins directory", "dir", pm.Cfg.PluginsPath)
if err := pm.scan(pm.Cfg.PluginsPath, true); err != nil {
return errutil.Wrapf(err, "failed to scan external plugins directory '%s'",
pm.Cfg.PluginsPath)
}
}
if err := pm.scanPluginPaths(); err != nil {
return err
}
for _, panel := range Panels {
staticRoutes := panel.InitFrontendPlugin()
StaticRoutes = append(StaticRoutes, staticRoutes...)
}
for _, ds := range DataSources {
staticRoutes := ds.InitFrontendPlugin()
StaticRoutes = append(StaticRoutes, staticRoutes...)
}
for _, app := range Apps {
staticRoutes := app.InitApp(Panels, DataSources)
StaticRoutes = append(StaticRoutes, staticRoutes...)
}
if Renderer != nil {
staticRoutes := Renderer.InitFrontendPlugin()
StaticRoutes = append(StaticRoutes, staticRoutes...)
}
for _, p := range Plugins {
if p.IsCorePlugin {
p.Signature = plugins.PluginSignatureInternal
} else {
metrics.SetPluginBuildInformation(p.Id, p.Type, p.Info.Version)
}
}
return nil
}
func (pm *PluginManager) Run(ctx context.Context) error {
pm.checkForUpdates()
ticker := time.NewTicker(time.Minute * 10)
run := true
for run {
select {
case <-ticker.C:
pm.checkForUpdates()
case <-ctx.Done():
run = false
}
}
return ctx.Err()
}
// scanPluginPaths scans configured plugin paths.
func (pm *PluginManager) scanPluginPaths() error {
for pluginID, settings := range pm.Cfg.PluginSettings {
path, exists := settings["path"]
if !exists || path == "" {
continue
}
if err := pm.scan(path, true); err != nil {
return errutil.Wrapf(err, "failed to scan directory configured for plugin '%s': '%s'", pluginID, path)
}
}
return nil
}
// scan a directory for plugins.
func (pm *PluginManager) scan(pluginDir string, requireSigned bool) error {
scanner := &PluginScanner{
pluginPath: pluginDir,
backendPluginManager: pm.BackendPluginManager,
cfg: pm.Cfg,
requireSigned: requireSigned,
log: pm.log,
plugins: map[string]*plugins.PluginBase{},
allowUnsignedPluginsCondition: pm.AllowUnsignedPluginsCondition,
}
// 1st pass: Scan plugins, also mapping plugins to their respective directories
if err := util.Walk(pluginDir, true, true, scanner.walker); err != nil {
if errors.Is(err, os.ErrNotExist) {
pm.log.Debug("Couldn't scan directory since it doesn't exist", "pluginDir", pluginDir, "err", err)
return nil
}
if errors.Is(err, os.ErrPermission) {
pm.log.Debug("Couldn't scan directory due to lack of permissions", "pluginDir", pluginDir, "err", err)
return nil
}
if pluginDir != "data/plugins" {
pm.log.Warn("Could not scan dir", "pluginDir", pluginDir, "err", err)
}
return err
}
pm.log.Debug("Initial plugin loading done")
// 2nd pass: Validate and register plugins
for dpath, plugin := range scanner.plugins {
// Try to find any root plugin
ancestors := strings.Split(dpath, string(filepath.Separator))
ancestors = ancestors[0 : len(ancestors)-1]
aPath := ""
if runtime.GOOS != "windows" && filepath.IsAbs(dpath) {
aPath = "/"
}
for _, a := range ancestors {
aPath = filepath.Join(aPath, a)
if root, ok := scanner.plugins[aPath]; ok {
plugin.Root = root
break
}
}
pm.log.Debug("Found plugin", "id", plugin.Id, "signature", plugin.Signature, "hasRoot", plugin.Root != nil)
signingError := scanner.validateSignature(plugin)
if signingError != nil {
pm.log.Debug("Failed to validate plugin signature. Will skip loading", "id", plugin.Id,
"signature", plugin.Signature, "status", signingError.ErrorCode)
pm.pluginScanningErrors[plugin.Id] = *signingError
continue
}
pm.log.Debug("Attempting to add plugin", "id", plugin.Id)
pluginGoType, exists := PluginTypes[plugin.Type]
if !exists {
return fmt.Errorf("unknown plugin type %q", plugin.Type)
}
jsonFPath := filepath.Join(plugin.PluginDir, "plugin.json")
// External plugins need a module.js file for SystemJS to load
if !strings.HasPrefix(jsonFPath, pm.Cfg.StaticRootPath) && !scanner.IsBackendOnlyPlugin(plugin.Type) {
module := filepath.Join(plugin.PluginDir, "module.js")
exists, err := fs.Exists(module)
if err != nil {
return err
}
if !exists {
scanner.log.Warn("Plugin missing module.js",
"name", plugin.Name,
"warning", "Missing module.js, If you loaded this plugin from git, make sure to compile it.",
"path", module)
}
}
// nolint:gosec
// We can ignore the gosec G304 warning on this one because `jsonFPath` is based
// on the plugin folder structure on disk and not user input.
reader, err := os.Open(jsonFPath)
if err != nil {
return err
}
defer func() {
if err := reader.Close(); err != nil {
scanner.log.Warn("Failed to close JSON file", "path", jsonFPath, "err", err)
}
}()
jsonParser := json.NewDecoder(reader)
loader := reflect.New(reflect.TypeOf(pluginGoType)).Interface().(plugins.PluginLoader)
// Load the full plugin, and add it to manager
if err := pm.loadPlugin(jsonParser, plugin, scanner, loader); err != nil {
return err
}
}
if len(scanner.errors) > 0 {
pm.log.Warn("Some plugins failed to load", "errors", scanner.errors)
pm.scanningErrors = scanner.errors
}
return nil
}
func (pm *PluginManager) loadPlugin(jsonParser *json.Decoder, pluginBase *plugins.PluginBase,
scanner *PluginScanner, loader plugins.PluginLoader) error {
plug, err := loader.Load(jsonParser, pluginBase, scanner.backendPluginManager)
if err != nil {
return err
}
var pb *plugins.PluginBase
switch p := plug.(type) {
case *plugins.DataSourcePlugin:
DataSources[p.Id] = p
pb = &p.PluginBase
case *plugins.PanelPlugin:
Panels[p.Id] = p
pb = &p.PluginBase
case *plugins.RendererPlugin:
Renderer = p
pb = &p.PluginBase
case *plugins.AppPlugin:
Apps[p.Id] = p
pb = &p.PluginBase
default:
panic(fmt.Sprintf("Unrecognized plugin type %T", plug))
}
if p, exists := Plugins[pb.Id]; exists {
pm.log.Warn("Plugin is duplicate", "id", pb.Id)
scanner.errors = append(scanner.errors, plugins.DuplicatePluginError{Plugin: pb, ExistingPlugin: p})
return nil
}
if !strings.HasPrefix(pluginBase.PluginDir, pm.Cfg.StaticRootPath) {
pm.log.Info("Registering plugin", "id", pb.Id)
}
if len(pb.Dependencies.Plugins) == 0 {
pb.Dependencies.Plugins = []plugins.PluginDependencyItem{}
}
if pb.Dependencies.GrafanaVersion == "" {
pb.Dependencies.GrafanaVersion = "*"
}
for _, include := range pb.Includes {
if include.Role == "" {
include.Role = models.ROLE_VIEWER
}
}
// Copy relevant fields from the base
pb.PluginDir = pluginBase.PluginDir
pb.Signature = pluginBase.Signature
pb.SignatureType = pluginBase.SignatureType
pb.SignatureOrg = pluginBase.SignatureOrg
Plugins[pb.Id] = pb
pm.log.Debug("Successfully added plugin", "id", pb.Id)
return nil
}
// GetDatasource returns a datasource based on passed pluginID if it exists
//
// This function fetches the datasource from the global variable DataSources in this package.
// Rather than refactor all dependencies on the global variable, we can use this as a transition.
func (pm *PluginManager) GetDatasource(pluginID string) (*plugins.DataSourcePlugin, bool) {
ds, exists := DataSources[pluginID]
return ds, exists
}
func (s *PluginScanner) walker(currentPath string, f os.FileInfo, err error) error {
// We scan all the subfolders for plugin.json (with some exceptions) so that we also load embedded plugins,
// for example the worldmap panel plugin that is embedded in the worldping app:
// https://github.com/raintank/worldping-app/tree/master/dist/grafana-worldmap-panel
if err != nil {
return fmt.Errorf("filepath.Walk reported an error for %q: %w", currentPath, err)
}
if f.Name() == "node_modules" || f.Name() == "Chromium.app" {
return util.ErrWalkSkipDir
}
if f.IsDir() {
return nil
}
if f.Name() != "plugin.json" {
return nil
}
if err := s.loadPlugin(currentPath); err != nil {
s.log.Error("Failed to load plugin", "error", err, "pluginPath", filepath.Dir(currentPath))
s.errors = append(s.errors, err)
}
return nil
}
func (s *PluginScanner) loadPlugin(pluginJSONFilePath string) error {
s.log.Debug("Loading plugin", "path", pluginJSONFilePath)
currentDir := filepath.Dir(pluginJSONFilePath)
// nolint:gosec
// We can ignore the gosec G304 warning on this one because `currentPath` is based
// on the plugin folder structure on disk and not user input.
reader, err := os.Open(pluginJSONFilePath)
if err != nil {
return err
}
defer func() {
if err := reader.Close(); err != nil {
s.log.Warn("Failed to close JSON file", "path", pluginJSONFilePath, "err", err)
}
}()
jsonParser := json.NewDecoder(reader)
pluginCommon := plugins.PluginBase{}
if err := jsonParser.Decode(&pluginCommon); err != nil {
return err
}
if pluginCommon.Id == "" || pluginCommon.Type == "" {
return errors.New("did not find type or id properties in plugin.json")
}
pluginCommon.PluginDir = filepath.Dir(pluginJSONFilePath)
pluginCommon.Files, err = collectPluginFilesWithin(pluginCommon.PluginDir)
if err != nil {
s.log.Warn("Could not collect plugin file information in directory", "pluginID", pluginCommon.Id, "dir", pluginCommon.PluginDir)
return err
}
signatureState, err := getPluginSignatureState(s.log, &pluginCommon)
if err != nil {
s.log.Warn("Could not get plugin signature state", "pluginID", pluginCommon.Id, "err", err)
return err
}
pluginCommon.Signature = signatureState.Status
pluginCommon.SignatureType = signatureState.Type
pluginCommon.SignatureOrg = signatureState.SigningOrg
s.plugins[currentDir] = &pluginCommon
return nil
}
func (*PluginScanner) IsBackendOnlyPlugin(pluginType string) bool {
return pluginType == "renderer"
}
// validateSignature validates a plugin's signature.
func (s *PluginScanner) validateSignature(plugin *plugins.PluginBase) *plugins.PluginError {
if plugin.Signature == plugins.PluginSignatureValid {
s.log.Debug("Plugin has valid signature", "id", plugin.Id)
return nil
}
if plugin.Root != nil {
// If a descendant plugin has an invalid signature, set its signature to that of the root plugin
if plugin.IsCorePlugin || plugin.Signature == plugins.PluginSignatureInternal {
s.log.Debug("Not setting descendant plugin's signature to that of root since it's core or internal",
"plugin", plugin.Id, "signature", plugin.Signature, "isCore", plugin.IsCorePlugin)
} else {
s.log.Debug("Setting descendant plugin's signature to that of root", "plugin", plugin.Id,
"root", plugin.Root.Id, "signature", plugin.Signature, "rootSignature", plugin.Root.Signature)
plugin.Signature = plugin.Root.Signature
if plugin.Signature == plugins.PluginSignatureValid {
s.log.Debug("Plugin has valid signature (inherited from root)", "id", plugin.Id)
return nil
}
}
} else {
s.log.Debug("Non-valid plugin Signature", "pluginID", plugin.Id, "pluginDir", plugin.PluginDir,
"state", plugin.Signature)
}
// For the time being, we choose to only require back-end plugins to be signed
// NOTE: the state is calculated again when setting metadata on the object
if !plugin.Backend || !s.requireSigned {
return nil
}
switch plugin.Signature {
case plugins.PluginSignatureUnsigned:
if allowed := s.allowUnsigned(plugin); !allowed {
s.log.Debug("Plugin is unsigned", "id", plugin.Id)
s.errors = append(s.errors, fmt.Errorf("plugin %q is unsigned", plugin.Id))
return &plugins.PluginError{
ErrorCode: signatureMissing,
}
}
s.log.Warn("Running an unsigned backend plugin", "pluginID", plugin.Id, "pluginDir",
plugin.PluginDir)
return nil
case plugins.PluginSignatureInvalid:
s.log.Debug("Plugin %q has an invalid signature", plugin.Id)
s.errors = append(s.errors, fmt.Errorf("plugin %q has an invalid signature", plugin.Id))
return &plugins.PluginError{
ErrorCode: signatureInvalid,
}
case plugins.PluginSignatureModified:
s.log.Debug("Plugin %q has a modified signature", plugin.Id)
s.errors = append(s.errors, fmt.Errorf("plugin %q's signature has been modified", plugin.Id))
return &plugins.PluginError{
ErrorCode: signatureModified,
}
default:
panic(fmt.Sprintf("Plugin %q has unrecognized plugin signature state %q", plugin.Id, plugin.Signature))
}
}
func (s *PluginScanner) allowUnsigned(plugin *plugins.PluginBase) bool {
if s.allowUnsignedPluginsCondition != nil {
return s.allowUnsignedPluginsCondition(plugin)
}
if s.cfg.Env == setting.Dev {
return true
}
for _, plug := range s.cfg.PluginsAllowUnsigned {
if plug == plugin.Id {
return true
}
}
return false
}
// ScanningErrors returns plugin scanning errors encountered.
func (pm *PluginManager) ScanningErrors() []plugins.PluginError {
scanningErrs := make([]plugins.PluginError, 0)
for id, e := range pm.pluginScanningErrors {
scanningErrs = append(scanningErrs, plugins.PluginError{
ErrorCode: e.ErrorCode,
PluginID: id,
})
}
return scanningErrs
}
func (pm *PluginManager) GetPluginMarkdown(pluginId string, name string) ([]byte, error) {
plug, exists := Plugins[pluginId]
if !exists {
return nil, plugins.PluginNotFoundError{PluginID: pluginId}
}
// nolint:gosec
// We can ignore the gosec G304 warning on this one because `plug.PluginDir` is based
// on the plugin folder structure on disk and not user input.
path := filepath.Join(plug.PluginDir, fmt.Sprintf("%s.md", strings.ToUpper(name)))
exists, err := fs.Exists(path)
if err != nil {
return nil, err
}
if !exists {
path = filepath.Join(plug.PluginDir, fmt.Sprintf("%s.md", strings.ToLower(name)))
}
exists, err = fs.Exists(path)
if err != nil {
return nil, err
}
if !exists {
return make([]byte, 0), nil
}
// nolint:gosec
// We can ignore the gosec G304 warning on this one because `plug.PluginDir` is based
// on the plugin folder structure on disk and not user input.
data, err := ioutil.ReadFile(path)
if err != nil {
return nil, err
}
return data, nil
}
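// Example (illustrative; the plugin ID is hypothetical): fetching a plugin's README
// for display. The lookup tries the upper-case file name first, then the lower-case
// one, and an empty slice means the plugin ships no such markdown file:
//
//	md, err := pm.GetPluginMarkdown("my-plugin", "README")
//	if err != nil {
//		return err
//	}
//	if len(md) == 0 {
//		// the plugin has neither README.md nor readme.md
//	}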
// collectPluginFilesWithin gets plugin filenames that require verification for plugin signing.
func collectPluginFilesWithin(rootDir string) ([]string, error) {
var files []string
err := filepath.Walk(rootDir, func(path string, info os.FileInfo, err error) error {
if err != nil {
return err
}
if !info.IsDir() && info.Name() != "MANIFEST.txt" {
file, err := filepath.Rel(rootDir, path)
if err != nil {
return err
}
files = append(files, filepath.ToSlash(file))
}
return nil
})
return files, err
}
// GetDataPlugin gets a DataPlugin with a certain name. If none is found, nil is returned.
func (pm *PluginManager) GetDataPlugin(pluginID string) plugins.DataPlugin {
if p, exists := DataSources[pluginID]; exists && p.CanHandleDataQueries() {
return p
}
// XXX: Might other plugins implement DataPlugin?
p := pm.BackendPluginManager.GetDataPlugin(pluginID)
if p != nil {
return p.(plugins.DataPlugin)
}
return nil
}
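
A hedged sketch (not part of the diff) of resolving a data plugin through the manager. It assumes the plugins.DataPlugin interface exposes the same DataQuery method used by DataSourcePlugin earlier in this diff; the helper name and package path are illustrative.

package example

import (
    "context"
    "fmt"

    "github.com/grafana/grafana/pkg/models"
    "github.com/grafana/grafana/pkg/plugins"
    "github.com/grafana/grafana/pkg/plugins/manager"
)

// queryViaManager is a hypothetical helper: it looks up the data plugin for a
// datasource type and forwards the query to it.
func queryViaManager(ctx context.Context, pm *manager.PluginManager, dsInfo *models.DataSource,
    query plugins.DataQuery) (plugins.DataResponse, error) {
    plugin := pm.GetDataPlugin(dsInfo.Type)
    if plugin == nil {
        return plugins.DataResponse{}, fmt.Errorf("no data plugin registered for type %q", dsInfo.Type)
    }
    // Assumes DataPlugin exposes DataQuery(ctx, dsInfo, query), as DataSourcePlugin does.
    return plugin.DataQuery(ctx, dsInfo, query)
}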

View File

@ -1,4 +1,4 @@
package plugins package manager
import ( import (
"context" "context"
@ -9,6 +9,7 @@ import (
"github.com/grafana/grafana-plugin-sdk-go/backend" "github.com/grafana/grafana-plugin-sdk-go/backend"
"github.com/grafana/grafana/pkg/models" "github.com/grafana/grafana/pkg/models"
"github.com/grafana/grafana/pkg/plugins"
"github.com/grafana/grafana/pkg/plugins/backendplugin" "github.com/grafana/grafana/pkg/plugins/backendplugin"
"github.com/grafana/grafana/pkg/setting" "github.com/grafana/grafana/pkg/setting"
"github.com/stretchr/testify/assert" "github.com/stretchr/testify/assert"
@ -17,7 +18,7 @@ import (
) )
func TestPluginManager_Init(t *testing.T) { func TestPluginManager_Init(t *testing.T) {
staticRootPath, err := filepath.Abs("../../public/") staticRootPath, err := filepath.Abs("../../../public/")
require.NoError(t, err) require.NoError(t, err)
origRootPath := setting.StaticRootPath origRootPath := setting.StaticRootPath
@ -132,7 +133,7 @@ func TestPluginManager_Init(t *testing.T) {
require.NoError(t, err) require.NoError(t, err)
assert.Len(t, pm.scanningErrors, 1) assert.Len(t, pm.scanningErrors, 1)
assert.True(t, errors.Is(pm.scanningErrors[0], duplicatePluginError{})) assert.True(t, errors.Is(pm.scanningErrors[0], plugins.DuplicatePluginError{}))
}) })
t.Run("With external back-end plugin with valid v2 signature", func(t *testing.T) { t.Run("With external back-end plugin with valid v2 signature", func(t *testing.T) {
@ -152,8 +153,8 @@ func TestPluginManager_Init(t *testing.T) {
assert.Equal(t, "Test", Plugins[pluginId].Name) assert.Equal(t, "Test", Plugins[pluginId].Name)
assert.Equal(t, pluginId, Plugins[pluginId].Id) assert.Equal(t, pluginId, Plugins[pluginId].Id)
assert.Equal(t, "1.0.0", Plugins[pluginId].Info.Version) assert.Equal(t, "1.0.0", Plugins[pluginId].Info.Version)
assert.Equal(t, pluginSignatureValid, Plugins[pluginId].Signature) assert.Equal(t, plugins.PluginSignatureValid, Plugins[pluginId].Signature)
assert.Equal(t, grafanaType, Plugins[pluginId].SignatureType) assert.Equal(t, plugins.GrafanaType, Plugins[pluginId].SignatureType)
assert.Equal(t, "Grafana Labs", Plugins[pluginId].SignatureOrg) assert.Equal(t, "Grafana Labs", Plugins[pluginId].SignatureOrg)
assert.False(t, Plugins[pluginId].IsCorePlugin) assert.False(t, Plugins[pluginId].IsCorePlugin)
}) })
@ -200,8 +201,8 @@ func TestPluginManager_Init(t *testing.T) {
assert.Equal(t, "Test", Plugins[pluginId].Name) assert.Equal(t, "Test", Plugins[pluginId].Name)
assert.Equal(t, pluginId, Plugins[pluginId].Id) assert.Equal(t, pluginId, Plugins[pluginId].Id)
assert.Equal(t, "1.0.0", Plugins[pluginId].Info.Version) assert.Equal(t, "1.0.0", Plugins[pluginId].Info.Version)
assert.Equal(t, pluginSignatureValid, Plugins[pluginId].Signature) assert.Equal(t, plugins.PluginSignatureValid, Plugins[pluginId].Signature)
assert.Equal(t, privateType, Plugins[pluginId].SignatureType) assert.Equal(t, plugins.PrivateType, Plugins[pluginId].SignatureType)
assert.Equal(t, "Will Browne", Plugins[pluginId].SignatureOrg) assert.Equal(t, "Will Browne", Plugins[pluginId].SignatureOrg)
assert.False(t, Plugins[pluginId].IsCorePlugin) assert.False(t, Plugins[pluginId].IsCorePlugin)
}) })
@ -266,6 +267,8 @@ func TestPluginManager_IsBackendOnlyPlugin(t *testing.T) {
} }
type fakeBackendPluginManager struct { type fakeBackendPluginManager struct {
backendplugin.Manager
registeredPlugins []string registeredPlugins []string
} }

View File

@ -1,4 +1,4 @@
package plugins package manager
import ( import (
"bytes" "bytes"
@ -14,6 +14,7 @@ import (
"strings" "strings"
"github.com/grafana/grafana/pkg/infra/log" "github.com/grafana/grafana/pkg/infra/log"
"github.com/grafana/grafana/pkg/plugins"
"github.com/grafana/grafana/pkg/setting" "github.com/grafana/grafana/pkg/setting"
"github.com/grafana/grafana/pkg/util/errutil" "github.com/grafana/grafana/pkg/util/errutil"
@ -56,11 +57,11 @@ type pluginManifest struct {
Files map[string]string `json:"files"` Files map[string]string `json:"files"`
// V2 supported fields // V2 supported fields
ManifestVersion string `json:"manifestVersion"` ManifestVersion string `json:"manifestVersion"`
SignatureType PluginSignatureType `json:"signatureType"` SignatureType plugins.PluginSignatureType `json:"signatureType"`
SignedByOrg string `json:"signedByOrg"` SignedByOrg string `json:"signedByOrg"`
SignedByOrgName string `json:"signedByOrgName"` SignedByOrgName string `json:"signedByOrgName"`
RootURLs []string `json:"rootUrls"` RootURLs []string `json:"rootUrls"`
} }
func (m *pluginManifest) isV2() bool { func (m *pluginManifest) isV2() bool {
@ -97,7 +98,7 @@ func readPluginManifest(body []byte) (*pluginManifest, error) {
} }
// getPluginSignatureState returns the signature state for a plugin. // getPluginSignatureState returns the signature state for a plugin.
func getPluginSignatureState(log log.Logger, plugin *PluginBase) (PluginSignatureState, error) { func getPluginSignatureState(log log.Logger, plugin *plugins.PluginBase) (plugins.PluginSignatureState, error) {
log.Debug("Getting signature state of plugin", "plugin", plugin.Id, "isBackend", plugin.Backend) log.Debug("Getting signature state of plugin", "plugin", plugin.Id, "isBackend", plugin.Backend)
manifestPath := filepath.Join(plugin.PluginDir, "MANIFEST.txt") manifestPath := filepath.Join(plugin.PluginDir, "MANIFEST.txt")
@ -107,31 +108,31 @@ func getPluginSignatureState(log log.Logger, plugin *PluginBase) (PluginSignatur
byteValue, err := ioutil.ReadFile(manifestPath) byteValue, err := ioutil.ReadFile(manifestPath)
if err != nil || len(byteValue) < 10 { if err != nil || len(byteValue) < 10 {
log.Debug("Plugin is unsigned", "id", plugin.Id) log.Debug("Plugin is unsigned", "id", plugin.Id)
return PluginSignatureState{ return plugins.PluginSignatureState{
Status: pluginSignatureUnsigned, Status: plugins.PluginSignatureUnsigned,
}, nil }, nil
} }
manifest, err := readPluginManifest(byteValue) manifest, err := readPluginManifest(byteValue)
if err != nil { if err != nil {
log.Debug("Plugin signature invalid", "id", plugin.Id) log.Debug("Plugin signature invalid", "id", plugin.Id)
return PluginSignatureState{ return plugins.PluginSignatureState{
Status: pluginSignatureInvalid, Status: plugins.PluginSignatureInvalid,
}, nil }, nil
} }
// Make sure the versions all match // Make sure the versions all match
if manifest.Plugin != plugin.Id || manifest.Version != plugin.Info.Version { if manifest.Plugin != plugin.Id || manifest.Version != plugin.Info.Version {
return PluginSignatureState{ return plugins.PluginSignatureState{
Status: pluginSignatureModified, Status: plugins.PluginSignatureModified,
}, nil }, nil
} }
// Validate that private is running within defined root URLs // Validate that private is running within defined root URLs
if manifest.SignatureType == privateType { if manifest.SignatureType == plugins.PrivateType {
appURL, err := url.Parse(setting.AppUrl) appURL, err := url.Parse(setting.AppUrl)
if err != nil { if err != nil {
return PluginSignatureState{}, err return plugins.PluginSignatureState{}, err
} }
foundMatch := false foundMatch := false
@ -139,7 +140,7 @@ func getPluginSignatureState(log log.Logger, plugin *PluginBase) (PluginSignatur
rootURL, err := url.Parse(u) rootURL, err := url.Parse(u)
if err != nil { if err != nil {
log.Warn("Could not parse plugin root URL", "plugin", plugin.Id, "rootUrl", rootURL) log.Warn("Could not parse plugin root URL", "plugin", plugin.Id, "rootUrl", rootURL)
return PluginSignatureState{}, err return plugins.PluginSignatureState{}, err
} }
if rootURL.Scheme == appURL.Scheme && if rootURL.Scheme == appURL.Scheme &&
rootURL.Host == appURL.Host && rootURL.Host == appURL.Host &&
@ -150,9 +151,10 @@ func getPluginSignatureState(log log.Logger, plugin *PluginBase) (PluginSignatur
} }
if !foundMatch { if !foundMatch {
log.Warn("Could not find root URL that matches running application URL", "plugin", plugin.Id, "appUrl", appURL, "rootUrls", manifest.RootURLs) log.Warn("Could not find root URL that matches running application URL", "plugin", plugin.Id,
return PluginSignatureState{ "appUrl", appURL, "rootUrls", manifest.RootURLs)
Status: pluginSignatureInvalid, return plugins.PluginSignatureState{
Status: plugins.PluginSignatureInvalid,
}, nil }, nil
} }
} }
@ -171,8 +173,8 @@ func getPluginSignatureState(log log.Logger, plugin *PluginBase) (PluginSignatur
f, err := os.Open(fp) f, err := os.Open(fp)
if err != nil { if err != nil {
log.Warn("Plugin file listed in the manifest was not found", "plugin", plugin.Id, "filename", p, "dir", plugin.PluginDir) log.Warn("Plugin file listed in the manifest was not found", "plugin", plugin.Id, "filename", p, "dir", plugin.PluginDir)
return PluginSignatureState{ return plugins.PluginSignatureState{
Status: pluginSignatureModified, Status: plugins.PluginSignatureModified,
}, nil }, nil
} }
defer func() { defer func() {
@ -184,15 +186,15 @@ func getPluginSignatureState(log log.Logger, plugin *PluginBase) (PluginSignatur
h := sha256.New() h := sha256.New()
if _, err := io.Copy(h, f); err != nil { if _, err := io.Copy(h, f); err != nil {
log.Warn("Couldn't read plugin file", "plugin", plugin.Id, "filename", fp) log.Warn("Couldn't read plugin file", "plugin", plugin.Id, "filename", fp)
return PluginSignatureState{ return plugins.PluginSignatureState{
Status: pluginSignatureModified, Status: plugins.PluginSignatureModified,
}, nil }, nil
} }
sum := hex.EncodeToString(h.Sum(nil)) sum := hex.EncodeToString(h.Sum(nil))
if sum != hash { if sum != hash {
log.Warn("Plugin file's signature has been modified versus manifest", "plugin", plugin.Id, "filename", fp) log.Warn("Plugin file's signature has been modified versus manifest", "plugin", plugin.Id, "filename", fp)
return PluginSignatureState{ return plugins.PluginSignatureState{
Status: pluginSignatureModified, Status: plugins.PluginSignatureModified,
}, nil }, nil
} }
manifestFiles[p] = true manifestFiles[p] = true
@ -209,16 +211,16 @@ func getPluginSignatureState(log log.Logger, plugin *PluginBase) (PluginSignatur
if len(unsignedFiles) > 0 { if len(unsignedFiles) > 0 {
log.Warn("The following files were not included in the signature", "plugin", plugin.Id, "files", unsignedFiles) log.Warn("The following files were not included in the signature", "plugin", plugin.Id, "files", unsignedFiles)
return PluginSignatureState{ return plugins.PluginSignatureState{
Status: pluginSignatureModified, Status: plugins.PluginSignatureModified,
}, nil }, nil
} }
} }
// Everything OK // Everything OK
log.Debug("Plugin signature valid", "id", plugin.Id) log.Debug("Plugin signature valid", "id", plugin.Id)
return PluginSignatureState{ return plugins.PluginSignatureState{
Status: pluginSignatureValid, Status: plugins.PluginSignatureValid,
Type: manifest.SignatureType, Type: manifest.SignatureType,
SigningOrg: manifest.SignedByOrgName, SigningOrg: manifest.SignedByOrgName,
}, nil }, nil
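The per-file loop above hashes every file listed in MANIFEST.txt and compares the digest with the one recorded in the manifest. A standalone sketch of that check using only the standard library; the file name and expected digest below are placeholders for illustration:

package main

import (
	"crypto/sha256"
	"encoding/hex"
	"fmt"
	"io"
	"os"
)

// fileSHA256 mirrors the per-file check in getPluginSignatureState: hash the
// file on disk and return the hex digest for comparison against the manifest.
func fileSHA256(path string) (string, error) {
	f, err := os.Open(path)
	if err != nil {
		return "", err
	}
	defer f.Close()

	h := sha256.New()
	if _, err := io.Copy(h, f); err != nil {
		return "", err
	}
	return hex.EncodeToString(h.Sum(nil)), nil
}

func main() {
	// "module.js" and the expected digest are placeholders.
	sum, err := fileSHA256("module.js")
	if err != nil {
		fmt.Println("could not hash file:", err)
		return
	}
	fmt.Println("matches manifest:", sum == "expected-hex-digest-from-MANIFEST.txt")
}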


@ -1,10 +1,11 @@
package plugins package manager
import ( import (
"sort" "sort"
"strings" "strings"
"testing" "testing"
"github.com/grafana/grafana/pkg/plugins"
"github.com/stretchr/testify/assert" "github.com/stretchr/testify/assert"
"github.com/stretchr/testify/require" "github.com/stretchr/testify/require"
) )
@ -104,7 +105,7 @@ khdr/tZ1PDgRxMqB/u+Vtbpl0xSxgblnrDOYMSI=
assert.Equal(t, int64(1605807018050), manifest.Time) assert.Equal(t, int64(1605807018050), manifest.Time)
assert.Equal(t, "7e4d0c6a708866e7", manifest.KeyID) assert.Equal(t, "7e4d0c6a708866e7", manifest.KeyID)
assert.Equal(t, "2.0.0", manifest.ManifestVersion) assert.Equal(t, "2.0.0", manifest.ManifestVersion)
assert.Equal(t, privateType, manifest.SignatureType) assert.Equal(t, plugins.PrivateType, manifest.SignatureType)
assert.Equal(t, "willbrowne", manifest.SignedByOrg) assert.Equal(t, "willbrowne", manifest.SignedByOrg)
assert.Equal(t, "Will Browne", manifest.SignedByOrgName) assert.Equal(t, "Will Browne", manifest.SignedByOrgName)
assert.Equal(t, []string{"http://localhost:3000/"}, manifest.RootURLs) assert.Equal(t, []string{"http://localhost:3000/"}, manifest.RootURLs)


@ -1,13 +1,13 @@
package plugins package manager
import ( import (
"github.com/grafana/grafana/pkg/bus" "github.com/grafana/grafana/pkg/bus"
"github.com/grafana/grafana/pkg/models" "github.com/grafana/grafana/pkg/models"
"github.com/grafana/grafana/pkg/plugins"
) )
func GetPluginSettings(orgId int64) (map[string]*models.PluginSettingInfoDTO, error) { func (pm *PluginManager) GetPluginSettings(orgId int64) (map[string]*models.PluginSettingInfoDTO, error) {
query := models.GetPluginSettingsQuery{OrgId: orgId} query := models.GetPluginSettingsQuery{OrgId: orgId}
if err := bus.Dispatch(&query); err != nil { if err := bus.Dispatch(&query); err != nil {
return nil, err return nil, err
} }
@ -52,39 +52,39 @@ func GetPluginSettings(orgId int64) (map[string]*models.PluginSettingInfoDTO, er
return pluginMap, nil return pluginMap, nil
} }
func GetEnabledPlugins(orgId int64) (*EnabledPlugins, error) { func (pm *PluginManager) GetEnabledPlugins(orgID int64) (*plugins.EnabledPlugins, error) {
enabledPlugins := NewEnabledPlugins() enabledPlugins := &plugins.EnabledPlugins{
pluginSettingMap, err := GetPluginSettings(orgId) Panels: make([]*plugins.PanelPlugin, 0),
DataSources: make(map[string]*plugins.DataSourcePlugin),
Apps: make([]*plugins.AppPlugin, 0),
}
pluginSettingMap, err := pm.GetPluginSettings(orgID)
if err != nil { if err != nil {
return nil, err return enabledPlugins, err
} }
isPluginEnabled := func(pluginId string) bool { for pluginID, app := range Apps {
_, ok := pluginSettingMap[pluginId] if b, ok := pluginSettingMap[pluginID]; ok {
return ok
}
for pluginId, app := range Apps {
if b, ok := pluginSettingMap[pluginId]; ok {
app.Pinned = b.Pinned app.Pinned = b.Pinned
enabledPlugins.Apps = append(enabledPlugins.Apps, app) enabledPlugins.Apps = append(enabledPlugins.Apps, app)
} }
} }
// add all plugins that are not part of an App. // add all plugins that are not part of an App.
for dsId, ds := range DataSources { for dsID, ds := range DataSources {
if isPluginEnabled(ds.Id) { if _, exists := pluginSettingMap[ds.Id]; exists {
enabledPlugins.DataSources[dsId] = ds enabledPlugins.DataSources[dsID] = ds
} }
} }
for _, panel := range Panels { for _, panel := range Panels {
if isPluginEnabled(panel.Id) { if _, exists := pluginSettingMap[panel.Id]; exists {
enabledPlugins.Panels = append(enabledPlugins.Panels, panel) enabledPlugins.Panels = append(enabledPlugins.Panels, panel)
} }
} }
return &enabledPlugins, nil return enabledPlugins, nil
} }
// IsAppInstalled checks if an app plugin with provided plugin ID is installed. // IsAppInstalled checks if an app plugin with provided plugin ID is installed.
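The refactored GetEnabledPlugins drops the isPluginEnabled closure in favour of a direct map-membership test against the plugin settings. A minimal sketch of that filtering pattern with hypothetical stand-in types (the real code works on plugins.PanelPlugin and models.PluginSettingInfoDTO):

package main

import "fmt"

// panel is a stand-in for a plugin type keyed by ID.
type panel struct{ ID string }

func enabledPanels(all []panel, settings map[string]struct{}) []panel {
	enabled := make([]panel, 0, len(all))
	for _, p := range all {
		// Same membership test the refactored GetEnabledPlugins uses:
		// a plugin counts as enabled when a settings row exists for its ID.
		if _, exists := settings[p.ID]; exists {
			enabled = append(enabled, p)
		}
	}
	return enabled
}

func main() {
	all := []panel{{ID: "graph"}, {ID: "worldmap-panel"}}
	settings := map[string]struct{}{"graph": {}}
	fmt.Println(enabledPanels(all, settings)) // [{graph}]
}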


@ -1,4 +1,4 @@
package plugins package manager
import ( import (
"encoding/json" "encoding/json"
@ -40,7 +40,7 @@ func getAllExternalPluginSlugs() string {
} }
func (pm *PluginManager) checkForUpdates() { func (pm *PluginManager) checkForUpdates() {
if !setting.CheckForUpdates { if !pm.Cfg.CheckForUpdates {
return return
} }


@ -3,55 +3,16 @@ package plugins
import ( import (
"encoding/json" "encoding/json"
"fmt" "fmt"
"strings"
"github.com/grafana/grafana/pkg/models" "github.com/grafana/grafana/pkg/models"
"github.com/grafana/grafana/pkg/plugins/backendplugin" "github.com/grafana/grafana/pkg/plugins/backendplugin"
"github.com/grafana/grafana/pkg/setting"
) )
var ( const (
PluginTypeApp = "app" PluginTypeApp = "app"
PluginTypeDashboard = "dashboard" PluginTypeDashboard = "dashboard"
) )
type PluginState string
var (
PluginStateAlpha PluginState = "alpha"
)
type PluginSignatureState struct {
Status PluginSignatureStatus
Type PluginSignatureType
SigningOrg string
}
type PluginSignatureStatus string
func (pss PluginSignatureStatus) IsValid() bool {
return pss == pluginSignatureValid
}
func (pss PluginSignatureStatus) IsInternal() bool {
return pss == pluginSignatureInternal
}
const (
pluginSignatureInternal PluginSignatureStatus = "internal" // core plugin, no signature
pluginSignatureValid PluginSignatureStatus = "valid" // signed and accurate MANIFEST
pluginSignatureInvalid PluginSignatureStatus = "invalid" // invalid signature
pluginSignatureModified PluginSignatureStatus = "modified" // valid signature, but content mismatch
pluginSignatureUnsigned PluginSignatureStatus = "unsigned" // no MANIFEST file
)
type PluginSignatureType string
const (
grafanaType PluginSignatureType = "grafana"
privateType PluginSignatureType = "private"
)
type PluginNotFoundError struct { type PluginNotFoundError struct {
PluginID string PluginID string
} }
@ -60,25 +21,25 @@ func (e PluginNotFoundError) Error() string {
return fmt.Sprintf("plugin with ID %q not found", e.PluginID) return fmt.Sprintf("plugin with ID %q not found", e.PluginID)
} }
type duplicatePluginError struct { type DuplicatePluginError struct {
Plugin *PluginBase Plugin *PluginBase
ExistingPlugin *PluginBase ExistingPlugin *PluginBase
} }
func (e duplicatePluginError) Error() string { func (e DuplicatePluginError) Error() string {
return fmt.Sprintf("plugin with ID %q already loaded from %q", e.Plugin.Id, e.ExistingPlugin.PluginDir) return fmt.Sprintf("plugin with ID %q already loaded from %q", e.Plugin.Id, e.ExistingPlugin.PluginDir)
} }
func (e duplicatePluginError) Is(err error) bool { func (e DuplicatePluginError) Is(err error) bool {
// nolint:errorlint // nolint:errorlint
_, ok := err.(duplicatePluginError) _, ok := err.(DuplicatePluginError)
return ok return ok
} }
// PluginLoader can load a plugin. // PluginLoader can load a plugin.
type PluginLoader interface { type PluginLoader interface {
// Load loads a plugin and registers it with the manager. // Load loads a plugin and returns it.
Load(decoder *json.Decoder, base *PluginBase, backendPluginManager backendplugin.Manager) error Load(decoder *json.Decoder, base *PluginBase, backendPluginManager backendplugin.Manager) (interface{}, error)
} }
// PluginBase is the base plugin type. // PluginBase is the base plugin type.
@ -112,39 +73,6 @@ type PluginBase struct {
Root *PluginBase Root *PluginBase
} }
func (pb *PluginBase) registerPlugin(base *PluginBase) error {
if p, exists := Plugins[pb.Id]; exists {
return duplicatePluginError{Plugin: pb, ExistingPlugin: p}
}
if !strings.HasPrefix(base.PluginDir, setting.StaticRootPath) {
plog.Info("Registering plugin", "id", pb.Id)
}
if len(pb.Dependencies.Plugins) == 0 {
pb.Dependencies.Plugins = []PluginDependencyItem{}
}
if pb.Dependencies.GrafanaVersion == "" {
pb.Dependencies.GrafanaVersion = "*"
}
for _, include := range pb.Includes {
if include.Role == "" {
include.Role = models.ROLE_VIEWER
}
}
// Copy relevant fields from the base
pb.PluginDir = base.PluginDir
pb.Signature = base.Signature
pb.SignatureType = base.SignatureType
pb.SignatureOrg = base.SignatureOrg
Plugins[pb.Id] = pb
return nil
}
type PluginDependencies struct { type PluginDependencies struct {
GrafanaVersion string `json:"grafanaVersion"` GrafanaVersion string `json:"grafanaVersion"`
Plugins []PluginDependencyItem `json:"plugins"` Plugins []PluginDependencyItem `json:"plugins"`
@ -214,11 +142,3 @@ type EnabledPlugins struct {
DataSources map[string]*DataSourcePlugin DataSources map[string]*DataSourcePlugin
Apps []*AppPlugin Apps []*AppPlugin
} }
func NewEnabledPlugins() EnabledPlugins {
return EnabledPlugins{
Panels: make([]*PanelPlugin, 0),
DataSources: make(map[string]*DataSourcePlugin),
Apps: make([]*AppPlugin, 0),
}
}
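With registerPlugin removed, the PluginLoader contract changes: Load now decodes plugin.json and hands the value back to the caller, and the plugin manager takes care of registration. A hedged sketch of an implementation against the new interface; examplePlugin is hypothetical:

package main

import (
	"encoding/json"
	"fmt"
	"strings"

	"github.com/grafana/grafana/pkg/plugins"
	"github.com/grafana/grafana/pkg/plugins/backendplugin"
)

// examplePlugin is a hypothetical plugin type implementing the reworked
// PluginLoader interface: decode, return the value, no registration side effects.
type examplePlugin struct {
	plugins.PluginBase
}

func (p *examplePlugin) Load(decoder *json.Decoder, base *plugins.PluginBase,
	backendPluginManager backendplugin.Manager) (interface{}, error) {
	if err := decoder.Decode(p); err != nil {
		return nil, err
	}
	return p, nil
}

// Compile-time check that the sketch matches the new interface.
var _ plugins.PluginLoader = (*examplePlugin)(nil)

func main() {
	dec := json.NewDecoder(strings.NewReader(`{"id": "example-panel", "type": "panel"}`))
	loaded, err := (&examplePlugin{}).Load(dec, &plugins.PluginBase{}, nil)
	fmt.Println(loaded != nil, err) // true <nil>
}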


@ -11,15 +11,11 @@ type PanelPlugin struct {
SkipDataQuery bool `json:"skipDataQuery"` SkipDataQuery bool `json:"skipDataQuery"`
} }
func (p *PanelPlugin) Load(decoder *json.Decoder, base *PluginBase, backendPluginManager backendplugin.Manager) error { func (p *PanelPlugin) Load(decoder *json.Decoder, base *PluginBase, backendPluginManager backendplugin.Manager) (
interface{}, error) {
if err := decoder.Decode(p); err != nil { if err := decoder.Decode(p); err != nil {
return err return nil, err
} }
if err := p.registerPlugin(base); err != nil { return p, nil
return err
}
Panels[p.Id] = p
return nil
} }


@ -0,0 +1,150 @@
package plugindashboards
import (
"context"
"github.com/grafana/grafana/pkg/bus"
"github.com/grafana/grafana/pkg/infra/log"
"github.com/grafana/grafana/pkg/models"
"github.com/grafana/grafana/pkg/plugins"
"github.com/grafana/grafana/pkg/plugins/manager"
"github.com/grafana/grafana/pkg/registry"
"github.com/grafana/grafana/pkg/tsdb"
)
func init() {
registry.Register(&registry.Descriptor{
Name: "PluginDashboardService",
Instance: &Service{},
})
}
type Service struct {
DataService *tsdb.Service `inject:""`
PluginManager *manager.PluginManager `inject:""`
logger log.Logger
}
func (s *Service) Init() error {
bus.AddEventListener(s.handlePluginStateChanged)
s.logger = log.New("plugindashboards")
return nil
}
func (s *Service) Run(ctx context.Context) error {
s.updateAppDashboards()
return nil
}
func (s *Service) updateAppDashboards() {
s.logger.Debug("Looking for app dashboard updates")
query := models.GetPluginSettingsQuery{OrgId: 0}
if err := bus.Dispatch(&query); err != nil {
s.logger.Error("Failed to get all plugin settings", "error", err)
return
}
for _, pluginSetting := range query.Result {
// ignore disabled plugins
if !pluginSetting.Enabled {
continue
}
if pluginDef, exists := manager.Plugins[pluginSetting.PluginId]; exists {
if pluginDef.Info.Version != pluginSetting.PluginVersion {
s.syncPluginDashboards(pluginDef, pluginSetting.OrgId)
}
}
}
}
func (s *Service) syncPluginDashboards(pluginDef *plugins.PluginBase, orgID int64) {
s.logger.Info("Syncing plugin dashboards to DB", "pluginId", pluginDef.Id)
// Get plugin dashboards
dashboards, err := s.PluginManager.GetPluginDashboards(orgID, pluginDef.Id)
if err != nil {
s.logger.Error("Failed to load app dashboards", "error", err)
return
}
// Update dashboards with updated revisions
for _, dash := range dashboards {
// remove removed ones
if dash.Removed {
s.logger.Info("Deleting plugin dashboard", "pluginId", pluginDef.Id, "dashboard", dash.Slug)
deleteCmd := models.DeleteDashboardCommand{OrgId: orgID, Id: dash.DashboardId}
if err := bus.Dispatch(&deleteCmd); err != nil {
s.logger.Error("Failed to auto update app dashboard", "pluginId", pluginDef.Id, "error", err)
return
}
continue
}
// update updated ones
if dash.ImportedRevision != dash.Revision {
if err := s.autoUpdateAppDashboard(dash, orgID); err != nil {
s.logger.Error("Failed to auto update app dashboard", "pluginId", pluginDef.Id, "error", err)
return
}
}
}
// update version in plugin_setting table to mark that we have processed the update
query := models.GetPluginSettingByIdQuery{PluginId: pluginDef.Id, OrgId: orgID}
if err := bus.Dispatch(&query); err != nil {
s.logger.Error("Failed to read plugin setting by ID", "error", err)
return
}
appSetting := query.Result
cmd := models.UpdatePluginSettingVersionCmd{
OrgId: appSetting.OrgId,
PluginId: appSetting.PluginId,
PluginVersion: pluginDef.Info.Version,
}
if err := bus.Dispatch(&cmd); err != nil {
s.logger.Error("Failed to update plugin setting version", "error", err)
}
}
func (s *Service) handlePluginStateChanged(event *models.PluginStateChangedEvent) error {
s.logger.Info("Plugin state changed", "pluginId", event.PluginId, "enabled", event.Enabled)
if event.Enabled {
s.syncPluginDashboards(manager.Plugins[event.PluginId], event.OrgId)
} else {
query := models.GetDashboardsByPluginIdQuery{PluginId: event.PluginId, OrgId: event.OrgId}
if err := bus.Dispatch(&query); err != nil {
return err
}
for _, dash := range query.Result {
s.logger.Info("Deleting plugin dashboard", "pluginId", event.PluginId, "dashboard", dash.Slug)
deleteCmd := models.DeleteDashboardCommand{OrgId: dash.OrgId, Id: dash.Id}
if err := bus.Dispatch(&deleteCmd); err != nil {
return err
}
}
}
return nil
}
func (s *Service) autoUpdateAppDashboard(pluginDashInfo *plugins.PluginDashboardInfoDTO, orgID int64) error {
dash, err := s.PluginManager.LoadPluginDashboard(pluginDashInfo.PluginId, pluginDashInfo.Path)
if err != nil {
return err
}
s.logger.Info("Auto updating App dashboard", "dashboard", dash.Title, "newRev",
pluginDashInfo.Revision, "oldRev", pluginDashInfo.ImportedRevision)
user := &models.SignedInUser{UserId: 0, OrgRole: models.ROLE_ADMIN}
_, err = s.PluginManager.ImportDashboard(pluginDashInfo.PluginId, pluginDashInfo.Path, orgID, 0, dash.Data, true,
nil, user, s.DataService)
return err
}
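The new plugindashboards service reacts to plugin enable/disable through the event bus rather than being called directly. A small sketch of that wiring, publishing a PluginStateChangedEvent to a registered listener (assuming the bus package's AddEventListener/Publish helpers used above behave as shown):

package main

import (
	"fmt"

	"github.com/grafana/grafana/pkg/bus"
	"github.com/grafana/grafana/pkg/models"
)

func main() {
	// Register a listener the same way Service.Init does.
	bus.AddEventListener(func(event *models.PluginStateChangedEvent) error {
		fmt.Println("plugin state changed:", event.PluginId, "enabled:", event.Enabled)
		return nil
	})

	// Publishing the event triggers the listener synchronously.
	_ = bus.Publish(&models.PluginStateChangedEvent{
		PluginId: "example-app",
		OrgId:    1,
		Enabled:  true,
	})
}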


@ -1,541 +1,2 @@
// Package plugins contains plugin related logic.
package plugins package plugins
import (
"context"
"encoding/json"
"errors"
"fmt"
"io/ioutil"
"os"
"path/filepath"
"reflect"
"runtime"
"strings"
"time"
"github.com/grafana/grafana/pkg/infra/fs"
"github.com/grafana/grafana/pkg/infra/log"
"github.com/grafana/grafana/pkg/infra/metrics"
"github.com/grafana/grafana/pkg/plugins/backendplugin"
"github.com/grafana/grafana/pkg/registry"
"github.com/grafana/grafana/pkg/setting"
"github.com/grafana/grafana/pkg/util"
"github.com/grafana/grafana/pkg/util/errutil"
)
var (
DataSources map[string]*DataSourcePlugin
Panels map[string]*PanelPlugin
StaticRoutes []*PluginStaticRoute
Apps map[string]*AppPlugin
Plugins map[string]*PluginBase
PluginTypes map[string]interface{}
Renderer *RendererPlugin
plog log.Logger
)
type unsignedPluginConditionFunc = func(plugin *PluginBase) bool
type PluginScanner struct {
pluginPath string
errors []error
backendPluginManager backendplugin.Manager
cfg *setting.Cfg
requireSigned bool
log log.Logger
plugins map[string]*PluginBase
allowUnsignedPluginsCondition unsignedPluginConditionFunc
}
type PluginManager struct {
BackendPluginManager backendplugin.Manager `inject:""`
Cfg *setting.Cfg `inject:""`
log log.Logger
scanningErrors []error
// AllowUnsignedPluginsCondition changes the policy for allowing unsigned plugins. Signature validation only runs when plugins are starting
// and running plugins will not be terminated if they violate the new policy.
AllowUnsignedPluginsCondition unsignedPluginConditionFunc
GrafanaLatestVersion string
GrafanaHasUpdate bool
pluginScanningErrors map[string]PluginError
}
func init() {
registry.RegisterService(&PluginManager{})
}
func (pm *PluginManager) Init() error {
pm.log = log.New("plugins")
plog = log.New("plugins")
DataSources = map[string]*DataSourcePlugin{}
StaticRoutes = []*PluginStaticRoute{}
Panels = map[string]*PanelPlugin{}
Apps = map[string]*AppPlugin{}
Plugins = map[string]*PluginBase{}
PluginTypes = map[string]interface{}{
"panel": PanelPlugin{},
"datasource": DataSourcePlugin{},
"app": AppPlugin{},
"renderer": RendererPlugin{},
}
pm.pluginScanningErrors = map[string]PluginError{}
pm.log.Info("Starting plugin search")
plugDir := filepath.Join(pm.Cfg.StaticRootPath, "app/plugins")
pm.log.Debug("Scanning core plugin directory", "dir", plugDir)
if err := pm.scan(plugDir, false); err != nil {
return errutil.Wrapf(err, "failed to scan core plugin directory '%s'", plugDir)
}
plugDir = pm.Cfg.BundledPluginsPath
pm.log.Debug("Scanning bundled plugins directory", "dir", plugDir)
exists, err := fs.Exists(plugDir)
if err != nil {
return err
}
if exists {
if err := pm.scan(plugDir, false); err != nil {
return errutil.Wrapf(err, "failed to scan bundled plugins directory '%s'", plugDir)
}
}
// check if plugins dir exists
exists, err = fs.Exists(pm.Cfg.PluginsPath)
if err != nil {
return err
}
if !exists {
if err = os.MkdirAll(pm.Cfg.PluginsPath, os.ModePerm); err != nil {
pm.log.Error("failed to create external plugins directory", "dir", pm.Cfg.PluginsPath, "error", err)
} else {
pm.log.Info("External plugins directory created", "directory", pm.Cfg.PluginsPath)
}
} else {
pm.log.Debug("Scanning external plugins directory", "dir", pm.Cfg.PluginsPath)
if err := pm.scan(pm.Cfg.PluginsPath, true); err != nil {
return errutil.Wrapf(err, "failed to scan external plugins directory '%s'",
pm.Cfg.PluginsPath)
}
}
if err := pm.scanPluginPaths(); err != nil {
return err
}
for _, panel := range Panels {
panel.initFrontendPlugin()
}
for _, ds := range DataSources {
ds.initFrontendPlugin()
}
for _, app := range Apps {
app.initApp()
}
if Renderer != nil {
Renderer.initFrontendPlugin()
}
for _, p := range Plugins {
if p.IsCorePlugin {
p.Signature = pluginSignatureInternal
} else {
metrics.SetPluginBuildInformation(p.Id, p.Type, p.Info.Version)
}
}
return nil
}
func (pm *PluginManager) Run(ctx context.Context) error {
pm.updateAppDashboards()
pm.checkForUpdates()
ticker := time.NewTicker(time.Minute * 10)
run := true
for run {
select {
case <-ticker.C:
pm.checkForUpdates()
case <-ctx.Done():
run = false
}
}
return ctx.Err()
}
// scanPluginPaths scans configured plugin paths.
func (pm *PluginManager) scanPluginPaths() error {
for pluginID, settings := range pm.Cfg.PluginSettings {
path, exists := settings["path"]
if !exists || path == "" {
continue
}
if err := pm.scan(path, true); err != nil {
return errutil.Wrapf(err, "failed to scan directory configured for plugin '%s': '%s'", pluginID, path)
}
}
return nil
}
// scan a directory for plugins.
func (pm *PluginManager) scan(pluginDir string, requireSigned bool) error {
scanner := &PluginScanner{
pluginPath: pluginDir,
backendPluginManager: pm.BackendPluginManager,
cfg: pm.Cfg,
requireSigned: requireSigned,
log: pm.log,
plugins: map[string]*PluginBase{},
allowUnsignedPluginsCondition: pm.AllowUnsignedPluginsCondition,
}
// 1st pass: Scan plugins, also mapping plugins to their respective directories
if err := util.Walk(pluginDir, true, true, scanner.walker); err != nil {
if errors.Is(err, os.ErrNotExist) {
pm.log.Debug("Couldn't scan directory since it doesn't exist", "pluginDir", pluginDir, "err", err)
return nil
}
if errors.Is(err, os.ErrPermission) {
pm.log.Debug("Couldn't scan directory due to lack of permissions", "pluginDir", pluginDir, "err", err)
return nil
}
if pluginDir != "data/plugins" {
pm.log.Warn("Could not scan dir", "pluginDir", pluginDir, "err", err)
}
return err
}
pm.log.Debug("Initial plugin loading done")
// 2nd pass: Validate and register plugins
for dpath, plugin := range scanner.plugins {
// Try to find any root plugin
ancestors := strings.Split(dpath, string(filepath.Separator))
ancestors = ancestors[0 : len(ancestors)-1]
aPath := ""
if runtime.GOOS != "windows" && filepath.IsAbs(dpath) {
aPath = "/"
}
for _, a := range ancestors {
aPath = filepath.Join(aPath, a)
if root, ok := scanner.plugins[aPath]; ok {
plugin.Root = root
break
}
}
pm.log.Debug("Found plugin", "id", plugin.Id, "signature", plugin.Signature, "hasRoot", plugin.Root != nil)
signingError := scanner.validateSignature(plugin)
if signingError != nil {
pm.log.Debug("Failed to validate plugin signature. Will skip loading", "id", plugin.Id,
"signature", plugin.Signature, "status", signingError.ErrorCode)
pm.pluginScanningErrors[plugin.Id] = *signingError
continue
}
pm.log.Debug("Attempting to add plugin", "id", plugin.Id)
pluginGoType, exists := PluginTypes[plugin.Type]
if !exists {
return fmt.Errorf("unknown plugin type %q", plugin.Type)
}
jsonFPath := filepath.Join(plugin.PluginDir, "plugin.json")
// External plugins need a module.js file for SystemJS to load
if !strings.HasPrefix(jsonFPath, pm.Cfg.StaticRootPath) && !scanner.IsBackendOnlyPlugin(plugin.Type) {
module := filepath.Join(plugin.PluginDir, "module.js")
exists, err := fs.Exists(module)
if err != nil {
return err
}
if !exists {
scanner.log.Warn("Plugin missing module.js",
"name", plugin.Name,
"warning", "Missing module.js, If you loaded this plugin from git, make sure to compile it.",
"path", module)
}
}
// nolint:gosec
// We can ignore the gosec G304 warning on this one because `jsonFPath` is based
// on the plugin folder structure on disk and not user input.
reader, err := os.Open(jsonFPath)
if err != nil {
return err
}
defer func() {
if err := reader.Close(); err != nil {
scanner.log.Warn("Failed to close JSON file", "path", jsonFPath, "err", err)
}
}()
jsonParser := json.NewDecoder(reader)
loader := reflect.New(reflect.TypeOf(pluginGoType)).Interface().(PluginLoader)
// Load the full plugin, and add it to manager
if err := loader.Load(jsonParser, plugin, scanner.backendPluginManager); err != nil {
if errors.Is(err, duplicatePluginError{}) {
pm.log.Warn("Plugin is duplicate", "error", err)
scanner.errors = append(scanner.errors, err)
continue
}
return err
}
pm.log.Debug("Successfully added plugin", "id", plugin.Id)
}
if len(scanner.errors) > 0 {
pm.log.Warn("Some plugins failed to load", "errors", scanner.errors)
pm.scanningErrors = scanner.errors
}
return nil
}
// GetDatasource returns a datasource based on passed pluginID if it exists
//
// This function fetches the datasource from the global variable DataSources in this package.
// Rather than refactoring all dependencies on the global variable, we can use this as a transition.
func (pm *PluginManager) GetDatasource(pluginID string) (*DataSourcePlugin, bool) {
ds, exist := DataSources[pluginID]
return ds, exist
}
func (s *PluginScanner) walker(currentPath string, f os.FileInfo, err error) error {
// We scan all the subfolders for plugin.json (with some exceptions) so that we also load embedded plugins, for
// example https://github.com/raintank/worldping-app/tree/master/dist/grafana-worldmap-panel worldmap panel plugin
// is embedded in worldping app.
if err != nil {
return fmt.Errorf("filepath.Walk reported an error for %q: %w", currentPath, err)
}
if f.Name() == "node_modules" || f.Name() == "Chromium.app" {
return util.ErrWalkSkipDir
}
if f.IsDir() {
return nil
}
if f.Name() != "plugin.json" {
return nil
}
if err := s.loadPlugin(currentPath); err != nil {
s.log.Error("Failed to load plugin", "error", err, "pluginPath", filepath.Dir(currentPath))
s.errors = append(s.errors, err)
}
return nil
}
func (s *PluginScanner) loadPlugin(pluginJSONFilePath string) error {
s.log.Debug("Loading plugin", "path", pluginJSONFilePath)
currentDir := filepath.Dir(pluginJSONFilePath)
// nolint:gosec
// We can ignore the gosec G304 warning on this one because `currentPath` is based
// on the plugin folder structure on disk and not user input.
reader, err := os.Open(pluginJSONFilePath)
if err != nil {
return err
}
defer func() {
if err := reader.Close(); err != nil {
s.log.Warn("Failed to close JSON file", "path", pluginJSONFilePath, "err", err)
}
}()
jsonParser := json.NewDecoder(reader)
pluginCommon := PluginBase{}
if err := jsonParser.Decode(&pluginCommon); err != nil {
return err
}
if pluginCommon.Id == "" || pluginCommon.Type == "" {
return errors.New("did not find type or id properties in plugin.json")
}
pluginCommon.PluginDir = filepath.Dir(pluginJSONFilePath)
pluginCommon.Files, err = collectPluginFilesWithin(pluginCommon.PluginDir)
if err != nil {
s.log.Warn("Could not collect plugin file information in directory", "pluginID", pluginCommon.Id, "dir", pluginCommon.PluginDir)
return err
}
signatureState, err := getPluginSignatureState(s.log, &pluginCommon)
if err != nil {
s.log.Warn("Could not get plugin signature state", "pluginID", pluginCommon.Id, "err", err)
return err
}
pluginCommon.Signature = signatureState.Status
pluginCommon.SignatureType = signatureState.Type
pluginCommon.SignatureOrg = signatureState.SigningOrg
s.plugins[currentDir] = &pluginCommon
return nil
}
func (*PluginScanner) IsBackendOnlyPlugin(pluginType string) bool {
return pluginType == "renderer"
}
// validateSignature validates a plugin's signature.
func (s *PluginScanner) validateSignature(plugin *PluginBase) *PluginError {
if plugin.Signature == pluginSignatureValid {
s.log.Debug("Plugin has valid signature", "id", plugin.Id)
return nil
}
if plugin.Root != nil {
// If a descendant plugin with invalid signature, set signature to that of root
if plugin.IsCorePlugin || plugin.Signature == pluginSignatureInternal {
s.log.Debug("Not setting descendant plugin's signature to that of root since it's core or internal",
"plugin", plugin.Id, "signature", plugin.Signature, "isCore", plugin.IsCorePlugin)
} else {
s.log.Debug("Setting descendant plugin's signature to that of root", "plugin", plugin.Id,
"root", plugin.Root.Id, "signature", plugin.Signature, "rootSignature", plugin.Root.Signature)
plugin.Signature = plugin.Root.Signature
if plugin.Signature == pluginSignatureValid {
s.log.Debug("Plugin has valid signature (inherited from root)", "id", plugin.Id)
return nil
}
}
} else {
s.log.Debug("Non-valid plugin Signature", "pluginID", plugin.Id, "pluginDir", plugin.PluginDir,
"state", plugin.Signature)
}
// For the time being, we choose to only require back-end plugins to be signed
// NOTE: the state is calculated again when setting metadata on the object
if !plugin.Backend || !s.requireSigned {
return nil
}
switch plugin.Signature {
case pluginSignatureUnsigned:
if allowed := s.allowUnsigned(plugin); !allowed {
s.log.Debug("Plugin is unsigned", "id", plugin.Id)
s.errors = append(s.errors, fmt.Errorf("plugin %q is unsigned", plugin.Id))
return &PluginError{
ErrorCode: signatureMissing,
}
}
s.log.Warn("Running an unsigned backend plugin", "pluginID", plugin.Id, "pluginDir",
plugin.PluginDir)
return nil
case pluginSignatureInvalid:
s.log.Debug("Plugin %q has an invalid signature", plugin.Id)
s.errors = append(s.errors, fmt.Errorf("plugin %q has an invalid signature", plugin.Id))
return &PluginError{
ErrorCode: signatureInvalid,
}
case pluginSignatureModified:
s.log.Debug("Plugin %q has a modified signature", plugin.Id)
s.errors = append(s.errors, fmt.Errorf("plugin %q's signature has been modified", plugin.Id))
return &PluginError{
ErrorCode: signatureModified,
}
default:
panic(fmt.Sprintf("Plugin %q has unrecognized plugin signature state %q", plugin.Id, plugin.Signature))
}
}
func (s *PluginScanner) allowUnsigned(plugin *PluginBase) bool {
if s.allowUnsignedPluginsCondition != nil {
return s.allowUnsignedPluginsCondition(plugin)
}
if s.cfg.Env == setting.Dev {
return true
}
for _, plug := range s.cfg.PluginsAllowUnsigned {
if plug == plugin.Id {
return true
}
}
return false
}
// ScanningErrors returns plugin scanning errors encountered.
func (pm *PluginManager) ScanningErrors() []PluginError {
scanningErrs := make([]PluginError, 0)
for id, e := range pm.pluginScanningErrors {
scanningErrs = append(scanningErrs, PluginError{
ErrorCode: e.ErrorCode,
PluginID: id,
})
}
return scanningErrs
}
func GetPluginMarkdown(pluginId string, name string) ([]byte, error) {
plug, exists := Plugins[pluginId]
if !exists {
return nil, PluginNotFoundError{pluginId}
}
// nolint:gosec
// We can ignore the gosec G304 warning on this one because `plug.PluginDir` is based
// on the plugin folder structure on disk and not user input.
path := filepath.Join(plug.PluginDir, fmt.Sprintf("%s.md", strings.ToUpper(name)))
exists, err := fs.Exists(path)
if err != nil {
return nil, err
}
if !exists {
path = filepath.Join(plug.PluginDir, fmt.Sprintf("%s.md", strings.ToLower(name)))
}
exists, err = fs.Exists(path)
if err != nil {
return nil, err
}
if !exists {
return make([]byte, 0), nil
}
// nolint:gosec
// We can ignore the gosec G304 warning on this one because `plug.PluginDir` is based
// on the plugin folder structure on disk and not user input.
data, err := ioutil.ReadFile(path)
if err != nil {
return nil, err
}
return data, nil
}
// gets plugin filenames that require verification for plugin signing
func collectPluginFilesWithin(rootDir string) ([]string, error) {
var files []string
err := filepath.Walk(rootDir, func(path string, info os.FileInfo, err error) error {
if err != nil {
return err
}
if !info.IsDir() && info.Name() != "MANIFEST.txt" {
file, err := filepath.Rel(rootDir, path)
if err != nil {
return err
}
files = append(files, filepath.ToSlash(file))
}
return nil
})
return files, err
}
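collectPluginFilesWithin walks a plugin directory and gathers the relative paths that take part in signing, skipping MANIFEST.txt itself. A standalone sketch of the same walk using only the standard library; the "./my-plugin" path is a placeholder:

package main

import (
	"fmt"
	"os"
	"path/filepath"
)

// collectPluginFiles mirrors the walk above: collect relative, slash-separated
// file paths under rootDir, excluding directories and the manifest file.
func collectPluginFiles(rootDir string) ([]string, error) {
	var files []string
	err := filepath.Walk(rootDir, func(path string, info os.FileInfo, err error) error {
		if err != nil {
			return err
		}
		if info.IsDir() || info.Name() == "MANIFEST.txt" {
			return nil
		}
		rel, err := filepath.Rel(rootDir, path)
		if err != nil {
			return err
		}
		files = append(files, filepath.ToSlash(rel))
		return nil
	})
	return files, err
}

func main() {
	files, err := collectPluginFiles("./my-plugin")
	fmt.Println(files, err)
}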


@ -22,29 +22,25 @@ type RendererPlugin struct {
backendPluginManager backendplugin.Manager backendPluginManager backendplugin.Manager
} }
func (r *RendererPlugin) Load(decoder *json.Decoder, base *PluginBase, backendPluginManager backendplugin.Manager) error { func (r *RendererPlugin) Load(decoder *json.Decoder, base *PluginBase,
backendPluginManager backendplugin.Manager) (interface{}, error) {
if err := decoder.Decode(r); err != nil { if err := decoder.Decode(r); err != nil {
return err return nil, err
}
if err := r.registerPlugin(base); err != nil {
return err
} }
r.backendPluginManager = backendPluginManager r.backendPluginManager = backendPluginManager
cmd := ComposePluginStartCommand("plugin_start") cmd := ComposePluginStartCommand("plugin_start")
fullpath := filepath.Join(r.PluginDir, cmd) fullpath := filepath.Join(base.PluginDir, cmd)
factory := grpcplugin.NewRendererPlugin(r.Id, fullpath, grpcplugin.PluginStartFuncs{ factory := grpcplugin.NewRendererPlugin(r.Id, fullpath, grpcplugin.PluginStartFuncs{
OnLegacyStart: r.onLegacyPluginStart, OnLegacyStart: r.onLegacyPluginStart,
OnStart: r.onPluginStart, OnStart: r.onPluginStart,
}) })
if err := backendPluginManager.Register(r.Id, factory); err != nil { if err := backendPluginManager.Register(r.Id, factory); err != nil {
return errutil.Wrapf(err, "Failed to register backend plugin") return nil, errutil.Wrapf(err, "failed to register backend plugin")
} }
Renderer = r return r, nil
return nil
} }
func (r *RendererPlugin) Start(ctx context.Context) error { func (r *RendererPlugin) Start(ctx context.Context) error {

pkg/plugins/state.go (new file, 38 lines added)

@ -0,0 +1,38 @@
package plugins
type PluginSignatureStatus string
func (pss PluginSignatureStatus) IsValid() bool {
return pss == PluginSignatureValid
}
func (pss PluginSignatureStatus) IsInternal() bool {
return pss == PluginSignatureInternal
}
const (
PluginSignatureInternal PluginSignatureStatus = "internal" // core plugin, no signature
PluginSignatureValid PluginSignatureStatus = "valid" // signed and accurate MANIFEST
PluginSignatureInvalid PluginSignatureStatus = "invalid" // invalid signature
PluginSignatureModified PluginSignatureStatus = "modified" // valid signature, but content mismatch
PluginSignatureUnsigned PluginSignatureStatus = "unsigned" // no MANIFEST file
)
type PluginState string
const (
PluginStateAlpha PluginState = "alpha"
)
type PluginSignatureType string
const (
GrafanaType PluginSignatureType = "grafana"
PrivateType PluginSignatureType = "private"
)
type PluginSignatureState struct {
Status PluginSignatureStatus
Type PluginSignatureType
SigningOrg string
}
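Exporting the status and type constants lets other packages reason about a plugin's signature directly. A short sketch, assuming only the types declared in this file; describeSignature is a hypothetical helper:

package main

import (
	"fmt"

	"github.com/grafana/grafana/pkg/plugins"
)

// describeSignature illustrates how callers can branch on the exported
// signature status after this refactoring.
func describeSignature(state plugins.PluginSignatureState) string {
	switch state.Status {
	case plugins.PluginSignatureValid:
		return fmt.Sprintf("valid (%s, signed by %s)", state.Type, state.SigningOrg)
	case plugins.PluginSignatureModified:
		return "manifest present but file contents changed"
	case plugins.PluginSignatureUnsigned:
		return "no MANIFEST.txt found"
	default:
		return string(state.Status)
	}
}

func main() {
	fmt.Println(describeSignature(plugins.PluginSignatureState{
		Status:     plugins.PluginSignatureValid,
		Type:       plugins.GrafanaType,
		SigningOrg: "Grafana Labs",
	}))
}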

pkg/plugins/tsdb.go (new file, 311 lines added)

@ -0,0 +1,311 @@
package plugins
import (
"context"
"encoding/base64"
"encoding/json"
"fmt"
"strconv"
"time"
"github.com/grafana/grafana-plugin-sdk-go/data"
"github.com/grafana/grafana/pkg/components/null"
"github.com/grafana/grafana/pkg/components/simplejson"
"github.com/grafana/grafana/pkg/models"
"github.com/timberio/go-datemath"
)
// DataSubQuery represents a data sub-query.
type DataSubQuery struct {
RefID string `json:"refId"`
Model *simplejson.Json `json:"model,omitempty"`
DataSource *models.DataSource `json:"datasource"`
MaxDataPoints int64 `json:"maxDataPoints"`
IntervalMS int64 `json:"intervalMs"`
QueryType string `json:"queryType"`
}
// DataQuery contains all information about a data query request.
type DataQuery struct {
TimeRange *DataTimeRange
Queries []DataSubQuery
Headers map[string]string
Debug bool
User *models.SignedInUser
}
type DataTimeRange struct {
From string
To string
Now time.Time
}
type DataTable struct {
Columns []DataTableColumn `json:"columns"`
Rows []DataRowValues `json:"rows"`
}
type DataTableColumn struct {
Text string `json:"text"`
}
type DataTimePoint [2]null.Float
type DataTimeSeriesPoints []DataTimePoint
type DataTimeSeriesSlice []DataTimeSeries
type DataRowValues []interface{}
type DataQueryResult struct {
Error error `json:"-"`
ErrorString string `json:"error,omitempty"`
RefID string `json:"refId"`
Meta *simplejson.Json `json:"meta,omitempty"`
Series DataTimeSeriesSlice `json:"series"`
Tables []DataTable `json:"tables"`
Dataframes DataFrames `json:"dataframes"`
}
// UnmarshalJSON deserializes a DataQueryResult from JSON.
//
// Deserialization support is required by tests.
func (r *DataQueryResult) UnmarshalJSON(b []byte) error {
m := map[string]interface{}{}
if err := json.Unmarshal(b, &m); err != nil {
return err
}
refID, ok := m["refId"].(string)
if !ok {
return fmt.Errorf("can't decode field refId - not a string")
}
var meta *simplejson.Json
if m["meta"] != nil {
mm, ok := m["meta"].(map[string]interface{})
if !ok {
return fmt.Errorf("can't decode field meta - not a JSON object")
}
meta = simplejson.NewFromAny(mm)
}
var series DataTimeSeriesSlice
/* TODO
if m["series"] != nil {
}
*/
var tables []DataTable
if m["tables"] != nil {
ts, ok := m["tables"].([]interface{})
if !ok {
return fmt.Errorf("can't decode field tables - not an array of Tables")
}
for _, ti := range ts {
tm, ok := ti.(map[string]interface{})
if !ok {
return fmt.Errorf("can't decode field tables - not an array of Tables")
}
var columns []DataTableColumn
cs, ok := tm["columns"].([]interface{})
if !ok {
return fmt.Errorf("can't decode field tables - not an array of Tables")
}
for _, ci := range cs {
cm, ok := ci.(map[string]interface{})
if !ok {
return fmt.Errorf("can't decode field tables - not an array of Tables")
}
val, ok := cm["text"].(string)
if !ok {
return fmt.Errorf("can't decode field tables - not an array of Tables")
}
columns = append(columns, DataTableColumn{Text: val})
}
rs, ok := tm["rows"].([]interface{})
if !ok {
return fmt.Errorf("can't decode field tables - not an array of Tables")
}
var rows []DataRowValues
for _, ri := range rs {
vals, ok := ri.([]interface{})
if !ok {
return fmt.Errorf("can't decode field tables - not an array of Tables")
}
rows = append(rows, vals)
}
tables = append(tables, DataTable{
Columns: columns,
Rows: rows,
})
}
}
var dfs *dataFrames
if m["dataframes"] != nil {
raw, ok := m["dataframes"].([]interface{})
if !ok {
return fmt.Errorf("can't decode field dataframes - not an array of byte arrays")
}
var encoded [][]byte
for _, ra := range raw {
encS, ok := ra.(string)
if !ok {
return fmt.Errorf("can't decode field dataframes - not an array of byte arrays")
}
enc, err := base64.StdEncoding.DecodeString(encS)
if err != nil {
return fmt.Errorf("can't decode field dataframes - not an array of arrow frames")
}
encoded = append(encoded, enc)
}
decoded, err := data.UnmarshalArrowFrames(encoded)
if err != nil {
return err
}
dfs = &dataFrames{
decoded: decoded,
encoded: encoded,
}
}
r.RefID = refID
r.Meta = meta
r.Series = series
r.Tables = tables
if dfs != nil {
r.Dataframes = dfs
}
return nil
}
type DataTimeSeries struct {
Name string `json:"name"`
Points DataTimeSeriesPoints `json:"points"`
Tags map[string]string `json:"tags,omitempty"`
}
type DataResponse struct {
Results map[string]DataQueryResult `json:"results"`
Message string `json:"message,omitempty"`
}
type DataPlugin interface {
DataQuery(ctx context.Context, ds *models.DataSource, query DataQuery) (DataResponse, error)
}
func NewDataTimeRange(from, to string) DataTimeRange {
return DataTimeRange{
From: from,
To: to,
Now: time.Now(),
}
}
func (tr *DataTimeRange) GetFromAsMsEpoch() int64 {
return tr.MustGetFrom().UnixNano() / int64(time.Millisecond)
}
func (tr *DataTimeRange) GetFromAsSecondsEpoch() int64 {
return tr.GetFromAsMsEpoch() / 1000
}
func (tr *DataTimeRange) GetFromAsTimeUTC() time.Time {
return tr.MustGetFrom().UTC()
}
func (tr *DataTimeRange) GetToAsMsEpoch() int64 {
return tr.MustGetTo().UnixNano() / int64(time.Millisecond)
}
func (tr *DataTimeRange) GetToAsSecondsEpoch() int64 {
return tr.GetToAsMsEpoch() / 1000
}
func (tr *DataTimeRange) GetToAsTimeUTC() time.Time {
return tr.MustGetTo().UTC()
}
func (tr *DataTimeRange) MustGetFrom() time.Time {
res, err := tr.ParseFrom()
if err != nil {
return time.Unix(0, 0)
}
return res
}
func (tr *DataTimeRange) MustGetTo() time.Time {
res, err := tr.ParseTo()
if err != nil {
return time.Unix(0, 0)
}
return res
}
func (tr DataTimeRange) ParseFrom() (time.Time, error) {
return parseTimeRange(tr.From, tr.Now, false, nil)
}
func (tr DataTimeRange) ParseTo() (time.Time, error) {
return parseTimeRange(tr.To, tr.Now, true, nil)
}
func (tr DataTimeRange) ParseFromWithLocation(location *time.Location) (time.Time, error) {
return parseTimeRange(tr.From, tr.Now, false, location)
}
func (tr DataTimeRange) ParseToWithLocation(location *time.Location) (time.Time, error) {
return parseTimeRange(tr.To, tr.Now, true, location)
}
func parseTimeRange(s string, now time.Time, withRoundUp bool, location *time.Location) (time.Time, error) {
if val, err := strconv.ParseInt(s, 10, 64); err == nil {
seconds := val / 1000
nano := (val - seconds*1000) * 1000000
return time.Unix(seconds, nano), nil
}
diff, err := time.ParseDuration("-" + s)
if err != nil {
options := []func(*datemath.Options){
datemath.WithNow(now),
datemath.WithRoundUp(withRoundUp),
}
if location != nil {
options = append(options, datemath.WithLocation(location))
}
return datemath.ParseAndEvaluate(s, options...)
}
return now.Add(diff), nil
}
// SeriesToFrame converts a DataTimeSeries to an SDK frame.
func SeriesToFrame(series DataTimeSeries) (*data.Frame, error) {
timeVec := make([]*time.Time, len(series.Points))
floatVec := make([]*float64, len(series.Points))
for idx, point := range series.Points {
timeVec[idx], floatVec[idx] = convertDataTimePoint(point)
}
frame := data.NewFrame(series.Name,
data.NewField("time", nil, timeVec),
data.NewField("value", data.Labels(series.Tags), floatVec),
)
return frame, nil
}
// convertDataTimePoint converts a DataTimePoint into two values appropriate
// for Series values.
func convertDataTimePoint(point DataTimePoint) (t *time.Time, f *float64) {
timeIdx, valueIdx := 1, 0
if point[timeIdx].Valid { // Assuming valid is null?
tI := int64(point[timeIdx].Float64)
uT := time.Unix(tI/int64(1e+3), (tI%int64(1e+3))*int64(1e+6)) // time.Time from millisecond unix ts
t = &uT
}
if point[valueIdx].Valid {
f = &point[valueIdx].Float64
}
return
}
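DataTimeRange accepts both relative expressions (resolved against Now via datemath) and absolute epoch-millisecond strings. A minimal usage sketch of the API introduced above:

package main

import (
	"fmt"

	"github.com/grafana/grafana/pkg/plugins"
)

func main() {
	// Relative range: "now-5m" and "now" are evaluated against time.Now().
	tr := plugins.NewDataTimeRange("now-5m", "now")
	fmt.Println("from (ms):", tr.GetFromAsMsEpoch())
	fmt.Println("to   (ms):", tr.GetToAsMsEpoch())

	// ParseFrom surfaces parse errors instead of falling back to Unix(0, 0).
	from, err := tr.ParseFrom()
	fmt.Println(from, err)
}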


@ -6,7 +6,9 @@ import (
"strings" "strings"
"time" "time"
"github.com/grafana/grafana/pkg/plugins"
"github.com/grafana/grafana/pkg/tsdb/prometheus" "github.com/grafana/grafana/pkg/tsdb/prometheus"
"github.com/grafana/grafana/pkg/tsdb/tsdbifaces"
gocontext "context" gocontext "context"
@ -16,7 +18,6 @@ import (
"github.com/grafana/grafana/pkg/components/simplejson" "github.com/grafana/grafana/pkg/components/simplejson"
"github.com/grafana/grafana/pkg/models" "github.com/grafana/grafana/pkg/models"
"github.com/grafana/grafana/pkg/services/alerting" "github.com/grafana/grafana/pkg/services/alerting"
"github.com/grafana/grafana/pkg/tsdb"
"github.com/grafana/grafana/pkg/util/errutil" "github.com/grafana/grafana/pkg/util/errutil"
) )
@ -29,12 +30,11 @@ func init() {
// QueryCondition is responsible for issue and query, reduce the // QueryCondition is responsible for issue and query, reduce the
// timeseries into single values and evaluate if they are firing or not. // timeseries into single values and evaluate if they are firing or not.
type QueryCondition struct { type QueryCondition struct {
Index int Index int
Query AlertQuery Query AlertQuery
Reducer *queryReducer Reducer *queryReducer
Evaluator AlertEvaluator Evaluator AlertEvaluator
Operator string Operator string
HandleRequest tsdb.HandleRequestFunc
} }
// AlertQuery contains information about what datasource a query // AlertQuery contains information about what datasource a query
@ -47,10 +47,10 @@ type AlertQuery struct {
} }
// Eval evaluates the `QueryCondition`. // Eval evaluates the `QueryCondition`.
func (c *QueryCondition) Eval(context *alerting.EvalContext) (*alerting.ConditionResult, error) { func (c *QueryCondition) Eval(context *alerting.EvalContext, requestHandler tsdbifaces.RequestHandler) (*alerting.ConditionResult, error) {
timeRange := tsdb.NewTimeRange(c.Query.From, c.Query.To) timeRange := plugins.NewDataTimeRange(c.Query.From, c.Query.To)
seriesList, err := c.executeQuery(context, timeRange) seriesList, err := c.executeQuery(context, timeRange, requestHandler)
if err != nil { if err != nil {
return nil, err return nil, err
} }
@ -109,7 +109,8 @@ func (c *QueryCondition) Eval(context *alerting.EvalContext) (*alerting.Conditio
}, nil }, nil
} }
func (c *QueryCondition) executeQuery(context *alerting.EvalContext, timeRange *tsdb.TimeRange) (tsdb.TimeSeriesSlice, error) { func (c *QueryCondition) executeQuery(context *alerting.EvalContext, timeRange plugins.DataTimeRange,
requestHandler tsdbifaces.RequestHandler) (plugins.DataTimeSeriesSlice, error) {
getDsInfo := &models.GetDataSourceQuery{ getDsInfo := &models.GetDataSourceQuery{
Id: c.Query.DatasourceID, Id: c.Query.DatasourceID,
OrgId: context.Rule.OrgID, OrgId: context.Rule.OrgID,
@ -125,7 +126,7 @@ func (c *QueryCondition) executeQuery(context *alerting.EvalContext, timeRange *
} }
req := c.getRequestForAlertRule(getDsInfo.Result, timeRange, context.IsDebug) req := c.getRequestForAlertRule(getDsInfo.Result, timeRange, context.IsDebug)
result := make(tsdb.TimeSeriesSlice, 0) result := make(plugins.DataTimeSeriesSlice, 0)
if context.IsDebug { if context.IsDebug {
data := simplejson.New() data := simplejson.New()
@ -139,20 +140,20 @@ func (c *QueryCondition) executeQuery(context *alerting.EvalContext, timeRange *
Model *simplejson.Json `json:"model"` Model *simplejson.Json `json:"model"`
Datasource *simplejson.Json `json:"datasource"` Datasource *simplejson.Json `json:"datasource"`
MaxDataPoints int64 `json:"maxDataPoints"` MaxDataPoints int64 `json:"maxDataPoints"`
IntervalMs int64 `json:"intervalMs"` IntervalMS int64 `json:"intervalMs"`
} }
queries := []*queryDto{} queries := []*queryDto{}
for _, q := range req.Queries { for _, q := range req.Queries {
queries = append(queries, &queryDto{ queries = append(queries, &queryDto{
RefID: q.RefId, RefID: q.RefID,
Model: q.Model, Model: q.Model,
Datasource: simplejson.NewFromAny(map[string]interface{}{ Datasource: simplejson.NewFromAny(map[string]interface{}{
"id": q.DataSource.Id, "id": q.DataSource.Id,
"name": q.DataSource.Name, "name": q.DataSource.Name,
}), }),
MaxDataPoints: q.MaxDataPoints, MaxDataPoints: q.MaxDataPoints,
IntervalMs: q.IntervalMs, IntervalMS: q.IntervalMS,
}) })
} }
@ -164,29 +165,30 @@ func (c *QueryCondition) executeQuery(context *alerting.EvalContext, timeRange *
}) })
} }
resp, err := c.HandleRequest(context.Ctx, getDsInfo.Result, req) resp, err := requestHandler.HandleRequest(context.Ctx, getDsInfo.Result, req)
if err != nil { if err != nil {
return nil, toCustomError(err) return nil, toCustomError(err)
} }
for _, v := range resp.Results { for _, v := range resp.Results {
if v.Error != nil { if v.Error != nil {
return nil, fmt.Errorf("tsdb.HandleRequest() response error %v", v) return nil, fmt.Errorf("request handler response error %v", v)
} }
// If there are dataframes but no series on the result // If there are dataframes but no series on the result
useDataframes := v.Dataframes != nil && (v.Series == nil || len(v.Series) == 0) useDataframes := v.Dataframes != nil && (v.Series == nil || len(v.Series) == 0)
if useDataframes { // convert the dataframes to tsdb.TimeSeries if useDataframes { // convert the dataframes to plugins.DataTimeSeries
frames, err := v.Dataframes.Decoded() frames, err := v.Dataframes.Decoded()
if err != nil { if err != nil {
return nil, errutil.Wrap("tsdb.HandleRequest() failed to unmarshal arrow dataframes from bytes", err) return nil, errutil.Wrap("request handler failed to unmarshal arrow dataframes from bytes", err)
} }
for _, frame := range frames { for _, frame := range frames {
ss, err := FrameToSeriesSlice(frame) ss, err := FrameToSeriesSlice(frame)
if err != nil { if err != nil {
return nil, errutil.Wrapf(err, `tsdb.HandleRequest() failed to convert dataframe "%v" to tsdb.TimeSeriesSlice`, frame.Name) return nil, errutil.Wrapf(err,
`request handler failed to convert dataframe "%v" to plugins.DataTimeSeriesSlice`, frame.Name)
} }
result = append(result, ss...) result = append(result, ss...)
} }
@ -218,13 +220,14 @@ func (c *QueryCondition) executeQuery(context *alerting.EvalContext, timeRange *
return result, nil return result, nil
} }
func (c *QueryCondition) getRequestForAlertRule(datasource *models.DataSource, timeRange *tsdb.TimeRange, debug bool) *tsdb.TsdbQuery { func (c *QueryCondition) getRequestForAlertRule(datasource *models.DataSource, timeRange plugins.DataTimeRange,
debug bool) plugins.DataQuery {
queryModel := c.Query.Model queryModel := c.Query.Model
req := &tsdb.TsdbQuery{ req := plugins.DataQuery{
TimeRange: timeRange, TimeRange: &timeRange,
Queries: []*tsdb.Query{ Queries: []plugins.DataSubQuery{
{ {
RefId: "A", RefID: "A",
Model: queryModel, Model: queryModel,
DataSource: datasource, DataSource: datasource,
QueryType: queryModel.Get("queryType").MustString(""), QueryType: queryModel.Get("queryType").MustString(""),
@ -242,7 +245,6 @@ func (c *QueryCondition) getRequestForAlertRule(datasource *models.DataSource, t
func newQueryCondition(model *simplejson.Json, index int) (*QueryCondition, error) { func newQueryCondition(model *simplejson.Json, index int) (*QueryCondition, error) {
condition := QueryCondition{} condition := QueryCondition{}
condition.Index = index condition.Index = index
condition.HandleRequest = tsdb.HandleRequest
queryJSON := model.Get("query") queryJSON := model.Get("query")
@ -301,23 +303,23 @@ func validateToValue(to string) error {
} }
// FrameToSeriesSlice converts a frame that is a valid time series as per data.TimeSeriesSchema() // FrameToSeriesSlice converts a frame that is a valid time series as per data.TimeSeriesSchema()
// to a TimeSeriesSlice. // to a DataTimeSeriesSlice.
func FrameToSeriesSlice(frame *data.Frame) (tsdb.TimeSeriesSlice, error) { func FrameToSeriesSlice(frame *data.Frame) (plugins.DataTimeSeriesSlice, error) {
tsSchema := frame.TimeSeriesSchema() tsSchema := frame.TimeSeriesSchema()
if tsSchema.Type == data.TimeSeriesTypeNot { if tsSchema.Type == data.TimeSeriesTypeNot {
// If no fields, or only a time field, create an empty tsdb.TimeSeriesSlice with a single // If no fields, or only a time field, create an empty plugins.DataTimeSeriesSlice with a single
// time series in order to trigger "no data" in alerting. // time series in order to trigger "no data" in alerting.
if len(frame.Fields) == 0 || (len(frame.Fields) == 1 && frame.Fields[0].Type().Time()) { if len(frame.Fields) == 0 || (len(frame.Fields) == 1 && frame.Fields[0].Type().Time()) {
return tsdb.TimeSeriesSlice{{ return plugins.DataTimeSeriesSlice{{
Name: frame.Name, Name: frame.Name,
Points: make(tsdb.TimeSeriesPoints, 0), Points: make(plugins.DataTimeSeriesPoints, 0),
}}, nil }}, nil
} }
return nil, fmt.Errorf("input frame is not recognized as a time series") return nil, fmt.Errorf("input frame is not recognized as a time series")
} }
seriesCount := len(tsSchema.ValueIndices) seriesCount := len(tsSchema.ValueIndices)
seriesSlice := make(tsdb.TimeSeriesSlice, 0, seriesCount) seriesSlice := make(plugins.DataTimeSeriesSlice, 0, seriesCount)
timeField := frame.Fields[tsSchema.TimeIndex] timeField := frame.Fields[tsSchema.TimeIndex]
timeNullFloatSlice := make([]null.Float, timeField.Len()) timeNullFloatSlice := make([]null.Float, timeField.Len())
@ -331,8 +333,8 @@ func FrameToSeriesSlice(frame *data.Frame) (tsdb.TimeSeriesSlice, error) {
for _, fieldIdx := range tsSchema.ValueIndices { // create a TimeSeries for each value Field for _, fieldIdx := range tsSchema.ValueIndices { // create a TimeSeries for each value Field
field := frame.Fields[fieldIdx] field := frame.Fields[fieldIdx]
ts := &tsdb.TimeSeries{ ts := plugins.DataTimeSeries{
Points: make(tsdb.TimeSeriesPoints, field.Len()), Points: make(plugins.DataTimeSeriesPoints, field.Len()),
} }
if len(field.Labels) > 0 { if len(field.Labels) > 0 {
@ -355,9 +357,10 @@ func FrameToSeriesSlice(frame *data.Frame) (tsdb.TimeSeriesSlice, error) {
for rowIdx := 0; rowIdx < field.Len(); rowIdx++ { // for each value in the field, make a TimePoint for rowIdx := 0; rowIdx < field.Len(); rowIdx++ { // for each value in the field, make a TimePoint
val, err := field.FloatAt(rowIdx) val, err := field.FloatAt(rowIdx)
if err != nil { if err != nil {
return nil, errutil.Wrapf(err, "failed to convert frame to tsdb.series, can not convert value %v to float", field.At(rowIdx)) return nil, errutil.Wrapf(err,
"failed to convert frame to DataTimeSeriesSlice, can not convert value %v to float", field.At(rowIdx))
} }
ts.Points[rowIdx] = tsdb.TimePoint{ ts.Points[rowIdx] = plugins.DataTimePoint{
null.FloatFrom(val), null.FloatFrom(val),
timeNullFloatSlice[rowIdx], timeNullFloatSlice[rowIdx],
} }
@ -381,5 +384,5 @@ func toCustomError(err error) error {
} }
// generic fallback // generic fallback
return fmt.Errorf("tsdb.HandleRequest() error %v", err) return fmt.Errorf("request handler error: %w", err)
} }
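FrameToSeriesSlice bridges SDK data frames back into the legacy series slice that the alerting evaluators still consume. A hedged sketch of the round trip; the conditions import path is assumed from the surrounding alerting code:

package main

import (
	"fmt"
	"time"

	"github.com/grafana/grafana-plugin-sdk-go/data"
	"github.com/grafana/grafana/pkg/services/alerting/conditions"
)

func main() {
	// Build a wide time-series frame with one time field and one value field.
	now := time.Now()
	frame := data.NewFrame("cpu",
		data.NewField("time", nil, []time.Time{now, now.Add(time.Minute)}),
		data.NewField("value", data.Labels{"host": "a"}, []float64{1.5, 2.5}),
	)

	// Convert it into the legacy plugins.DataTimeSeriesSlice.
	series, err := conditions.FrameToSeriesSlice(frame)
	if err != nil {
		fmt.Println("conversion failed:", err)
		return
	}
	for _, s := range series {
		fmt.Println(s.Name, len(s.Points), "points")
	}
}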


@ -15,18 +15,18 @@ import (
"github.com/grafana/grafana/pkg/components/null" "github.com/grafana/grafana/pkg/components/null"
"github.com/grafana/grafana/pkg/components/simplejson" "github.com/grafana/grafana/pkg/components/simplejson"
"github.com/grafana/grafana/pkg/models" "github.com/grafana/grafana/pkg/models"
"github.com/grafana/grafana/pkg/plugins"
"github.com/grafana/grafana/pkg/services/alerting" "github.com/grafana/grafana/pkg/services/alerting"
"github.com/grafana/grafana/pkg/tsdb"
. "github.com/smartystreets/goconvey/convey" . "github.com/smartystreets/goconvey/convey"
"github.com/stretchr/testify/require" "github.com/stretchr/testify/require"
"github.com/xorcare/pointer" "github.com/xorcare/pointer"
) )
func newTimeSeriesPointsFromArgs(values ...float64) tsdb.TimeSeriesPoints { func newTimeSeriesPointsFromArgs(values ...float64) plugins.DataTimeSeriesPoints {
points := make(tsdb.TimeSeriesPoints, 0) points := make(plugins.DataTimeSeriesPoints, 0)
for i := 0; i < len(values); i += 2 { for i := 0; i < len(values); i += 2 {
points = append(points, tsdb.NewTimePoint(null.FloatFrom(values[i]), values[i+1])) points = append(points, plugins.DataTimePoint{null.FloatFrom(values[i]), null.FloatFrom(values[i+1])})
} }
return points return points
@ -60,7 +60,7 @@ func TestQueryCondition(t *testing.T) {
Convey("should fire when avg is above 100", func() { Convey("should fire when avg is above 100", func() {
points := newTimeSeriesPointsFromArgs(120, 0) points := newTimeSeriesPointsFromArgs(120, 0)
ctx.series = tsdb.TimeSeriesSlice{&tsdb.TimeSeries{Name: "test1", Points: points}} ctx.series = plugins.DataTimeSeriesSlice{plugins.DataTimeSeries{Name: "test1", Points: points}}
cr, err := ctx.exec() cr, err := ctx.exec()
So(err, ShouldBeNil) So(err, ShouldBeNil)
@ -80,7 +80,7 @@ func TestQueryCondition(t *testing.T) {
Convey("Should not fire when avg is below 100", func() { Convey("Should not fire when avg is below 100", func() {
points := newTimeSeriesPointsFromArgs(90, 0) points := newTimeSeriesPointsFromArgs(90, 0)
ctx.series = tsdb.TimeSeriesSlice{&tsdb.TimeSeries{Name: "test1", Points: points}} ctx.series = plugins.DataTimeSeriesSlice{plugins.DataTimeSeries{Name: "test1", Points: points}}
cr, err := ctx.exec() cr, err := ctx.exec()
So(err, ShouldBeNil) So(err, ShouldBeNil)
@ -99,9 +99,9 @@ func TestQueryCondition(t *testing.T) {
}) })
Convey("Should fire if only first series matches", func() { Convey("Should fire if only first series matches", func() {
ctx.series = tsdb.TimeSeriesSlice{ ctx.series = plugins.DataTimeSeriesSlice{
&tsdb.TimeSeries{Name: "test1", Points: newTimeSeriesPointsFromArgs(120, 0)}, plugins.DataTimeSeries{Name: "test1", Points: newTimeSeriesPointsFromArgs(120, 0)},
&tsdb.TimeSeries{Name: "test2", Points: newTimeSeriesPointsFromArgs(0, 0)}, plugins.DataTimeSeries{Name: "test2", Points: newTimeSeriesPointsFromArgs(0, 0)},
} }
cr, err := ctx.exec() cr, err := ctx.exec()
@ -111,7 +111,7 @@ func TestQueryCondition(t *testing.T) {
Convey("No series", func() { Convey("No series", func() {
Convey("Should set NoDataFound when condition is gt", func() { Convey("Should set NoDataFound when condition is gt", func() {
ctx.series = tsdb.TimeSeriesSlice{} ctx.series = plugins.DataTimeSeriesSlice{}
cr, err := ctx.exec() cr, err := ctx.exec()
So(err, ShouldBeNil) So(err, ShouldBeNil)
@ -121,7 +121,7 @@ func TestQueryCondition(t *testing.T) {
Convey("Should be firing when condition is no_value", func() { Convey("Should be firing when condition is no_value", func() {
ctx.evaluator = `{"type": "no_value", "params": []}` ctx.evaluator = `{"type": "no_value", "params": []}`
ctx.series = tsdb.TimeSeriesSlice{} ctx.series = plugins.DataTimeSeriesSlice{}
cr, err := ctx.exec() cr, err := ctx.exec()
So(err, ShouldBeNil) So(err, ShouldBeNil)
@ -132,8 +132,8 @@ func TestQueryCondition(t *testing.T) {
Convey("Empty series", func() { Convey("Empty series", func() {
Convey("Should set Firing if eval match", func() { Convey("Should set Firing if eval match", func() {
ctx.evaluator = `{"type": "no_value", "params": []}` ctx.evaluator = `{"type": "no_value", "params": []}`
ctx.series = tsdb.TimeSeriesSlice{ ctx.series = plugins.DataTimeSeriesSlice{
&tsdb.TimeSeries{Name: "test1", Points: newTimeSeriesPointsFromArgs()}, plugins.DataTimeSeries{Name: "test1", Points: newTimeSeriesPointsFromArgs()},
} }
cr, err := ctx.exec() cr, err := ctx.exec()
@ -142,9 +142,9 @@ func TestQueryCondition(t *testing.T) {
}) })
Convey("Should set NoDataFound both series are empty", func() { Convey("Should set NoDataFound both series are empty", func() {
ctx.series = tsdb.TimeSeriesSlice{ ctx.series = plugins.DataTimeSeriesSlice{
&tsdb.TimeSeries{Name: "test1", Points: newTimeSeriesPointsFromArgs()}, plugins.DataTimeSeries{Name: "test1", Points: newTimeSeriesPointsFromArgs()},
&tsdb.TimeSeries{Name: "test2", Points: newTimeSeriesPointsFromArgs()}, plugins.DataTimeSeries{Name: "test2", Points: newTimeSeriesPointsFromArgs()},
} }
cr, err := ctx.exec() cr, err := ctx.exec()
@ -153,9 +153,9 @@ func TestQueryCondition(t *testing.T) {
}) })
Convey("Should set NoDataFound both series contains null", func() { Convey("Should set NoDataFound both series contains null", func() {
ctx.series = tsdb.TimeSeriesSlice{ ctx.series = plugins.DataTimeSeriesSlice{
&tsdb.TimeSeries{Name: "test1", Points: tsdb.TimeSeriesPoints{tsdb.TimePoint{null.FloatFromPtr(nil), null.FloatFrom(0)}}}, plugins.DataTimeSeries{Name: "test1", Points: plugins.DataTimeSeriesPoints{plugins.DataTimePoint{null.FloatFromPtr(nil), null.FloatFrom(0)}}},
&tsdb.TimeSeries{Name: "test2", Points: tsdb.TimeSeriesPoints{tsdb.TimePoint{null.FloatFromPtr(nil), null.FloatFrom(0)}}}, plugins.DataTimeSeries{Name: "test2", Points: plugins.DataTimeSeriesPoints{plugins.DataTimePoint{null.FloatFromPtr(nil), null.FloatFrom(0)}}},
} }
cr, err := ctx.exec() cr, err := ctx.exec()
@ -164,9 +164,9 @@ func TestQueryCondition(t *testing.T) {
}) })
Convey("Should not set NoDataFound if one series is empty", func() { Convey("Should not set NoDataFound if one series is empty", func() {
ctx.series = tsdb.TimeSeriesSlice{ ctx.series = plugins.DataTimeSeriesSlice{
&tsdb.TimeSeries{Name: "test1", Points: newTimeSeriesPointsFromArgs()}, plugins.DataTimeSeries{Name: "test1", Points: newTimeSeriesPointsFromArgs()},
&tsdb.TimeSeries{Name: "test2", Points: newTimeSeriesPointsFromArgs(120, 0)}, plugins.DataTimeSeries{Name: "test2", Points: newTimeSeriesPointsFromArgs(120, 0)},
} }
cr, err := ctx.exec() cr, err := ctx.exec()
@ -181,7 +181,7 @@ func TestQueryCondition(t *testing.T) {
type queryConditionTestContext struct { type queryConditionTestContext struct {
reducer string reducer string
evaluator string evaluator string
series tsdb.TimeSeriesSlice series plugins.DataTimeSeriesSlice
frame *data.Frame frame *data.Frame
result *alerting.EvalContext result *alerting.EvalContext
condition *QueryCondition condition *QueryCondition
@ -207,25 +207,33 @@ func (ctx *queryConditionTestContext) exec() (*alerting.ConditionResult, error)
ctx.condition = condition ctx.condition = condition
qr := &tsdb.QueryResult{ qr := plugins.DataQueryResult{
Series: ctx.series, Series: ctx.series,
} }
if ctx.frame != nil { if ctx.frame != nil {
qr = &tsdb.QueryResult{ qr = plugins.DataQueryResult{
Dataframes: tsdb.NewDecodedDataFrames(data.Frames{ctx.frame}), Dataframes: plugins.NewDecodedDataFrames(data.Frames{ctx.frame}),
} }
} }
reqHandler := fakeReqHandler{
condition.HandleRequest = func(context context.Context, dsInfo *models.DataSource, req *tsdb.TsdbQuery) (*tsdb.Response, error) { response: plugins.DataResponse{
return &tsdb.Response{ Results: map[string]plugins.DataQueryResult{
Results: map[string]*tsdb.QueryResult{
"A": qr, "A": qr,
}, },
}, nil },
} }
return condition.Eval(ctx.result) return condition.Eval(ctx.result, reqHandler)
}
type fakeReqHandler struct {
response plugins.DataResponse
}
func (rh fakeReqHandler) HandleRequest(context.Context, *models.DataSource, plugins.DataQuery) (
plugins.DataResponse, error) {
return rh.response, nil
} }
func queryConditionScenario(desc string, fn queryConditionScenarioFunc) { func queryConditionScenario(desc string, fn queryConditionScenarioFunc) {
@ -249,7 +257,7 @@ func TestFrameToSeriesSlice(t *testing.T) {
tests := []struct { tests := []struct {
name string name string
frame *data.Frame frame *data.Frame
seriesSlice tsdb.TimeSeriesSlice seriesSlice plugins.DataTimeSeriesSlice
Err require.ErrorAssertionFunc Err require.ErrorAssertionFunc
}{ }{
{ {
@ -268,21 +276,21 @@ func TestFrameToSeriesSlice(t *testing.T) {
4.0, 4.0,
})), })),
seriesSlice: tsdb.TimeSeriesSlice{ seriesSlice: plugins.DataTimeSeriesSlice{
&tsdb.TimeSeries{ plugins.DataTimeSeries{
Name: "Values Int64s {Animal Factor=cat}", Name: "Values Int64s {Animal Factor=cat}",
Tags: map[string]string{"Animal Factor": "cat"}, Tags: map[string]string{"Animal Factor": "cat"},
Points: tsdb.TimeSeriesPoints{ Points: plugins.DataTimeSeriesPoints{
tsdb.TimePoint{null.FloatFrom(math.NaN()), null.FloatFrom(1577934240000)}, plugins.DataTimePoint{null.FloatFrom(math.NaN()), null.FloatFrom(1577934240000)},
tsdb.TimePoint{null.FloatFrom(3), null.FloatFrom(1577934270000)}, plugins.DataTimePoint{null.FloatFrom(3), null.FloatFrom(1577934270000)},
}, },
}, },
&tsdb.TimeSeries{ plugins.DataTimeSeries{
Name: "Values Floats {Animal Factor=sloth}", Name: "Values Floats {Animal Factor=sloth}",
Tags: map[string]string{"Animal Factor": "sloth"}, Tags: map[string]string{"Animal Factor": "sloth"},
Points: tsdb.TimeSeriesPoints{ Points: plugins.DataTimeSeriesPoints{
tsdb.TimePoint{null.FloatFrom(2), null.FloatFrom(1577934240000)}, plugins.DataTimePoint{null.FloatFrom(2), null.FloatFrom(1577934240000)},
tsdb.TimePoint{null.FloatFrom(4), null.FloatFrom(1577934270000)}, plugins.DataTimePoint{null.FloatFrom(4), null.FloatFrom(1577934270000)},
}, },
}, },
}, },
@ -295,16 +303,16 @@ func TestFrameToSeriesSlice(t *testing.T) {
data.NewField(`Values Int64s`, data.Labels{"Animal Factor": "cat"}, []*int64{}), data.NewField(`Values Int64s`, data.Labels{"Animal Factor": "cat"}, []*int64{}),
data.NewField(`Values Floats`, data.Labels{"Animal Factor": "sloth"}, []float64{})), data.NewField(`Values Floats`, data.Labels{"Animal Factor": "sloth"}, []float64{})),
seriesSlice: tsdb.TimeSeriesSlice{ seriesSlice: plugins.DataTimeSeriesSlice{
&tsdb.TimeSeries{ plugins.DataTimeSeries{
Name: "Values Int64s {Animal Factor=cat}", Name: "Values Int64s {Animal Factor=cat}",
Tags: map[string]string{"Animal Factor": "cat"}, Tags: map[string]string{"Animal Factor": "cat"},
Points: tsdb.TimeSeriesPoints{}, Points: plugins.DataTimeSeriesPoints{},
}, },
&tsdb.TimeSeries{ plugins.DataTimeSeries{
Name: "Values Floats {Animal Factor=sloth}", Name: "Values Floats {Animal Factor=sloth}",
Tags: map[string]string{"Animal Factor": "sloth"}, Tags: map[string]string{"Animal Factor": "sloth"},
Points: tsdb.TimeSeriesPoints{}, Points: plugins.DataTimeSeriesPoints{},
}, },
}, },
Err: require.NoError, Err: require.NoError,
@ -315,10 +323,10 @@ func TestFrameToSeriesSlice(t *testing.T) {
data.NewField("Time", data.Labels{}, []time.Time{}), data.NewField("Time", data.Labels{}, []time.Time{}),
data.NewField(`Values`, data.Labels{}, []float64{})), data.NewField(`Values`, data.Labels{}, []float64{})),
seriesSlice: tsdb.TimeSeriesSlice{ seriesSlice: plugins.DataTimeSeriesSlice{
&tsdb.TimeSeries{ plugins.DataTimeSeries{
Name: "Values", Name: "Values",
Points: tsdb.TimeSeriesPoints{}, Points: plugins.DataTimeSeriesPoints{},
}, },
}, },
Err: require.NoError, Err: require.NoError,
@ -331,10 +339,10 @@ func TestFrameToSeriesSlice(t *testing.T) {
DisplayNameFromDS: "sloth", DisplayNameFromDS: "sloth",
})), })),
seriesSlice: tsdb.TimeSeriesSlice{ seriesSlice: plugins.DataTimeSeriesSlice{
&tsdb.TimeSeries{ plugins.DataTimeSeries{
Name: "sloth", Name: "sloth",
Points: tsdb.TimeSeriesPoints{}, Points: plugins.DataTimeSeriesPoints{},
Tags: map[string]string{"Rating": "10"}, Tags: map[string]string{"Rating": "10"},
}, },
}, },
@ -349,10 +357,10 @@ func TestFrameToSeriesSlice(t *testing.T) {
DisplayNameFromDS: "sloth #2", DisplayNameFromDS: "sloth #2",
})), })),
seriesSlice: tsdb.TimeSeriesSlice{ seriesSlice: plugins.DataTimeSeriesSlice{
&tsdb.TimeSeries{ plugins.DataTimeSeries{
Name: "sloth #1", Name: "sloth #1",
Points: tsdb.TimeSeriesPoints{}, Points: plugins.DataTimeSeriesPoints{},
}, },
}, },
Err: require.NoError, Err: require.NoError,

View File

@ -6,7 +6,7 @@ import (
"sort" "sort"
"github.com/grafana/grafana/pkg/components/null" "github.com/grafana/grafana/pkg/components/null"
"github.com/grafana/grafana/pkg/tsdb" "github.com/grafana/grafana/pkg/plugins"
) )
// queryReducer reduces a timeseries to a nullable float // queryReducer reduces a timeseries to a nullable float
@ -18,7 +18,7 @@ type queryReducer struct {
} }
//nolint: gocyclo //nolint: gocyclo
func (s *queryReducer) Reduce(series *tsdb.TimeSeries) null.Float { func (s *queryReducer) Reduce(series plugins.DataTimeSeries) null.Float {
if len(series.Points) == 0 { if len(series.Points) == 0 {
return null.FloatFromPtr(nil) return null.FloatFromPtr(nil)
} }
@ -126,7 +126,7 @@ func newSimpleReducer(t string) *queryReducer {
return &queryReducer{Type: t} return &queryReducer{Type: t}
} }
func calculateDiff(series *tsdb.TimeSeries, allNull bool, value float64, fn func(float64, float64) float64) (bool, float64) { func calculateDiff(series plugins.DataTimeSeries, allNull bool, value float64, fn func(float64, float64) float64) (bool, float64) {
var ( var (
points = series.Points points = series.Points
first float64 first float64

View File

@ -7,7 +7,7 @@ import (
. "github.com/smartystreets/goconvey/convey" . "github.com/smartystreets/goconvey/convey"
"github.com/grafana/grafana/pkg/components/null" "github.com/grafana/grafana/pkg/components/null"
"github.com/grafana/grafana/pkg/tsdb" "github.com/grafana/grafana/pkg/plugins"
) )
func TestSimpleReducer(t *testing.T) { func TestSimpleReducer(t *testing.T) {
@ -54,16 +54,16 @@ func TestSimpleReducer(t *testing.T) {
Convey("median should ignore null values", func() { Convey("median should ignore null values", func() {
reducer := newSimpleReducer("median") reducer := newSimpleReducer("median")
series := &tsdb.TimeSeries{ series := plugins.DataTimeSeries{
Name: "test time series", Name: "test time series",
} }
series.Points = append(series.Points, tsdb.NewTimePoint(null.FloatFromPtr(nil), 1)) series.Points = append(series.Points, plugins.DataTimePoint{null.FloatFromPtr(nil), null.FloatFrom(1)})
series.Points = append(series.Points, tsdb.NewTimePoint(null.FloatFromPtr(nil), 2)) series.Points = append(series.Points, plugins.DataTimePoint{null.FloatFromPtr(nil), null.FloatFrom(2)})
series.Points = append(series.Points, tsdb.NewTimePoint(null.FloatFromPtr(nil), 3)) series.Points = append(series.Points, plugins.DataTimePoint{null.FloatFromPtr(nil), null.FloatFrom(3)})
series.Points = append(series.Points, tsdb.NewTimePoint(null.FloatFrom(float64(1)), 4)) series.Points = append(series.Points, plugins.DataTimePoint{null.FloatFrom(float64(1)), null.FloatFrom(4)})
series.Points = append(series.Points, tsdb.NewTimePoint(null.FloatFrom(float64(2)), 5)) series.Points = append(series.Points, plugins.DataTimePoint{null.FloatFrom(float64(2)), null.FloatFrom(5)})
series.Points = append(series.Points, tsdb.NewTimePoint(null.FloatFrom(float64(3)), 6)) series.Points = append(series.Points, plugins.DataTimePoint{null.FloatFrom(float64(3)), null.FloatFrom(6)})
result := reducer.Reduce(series) result := reducer.Reduce(series)
So(result.Valid, ShouldEqual, true) So(result.Valid, ShouldEqual, true)
@ -77,25 +77,25 @@ func TestSimpleReducer(t *testing.T) {
Convey("avg with only nulls", func() { Convey("avg with only nulls", func() {
reducer := newSimpleReducer("avg") reducer := newSimpleReducer("avg")
series := &tsdb.TimeSeries{ series := plugins.DataTimeSeries{
Name: "test time series", Name: "test time series",
} }
series.Points = append(series.Points, tsdb.NewTimePoint(null.FloatFromPtr(nil), 1)) series.Points = append(series.Points, plugins.DataTimePoint{null.FloatFromPtr(nil), null.FloatFrom(1)})
So(reducer.Reduce(series).Valid, ShouldEqual, false) So(reducer.Reduce(series).Valid, ShouldEqual, false)
}) })
Convey("count_non_null", func() { Convey("count_non_null", func() {
Convey("with null values and real values", func() { Convey("with null values and real values", func() {
reducer := newSimpleReducer("count_non_null") reducer := newSimpleReducer("count_non_null")
series := &tsdb.TimeSeries{ series := plugins.DataTimeSeries{
Name: "test time series", Name: "test time series",
} }
series.Points = append(series.Points, tsdb.NewTimePoint(null.FloatFromPtr(nil), 1)) series.Points = append(series.Points, plugins.DataTimePoint{null.FloatFromPtr(nil), null.FloatFrom(1)})
series.Points = append(series.Points, tsdb.NewTimePoint(null.FloatFromPtr(nil), 2)) series.Points = append(series.Points, plugins.DataTimePoint{null.FloatFromPtr(nil), null.FloatFrom(2)})
series.Points = append(series.Points, tsdb.NewTimePoint(null.FloatFrom(3), 3)) series.Points = append(series.Points, plugins.DataTimePoint{null.FloatFrom(3), null.FloatFrom(3)})
series.Points = append(series.Points, tsdb.NewTimePoint(null.FloatFrom(3), 4)) series.Points = append(series.Points, plugins.DataTimePoint{null.FloatFrom(3), null.FloatFrom(4)})
So(reducer.Reduce(series).Valid, ShouldEqual, true) So(reducer.Reduce(series).Valid, ShouldEqual, true)
So(reducer.Reduce(series).Float64, ShouldEqual, 2) So(reducer.Reduce(series).Float64, ShouldEqual, 2)
@ -103,12 +103,12 @@ func TestSimpleReducer(t *testing.T) {
Convey("with null values", func() { Convey("with null values", func() {
reducer := newSimpleReducer("count_non_null") reducer := newSimpleReducer("count_non_null")
series := &tsdb.TimeSeries{ series := plugins.DataTimeSeries{
Name: "test time series", Name: "test time series",
} }
series.Points = append(series.Points, tsdb.NewTimePoint(null.FloatFromPtr(nil), 1)) series.Points = append(series.Points, plugins.DataTimePoint{null.FloatFromPtr(nil), null.FloatFrom(1)})
series.Points = append(series.Points, tsdb.NewTimePoint(null.FloatFromPtr(nil), 2)) series.Points = append(series.Points, plugins.DataTimePoint{null.FloatFromPtr(nil), null.FloatFrom(2)})
So(reducer.Reduce(series).Valid, ShouldEqual, false) So(reducer.Reduce(series).Valid, ShouldEqual, false)
}) })
@ -116,14 +116,14 @@ func TestSimpleReducer(t *testing.T) {
Convey("avg of number values and null values should ignore nulls", func() { Convey("avg of number values and null values should ignore nulls", func() {
reducer := newSimpleReducer("avg") reducer := newSimpleReducer("avg")
series := &tsdb.TimeSeries{ series := plugins.DataTimeSeries{
Name: "test time series", Name: "test time series",
} }
series.Points = append(series.Points, tsdb.NewTimePoint(null.FloatFrom(3), 1)) series.Points = append(series.Points, plugins.DataTimePoint{null.FloatFrom(3), null.FloatFrom(1)})
series.Points = append(series.Points, tsdb.NewTimePoint(null.FloatFromPtr(nil), 2)) series.Points = append(series.Points, plugins.DataTimePoint{null.FloatFromPtr(nil), null.FloatFrom(2)})
series.Points = append(series.Points, tsdb.NewTimePoint(null.FloatFromPtr(nil), 3)) series.Points = append(series.Points, plugins.DataTimePoint{null.FloatFromPtr(nil), null.FloatFrom(3)})
series.Points = append(series.Points, tsdb.NewTimePoint(null.FloatFrom(3), 4)) series.Points = append(series.Points, plugins.DataTimePoint{null.FloatFrom(3), null.FloatFrom(4)})
So(reducer.Reduce(series).Float64, ShouldEqual, float64(3)) So(reducer.Reduce(series).Float64, ShouldEqual, float64(3))
}) })
@ -181,12 +181,12 @@ func TestSimpleReducer(t *testing.T) {
Convey("diff with only nulls", func() { Convey("diff with only nulls", func() {
reducer := newSimpleReducer("diff") reducer := newSimpleReducer("diff")
series := &tsdb.TimeSeries{ series := plugins.DataTimeSeries{
Name: "test time series", Name: "test time series",
} }
series.Points = append(series.Points, tsdb.NewTimePoint(null.FloatFromPtr(nil), 1)) series.Points = append(series.Points, plugins.DataTimePoint{null.FloatFromPtr(nil), null.FloatFrom(1)})
series.Points = append(series.Points, tsdb.NewTimePoint(null.FloatFromPtr(nil), 2)) series.Points = append(series.Points, plugins.DataTimePoint{null.FloatFromPtr(nil), null.FloatFrom(2)})
So(reducer.Reduce(series).Valid, ShouldEqual, false) So(reducer.Reduce(series).Valid, ShouldEqual, false)
}) })
@ -244,12 +244,12 @@ func TestSimpleReducer(t *testing.T) {
Convey("diff_abs with only nulls", func() { Convey("diff_abs with only nulls", func() {
reducer := newSimpleReducer("diff_abs") reducer := newSimpleReducer("diff_abs")
series := &tsdb.TimeSeries{ series := plugins.DataTimeSeries{
Name: "test time series", Name: "test time series",
} }
series.Points = append(series.Points, tsdb.NewTimePoint(null.FloatFromPtr(nil), 1)) series.Points = append(series.Points, plugins.DataTimePoint{null.FloatFromPtr(nil), null.FloatFrom(1)})
series.Points = append(series.Points, tsdb.NewTimePoint(null.FloatFromPtr(nil), 2)) series.Points = append(series.Points, plugins.DataTimePoint{null.FloatFromPtr(nil), null.FloatFrom(2)})
So(reducer.Reduce(series).Valid, ShouldEqual, false) So(reducer.Reduce(series).Valid, ShouldEqual, false)
}) })
@ -307,12 +307,12 @@ func TestSimpleReducer(t *testing.T) {
Convey("percent_diff with only nulls", func() { Convey("percent_diff with only nulls", func() {
reducer := newSimpleReducer("percent_diff") reducer := newSimpleReducer("percent_diff")
series := &tsdb.TimeSeries{ series := plugins.DataTimeSeries{
Name: "test time series", Name: "test time series",
} }
series.Points = append(series.Points, tsdb.NewTimePoint(null.FloatFromPtr(nil), 1)) series.Points = append(series.Points, plugins.DataTimePoint{null.FloatFromPtr(nil), null.FloatFrom(1)})
series.Points = append(series.Points, tsdb.NewTimePoint(null.FloatFromPtr(nil), 2)) series.Points = append(series.Points, plugins.DataTimePoint{null.FloatFromPtr(nil), null.FloatFrom(2)})
So(reducer.Reduce(series).Valid, ShouldEqual, false) So(reducer.Reduce(series).Valid, ShouldEqual, false)
}) })
@ -370,12 +370,12 @@ func TestSimpleReducer(t *testing.T) {
Convey("percent_diff_abs with only nulls", func() { Convey("percent_diff_abs with only nulls", func() {
reducer := newSimpleReducer("percent_diff_abs") reducer := newSimpleReducer("percent_diff_abs")
series := &tsdb.TimeSeries{ series := plugins.DataTimeSeries{
Name: "test time series", Name: "test time series",
} }
series.Points = append(series.Points, tsdb.NewTimePoint(null.FloatFromPtr(nil), 1)) series.Points = append(series.Points, plugins.DataTimePoint{null.FloatFromPtr(nil), null.FloatFrom(1)})
series.Points = append(series.Points, tsdb.NewTimePoint(null.FloatFromPtr(nil), 2)) series.Points = append(series.Points, plugins.DataTimePoint{null.FloatFromPtr(nil), null.FloatFrom(2)})
So(reducer.Reduce(series).Valid, ShouldEqual, false) So(reducer.Reduce(series).Valid, ShouldEqual, false)
}) })
@ -399,12 +399,12 @@ func TestSimpleReducer(t *testing.T) {
func testReducer(reducerType string, datapoints ...float64) float64 { func testReducer(reducerType string, datapoints ...float64) float64 {
reducer := newSimpleReducer(reducerType) reducer := newSimpleReducer(reducerType)
series := &tsdb.TimeSeries{ series := plugins.DataTimeSeries{
Name: "test time series", Name: "test time series",
} }
for idx := range datapoints { for idx := range datapoints {
series.Points = append(series.Points, tsdb.NewTimePoint(null.FloatFrom(datapoints[idx]), 1234134)) series.Points = append(series.Points, plugins.DataTimePoint{null.FloatFrom(datapoints[idx]), null.FloatFrom(1234134)})
} }
return reducer.Reduce(series).Float64 return reducer.Reduce(series).Float64

View File

@ -13,6 +13,7 @@ import (
"github.com/grafana/grafana/pkg/registry" "github.com/grafana/grafana/pkg/registry"
"github.com/grafana/grafana/pkg/services/rendering" "github.com/grafana/grafana/pkg/services/rendering"
"github.com/grafana/grafana/pkg/setting" "github.com/grafana/grafana/pkg/setting"
"github.com/grafana/grafana/pkg/tsdb"
"github.com/opentracing/opentracing-go" "github.com/opentracing/opentracing-go"
"github.com/opentracing/opentracing-go/ext" "github.com/opentracing/opentracing-go/ext"
tlog "github.com/opentracing/opentracing-go/log" tlog "github.com/opentracing/opentracing-go/log"
@ -26,6 +27,7 @@ type AlertEngine struct {
RenderService rendering.Service `inject:""` RenderService rendering.Service `inject:""`
Bus bus.Bus `inject:""` Bus bus.Bus `inject:""`
RequestValidator models.PluginRequestValidator `inject:""` RequestValidator models.PluginRequestValidator `inject:""`
DataService *tsdb.Service `inject:""`
execQueue chan *Job execQueue chan *Job
ticker *Ticker ticker *Ticker
@ -50,7 +52,7 @@ func (e *AlertEngine) Init() error {
e.ticker = NewTicker(time.Now(), time.Second*0, clock.New(), 1) e.ticker = NewTicker(time.Now(), time.Second*0, clock.New(), 1)
e.execQueue = make(chan *Job, 1000) e.execQueue = make(chan *Job, 1000)
e.scheduler = newScheduler() e.scheduler = newScheduler()
e.evalHandler = NewEvalHandler() e.evalHandler = NewEvalHandler(e.DataService)
e.ruleReader = newRuleReader() e.ruleReader = newRuleReader()
e.log = log.New("alerting.engine") e.log = log.New("alerting.engine")
e.resultHandler = newResultHandler(e.RenderService) e.resultHandler = newResultHandler(e.RenderService)

View File

@ -7,19 +7,22 @@ import (
"github.com/grafana/grafana/pkg/infra/log" "github.com/grafana/grafana/pkg/infra/log"
"github.com/grafana/grafana/pkg/infra/metrics" "github.com/grafana/grafana/pkg/infra/metrics"
"github.com/grafana/grafana/pkg/tsdb/tsdbifaces"
) )
// DefaultEvalHandler is responsible for evaluating the alert rule. // DefaultEvalHandler is responsible for evaluating the alert rule.
type DefaultEvalHandler struct { type DefaultEvalHandler struct {
log log.Logger log log.Logger
alertJobTimeout time.Duration alertJobTimeout time.Duration
requestHandler tsdbifaces.RequestHandler
} }
// NewEvalHandler is the `DefaultEvalHandler` constructor. // NewEvalHandler is the `DefaultEvalHandler` constructor.
func NewEvalHandler() *DefaultEvalHandler { func NewEvalHandler(requestHandler tsdbifaces.RequestHandler) *DefaultEvalHandler {
return &DefaultEvalHandler{ return &DefaultEvalHandler{
log: log.New("alerting.evalHandler"), log: log.New("alerting.evalHandler"),
alertJobTimeout: time.Second * 5, alertJobTimeout: time.Second * 5,
requestHandler: requestHandler,
} }
} }
@ -31,7 +34,7 @@ func (e *DefaultEvalHandler) Eval(context *EvalContext) {
for i := 0; i < len(context.Rule.Conditions); i++ { for i := 0; i < len(context.Rule.Conditions); i++ {
condition := context.Rule.Conditions[i] condition := context.Rule.Conditions[i]
cr, err := condition.Eval(context) cr, err := condition.Eval(context, e.requestHandler)
if err != nil { if err != nil {
context.Error = err context.Error = err
} }
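As an illustration of what the explicit requestHandler parameter buys: any value whose HandleRequest method matches the signature shown in this diff satisfies tsdbifaces.RequestHandler, so a stub (or nil, when no condition issues a query) can replace the real TSDB service in tests. The sketch below is hedged and hypothetical; only the signatures come from the diff.

```go
package alertingtest // hypothetical helper package, for illustration only

import (
	"context"

	"github.com/grafana/grafana/pkg/models"
	"github.com/grafana/grafana/pkg/plugins"
	"github.com/grafana/grafana/pkg/services/alerting"
	"github.com/grafana/grafana/pkg/tsdb/tsdbifaces"
)

// stubRequestHandler returns a canned response instead of querying a data source.
type stubRequestHandler struct {
	response plugins.DataResponse
}

func (s stubRequestHandler) HandleRequest(_ context.Context, _ *models.DataSource,
	_ plugins.DataQuery) (plugins.DataResponse, error) {
	return s.response, nil
}

// Compile-time check that the stub satisfies the request handler interface.
var _ tsdbifaces.RequestHandler = stubRequestHandler{}

func newTestEvalHandler() *alerting.DefaultEvalHandler {
	// Passing nil is also fine when no condition actually issues a query,
	// as the evaluation handler tests further down in this diff do.
	return alerting.NewEvalHandler(stubRequestHandler{})
}
```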

View File

@ -5,6 +5,7 @@ import (
"testing" "testing"
"github.com/grafana/grafana/pkg/services/validations" "github.com/grafana/grafana/pkg/services/validations"
"github.com/grafana/grafana/pkg/tsdb/tsdbifaces"
. "github.com/smartystreets/goconvey/convey" . "github.com/smartystreets/goconvey/convey"
) )
@ -16,13 +17,13 @@ type conditionStub struct {
noData bool noData bool
} }
func (c *conditionStub) Eval(context *EvalContext) (*ConditionResult, error) { func (c *conditionStub) Eval(context *EvalContext, reqHandler tsdbifaces.RequestHandler) (*ConditionResult, error) {
return &ConditionResult{Firing: c.firing, EvalMatches: c.matches, Operator: c.operator, NoDataFound: c.noData}, nil return &ConditionResult{Firing: c.firing, EvalMatches: c.matches, Operator: c.operator, NoDataFound: c.noData}, nil
} }
func TestAlertingEvaluationHandler(t *testing.T) { func TestAlertingEvaluationHandler(t *testing.T) {
Convey("Test alert evaluation handler", t, func() { Convey("Test alert evaluation handler", t, func() {
handler := NewEvalHandler() handler := NewEvalHandler(nil)
Convey("Show return triggered with single passing condition", func() { Convey("Show return triggered with single passing condition", func() {
context := NewEvalContext(context.TODO(), &Rule{ context := NewEvalContext(context.TODO(), &Rule{

View File

@ -5,6 +5,7 @@ import (
"time" "time"
"github.com/grafana/grafana/pkg/models" "github.com/grafana/grafana/pkg/models"
"github.com/grafana/grafana/pkg/tsdb/tsdbifaces"
) )
type evalHandler interface { type evalHandler interface {
@ -59,5 +60,5 @@ type ConditionResult struct {
// Condition is responsible for evaluating an alert condition. // Condition is responsible for evaluating an alert condition.
type Condition interface { type Condition interface {
Eval(result *EvalContext) (*ConditionResult, error) Eval(result *EvalContext, requestHandler tsdbifaces.RequestHandler) (*ConditionResult, error)
} }
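To make the updated contract concrete, here is a hedged sketch of a condition implementing this interface. The receiver type, its fields, and the threshold logic are invented for illustration; the method signature, the request handler call, and the result types come from this diff.

```go
package conditionsketch // hypothetical package, for illustration only

import (
	"context"

	"github.com/grafana/grafana/pkg/models"
	"github.com/grafana/grafana/pkg/plugins"
	"github.com/grafana/grafana/pkg/services/alerting"
	"github.com/grafana/grafana/pkg/tsdb/tsdbifaces"
)

// thresholdCondition is a made-up condition: it runs one query through the
// injected request handler and fires if any returned value exceeds a threshold.
type thresholdCondition struct {
	datasource *models.DataSource
	query      plugins.DataQuery
	threshold  float64
}

func (c *thresholdCondition) Eval(result *alerting.EvalContext,
	requestHandler tsdbifaces.RequestHandler) (*alerting.ConditionResult, error) {
	// The handler is passed in rather than resolved through a global, so the
	// alerting package no longer needs a package-level TSDB entry point.
	// A real condition would propagate a context from the evaluation instead
	// of using Background().
	resp, err := requestHandler.HandleRequest(context.Background(), c.datasource, c.query)
	if err != nil {
		return nil, err
	}

	firing := false
	for _, queryResult := range resp.Results {
		for _, series := range queryResult.Series {
			for _, point := range series.Points {
				// point[0] is the value, point[1] the timestamp, both null.Float.
				if point[0].Valid && point[0].Float64 > c.threshold {
					firing = true
				}
			}
		}
	}
	return &alerting.ConditionResult{Firing: firing}, nil
}

// Compile-time check that the sketch satisfies alerting.Condition.
var _ alerting.Condition = (*thresholdCondition)(nil)
```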

View File

@ -6,6 +6,7 @@ import (
"github.com/grafana/grafana/pkg/components/simplejson" "github.com/grafana/grafana/pkg/components/simplejson"
"github.com/grafana/grafana/pkg/models" "github.com/grafana/grafana/pkg/models"
"github.com/grafana/grafana/pkg/services/sqlstore" "github.com/grafana/grafana/pkg/services/sqlstore"
"github.com/grafana/grafana/pkg/tsdb/tsdbifaces"
. "github.com/smartystreets/goconvey/convey" . "github.com/smartystreets/goconvey/convey"
"github.com/stretchr/testify/assert" "github.com/stretchr/testify/assert"
"github.com/stretchr/testify/require" "github.com/stretchr/testify/require"
@ -13,7 +14,7 @@ import (
type FakeCondition struct{} type FakeCondition struct{}
func (f *FakeCondition) Eval(context *EvalContext) (*ConditionResult, error) { func (f *FakeCondition) Eval(context *EvalContext, reqHandler tsdbifaces.RequestHandler) (*ConditionResult, error) {
return &ConditionResult{}, nil return &ConditionResult{}, nil
} }

Some files were not shown because too many files have changed in this diff.