WIP: Spawn backend plugins v2 (#19835)

* WIP: Spawn backend plugins v2

* Add test for plugin version

* Fix support for SDK plugins

Co-authored-by: Kyle Brandt <kyle@kbrandt.com>
Co-authored-by: Marcus Olsson <olsson.e.marcus@gmail.com>
Co-authored-by: Marcus Efraimsson <marcus.efraimsson@gmail.com>

* WIP: Draft PR for fork of V2 sdk / bi-directional support (#19890)

* temporarily use export-datasource-plugin branch of grafana-plugin-sdk-go

* fix failing test

* remove debug (spew) lines

* misc cleanup

* add expressions feature toggle

* use latest grafana-plugin-sdk-go
Author: Sofia Papagiannaki
Date: 2019-10-24 18:15:27 +03:00
Committed by: Marcus Efraimsson
Parent: b0198e7c9c
Commit: fc08c26025
276 changed files with 30794 additions and 2504 deletions


@@ -0,0 +1,176 @@
package wrapper
import (
"context"
"errors"
"fmt"
sdk "github.com/grafana/grafana-plugin-sdk-go"
"github.com/grafana/grafana-plugin-sdk-go/dataframe"
"github.com/grafana/grafana-plugin-sdk-go/genproto/datasource"
"github.com/grafana/grafana/pkg/bus"
"github.com/grafana/grafana/pkg/components/simplejson"
"github.com/grafana/grafana/pkg/infra/log"
"github.com/grafana/grafana/pkg/models"
"github.com/grafana/grafana/pkg/tsdb"
)
type grafanaAPI struct {
logger log.Logger
}
func (s *grafanaAPI) QueryDatasource(ctx context.Context, req *datasource.QueryDatasourceRequest) (*datasource.QueryDatasourceResponse, error) {
if len(req.Queries) == 0 {
return nil, fmt.Errorf("zero queries found in datasource request")
}
getDsInfo := &models.GetDataSourceByIdQuery{
Id: req.DatasourceId,
OrgId: req.OrgId,
}
if err := bus.Dispatch(getDsInfo); err != nil {
return nil, fmt.Errorf("Could not find datasource %v", err)
}
// Convert plugin-model (datasource) queries to tsdb queries
queries := make([]*tsdb.Query, len(req.Queries))
for i, query := range req.Queries {
sj, err := simplejson.NewJson([]byte(query.ModelJson))
if err != nil {
return nil, err
}
queries[i] = &tsdb.Query{
RefId: query.RefId,
IntervalMs: query.IntervalMs,
MaxDataPoints: query.MaxDataPoints,
DataSource: getDsInfo.Result,
Model: sj,
}
}
timeRange := tsdb.NewTimeRange(req.TimeRange.FromRaw, req.TimeRange.ToRaw)
tQ := &tsdb.TsdbQuery{
TimeRange: timeRange,
Queries: queries,
}
// Execute the converted queries
tsdbRes, err := tsdb.HandleRequest(ctx, getDsInfo.Result, tQ)
if err != nil {
return nil, err
}
// Convert tsdb results (map) to plugin-model/datasource (slice) results
// Only error and Series responses are mapped.
results := make([]*datasource.QueryResult, len(tsdbRes.Results))
resIdx := 0
for refID, res := range tsdbRes.Results {
qr := &datasource.QueryResult{
RefId: refID,
}
if res.Error != nil {
qr.Error = res.ErrorString
results[resIdx] = qr
resIdx++
continue
}
encodedFrames := make([][]byte, len(res.Series))
for sIdx, series := range res.Series {
frame, err := tsdb.SeriesToFrame(series)
if err != nil {
return nil, err
}
encodedFrames[sIdx], err = dataframe.MarshalArrow(frame)
if err != nil {
return nil, err
}
}
qr.Dataframes = encodedFrames
results[resIdx] = qr
resIdx++
}
return &datasource.QueryDatasourceResponse{Results: results}, nil
}
func NewDatasourcePluginWrapperV2(log log.Logger, plugin sdk.DatasourcePlugin) *DatasourcePluginWrapperV2 {
return &DatasourcePluginWrapperV2{DatasourcePlugin: plugin, logger: log}
}
type DatasourcePluginWrapperV2 struct {
sdk.DatasourcePlugin
logger log.Logger
}
func (tw *DatasourcePluginWrapperV2) Query(ctx context.Context, ds *models.DataSource, query *tsdb.TsdbQuery) (*tsdb.Response, error) {
jsonData, err := ds.JsonData.MarshalJSON()
if err != nil {
return nil, err
}
pbQuery := &datasource.DatasourceRequest{
Datasource: &datasource.DatasourceInfo{
Name: ds.Name,
Type: ds.Type,
Url: ds.Url,
Id: ds.Id,
OrgId: ds.OrgId,
JsonData: string(jsonData),
DecryptedSecureJsonData: ds.SecureJsonData.Decrypt(),
},
TimeRange: &datasource.TimeRange{
FromRaw: query.TimeRange.From,
ToRaw: query.TimeRange.To,
ToEpochMs: query.TimeRange.GetToAsMsEpoch(),
FromEpochMs: query.TimeRange.GetFromAsMsEpoch(),
},
Queries: []*datasource.Query{},
}
for _, q := range query.Queries {
modelJSON, err := q.Model.MarshalJSON()
if err != nil {
return nil, err
}
pbQuery.Queries = append(pbQuery.Queries, &datasource.Query{
ModelJson: string(modelJSON),
IntervalMs: q.IntervalMs,
RefId: q.RefId,
MaxDataPoints: q.MaxDataPoints,
})
}
pbres, err := tw.DatasourcePlugin.Query(ctx, pbQuery, &grafanaAPI{logger: tw.logger})
if err != nil {
return nil, err
}
res := &tsdb.Response{
Results: map[string]*tsdb.QueryResult{},
}
for _, r := range pbres.Results {
qr := &tsdb.QueryResult{
RefId: r.RefId,
}
if r.Error != "" {
qr.Error = errors.New(r.Error)
qr.ErrorString = r.Error
}
if r.MetaJson != "" {
metaJSON, err := simplejson.NewJson([]byte(r.MetaJson))
if err != nil {
tw.logger.Error("Error parsing JSON Meta field: " + err.Error())
}
qr.Meta = metaJSON
}
qr.Dataframes = r.Dataframes
res.Results[r.RefId] = qr
}
return res, nil
}
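
The grafanaAPI value passed as the third argument to DatasourcePlugin.Query is the bi-directional part: a v2 plugin can call QueryDatasource back into Grafana to run queries against another datasource and receive the results as Arrow-encoded frames. Below is a minimal plugin-side sketch of that callback; callerAPI and queryOther are hypothetical names, since the interface the SDK actually exposes to plugins is not part of this diff.

package example

import (
	"context"
	"errors"

	"github.com/grafana/grafana-plugin-sdk-go/genproto/datasource"
)

// callerAPI is a hypothetical local interface mirroring the QueryDatasource
// callback that grafanaAPI implements above; the concrete interface the SDK
// hands to plugins may use a different name.
type callerAPI interface {
	QueryDatasource(ctx context.Context, req *datasource.QueryDatasourceRequest) (*datasource.QueryDatasourceResponse, error)
}

// queryOther asks Grafana to run a single query against datasource 2 in org 1
// and returns the Arrow-encoded frames of the first result.
func queryOther(ctx context.Context, api callerAPI) ([][]byte, error) {
	resp, err := api.QueryDatasource(ctx, &datasource.QueryDatasourceRequest{
		OrgId:        1,
		DatasourceId: 2,
		TimeRange:    &datasource.TimeRange{FromRaw: "now-6h", ToRaw: "now"},
		Queries: []*datasource.Query{{
			RefId:         "A",
			IntervalMs:    1000,
			MaxDataPoints: 100,
			ModelJson:     `{"target": "cpu"}`,
		}},
	})
	if err != nil {
		return nil, err
	}
	if len(resp.Results) == 0 {
		return nil, errors.New("no results returned")
	}
	return resp.Results[0].Dataframes, nil
}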


@@ -3,11 +3,16 @@ package plugins
import (
"context"
"encoding/json"
"errors"
"fmt"
"os/exec"
"path"
"time"
"github.com/grafana/grafana-plugin-model/go/datasource"
"github.com/grafana/grafana/pkg/setting"
datasourceV1 "github.com/grafana/grafana-plugin-model/go/datasource"
sdk "github.com/grafana/grafana-plugin-sdk-go"
"github.com/grafana/grafana/pkg/infra/log"
"github.com/grafana/grafana/pkg/models"
"github.com/grafana/grafana/pkg/plugins/datasource/wrapper"
@@ -34,16 +39,29 @@ type DataSourcePlugin struct {
Backend bool `json:"backend,omitempty"`
Executable string `json:"executable,omitempty"`
SDK bool `json:"sdk,omitempty"`
log log.Logger
client *plugin.Client
}
func isExpressionsEnabled() bool {
v, ok := setting.FeatureToggles["expressions"]
if !ok {
return false
}
return v
}
func (p *DataSourcePlugin) Load(decoder *json.Decoder, pluginDir string) error {
if err := decoder.Decode(&p); err != nil {
return err
}
if !p.isVersionOne() && !isExpressionsEnabled() {
return errors.New("A plugin version 2 was found but expressions feature toggle are not enabled")
}
if err := p.registerPlugin(pluginDir); err != nil {
return err
}
@@ -73,18 +91,35 @@ func (p *DataSourcePlugin) startBackendPlugin(ctx context.Context, log log.Logge
return nil
}
func (p *DataSourcePlugin) isVersionOne() bool {
return !p.SDK
}
func (p *DataSourcePlugin) spawnSubProcess() error {
cmd := ComposePluginStartCommmand(p.Executable)
fullpath := path.Join(p.PluginDir, cmd)
p.client = plugin.NewClient(&plugin.ClientConfig{
HandshakeConfig: handshakeConfig,
Plugins: map[string]plugin.Plugin{p.Id: &datasource.DatasourcePluginImpl{}},
Cmd: exec.Command(fullpath),
AllowedProtocols: []plugin.Protocol{plugin.ProtocolGRPC},
Logger: LogWrapper{Logger: p.log},
})
var newClient *plugin.Client
if p.isVersionOne() {
newClient = plugin.NewClient(&plugin.ClientConfig{
HandshakeConfig: handshakeConfig,
Plugins: map[string]plugin.Plugin{p.Id: &datasourceV1.DatasourcePluginImpl{}},
Cmd: exec.Command(fullpath),
AllowedProtocols: []plugin.Protocol{plugin.ProtocolGRPC},
Logger: LogWrapper{Logger: p.log},
})
} else {
newClient = plugin.NewClient(&plugin.ClientConfig{
HandshakeConfig: handshakeConfig,
Plugins: map[string]plugin.Plugin{p.Id: &sdk.DatasourcePluginImpl{}},
Cmd: exec.Command(fullpath),
AllowedProtocols: []plugin.Protocol{plugin.ProtocolGRPC},
Logger: LogWrapper{Logger: p.log},
})
}
p.client = newClient
rpcClient, err := p.client.Client()
if err != nil {
@@ -96,10 +131,22 @@ func (p *DataSourcePlugin) spawnSubProcess() error {
return err
}
plugin := raw.(datasource.DatasourcePlugin)
if p.isVersionOne() {
plugin := raw.(datasourceV1.DatasourcePlugin)
tsdb.RegisterTsdbQueryEndpoint(p.Id, func(dsInfo *models.DataSource) (tsdb.TsdbQueryEndpoint, error) {
return wrapper.NewDatasourcePluginWrapper(p.log, plugin), nil
})
return nil
}
plugin, ok := raw.(sdk.DatasourcePlugin)
if !ok {
return fmt.Errorf("unxpected type %T, expeced sdk.DatasourcePlugin", raw)
}
tsdb.RegisterTsdbQueryEndpoint(p.Id, func(dsInfo *models.DataSource) (tsdb.TsdbQueryEndpoint, error) {
return wrapper.NewDatasourcePluginWrapper(p.log, plugin), nil
return wrapper.NewDatasourcePluginWrapperV2(p.log, plugin), nil
})
return nil
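
A plugin opts into this v2 code path through its plugin.json, via the same fields the DataSourcePlugin struct declares above (backend, executable, sdk). A hypothetical sketch, with made-up id, name, and binary name:

{
  "type": "datasource",
  "name": "My Datasource",
  "id": "my-datasource",
  "backend": true,
  "executable": "my-datasource-plugin",
  "sdk": true
}

With sdk omitted or false, the loader keeps the version-one path and the grafana-plugin-model client; with sdk set to true, loading also requires the expressions feature toggle.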


@@ -0,0 +1,34 @@
package plugins
import (
"bytes"
"encoding/json"
"testing"
"github.com/stretchr/testify/assert"
)
func TestLoadDatasourceVersion(t *testing.T) {
t.Run("If plugin version is not set, it should be treated as plugin version one", func(t *testing.T) {
pluginJSON, _ := json.Marshal(DataSourcePlugin{})
datasourcePlugin := DataSourcePlugin{}
(&datasourcePlugin).Load(json.NewDecoder(bytes.NewReader(pluginJSON)), "/tmp")
assert.True(t, datasourcePlugin.isVersionOne())
})
t.Run("If plugin version is set to one, it should be treated as plugin version one", func(t *testing.T) {
pluginJSON, _ := json.Marshal(DataSourcePlugin{SDK: false})
datasourcePlugin := DataSourcePlugin{}
(&datasourcePlugin).Load(json.NewDecoder(bytes.NewReader(pluginJSON)), "/tmp")
assert.True(t, datasourcePlugin.isVersionOne())
assert.False(t, datasourcePlugin.SDK)
})
t.Run("If plugin version is set to two, it should not be treated as plugin version one", func(t *testing.T) {
pluginJSON, _ := json.Marshal(DataSourcePlugin{SDK: true})
datasourcePlugin := DataSourcePlugin{}
(&datasourcePlugin).Load(json.NewDecoder(bytes.NewReader(pluginJSON)), "/tmp")
assert.False(t, datasourcePlugin.isVersionOne())
assert.True(t, datasourcePlugin.SDK)
})
}


@@ -206,6 +206,7 @@ var (
S3TempImageStoreSecretKey string
ImageUploadProvider string
FeatureToggles map[string]bool
)
// TODO move all global vars to this struct
@@ -955,6 +956,7 @@ func (cfg *Cfg) Load(args *CommandLineArgs) error {
for _, feature := range util.SplitString(featuresTogglesStr) {
cfg.FeatureToggles[feature] = true
}
FeatureToggles = cfg.FeatureToggles
// check old location for this option
if panelsSection.Key("enable_alpha").MustBool(false) {
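
The exported FeatureToggles map is populated from the feature toggles setting parsed just above, so the expressions gate in the plugins package is controlled from Grafana's configuration. Assuming the usual [feature_toggles] section and enable key (not shown in this diff), enabling it would look roughly like:

[feature_toggles]
# comma- or whitespace-separated list of toggles; "expressions" gates the v2 plugin path
enable = expressions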

pkg/tsdb/frame_util.go (new file, 37 lines added)

@@ -0,0 +1,37 @@
package tsdb
import (
"time"
"github.com/grafana/grafana-plugin-sdk-go/dataframe"
)
// SeriesToFrame converts a TimeSeries to an SDK Frame
func SeriesToFrame(series *TimeSeries) (*dataframe.Frame, error) {
timeVec := make([]*time.Time, len(series.Points))
floatVec := make([]*float64, len(series.Points))
for idx, point := range series.Points {
timeVec[idx], floatVec[idx] = convertTSDBTimePoint(point)
}
frame := dataframe.New(series.Name, dataframe.Labels(series.Tags),
dataframe.NewField("time", dataframe.FieldTypeTime, timeVec),
dataframe.NewField("value", dataframe.FieldTypeNumber, floatVec),
)
return frame, nil
}
// convertTSDBTimePoint converts a tsdb.TimePoint into two values appropriate
// for Series values.
func convertTSDBTimePoint(point TimePoint) (t *time.Time, f *float64) {
timeIdx, valueIdx := 1, 0
if point[timeIdx].Valid { // Valid is false when the timestamp is null; leave t nil in that case
tI := int64(point[timeIdx].Float64)
uT := time.Unix(tI/int64(1e+3), (tI%int64(1e+3))*int64(1e+6)) // time.Time from millisecond unix ts
t = &uT
}
if point[valueIdx].Valid {
f = &point[valueIdx].Float64
}
return
}
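
A rough usage sketch of the helpers above, tying them to the Dataframes wiring in the wrapper: build a TimeSeries, convert it to a frame, and Arrow-encode it. The [value, time] point layout follows the valueIdx/timeIdx convention used by convertTSDBTimePoint, and the null package import path is assumed to be Grafana's vendored components/null.

package main

import (
	"fmt"

	"github.com/grafana/grafana-plugin-sdk-go/dataframe"
	"github.com/grafana/grafana/pkg/components/null"
	"github.com/grafana/grafana/pkg/tsdb"
)

func main() {
	series := &tsdb.TimeSeries{
		Name: "cpu",
		Tags: map[string]string{"host": "a"},
		Points: []tsdb.TimePoint{
			// [value, time]; the timestamp is a millisecond unix epoch.
			{null.FloatFrom(3.14), null.FloatFrom(1571929927000)},
		},
	}

	frame, err := tsdb.SeriesToFrame(series)
	if err != nil {
		panic(err)
	}
	encoded, err := dataframe.MarshalArrow(frame)
	if err != nil {
		panic(err)
	}
	// encoded is the per-series payload that QueryDatasource puts into
	// datasource.QueryResult.Dataframes.
	fmt.Printf("Arrow-encoded frame: %d bytes\n", len(encoded))
}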


@@ -32,6 +32,7 @@ type QueryResult struct {
Meta *simplejson.Json `json:"meta,omitempty"`
Series TimeSeriesSlice `json:"series"`
Tables []*Table `json:"tables"`
Dataframes [][]byte `json:"dataframes"`
}
type TimeSeries struct {