package opentsdb

import (
	"context"
	"encoding/json"
	"fmt"
	"io/ioutil"
	"net/http"
	"net/url"
	"path"
	"strconv"
	"strings"
	"time"

	"github.com/grafana/grafana-plugin-sdk-go/backend"
	"github.com/grafana/grafana-plugin-sdk-go/backend/datasource"
	"github.com/grafana/grafana-plugin-sdk-go/backend/instancemgmt"
	"github.com/grafana/grafana-plugin-sdk-go/data"
	"github.com/grafana/grafana/pkg/components/simplejson"
	"github.com/grafana/grafana/pkg/infra/httpclient"
	"github.com/grafana/grafana/pkg/infra/log"
	"github.com/grafana/grafana/pkg/plugins"
	"github.com/grafana/grafana/pkg/plugins/backendplugin/coreplugin"
	"github.com/grafana/grafana/pkg/setting"
	"golang.org/x/net/context/ctxhttp"
)
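
// Service is the OpenTSDB data source service. It implements
// backend.QueryDataHandler and holds a per-data-source instance manager.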
type Service struct {
	logger log.Logger
	im     instancemgmt.InstanceManager
}
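
// ProvideService wires up the OpenTSDB service and registers it with the
// core backend plugin registrar under the "opentsdb" plugin ID, so Grafana
// routes queries for this data source to QueryData.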
func ProvideService(httpClientProvider httpclient.Provider, registrar plugins.CoreBackendRegistrar) (*Service, error) {
	im := datasource.NewInstanceManager(newInstanceSettings(httpClientProvider))
	s := &Service{
		logger: log.New("tsdb.opentsdb"),
		im:     im,
	}

	factory := coreplugin.New(backend.ServeOpts{
		QueryDataHandler: s,
	})
	if err := registrar.LoadAndRegister("opentsdb", factory); err != nil {
		return nil, err
	}

	return s, nil
}
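
// datasourceInfo is the per-data-source instance state: a configured HTTP
// client and the base URL of the OpenTSDB server.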
type datasourceInfo struct {
	HTTPClient *http.Client
	URL        string
}

type DsAccess string
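
// newInstanceSettings returns the factory used by the instance manager to
// build a datasourceInfo from the stored data source settings whenever a new
// instance is needed.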
func newInstanceSettings(httpClientProvider httpclient.Provider) datasource.InstanceFactoryFunc {
	return func(settings backend.DataSourceInstanceSettings) (instancemgmt.Instance, error) {
		opts, err := settings.HTTPClientOptions()
		if err != nil {
			return nil, err
		}

		client, err := httpClientProvider.New(opts)
		if err != nil {
			return nil, err
		}

		model := &datasourceInfo{
			HTTPClient: client,
			URL:        settings.URL,
		}

		return model, nil
	}
}
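
// QueryData converts all queries in the request into a single OpenTSDB query
// payload. Note that the time range of the first query is applied to the
// whole payload.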
func (s *Service) QueryData(ctx context.Context, req *backend.QueryDataRequest) (*backend.QueryDataResponse, error) {
	var tsdbQuery OpenTsdbQuery

	q := req.Queries[0]

	tsdbQuery.Start = q.TimeRange.From.UnixNano() / int64(time.Millisecond)
	tsdbQuery.End = q.TimeRange.To.UnixNano() / int64(time.Millisecond)

	for _, query := range req.Queries {
		metric := s.buildMetric(query)
		tsdbQuery.Queries = append(tsdbQuery.Queries, metric)
	}

	// TODO: Don't use global variable
	if setting.Env == setting.Dev {
		s.logger.Debug("OpenTsdb request", "params", tsdbQuery)
	}

	dsInfo, err := s.getDSInfo(req.PluginContext)
	if err != nil {
		return nil, err
	}

	request, err := s.createRequest(dsInfo, tsdbQuery)
	if err != nil {
		return &backend.QueryDataResponse{}, err
	}

	res, err := ctxhttp.Do(ctx, dsInfo.HTTPClient, request)
	if err != nil {
		return &backend.QueryDataResponse{}, err
	}

	result, err := s.parseResponse(res)
	if err != nil {
		return &backend.QueryDataResponse{}, err
	}

	return result, nil
}
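
// createRequest marshals the query payload to JSON and builds a POST request
// against the data source's /api/query endpoint.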
func (s *Service) createRequest(dsInfo *datasourceInfo, data OpenTsdbQuery) (*http.Request, error) {
	u, err := url.Parse(dsInfo.URL)
	if err != nil {
		return nil, err
	}
	u.Path = path.Join(u.Path, "api/query")

	postData, err := json.Marshal(data)
	if err != nil {
		s.logger.Info("Failed marshaling data", "error", err)
		return nil, fmt.Errorf("failed to create request: %w", err)
	}

	req, err := http.NewRequest(http.MethodPost, u.String(), strings.NewReader(string(postData)))
	if err != nil {
		s.logger.Info("Failed to create request", "error", err)
		return nil, fmt.Errorf("failed to create request: %w", err)
	}

	req.Header.Set("Content-Type", "application/json")
	return req, nil
}
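
// parseResponse decodes the OpenTSDB response body, turns each returned
// series into a time/value data frame, and attaches all frames to the
// response under ref ID "A".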
func (s *Service) parseResponse(res *http.Response) (*backend.QueryDataResponse, error) {
	resp := backend.NewQueryDataResponse()

	body, err := ioutil.ReadAll(res.Body)
	if err != nil {
		return nil, err
	}
	defer func() {
		if err := res.Body.Close(); err != nil {
			s.logger.Warn("Failed to close response body", "err", err)
		}
	}()

	if res.StatusCode/100 != 2 {
		s.logger.Info("Request failed", "status", res.Status, "body", string(body))
		return nil, fmt.Errorf("request failed, status: %s", res.Status)
	}

	var responseData []OpenTsdbResponse
	err = json.Unmarshal(body, &responseData)
	if err != nil {
		s.logger.Info("Failed to unmarshal opentsdb response", "error", err, "status", res.Status, "body", string(body))
		return nil, err
	}

	frames := data.Frames{}
	for _, val := range responseData {
		timeVector := make([]time.Time, 0, len(val.DataPoints))
		values := make([]float64, 0, len(val.DataPoints))
		name := val.Metric

		for timeString, value := range val.DataPoints {
			timestamp, err := strconv.ParseInt(timeString, 10, 64)
			if err != nil {
				s.logger.Info("Failed to unmarshal opentsdb timestamp", "timestamp", timeString)
				return nil, err
			}
			timeVector = append(timeVector, time.Unix(timestamp, 0).UTC())
			values = append(values, value)
		}
		frames = append(frames, data.NewFrame(name,
			data.NewField("time", nil, timeVector),
			data.NewField("value", nil, values)))
	}
	result := resp.Responses["A"]
	result.Frames = frames
	resp.Responses["A"] = result
	return resp, nil
}
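
// buildMetric translates a Grafana query model into the metric map expected
// by the OpenTSDB /api/query payload. An illustrative (not exhaustive) query
// JSON it can consume, based on the fields read below, is:
//
//	{
//	  "metric": "sys.cpu.user",
//	  "aggregator": "avg",
//	  "disableDownsampling": false,
//	  "downsampleInterval": "1m",
//	  "downsampleAggregator": "avg",
//	  "downsampleFillPolicy": "none",
//	  "shouldComputeRate": false,
//	  "tags": {"host": "web01"}
//	}
//
// On an invalid JSON model it returns nil.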
func (s *Service) buildMetric(query backend.DataQuery) map[string]interface{} {
	metric := make(map[string]interface{})

	model, err := simplejson.NewJson(query.JSON)
	if err != nil {
		return nil
	}

	// Setting metric and aggregator
	metric["metric"] = model.Get("metric").MustString()
	metric["aggregator"] = model.Get("aggregator").MustString()

	// Setting downsampling options
	disableDownsampling := model.Get("disableDownsampling").MustBool()
	if !disableDownsampling {
		downsampleInterval := model.Get("downsampleInterval").MustString()
		if downsampleInterval == "" {
			downsampleInterval = "1m" // default value for blank
		}
		downsample := downsampleInterval + "-" + model.Get("downsampleAggregator").MustString()
		if model.Get("downsampleFillPolicy").MustString() != "none" {
			metric["downsample"] = downsample + "-" + model.Get("downsampleFillPolicy").MustString()
		} else {
			metric["downsample"] = downsample
		}
	}

	// Setting rate options
	if model.Get("shouldComputeRate").MustBool() {
		metric["rate"] = true
		rateOptions := make(map[string]interface{})
		rateOptions["counter"] = model.Get("isCounter").MustBool()

		counterMax, counterMaxCheck := model.CheckGet("counterMax")
		if counterMaxCheck {
			rateOptions["counterMax"] = counterMax.MustFloat64()
		}

		resetValue, resetValueCheck := model.CheckGet("counterResetValue")
		if resetValueCheck {
			rateOptions["resetValue"] = resetValue.MustFloat64()
		}

		if !counterMaxCheck && (!resetValueCheck || resetValue.MustFloat64() == 0) {
			rateOptions["dropResets"] = true
		}

		metric["rateOptions"] = rateOptions
	}

	// Setting tags
	tags, tagsCheck := model.CheckGet("tags")
	if tagsCheck && len(tags.MustMap()) > 0 {
		metric["tags"] = tags.MustMap()
	}

	// Setting filters
	filters, filtersCheck := model.CheckGet("filters")
	if filtersCheck && len(filters.MustArray()) > 0 {
		metric["filters"] = filters.MustArray()
	}

	return metric
}
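
// getDSInfo resolves the cached datasourceInfo instance for the data source
// referenced by the plugin context.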
func (s *Service) getDSInfo(pluginCtx backend.PluginContext) (*datasourceInfo, error) {
	i, err := s.im.Get(pluginCtx)
	if err != nil {
		return nil, err
	}

	instance, ok := i.(*datasourceInfo)
	if !ok {
		return nil, fmt.Errorf("failed to cast datasource info")
	}

	return instance, nil
}