grafana/pkg/tsdb/opentsdb/opentsdb.go

274 lines
7.2 KiB
Go
Raw Normal View History

package opentsdb
import (
2016-10-14 03:15:30 -05:00
"context"
Plugins: Refactor Plugin Management (#40477) * add core plugin flow * add instrumentation * move func * remove cruft * support external backend plugins * refactor + clean up * remove comments * refactor loader * simplify core plugin path arg * cleanup loggers * move signature validator to plugins package * fix sig packaging * cleanup plugin model * remove unnecessary plugin field * add start+stop for pm * fix failures * add decommissioned state * export fields just to get things flowing * fix comments * set static routes * make image loading idempotent * merge with backend plugin manager * re-use funcs * reorder imports + remove unnecessary interface * add some TODOs + remove unused func * remove unused instrumentation func * simplify client usage * remove import alias * re-use backendplugin.Plugin interface * re order funcs * improve var name * fix log statements * refactor data model * add logic for dupe check during loading * cleanup state setting * refactor loader * cleanup manager interface * add rendering flow * refactor loading + init * add renderer support * fix renderer plugin * reformat imports * track errors * fix plugin signature inheritance * name param in interface * update func comment * fix func arg name * introduce class concept * remove func * fix external plugin check * apply changes from pm-experiment * fix core plugins * fix imports * rename interface * comment API interface * add support for testdata plugin * enable alerting + use correct core plugin contracts * slim manager API * fix param name * fix filter * support static routes * fix rendering * tidy rendering * get tests compiling * fix install+uninstall * start finder test * add finder test coverage * start loader tests * add test for core plugins * load core + bundled test * add test for nested plugin loading * add test files * clean interface + fix registering some core plugins * refactoring * reformat and create sub packages * simplify core plugin init * fix ctx cancel scenario * 
migrate initializer * remove Init() funcs * add test starter * new logger * flesh out initializer tests * refactoring * remove unused svc * refactor rendering flow * fixup loader tests * add enabled helper func * fix logger name * fix data fetchers * fix case where plugin dir doesn't exist * improve coverage + move dupe checking to loader * remove noisy debug logs * register core plugins automagically * add support for renderer in catalog * make private func + fix req validation * use interface * re-add check for renderer in catalog * tidy up from moving to auto reg core plugins * core plugin registrar * guards * copy over core plugins for test infra * all tests green * renames * propagate new interfaces * kill old manager * get compiling * tidy up * update naming * refactor manager test + cleanup * add more cases to finder test * migrate validator to field * more coverage * refactor dupe checking * add test for plugin class * add coverage for initializer * split out rendering * move * fixup tests * fix uss test * fix frontend settings * fix grafanads test * add check when checking sig errors * fix enabled map * fixup * allow manual setup of CM * rename to cloud-monitoring * remove TODO * add installer interface for testing * loader interface returns * tests passing * refactor + add more coverage * support 'stackdriver' * fix frontend settings loading * improve naming based on package name * small tidy * refactor test * fix renderer start * make cloud-monitoring plugin ID clearer * add plugin update test * add integration tests * don't break all if sig can't be calculated * add root URL check test * add more signature verification tests * update DTO name * update enabled plugins comment * update comments * fix linter * revert fe naming change * fix errors endpoint * reset error code field name * re-order test to help verify * assert -> require * pm check * add missing entry + re-order * re-check * dump icon log * verify manager contents first * reformat * apply PR 
feedback * apply style changes * fix one vs all loading err * improve log output * only start when no signature error * move log * rework plugin update check * fix test * fix multi loading from cfg.PluginSettings * improve log output #2 * add error abstraction to capture errors without registering a plugin * add debug log * add unsigned warning * e2e test attempt * fix logger * set home path * prevent panic * alternate * ugh.. fix home path * return renderer even if not started * make renderer plugin managed * add fallback renderer icon, update renderer badge + prevent changes when renderer is installed * fix icon loading * rollback renderer changes * use correct field * remove unneccessary block * remove newline * remove unused func * fix bundled plugins base + module fields * remove unused field since refactor * add authorizer abstraction * loader only returns plugins expected to run * fix multi log output
2021-11-01 04:53:33 -05:00
"encoding/json"
2016-10-14 03:15:30 -05:00
"fmt"
"io"
"net/http"
"net/url"
2016-10-14 03:15:30 -05:00
"path"
"strings"
"time"
"github.com/grafana/grafana-plugin-sdk-go/backend"
"github.com/grafana/grafana-plugin-sdk-go/backend/datasource"
"github.com/grafana/grafana-plugin-sdk-go/backend/instancemgmt"
"github.com/grafana/grafana-plugin-sdk-go/data"
"github.com/grafana/grafana/pkg/components/simplejson"
"github.com/grafana/grafana/pkg/infra/httpclient"
"github.com/grafana/grafana/pkg/infra/log"
2016-10-14 03:15:30 -05:00
"github.com/grafana/grafana/pkg/setting"
)
// logger is the package-level logger for the OpenTSDB data source plugin.
var logger = log.New("tsdb.opentsdb")

// Service implements the OpenTSDB data source backend. It resolves
// per-datasource state (HTTP client, URL) through the instance manager im.
type Service struct {
	im instancemgmt.InstanceManager
}
// ProvideService constructs the OpenTSDB query service, wiring the instance
// factory into an instance manager backed by the given HTTP client provider.
func ProvideService(httpClientProvider httpclient.Provider) *Service {
	manager := datasource.NewInstanceManager(newInstanceSettings(httpClientProvider))
	return &Service{im: manager}
}
// datasourceInfo is the per-datasource-instance state produced by
// newInstanceSettings: a configured HTTP client and the base URL of the
// OpenTSDB server.
type datasourceInfo struct {
	HTTPClient *http.Client
	URL        string
}

// DsAccess names a datasource access mode.
// NOTE(review): not referenced anywhere in this file — confirm it is still
// used elsewhere before removing.
type DsAccess string
// newInstanceSettings returns the factory the instance manager uses to build
// datasourceInfo state from stored datasource settings.
func newInstanceSettings(httpClientProvider httpclient.Provider) datasource.InstanceFactoryFunc {
	return func(ctx context.Context, settings backend.DataSourceInstanceSettings) (instancemgmt.Instance, error) {
		// Derive HTTP client options (auth, TLS, timeouts) from the settings.
		opts, err := settings.HTTPClientOptions(ctx)
		if err != nil {
			return nil, err
		}

		httpClient, err := httpClientProvider.New(opts)
		if err != nil {
			return nil, err
		}

		return &datasourceInfo{
			HTTPClient: httpClient,
			URL:        settings.URL,
		}, nil
	}
}
func (s *Service) QueryData(ctx context.Context, req *backend.QueryDataRequest) (*backend.QueryDataResponse, error) {
2016-10-14 03:15:30 -05:00
var tsdbQuery OpenTsdbQuery
logger := logger.FromContext(ctx)
q := req.Queries[0]
2016-10-16 06:12:13 -05:00
myRefID := q.RefID
tsdbQuery.Start = q.TimeRange.From.UnixNano() / int64(time.Millisecond)
tsdbQuery.End = q.TimeRange.To.UnixNano() / int64(time.Millisecond)
for _, query := range req.Queries {
metric := s.buildMetric(query)
2016-11-02 02:51:34 -05:00
tsdbQuery.Queries = append(tsdbQuery.Queries, metric)
2016-10-14 03:15:30 -05:00
}
// TODO: Don't use global variable
if setting.Env == setting.Dev {
logger.Debug("OpenTsdb request", "params", tsdbQuery)
2016-10-14 03:15:30 -05:00
}
dsInfo, err := s.getDSInfo(ctx, req.PluginContext)
2017-09-21 08:03:47 -05:00
if err != nil {
return nil, err
2017-09-21 08:03:47 -05:00
}
request, err := s.createRequest(ctx, logger, dsInfo, tsdbQuery)
2016-10-14 03:15:30 -05:00
if err != nil {
return &backend.QueryDataResponse{}, err
2016-10-14 03:15:30 -05:00
}
res, err := dsInfo.HTTPClient.Do(request)
2016-10-14 03:15:30 -05:00
if err != nil {
return &backend.QueryDataResponse{}, err
2016-10-14 03:15:30 -05:00
}
defer func() {
err := res.Body.Close()
if err != nil {
logger.Warn("failed to close response body", "error", err)
}
}()
result, err := s.parseResponse(logger, res, myRefID)
2016-10-14 03:15:30 -05:00
if err != nil {
return &backend.QueryDataResponse{}, err
2016-10-14 03:15:30 -05:00
}
return result, nil
}
func (s *Service) createRequest(ctx context.Context, logger log.Logger, dsInfo *datasourceInfo, data OpenTsdbQuery) (*http.Request, error) {
u, err := url.Parse(dsInfo.URL)
if err != nil {
return nil, err
}
2016-10-14 03:15:30 -05:00
u.Path = path.Join(u.Path, "api/query")
queryParams := u.Query()
queryParams.Set("arrays", "true")
u.RawQuery = queryParams.Encode()
2016-10-14 03:15:30 -05:00
postData, err := json.Marshal(data)
2018-04-22 13:51:58 -05:00
if err != nil {
logger.Info("Failed marshaling data", "error", err)
return nil, fmt.Errorf("failed to create request: %w", err)
2018-04-22 13:51:58 -05:00
}
2016-10-14 03:15:30 -05:00
req, err := http.NewRequestWithContext(ctx, http.MethodPost, u.String(), strings.NewReader(string(postData)))
2016-10-14 03:15:30 -05:00
if err != nil {
logger.Info("Failed to create request", "error", err)
return nil, fmt.Errorf("failed to create request: %w", err)
2016-10-14 03:15:30 -05:00
}
req.Header.Set("Content-Type", "application/json")
return req, nil
}
// parseResponse converts an OpenTSDB /api/query HTTP response into a
// backend.QueryDataResponse. Every result frame is attached to the single
// refID myRefID; the body is expected to be a JSON array of OpenTsdbResponse
// objects whose datapoints are [timestamp, value] arrays (the "arrays=true"
// parameter set in createRequest).
func (s *Service) parseResponse(logger log.Logger, res *http.Response, myRefID string) (*backend.QueryDataResponse, error) {
	resp := backend.NewQueryDataResponse()

	body, err := io.ReadAll(res.Body)
	if err != nil {
		return nil, err
	}
	// NOTE(review): the caller (QueryData) also defers a close of this body,
	// so it may be closed twice — harmless for http bodies, but worth tidying.
	defer func() {
		if err := res.Body.Close(); err != nil {
			logger.Warn("Failed to close response body", "err", err)
		}
	}()

	// Treat any non-2xx status as a failed request, logging the raw body to
	// aid debugging.
	if res.StatusCode/100 != 2 {
		logger.Info("Request failed", "status", res.Status, "body", string(body))
		return nil, fmt.Errorf("request failed, status: %s", res.Status)
	}

	var responseData []OpenTsdbResponse
	err = json.Unmarshal(body, &responseData)
	if err != nil {
		logger.Info("Failed to unmarshal opentsdb response", "error", err, "status", res.Status, "body", string(body))
		return nil, err
	}

	frames := data.Frames{}
	for _, val := range responseData {
		// OpenTSDB tags become labels on the value field.
		labels := data.Labels{}
		for label, value := range val.Tags {
			labels[label] = value
		}

		// One time/value frame per returned metric, in the
		// time-series-multi frame format.
		frame := data.NewFrameOfFieldTypes(val.Metric, len(val.DataPoints), data.FieldTypeTime, data.FieldTypeFloat64)
		frame.Meta = &data.FrameMeta{Type: data.FrameTypeTimeSeriesMulti, TypeVersion: data.FrameTypeVersion{0, 1}}
		frame.RefID = myRefID
		timeField := frame.Fields[0]
		timeField.Name = data.TimeSeriesTimeFieldName
		dataField := frame.Fields[1]
		dataField.Name = "value"
		dataField.Labels = labels

		points := val.DataPoints
		for i, point := range points {
			// point[0] is a unix timestamp in seconds, point[1] the value.
			frame.SetRow(i, time.Unix(int64(point[0]), 0).UTC(), point[1])
		}
		frames = append(frames, frame)
	}

	// All frames are grouped under the single refID supplied by the caller.
	result := resp.Responses[myRefID]
	result.Frames = frames
	resp.Responses[myRefID] = result

	return resp, nil
}
2016-10-17 16:50:53 -05:00
func (s *Service) buildMetric(query backend.DataQuery) map[string]any {
metric := make(map[string]any)
2016-10-17 16:50:53 -05:00
model, err := simplejson.NewJson(query.JSON)
if err != nil {
return nil
}
2016-11-02 02:51:34 -05:00
// Setting metric and aggregator
metric["metric"] = model.Get("metric").MustString()
metric["aggregator"] = model.Get("aggregator").MustString()
2016-10-17 16:50:53 -05:00
2016-11-02 02:51:34 -05:00
// Setting downsampling options
disableDownsampling := model.Get("disableDownsampling").MustBool()
2016-11-02 02:51:34 -05:00
if !disableDownsampling {
downsampleInterval := model.Get("downsampleInterval").MustString()
2016-11-02 02:51:34 -05:00
if downsampleInterval == "" {
downsampleInterval = "1m" // default value for blank
2016-11-02 02:51:34 -05:00
}
downsample := downsampleInterval + "-" + model.Get("downsampleAggregator").MustString()
if model.Get("downsampleFillPolicy").MustString() != "none" {
metric["downsample"] = downsample + "-" + model.Get("downsampleFillPolicy").MustString()
2016-11-02 02:51:34 -05:00
} else {
metric["downsample"] = downsample
2016-10-17 16:50:53 -05:00
}
2016-11-02 02:51:34 -05:00
}
2016-10-17 16:50:53 -05:00
2016-11-02 02:51:34 -05:00
// Setting rate options
if model.Get("shouldComputeRate").MustBool() {
2016-11-02 02:51:34 -05:00
metric["rate"] = true
rateOptions := make(map[string]any)
rateOptions["counter"] = model.Get("isCounter").MustBool()
2016-10-17 16:50:53 -05:00
counterMax, counterMaxCheck := model.CheckGet("counterMax")
2016-11-02 02:51:34 -05:00
if counterMaxCheck {
rateOptions["counterMax"] = counterMax.MustFloat64()
2016-10-17 16:50:53 -05:00
}
resetValue, resetValueCheck := model.CheckGet("counterResetValue")
2016-11-02 02:51:34 -05:00
if resetValueCheck {
rateOptions["resetValue"] = resetValue.MustFloat64()
2016-10-17 16:50:53 -05:00
}
if !counterMaxCheck && (!resetValueCheck || resetValue.MustFloat64() == 0) {
rateOptions["dropResets"] = true
}
2016-11-02 02:51:34 -05:00
metric["rateOptions"] = rateOptions
}
// Setting tags
tags, tagsCheck := model.CheckGet("tags")
2016-11-02 02:51:34 -05:00
if tagsCheck && len(tags.MustMap()) > 0 {
metric["tags"] = tags.MustMap()
}
// Setting filters
filters, filtersCheck := model.CheckGet("filters")
2016-11-02 02:51:34 -05:00
if filtersCheck && len(filters.MustArray()) > 0 {
metric["filters"] = filters.MustArray()
}
2016-10-17 16:50:53 -05:00
2016-11-02 02:51:34 -05:00
return metric
2016-10-17 16:50:53 -05:00
}
// getDSInfo resolves the datasourceInfo instance for the given plugin context
// via the instance manager, creating it on first use.
func (s *Service) getDSInfo(ctx context.Context, pluginCtx backend.PluginContext) (*datasourceInfo, error) {
	i, err := s.im.Get(ctx, pluginCtx)
	if err != nil {
		return nil, err
	}

	instance, ok := i.(*datasourceInfo)
	if !ok {
		// Fix: corrected typo "datsource" in the error message.
		return nil, fmt.Errorf("failed to cast datasource info")
	}

	return instance, nil
}