2016-09-30 00:35:48 -05:00
|
|
|
package opentsdb
|
|
|
|
|
|
|
|
import (
	"context"
	"encoding/json"
	"fmt"
	"io/ioutil"
	"net/http"
	"net/url"
	"path"
	"sort"
	"strconv"
	"strings"

	"golang.org/x/net/context/ctxhttp"

	"github.com/grafana/grafana/pkg/components/null"
	"github.com/grafana/grafana/pkg/infra/log"
	"github.com/grafana/grafana/pkg/models"
	"github.com/grafana/grafana/pkg/setting"
	"github.com/grafana/grafana/pkg/tsdb"
)
|
|
|
|
|
|
|
|
// OpenTsdbExecutor is a stateless tsdb.TsdbQueryEndpoint implementation
// that runs Grafana data source queries against an OpenTSDB instance.
type OpenTsdbExecutor struct {
}
|
|
|
|
|
2017-09-21 03:44:25 -05:00
|
|
|
func NewOpenTsdbExecutor(datasource *models.DataSource) (tsdb.TsdbQueryEndpoint, error) {
|
2018-02-19 01:26:16 -06:00
|
|
|
return &OpenTsdbExecutor{}, nil
|
2016-09-30 00:35:48 -05:00
|
|
|
}
|
|
|
|
|
|
|
|
var (
	// plog is the package-level logger for the OpenTSDB backend,
	// initialized in init().
	plog log.Logger
)
|
|
|
|
|
|
|
|
// init wires up the package logger and registers this executor so the tsdb
// layer routes "opentsdb" datasource queries here.
func init() {
	plog = log.New("tsdb.opentsdb")
	tsdb.RegisterTsdbQueryEndpoint("opentsdb", NewOpenTsdbExecutor)
}
|
|
|
|
|
2017-09-21 11:04:06 -05:00
|
|
|
func (e *OpenTsdbExecutor) Query(ctx context.Context, dsInfo *models.DataSource, queryContext *tsdb.TsdbQuery) (*tsdb.Response, error) {
|
|
|
|
result := &tsdb.Response{}
|
2016-10-08 01:58:27 -05:00
|
|
|
|
2016-10-14 03:15:30 -05:00
|
|
|
var tsdbQuery OpenTsdbQuery
|
|
|
|
|
|
|
|
tsdbQuery.Start = queryContext.TimeRange.GetFromAsMsEpoch()
|
|
|
|
tsdbQuery.End = queryContext.TimeRange.GetToAsMsEpoch()
|
2016-10-16 06:12:13 -05:00
|
|
|
|
2017-09-20 11:56:33 -05:00
|
|
|
for _, query := range queryContext.Queries {
|
2016-11-02 02:51:34 -05:00
|
|
|
metric := e.buildMetric(query)
|
|
|
|
tsdbQuery.Queries = append(tsdbQuery.Queries, metric)
|
2016-10-14 03:15:30 -05:00
|
|
|
}
|
|
|
|
|
|
|
|
if setting.Env == setting.DEV {
|
|
|
|
plog.Debug("OpenTsdb request", "params", tsdbQuery)
|
|
|
|
}
|
|
|
|
|
2017-09-21 08:03:47 -05:00
|
|
|
req, err := e.createRequest(dsInfo, tsdbQuery)
|
|
|
|
if err != nil {
|
2017-09-21 11:04:06 -05:00
|
|
|
return nil, err
|
2017-09-21 08:03:47 -05:00
|
|
|
}
|
|
|
|
|
|
|
|
httpClient, err := dsInfo.GetHttpClient()
|
2016-10-14 03:15:30 -05:00
|
|
|
if err != nil {
|
2017-09-21 11:04:06 -05:00
|
|
|
return nil, err
|
2016-10-14 03:15:30 -05:00
|
|
|
}
|
|
|
|
|
2017-09-21 08:03:47 -05:00
|
|
|
res, err := ctxhttp.Do(ctx, httpClient, req)
|
2016-10-14 03:15:30 -05:00
|
|
|
if err != nil {
|
2017-09-21 11:04:06 -05:00
|
|
|
return nil, err
|
2016-10-14 03:15:30 -05:00
|
|
|
}
|
|
|
|
|
|
|
|
queryResult, err := e.parseResponse(tsdbQuery, res)
|
|
|
|
if err != nil {
|
2017-09-21 11:04:06 -05:00
|
|
|
return nil, err
|
2016-10-14 03:15:30 -05:00
|
|
|
}
|
|
|
|
|
2017-09-21 11:04:06 -05:00
|
|
|
result.Results = queryResult
|
|
|
|
return result, nil
|
2016-09-30 00:35:48 -05:00
|
|
|
}
|
|
|
|
|
2017-09-21 08:03:47 -05:00
|
|
|
func (e *OpenTsdbExecutor) createRequest(dsInfo *models.DataSource, data OpenTsdbQuery) (*http.Request, error) {
|
|
|
|
u, _ := url.Parse(dsInfo.Url)
|
2016-10-14 03:15:30 -05:00
|
|
|
u.Path = path.Join(u.Path, "api/query")
|
|
|
|
|
|
|
|
postData, err := json.Marshal(data)
|
2018-04-22 13:51:58 -05:00
|
|
|
if err != nil {
|
2018-12-04 16:29:34 -06:00
|
|
|
plog.Info("Failed marshaling data", "error", err)
|
2018-04-22 13:51:58 -05:00
|
|
|
return nil, fmt.Errorf("Failed to create request. error: %v", err)
|
|
|
|
}
|
2016-10-14 03:15:30 -05:00
|
|
|
|
|
|
|
req, err := http.NewRequest(http.MethodPost, u.String(), strings.NewReader(string(postData)))
|
|
|
|
if err != nil {
|
|
|
|
plog.Info("Failed to create request", "error", err)
|
|
|
|
return nil, fmt.Errorf("Failed to create request. error: %v", err)
|
|
|
|
}
|
|
|
|
|
|
|
|
req.Header.Set("Content-Type", "application/json")
|
2017-09-21 08:03:47 -05:00
|
|
|
if dsInfo.BasicAuth {
|
2019-04-15 04:11:17 -05:00
|
|
|
req.SetBasicAuth(dsInfo.BasicAuthUser, dsInfo.DecryptedBasicAuthPassword())
|
2016-10-14 03:15:30 -05:00
|
|
|
}
|
2016-11-02 02:51:34 -05:00
|
|
|
|
2016-10-14 03:15:30 -05:00
|
|
|
return req, err
|
2016-09-30 00:35:48 -05:00
|
|
|
}
|
2016-10-08 01:58:27 -05:00
|
|
|
|
|
|
|
func (e *OpenTsdbExecutor) parseResponse(query OpenTsdbQuery, res *http.Response) (map[string]*tsdb.QueryResult, error) {
|
2016-10-14 03:15:30 -05:00
|
|
|
|
|
|
|
queryResults := make(map[string]*tsdb.QueryResult)
|
|
|
|
queryRes := tsdb.NewQueryResult()
|
|
|
|
|
|
|
|
body, err := ioutil.ReadAll(res.Body)
|
|
|
|
defer res.Body.Close()
|
|
|
|
if err != nil {
|
|
|
|
return nil, err
|
|
|
|
}
|
|
|
|
|
|
|
|
if res.StatusCode/100 != 2 {
|
|
|
|
plog.Info("Request failed", "status", res.Status, "body", string(body))
|
|
|
|
return nil, fmt.Errorf("Request failed status: %v", res.Status)
|
|
|
|
}
|
|
|
|
|
|
|
|
var data []OpenTsdbResponse
|
|
|
|
err = json.Unmarshal(body, &data)
|
|
|
|
if err != nil {
|
|
|
|
plog.Info("Failed to unmarshal opentsdb response", "error", err, "status", res.Status, "body", string(body))
|
|
|
|
return nil, err
|
|
|
|
}
|
|
|
|
|
|
|
|
for _, val := range data {
|
|
|
|
series := tsdb.TimeSeries{
|
|
|
|
Name: val.Metric,
|
|
|
|
}
|
|
|
|
|
|
|
|
for timeString, value := range val.DataPoints {
|
|
|
|
timestamp, err := strconv.ParseFloat(timeString, 64)
|
|
|
|
if err != nil {
|
|
|
|
plog.Info("Failed to unmarshal opentsdb timestamp", "timestamp", timeString)
|
|
|
|
return nil, err
|
|
|
|
}
|
|
|
|
series.Points = append(series.Points, tsdb.NewTimePoint(null.FloatFrom(value), timestamp))
|
|
|
|
}
|
|
|
|
|
|
|
|
queryRes.Series = append(queryRes.Series, &series)
|
|
|
|
}
|
|
|
|
|
|
|
|
queryResults["A"] = queryRes
|
|
|
|
return queryResults, nil
|
2016-09-30 00:35:48 -05:00
|
|
|
}
|
2016-10-17 16:50:53 -05:00
|
|
|
|
2016-11-02 02:51:34 -05:00
|
|
|
func (e *OpenTsdbExecutor) buildMetric(query *tsdb.Query) map[string]interface{} {
|
2016-10-17 16:50:53 -05:00
|
|
|
|
|
|
|
metric := make(map[string]interface{})
|
|
|
|
|
2016-11-02 02:51:34 -05:00
|
|
|
// Setting metric and aggregator
|
|
|
|
metric["metric"] = query.Model.Get("metric").MustString()
|
|
|
|
metric["aggregator"] = query.Model.Get("aggregator").MustString()
|
2016-10-17 16:50:53 -05:00
|
|
|
|
2016-11-02 02:51:34 -05:00
|
|
|
// Setting downsampling options
|
|
|
|
disableDownsampling := query.Model.Get("disableDownsampling").MustBool()
|
|
|
|
if !disableDownsampling {
|
|
|
|
downsampleInterval := query.Model.Get("downsampleInterval").MustString()
|
|
|
|
if downsampleInterval == "" {
|
|
|
|
downsampleInterval = "1m" //default value for blank
|
|
|
|
}
|
|
|
|
downsample := downsampleInterval + "-" + query.Model.Get("downsampleAggregator").MustString()
|
|
|
|
if query.Model.Get("downsampleFillPolicy").MustString() != "none" {
|
|
|
|
metric["downsample"] = downsample + "-" + query.Model.Get("downsampleFillPolicy").MustString()
|
|
|
|
} else {
|
|
|
|
metric["downsample"] = downsample
|
2016-10-17 16:50:53 -05:00
|
|
|
}
|
2016-11-02 02:51:34 -05:00
|
|
|
}
|
2016-10-17 16:50:53 -05:00
|
|
|
|
2016-11-02 02:51:34 -05:00
|
|
|
// Setting rate options
|
|
|
|
if query.Model.Get("shouldComputeRate").MustBool() {
|
2016-10-17 16:50:53 -05:00
|
|
|
|
2016-11-02 02:51:34 -05:00
|
|
|
metric["rate"] = true
|
|
|
|
rateOptions := make(map[string]interface{})
|
|
|
|
rateOptions["counter"] = query.Model.Get("isCounter").MustBool()
|
2016-10-17 16:50:53 -05:00
|
|
|
|
2016-11-02 02:51:34 -05:00
|
|
|
counterMax, counterMaxCheck := query.Model.CheckGet("counterMax")
|
|
|
|
if counterMaxCheck {
|
|
|
|
rateOptions["counterMax"] = counterMax.MustFloat64()
|
2016-10-17 16:50:53 -05:00
|
|
|
}
|
|
|
|
|
2016-11-02 02:51:34 -05:00
|
|
|
resetValue, resetValueCheck := query.Model.CheckGet("counterResetValue")
|
|
|
|
if resetValueCheck {
|
|
|
|
rateOptions["resetValue"] = resetValue.MustFloat64()
|
2016-10-17 16:50:53 -05:00
|
|
|
}
|
|
|
|
|
2017-03-06 09:19:37 -06:00
|
|
|
if !counterMaxCheck && (!resetValueCheck || resetValue.MustFloat64() == 0) {
|
2017-05-10 08:46:19 -05:00
|
|
|
rateOptions["dropResets"] = true
|
2017-03-06 09:19:37 -06:00
|
|
|
}
|
|
|
|
|
2016-11-02 02:51:34 -05:00
|
|
|
metric["rateOptions"] = rateOptions
|
|
|
|
}
|
|
|
|
|
|
|
|
// Setting tags
|
|
|
|
tags, tagsCheck := query.Model.CheckGet("tags")
|
|
|
|
if tagsCheck && len(tags.MustMap()) > 0 {
|
|
|
|
metric["tags"] = tags.MustMap()
|
|
|
|
}
|
|
|
|
|
|
|
|
// Setting filters
|
|
|
|
filters, filtersCheck := query.Model.CheckGet("filters")
|
|
|
|
if filtersCheck && len(filters.MustArray()) > 0 {
|
|
|
|
metric["filters"] = filters.MustArray()
|
|
|
|
}
|
2016-10-17 16:50:53 -05:00
|
|
|
|
2016-11-02 02:51:34 -05:00
|
|
|
return metric
|
2016-10-17 16:50:53 -05:00
|
|
|
|
|
|
|
}
|