Mirror of https://github.com/grafana/grafana.git, synced 2025-02-09 23:16:16 -06:00
InfluxDB: backend migration (run query in explore) (#43352)
* InfluxDB backend migration
* Multiple queries and more
* Added types
* Updated preferredVisualisationType
* Updated model parser test to include limit, slimit, orderByTime
* Added test for building query with limit, slimit
* Added test for building query with limit, slimit, orderByTime and puts them in the correct order
* Add test: Influxdb response parser should parse two responses with different refIDs
* Moved methods to response parser
* Add test to ensure ExecutedQueryString is populated
* Move functions out of response parser class
* Test for getSelectedParams
* Merge cases
* Change to const
* Test get table columns correctly
* Removed unnecessary fields
* Test get table rows correctly
* Removed getSeries function
* Added test for preferredVisualisationType
* Added test for executedQueryString
* Modified response parser
* Removed test
* Improvements
* Tests
* Review changes
* Feature flag rename and code gen
This commit is contained in: commit 10232c7857 (parent 7ef43fb959)
@@ -33,6 +33,7 @@ export interface FeatureToggles {
   fullRangeLogsVolume?: boolean;
   accesscontrol?: boolean;
   prometheus_azure_auth?: boolean;
+  influxdbBackendMigration?: boolean;
   newNavigation?: boolean;
   showFeatureFlagsInUI?: boolean;
   disable_http_request_histogram?: boolean;
@@ -94,6 +94,12 @@ var (
 		Description: "Use azure authentication for prometheus datasource",
 		State:       FeatureStateBeta,
 	},
+	{
+		Name:         "influxdbBackendMigration",
+		Description:  "Query InfluxDB InfluxQL without the proxy",
+		State:        FeatureStateAlpha,
+		FrontendOnly: true,
+	},
 	{
 		Name:        "newNavigation",
 		Description: "Try the next gen navigation model",
@@ -71,6 +71,10 @@ const (
 	// Use azure authentication for prometheus datasource
 	FlagPrometheusAzureAuth = "prometheus_azure_auth"

+	// FlagInfluxdbBackendMigration
+	// Query InfluxDB InfluxQL without the proxy
+	FlagInfluxdbBackendMigration = "influxdbBackendMigration"
+
 	// FlagNewNavigation
 	// Try the next gen navigation model
 	FlagNewNavigation = "newNavigation"
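A practical aside, not part of the diff: a toggle registered with State: FeatureStateAlpha, like influxdbBackendMigration above, is off by default, so the new code path only activates once the flag is switched on. A minimal sketch, assuming Grafana's standard [feature_toggles] mechanism in custom.ini (this configuration is not from the commit itself):

    ; custom.ini -- assumed standard feature-toggle configuration, not part of this commit
    [feature_toggles]
    enable = influxdbBackendMigration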
@@ -94,24 +94,31 @@ func (s *Service) QueryData(ctx context.Context, req *backend.QueryDataRequest)

 	s.glog.Debug("Making a non-Flux type query")

-	// NOTE: the following path is currently only called from alerting queries
-	// In dashboards, the request runs through proxy and are managed in the frontend
+	var allRawQueries string
+	var queries []Query

-	query, err := s.getQuery(dsInfo, req)
-	if err != nil {
-		return &backend.QueryDataResponse{}, err
-	}
+	for _, reqQuery := range req.Queries {
+		query, err := s.queryParser.Parse(reqQuery)
+		if err != nil {
+			return &backend.QueryDataResponse{}, err
+		}

-	rawQuery, err := query.Build(req)
-	if err != nil {
-		return &backend.QueryDataResponse{}, err
+		rawQuery, err := query.Build(req)
+		if err != nil {
+			return &backend.QueryDataResponse{}, err
+		}
+
+		allRawQueries = allRawQueries + rawQuery + ";"
+		query.RefID = reqQuery.RefID
+		query.RawQuery = rawQuery
+		queries = append(queries, *query)
 	}

 	if setting.Env == setting.Dev {
-		s.glog.Debug("Influxdb query", "raw query", rawQuery)
+		s.glog.Debug("Influxdb query", "raw query", allRawQueries)
 	}

-	request, err := s.createRequest(ctx, dsInfo, rawQuery)
+	request, err := s.createRequest(ctx, dsInfo, allRawQueries)
 	if err != nil {
 		return &backend.QueryDataResponse{}, err
 	}
@@ -129,25 +136,11 @@ func (s *Service) QueryData(ctx context.Context, req *backend.QueryDataRequest)
 		return &backend.QueryDataResponse{}, fmt.Errorf("InfluxDB returned error status: %s", res.Status)
 	}

-	resp := s.responseParser.Parse(res.Body, query)
+	resp := s.responseParser.Parse(res.Body, queries)

 	return resp, nil
 }

-func (s *Service) getQuery(dsInfo *models.DatasourceInfo, query *backend.QueryDataRequest) (*Query, error) {
-	queryCount := len(query.Queries)
-
-	// The model supports multiple queries, but right now this is only used from
-	// alerting so we only needed to support batch executing 1 query at a time.
-	if queryCount != 1 {
-		return nil, fmt.Errorf("query request should contain exactly 1 query, it contains: %d", queryCount)
-	}
-
-	q := query.Queries[0]
-
-	return s.queryParser.Parse(q)
-}
-
 func (s *Service) createRequest(ctx context.Context, dsInfo *models.DatasourceInfo, query string) (*http.Request, error) {
 	u, err := url.Parse(dsInfo.URL)
 	if err != nil {
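Worth spelling out why the batching above is safe: QueryData joins every built statement into allRawQueries with ";" separators and issues a single request, and InfluxQL's /query endpoint answers with one entry in its results array per statement, in order. That positional guarantee is what lets the response parser (further down) pair response.Results[i] with queries[i].RefID. A self-contained sketch of the idea, with illustrative names that are not from the commit:

    package main

    import "fmt"

    // Illustrative sketch of the batching scheme used by QueryData above:
    // statements are concatenated with ";" and results map back by position.
    func main() {
    	built := []string{
    		`SELECT mean("value") FROM "cpu"`,
    		`SELECT max("value") FROM "mem"`,
    	}
    	refIDs := []string{"A", "B"}

    	var allRawQueries string
    	for _, q := range built {
    		allRawQueries += q + ";"
    	}
    	fmt.Println(allRawQueries) // both statements travel in one request

    	// InfluxDB returns results[0] for the first statement, results[1]
    	// for the second, so each result can be keyed by its query's refID.
    	for i, id := range refIDs {
    		fmt.Printf("results[%d] -> refId %s\n", i, id)
    	}
    }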
@@ -22,6 +22,9 @@ func (qp *InfluxdbQueryParser) Parse(query backend.DataQuery) (*Query, error) {
 	useRawQuery := model.Get("rawQuery").MustBool(false)
 	alias := model.Get("alias").MustString("")
 	tz := model.Get("tz").MustString("")
+	limit := model.Get("limit").MustString("")
+	slimit := model.Get("slimit").MustString("")
+	orderByTime := model.Get("orderByTime").MustString("")

 	measurement := model.Get("measurement").MustString("")
@@ -60,6 +63,9 @@ func (qp *InfluxdbQueryParser) Parse(query backend.DataQuery) (*Query, error) {
 		Alias:       alias,
 		UseRawQuery: useRawQuery,
 		Tz:          tz,
+		Limit:       limit,
+		Slimit:      slimit,
+		OrderByTime: orderByTime,
 	}, nil
 }
@@ -36,6 +36,9 @@ func TestInfluxdbQueryParser_Parse(t *testing.T) {
 			],
 			"measurement": "logins.count",
 			"tz": "Europe/Paris",
+			"limit": "1",
+			"slimit": "1",
+			"orderByTime": "ASC",
 			"policy": "default",
 			"refId": "B",
 			"resultFormat": "time_series",
@@ -113,6 +116,9 @@ func TestInfluxdbQueryParser_Parse(t *testing.T) {
 		require.Len(t, res.Selects, 3)
 		require.Len(t, res.Tags, 2)
 		require.Equal(t, "Europe/Paris", res.Tz)
+		require.Equal(t, "1", res.Limit)
+		require.Equal(t, "1", res.Slimit)
+		require.Equal(t, "ASC", res.OrderByTime)
 		require.Equal(t, time.Second*20, res.Interval)
 		require.Equal(t, "series alias", res.Alias)
 	})
@@ -13,6 +13,10 @@ type Query struct {
 	Alias       string
 	Interval    time.Duration
 	Tz          string
+	Limit       string
+	Slimit      string
+	OrderByTime string
+	RefID       string
 }

 type Tag struct {
@@ -26,6 +26,9 @@ func (query *Query) Build(queryContext *backend.QueryDataRequest) (string, error
 		res += query.renderWhereClause()
 		res += query.renderTimeFilter(queryContext)
 		res += query.renderGroupBy(queryContext)
+		res += query.renderOrderByTime()
+		res += query.renderLimit()
+		res += query.renderSlimit()
 		res += query.renderTz()
 	}
@@ -151,6 +154,14 @@ func (query *Query) renderGroupBy(queryContext *backend.QueryDataRequest) string
 	return groupBy
 }

+func (query *Query) renderOrderByTime() string {
+	orderByTime := query.OrderByTime
+	if orderByTime == "" {
+		return ""
+	}
+	return fmt.Sprintf(" ORDER BY time %s", orderByTime)
+}
+
 func (query *Query) renderTz() string {
 	tz := query.Tz
 	if tz == "" {
@@ -159,6 +170,22 @@ func (query *Query) renderTz() string {
 	return fmt.Sprintf(" tz('%s')", tz)
 }

+func (query *Query) renderLimit() string {
+	limit := query.Limit
+	if limit == "" {
+		return ""
+	}
+	return fmt.Sprintf(" limit %s", limit)
+}
+
+func (query *Query) renderSlimit() string {
+	slimit := query.Slimit
+	if slimit == "" {
+		return ""
+	}
+	return fmt.Sprintf(" slimit %s", slimit)
+}
+
 func epochMStoInfluxTime(tr *backend.TimeRange) (string, string) {
 	from := tr.From.UnixNano() / int64(time.Millisecond)
 	to := tr.To.UnixNano() / int64(time.Millisecond)
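Since Build appends the renderers in a fixed sequence, the optional clauses always come out in the order ORDER BY time, then limit, then slimit, then tz(...), which is exactly what the builder test in the next hunk asserts. A standalone sketch of that ordering (plain Go, not the plugin's actual types):

    package main

    import "fmt"

    // Standalone sketch of the clause ordering enforced by Query.Build above.
    func buildSuffix(orderByTime, limit, slimit, tz string) string {
    	res := ""
    	if orderByTime != "" {
    		res += fmt.Sprintf(" ORDER BY time %s", orderByTime)
    	}
    	if limit != "" {
    		res += fmt.Sprintf(" limit %s", limit)
    	}
    	if slimit != "" {
    		res += fmt.Sprintf(" slimit %s", slimit)
    	}
    	if tz != "" {
    		res += fmt.Sprintf(" tz('%s')", tz)
    	}
    	return res
    }

    func main() {
    	base := `SELECT mean("value") FROM "cpu" WHERE ... GROUP BY time(5s)`
    	fmt.Println(base + buildSuffix("ASC", "1", "1", "Europe/Paris"))
    	// ... GROUP BY time(5s) ORDER BY time ASC limit 1 slimit 1 tz('Europe/Paris')
    }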
@@ -66,6 +66,23 @@ func TestInfluxdbQueryBuilder(t *testing.T) {
 		require.Equal(t, rawQuery, `SELECT mean("value") FROM "cpu" WHERE time > 1596240000000ms and time < 1596240300000ms GROUP BY time(5s) tz('Europe/Paris')`)
 	})

+	t.Run("can build query with tz, limit, slimit, orderByTime and puts them in the correct order", func(t *testing.T) {
+		query := &Query{
+			Selects:     []*Select{{*qp1, *qp2}},
+			Measurement: "cpu",
+			GroupBy:     []*QueryPart{groupBy1},
+			Tz:          "Europe/Paris",
+			Limit:       "1",
+			Slimit:      "1",
+			OrderByTime: "ASC",
+			Interval:    time.Second * 5,
+		}
+
+		rawQuery, err := query.Build(queryContext)
+		require.NoError(t, err)
+		require.Equal(t, rawQuery, `SELECT mean("value") FROM "cpu" WHERE time > 1596240000000ms and time < 1596240300000ms GROUP BY time(5s) ORDER BY time ASC limit 1 slimit 1 tz('Europe/Paris')`)
+	})
+
 	t.Run("can build query with group bys", func(t *testing.T) {
 		query := &Query{
 			Selects: []*Select{{*qp1, *qp2}},
@@ -19,32 +19,27 @@ var (
 	legendFormat = regexp.MustCompile(`\[\[([\@\/\w-]+)(\.[\@\/\w-]+)*\]\]*|\$(\s*([\@\w-]+?))*`)
 )

-func (rp *ResponseParser) Parse(buf io.ReadCloser, query *Query) *backend.QueryDataResponse {
+func (rp *ResponseParser) Parse(buf io.ReadCloser, queries []Query) *backend.QueryDataResponse {
 	resp := backend.NewQueryDataResponse()
-	queryRes := backend.DataResponse{}

 	response, jsonErr := parseJSON(buf)
 	if jsonErr != nil {
-		queryRes.Error = jsonErr
-		resp.Responses["A"] = queryRes
+		resp.Responses["A"] = backend.DataResponse{Error: jsonErr}
 		return resp
 	}

 	if response.Error != "" {
-		queryRes.Error = fmt.Errorf(response.Error)
-		resp.Responses["A"] = queryRes
+		resp.Responses["A"] = backend.DataResponse{Error: fmt.Errorf(response.Error)}
 		return resp
 	}

-	frames := data.Frames{}
-	for _, result := range response.Results {
-		frames = append(frames, transformRows(result.Series, query)...)
+	for i, result := range response.Results {
 		if result.Error != "" {
-			queryRes.Error = fmt.Errorf(result.Error)
+			resp.Responses[queries[i].RefID] = backend.DataResponse{Error: fmt.Errorf(result.Error)}
+		} else {
+			resp.Responses[queries[i].RefID] = backend.DataResponse{Frames: transformRows(result.Series, queries[i])}
 		}
 	}
-	queryRes.Frames = frames
-	resp.Responses["A"] = queryRes

 	return resp
 }
@@ -58,7 +53,7 @@ func parseJSON(buf io.ReadCloser) (Response, error) {
 	return response, err
 }

-func transformRows(rows []Row, query *Query) data.Frames {
+func transformRows(rows []Row, query Query) data.Frames {
 	frames := data.Frames{}
 	for _, row := range rows {
 		for columnIndex, column := range row.Columns {
@@ -86,14 +81,23 @@ func transformRows(rows []Row, query *Query) data.Frames {
 			// set a nice name on the value-field
 			valueField.SetConfig(&data.FieldConfig{DisplayNameFromDS: name})

-			frames = append(frames, data.NewFrame(name, timeField, valueField))
+			frames = append(frames, newDataFrame(name, query.RawQuery, timeField, valueField))
 		}
 	}

 	return frames
 }

-func formatFrameName(row Row, column string, query *Query) string {
+func newDataFrame(name string, queryString string, timeField *data.Field, valueField *data.Field) *data.Frame {
+	frame := data.NewFrame(name, timeField, valueField)
+	frame.Meta = &data.FrameMeta{
+		ExecutedQueryString: queryString,
+	}
+
+	return frame
+}
+
+func formatFrameName(row Row, column string, query Query) string {
 	if query.Alias == "" {
 		return buildFrameNameFromQuery(row, column)
 	}
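newDataFrame is the piece that carries the raw InfluxQL string into each frame's metadata, which is how the frontend can later surface it (for example as executedQueryString in the table meta). A minimal sketch using the same grafana-plugin-sdk-go data package this file already imports; assumes that module is available on the import path:

    package main

    import (
    	"fmt"

    	"github.com/grafana/grafana-plugin-sdk-go/data"
    )

    // Sketch: attach the executed query string to a frame's metadata,
    // mirroring what newDataFrame does in the diff above.
    func main() {
    	timeField := data.NewField("time", nil, []int64{})
    	valueField := data.NewField("value", nil, []float64{})

    	frame := data.NewFrame("cpu.mean", timeField, valueField)
    	frame.Meta = &data.FrameMeta{ExecutedQueryString: `SELECT mean("value") FROM "cpu"`}

    	fmt.Println(frame.Meta.ExecutedQueryString)
    }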
@@ -10,6 +10,7 @@ import (

 	"github.com/google/go-cmp/cmp"
 	"github.com/grafana/grafana-plugin-sdk-go/data"
+	"github.com/stretchr/testify/assert"
 	"github.com/stretchr/testify/require"
 	"github.com/xorcare/pointer"
 )
@@ -18,6 +19,14 @@ func prepare(text string) io.ReadCloser {
 	return ioutil.NopCloser(strings.NewReader(text))
 }

+func addQueryToQueries(query Query) []Query {
+	var queries []Query
+	query.RefID = "A"
+	query.RawQuery = "Test raw query"
+	queries = append(queries, query)
+	return queries
+}
+
 func TestInfluxdbResponseParser(t *testing.T) {
 	t.Run("Influxdb response parser should handle invalid JSON", func(t *testing.T) {
 		parser := &ResponseParser{}
@@ -26,7 +35,7 @@ func TestInfluxdbResponseParser(t *testing.T) {

 		query := &Query{}

-		result := parser.Parse(prepare(response), query)
+		result := parser.Parse(prepare(response), addQueryToQueries(*query))

 		require.Nil(t, result.Responses["A"].Frames)
 		require.Error(t, result.Responses["A"].Error)
@@ -72,8 +81,9 @@ func TestInfluxdbResponseParser(t *testing.T) {
 			}),
 			newField,
 		)
+		testFrame.Meta = &data.FrameMeta{ExecutedQueryString: "Test raw query"}

-		result := parser.Parse(prepare(response), query)
+		result := parser.Parse(prepare(response), addQueryToQueries(*query))

 		frame := result.Responses["A"]
 		if diff := cmp.Diff(testFrame, frame.Frames[0], data.FrameTestCompareOptions()...); diff != "" {
@@ -81,6 +91,61 @@ func TestInfluxdbResponseParser(t *testing.T) {
 		}
 	})

+	t.Run("Influxdb response parser should parse two responses with different refIDs", func(t *testing.T) {
+		parser := &ResponseParser{}
+
+		response := `
+		{
+			"results": [
+				{
+					"series": [{}]
+				},
+				{
+					"series": [{}]
+				}
+			]
+		}
+		`
+
+		query := &Query{}
+		var queries = addQueryToQueries(*query)
+		queryB := &Query{}
+		queryB.RefID = "B"
+		queries = append(queries, *queryB)
+		result := parser.Parse(prepare(response), queries)
+
+		assert.Len(t, result.Responses, 2)
+		assert.Contains(t, result.Responses, "A")
+		assert.Contains(t, result.Responses, "B")
+		assert.NotContains(t, result.Responses, "C")
+	})
+
+	t.Run("Influxdb response parser populates the RawQuery in the response meta ExecutedQueryString", func(t *testing.T) {
+		parser := &ResponseParser{}
+
+		response := `
+		{
+			"results": [
+				{
+					"series": [
+						{
+							"name": "cpu",
+							"columns": ["time","mean"]
+						}
+					]
+				}
+			]
+		}
+		`
+
+		query := &Query{}
+		query.RawQuery = "Test raw query"
+		result := parser.Parse(prepare(response), addQueryToQueries(*query))
+
+		frame := result.Responses["A"]
+		assert.Equal(t, frame.Frames[0].Meta.ExecutedQueryString, "Test raw query")
+	})
+
 	t.Run("Influxdb response parser with invalid value-format", func(t *testing.T) {
 		parser := &ResponseParser{}
@@ -119,8 +184,9 @@ func TestInfluxdbResponseParser(t *testing.T) {
 			}),
 			newField,
 		)
+		testFrame.Meta = &data.FrameMeta{ExecutedQueryString: "Test raw query"}

-		result := parser.Parse(prepare(response), query)
+		result := parser.Parse(prepare(response), addQueryToQueries(*query))

 		frame := result.Responses["A"]
 		if diff := cmp.Diff(testFrame, frame.Frames[0], data.FrameTestCompareOptions()...); diff != "" {

@@ -166,8 +232,9 @@ func TestInfluxdbResponseParser(t *testing.T) {
 			}),
 			newField,
 		)
+		testFrame.Meta = &data.FrameMeta{ExecutedQueryString: "Test raw query"}

-		result := parser.Parse(prepare(response), query)
+		result := parser.Parse(prepare(response), addQueryToQueries(*query))

 		frame := result.Responses["A"]
 		if diff := cmp.Diff(testFrame, frame.Frames[0], data.FrameTestCompareOptions()...); diff != "" {

@@ -217,7 +284,8 @@ func TestInfluxdbResponseParser(t *testing.T) {
 			}),
 			newField,
 		)
-		result := parser.Parse(prepare(response), query)
+		testFrame.Meta = &data.FrameMeta{ExecutedQueryString: "Test raw query"}
+		result := parser.Parse(prepare(response), addQueryToQueries(*query))
 		t.Run("should parse aliases", func(t *testing.T) {
 			frame := result.Responses["A"]
 			if diff := cmp.Diff(testFrame, frame.Frames[0], data.FrameTestCompareOptions()...); diff != "" {

@@ -225,7 +293,7 @@ func TestInfluxdbResponseParser(t *testing.T) {
 			}

 			query = &Query{Alias: "alias $m $measurement", Measurement: "10m"}
-			result = parser.Parse(prepare(response), query)
+			result = parser.Parse(prepare(response), addQueryToQueries(*query))

 			frame = result.Responses["A"]
 			name := "alias 10m 10m"

@@ -236,7 +304,7 @@ func TestInfluxdbResponseParser(t *testing.T) {
 			}

 			query = &Query{Alias: "alias $col", Measurement: "10m"}
-			result = parser.Parse(prepare(response), query)
+			result = parser.Parse(prepare(response), addQueryToQueries(*query))
 			frame = result.Responses["A"]
 			name = "alias mean"
 			testFrame.Name = name

@@ -256,7 +324,7 @@ func TestInfluxdbResponseParser(t *testing.T) {
 			}

 			query = &Query{Alias: "alias $tag_datacenter"}
-			result = parser.Parse(prepare(response), query)
+			result = parser.Parse(prepare(response), addQueryToQueries(*query))
 			frame = result.Responses["A"]
 			name = "alias America"
 			testFrame.Name = name

@@ -270,7 +338,7 @@ func TestInfluxdbResponseParser(t *testing.T) {
 			}

 			query = &Query{Alias: "alias $tag_datacenter/$tag_datacenter"}
-			result = parser.Parse(prepare(response), query)
+			result = parser.Parse(prepare(response), addQueryToQueries(*query))
 			frame = result.Responses["A"]
 			name = "alias America/America"
 			testFrame.Name = name

@@ -284,7 +352,7 @@ func TestInfluxdbResponseParser(t *testing.T) {
 			}

 			query = &Query{Alias: "alias [[col]]", Measurement: "10m"}
-			result = parser.Parse(prepare(response), query)
+			result = parser.Parse(prepare(response), addQueryToQueries(*query))
 			frame = result.Responses["A"]
 			name = "alias mean"
 			testFrame.Name = name

@@ -294,7 +362,7 @@ func TestInfluxdbResponseParser(t *testing.T) {
 			}

 			query = &Query{Alias: "alias $0 $1 $2 $3 $4"}
-			result = parser.Parse(prepare(response), query)
+			result = parser.Parse(prepare(response), addQueryToQueries(*query))
 			frame = result.Responses["A"]
 			name = "alias cpu upc $2 $3 $4"
 			testFrame.Name = name

@@ -304,7 +372,7 @@ func TestInfluxdbResponseParser(t *testing.T) {
 			}

 			query = &Query{Alias: "alias $1"}
-			result = parser.Parse(prepare(response), query)
+			result = parser.Parse(prepare(response), addQueryToQueries(*query))
 			frame = result.Responses["A"]
 			name = "alias upc"
 			testFrame.Name = name

@@ -314,7 +382,7 @@ func TestInfluxdbResponseParser(t *testing.T) {
 			}

 			query = &Query{Alias: "alias $5"}
-			result = parser.Parse(prepare(response), query)
+			result = parser.Parse(prepare(response), addQueryToQueries(*query))
 			frame = result.Responses["A"]
 			name = "alias $5"
 			testFrame.Name = name

@@ -324,7 +392,7 @@ func TestInfluxdbResponseParser(t *testing.T) {
 			}

 			query = &Query{Alias: "series alias"}
-			result = parser.Parse(prepare(response), query)
+			result = parser.Parse(prepare(response), addQueryToQueries(*query))
 			frame = result.Responses["A"]
 			name = "series alias"
 			testFrame.Name = name

@@ -334,7 +402,7 @@ func TestInfluxdbResponseParser(t *testing.T) {
 			}

 			query = &Query{Alias: "alias [[m]] [[measurement]]", Measurement: "10m"}
-			result = parser.Parse(prepare(response), query)
+			result = parser.Parse(prepare(response), addQueryToQueries(*query))
 			frame = result.Responses["A"]
 			name = "alias 10m 10m"
 			testFrame.Name = name

@@ -344,7 +412,7 @@ func TestInfluxdbResponseParser(t *testing.T) {
 			}

 			query = &Query{Alias: "alias [[tag_datacenter]]"}
-			result = parser.Parse(prepare(response), query)
+			result = parser.Parse(prepare(response), addQueryToQueries(*query))
 			frame = result.Responses["A"]
 			name = "alias America"
 			testFrame.Name = name

@@ -354,7 +422,7 @@ func TestInfluxdbResponseParser(t *testing.T) {
 			}

 			query = &Query{Alias: "alias [[tag_dc.region.name]]"}
-			result = parser.Parse(prepare(response), query)
+			result = parser.Parse(prepare(response), addQueryToQueries(*query))
 			frame = result.Responses["A"]
 			name = "alias Northeast"
 			testFrame.Name = name

@@ -364,7 +432,7 @@ func TestInfluxdbResponseParser(t *testing.T) {
 			}

 			query = &Query{Alias: "alias [[tag_cluster-name]]"}
-			result = parser.Parse(prepare(response), query)
+			result = parser.Parse(prepare(response), addQueryToQueries(*query))
 			frame = result.Responses["A"]
 			name = "alias Cluster"
 			testFrame.Name = name

@@ -374,7 +442,7 @@ func TestInfluxdbResponseParser(t *testing.T) {
 			}

 			query = &Query{Alias: "alias [[tag_/cluster/name/]]"}
-			result = parser.Parse(prepare(response), query)
+			result = parser.Parse(prepare(response), addQueryToQueries(*query))
 			frame = result.Responses["A"]
 			name = "alias Cluster/"
 			testFrame.Name = name

@@ -384,7 +452,7 @@ func TestInfluxdbResponseParser(t *testing.T) {
 			}

 			query = &Query{Alias: "alias [[tag_@cluster@name@]]"}
-			result = parser.Parse(prepare(response), query)
+			result = parser.Parse(prepare(response), addQueryToQueries(*query))
 			frame = result.Responses["A"]
 			name = "alias Cluster@"
 			testFrame.Name = name

@@ -395,7 +463,7 @@ func TestInfluxdbResponseParser(t *testing.T) {
 		})
 		t.Run("shouldn't parse aliases", func(t *testing.T) {
 			query = &Query{Alias: "alias words with no brackets"}
-			result = parser.Parse(prepare(response), query)
+			result = parser.Parse(prepare(response), addQueryToQueries(*query))
 			frame := result.Responses["A"]
 			name := "alias words with no brackets"
 			testFrame.Name = name

@@ -405,7 +473,7 @@ func TestInfluxdbResponseParser(t *testing.T) {
 			}

 			query = &Query{Alias: "alias Test 1.5"}
-			result = parser.Parse(prepare(response), query)
+			result = parser.Parse(prepare(response), addQueryToQueries(*query))
 			frame = result.Responses["A"]
 			name = "alias Test 1.5"
 			testFrame.Name = name

@@ -415,7 +483,7 @@ func TestInfluxdbResponseParser(t *testing.T) {
 			}

 			query = &Query{Alias: "alias Test -1"}
-			result = parser.Parse(prepare(response), query)
+			result = parser.Parse(prepare(response), addQueryToQueries(*query))
 			frame = result.Responses["A"]
 			name = "alias Test -1"
 			testFrame.Name = name

@@ -454,6 +522,10 @@ func TestInfluxdbResponseParser(t *testing.T) {
 		`

 		query := &Query{}
+		var queries = addQueryToQueries(*query)
+		queryB := &Query{}
+		queryB.RefID = "B"
+		queries = append(queries, *queryB)
 		labels, err := data.LabelsFromString("datacenter=America")
 		require.Nil(t, err)
 		newField := data.NewField("value", labels, []*float64{

@@ -469,14 +541,15 @@ func TestInfluxdbResponseParser(t *testing.T) {
 			}),
 			newField,
 		)
-		result := parser.Parse(prepare(response), query)
+		testFrame.Meta = &data.FrameMeta{ExecutedQueryString: "Test raw query"}
+		result := parser.Parse(prepare(response), queries)

 		frame := result.Responses["A"]
 		if diff := cmp.Diff(testFrame, frame.Frames[0], data.FrameTestCompareOptions()...); diff != "" {
 			t.Errorf("Result mismatch (-want +got):\n%s", diff)
 		}

 		require.EqualError(t, result.Responses["A"].Error, "query-timeout limit exceeded")
+		require.EqualError(t, result.Responses["B"].Error, "query-timeout limit exceeded")
 	})

 	t.Run("Influxdb response parser with top-level error", func(t *testing.T) {

@@ -490,7 +563,7 @@ func TestInfluxdbResponseParser(t *testing.T) {

 		query := &Query{}

-		result := parser.Parse(prepare(response), query)
+		result := parser.Parse(prepare(response), addQueryToQueries(*query))

 		require.Nil(t, result.Responses["A"].Frames)
@@ -1,4 +1,4 @@
-import { cloneDeep, extend, get, has, isString, map as _map, omit, pick, reduce } from 'lodash';
+import { cloneDeep, extend, get, groupBy, has, isString, map as _map, omit, pick, reduce } from 'lodash';
 import { lastValueFrom, Observable, of, throwError } from 'rxjs';
 import { catchError, map } from 'rxjs/operators';
 import { v4 as uuidv4 } from 'uuid';
@@ -22,8 +22,8 @@ import {
   TIME_SERIES_TIME_FIELD_NAME,
   TIME_SERIES_VALUE_FIELD_NAME,
   TimeSeries,
+  CoreApp,
 } from '@grafana/data';

 import InfluxSeries from './influx_series';
 import InfluxQueryModel from './influx_query_model';
 import ResponseParser from './response_parser';
@@ -32,6 +32,7 @@ import { InfluxOptions, InfluxQuery, InfluxVersion } from './types';
 import { getTemplateSrv, TemplateSrv } from 'app/features/templating/template_srv';
 import { FluxQueryEditor } from './components/FluxQueryEditor';
 import { buildRawQuery } from './queryUtils';
+import config from 'app/core/config';

 // we detect the field type based on the value-array
 function getFieldType(values: unknown[]): FieldType {
@@ -113,6 +114,7 @@ export default class InfluxDatasource extends DataSourceWithBackend<InfluxQuery,
   database: any;
   basicAuth: any;
   withCredentials: any;
+  access: 'direct' | 'proxy';
   interval: any;
   responseParser: any;
   httpMode: string;
@@ -135,6 +137,7 @@ export default class InfluxDatasource extends DataSourceWithBackend<InfluxQuery,
     this.database = instanceSettings.database;
     this.basicAuth = instanceSettings.basicAuth;
     this.withCredentials = instanceSettings.withCredentials;
+    this.access = instanceSettings.access;
     const settingsData = instanceSettings.jsonData || ({} as InfluxOptions);
     this.interval = settingsData.timeInterval;
     this.httpMode = settingsData.httpMode || 'GET';
@@ -150,17 +153,58 @@ export default class InfluxDatasource extends DataSourceWithBackend<InfluxQuery,
   }

   query(request: DataQueryRequest<InfluxQuery>): Observable<DataQueryResponse> {
+    // for not-flux queries we call `this.classicQuery`, and that
+    // handles the is-hidden situation.
+    // for the flux-case, we do the filtering here
+    const filteredRequest = {
+      ...request,
+      targets: request.targets.filter((t) => t.hide !== true),
+    };
+
     if (this.isFlux) {
-      // for not-flux queries we call `this.classicQuery`, and that
-      // handles the is-hidden situation.
-      // for the flux-case, we do the filtering here
-      const filteredRequest = {
-        ...request,
-        targets: request.targets.filter((t) => t.hide !== true),
-      };
       return super.query(filteredRequest);
     }

+    if (config.featureToggles.influxdbBackendMigration && this.access === 'proxy' && request.app === CoreApp.Explore) {
+      return super.query(filteredRequest).pipe(
+        map((res) => {
+          if (res.error) {
+            throw {
+              message: 'InfluxDB Error: ' + res.error.message,
+              res,
+            };
+          }
+
+          const seriesList: any[] = [];
+
+          const groupedFrames = groupBy(res.data, (x) => x.refId);
+          if (Object.keys(groupedFrames).length > 0) {
+            filteredRequest.targets.forEach((target) => {
+              const filteredFrames = groupedFrames[target.refId] ?? [];
+              switch (target.resultFormat) {
+                case 'logs':
+                case 'table':
+                  seriesList.push(
+                    this.responseParser.getTable(filteredFrames, target, {
+                      preferredVisualisationType: target.resultFormat,
+                    })
+                  );
+                  break;
+                default: {
+                  for (let i = 0; i < filteredFrames.length; i++) {
+                    seriesList.push(filteredFrames[i]);
+                  }
+                  break;
+                }
+              }
+            });
+          }
+
+          return { data: seriesList };
+        })
+      );
+    }
+
     // Fallback to classic query support
     return this.classicQuery(request);
   }
@@ -185,7 +229,7 @@ export default class InfluxDatasource extends DataSourceWithBackend<InfluxQuery,
   applyTemplateVariables(query: InfluxQuery, scopedVars: ScopedVars): Record<string, any> {
     // this only works in flux-mode, it should not be called in non-flux-mode
     if (!this.isFlux) {
-      throw new Error('applyTemplateVariables called in influxql-mode. this should never happen');
+      return query;
     }

     // We want to interpolate these variables on backend
@@ -1,4 +1,7 @@
-import { each, isArray } from 'lodash';
+import { DataFrame, FieldType, QueryResultMeta } from '@grafana/data';
+import TableModel from 'app/core/table_model';
+import { each, groupBy, isArray } from 'lodash';
+import { InfluxQuery } from './types';

 export default class ResponseParser {
   parse(query: string, results: { results: any }) {
@@ -50,6 +53,106 @@ export default class ResponseParser {
     // order is insertion-order, so this should be ok.
     return Array.from(res).map((v) => ({ text: v }));
   }
+
+  getTable(dfs: DataFrame[], target: InfluxQuery, meta: QueryResultMeta): TableModel {
+    let table = new TableModel();
+
+    if (dfs.length > 0) {
+      table.meta = {
+        ...meta,
+        executedQueryString: dfs[0].meta?.executedQueryString,
+      };
+
+      table.refId = target.refId;
+      table = getTableCols(dfs, table, target);
+
+      // if group by tag(s) added
+      if (dfs[0].fields[1].labels) {
+        let dfsByLabels: any = groupBy(dfs, (df: DataFrame) =>
+          df.fields[1].labels ? Object.values(df.fields[1].labels!) : null
+        );
+        const labels = Object.keys(dfsByLabels);
+        dfsByLabels = Object.values(dfsByLabels);
+
+        for (let i = 0; i < dfsByLabels.length; i++) {
+          table = getTableRows(dfsByLabels[i], table, [...labels[i].split(',')]);
+        }
+      } else {
+        table = getTableRows(dfs, table, []);
+      }
+    }
+
+    return table;
+  }
 }
+
+function getTableCols(dfs: DataFrame[], table: TableModel, target: InfluxQuery): TableModel {
+  const selectedParams = getSelectedParams(target);
+
+  dfs[0].fields.forEach((field) => {
+    // Time col
+    if (field.name === 'time') {
+      table.columns.push({ text: 'Time', type: FieldType.time });
+    }
+
+    // Group by (label) column(s)
+    else if (field.name === 'value') {
+      if (field.labels) {
+        Object.keys(field.labels).forEach((key) => {
+          table.columns.push({ text: key });
+        });
+      }
+    }
+  });

+  // Select (metric) column(s)
+  for (let i = 0; i < selectedParams.length; i++) {
+    table.columns.push({ text: selectedParams[i] });
+  }
+
+  return table;
+}
+
+function getTableRows(dfs: DataFrame[], table: TableModel, labels: string[]): TableModel {
+  const values = dfs[0].fields[0].values.toArray();
+
+  for (let i = 0; i < values.length; i++) {
+    const time = values[i];
+    const metrics = dfs.map((df: DataFrame) => {
+      return df.fields[1].values.toArray()[i];
+    });
+    table.rows.push([time, ...labels, ...metrics]);
+  }
+  return table;
+}
+
+export function getSelectedParams(target: InfluxQuery): string[] {
+  let allParams: string[] = [];
+  target.select?.forEach((select) => {
+    const selector = select.filter((x) => x.type !== 'field');
+    if (selector.length > 0) {
+      allParams.push(selector[0].type);
+    } else {
+      if (select[0] && select[0].params && select[0].params[0]) {
+        allParams.push(select[0].params[0].toString());
+      }
+    }
+  });
+
+  let uniqueParams: string[] = [];
+  allParams.forEach((param) => {
+    uniqueParams.push(incrementName(param, param, uniqueParams, 0));
+  });
+
+  return uniqueParams;
+}
+
+function incrementName(name: string, nameIncremenet: string, params: string[], index: number): string {
+  if (params.indexOf(nameIncremenet) > -1) {
+    index++;
+    return incrementName(name, name + '_' + index, params, index);
+  }
+  return nameIncremenet;
+}

 function addUnique(s: Set<string>, value: string | number) {
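One subtlety in getSelectedParams above: duplicate column names are disambiguated through incrementName, so selecting usage_iowait three times yields usage_iowait, usage_iowait_1, usage_iowait_2 (the test hunk below checks exactly this). The same de-duplication idea, sketched in Go for consistency with the other examples here; the commit's real implementation is the TypeScript above:

    package main

    import "fmt"

    // Sketch of the incrementName de-duplication: repeated names receive
    // _1, _2, ... suffixes until the candidate is unique.
    func dedup(names []string) []string {
    	seen := map[string]bool{}
    	out := make([]string, 0, len(names))
    	for _, n := range names {
    		candidate := n
    		for i := 1; seen[candidate]; i++ {
    			candidate = fmt.Sprintf("%s_%d", n, i)
    		}
    		seen[candidate] = true
    		out = append(out, candidate)
    	}
    	return out
    }

    func main() {
    	fmt.Println(dedup([]string{"usage_iowait", "usage_iowait", "usage_iowait", "usage_idle"}))
    	// Output: [usage_iowait usage_iowait_1 usage_iowait_2 usage_idle]
    }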
@@ -1,5 +1,7 @@
 import { size } from 'lodash';
-import ResponseParser from '../response_parser';
+import ResponseParser, { getSelectedParams } from '../response_parser';
+import InfluxQueryModel from '../influx_query_model';
+import { FieldType, MutableDataFrame } from '@grafana/data';

 describe('influxdb response parser', () => {
   const parser = new ResponseParser();
@@ -202,4 +204,82 @@ describe('influxdb response parser', () => {
       });
     });
   });
+
+  describe('Should name the selected params correctly', () => {
+    it('when there are no duplicates', () => {
+      const query = new InfluxQueryModel({
+        refId: 'A',
+        select: [[{ type: 'field', params: ['usage_iowait'] }], [{ type: 'field', params: ['usage_idle'] }]],
+      });
+
+      const selectedParams = getSelectedParams(query.target);
+
+      expect(selectedParams.length).toBe(2);
+      expect(selectedParams[0]).toBe('usage_iowait');
+      expect(selectedParams[1]).toBe('usage_idle');
+    });
+
+    it('when there are duplicates', () => {
+      const query = new InfluxQueryModel({
+        refId: 'A',
+        select: [
+          [{ type: 'field', params: ['usage_iowait'] }],
+          [{ type: 'field', params: ['usage_iowait'] }],
+          [{ type: 'field', params: ['usage_iowait'] }],
+          [{ type: 'field', params: ['usage_idle'] }],
+        ],
+      });
+
+      const selectedParams = getSelectedParams(query.target);
+
+      expect(selectedParams.length).toBe(4);
+      expect(selectedParams[0]).toBe('usage_iowait');
+      expect(selectedParams[1]).toBe('usage_iowait_1');
+      expect(selectedParams[2]).toBe('usage_iowait_2');
+      expect(selectedParams[3]).toBe('usage_idle');
+    });
+  });
+
+  describe('Should get the table', () => {
+    const dataFrame = new MutableDataFrame({
+      fields: [
+        { name: 'time', type: FieldType.time, values: [1640257340000] },
+        { name: 'value', type: FieldType.number, values: [3234232323] },
+      ],
+      meta: {
+        executedQueryString: 'SELECT everything!',
+      },
+    });
+
+    const query = new InfluxQueryModel({
+      refId: 'A',
+      select: [[{ type: 'field', params: ['usage_iowait'] }], [{ type: 'field', params: ['usage_idle'] }]],
+    });
+
+    const table = parser.getTable([dataFrame], query.target, {
+      preferredVisualisationType: 'table',
+    });
+
+    it('columns correctly', () => {
+      expect(table.columns.length).toBe(3);
+      expect(table.columns[0].text).toBe('Time');
+      expect(table.columns[1].text).toBe('usage_iowait');
+      expect(table.columns[2].text).toBe('usage_idle');
+    });
+
+    it('rows correctly', () => {
+      expect(table.rows.length).toBe(1);
+      expect(table.rows[0].length).toBe(2);
+      expect(table.rows[0][0]).toBe(1640257340000);
+      expect(table.rows[0][1]).toBe(3234232323);
+    });
+
+    it('preferredVisualisationType correctly', () => {
+      expect(table.meta?.preferredVisualisationType).toBe('table');
+    });
+
+    it('executedQueryString correctly', () => {
+      expect(table.meta?.executedQueryString).toBe('SELECT everything!');
+    });
+  });
+});