Alerting/Expr: New SSE Request/QueryType, alerting move data source UID (#33282)

This commit is contained in:
Kyle Brandt 2021-04-23 10:52:32 -04:00 committed by GitHub
parent 659ea20c3c
commit 5e818146de
19 changed files with 179 additions and 248 deletions
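
In short: instead of building a backend.QueryDataRequest directly, the transform wrapper and the alerting evaluator now build an expr.Request whose queries each carry their own time range and data source UID (the new Request, Query and TimeRange types appear further down in this commit). A minimal sketch of the new shape; values are illustrative, field names are taken from the diff:

// A minimal, illustrative sketch; assumes "encoding/json", "time", and "github.com/grafana/grafana/pkg/expr".
func buildExampleRequest() *expr.Request {
	now := time.Now()
	return &expr.Request{
		OrgId: 1,
		Queries: []expr.Query{
			{
				RefID:         "A",
				DatasourceUID: "000000002", // a regular data source, addressed by its UID
				TimeRange:     expr.TimeRange{From: now.Add(-5 * time.Hour), To: now.Add(-3 * time.Hour)},
				JSON:          json.RawMessage(`{"expr": "http_request_duration_microseconds_count", "intervalMs": 1000, "maxDataPoints": 100}`),
			},
			{
				RefID:         "B",
				DatasourceUID: "-100", // the SSE engine's own UID, so this query is evaluated as an expression
				TimeRange:     expr.TimeRange{From: now.Add(-5 * time.Hour), To: now.Add(-3 * time.Hour)},
				JSON:          json.RawMessage(`{"type": "math", "expression": "$A > 10"}`),
			},
		},
	}
}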

View File

@ -6,7 +6,6 @@ import (
"strings"
"time"
"github.com/grafana/grafana-plugin-sdk-go/backend"
"github.com/grafana/grafana/pkg/components/gtime"
"github.com/grafana/grafana/pkg/expr/mathexp"
)
@ -139,12 +138,12 @@ type ResampleCommand struct {
VarToResample string
Downsampler string
Upsampler string
TimeRange backend.TimeRange
TimeRange TimeRange
refID string
}
// NewResampleCommand creates a new ResampleCommand.
func NewResampleCommand(refID, rawWindow, varToResample string, downsampler string, upsampler string, tr backend.TimeRange) (*ResampleCommand, error) {
func NewResampleCommand(refID, rawWindow, varToResample string, downsampler string, upsampler string, tr TimeRange) (*ResampleCommand, error) {
// TODO: validate reducer here, before execution
window, err := gtime.ParseDuration(rawWindow)
if err != nil {
@ -218,7 +217,7 @@ func (gr *ResampleCommand) Execute(ctx context.Context, vars mathexp.Vars) (math
if !ok {
return newRes, fmt.Errorf("can only resample type series, got type %v", val.Type())
}
num, err := series.Resample(gr.refID, gr.Window, gr.Downsampler, gr.Upsampler, gr.TimeRange)
num, err := series.Resample(gr.refID, gr.Window, gr.Downsampler, gr.Upsampler, gr.TimeRange.From, gr.TimeRange.To)
if err != nil {
return newRes, err
}
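
Callers of NewResampleCommand now pass the package-local TimeRange instead of backend.TimeRange. A hedged sketch, assuming the expr package types from this commit; the window and the "mean"/"pad" reducer names are illustrative, and only the window is parsed today (see the TODO above):

func newExampleResample() (*ResampleCommand, error) {
	tr := TimeRange{From: time.Now().Add(-time.Hour), To: time.Now()}
	// "mean"/"pad" stand in for whatever down/upsampler the caller wants; only "5m" is parsed here.
	return NewResampleCommand("B", "5m", "A", "mean", "pad", tr)
}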

View File

@ -5,7 +5,6 @@ import (
"encoding/json"
"fmt"
"github.com/grafana/grafana-plugin-sdk-go/backend"
"github.com/grafana/grafana/pkg/expr/mathexp"
"gonum.org/v1/gonum/graph"
@ -52,7 +51,7 @@ func (dp *DataPipeline) execute(c context.Context, s *Service) (mathexp.Vars, er
// buildPipeline builds a graph of the nodes and returns the nodes in an
// executable order.
func (s *Service) buildPipeline(req *backend.QueryDataRequest) (DataPipeline, error) {
func (s *Service) buildPipeline(req *Request) (DataPipeline, error) {
graph, err := s.buildDependencyGraph(req)
if err != nil {
return nil, err
@ -67,7 +66,7 @@ func (s *Service) buildPipeline(req *backend.QueryDataRequest) (DataPipeline, er
}
// buildDependencyGraph returns a dependency graph for a set of queries.
func (s *Service) buildDependencyGraph(req *backend.QueryDataRequest) (*simple.DirectedGraph, error) {
func (s *Service) buildDependencyGraph(req *Request) (*simple.DirectedGraph, error) {
graph, err := s.buildGraph(req)
if err != nil {
return nil, err
@ -113,20 +112,26 @@ func buildNodeRegistry(g *simple.DirectedGraph) map[string]Node {
}
// buildGraph creates a new graph populated with nodes for every query.
func (s *Service) buildGraph(req *backend.QueryDataRequest) (*simple.DirectedGraph, error) {
func (s *Service) buildGraph(req *Request) (*simple.DirectedGraph, error) {
dp := simple.NewDirectedGraph()
for _, query := range req.Queries {
rawQueryProp := make(map[string]interface{})
err := json.Unmarshal(query.JSON, &rawQueryProp)
queryBytes, err := query.JSON.MarshalJSON()
if err != nil {
return nil, err
}
err = json.Unmarshal(queryBytes, &rawQueryProp)
if err != nil {
return nil, err
}
rn := &rawNode{
Query: rawQueryProp,
RefID: query.RefID,
TimeRange: query.TimeRange,
QueryType: query.QueryType,
Query: rawQueryProp,
RefID: query.RefID,
TimeRange: query.TimeRange,
QueryType: query.QueryType,
DatasourceUID: query.DatasourceUID,
}
dsName, err := rn.GetDatasourceName()
@ -134,17 +139,14 @@ func (s *Service) buildGraph(req *backend.QueryDataRequest) (*simple.DirectedGra
return nil, err
}
dsUID, err := rn.GetDatasourceUid()
if err != nil {
return nil, err
}
dsUID := rn.DatasourceUID
var node graph.Node
switch {
case dsName == DatasourceName || dsUID == DatasourceUID:
node, err = buildCMDNode(dp, rn)
default: // If it's not an expression query, it's a data source query.
node, err = s.buildDSNode(dp, rn, req.PluginContext.OrgID)
node, err = s.buildDSNode(dp, rn, req.OrgId)
}
if err != nil {
return nil, err
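
The switch above now keys only on the data source name inside the model or on the DatasourceUID carried by the query. Illustrative queries (the "-100" / "__expr__" markers and the "000000004" UID come from the fixtures in this commit): the first becomes a command node, the second a data source node:

// Routed to the expression engine: UID matches DatasourceUID.
var exprQuery = Query{
	RefID:         "B",
	DatasourceUID: "-100",
	JSON:          json.RawMessage(`{"datasource": "__expr__", "type": "math", "expression": "2 + 2 > 1"}`),
}

// Anything else falls through to buildDSNode.
var dsQuery = Query{
	RefID:         "A",
	DatasourceUID: "000000004",
	JSON:          json.RawMessage(`{"intervalMs": 1000, "maxDataPoints": 100}`),
}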

View File

@ -4,13 +4,12 @@ import (
"fmt"
"time"
"github.com/grafana/grafana-plugin-sdk-go/backend"
"github.com/grafana/grafana-plugin-sdk-go/data"
)
// Resample resamples the Series into a new Series at the given interval, using the given downsampler and upsampler over [from, to].
func (s Series) Resample(refID string, interval time.Duration, downsampler string, upsampler string, tr backend.TimeRange) (Series, error) {
newSeriesLength := int(float64(tr.To.Sub(tr.From).Nanoseconds()) / float64(interval.Nanoseconds()))
func (s Series) Resample(refID string, interval time.Duration, downsampler string, upsampler string, from, to time.Time) (Series, error) {
newSeriesLength := int(float64(to.Sub(from).Nanoseconds()) / float64(interval.Nanoseconds()))
if newSeriesLength <= 0 {
return s, fmt.Errorf("the series cannot be sampled further; the time range is shorter than the interval")
}
@ -18,8 +17,8 @@ func (s Series) Resample(refID string, interval time.Duration, downsampler strin
bookmark := 0
var lastSeen *float64
idx := 0
t := tr.From
for !t.After(tr.To) && idx <= newSeriesLength {
t := from
for !t.After(to) && idx <= newSeriesLength {
vals := make([]*float64, 0)
sIdx := bookmark
for {
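
For intuition on the new signature: the output length now comes straight from the two time.Time values. A worked example with illustrative values; a one-hour range at a 5-minute interval yields 12 points:

func exampleNewSeriesLength() int {
	from := time.Date(2021, 4, 23, 10, 0, 0, 0, time.UTC)
	to := from.Add(time.Hour)
	interval := 5 * time.Minute
	return int(float64(to.Sub(from).Nanoseconds()) / float64(interval.Nanoseconds())) // 1h / 5m = 12
}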

View File

@ -276,7 +276,7 @@ func TestResampleSeries(t *testing.T) {
}
for _, tt := range tests {
t.Run(tt.name, func(t *testing.T) {
series, err := tt.seriesToResample.Resample("", tt.interval, tt.downsampler, tt.upsampler, tt.timeRange)
series, err := tt.seriesToResample.Resample("", tt.interval, tt.downsampler, tt.upsampler, tt.timeRange.From, tt.timeRange.To)
if tt.series.Frame == nil {
require.Error(t, err)
} else {

View File

@ -21,10 +21,11 @@ type baseNode struct {
}
type rawNode struct {
RefID string `json:"refId"`
Query map[string]interface{}
QueryType string
TimeRange backend.TimeRange
RefID string `json:"refId"`
Query map[string]interface{}
QueryType string
TimeRange TimeRange
DatasourceUID string
}
func (rn *rawNode) GetDatasourceName() (string, error) {
@ -39,18 +40,6 @@ func (rn *rawNode) GetDatasourceName() (string, error) {
return dsName, nil
}
func (rn *rawNode) GetDatasourceUid() (string, error) {
rawDs, ok := rn.Query["datasourceUid"]
if !ok {
return "", nil
}
dsUID, ok := rawDs.(string)
if !ok {
return "", fmt.Errorf("expected datasource identifier to be a string, got %T", rawDs)
}
return dsUID, nil
}
func (rn *rawNode) GetCommandType() (c CommandType, err error) {
rawType, ok := rn.Query["type"]
if !ok {
@ -144,7 +133,7 @@ type DSNode struct {
orgID int64
queryType string
timeRange backend.TimeRange
timeRange TimeRange
intervalMS int64
maxDP int64
}
@ -182,15 +171,10 @@ func (s *Service) buildDSNode(dp *simple.DirectedGraph, rn *rawNode, orgID int64
}
dsNode.datasourceID = int64(floatDsID)
default:
rawDsUID, ok := rn.Query["datasourceUid"]
if !ok {
if rn.DatasourceUID == "" {
return nil, fmt.Errorf("neither datasourceId or datasourceUid in expression data source request for refId %v", rn.RefID)
}
strDsUID, ok := rawDsUID.(string)
if !ok {
return nil, fmt.Errorf("expected datasourceUid to be a string, got type %T for refId %v", rawDsUID, rn.RefID)
}
dsNode.datasourceUID = strDsUID
dsNode.datasourceUID = rn.DatasourceUID
}
var floatIntervalMS float64
@ -230,8 +214,11 @@ func (dn *DSNode) Execute(ctx context.Context, vars mathexp.Vars, s *Service) (m
MaxDataPoints: dn.maxDP,
Interval: time.Duration(int64(time.Millisecond) * dn.intervalMS),
JSON: dn.query,
TimeRange: dn.timeRange,
QueryType: dn.queryType,
TimeRange: backend.TimeRange{
From: dn.timeRange.From,
To: dn.timeRange.To,
},
QueryType: dn.queryType,
},
}
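
With the fallback above, a data source node can be addressed either by a numeric datasourceId still inside the model or purely by the UID now carried on the query. A sketch of the two accepted forms; the UID comes from the fixtures, the rest is illustrative:

// Legacy form: numeric datasourceId inside the model.
var byID = &rawNode{
	RefID: "A",
	Query: map[string]interface{}{"datasourceId": float64(4), "intervalMs": float64(1000)},
}

// New form: UID on the query itself, nothing datasource-related in the model.
var byUID = &rawNode{
	RefID:         "A",
	DatasourceUID: "000000004",
	Query:         map[string]interface{}{"intervalMs": float64(1000)},
}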

View File

@ -34,7 +34,7 @@ func (s *Service) isDisabled() bool {
}
// BuildPipeline builds a pipeline from a request.
func (s *Service) BuildPipeline(req *backend.QueryDataRequest) (DataPipeline, error) {
func (s *Service) BuildPipeline(req *Request) (DataPipeline, error) {
return s.buildPipeline(req)
}

View File

@ -41,7 +41,7 @@ func TestService(t *testing.T) {
return nil
})
queries := []backend.DataQuery{
queries := []Query{
{
RefID: "A",
JSON: json.RawMessage(`{ "datasource": "test", "datasourceId": 1, "orgId": 1, "intervalMs": 1000, "maxDataPoints": 1000 }`),
@ -52,7 +52,7 @@ func TestService(t *testing.T) {
},
}
req := &backend.QueryDataRequest{Queries: queries}
req := &Request{Queries: queries}
pl, err := s.BuildPipeline(req)
require.NoError(t, err)

View File

@ -36,11 +36,9 @@ func init() {
// WrapTransformData creates and executes transform requests
func (s *Service) WrapTransformData(ctx context.Context, query plugins.DataQuery) (*backend.QueryDataResponse, error) {
sdkReq := &backend.QueryDataRequest{
PluginContext: backend.PluginContext{
OrgID: query.User.OrgId,
},
Queries: []backend.DataQuery{},
req := Request{
OrgId: query.User.OrgId,
Queries: []Query{},
}
for _, q := range query.Queries {
@ -48,24 +46,50 @@ func (s *Service) WrapTransformData(ctx context.Context, query plugins.DataQuery
if err != nil {
return nil, err
}
sdkReq.Queries = append(sdkReq.Queries, backend.DataQuery{
req.Queries = append(req.Queries, Query{
JSON: modelJSON,
Interval: time.Duration(q.IntervalMS) * time.Millisecond,
RefID: q.RefID,
MaxDataPoints: q.MaxDataPoints,
QueryType: q.QueryType,
TimeRange: backend.TimeRange{
TimeRange: TimeRange{
From: query.TimeRange.GetFromAsTimeUTC(),
To: query.TimeRange.GetToAsTimeUTC(),
},
})
}
return s.TransformData(ctx, sdkReq)
return s.TransformData(ctx, &req)
}
// Request is similar to plugins.DataQuery, but the time range is carried per Query.
type Request struct {
Headers map[string]string
Debug bool
OrgId int64
Queries []Query
}
// Query is like plugins.DataSubQuery, but with a time range and only the UID
// for the data source. Interval is also a time.Duration.
type Query struct {
RefID string
TimeRange TimeRange
DatasourceUID string
JSON json.RawMessage
Interval time.Duration
QueryType string
MaxDataPoints int64
}
// TimeRange is a time range expressed as time.Time values.
type TimeRange struct {
From time.Time
To time.Time
}
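
The point of carrying TimeRange per Query (rather than one request-level range as in plugins.DataQuery) is that two queries in one Request can cover different windows. Illustrative:

func exampleWindows(now time.Time) []Query {
	return []Query{
		{RefID: "A", TimeRange: TimeRange{From: now.Add(-time.Hour), To: now}},
		{RefID: "B", TimeRange: TimeRange{From: now.Add(-24 * time.Hour), To: now}},
	}
}
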
// TransformData takes Queries that are either expression nodes
// or data source requests.
func (s *Service) TransformData(ctx context.Context, req *backend.QueryDataRequest) (r *backend.QueryDataResponse, err error) {
func (s *Service) TransformData(ctx context.Context, req *Request) (r *backend.QueryDataResponse, err error) {
if s.isDisabled() {
return nil, status.Error(codes.PermissionDenied, "Expressions are disabled")
}
@ -116,7 +140,7 @@ func (s *Service) TransformData(ctx context.Context, req *backend.QueryDataReque
return responses, nil
}
func hiddenRefIDs(queries []backend.DataQuery) (map[string]struct{}, error) {
func hiddenRefIDs(queries []Query) (map[string]struct{}, error) {
hidden := make(map[string]struct{})
for _, query := range queries {

View File

@ -34,7 +34,7 @@ func DashboardAlertConditions(rawDCondJSON []byte, orgID int64) (*ngmodels.Condi
return nil, err
}
backendReq, err := eval.GetQueryDataRequest(eval.AlertExecCtx{ExpressionsEnabled: true}, ngCond.Data, time.Unix(500, 0))
backendReq, err := eval.GetExprRequest(eval.AlertExecCtx{ExpressionsEnabled: true}, ngCond.Data, time.Unix(500, 0))
if err != nil {
return nil, err
@ -191,7 +191,6 @@ func (dc *dashConditionsJSON) GetNew(orgID int64) (*ngmodels.Condition, error) {
}
queryObj["datasource"] = getDsInfo.Result.Name
queryObj["datasourceUid"] = getDsInfo.Result.Uid
queryObj["refId"] = refID
encodedObj, err := json.Marshal(queryObj)
@ -211,7 +210,7 @@ func (dc *dashConditionsJSON) GetNew(orgID int64) (*ngmodels.Condition, error) {
RefID: refID,
Model: encodedObj,
RelativeTimeRange: *rTR,
DatasourceUID: getDsInfo.Uid,
DatasourceUID: getDsInfo.Result.Uid,
}
ngCond.Data = append(ngCond.Data, alertQuery)
}
@ -240,14 +239,12 @@ func (dc *dashConditionsJSON) GetNew(orgID int64) (*ngmodels.Condition, error) {
ngCond.OrgID = orgID
exprModel := struct {
Type string `json:"type"`
RefID string `json:"refId"`
DatasourceUID string `json:"datasourceUid"`
Conditions []classic.ClassicConditionJSON `json:"conditions"`
Type string `json:"type"`
RefID string `json:"refId"`
Conditions []classic.ClassicConditionJSON `json:"conditions"`
}{
"classic_conditions",
ccRefID,
expr.DatasourceUID,
conditions,
}
@ -257,8 +254,9 @@ func (dc *dashConditionsJSON) GetNew(orgID int64) (*ngmodels.Condition, error) {
}
ccAlertQuery := ngmodels.AlertQuery{
RefID: ccRefID,
Model: exprModelJSON,
RefID: ccRefID,
Model: exprModelJSON,
DatasourceUID: expr.DatasourceUID,
}
ngCond.Data = append(ngCond.Data, ccAlertQuery)

View File

@ -14,9 +14,8 @@
"from": 18000,
"to": 10800
},
"datasourceUid": "000000002",
"model": {
"datasource": "gdev-prometheus",
"datasourceUid": "000000002",
"expr": "http_request_duration_microseconds_count",
"hide": false,
"interval": "",
@ -33,9 +32,8 @@
"from": 18000,
"to": 10800
},
"datasourceUid": "-100",
"model": {
"datasource": "__expr__",
"datasourceUid": "-100",
"expression": "query",
"hide": false,
"intervalMs": 1000,
@ -52,9 +50,8 @@
"from": 18000,
"to": 10800
},
"datasourceUid": "-100",
"model": {
"datasource": "__expr__",
"datasourceUid": "-100",
"expression": "$reduced > 10",
"hide": false,
"intervalMs": 1000,
@ -80,10 +77,9 @@
"from": 18000,
"to": 10800
},
"datasourceUid": "000000004",
"model": {
"alias": "just-testing",
"datasource": "gdev-testdata",
"datasourceUid": "000000004",
"intervalMs": 1000,
"maxDataPoints": 100,
"orgId": 0,
@ -99,9 +95,8 @@
"from": 18000,
"to": 10800
},
"datasourceUid": "-100",
"model": {
"datasource": "__expr__",
"datasourceUid": "__expr__",
"expression": "$A",
"intervalMs": 2000,
"maxDataPoints": 200,

View File

@ -21,9 +21,8 @@
"from": 18000,
"to": 10800
},
"datasourceUid": "000000002",
"model": {
"datasource": "gdev-prometheus",
"datasourceUid": "000000002",
"expr": "http_request_duration_microseconds_count",
"hide": false,
"interval": "",
@ -40,9 +39,8 @@
"from": 18000,
"to": 10800
},
"datasourceUid": "-100",
"model": {
"datasource": "__expr__",
"datasourceUid": "-100",
"expression": "query",
"hide": false,
"intervalMs": 1000,
@ -59,9 +57,8 @@
"from": 18000,
"to": 10800
},
"datasourceUid": "-100",
"model": {
"datasource": "__expr__",
"datasourceUid": "-100",
"expression": "$reduced > 10",
"hide": false,
"intervalMs": 1000,
@ -87,10 +84,9 @@
"from": 18000,
"to": 10800
},
"datasourceUid": "000000004",
"model": {
"alias": "just-testing",
"datasource": "gdev-testdata",
"datasourceUid": "000000004",
"intervalMs": 1000,
"maxDataPoints": 100,
"orgId": 0,
@ -106,9 +102,8 @@
"from": 18000,
"to": 10800
},
"datasourceUid": "-100",
"model": {
"datasource": "__expr__",
"datasourceUid": "__expr__",
"expression": "$A",
"intervalMs": 2000,
"maxDataPoints": 200,

View File

@ -76,8 +76,8 @@ content-type: application/json
"from": 18000,
"to": 10800
},
"datasourceUid": "000000002",
"model": {
"datasourceUid": "000000002",
"expr": "http_request_duration_microseconds_count",
"hide": false,
"interval": "",
@ -94,8 +94,8 @@ content-type: application/json
"from": 18000,
"to": 10800
},
"datasourceUid": "-100",
"model": {
"datasourceUid": "-100",
"expression": "query",
"hide": false,
"intervalMs": 1000,
@ -112,8 +112,8 @@ content-type: application/json
"from": 18000,
"to": 10800
},
"datasourceUid": "-100",
"model": {
"datasourceUid": "-100",
"expression": "$reduced > 10",
"hide": false,
"intervalMs": 1000,
@ -158,8 +158,8 @@ content-type: application/json
"from": 18000,
"to": 10800
},
"datasourceUid": "000000002",
"model": {
"datasourceUid": "000000002",
"expr": "http_request_duration_microseconds_count",
"hide": false,
"interval": "",
@ -176,8 +176,8 @@ content-type: application/json
"from": 18000,
"to": 10800
},
"datasourceUid": "-100",
"model": {
"datasourceUid": "-100",
"expression": "query",
"hide": false,
"intervalMs": 1000,
@ -194,8 +194,8 @@ content-type: application/json
"from": 18000,
"to": 10800
},
"datasourceUid": "-100",
"model": {
"datasourceUid": "-100",
"expression": "$reduced > 42",
"hide": false,
"intervalMs": 1000,

View File

@ -80,9 +80,8 @@ content-type: application/json
"from": 18000,
"to": 10800
},
"datasourceUid": "000000002",
"model": {
"datasource": "gdev-prometheus",
"datasourceUid": "000000002",
"expr": "http_request_duration_microseconds_count",
"hide": false,
"interval": "",
@ -99,8 +98,8 @@ content-type: application/json
"from": 18000,
"to": 10800
},
"datasourceUid": "-100",
"model": {
"datasourceUid": "-100",
"expression": "query",
"hide": false,
"intervalMs": 1000,
@ -117,8 +116,8 @@ content-type: application/json
"from": 18000,
"to": 10800
},
"datasourceUid": "-100",
"model": {
"datasourceUid": "-100",
"expression": "$reduced > 10",
"hide": false,
"intervalMs": 1000,
@ -167,8 +166,8 @@ content-type: application/json
"from": 18000,
"to": 10800
},
"datasourceUid": "000000002",
"model": {
"datasourceUid": "000000002",
"expr": "http_request_duration_microseconds_count",
"hide": false,
"interval": "",
@ -185,8 +184,8 @@ content-type: application/json
"from": 18000,
"to": 10800
},
"datasourceUid": "-100",
"model": {
"datasourceUid": "-100",
"expression": "query",
"hide": false,
"intervalMs": 1000,
@ -203,8 +202,8 @@ content-type: application/json
"from": 18000,
"to": 10800
},
"datasourceUid": "-100",
"model": {
"datasourceUid": "-100",
"expression": "$reduced > 42",
"hide": false,
"intervalMs": 1000,
@ -266,8 +265,8 @@ Content-Type: application/json
"from": 18000,
"to": 10800
},
"datasourceUid": "000000002",
"model": {
"datasourceUid": "000000002",
"expr": "http_request_duration_microseconds_count",
"hide": false,
"interval": "",
@ -284,8 +283,8 @@ Content-Type: application/json
"from": 18000,
"to": 10800
},
"datasourceUid": "-100",
"model": {
"datasourceUid": "-100",
"expression": "query",
"hide": false,
"intervalMs": 1000,
@ -302,8 +301,8 @@ Content-Type: application/json
"from": 18000,
"to": 10800
},
"datasourceUid": "-100",
"model": {
"datasourceUid": "-100",
"expression": "$reduced > 42",
"hide": false,
"intervalMs": 1000,

View File

@ -9,8 +9,8 @@
"from": 18000,
"to": 10800
},
"datasourceUid": "-100",
"model": {
"datasource": "__expr__",
"type": "math",
"expression": "2 + 2 > 1"
}

View File

@ -15,8 +15,8 @@ content-type: application/json
"from": 18000,
"to": 10800
},
"datasourceUid": "-100",
"model": {
"datasourceUid": "-100",
"type":"math",
"expression":"1 < 2"
}
@ -39,8 +39,8 @@ content-type: application/json
"from": 18000,
"to": 10800
},
"datasourceUid": "000000004",
"model": {
"datasourceUid": "000000004",
"intervalMs": 1000,
"maxDataPoints": 100,
"orgId": 0,
@ -56,8 +56,8 @@ content-type: application/json
"from": 18000,
"to": 10800
},
"datasourceUid": "-100",
"model": {
"datasourceUid": "-100",
"expression": "$A",
"intervalMs": 2000,
"maxDataPoints": 200,
@ -113,8 +113,8 @@ content-type: application/json
"from": 18000,
"to": 10800
},
"datasourceUid": "-100",
"model": {
"datasourceUid": "-100",
"type":"math",
"expression":"1 < 2"
}

View File

@ -102,13 +102,10 @@ type AlertExecCtx struct {
Ctx context.Context
}
// GetQueryDataRequest validates the condition and creates a backend.QueryDataRequest from it.
func GetQueryDataRequest(ctx AlertExecCtx, data []models.AlertQuery, now time.Time) (*backend.QueryDataRequest, error) {
queryDataReq := &backend.QueryDataRequest{
PluginContext: backend.PluginContext{
OrgID: ctx.OrgID,
},
Queries: []backend.DataQuery{},
// GetExprRequest validates the condition and creates an expr.Request from it.
func GetExprRequest(ctx AlertExecCtx, data []models.AlertQuery, now time.Time) (*expr.Request, error) {
req := &expr.Request{
OrgId: ctx.OrgID,
}
for i := range data {
@ -127,16 +124,20 @@ func GetQueryDataRequest(ctx AlertExecCtx, data []models.AlertQuery, now time.Ti
return nil, fmt.Errorf("failed to retrieve maxDatapoints from the model: %w", err)
}
queryDataReq.Queries = append(queryDataReq.Queries, backend.DataQuery{
req.Queries = append(req.Queries, expr.Query{
TimeRange: expr.TimeRange{
From: q.RelativeTimeRange.ToTimeRange(now).From,
To: q.RelativeTimeRange.ToTimeRange(now).To,
},
DatasourceUID: q.DatasourceUID,
JSON: model,
Interval: interval,
RefID: q.RefID,
MaxDataPoints: maxDatapoints,
QueryType: q.QueryType,
TimeRange: q.RelativeTimeRange.ToTimeRange(now),
})
}
return queryDataReq, nil
return req, nil
}
func executeCondition(ctx AlertExecCtx, c *models.Condition, now time.Time, dataService *tsdb.Service) (*ExecutionResults, error) {
@ -165,7 +166,7 @@ func executeCondition(ctx AlertExecCtx, c *models.Condition, now time.Time, data
}
func executeQueriesAndExpressions(ctx AlertExecCtx, data []models.AlertQuery, now time.Time, dataService *tsdb.Service) (*backend.QueryDataResponse, error) {
queryDataReq, err := GetQueryDataRequest(ctx, data, now)
queryDataReq, err := GetExprRequest(ctx, data, now)
if err != nil {
return nil, err
}

View File

@ -68,7 +68,7 @@ type AlertQuery struct {
// RelativeTimeRange is the relative Start and End of the query as sent by the frontend.
RelativeTimeRange RelativeTimeRange `json:"relativeTimeRange"`
DatasourceUID string `json:"-"`
DatasourceUID string `json:"datasourceUid"`
// JSON is the raw JSON query and includes the above properties as well as custom properties.
Model json.RawMessage `json:"model"`
@ -86,34 +86,8 @@ func (aq *AlertQuery) setModelProps() error {
return nil
}
// setDatasource sets DatasourceUID.
// If it's an expression sets DefaultExprDatasourceUID.
func (aq *AlertQuery) setDatasource() error {
if aq.modelProps == nil {
err := aq.setModelProps()
if err != nil {
return err
}
}
i, ok := aq.modelProps["datasourceUid"]
if !ok {
return fmt.Errorf("failed to get datasourceUid from query model")
}
dsUID, ok := i.(string)
if !ok {
return fmt.Errorf("failed to cast datasourceUid to string: %v", i)
}
aq.DatasourceUID = dsUID
return nil
}
// IsExpression returns true if the alert query is an expression.
func (aq *AlertQuery) IsExpression() (bool, error) {
err := aq.setDatasource()
if err != nil {
return false, err
}
return aq.DatasourceUID == expr.DatasourceUID, nil
}
@ -196,20 +170,11 @@ func (aq *AlertQuery) GetIntervalDuration() (time.Duration, error) {
// GetDatasource returns the query datasource identifier.
func (aq *AlertQuery) GetDatasource() (string, error) {
err := aq.setDatasource()
if err != nil {
return "", err
}
return aq.DatasourceUID, nil
}
func (aq *AlertQuery) GetModel() ([]byte, error) {
err := aq.setDatasource()
if err != nil {
return nil, err
}
err = aq.setMaxDatapoints()
err := aq.setMaxDatapoints()
if err != nil {
return nil, err
}
@ -250,10 +215,6 @@ func (aq *AlertQuery) setQueryType() error {
// PreSave sets query's properties.
// It should be called before being saved.
func (aq *AlertQuery) PreSave() error {
if err := aq.setDatasource(); err != nil {
return fmt.Errorf("failed to set datasource to query model: %w", err)
}
if err := aq.setQueryType(); err != nil {
return fmt.Errorf("failed to set query type to query model: %w", err)
}
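
Concretely, the UID now lives on the AlertQuery itself (serialized as "datasourceUid") instead of being dug out of the model. A minimal sketch, assuming "encoding/json", "time" and the expr package; the values match the fixtures in this commit:

func exampleExpressionQuery() (AlertQuery, bool) {
	aq := AlertQuery{
		RefID:         "A",
		DatasourceUID: "-100",
		RelativeTimeRange: RelativeTimeRange{
			From: Duration(5 * time.Hour),
			To:   Duration(3 * time.Hour),
		},
		Model: json.RawMessage(`{"type": "math", "expression": "2 + 2 > 1"}`),
	}
	isExpr, _ := aq.IsExpression() // true: the UID equals expr.DatasourceUID ("-100")
	return aq, isExpr
}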

View File

@ -6,147 +6,130 @@ import (
"testing"
"time"
"github.com/grafana/grafana/pkg/expr"
"github.com/stretchr/testify/assert"
"github.com/stretchr/testify/require"
)
func TestAlertQuery(t *testing.T) {
testCases := []struct {
desc string
alertQuery AlertQuery
expectedIsExpression bool
expectedDatasource string
expectedDatasourceUID string
expectedMaxPoints int64
expectedIntervalMS int64
err error
desc string
alertQuery AlertQuery
expectedIsExpression bool
expectedDatasource string
expectedMaxPoints int64
expectedIntervalMS int64
err error
}{
{
desc: "given an expression query",
alertQuery: AlertQuery{
RefID: "A",
Model: json.RawMessage(`{
"datasourceUid": "-100",
"queryType": "metricQuery",
"extraParam": "some text"
}`),
DatasourceUID: "-100",
},
expectedIsExpression: true,
expectedDatasourceUID: expr.DatasourceUID,
expectedMaxPoints: int64(defaultMaxDataPoints),
expectedIntervalMS: int64(defaultIntervalMS),
expectedIsExpression: true,
expectedMaxPoints: int64(defaultMaxDataPoints),
expectedIntervalMS: int64(defaultIntervalMS),
},
{
desc: "given a query",
alertQuery: AlertQuery{
RefID: "A",
Model: json.RawMessage(`{
"datasourceUid": "000000001",
"queryType": "metricQuery",
"extraParam": "some text"
}`),
},
expectedIsExpression: false,
expectedDatasourceUID: "000000001",
expectedMaxPoints: int64(defaultMaxDataPoints),
expectedIntervalMS: int64(defaultIntervalMS),
expectedIsExpression: false,
expectedMaxPoints: int64(defaultMaxDataPoints),
expectedIntervalMS: int64(defaultIntervalMS),
},
{
desc: "given a query with valid maxDataPoints",
alertQuery: AlertQuery{
RefID: "A",
Model: json.RawMessage(`{
"datasourceUid": "000000001",
"queryType": "metricQuery",
"maxDataPoints": 200,
"extraParam": "some text"
}`),
},
expectedIsExpression: false,
expectedDatasourceUID: "000000001",
expectedMaxPoints: 200,
expectedIntervalMS: int64(defaultIntervalMS),
expectedIsExpression: false,
expectedMaxPoints: 200,
expectedIntervalMS: int64(defaultIntervalMS),
},
{
desc: "given a query with invalid maxDataPoints",
alertQuery: AlertQuery{
RefID: "A",
Model: json.RawMessage(`{
"datasourceUid": "000000001",
"queryType": "metricQuery",
"maxDataPoints": "invalid",
"extraParam": "some text"
}`),
},
expectedIsExpression: false,
expectedDatasourceUID: "000000001",
expectedMaxPoints: int64(defaultMaxDataPoints),
expectedIntervalMS: int64(defaultIntervalMS),
expectedIsExpression: false,
expectedMaxPoints: int64(defaultMaxDataPoints),
expectedIntervalMS: int64(defaultIntervalMS),
},
{
desc: "given a query with zero maxDataPoints",
alertQuery: AlertQuery{
RefID: "A",
Model: json.RawMessage(`{
"datasourceUid": "000000001",
"queryType": "metricQuery",
"maxDataPoints": 0,
"extraParam": "some text"
}`),
},
expectedIsExpression: false,
expectedDatasourceUID: "000000001",
expectedMaxPoints: int64(defaultMaxDataPoints),
expectedIntervalMS: int64(defaultIntervalMS),
expectedIsExpression: false,
expectedMaxPoints: int64(defaultMaxDataPoints),
expectedIntervalMS: int64(defaultIntervalMS),
},
{
desc: "given a query with valid intervalMs",
alertQuery: AlertQuery{
RefID: "A",
Model: json.RawMessage(`{
"datasourceUid": "000000001",
"queryType": "metricQuery",
"intervalMs": 2000,
"extraParam": "some text"
}`),
},
expectedIsExpression: false,
expectedDatasourceUID: "000000001",
expectedMaxPoints: int64(defaultMaxDataPoints),
expectedIntervalMS: 2000,
expectedIsExpression: false,
expectedMaxPoints: int64(defaultMaxDataPoints),
expectedIntervalMS: 2000,
},
{
desc: "given a query with invalid intervalMs",
alertQuery: AlertQuery{
RefID: "A",
Model: json.RawMessage(`{
"datasourceUid": "000000001",
"queryType": "metricQuery",
"intervalMs": "invalid",
"extraParam": "some text"
}`),
},
expectedIsExpression: false,
expectedDatasourceUID: "000000001",
expectedMaxPoints: int64(defaultMaxDataPoints),
expectedIntervalMS: int64(defaultIntervalMS),
expectedIsExpression: false,
expectedMaxPoints: int64(defaultMaxDataPoints),
expectedIntervalMS: int64(defaultIntervalMS),
},
{
desc: "given a query with invalid intervalMs",
alertQuery: AlertQuery{
RefID: "A",
Model: json.RawMessage(`{
"datasourceUid": "000000001",
"queryType": "metricQuery",
"intervalMs": 0,
"extraParam": "some text"
}`),
},
expectedIsExpression: false,
expectedDatasourceUID: "000000001",
expectedMaxPoints: int64(defaultMaxDataPoints),
expectedIntervalMS: int64(defaultIntervalMS),
expectedIsExpression: false,
expectedMaxPoints: int64(defaultMaxDataPoints),
expectedIntervalMS: int64(defaultIntervalMS),
},
}
@ -158,12 +141,6 @@ func TestAlertQuery(t *testing.T) {
assert.Equal(t, tc.expectedIsExpression, isExpression)
})
t.Run("can set datasource for expression", func(t *testing.T) {
err := tc.alertQuery.setDatasource()
require.NoError(t, err)
require.Equal(t, tc.expectedDatasourceUID, tc.alertQuery.DatasourceUID)
})
t.Run("can set queryType for expression", func(t *testing.T) {
err := tc.alertQuery.setQueryType()
require.NoError(t, err)
@ -189,13 +166,7 @@ func TestAlertQuery(t *testing.T) {
err = json.Unmarshal(blob, &model)
require.NoError(t, err)
i, ok := model["datasourceUid"]
require.True(t, ok)
datasourceUID, ok := i.(string)
require.True(t, ok)
require.Equal(t, tc.expectedDatasourceUID, datasourceUID)
i, ok = model["maxDataPoints"]
i, ok := model["maxDataPoints"]
require.True(t, ok)
maxDataPoints, ok := i.(float64)
require.True(t, ok)

View File

@ -90,8 +90,8 @@ func TestAlertAndGroupsQuery(t *testing.T) {
From: ngmodels.Duration(time.Duration(5) * time.Hour),
To: ngmodels.Duration(time.Duration(3) * time.Hour),
},
DatasourceUID: "-100",
Model: json.RawMessage(`{
"datasourceUid": "-100",
"type": "math",
"expression": "2 + 3 > 1"
}`),
@ -208,8 +208,8 @@ func TestAlertRuleCRUD(t *testing.T) {
From: ngmodels.Duration(time.Duration(5) * time.Hour),
To: ngmodels.Duration(time.Duration(3) * time.Hour),
},
DatasourceUID: "-100",
Model: json.RawMessage(`{
"datasourceUid": "-100",
"type": "math",
"expression": "2 + 3 > 1"
}`),
@ -238,8 +238,8 @@ func TestAlertRuleCRUD(t *testing.T) {
From: ngmodels.Duration(time.Duration(5) * time.Hour),
To: ngmodels.Duration(time.Duration(3) * time.Hour),
},
DatasourceUID: "-100",
Model: json.RawMessage(`{
"datasourceUid": "-100",
"type": "math",
"expression": "2 + 3 > 1"
}`),
@ -268,8 +268,8 @@ func TestAlertRuleCRUD(t *testing.T) {
From: ngmodels.Duration(time.Duration(5) * time.Hour),
To: ngmodels.Duration(time.Duration(3) * time.Hour),
},
DatasourceUID: "-100",
Model: json.RawMessage(`{
"datasourceUid": "-100",
"type": "math",
"expression": "2 + 3 > 1"
}`),
@ -299,8 +299,8 @@ func TestAlertRuleCRUD(t *testing.T) {
From: ngmodels.Duration(time.Duration(5) * time.Hour),
To: ngmodels.Duration(time.Duration(3) * time.Hour),
},
DatasourceUID: "-100",
Model: json.RawMessage(`{
"datasourceUid": "-100",
"type": "math",
"expression": "2 + 3 > 1"
}`),
@ -368,8 +368,8 @@ func TestAlertRuleCRUD(t *testing.T) {
From: ngmodels.Duration(time.Duration(5) * time.Hour),
To: ngmodels.Duration(time.Duration(3) * time.Hour),
},
DatasourceUID: "-100",
Model: json.RawMessage(`{
"datasourceUid": "-100",
"type": "math",
"expression": "2 + 3 > 1"
}`),
@ -388,8 +388,8 @@ func TestAlertRuleCRUD(t *testing.T) {
From: ngmodels.Duration(time.Duration(5) * time.Hour),
To: ngmodels.Duration(time.Duration(3) * time.Hour),
},
DatasourceUID: "-100",
Model: json.RawMessage(`{
"datasourceUid": "-100",
"type": "math",
"expression": "2 + 3 > 1"
}`),
@ -474,8 +474,8 @@ func TestAlertRuleCRUD(t *testing.T) {
"from":18000,
"to":10800
},
"datasourceUid":"-100",
"model":{
"datasourceUid":"-100",
"expression":"2 + 3 \u003e 1",
"intervalMs":1000,
"maxDataPoints":100,
@ -509,8 +509,8 @@ func TestAlertRuleCRUD(t *testing.T) {
"from":18000,
"to":10800
},
"datasourceUid":"-100",
"model":{
"datasourceUid":"-100",
"expression":"2 + 3 \u003e 1",
"intervalMs":1000,
"maxDataPoints":100,
@ -567,8 +567,8 @@ func TestAlertRuleCRUD(t *testing.T) {
From: ngmodels.Duration(time.Duration(5) * time.Hour),
To: ngmodels.Duration(time.Duration(3) * time.Hour),
},
DatasourceUID: "-100",
Model: json.RawMessage(`{
"datasourceUid": "-100",
"type": "math",
"expression": "2 + 3 < 1"
}`),
@ -651,8 +651,8 @@ func TestAlertRuleCRUD(t *testing.T) {
From: ngmodels.Duration(time.Duration(5) * time.Hour),
To: ngmodels.Duration(time.Duration(3) * time.Hour),
},
DatasourceUID: "-100",
Model: json.RawMessage(`{
"datasourceUid": "-100",
"type": "math",
"expression": "2 + 3 < 1"
}`),
@ -733,8 +733,8 @@ func TestAlertRuleCRUD(t *testing.T) {
"from":18000,
"to":10800
},
"model":{
"datasourceUid":"-100",
"datasourceUid":"-100",
"model":{
"expression":"2 + 3 \u003C 1",
"intervalMs":1000,
"maxDataPoints":100,
@ -817,8 +817,8 @@ func TestAlertRuleCRUD(t *testing.T) {
"from": 18000,
"to": 10800
},
"datasourceUid":"-100",
"model": {
"datasourceUid": "-100",
"type":"math",
"expression":"1 < 2"
}
@ -868,8 +868,8 @@ func TestAlertRuleCRUD(t *testing.T) {
"from": 18000,
"to": 10800
},
"datasourceUid": "-100",
"model": {
"datasourceUid": "-100",
"type":"math",
"expression":"1 > 2"
}
@ -919,8 +919,8 @@ func TestAlertRuleCRUD(t *testing.T) {
"from": 18000,
"to": 10800
},
"datasourceUid": "-100",
"model": {
"datasourceUid": "-100",
"type":"math",
"expression":"1 > 2"
}
@ -946,8 +946,8 @@ func TestAlertRuleCRUD(t *testing.T) {
"from": 18000,
"to": 10800
},
"datasourceUid": "unknown",
"model": {
"datasourceUid": "unknown"
}
}
],
@ -997,8 +997,8 @@ func TestAlertRuleCRUD(t *testing.T) {
"from": 18000,
"to": 10800
},
"datasourceUid": "-100",
"model": {
"datasourceUid": "-100",
"type":"math",
"expression":"1 < 2"
}
@ -1050,8 +1050,8 @@ func TestAlertRuleCRUD(t *testing.T) {
"from": 18000,
"to": 10800
},
"datasourceUid": "-100",
"model": {
"datasourceUid": "-100",
"type":"math",
"expression":"1 > 2"
}
@ -1103,8 +1103,8 @@ func TestAlertRuleCRUD(t *testing.T) {
"from": 18000,
"to": 10800
},
"datasourceUid": "unknown",
"model": {
"datasourceUid": "unknown"
}
}
],