2016-07-27 09:18:10 -05:00
|
|
|
package conditions
|
2016-07-19 09:15:26 -05:00
|
|
|
|
2016-07-19 10:45:37 -05:00
|
|
|
import (
|
2020-11-19 07:47:17 -06:00
|
|
|
"errors"
|
2016-07-20 07:28:02 -05:00
|
|
|
"fmt"
|
2016-09-21 00:01:53 -05:00
|
|
|
"strings"
|
|
|
|
"time"
|
2016-07-19 09:15:26 -05:00
|
|
|
|
2021-11-10 04:52:16 -06:00
|
|
|
"github.com/grafana/grafana/pkg/tsdb/legacydata"
|
|
|
|
"github.com/grafana/grafana/pkg/tsdb/legacydata/interval"
|
2020-11-26 06:38:36 -06:00
|
|
|
"github.com/grafana/grafana/pkg/tsdb/prometheus"
|
|
|
|
|
2017-09-11 13:07:57 -05:00
|
|
|
gocontext "context"
|
|
|
|
|
2020-10-09 07:21:16 -05:00
|
|
|
"github.com/grafana/grafana-plugin-sdk-go/data"
|
2017-01-13 05:32:30 -06:00
|
|
|
"github.com/grafana/grafana/pkg/components/null"
|
2016-07-19 10:45:37 -05:00
|
|
|
"github.com/grafana/grafana/pkg/components/simplejson"
|
2019-05-14 01:15:05 -05:00
|
|
|
"github.com/grafana/grafana/pkg/models"
|
2016-07-27 09:18:10 -05:00
|
|
|
"github.com/grafana/grafana/pkg/services/alerting"
|
2020-03-18 09:30:07 -05:00
|
|
|
"github.com/grafana/grafana/pkg/util/errutil"
|
2016-07-19 10:45:37 -05:00
|
|
|
)
|
2016-07-19 09:15:26 -05:00
|
|
|
|
2016-07-27 09:18:10 -05:00
|
|
|
func init() {
|
2016-07-27 09:29:28 -05:00
|
|
|
alerting.RegisterCondition("query", func(model *simplejson.Json, index int) (alerting.Condition, error) {
|
2019-06-03 03:25:58 -05:00
|
|
|
return newQueryCondition(model, index)
|
2016-07-27 09:18:10 -05:00
|
|
|
})
|
|
|
|
}
|
|
|
|
|
2019-06-03 03:25:58 -05:00
|
|
|
// QueryCondition is responsible for issue and query, reduce the
// timeseries into single values and evaluate if they are firing or not.
type QueryCondition struct {
	Index     int            // position of this condition within the alert rule's condition list
	Query     AlertQuery     // datasource query to execute for this condition
	Reducer   *queryReducer  // reduces each returned series to a single value
	Evaluator AlertEvaluator // decides whether a reduced value violates the condition
	Operator  string         // how this condition combines with others (e.g. "and"/"or")
}
|
|
|
|
|
2019-06-03 03:25:58 -05:00
|
|
|
// AlertQuery contains information about what datasource a query
// should be sent to and the query object.
type AlertQuery struct {
	Model        *simplejson.Json // raw query model as stored in the alert definition
	DatasourceID int64            // id of the datasource the query is sent to
	From         string           // relative start of the query time range (e.g. "5m")
	To           string           // relative end of the query time range (e.g. "now")
}
|
|
|
|
|
2019-06-03 03:25:58 -05:00
|
|
|
// Eval evaluates the `QueryCondition`.
|
2021-11-10 04:52:16 -06:00
|
|
|
func (c *QueryCondition) Eval(context *alerting.EvalContext, requestHandler legacydata.RequestHandler) (*alerting.ConditionResult, error) {
|
|
|
|
timeRange := legacydata.NewDataTimeRange(c.Query.From, c.Query.To)
|
2016-11-03 09:26:17 -05:00
|
|
|
|
2021-03-08 00:02:49 -06:00
|
|
|
seriesList, err := c.executeQuery(context, timeRange, requestHandler)
|
2016-07-20 07:28:02 -05:00
|
|
|
if err != nil {
|
2016-11-03 09:26:17 -05:00
|
|
|
return nil, err
|
2016-07-20 07:28:02 -05:00
|
|
|
}
|
|
|
|
|
2020-04-25 15:48:20 -05:00
|
|
|
emptySeriesCount := 0
|
2016-11-03 01:14:34 -05:00
|
|
|
evalMatchCount := 0
|
2022-04-13 15:04:10 -05:00
|
|
|
|
|
|
|
// matches represents all the series that violate the alert condition
|
2016-11-03 09:26:17 -05:00
|
|
|
var matches []*alerting.EvalMatch
|
2022-04-13 15:04:10 -05:00
|
|
|
// allMatches capture all evaluation matches irregardless on whether the condition is met or not
|
|
|
|
var allMatches []*alerting.EvalMatch
|
2017-01-13 05:32:30 -06:00
|
|
|
|
2016-07-20 07:28:02 -05:00
|
|
|
for _, series := range seriesList {
|
|
|
|
reducedValue := c.Reducer.Reduce(series)
|
2016-09-06 13:40:12 -05:00
|
|
|
evalMatch := c.Evaluator.Eval(reducedValue)
|
2016-07-21 14:54:12 -05:00
|
|
|
|
Simplify comparison to bool constant (gosimple)
This fixes:
build.go:553:6: should omit comparison to bool constant, can be simplified to !strings.Contains(path, ".sha256") (S1002)
pkg/cmd/grafana-cli/commands/ls_command.go:27:5: should omit comparison to bool constant, can be simplified to !pluginDirInfo.IsDir() (S1002)
pkg/components/dynmap/dynmap_test.go:24:5: should omit comparison to bool constant, can be simplified to !value (S1002)
pkg/components/dynmap/dynmap_test.go:122:14: should omit comparison to bool constant, can be simplified to b (S1002)
pkg/components/dynmap/dynmap_test.go:125:14: should omit comparison to bool constant, can be simplified to !b (S1002)
pkg/components/dynmap/dynmap_test.go:128:14: should omit comparison to bool constant, can be simplified to !b (S1002)
pkg/models/org_user.go:51:5: should omit comparison to bool constant, can be simplified to !(*r).IsValid() (S1002)
pkg/plugins/datasource/wrapper/datasource_plugin_wrapper_test.go:77:12: should omit comparison to bool constant, can be simplified to !haveBool (S1002)
pkg/services/alerting/conditions/evaluator.go:23:9: should omit comparison to bool constant, can be simplified to !reducedValue.Valid (S1002)
pkg/services/alerting/conditions/evaluator.go:48:5: should omit comparison to bool constant, can be simplified to !reducedValue.Valid (S1002)
pkg/services/alerting/conditions/evaluator.go:91:5: should omit comparison to bool constant, can be simplified to !reducedValue.Valid (S1002)
pkg/services/alerting/conditions/query.go:56:6: should omit comparison to bool constant, can be simplified to !reducedValue.Valid (S1002)
pkg/services/alerting/extractor.go:107:20: should omit comparison to bool constant, can be simplified to !enabled.MustBool() (S1002)
pkg/services/alerting/notifiers/telegram.go:222:41: should omit comparison to bool constant, can be simplified to this.UploadImage (S1002)
pkg/services/sqlstore/apikey.go:58:12: should omit comparison to bool constant, can be simplified to !has (S1002)
pkg/services/sqlstore/apikey.go:72:12: should omit comparison to bool constant, can be simplified to !has (S1002)
pkg/services/sqlstore/dashboard.go:66:33: should omit comparison to bool constant, can be simplified to !cmd.Overwrite (S1002)
pkg/services/sqlstore/dashboard.go:175:12: should omit comparison to bool constant, can be simplified to !has (S1002)
pkg/services/sqlstore/dashboard.go:311:13: should omit comparison to bool constant, can be simplified to !has (S1002)
pkg/services/sqlstore/dashboard.go:444:12: should omit comparison to bool constant, can be simplified to !exists (S1002)
pkg/services/sqlstore/dashboard.go:472:12: should omit comparison to bool constant, can be simplified to !exists (S1002)
pkg/services/sqlstore/dashboard.go:554:32: should omit comparison to bool constant, can be simplified to !cmd.Overwrite (S1002)
pkg/services/sqlstore/dashboard_snapshot.go:83:12: should omit comparison to bool constant, can be simplified to !has (S1002)
pkg/services/sqlstore/plugin_setting.go:39:12: should omit comparison to bool constant, can be simplified to !has (S1002)
pkg/services/sqlstore/quota.go:34:12: should omit comparison to bool constant, can be simplified to !has (S1002)
pkg/services/sqlstore/quota.go:111:6: should omit comparison to bool constant, can be simplified to !has (S1002)
pkg/services/sqlstore/quota.go:136:12: should omit comparison to bool constant, can be simplified to !has (S1002)
pkg/services/sqlstore/quota.go:213:6: should omit comparison to bool constant, can be simplified to !has (S1002)
pkg/services/sqlstore/temp_user.go:129:12: should omit comparison to bool constant, can be simplified to !has (S1002)
pkg/services/sqlstore/user.go:157:12: should omit comparison to bool constant, can be simplified to !has (S1002)
pkg/services/sqlstore/user.go:182:5: should omit comparison to bool constant, can be simplified to !has (S1002)
pkg/services/sqlstore/user.go:191:12: should omit comparison to bool constant, can be simplified to !has (S1002)
pkg/services/sqlstore/user.go:212:12: should omit comparison to bool constant, can be simplified to !has (S1002)
pkg/services/sqlstore/user.go:307:12: should omit comparison to bool constant, can be simplified to !has (S1002)
pkg/social/generic_oauth.go:185:5: should omit comparison to bool constant, can be simplified to !s.extractToken(&data, token) (S1002)
pkg/tsdb/mssql/mssql.go:148:39: should omit comparison to bool constant, can be simplified to ok (S1002)
pkg/tsdb/mssql/mssql.go:212:6: should omit comparison to bool constant, can be simplified to !query.Model.Get("fillNull").MustBool(false) (S1002)
pkg/tsdb/mssql/mssql.go:247:56: should omit comparison to bool constant, can be simplified to ok (S1002)
pkg/tsdb/mssql/mssql.go:274:7: should omit comparison to bool constant, can be simplified to !exist (S1002)
pkg/tsdb/mssql/mssql.go:282:8: should omit comparison to bool constant, can be simplified to !exist (S1002)
pkg/tsdb/mysql/mysql.go:221:6: should omit comparison to bool constant, can be simplified to !query.Model.Get("fillNull").MustBool(false) (S1002)
pkg/tsdb/mysql/mysql.go:256:56: should omit comparison to bool constant, can be simplified to ok (S1002)
pkg/tsdb/mysql/mysql.go:283:7: should omit comparison to bool constant, can be simplified to !exist (S1002)
pkg/tsdb/mysql/mysql.go:291:8: should omit comparison to bool constant, can be simplified to !exist (S1002)
pkg/tsdb/postgres/postgres.go:134:39: should omit comparison to bool constant, can be simplified to ok (S1002)
pkg/tsdb/postgres/postgres.go:201:6: should omit comparison to bool constant, can be simplified to !query.Model.Get("fillNull").MustBool(false) (S1002)
pkg/tsdb/postgres/postgres.go:236:56: should omit comparison to bool constant, can be simplified to ok (S1002)
pkg/tsdb/postgres/postgres.go:263:7: should omit comparison to bool constant, can be simplified to !exist (S1002)
pkg/tsdb/postgres/postgres.go:271:8: should omit comparison to bool constant, can be simplified to !exist (S1002)
2018-04-16 13:12:59 -05:00
|
|
|
if !reducedValue.Valid {
|
2020-04-25 15:48:20 -05:00
|
|
|
emptySeriesCount++
|
2016-09-16 07:58:10 -05:00
|
|
|
}
|
|
|
|
|
2016-07-21 14:54:12 -05:00
|
|
|
if context.IsTestRun {
|
2016-07-27 09:29:28 -05:00
|
|
|
context.Logs = append(context.Logs, &alerting.ResultLogEntry{
|
2017-01-13 05:32:30 -06:00
|
|
|
Message: fmt.Sprintf("Condition[%d]: Eval: %v, Metric: %s, Value: %s", c.Index, evalMatch, series.Name, reducedValue),
|
2016-07-21 14:54:12 -05:00
|
|
|
})
|
|
|
|
}
|
|
|
|
|
2022-04-13 15:04:10 -05:00
|
|
|
em := alerting.EvalMatch{
|
|
|
|
Metric: series.Name,
|
|
|
|
Value: reducedValue,
|
|
|
|
Tags: series.Tags,
|
|
|
|
}
|
|
|
|
|
|
|
|
allMatches = append(allMatches, &em)
|
|
|
|
|
2016-08-15 08:12:43 -05:00
|
|
|
if evalMatch {
|
2016-11-03 01:14:34 -05:00
|
|
|
evalMatchCount++
|
2022-04-13 15:04:10 -05:00
|
|
|
matches = append(matches, &em)
|
2017-01-13 05:32:30 -06:00
|
|
|
}
|
|
|
|
}
|
|
|
|
|
|
|
|
// handle no series special case
|
|
|
|
if len(seriesList) == 0 {
|
|
|
|
// eval condition for null value
|
|
|
|
evalMatch := c.Evaluator.Eval(null.FloatFromPtr(nil))
|
|
|
|
|
|
|
|
if context.IsTestRun {
|
|
|
|
context.Logs = append(context.Logs, &alerting.ResultLogEntry{
|
2017-02-05 14:19:39 -06:00
|
|
|
Message: fmt.Sprintf("Condition: Eval: %v, Query Returned No Series (reduced to null/no value)", evalMatch),
|
2016-07-21 14:54:12 -05:00
|
|
|
})
|
2016-07-20 07:28:02 -05:00
|
|
|
}
|
2017-01-13 05:32:30 -06:00
|
|
|
|
|
|
|
if evalMatch {
|
|
|
|
evalMatchCount++
|
|
|
|
matches = append(matches, &alerting.EvalMatch{Metric: "NoData", Value: null.FloatFromPtr(nil)})
|
|
|
|
}
|
2016-07-20 07:28:02 -05:00
|
|
|
}
|
2016-09-08 06:28:41 -05:00
|
|
|
|
2016-11-03 09:26:17 -05:00
|
|
|
return &alerting.ConditionResult{
|
|
|
|
Firing: evalMatchCount > 0,
|
2020-04-25 15:48:20 -05:00
|
|
|
NoDataFound: emptySeriesCount == len(seriesList),
|
2016-11-15 08:35:25 -06:00
|
|
|
Operator: c.Operator,
|
2016-11-03 09:26:17 -05:00
|
|
|
EvalMatches: matches,
|
2022-04-13 15:04:10 -05:00
|
|
|
AllMatches: allMatches,
|
2016-11-03 09:26:17 -05:00
|
|
|
}, nil
|
2016-07-20 07:28:02 -05:00
|
|
|
}
|
|
|
|
|
2021-11-10 04:52:16 -06:00
|
|
|
func calculateInterval(timeRange legacydata.DataTimeRange, model *simplejson.Json, dsInfo *models.DataSource) (time.Duration, error) {
|
2021-09-06 02:24:32 -05:00
|
|
|
// if there is no min-interval specified in the datasource or in the dashboard-panel,
|
|
|
|
// the value of 1ms is used (this is how it is done in the dashboard-interval-calculation too,
|
|
|
|
// see https://github.com/grafana/grafana/blob/9a0040c0aeaae8357c650cec2ee644a571dddf3d/packages/grafana-data/src/datetime/rangeutil.ts#L264)
|
|
|
|
defaultMinInterval := time.Millisecond * 1
|
|
|
|
|
2021-08-31 07:49:30 -05:00
|
|
|
// interval.GetIntervalFrom has two problems (but they do not affect us here):
|
|
|
|
// - it returns the min-interval, so it should be called interval.GetMinIntervalFrom
|
|
|
|
// - it falls back to model.intervalMs. it should not, because that one is the real final
|
|
|
|
// interval-value calculated by the browser. but, in this specific case (old-alert),
|
|
|
|
// that value is not set, so the fallback never happens.
|
2021-09-06 02:24:32 -05:00
|
|
|
minInterval, err := interval.GetIntervalFrom(dsInfo, model, defaultMinInterval)
|
2021-08-31 07:49:30 -05:00
|
|
|
|
|
|
|
if err != nil {
|
|
|
|
return time.Duration(0), err
|
|
|
|
}
|
|
|
|
|
|
|
|
calc := interval.NewCalculator()
|
|
|
|
|
2021-09-09 07:05:08 -05:00
|
|
|
interval := calc.Calculate(timeRange, minInterval)
|
2021-08-31 07:49:30 -05:00
|
|
|
|
|
|
|
return interval.Value, nil
|
|
|
|
}
|
|
|
|
|
2021-11-10 04:52:16 -06:00
|
|
|
// executeQuery resolves the condition's datasource, validates that its URL
// may be queried, builds the alert query and sends it through the given
// request handler. Results arriving as dataframes are decoded and converted
// to the legacy DataTimeSeriesSlice format; plain series are appended as-is.
func (c *QueryCondition) executeQuery(context *alerting.EvalContext, timeRange legacydata.DataTimeRange,
	requestHandler legacydata.RequestHandler) (legacydata.DataTimeSeriesSlice, error) {
	getDsInfo := &models.GetDataSourceQuery{
		Id:    c.Query.DatasourceID,
		OrgId: context.Rule.OrgID,
	}

	if err := context.Store.GetDataSource(context.Ctx, getDsInfo); err != nil {
		return nil, fmt.Errorf("could not find datasource: %w", err)
	}

	// Reject datasource URLs the request validator does not allow.
	err := context.RequestValidator.Validate(getDsInfo.Result.Url, nil)
	if err != nil {
		return nil, fmt.Errorf("access denied: %w", err)
	}

	req, err := c.getRequestForAlertRule(getDsInfo.Result, timeRange, context.IsDebug)
	if err != nil {
		return nil, fmt.Errorf("interval calculation failed: %w", err)
	}
	result := make(legacydata.DataTimeSeriesSlice, 0)

	if context.IsDebug {
		// In debug mode, log the outgoing queries (time range, models,
		// datasource, resolution) so they can be inspected in the alert test UI.
		data := simplejson.New()
		if req.TimeRange != nil {
			data.Set("from", req.TimeRange.GetFromAsMsEpoch())
			data.Set("to", req.TimeRange.GetToAsMsEpoch())
		}

		type queryDto struct {
			RefID         string           `json:"refId"`
			Model         *simplejson.Json `json:"model"`
			Datasource    *simplejson.Json `json:"datasource"`
			MaxDataPoints int64            `json:"maxDataPoints"`
			IntervalMS    int64            `json:"intervalMs"`
		}

		queries := []*queryDto{}
		for _, q := range req.Queries {
			queries = append(queries, &queryDto{
				RefID: q.RefID,
				Model: q.Model,
				Datasource: simplejson.NewFromAny(map[string]interface{}{
					"id":   q.DataSource.Id,
					"name": q.DataSource.Name,
				}),
				MaxDataPoints: q.MaxDataPoints,
				IntervalMS:    q.IntervalMS,
			})
		}

		data.Set("queries", queries)

		context.Logs = append(context.Logs, &alerting.ResultLogEntry{
			Message: fmt.Sprintf("Condition[%d]: Query", c.Index),
			Data:    data,
		})
	}

	resp, err := requestHandler.HandleRequest(context.Ctx, getDsInfo.Result, req)
	if err != nil {
		return nil, toCustomError(err)
	}

	for _, v := range resp.Results {
		if v.Error != nil {
			return nil, fmt.Errorf("request handler response error %v", v)
		}

		// If there are dataframes but no series on the result
		useDataframes := v.Dataframes != nil && (v.Series == nil || len(v.Series) == 0)

		if useDataframes { // convert the dataframes to plugins.DataTimeSeries
			frames, err := v.Dataframes.Decoded()
			if err != nil {
				return nil, errutil.Wrap("request handler failed to unmarshal arrow dataframes from bytes", err)
			}

			for _, frame := range frames {
				ss, err := FrameToSeriesSlice(frame)
				if err != nil {
					return nil, errutil.Wrapf(err,
						`request handler failed to convert dataframe "%v" to plugins.DataTimeSeriesSlice`, frame.Name)
				}
				result = append(result, ss...)
			}
		} else {
			result = append(result, v.Series...)
		}

		queryResultData := map[string]interface{}{}

		if context.IsTestRun {
			queryResultData["series"] = result
		}

		if context.IsDebug && v.Meta != nil {
			queryResultData["meta"] = v.Meta
		}

		// Log the query result for test runs and debug sessions so the
		// returned series (and their origin) are visible in the UI.
		if context.IsTestRun || context.IsDebug {
			if useDataframes {
				queryResultData["fromDataframe"] = true
			}
			context.Logs = append(context.Logs, &alerting.ResultLogEntry{
				Message: fmt.Sprintf("Condition[%d]: Query Result", c.Index),
				Data:    simplejson.NewFromAny(queryResultData),
			})
		}
	}

	return result, nil
}
|
|
|
|
|
2021-11-10 04:52:16 -06:00
|
|
|
func (c *QueryCondition) getRequestForAlertRule(datasource *models.DataSource, timeRange legacydata.DataTimeRange,
|
|
|
|
debug bool) (legacydata.DataQuery, error) {
|
2020-08-19 09:03:39 -05:00
|
|
|
queryModel := c.Query.Model
|
2021-08-31 07:49:30 -05:00
|
|
|
|
|
|
|
calculatedInterval, err := calculateInterval(timeRange, queryModel, datasource)
|
|
|
|
if err != nil {
|
2021-11-10 04:52:16 -06:00
|
|
|
return legacydata.DataQuery{}, err
|
2021-08-31 07:49:30 -05:00
|
|
|
}
|
|
|
|
|
2021-11-10 04:52:16 -06:00
|
|
|
req := legacydata.DataQuery{
|
2021-03-08 00:02:49 -06:00
|
|
|
TimeRange: &timeRange,
|
2021-11-10 04:52:16 -06:00
|
|
|
Queries: []legacydata.DataSubQuery{
|
2016-07-20 07:28:02 -05:00
|
|
|
{
|
2021-08-31 07:49:30 -05:00
|
|
|
RefID: "A",
|
|
|
|
Model: queryModel,
|
|
|
|
DataSource: datasource,
|
|
|
|
QueryType: queryModel.Get("queryType").MustString(""),
|
|
|
|
MaxDataPoints: interval.DefaultRes,
|
|
|
|
IntervalMS: calculatedInterval.Milliseconds(),
|
2016-07-20 07:28:02 -05:00
|
|
|
},
|
|
|
|
},
|
2020-04-25 15:48:20 -05:00
|
|
|
Headers: map[string]string{
|
2021-09-09 09:16:05 -05:00
|
|
|
"FromAlert": "true",
|
|
|
|
"X-Cache-Skip": "true",
|
2020-04-25 15:48:20 -05:00
|
|
|
},
|
2019-06-25 01:52:17 -05:00
|
|
|
Debug: debug,
|
2016-07-20 07:28:02 -05:00
|
|
|
}
|
|
|
|
|
2021-08-31 07:49:30 -05:00
|
|
|
return req, nil
|
2016-07-19 09:15:26 -05:00
|
|
|
}
|
|
|
|
|
2019-06-03 03:25:58 -05:00
|
|
|
func newQueryCondition(model *simplejson.Json, index int) (*QueryCondition, error) {
|
2016-07-19 10:45:37 -05:00
|
|
|
condition := QueryCondition{}
|
2016-07-21 14:54:12 -05:00
|
|
|
condition.Index = index
|
2016-07-19 10:45:37 -05:00
|
|
|
|
2019-06-03 03:25:58 -05:00
|
|
|
queryJSON := model.Get("query")
|
2016-07-19 10:45:37 -05:00
|
|
|
|
2019-06-03 03:25:58 -05:00
|
|
|
condition.Query.Model = queryJSON.Get("model")
|
|
|
|
condition.Query.From = queryJSON.Get("params").MustArray()[1].(string)
|
|
|
|
condition.Query.To = queryJSON.Get("params").MustArray()[2].(string)
|
2016-09-21 00:01:53 -05:00
|
|
|
|
|
|
|
if err := validateFromValue(condition.Query.From); err != nil {
|
|
|
|
return nil, err
|
|
|
|
}
|
|
|
|
|
|
|
|
if err := validateToValue(condition.Query.To); err != nil {
|
|
|
|
return nil, err
|
|
|
|
}
|
|
|
|
|
2019-06-03 03:25:58 -05:00
|
|
|
condition.Query.DatasourceID = queryJSON.Get("datasourceId").MustInt64()
|
2016-07-19 10:45:37 -05:00
|
|
|
|
2019-06-03 03:25:58 -05:00
|
|
|
reducerJSON := model.Get("reducer")
|
|
|
|
condition.Reducer = newSimpleReducer(reducerJSON.Get("type").MustString())
|
2016-07-19 10:45:37 -05:00
|
|
|
|
2019-06-03 03:25:58 -05:00
|
|
|
evaluatorJSON := model.Get("evaluator")
|
|
|
|
evaluator, err := NewAlertEvaluator(evaluatorJSON)
|
2016-07-19 10:45:37 -05:00
|
|
|
if err != nil {
|
2019-07-22 12:23:33 -05:00
|
|
|
return nil, fmt.Errorf("error in condition %v: %v", index, err)
|
2016-07-19 10:45:37 -05:00
|
|
|
}
|
|
|
|
condition.Evaluator = evaluator
|
2016-11-15 08:35:25 -06:00
|
|
|
|
2019-06-03 03:25:58 -05:00
|
|
|
operatorJSON := model.Get("operator")
|
|
|
|
operator := operatorJSON.Get("type").MustString("and")
|
2016-11-15 08:35:25 -06:00
|
|
|
condition.Operator = operator
|
|
|
|
|
2016-07-19 10:45:37 -05:00
|
|
|
return &condition, nil
|
2016-07-19 09:15:26 -05:00
|
|
|
}
|
2016-09-21 00:01:53 -05:00
|
|
|
|
|
|
|
// validateFromValue checks that the "from" part of the query time range is a
// valid relative duration, e.g. "now-5m" or "5m".
func validateFromValue(from string) error {
	// Drop the first "now-" occurrence and verify the remainder parses as a
	// (negative) duration: "now-5m" -> "-5m".
	withoutNow := strings.Replace(from, "now-", "", 1)

	_, err := time.ParseDuration("-" + withoutNow)
	return err
}
|
|
|
|
|
|
|
|
// validateToValue checks that the "to" part of the query time range is valid:
// the literal "now", a "now-<duration>" expression, or a plain duration.
func validateToValue(to string) error {
	switch {
	case to == "now":
		return nil
	case strings.HasPrefix(to, "now-"):
		// "now-5m" -> check "-5m" parses; fall through to the plain-duration
		// check below if it does not.
		remainder := strings.TrimPrefix(to, "now-")
		if _, err := time.ParseDuration("-" + remainder); err == nil {
			return nil
		}
	}

	_, err := time.ParseDuration(to)
	return err
}
|
2020-10-09 07:21:16 -05:00
|
|
|
|
|
|
|
// FrameToSeriesSlice converts a frame that is a valid time series as per data.TimeSeriesSchema()
// to a DataTimeSeriesSlice.
func FrameToSeriesSlice(frame *data.Frame) (legacydata.DataTimeSeriesSlice, error) {
	tsSchema := frame.TimeSeriesSchema()
	if tsSchema.Type == data.TimeSeriesTypeNot {
		// If no fields, or only a time field, create an empty plugins.DataTimeSeriesSlice with a single
		// time series in order to trigger "no data" in alerting.
		if frame.Rows() == 0 || (len(frame.Fields) == 1 && frame.Fields[0].Type().Time()) {
			return legacydata.DataTimeSeriesSlice{{
				Name:   frame.Name,
				Points: make(legacydata.DataTimeSeriesPoints, 0),
			}}, nil
		}
		return nil, fmt.Errorf("input frame is not recognized as a time series")
	}
	seriesCount := len(tsSchema.ValueIndices)
	seriesSlice := make(legacydata.DataTimeSeriesSlice, 0, seriesCount)
	timeField := frame.Fields[tsSchema.TimeIndex]
	timeNullFloatSlice := make([]null.Float, timeField.Len())

	for i := 0; i < timeField.Len(); i++ { // built slice of time as epoch ms in null floats
		tStamp, err := timeField.FloatAt(i)
		if err != nil {
			return nil, err
		}
		timeNullFloatSlice[i] = null.FloatFrom(tStamp)
	}

	for _, fieldIdx := range tsSchema.ValueIndices { // create a TimeSeries for each value Field
		field := frame.Fields[fieldIdx]
		ts := legacydata.DataTimeSeries{
			Points: make(legacydata.DataTimeSeriesPoints, field.Len()),
		}

		if len(field.Labels) > 0 {
			ts.Tags = field.Labels.Copy()
		}

		// Pick the series name: explicit display names win, then labels,
		// finally the raw field name.
		switch {
		case field.Config != nil && field.Config.DisplayName != "":
			ts.Name = field.Config.DisplayName
		case field.Config != nil && field.Config.DisplayNameFromDS != "":
			ts.Name = field.Config.DisplayNameFromDS
		case len(field.Labels) > 0:
			// Tags are appended to the name so they are eventually included in EvalMatch's Metric property
			// for display in notifications.
			ts.Name = fmt.Sprintf("%v {%v}", field.Name, field.Labels.String())
		default:
			ts.Name = field.Name
		}

		for rowIdx := 0; rowIdx < field.Len(); rowIdx++ { // for each value in the field, make a TimePoint
			val, err := field.FloatAt(rowIdx)
			if err != nil {
				return nil, errutil.Wrapf(err,
					"failed to convert frame to DataTimeSeriesSlice, can not convert value %v to float", field.At(rowIdx))
			}
			ts.Points[rowIdx] = legacydata.DataTimePoint{
				null.FloatFrom(val),
				timeNullFloatSlice[rowIdx],
			}
		}

		seriesSlice = append(seriesSlice, ts)
	}

	return seriesSlice, nil
}
|
2020-11-26 06:38:36 -06:00
|
|
|
|
|
|
|
func toCustomError(err error) error {
|
|
|
|
// is context timeout
|
|
|
|
if errors.Is(err, gocontext.DeadlineExceeded) {
|
|
|
|
return fmt.Errorf("alert execution exceeded the timeout")
|
|
|
|
}
|
|
|
|
|
|
|
|
// is Prometheus error
|
|
|
|
if prometheus.IsAPIError(err) {
|
|
|
|
return prometheus.ConvertAPIError(err)
|
|
|
|
}
|
|
|
|
|
|
|
|
// generic fallback
|
2021-03-08 00:02:49 -06:00
|
|
|
return fmt.Errorf("request handler error: %w", err)
|
2020-11-26 06:38:36 -06:00
|
|
|
}
|