Mirror of https://github.com/grafana/grafana.git (synced 2025-02-25 18:55:37 -06:00)
feat(alerting): add query optimizations for prometheus (#76015)
commit 2b8c6d66e1
parent d28e365f74
@@ -556,11 +556,11 @@ func (st DBstore) GetAlertRulesForScheduling(ctx context.Context, query *ngmodel
         // In previous versions of Grafana, Loki datasources would default to range queries
         // instead of instant queries, sometimes creating unnecessary load. This is only
         // done for Grafana Cloud.
-        if indices, migratable := canBeInstant(rule); migratable {
-            if err := migrateToInstant(rule, indices); err != nil {
+        if optimizations, migratable := canBeInstant(rule); migratable {
+            if err := migrateToInstant(rule, optimizations); err != nil {
                 st.Logger.Error("Could not migrate rule from range to instant query", "rule", rule.UID, "err", err)
             } else {
-                st.Logger.Info("Migrated rule from range to instant query", "rule", rule.UID, "migrated_queries", len(indices))
+                st.Logger.Info("Migrated rule from range to instant query", "rule", rule.UID, "migrated_queries", len(optimizations))
             }
         }
         rules = append(rules, rule)
@@ -1,92 +0,0 @@
package store

import (
    "encoding/json"

    "github.com/grafana/grafana/pkg/expr"
    "github.com/grafana/grafana/pkg/services/datasources"
    "github.com/grafana/grafana/pkg/services/ngalert/models"
)

// DSType can be used to check the datasource type if it's set in the model.
type dsType struct {
    DS struct {
        Type string `json:"type"`
    } `json:"datasource"`
}

func (t dsType) isLoki() bool {
    return t.DS.Type == datasources.DS_LOKI
}

// canBeInstant checks if any of the query nodes that are loki range queries can be migrated to instant queries.
// If any are migratable, those indices are returned.
func canBeInstant(r *models.AlertRule) ([]int, bool) {
    if len(r.Data) < 2 {
        return nil, false
    }
    var (
        optimizableIndices []int
        canBeOptimized     = false
    )
    // Loop over query nodes to find all Loki range queries.
    for i := range r.Data {
        if r.Data[i].QueryType != "range" {
            continue
        }
        var t dsType
        // We can ignore the error here, the query just won't be optimized.
        _ = json.Unmarshal(r.Data[i].Model, &t)

        if !t.isLoki() {
            continue
        }
        var validReducers bool
        // Loop over all query nodes to find the reduce node.
        for ii := range r.Data {
            // Second query part should be and expression.
            if !expr.IsDataSource(r.Data[ii].DatasourceUID) {
                continue
            }
            exprRaw := make(map[string]any)
            if err := json.Unmarshal(r.Data[ii].Model, &exprRaw); err != nil {
                continue
            }
            // Second query part should use first query part as expression.
            if ref, ok := exprRaw["expression"].(string); !ok || ref != r.Data[i].RefID {
                continue
            }
            // Second query part should be "last()"
            if val, ok := exprRaw["reducer"].(string); !ok || val != "last" {
                validReducers = false
                break
            }
            validReducers = true
        }
        // If we found a reduce node that uses last, we can add the loki query to the optimizations.
        if validReducers {
            canBeOptimized = true
            optimizableIndices = append(optimizableIndices, i)
        }
    }
    return optimizableIndices, canBeOptimized
}

// migrateToInstant will move the provided indices from a range-query to an instant query. This should only
// be used for loki.
func migrateToInstant(r *models.AlertRule, optimizableIndices []int) error {
    for _, lokiQueryIndex := range optimizableIndices {
        modelRaw := make(map[string]any)
        if err := json.Unmarshal(r.Data[lokiQueryIndex].Model, &modelRaw); err != nil {
            return err
        }
        modelRaw["queryType"] = "instant"
        model, err := json.Marshal(modelRaw)
        if err != nil {
            return err
        }
        r.Data[lokiQueryIndex].Model = model
        r.Data[lokiQueryIndex].QueryType = "instant"
    }
    return nil
}
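The deleted implementation above keys Loki detection off AlertQuery.QueryType alone. The replacement added later in this commit also has to sniff the raw query model, because Prometheus records range versus instant as booleans inside the model JSON rather than in QueryType. A minimal, self-contained sketch of that sniffing follows; the sample model is illustrative and only mirrors the shape of the Prometheus fixtures in the new tests.

package main

import (
    "encoding/json"
    "fmt"
)

// dsType mirrors the helper struct introduced in range_to_instant.go: it pulls out
// just the fields needed to decide whether a model is a Prometheus range query.
type dsType struct {
    DS struct {
        Type string `json:"type"`
    } `json:"datasource"`
    Range bool `json:"range"`
}

func main() {
    // Illustrative Prometheus query model, shaped like the test fixtures below.
    model := []byte(`{
        "datasource": {"type": "prometheus", "uid": "some-external-ds"},
        "expr": "1",
        "instant": false,
        "range": true,
        "refId": "A"
    }`)

    var t dsType
    // As in canBeInstant, an unmarshal error would simply mean "not optimizable".
    _ = json.Unmarshal(model, &t)

    fmt.Println(t.DS.Type == "prometheus" && t.Range) // true: a candidate for migration
}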
@@ -1,242 +0,0 @@
package store

import (
    "encoding/json"
    "fmt"
    "testing"

    "github.com/stretchr/testify/require"

    "github.com/grafana/grafana/pkg/services/ngalert/models"
)

func TestCanBeInstant(t *testing.T) {
    tcs := []struct {
        name            string
        expected        bool
        expectedIndices []int
        rule            *models.AlertRule
    }{
        {
            name: "valid rule that can be migrated from range to instant",
            expected: true,
            expectedIndices: []int{0},
            rule: createMigrateableLokiRule(t),
        },
        {
            name: "valid rule with external loki datasource",
            expected: true,
            expectedIndices: []int{0},
            rule: createMigrateableLokiRule(t, func(r *models.AlertRule) {
                r.Data[0].DatasourceUID = "something-external"
            }),
        },
        {
            name: "valid multi query rule with loki datasources",
            expected: true,
            expectedIndices: []int{0, 1},
            rule: createMultiQueryMigratableLokiRule(t),
        },
        {
            name: "invalid rule where the data array is too short to be migrateable",
            expected: false,
            rule: createMigrateableLokiRule(t, func(r *models.AlertRule) {
                r.Data = []models.AlertQuery{r.Data[0]}
            }),
        },
        {
            name: "invalid rule that is not a range query",
            expected: false,
            rule: createMigrateableLokiRule(t, func(r *models.AlertRule) {
                r.Data[0].QueryType = "something-else"
            }),
        },
        {
            name: "invalid rule that has not last() as aggregation",
            expected: false,
            rule: createMigrateableLokiRule(t, func(r *models.AlertRule) {
                r.Data[1] = reducer(t, "B", "A", "avg")
            }),
        },
        {
            name: "invalid rule that has not all reducers last()",
            expected: false,
            rule: createMigrateableLokiRule(t, func(r *models.AlertRule) {
                r.Data = append(r.Data, reducer(t, "invalid-reducer", "A", "min"))
            }),
        },
        {
            name: "invalid rule that has no aggregation",
            expected: false,
            rule: createMigrateableLokiRule(t, func(r *models.AlertRule) {
                r.Data[1].DatasourceUID = "something-else"
            }),
        },
        {
            name: "invalid rule that has not last() pointing to range query",
            expected: false,
            rule: createMigrateableLokiRule(t, func(r *models.AlertRule) {
                raw := make(map[string]any)
                err := json.Unmarshal(r.Data[1].Model, &raw)
                require.NoError(t, err)
                raw["expression"] = "C"
                r.Data[1].Model, err = json.Marshal(raw)
                require.NoError(t, err)
            }),
        },
    }
    for _, tc := range tcs {
        t.Run(tc.name, func(t *testing.T) {
            indicies, canBe := canBeInstant(tc.rule)
            require.Equal(t, tc.expected, canBe)
            require.Equal(t, tc.expectedIndices, indicies)
        })
    }
}

func TestMigrateLokiQueryToInstant(t *testing.T) {
    original := createMigrateableLokiRule(t)
    mirgrated := createMigrateableLokiRule(t, func(r *models.AlertRule) {
        r.Data[0] = lokiQuery(t, "A", "instant", "grafanacloud-logs")
    })

    optimizableIndices, canBeOptimized := canBeInstant(original)
    require.True(t, canBeOptimized)
    require.NoError(t, migrateToInstant(original, optimizableIndices))

    require.Equal(t, mirgrated.Data[0].QueryType, original.Data[0].QueryType)

    originalModel := make(map[string]any)
    require.NoError(t, json.Unmarshal(original.Data[0].Model, &originalModel))
    migratedModel := make(map[string]any)
    require.NoError(t, json.Unmarshal(mirgrated.Data[0].Model, &migratedModel))

    require.Equal(t, migratedModel, originalModel)

    _, canBeOptimized = canBeInstant(original)
    require.False(t, canBeOptimized)
}

func TestMigrateMultiLokiQueryToInstant(t *testing.T) {
    original := createMultiQueryMigratableLokiRule(t)
    mirgrated := createMultiQueryMigratableLokiRule(t, func(r *models.AlertRule) {
        r.Data[0] = lokiQuery(t, "TotalRequests", "instant", "grafanacloud-logs")
        r.Data[1] = lokiQuery(t, "TotalErrors", "instant", "grafanacloud-logs")
    })

    optimizableIndices, canBeOptimized := canBeInstant(original)
    require.True(t, canBeOptimized)
    require.NoError(t, migrateToInstant(original, optimizableIndices))

    require.Equal(t, mirgrated.Data[0].QueryType, original.Data[0].QueryType)
    require.Equal(t, mirgrated.Data[1].QueryType, original.Data[1].QueryType)

    originalModel := make(map[string]any)
    require.NoError(t, json.Unmarshal(original.Data[0].Model, &originalModel))
    migratedModel := make(map[string]any)
    require.NoError(t, json.Unmarshal(mirgrated.Data[0].Model, &migratedModel))

    require.Equal(t, migratedModel, originalModel)

    originalModel = make(map[string]any)
    require.NoError(t, json.Unmarshal(original.Data[1].Model, &originalModel))
    migratedModel = make(map[string]any)
    require.NoError(t, json.Unmarshal(mirgrated.Data[1].Model, &migratedModel))

    require.Equal(t, migratedModel, originalModel)

    _, canBeOptimized = canBeInstant(original)
    require.False(t, canBeOptimized)
}

func createMigrateableLokiRule(t *testing.T, muts ...func(*models.AlertRule)) *models.AlertRule {
    t.Helper()
    r := &models.AlertRule{
        Data: []models.AlertQuery{
            lokiQuery(t, "A", "range", "grafanacloud-logs"),
            reducer(t, "B", "A", "last"),
        },
    }
    for _, m := range muts {
        m(r)
    }
    return r
}

func createMultiQueryMigratableLokiRule(t *testing.T, muts ...func(*models.AlertRule)) *models.AlertRule {
    t.Helper()
    r := &models.AlertRule{
        Data: []models.AlertQuery{
            lokiQuery(t, "TotalRequests", "range", "grafanacloud-logs"),
            lokiQuery(t, "TotalErrors", "range", "grafanacloud-logs"),
            reducer(t, "TotalRequests_Last", "TotalRequests", "last"),
            reducer(t, "TotalErrors_Last", "TotalErrors", "last"),
        },
    }
    for _, m := range muts {
        m(r)
    }
    return r
}
func lokiQuery(t *testing.T, refID, queryType, datasourceUID string) models.AlertQuery {
    t.Helper()
    return models.AlertQuery{
        RefID: refID,
        QueryType: queryType,
        DatasourceUID: datasourceUID,
        Model: []byte(fmt.Sprintf(`{
            "datasource": {
                "type": "loki",
                "uid": "%s"
            },
            "editorMode": "code",
            "expr": "1",
            "intervalMs": 1000,
            "maxDataPoints": 43200,
            "queryType": "%s",
            "refId": "%s"
        }`, datasourceUID, queryType, refID)),
    }
}

func reducer(t *testing.T, refID, exp, op string) models.AlertQuery {
    t.Helper()
    return models.AlertQuery{
        RefID: refID,
        DatasourceUID: "__expr__",
        Model: []byte(fmt.Sprintf(`{
            "conditions": [
                {
                    "evaluator": {
                        "params": [],
                        "type": "gt"
                    },
                    "operator": {
                        "type": "and"
                    },
                    "query": {
                        "params": [
                            "B"
                        ]
                    },
                    "reducer": {
                        "params": [],
                        "type": "%s"
                    },
                    "type": "query"
                }
            ],
            "datasource": {
                "type": "__expr__",
                "uid": "__expr__"
            },
            "expression": "%s",
            "hide": false,
            "intervalMs": 1000,
            "maxDataPoints": 43200,
            "reducer": "%s",
            "refId": "%s",
            "type": "reduce"
        }`, op, exp, op, refID)),
    }
}
pkg/services/ngalert/store/range_to_instant.go (new file, 132 lines)
@@ -0,0 +1,132 @@
package store

import (
    "encoding/json"
    "fmt"

    "github.com/grafana/grafana/pkg/expr"
    "github.com/grafana/grafana/pkg/services/datasources"
    "github.com/grafana/grafana/pkg/services/ngalert/models"
)

const (
    grafanaCloudProm  = "grafanacloud-prom"
    grafanaCloudUsage = "grafanacloud-usage"
)

// dsType can be used to check the datasource type if it's set in the model.
type dsType struct {
    DS struct {
        Type string `json:"type"`
    } `json:"datasource"`
    Range bool `json:"range"`
}

type optimization struct {
    // Index of the query that can be optimized
    i int
    // Type of the query that can be optimized (loki, prometheus)
    t string
}

// canBeInstant checks if any of the query nodes that are loki or prometheus range queries can be migrated to instant queries.
// If any are migratable, the corresponding optimizations are returned.
func canBeInstant(r *models.AlertRule) ([]optimization, bool) {
    if len(r.Data) < 2 {
        return nil, false
    }
    var (
        optimizableIndices []optimization
        canBeOptimized     = false
    )
    // Loop over query nodes to find all range queries.
    for i := range r.Data {
        var t dsType
        // We can ignore the error here, the query just won't be optimized.
        _ = json.Unmarshal(r.Data[i].Model, &t)

        switch t.DS.Type {
        case datasources.DS_PROMETHEUS:
            if !t.Range {
                continue
            }
        case datasources.DS_LOKI:
            if r.Data[i].QueryType != "range" {
                continue
            }
        default:
            // The default datasource is not saved in the query model, which is why we
            // check the datasource UID against the well-known Grafana Cloud datasources.
            if r.Data[i].DatasourceUID != grafanaCloudProm && r.Data[i].DatasourceUID != grafanaCloudUsage {
                continue
            }
            if !t.Range {
                continue
            }
            t.DS.Type = datasources.DS_PROMETHEUS
        }

        var validReducers bool
        // Loop over all query nodes to find the reduce node.
        for ii := range r.Data {
            // Second query part should be an expression.
            if !expr.IsDataSource(r.Data[ii].DatasourceUID) {
                continue
            }
            exprRaw := make(map[string]any)
            if err := json.Unmarshal(r.Data[ii].Model, &exprRaw); err != nil {
                continue
            }
            // Second query part should use first query part as expression.
            if ref, ok := exprRaw["expression"].(string); !ok || ref != r.Data[i].RefID {
                continue
            }
            // Second query part should be "last()"
            if val, ok := exprRaw["reducer"].(string); !ok || val != "last" {
                validReducers = false
                break
            }
            validReducers = true
        }
        // If we found a reduce node that uses last, we can add the query to the optimizations.
        if validReducers {
            canBeOptimized = true
            optimizableIndices = append(optimizableIndices, optimization{
                i: i,
                t: t.DS.Type,
            })
        }
    }
    return optimizableIndices, canBeOptimized
}

// migrateToInstant migrates the queries described by the provided optimizations from range to instant queries.
func migrateToInstant(r *models.AlertRule, optimizations []optimization) error {
    for _, opti := range optimizations {
        modelRaw := make(map[string]any)
        if err := json.Unmarshal(r.Data[opti.i].Model, &modelRaw); err != nil {
            return err
        }
        switch opti.t {
        case datasources.DS_PROMETHEUS:
            modelRaw["instant"] = true
            modelRaw["range"] = false
            model, err := json.Marshal(modelRaw)
            if err != nil {
                return err
            }
            r.Data[opti.i].Model = model
        case datasources.DS_LOKI:
            modelRaw["queryType"] = "instant"
            model, err := json.Marshal(modelRaw)
            if err != nil {
                return err
            }
            r.Data[opti.i].Model = model
            r.Data[opti.i].QueryType = "instant"
        default:
            return fmt.Errorf("optimization for datasource of type %s not possible", opti.t)
        }
    }
    return nil
}
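To make the rewrite above concrete, here is a minimal, self-contained sketch of the transformation the Prometheus branch of migrateToInstant applies to a raw query model; the input JSON is illustrative and mirrors the fixtures in the tests below. Loki models are instead rewritten by setting "queryType" to "instant" on both the model and the AlertQuery, as the Loki branch shows.

package main

import (
    "encoding/json"
    "fmt"
)

func main() {
    // Illustrative Prometheus range-query model (same shape as the test fixtures).
    model := []byte(`{
        "datasource": {"type": "prometheus", "uid": "some-external-ds"},
        "expr": "1",
        "instant": false,
        "range": true,
        "refId": "A"
    }`)

    // Decode into a generic map, flip the two flags, and re-encode, which is
    // what the Prometheus case of migrateToInstant does.
    modelRaw := make(map[string]any)
    if err := json.Unmarshal(model, &modelRaw); err != nil {
        panic(err)
    }
    modelRaw["instant"] = true
    modelRaw["range"] = false

    out, err := json.Marshal(modelRaw)
    if err != nil {
        panic(err)
    }
    fmt.Println(string(out)) // ... "instant":true ... "range":false ...
}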
pkg/services/ngalert/store/range_to_instant_test.go (new file, 426 lines)
@@ -0,0 +1,426 @@
package store

import (
    "encoding/json"
    "fmt"
    "testing"

    "github.com/stretchr/testify/require"

    "github.com/grafana/grafana/pkg/services/datasources"
    "github.com/grafana/grafana/pkg/services/ngalert/models"
)

const (
    promIsInstant    = true
    promIsNotInstant = false
    promExternalDS   = "some-external-ds"
)

func TestCanBeInstant(t *testing.T) {
    tcs := []struct {
        name                  string
        expected              bool
        expectedOptimizations []optimization
        rule                  *models.AlertRule
    }{
        {
            name: "valid loki rule that can be migrated from range to instant",
            expected: true,
            expectedOptimizations: []optimization{{i: 0, t: datasources.DS_LOKI}},
            rule: createMigrateableLokiRule(t),
        },
        {
            name: "valid prom rule that can be migrated from range to instant",
            expected: true,
            expectedOptimizations: []optimization{{i: 0, t: datasources.DS_PROMETHEUS}},
            rule: createMigratablePromRule(t),
        },
        {
            name: "valid loki rule with external loki datasource",
            expected: true,
            expectedOptimizations: []optimization{{i: 0, t: datasources.DS_LOKI}},
            rule: createMigrateableLokiRule(t, func(r *models.AlertRule) {
                r.Data[0].DatasourceUID = "something-external"
            }),
        },
        {
            name: "valid prom rule with external prometheus datasource",
            expected: true,
            expectedOptimizations: []optimization{{i: 0, t: datasources.DS_PROMETHEUS}},
            rule: createMigratablePromRule(t, func(r *models.AlertRule) {
                r.Data[0].DatasourceUID = "something-external"
            }),
        },
        {
            name: "valid prom rule with missing datasource",
            expected: true,
            expectedOptimizations: []optimization{{i: 0, t: datasources.DS_PROMETHEUS}},
            rule: createMigratablePromRuleWithDefaultDS(t),
        },
        {
            name: "valid prom rule with missing datasource and instant query",
            expected: false,
            rule: createMigratablePromRuleWithDefaultDS(t, func(r *models.AlertRule) {
                raw := make(map[string]any)
                err := json.Unmarshal(r.Data[0].Model, &raw)
                require.NoError(t, err)
                raw["range"] = false
                r.Data[0].Model, err = json.Marshal(raw)
                require.NoError(t, err)
            }),
        },
        {
            name: "valid loki multi query rule with loki datasources",
            expected: true,
            expectedOptimizations: []optimization{
                {i: 0, t: datasources.DS_LOKI},
                {i: 1, t: datasources.DS_LOKI},
            },
            rule: createMultiQueryMigratableLokiRule(t),
        },
        {
            name: "valid prom multi query rule with prom datasources",
            expected: true,
            expectedOptimizations: []optimization{
                {i: 0, t: datasources.DS_PROMETHEUS},
                {i: 1, t: datasources.DS_PROMETHEUS},
            },
            rule: createMultiQueryMigratablePromRule(t),
        },
        {
            name: "invalid rule where the data array is too short to be migratable",
            expected: false,
            rule: createMigrateableLokiRule(t, func(r *models.AlertRule) {
                r.Data = []models.AlertQuery{r.Data[0]}
            }),
        },
        {
            name: "invalid rule that is not a range query",
            expected: false,
            rule: createMigrateableLokiRule(t, func(r *models.AlertRule) {
                r.Data[0].QueryType = "something-else"
            }),
        },
        {
            name: "invalid rule that does not have last() as aggregation",
            expected: false,
            rule: createMigrateableLokiRule(t, func(r *models.AlertRule) {
                r.Data[1] = reducer(t, "B", "A", "avg")
            }),
        },
        {
            name: "invalid rule where not all reducers are last()",
            expected: false,
            rule: createMigrateableLokiRule(t, func(r *models.AlertRule) {
                r.Data = append(r.Data, reducer(t, "invalid-reducer", "A", "min"))
            }),
        },
        {
            name: "invalid rule that has no aggregation",
            expected: false,
            rule: createMigrateableLokiRule(t, func(r *models.AlertRule) {
                r.Data[1].DatasourceUID = "something-else"
            }),
        },
        {
            name: "invalid rule where last() does not point to the range query",
            expected: false,
            rule: createMigrateableLokiRule(t, func(r *models.AlertRule) {
                raw := make(map[string]any)
                err := json.Unmarshal(r.Data[1].Model, &raw)
                require.NoError(t, err)
                raw["expression"] = "C"
                r.Data[1].Model, err = json.Marshal(raw)
                require.NoError(t, err)
            }),
        },
    }
    for _, tc := range tcs {
        t.Run(tc.name, func(t *testing.T) {
            optimizations, canBe := canBeInstant(tc.rule)
            require.Equal(t, tc.expected, canBe)
            require.Equal(t, tc.expectedOptimizations, optimizations)
        })
    }
}

func TestMigrateLokiQueryToInstant(t *testing.T) {
    original := createMigrateableLokiRule(t)
    migrated := createMigrateableLokiRule(t, func(r *models.AlertRule) {
        r.Data[0] = lokiQuery(t, "A", "instant", "grafanacloud-logs")
    })

    optimizableIndices, canBeOptimized := canBeInstant(original)
    require.True(t, canBeOptimized)
    require.NoError(t, migrateToInstant(original, optimizableIndices))

    require.Equal(t, migrated.Data[0].QueryType, original.Data[0].QueryType)

    originalModel := make(map[string]any)
    require.NoError(t, json.Unmarshal(original.Data[0].Model, &originalModel))
    migratedModel := make(map[string]any)
    require.NoError(t, json.Unmarshal(migrated.Data[0].Model, &migratedModel))

    require.Equal(t, migratedModel, originalModel)

    _, canBeOptimized = canBeInstant(original)
    require.False(t, canBeOptimized)
}

func TestMigrateMultiLokiQueryToInstant(t *testing.T) {
    original := createMultiQueryMigratableLokiRule(t)
    migrated := createMultiQueryMigratableLokiRule(t, func(r *models.AlertRule) {
        r.Data[0] = lokiQuery(t, "TotalRequests", "instant", "grafanacloud-logs")
        r.Data[1] = lokiQuery(t, "TotalErrors", "instant", "grafanacloud-logs")
    })

    optimizableIndices, canBeOptimized := canBeInstant(original)
    require.True(t, canBeOptimized)
    require.NoError(t, migrateToInstant(original, optimizableIndices))

    require.Equal(t, migrated.Data[0].QueryType, original.Data[0].QueryType)
    require.Equal(t, migrated.Data[1].QueryType, original.Data[1].QueryType)

    originalModel := make(map[string]any)
    require.NoError(t, json.Unmarshal(original.Data[0].Model, &originalModel))
    migratedModel := make(map[string]any)
    require.NoError(t, json.Unmarshal(migrated.Data[0].Model, &migratedModel))

    require.Equal(t, migratedModel, originalModel)

    originalModel = make(map[string]any)
    require.NoError(t, json.Unmarshal(original.Data[1].Model, &originalModel))
    migratedModel = make(map[string]any)
    require.NoError(t, json.Unmarshal(migrated.Data[1].Model, &migratedModel))

    require.Equal(t, migratedModel, originalModel)

    _, canBeOptimized = canBeInstant(original)
    require.False(t, canBeOptimized)
}

func TestMigratePromQueryToInstant(t *testing.T) {
    original := createMigratablePromRule(t)
    migrated := createMigratablePromRule(t, func(r *models.AlertRule) {
        r.Data[0] = prometheusQuery(t, "A", promExternalDS, promIsInstant)
    })

    optimizableIndices, canBeOptimized := canBeInstant(original)
    require.True(t, canBeOptimized)
    require.NoError(t, migrateToInstant(original, optimizableIndices))

    originalModel := make(map[string]any)
    require.NoError(t, json.Unmarshal(original.Data[0].Model, &originalModel))
    migratedModel := make(map[string]any)
    require.NoError(t, json.Unmarshal(migrated.Data[0].Model, &migratedModel))

    require.Equal(t, migratedModel, originalModel)

    _, canBeOptimized = canBeInstant(original)
    require.False(t, canBeOptimized)
}

func TestMigrateMultiPromQueryToInstant(t *testing.T) {
    original := createMultiQueryMigratablePromRule(t)
    migrated := createMultiQueryMigratablePromRule(t, func(r *models.AlertRule) {
        r.Data[0] = prometheusQuery(t, "TotalRequests", promExternalDS, promIsInstant)
        r.Data[1] = prometheusQuery(t, "TotalErrors", promExternalDS, promIsInstant)
    })

    optimizableIndices, canBeOptimized := canBeInstant(original)
    require.True(t, canBeOptimized)
    require.NoError(t, migrateToInstant(original, optimizableIndices))

    originalModel := make(map[string]any)
    require.NoError(t, json.Unmarshal(original.Data[0].Model, &originalModel))
    migratedModel := make(map[string]any)
    require.NoError(t, json.Unmarshal(migrated.Data[0].Model, &migratedModel))

    require.Equal(t, migratedModel, originalModel)

    originalModel = make(map[string]any)
    require.NoError(t, json.Unmarshal(original.Data[1].Model, &originalModel))
    migratedModel = make(map[string]any)
    require.NoError(t, json.Unmarshal(migrated.Data[1].Model, &migratedModel))

    require.Equal(t, migratedModel, originalModel)

    _, canBeOptimized = canBeInstant(original)
    require.False(t, canBeOptimized)
}

func createMigrateableLokiRule(t *testing.T, muts ...func(*models.AlertRule)) *models.AlertRule {
    t.Helper()
    r := &models.AlertRule{
        Data: []models.AlertQuery{
            lokiQuery(t, "A", "range", "grafanacloud-logs"),
            reducer(t, "B", "A", "last"),
        },
    }
    for _, m := range muts {
        m(r)
    }
    return r
}

func createMultiQueryMigratableLokiRule(t *testing.T, muts ...func(*models.AlertRule)) *models.AlertRule {
    t.Helper()
    r := &models.AlertRule{
        Data: []models.AlertQuery{
            lokiQuery(t, "TotalRequests", "range", "grafanacloud-logs"),
            lokiQuery(t, "TotalErrors", "range", "grafanacloud-logs"),
            reducer(t, "TotalRequests_Last", "TotalRequests", "last"),
            reducer(t, "TotalErrors_Last", "TotalErrors", "last"),
        },
    }
    for _, m := range muts {
        m(r)
    }
    return r
}

func createMigratablePromRule(t *testing.T, muts ...func(*models.AlertRule)) *models.AlertRule {
    t.Helper()
    r := &models.AlertRule{
        Data: []models.AlertQuery{
            prometheusQuery(t, "A", promExternalDS, promIsNotInstant),
            reducer(t, "B", "A", "last"),
        },
    }
    for _, m := range muts {
        m(r)
    }
    return r
}

func createMigratablePromRuleWithDefaultDS(t *testing.T, muts ...func(*models.AlertRule)) *models.AlertRule {
    t.Helper()
    r := &models.AlertRule{
        Data: []models.AlertQuery{
            prometheusQueryWithoutDS(t, "A", grafanaCloudProm, promIsNotInstant),
            reducer(t, "B", "A", "last"),
        },
    }
    for _, m := range muts {
        m(r)
    }
    return r
}

func createMultiQueryMigratablePromRule(t *testing.T, muts ...func(*models.AlertRule)) *models.AlertRule {
    t.Helper()
    r := &models.AlertRule{
        Data: []models.AlertQuery{
            prometheusQuery(t, "TotalRequests", promExternalDS, promIsNotInstant),
            prometheusQuery(t, "TotalErrors", promExternalDS, promIsNotInstant),
            reducer(t, "TotalRequests_Last", "TotalRequests", "last"),
            reducer(t, "TotalErrors_Last", "TotalErrors", "last"),
        },
    }
    for _, m := range muts {
        m(r)
    }
    return r
}

func lokiQuery(t *testing.T, refID, queryType, datasourceUID string) models.AlertQuery {
    t.Helper()
    return models.AlertQuery{
        RefID: refID,
        QueryType: queryType,
        DatasourceUID: datasourceUID,
        Model: []byte(fmt.Sprintf(`{
            "datasource": {
                "type": "loki",
                "uid": "%s"
            },
            "editorMode": "code",
            "expr": "1",
            "intervalMs": 1000,
            "maxDataPoints": 43200,
            "queryType": "%s",
            "refId": "%s"
        }`, datasourceUID, queryType, refID)),
    }
}

func prometheusQuery(t *testing.T, refID, datasourceUID string, isInstant bool) models.AlertQuery {
    t.Helper()
    return models.AlertQuery{
        RefID: refID,
        DatasourceUID: datasourceUID,
        Model: []byte(fmt.Sprintf(`{
            "datasource": {
                "type": "prometheus",
                "uid": "%s"
            },
            "instant": %t,
            "range": %t,
            "editorMode": "code",
            "expr": "1",
            "intervalMs": 1000,
            "maxDataPoints": 43200,
            "refId": "%s"
        }`, datasourceUID, isInstant, !isInstant, refID)),
    }
}

func prometheusQueryWithoutDS(t *testing.T, refID, datasourceUID string, isInstant bool) models.AlertQuery {
    t.Helper()
    return models.AlertQuery{
        RefID: refID,
        DatasourceUID: datasourceUID,
        Model: []byte(fmt.Sprintf(`{
            "instant": %t,
            "range": %t,
            "editorMode": "code",
            "expr": "1",
            "intervalMs": 1000,
            "maxDataPoints": 43200,
            "refId": "%s"
        }`, isInstant, !isInstant, refID)),
    }
}

func reducer(t *testing.T, refID, exp, op string) models.AlertQuery {
    t.Helper()
    return models.AlertQuery{
        RefID: refID,
        DatasourceUID: "__expr__",
        Model: []byte(fmt.Sprintf(`{
            "conditions": [
                {
                    "evaluator": {
                        "params": [],
                        "type": "gt"
                    },
                    "operator": {
                        "type": "and"
                    },
                    "query": {
                        "params": [
                            "B"
                        ]
                    },
                    "reducer": {
                        "params": [],
                        "type": "%s"
                    },
                    "type": "query"
                }
            ],
            "datasource": {
                "type": "__expr__",
                "uid": "__expr__"
            },
            "expression": "%s",
            "hide": false,
            "intervalMs": 1000,
            "maxDataPoints": 43200,
            "reducer": "%s",
            "refId": "%s",
            "type": "reduce"
        }`, op, exp, op, refID)),
    }
}