Alerting: Add recording rules to ruler API and validation (#87779)

* Read path, main API

* Define record field for incoming requests (illustrated in the payload sketch after the commit metadata below)

* Refactor several alerting specific validators into two paths

* Refactor validateCondition to actually contain all the condition validation logic

* Move condition validation inside rule path

* Validators for recording rules

* Wire feature flag through to validators

* Test for accepting a valid recording rule

* Tests for negative case, no UID

* Test for ignoring alerting fields

* Build conditions based on recording rules as well

* Regenerate swagger docs

* Fix CRUD test to cover the right thing

* Re-generate swagger docs with backdated v0.30.2 version

* Regenerate base spec

* Regenerate ngalert specs

* Regenerate top level specs

* Comment and rename

* Return struct instead of modifying ref
Alexander Weaver 2024-05-21 14:39:28 -05:00 committed by GitHub
parent fa319f36fb
commit 49c8deb1ea
12 changed files with 399 additions and 75 deletions
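
Before the per-file diffs, a minimal illustration of the wire shape this commit adds: the new record object on the ruler API's Grafana-managed rule payload carries only metric (the series name to write) and from (the ref ID of the query whose result is recorded). The sketch below uses stand-in structs that mirror the fields touched by this commit; the real types are PostableGrafanaRule and Record in pkg/services/ngalert/api/tooling/definitions, and fields other than title and record are omitted for brevity.

package main

import (
    "encoding/json"
    "fmt"
)

// Stand-in mirrors of the API models touched by this commit.
type Record struct {
    Metric string `json:"metric"` // name the recorded series is written under
    From   string `json:"from"`   // ref ID of the query/expression to record
}

type PostableGrafanaRule struct {
    Title  string  `json:"title"`
    Record *Record `json:"record,omitempty"` // nil for ordinary alerting rules
    // Condition, Data, no_data_state, etc. omitted for brevity.
}

func main() {
    rule := PostableGrafanaRule{
        Title:  "sample recording rule",
        Record: &Record{Metric: "some_metric", From: "A"},
    }
    out, _ := json.MarshalIndent(rule, "", "  ")
    fmt.Println(string(out))
}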

View File

@ -530,6 +530,7 @@ func toGettableExtendedRuleNode(r ngmodels.AlertRule, provenanceRecords map[stri
Provenance: apimodels.Provenance(provenance),
IsPaused: r.IsPaused,
NotificationSettings: AlertRuleNotificationSettingsFromNotificationSettings(r.NotificationSettings),
Record: ApiRecordFromModelRecord(r.Record),
},
}
forDuration := model.Duration(r.For)

View File

@ -12,6 +12,7 @@ import (
ngmodels "github.com/grafana/grafana/pkg/services/ngalert/models"
"github.com/grafana/grafana/pkg/services/ngalert/store"
"github.com/grafana/grafana/pkg/setting"
prommodels "github.com/prometheus/common/model"
)
type RuleLimits struct {
@ -48,6 +49,7 @@ func validateRuleNode(
return nil, fmt.Errorf("not Grafana managed alert rule")
}
isRecordingRule := ruleNode.GrafanaManagedAlert.Record != nil
// if UID is specified then we can accept partial model. Therefore, some validation can be skipped as it will be patched later
canPatch := ruleNode.GrafanaManagedAlert.UID != ""
@ -59,46 +61,6 @@ func validateRuleNode(
return nil, fmt.Errorf("alert rule title is too long. Max length is %d", store.AlertRuleMaxTitleLength)
}
noDataState := ngmodels.NoData
if ruleNode.GrafanaManagedAlert.NoDataState == "" && canPatch {
noDataState = ""
}
if ruleNode.GrafanaManagedAlert.NoDataState != "" {
noDataState, err = ngmodels.NoDataStateFromString(string(ruleNode.GrafanaManagedAlert.NoDataState))
if err != nil {
return nil, err
}
}
errorState := ngmodels.AlertingErrState
if ruleNode.GrafanaManagedAlert.ExecErrState == "" && canPatch {
errorState = ""
}
if ruleNode.GrafanaManagedAlert.ExecErrState != "" {
errorState, err = ngmodels.ErrStateFromString(string(ruleNode.GrafanaManagedAlert.ExecErrState))
if err != nil {
return nil, err
}
}
if len(ruleNode.GrafanaManagedAlert.Data) == 0 {
if canPatch {
if ruleNode.GrafanaManagedAlert.Condition != "" {
return nil, fmt.Errorf("%w: query is not specified by condition is. You must specify both query and condition to update existing alert rule", ngmodels.ErrAlertRuleFailedValidation)
}
} else {
return nil, fmt.Errorf("%w: no queries or expressions are found", ngmodels.ErrAlertRuleFailedValidation)
}
} else {
err = validateCondition(ruleNode.GrafanaManagedAlert.Condition, ruleNode.GrafanaManagedAlert.Data)
if err != nil {
return nil, fmt.Errorf("%w: %s", ngmodels.ErrAlertRuleFailedValidation, err.Error())
}
}
queries := AlertQueriesFromApiAlertQueries(ruleNode.GrafanaManagedAlert.Data)
newAlertRule := ngmodels.AlertRule{
@ -110,21 +72,13 @@ func validateRuleNode(
IntervalSeconds: intervalSeconds,
NamespaceUID: namespaceUID,
RuleGroup: groupName,
NoDataState: noDataState,
ExecErrState: errorState,
// Recording Rule fields will be implemented in the future.
// For now, no rules can be recording rules. So, we force these to be empty.
Record: nil,
}
if ruleNode.GrafanaManagedAlert.NotificationSettings != nil {
newAlertRule.NotificationSettings, err = validateNotificationSettings(ruleNode.GrafanaManagedAlert.NotificationSettings)
if err != nil {
return nil, err
}
if isRecordingRule {
newAlertRule, err = validateRecordingRuleFields(ruleNode, newAlertRule, limits, canPatch)
} else {
newAlertRule, err = validateAlertingRuleFields(ruleNode, newAlertRule, canPatch)
}
newAlertRule.For, err = validateForInterval(ruleNode)
if err != nil {
return nil, err
}
@ -145,6 +99,89 @@ func validateRuleNode(
return &newAlertRule, nil
}
// validateAlertingRuleFields validates only the fields on a rule that are specific to Alerting rules.
// it will load fields that pass validation onto newRule and return the result.
func validateAlertingRuleFields(in *apimodels.PostableExtendedRuleNode, newRule ngmodels.AlertRule, canPatch bool) (ngmodels.AlertRule, error) {
var err error
if in.GrafanaManagedAlert.Record != nil {
return ngmodels.AlertRule{}, fmt.Errorf("%w: rule cannot be simultaneously an alerting and recording rule", ngmodels.ErrAlertRuleFailedValidation)
}
noDataState := ngmodels.NoData
if in.GrafanaManagedAlert.NoDataState == "" && canPatch {
noDataState = ""
}
if in.GrafanaManagedAlert.NoDataState != "" {
noDataState, err = ngmodels.NoDataStateFromString(string(in.GrafanaManagedAlert.NoDataState))
if err != nil {
return ngmodels.AlertRule{}, err
}
}
newRule.NoDataState = noDataState
errorState := ngmodels.AlertingErrState
if in.GrafanaManagedAlert.ExecErrState == "" && canPatch {
errorState = ""
}
if in.GrafanaManagedAlert.ExecErrState != "" {
errorState, err = ngmodels.ErrStateFromString(string(in.GrafanaManagedAlert.ExecErrState))
if err != nil {
return ngmodels.AlertRule{}, err
}
}
newRule.ExecErrState = errorState
err = validateCondition(in.GrafanaManagedAlert.Condition, in.GrafanaManagedAlert.Data, canPatch)
if err != nil {
return ngmodels.AlertRule{}, err
}
if in.GrafanaManagedAlert.NotificationSettings != nil {
newRule.NotificationSettings, err = validateNotificationSettings(in.GrafanaManagedAlert.NotificationSettings)
if err != nil {
return ngmodels.AlertRule{}, err
}
}
newRule.For, err = validateForInterval(in)
if err != nil {
return ngmodels.AlertRule{}, err
}
return newRule, nil
}
// validateRecordingRuleFields validates only the fields on a rule that are specific to Recording rules.
// it will load fields that pass validation onto newRule and return the result.
func validateRecordingRuleFields(in *apimodels.PostableExtendedRuleNode, newRule ngmodels.AlertRule, limits RuleLimits, canPatch bool) (ngmodels.AlertRule, error) {
if !limits.RecordingRulesAllowed {
return ngmodels.AlertRule{}, fmt.Errorf("%w: recording rules cannot be created on this instance", ngmodels.ErrAlertRuleFailedValidation)
}
err := validateCondition(in.GrafanaManagedAlert.Record.From, in.GrafanaManagedAlert.Data, canPatch)
if err != nil {
return ngmodels.AlertRule{}, fmt.Errorf("%w: %s", ngmodels.ErrAlertRuleFailedValidation, err.Error())
}
metricName := prommodels.LabelValue(in.GrafanaManagedAlert.Record.Metric)
if !metricName.IsValid() {
return ngmodels.AlertRule{}, fmt.Errorf("%w: %s", ngmodels.ErrAlertRuleFailedValidation, "metric name for recording rule must be a valid utf8 string")
}
if !prommodels.IsValidMetricName(metricName) {
return ngmodels.AlertRule{}, fmt.Errorf("%w: %s", ngmodels.ErrAlertRuleFailedValidation, "metric name for recording rule must be a valid Prometheus metric name")
}
newRule.Record = ModelRecordFromApiRecord(in.GrafanaManagedAlert.Record)
newRule.NoDataState = ""
newRule.ExecErrState = ""
newRule.Condition = ""
newRule.For = 0
newRule.NotificationSettings = nil
return newRule, nil
}
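
As context for the two metric-name checks above, here is a self-contained sketch using only github.com/prometheus/common/model (the prommodels import added in this file). The helper name checkMetricName is illustrative, not part of the commit.

package main

import (
    "fmt"

    prommodels "github.com/prometheus/common/model"
)

// checkMetricName mirrors the order of checks in validateRecordingRuleFields:
// the value must be valid UTF-8, then a well-formed Prometheus metric name.
func checkMetricName(name string) error {
    v := prommodels.LabelValue(name)
    if !v.IsValid() {
        return fmt.Errorf("%q is not a valid UTF-8 string", name)
    }
    if !prommodels.IsValidMetricName(v) {
        return fmt.Errorf("%q is not a valid Prometheus metric name", name)
    }
    return nil
}

func main() {
    fmt.Println(checkMetricName("some_metric")) // <nil>
    fmt.Println(checkMetricName(""))            // rejected: empty name is not a valid metric name
}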
func validateLabels(l map[string]string) error {
for key := range l {
if _, ok := ngmodels.LabelsUserCannotSpecify[key]; ok {
@ -154,20 +191,34 @@ func validateLabels(l map[string]string) error {
return nil
}
func validateCondition(condition string, queries []apimodels.AlertQuery) error {
func validateCondition(condition string, queries []apimodels.AlertQuery, canPatch bool) error {
if canPatch {
// Patch requests may leave both query and condition blank. If a request supplies one, it must supply the other.
if len(queries) == 0 && condition == "" {
return nil
}
if len(queries) == 0 && condition != "" {
return fmt.Errorf("%w: query is not specified but condition is. You must specify both query and condition to update existing alert rule", ngmodels.ErrAlertRuleFailedValidation)
}
if len(queries) > 0 && condition == "" {
return fmt.Errorf("%w: condition is not specified but query is. You must specify both query and condition to update existing alert rule", ngmodels.ErrAlertRuleFailedValidation)
}
}
if condition == "" {
return errors.New("condition cannot be empty")
return fmt.Errorf("%w: condition cannot be empty", ngmodels.ErrAlertRuleFailedValidation)
}
if len(queries) == 0 {
return errors.New("no query/expressions specified")
return fmt.Errorf("%w: no queries or expressions are found", ngmodels.ErrAlertRuleFailedValidation)
}
refIDs := make(map[string]int, len(queries))
for idx, query := range queries {
if query.RefID == "" {
return fmt.Errorf("refID is not specified for data query/expression at index %d", idx)
return fmt.Errorf("%w: refID is not specified for data query/expression at index %d", ngmodels.ErrAlertRuleFailedValidation, idx)
}
if usedIdx, ok := refIDs[query.RefID]; ok {
return fmt.Errorf("refID '%s' is already used by query/expression at index %d", query.RefID, usedIdx)
return fmt.Errorf("%w: refID '%s' is already used by query/expression at index %d", ngmodels.ErrAlertRuleFailedValidation, query.RefID, usedIdx)
}
refIDs[query.RefID] = idx
}
@ -177,7 +228,7 @@ func validateCondition(condition string, queries []apimodels.AlertQuery) error {
ids = append(ids, id)
}
sort.Strings(ids)
return fmt.Errorf("condition %s does not exist, must be one of [%s]", condition, strings.Join(ids, ","))
return fmt.Errorf("%w: condition %s does not exist, must be one of [%s]", ngmodels.ErrAlertRuleFailedValidation, condition, strings.Join(ids, ","))
}
return nil
}
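
A condensed, standalone sketch of the patch-aware behavior introduced above: on a patch (UID supplied), omitting both query and condition is allowed, supplying only one is rejected, and on a full create both are required. Error wrapping with ErrAlertRuleFailedValidation and the duplicate-refID check are dropped here for brevity, and checkCondition is an illustrative stand-in for the real validateCondition.

package main

import (
    "errors"
    "fmt"
)

type alertQuery struct{ RefID string }

func checkCondition(condition string, queries []alertQuery, canPatch bool) error {
    if canPatch {
        if len(queries) == 0 && condition == "" {
            return nil // blank patch keeps the existing query and condition
        }
        if len(queries) == 0 || condition == "" {
            return errors.New("query and condition must be updated together")
        }
    }
    if condition == "" {
        return errors.New("condition cannot be empty")
    }
    if len(queries) == 0 {
        return errors.New("no queries or expressions are found")
    }
    for _, q := range queries {
        if q.RefID == condition {
            return nil
        }
    }
    return fmt.Errorf("condition %s does not exist", condition)
}

func main() {
    fmt.Println(checkCondition("", nil, true))                          // <nil>: patch leaves both untouched
    fmt.Println(checkCondition("A", nil, true))                         // error: condition without query
    fmt.Println(checkCondition("A", []alertQuery{{RefID: "A"}}, false)) // <nil>: full rule with matching refID
}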

View File

@ -48,6 +48,11 @@ func makeLimits(cfg *setting.UnifiedAlertingSettings) RuleLimits {
return RuleLimitsFromConfig(cfg, baseToggles)
}
func allowRecording(lim RuleLimits) *RuleLimits {
lim.RecordingRulesAllowed = true
return &lim
}
func validRule() apimodels.PostableExtendedRuleNode {
forDuration := model.Duration(rand.Int63n(1000))
uid := util.GenerateShortUID()
@ -125,7 +130,7 @@ func TestValidateCondition(t *testing.T) {
name: "error when data is empty",
condition: "A",
data: []apimodels.AlertQuery{},
errorMsg: "no query/expressions specified",
errorMsg: "no queries or expressions are found",
},
{
name: "error when condition does not exist",
@ -182,7 +187,7 @@ func TestValidateCondition(t *testing.T) {
for _, tc := range testcases {
t.Run(tc.name, func(t *testing.T) {
err := validateCondition(tc.condition, tc.data)
err := validateCondition(tc.condition, tc.data, false)
if tc.errorMsg == "" {
require.NoError(t, err)
} else {
@ -321,6 +326,7 @@ func TestValidateRuleNode_NoUID(t *testing.T) {
testCases := []struct {
name string
rule func() *apimodels.PostableExtendedRuleNode
limits *RuleLimits
assert func(t *testing.T, model *apimodels.PostableExtendedRuleNode, rule *models.AlertRule)
}{
{
@ -403,14 +409,79 @@ func TestValidateRuleNode_NoUID(t *testing.T) {
require.Equal(t, int64(panelId), *alert.PanelID)
},
},
{
name: "accepts and converts recording rule when toggle is enabled",
limits: allowRecording(limits),
rule: func() *apimodels.PostableExtendedRuleNode {
r := validRule()
r.GrafanaManagedAlert.Record = &apimodels.Record{Metric: "some_metric", From: "A"}
r.GrafanaManagedAlert.Condition = ""
r.GrafanaManagedAlert.NoDataState = ""
r.GrafanaManagedAlert.ExecErrState = ""
r.GrafanaManagedAlert.NotificationSettings = nil
r.ApiRuleNode.For = nil
return &r
},
assert: func(t *testing.T, api *apimodels.PostableExtendedRuleNode, alert *models.AlertRule) {
// Shared fields
require.Equal(t, int64(0), alert.ID)
require.Equal(t, orgId, alert.OrgID)
require.Equal(t, api.GrafanaManagedAlert.Title, alert.Title)
require.Equal(t, AlertQueriesFromApiAlertQueries(api.GrafanaManagedAlert.Data), alert.Data)
require.Equal(t, time.Time{}, alert.Updated)
require.Equal(t, int64(interval.Seconds()), alert.IntervalSeconds)
require.Equal(t, int64(0), alert.Version)
require.Equal(t, api.GrafanaManagedAlert.UID, alert.UID)
require.Equal(t, folder.UID, alert.NamespaceUID)
require.Nil(t, alert.DashboardUID)
require.Nil(t, alert.PanelID)
require.Equal(t, name, alert.RuleGroup)
require.Equal(t, api.ApiRuleNode.Annotations, alert.Annotations)
require.Equal(t, api.ApiRuleNode.Labels, alert.Labels)
// Alerting fields
require.Empty(t, alert.Condition)
require.Empty(t, alert.NoDataState)
require.Empty(t, alert.ExecErrState)
require.Nil(t, alert.NotificationSettings)
require.Zero(t, alert.For)
// Recording fields
require.Equal(t, api.GrafanaManagedAlert.Record.From, alert.Record.From)
require.Equal(t, api.GrafanaManagedAlert.Record.Metric, alert.Record.Metric)
},
},
{
name: "recording rules ignore fields that only make sense for Alerting rules",
limits: allowRecording(limits),
rule: func() *apimodels.PostableExtendedRuleNode {
r := validRule()
r.GrafanaManagedAlert.Record = &apimodels.Record{Metric: "some_metric", From: "A"}
r.GrafanaManagedAlert.Condition = "A"
r.GrafanaManagedAlert.NoDataState = apimodels.OK
r.GrafanaManagedAlert.ExecErrState = apimodels.AlertingErrState
r.GrafanaManagedAlert.NotificationSettings = &apimodels.AlertRuleNotificationSettings{}
r.ApiRuleNode.For = func() *model.Duration { five := model.Duration(time.Second * 5); return &five }()
return &r
},
assert: func(t *testing.T, api *apimodels.PostableExtendedRuleNode, alert *models.AlertRule) {
require.Empty(t, alert.Condition)
require.Empty(t, alert.NoDataState)
require.Empty(t, alert.ExecErrState)
require.Nil(t, alert.NotificationSettings)
require.Zero(t, alert.For)
},
},
}
for _, testCase := range testCases {
t.Run(testCase.name, func(t *testing.T) {
r := testCase.rule()
r.GrafanaManagedAlert.UID = ""
lim := limits
if testCase.limits != nil {
lim = *testCase.limits
}
alert, err := validateRuleNode(r, name, interval, orgId, folder.UID, limits)
alert, err := validateRuleNode(r, name, interval, orgId, folder.UID, lim)
require.NoError(t, err)
testCase.assert(t, r, alert)
})
@ -434,6 +505,8 @@ func TestValidateRuleNodeFailures_NoUID(t *testing.T) {
name string
interval *time.Duration
rule func() *apimodels.PostableExtendedRuleNode
limits *RuleLimits
expErr string
assert func(t *testing.T, model *apimodels.PostableExtendedRuleNode, err error)
allowedIfNoUId bool
}{
@ -558,6 +631,65 @@ func TestValidateRuleNodeFailures_NoUID(t *testing.T) {
return &r
},
},
{
name: "rejects valid recording rules if toggle is disabled",
rule: func() *apimodels.PostableExtendedRuleNode {
r := validRule()
r.GrafanaManagedAlert.Record = &apimodels.Record{Metric: "some_metric", From: "A"}
r.GrafanaManagedAlert.Condition = ""
r.GrafanaManagedAlert.NoDataState = ""
r.GrafanaManagedAlert.ExecErrState = ""
r.GrafanaManagedAlert.NotificationSettings = nil
r.ApiRuleNode.For = nil
return &r
},
expErr: "recording rules cannot be created",
},
{
name: "rejects recording rule with invalid metric name",
limits: allowRecording(limits),
rule: func() *apimodels.PostableExtendedRuleNode {
r := validRule()
r.GrafanaManagedAlert.Record = &apimodels.Record{Metric: "", From: "A"}
r.GrafanaManagedAlert.Condition = ""
r.GrafanaManagedAlert.NoDataState = ""
r.GrafanaManagedAlert.ExecErrState = ""
r.GrafanaManagedAlert.NotificationSettings = nil
r.ApiRuleNode.For = nil
return &r
},
expErr: "must be a valid Prometheus metric name",
},
{
name: "rejects recording rule with empty from",
limits: allowRecording(limits),
rule: func() *apimodels.PostableExtendedRuleNode {
r := validRule()
r.GrafanaManagedAlert.Record = &apimodels.Record{Metric: "my_metric", From: ""}
r.GrafanaManagedAlert.Condition = ""
r.GrafanaManagedAlert.NoDataState = ""
r.GrafanaManagedAlert.ExecErrState = ""
r.GrafanaManagedAlert.NotificationSettings = nil
r.ApiRuleNode.For = nil
return &r
},
expErr: "cannot be empty",
},
{
name: "rejects recording rule with from not matching",
limits: allowRecording(limits),
rule: func() *apimodels.PostableExtendedRuleNode {
r := validRule()
r.GrafanaManagedAlert.Record = &apimodels.Record{Metric: "my_metric", From: "NOTEXIST"}
r.GrafanaManagedAlert.Condition = ""
r.GrafanaManagedAlert.NoDataState = ""
r.GrafanaManagedAlert.ExecErrState = ""
r.GrafanaManagedAlert.NotificationSettings = nil
r.ApiRuleNode.For = nil
return &r
},
expErr: "NOTEXIST does not exist",
},
}
for _, testCase := range testCases {
@ -572,8 +704,16 @@ func TestValidateRuleNodeFailures_NoUID(t *testing.T) {
interval = *testCase.interval
}
_, err := validateRuleNode(r, "", interval, orgId, folder.UID, limits)
lim := limits
if testCase.limits != nil {
lim = *testCase.limits
}
_, err := validateRuleNode(r, "", interval, orgId, folder.UID, lim)
require.Error(t, err)
if testCase.expErr != "" {
require.ErrorContains(t, err, testCase.expErr)
}
if testCase.assert != nil {
testCase.assert(t, r, err)
}

View File

@ -455,3 +455,23 @@ func NotificationSettingsFromAlertRuleNotificationSettings(ns *definitions.Alert
},
}
}
func ApiRecordFromModelRecord(r *models.Record) *definitions.Record {
if r == nil {
return nil
}
return &definitions.Record{
Metric: r.Metric,
From: r.From,
}
}
func ModelRecordFromApiRecord(r *definitions.Record) *models.Record {
if r == nil {
return nil
}
return &models.Record{
Metric: r.Metric,
From: r.From,
}
}
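
The nil guards in both converters are what let call sites such as toGettableExtendedRuleNode (first hunk above) pass rule.Record through unconditionally: alerting rules simply round-trip a nil record. A small stand-in sketch of that behavior, using local mirror types rather than the real models/definitions packages:

package main

import "fmt"

type modelRecord struct{ Metric, From string } // stand-in for models.Record
type apiRecord struct{ Metric, From string }   // stand-in for definitions.Record

func apiRecordFromModelRecord(r *modelRecord) *apiRecord {
    if r == nil {
        return nil // alerting rules have no record; nothing to convert
    }
    return &apiRecord{Metric: r.Metric, From: r.From}
}

func main() {
    var alerting *modelRecord // ordinary alerting rule: no record
    recording := &modelRecord{Metric: "some_metric", From: "A"}

    fmt.Println(apiRecordFromModelRecord(alerting))   // <nil>
    fmt.Println(*apiRecordFromModelRecord(recording)) // {some_metric A}
}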

View File

@ -1573,6 +1573,9 @@
"provenance": {
"$ref": "#/definitions/Provenance"
},
"record": {
"$ref": "#/definitions/Record"
},
"rule_group": {
"type": "string"
},
@ -2730,6 +2733,9 @@
"notification_settings": {
"$ref": "#/definitions/AlertRuleNotificationSettings"
},
"record": {
"$ref": "#/definitions/Record"
},
"title": {
"type": "string"
},
@ -3247,6 +3253,18 @@
"title": "ReceiverExport is the provisioned file export of alerting.ReceiverV1.",
"type": "object"
},
"Record": {
"properties": {
"from": {
"type": "string"
},
"metric": {
"type": "string"
}
},
"title": "Record defines how data produced by a recording rule is written.",
"type": "object"
},
"RelativeTimeRange": {
"description": "RelativeTimeRange is the per query start and end time\nfor requests.",
"properties": {
@ -4417,6 +4435,7 @@
"type": "object"
},
"alertGroup": {
"description": "AlertGroup alert group",
"properties": {
"alerts": {
"description": "alerts",
@ -4545,7 +4564,6 @@
"type": "object"
},
"gettableAlert": {
"description": "GettableAlert gettable alert",
"properties": {
"annotations": {
"$ref": "#/definitions/labelSet"
@ -4601,6 +4619,7 @@
"type": "object"
},
"gettableAlerts": {
"description": "GettableAlerts gettable alerts",
"items": {
"$ref": "#/definitions/gettableAlert"
},
@ -4655,13 +4674,13 @@
"type": "object"
},
"gettableSilences": {
"description": "GettableSilences gettable silences",
"items": {
"$ref": "#/definitions/gettableSilence"
},
"type": "array"
},
"integration": {
"description": "Integration integration",
"properties": {
"lastNotifyAttempt": {
"description": "A timestamp indicating the last attempt to deliver a notification regardless of the outcome.\nFormat: date-time",
@ -4805,7 +4824,6 @@
"type": "array"
},
"postableSilence": {
"description": "PostableSilence postable silence",
"properties": {
"comment": {
"description": "comment",
@ -4843,6 +4861,7 @@
"type": "object"
},
"receiver": {
"description": "Receiver receiver",
"properties": {
"active": {
"description": "active",

View File

@ -487,6 +487,7 @@ type PostableGrafanaRule struct {
ExecErrState ExecutionErrorState `json:"exec_err_state" yaml:"exec_err_state"`
IsPaused *bool `json:"is_paused" yaml:"is_paused"`
NotificationSettings *AlertRuleNotificationSettings `json:"notification_settings" yaml:"notification_settings"`
Record *Record `json:"record" yaml:"record"`
}
// swagger:model
@ -507,6 +508,7 @@ type GettableGrafanaRule struct {
Provenance Provenance `json:"provenance,omitempty" yaml:"provenance,omitempty"`
IsPaused bool `json:"is_paused" yaml:"is_paused"`
NotificationSettings *AlertRuleNotificationSettings `json:"notification_settings,omitempty" yaml:"notification_settings,omitempty"`
Record *Record `json:"record,omitempty" yaml:"record,omitempty"`
}
// AlertQuery represents a single query associated with an alert definition.
@ -576,6 +578,12 @@ func (d *Duration) UnmarshalYAML(unmarshal func(any) error) error {
}
}
// Record defines how data produced by a recording rule is written.
type Record struct {
Metric string `json:"metric" yaml:"metric"`
From string `json:"from" yaml:"from"`
}
// swagger:model
type UpdateRuleGroupResponse struct {
Message string `json:"message"`

View File

@ -1573,6 +1573,9 @@
"provenance": {
"$ref": "#/definitions/Provenance"
},
"record": {
"$ref": "#/definitions/Record"
},
"rule_group": {
"type": "string"
},
@ -2730,6 +2733,9 @@
"notification_settings": {
"$ref": "#/definitions/AlertRuleNotificationSettings"
},
"record": {
"$ref": "#/definitions/Record"
},
"title": {
"type": "string"
},
@ -3247,6 +3253,18 @@
"title": "ReceiverExport is the provisioned file export of alerting.ReceiverV1.",
"type": "object"
},
"Record": {
"properties": {
"from": {
"type": "string"
},
"metric": {
"type": "string"
}
},
"title": "Record defines how data produced by a recording rule is written.",
"type": "object"
},
"RelativeTimeRange": {
"description": "RelativeTimeRange is the per query start and end time\nfor requests.",
"properties": {
@ -4418,6 +4436,7 @@
"type": "object"
},
"alertGroup": {
"description": "AlertGroup alert group",
"properties": {
"alerts": {
"description": "alerts",
@ -4441,7 +4460,6 @@
"type": "object"
},
"alertGroups": {
"description": "AlertGroups alert groups",
"items": {
"$ref": "#/definitions/alertGroup"
},
@ -4602,12 +4620,14 @@
"type": "object"
},
"gettableAlerts": {
"description": "GettableAlerts gettable alerts",
"items": {
"$ref": "#/definitions/gettableAlert"
},
"type": "array"
},
"gettableSilence": {
"description": "GettableSilence gettable silence",
"properties": {
"comment": {
"description": "comment",

View File

@ -5088,6 +5088,9 @@
"provenance": {
"$ref": "#/definitions/Provenance"
},
"record": {
"$ref": "#/definitions/Record"
},
"rule_group": {
"type": "string"
},
@ -6246,6 +6249,9 @@
"notification_settings": {
"$ref": "#/definitions/AlertRuleNotificationSettings"
},
"record": {
"$ref": "#/definitions/Record"
},
"title": {
"type": "string"
},
@ -6762,6 +6768,18 @@
}
}
},
"Record": {
"type": "object",
"title": "Record defines how data produced by a recording rule is written.",
"properties": {
"from": {
"type": "string"
},
"metric": {
"type": "string"
}
}
},
"RelativeTimeRange": {
"description": "RelativeTimeRange is the per query start and end time\nfor requests.",
"type": "object",
@ -7933,6 +7951,7 @@
}
},
"alertGroup": {
"description": "AlertGroup alert group",
"type": "object",
"required": [
"alerts",
@ -7957,7 +7976,6 @@
"$ref": "#/definitions/alertGroup"
},
"alertGroups": {
"description": "AlertGroups alert groups",
"type": "array",
"items": {
"$ref": "#/definitions/alertGroup"
@ -8120,6 +8138,7 @@
"$ref": "#/definitions/gettableAlert"
},
"gettableAlerts": {
"description": "GettableAlerts gettable alerts",
"type": "array",
"items": {
"$ref": "#/definitions/gettableAlert"
@ -8127,6 +8146,7 @@
"$ref": "#/definitions/gettableAlerts"
},
"gettableSilence": {
"description": "GettableSilence gettable silence",
"type": "object",
"required": [
"comment",

View File

@ -337,6 +337,12 @@ func (alertRule *AlertRule) GetLabels(opts ...LabelOption) map[string]string {
}
func (alertRule *AlertRule) GetEvalCondition() Condition {
if alertRule.IsRecordingRule() {
return Condition{
Condition: alertRule.Record.From,
Data: alertRule.Data,
}
}
return Condition{
Condition: alertRule.Condition,
Data: alertRule.Data,
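
In effect, the scheduler-facing condition for a recording rule is the ref ID named in Record.From, while alerting rules keep using Condition. A stand-in sketch of that selection follows; the real types live in pkg/services/ngalert/models, and evalConditionRef is an illustrative name for the branch added to GetEvalCondition above.

package main

import "fmt"

type record struct{ Metric, From string }

type alertRule struct {
    Condition string  // ref ID evaluated for alerting rules
    Record    *record // non-nil marks a recording rule
}

// evalConditionRef mirrors the branch added to GetEvalCondition above.
func (r alertRule) evalConditionRef() string {
    if r.Record != nil { // IsRecordingRule()
        return r.Record.From
    }
    return r.Condition
}

func main() {
    alerting := alertRule{Condition: "B"}
    recording := alertRule{Record: &record{Metric: "some_metric", From: "A"}}
    fmt.Println(alerting.evalConditionRef())  // B
    fmt.Println(recording.evalConditionRef()) // A
}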

View File

@ -908,8 +908,9 @@ func TestIntegrationAlertRuleCRUD(t *testing.T) {
Annotations: map[string]string{"annotation1": "val1"},
},
GrafanaManagedAlert: &apimodels.PostableGrafanaRule{
Title: "AlwaysFiring",
Data: []apimodels.AlertQuery{},
Title: "AlwaysFiring",
Condition: "A",
Data: []apimodels.AlertQuery{},
},
},
expectedMessage: "invalid rule specification at index [0]: invalid alert rule: no queries or expressions are found",

View File

@ -15319,6 +15319,9 @@
"provenance": {
"$ref": "#/definitions/Provenance"
},
"record": {
"$ref": "#/definitions/Record"
},
"rule_group": {
"type": "string"
},
@ -17529,6 +17532,9 @@
"notification_settings": {
"$ref": "#/definitions/AlertRuleNotificationSettings"
},
"record": {
"$ref": "#/definitions/Record"
},
"title": {
"type": "string"
},
@ -18337,6 +18343,18 @@
}
}
},
"Record": {
"type": "object",
"title": "Record defines how data produced by a recording rule is written.",
"properties": {
"from": {
"type": "string"
},
"metric": {
"type": "string"
}
}
},
"RecordingRuleJSON": {
"description": "RecordingRuleJSON is the external representation of a recording rule",
"type": "object",
@ -21288,6 +21306,7 @@
}
},
"alertGroup": {
"description": "AlertGroup alert group",
"type": "object",
"required": [
"alerts",
@ -21444,7 +21463,6 @@
}
},
"gettableAlert": {
"description": "GettableAlert gettable alert",
"type": "object",
"required": [
"labels",
@ -21500,6 +21518,7 @@
}
},
"gettableAlerts": {
"description": "GettableAlerts gettable alerts",
"type": "array",
"items": {
"$ref": "#/definitions/gettableAlert"
@ -21554,13 +21573,13 @@
}
},
"gettableSilences": {
"description": "GettableSilences gettable silences",
"type": "array",
"items": {
"$ref": "#/definitions/gettableSilence"
}
},
"integration": {
"description": "Integration integration",
"type": "object",
"required": [
"name",
@ -21704,7 +21723,6 @@
}
},
"postableSilence": {
"description": "PostableSilence postable silence",
"type": "object",
"required": [
"comment",
@ -21770,6 +21788,7 @@
}
},
"receiver": {
"description": "Receiver receiver",
"type": "object",
"required": [
"active",

View File

@ -5839,6 +5839,9 @@
"provenance": {
"$ref": "#/components/schemas/Provenance"
},
"record": {
"$ref": "#/components/schemas/Record"
},
"rule_group": {
"type": "string"
},
@ -8049,6 +8052,9 @@
"notification_settings": {
"$ref": "#/components/schemas/AlertRuleNotificationSettings"
},
"record": {
"$ref": "#/components/schemas/Record"
},
"title": {
"type": "string"
},
@ -8858,6 +8864,18 @@
"title": "ReceiverExport is the provisioned file export of alerting.ReceiverV1.",
"type": "object"
},
"Record": {
"properties": {
"from": {
"type": "string"
},
"metric": {
"type": "string"
}
},
"title": "Record defines how data produced by a recording rule is written.",
"type": "object"
},
"RecordingRuleJSON": {
"description": "RecordingRuleJSON is the external representation of a recording rule",
"properties": {
@ -11808,6 +11826,7 @@
"type": "object"
},
"alertGroup": {
"description": "AlertGroup alert group",
"properties": {
"alerts": {
"description": "alerts",
@ -11964,7 +11983,6 @@
"type": "object"
},
"gettableAlert": {
"description": "GettableAlert gettable alert",
"properties": {
"annotations": {
"$ref": "#/components/schemas/labelSet"
@ -12020,6 +12038,7 @@
"type": "object"
},
"gettableAlerts": {
"description": "GettableAlerts gettable alerts",
"items": {
"$ref": "#/components/schemas/gettableAlert"
},
@ -12074,13 +12093,13 @@
"type": "object"
},
"gettableSilences": {
"description": "GettableSilences gettable silences",
"items": {
"$ref": "#/components/schemas/gettableSilence"
},
"type": "array"
},
"integration": {
"description": "Integration integration",
"properties": {
"lastNotifyAttempt": {
"description": "A timestamp indicating the last attempt to deliver a notification regardless of the outcome.\nFormat: date-time",
@ -12224,7 +12243,6 @@
"type": "array"
},
"postableSilence": {
"description": "PostableSilence postable silence",
"properties": {
"comment": {
"description": "comment",
@ -12290,6 +12308,7 @@
"type": "object"
},
"receiver": {
"description": "Receiver receiver",
"properties": {
"active": {
"description": "active",