feat(alerting): removed severity

Torkel Ödegaard 2016-09-13 15:09:55 +02:00
parent 6375418d8c
commit b0c7e61ef8
23 changed files with 143 additions and 238 deletions
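At a high level, this change folds alert severity into the alert state itself: the separate critical/warning severities become a single "alerting" state, "unknown" becomes "no_data", and notifiers stop filtering on severity. The sketch below is a minimal, self-contained illustration of the new notifier shape, with PassesFilter replacing MatchSeverity; the stub EvalContext and Rule types, the consoleNotifier, and main are assumptions for the example only, not code from this commit.

package main

import "fmt"

// Stub stand-ins for the real alerting.EvalContext and alerting.Rule types;
// only the interface shape matters here.
type EvalContext struct{ Firing bool }
type Rule struct{ State string }

// Notifier as it looks after this commit: the severity-based
// MatchSeverity(result models.AlertSeverityType) bool hook is replaced by a
// filter that receives the whole rule.
type Notifier interface {
    Notify(alertResult *EvalContext)
    GetType() string
    NeedsImage() bool
    PassesFilter(rule *Rule) bool
}

// consoleNotifier is a toy implementation used only to show the new shape.
type consoleNotifier struct{}

func (consoleNotifier) Notify(ctx *EvalContext)   { fmt.Println("notify, firing:", ctx.Firing) }
func (consoleNotifier) GetType() string           { return "console" }
func (consoleNotifier) NeedsImage() bool          { return false }
func (consoleNotifier) PassesFilter(r *Rule) bool { return true } // the base notifier in this commit also lets everything through

func main() {
    var n Notifier = consoleNotifier{}
    if n.PassesFilter(&Rule{State: "alerting"}) {
        n.Notify(&EvalContext{Firing: true})
    }
}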

View File

@@ -49,7 +49,6 @@ func GetAlerts(c *middleware.Context) Response {
Name: alert.Name,
Message: alert.Message,
State: alert.State,
Severity: alert.Severity,
EvalDate: alert.EvalDate,
NewStateDate: alert.NewStateDate,
ExecutionError: alert.ExecutionError,
@@ -219,7 +218,6 @@ func NotificationTest(c *middleware.Context, dto dtos.NotificationTestCommand) R
cmd := &alerting.NotificationTestCommand{
Name: dto.Name,
Type: dto.Type,
Severity: dto.Severity,
Settings: dto.Settings,
}

View File

@@ -14,7 +14,6 @@ type AlertRule struct {
Name string `json:"name"`
Message string `json:"message"`
State m.AlertStateType `json:"state"`
Severity m.AlertSeverityType `json:"severity"`
NewStateDate time.Time `json:"newStateDate"`
EvalDate time.Time `json:"evalDate"`
ExecutionError string `json:"executionError"`
@@ -58,5 +57,4 @@ type NotificationTestCommand struct {
Name string `json:"name"`
Type string `json:"type"`
Settings *simplejson.Json `json:"settings"`
Severity string `json:"severity"`
}

View File

@@ -36,12 +36,11 @@ var (
M_Api_Dashboard_Snapshot_External Counter
M_Api_Dashboard_Snapshot_Get Counter
M_Models_Dashboard_Insert Counter
M_Alerting_Result_State_Critical Counter
M_Alerting_Result_State_Warning Counter
M_Alerting_Result_State_Alerting Counter
M_Alerting_Result_State_Ok Counter
M_Alerting_Result_State_Paused Counter
M_Alerting_Result_State_Unknown Counter
M_Alerting_Result_State_ExecutionError Counter
M_Alerting_Result_State_NoData Counter
M_Alerting_Result_State_ExecError Counter
M_Alerting_Active_Alerts Counter
M_Alerting_Notification_Sent_Slack Counter
M_Alerting_Notification_Sent_Email Counter
@@ -92,12 +91,11 @@ func initMetricVars(settings *MetricSettings) {
M_Models_Dashboard_Insert = RegCounter("models.dashboard.insert")
M_Alerting_Result_State_Critical = RegCounter("alerting.result", "state", "critical")
M_Alerting_Result_State_Warning = RegCounter("alerting.result", "state", "warning")
M_Alerting_Result_State_Alerting = RegCounter("alerting.result", "state", "alerting")
M_Alerting_Result_State_Ok = RegCounter("alerting.result", "state", "ok")
M_Alerting_Result_State_Paused = RegCounter("alerting.result", "state", "paused")
M_Alerting_Result_State_Unknown = RegCounter("alerting.result", "state", "unknown")
M_Alerting_Result_State_ExecutionError = RegCounter("alerting.result", "state", "execution_error")
M_Alerting_Result_State_NoData = RegCounter("alerting.result", "state", "no_data")
M_Alerting_Result_State_ExecError = RegCounter("alerting.result", "state", "exec_error")
M_Alerting_Active_Alerts = RegCounter("alerting.active_alerts")
M_Alerting_Notification_Sent_Slack = RegCounter("alerting.notifications_sent", "type", "slack")

View File

@@ -10,27 +10,15 @@ type AlertStateType string
type AlertSeverityType string
const (
AlertStateUnknown AlertStateType = "unknown"
AlertStateExeuctionError AlertStateType = "execution_error"
AlertStateNoData AlertStateType = "no_data"
AlertStateExecError AlertStateType = "execution_error"
AlertStatePaused AlertStateType = "paused"
AlertStateCritical AlertStateType = "critical"
AlertStateWarning AlertStateType = "warning"
AlertStateAlerting AlertStateType = "alerting"
AlertStateOK AlertStateType = "ok"
)
func (s AlertStateType) IsValid() bool {
return s == AlertStateOK || s == AlertStateUnknown || s == AlertStateExeuctionError || s == AlertStatePaused || s == AlertStateCritical || s == AlertStateWarning
}
const (
AlertSeverityCritical AlertSeverityType = "critical"
AlertSeverityWarning AlertSeverityType = "warning"
AlertSeverityInfo AlertSeverityType = "info"
AlertSeverityOK AlertSeverityType = "ok"
)
func (s AlertSeverityType) IsValid() bool {
return s == AlertSeverityCritical || s == AlertSeverityInfo || s == AlertSeverityWarning
return s == AlertStateOK || s == AlertStateNoData || s == AlertStateExecError || s == AlertStatePaused
}
type Alert struct {
@@ -41,7 +29,7 @@ type Alert struct {
PanelId int64
Name string
Message string
Severity AlertSeverityType
Severity string
State AlertStateType
Handler int64
Silenced bool
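For reference, the consolidated state type that remains after this commit looks roughly as follows; constant names and the IsValid body mirror the hunk above, while the package main wrapper and the demo in main are illustrative only. Note that the Alert database model above keeps its Severity field, but now as a plain string rather than AlertSeverityType.

package main

import "fmt"

// AlertStateType and its constants mirror the new code in the hunk above;
// the severity-flavoured states (critical, warning, unknown) are gone and
// firing rules use a single "alerting" state.
type AlertStateType string

const (
    AlertStateNoData    AlertStateType = "no_data"
    AlertStateExecError AlertStateType = "execution_error"
    AlertStatePaused    AlertStateType = "paused"
    AlertStateAlerting  AlertStateType = "alerting"
    AlertStateOK        AlertStateType = "ok"
)

// IsValid matches the new implementation shown above; AlertStateAlerting is
// absent from the list there as well.
func (s AlertStateType) IsValid() bool {
    return s == AlertStateOK || s == AlertStateNoData ||
        s == AlertStateExecError || s == AlertStatePaused
}

func main() {
    fmt.Println(AlertStateOK.IsValid())               // true
    fmt.Println(AlertStateType("critical").IsValid()) // false: severity values are no longer states
}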

View File

@@ -43,25 +43,20 @@ func (c *EvalContext) GetStateModel() *StateDescription {
Color: "#36a64f",
Text: "OK",
}
case m.AlertStateUnknown:
case m.AlertStateNoData:
return &StateDescription{
Color: "#888888",
Text: "UNKNOWN",
Text: "No Data",
}
case m.AlertStateExeuctionError:
case m.AlertStateExecError:
return &StateDescription{
Color: "#000",
Text: "EXECUTION_ERROR",
Text: "Execution Error",
}
case m.AlertStateWarning:
return &StateDescription{
Color: "#fd821b",
Text: "WARNING",
}
case m.AlertStateCritical:
case m.AlertStateAlerting:
return &StateDescription{
Color: "#D63232",
Text: "CRITICAL",
Text: "Alerting",
}
default:
panic("Unknown rule state " + c.Rule.State)

View File

@@ -88,14 +88,9 @@ func (e *DashAlertExtractor) GetAlerts() ([]*m.Alert, error) {
Name: jsonAlert.Get("name").MustString(),
Handler: jsonAlert.Get("handler").MustInt64(),
Message: jsonAlert.Get("message").MustString(),
Severity: m.AlertSeverityType(jsonAlert.Get("severity").MustString()),
Frequency: getTimeDurationStringToSeconds(jsonAlert.Get("frequency").MustString()),
}
if !alert.Severity.IsValid() {
return nil, ValidationError{Reason: "Invalid alert Severity"}
}
for _, condition := range jsonAlert.Get("conditions").MustArray() {
jsonCondition := simplejson.NewFromAny(condition)

View File

@@ -2,8 +2,6 @@ package alerting
import (
"time"
"github.com/grafana/grafana/pkg/models"
)
type EvalHandler interface {
@@ -19,7 +17,7 @@ type Notifier interface {
Notify(alertResult *EvalContext)
GetType() string
NeedsImage() bool
MatchSeverity(result models.AlertSeverityType) bool
PassesFilter(rule *Rule) bool
}
type Condition interface {

View File

@@ -28,7 +28,7 @@ func (n *RootNotifier) NeedsImage() bool {
return false
}
func (n *RootNotifier) MatchSeverity(result m.AlertSeverityType) bool {
func (n *RootNotifier) PassesFilter(rule *Rule) bool {
return false
}
@@ -130,7 +130,7 @@ func shouldUseNotification(notifier Notifier, context *EvalContext) bool {
return true
}
return notifier.MatchSeverity(context.Rule.Severity)
return notifier.PassesFilter(context.Rule)
}
type NotifierFactory func(notification *m.AlertNotification) (Notifier, error)
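Only the tail of shouldUseNotification is visible in the hunk above; the sketch below reconstructs the overall decision from those lines and from the expectations in notifier_test.go below. Treat the two early returns as an approximation under those assumptions, not the commit's exact code; the stub types and the alwaysOff notifier are illustrative only.

package main

import (
    "errors"
    "fmt"
)

// Toy stand-ins; the real types live in pkg/services/alerting.
type Rule struct{ State string }
type EvalContext struct {
    Firing bool
    Error  error
    Rule   *Rule
}
type Notifier interface{ PassesFilter(rule *Rule) bool }

// alwaysOff rejects everything, like FakeNotifier with FakeMatchResult: false
// in the test file below.
type alwaysOff struct{}

func (alwaysOff) PassesFilter(*Rule) bool { return false }

// shouldUseNotification, reconstructed: non-firing (recovery) notifications
// and execution errors are assumed to always go out, and only ordinary firing
// alerts are passed through the notifier's rule-based filter, which replaces
// the old MatchSeverity check.
func shouldUseNotification(notifier Notifier, context *EvalContext) bool {
    if !context.Firing {
        return true
    }
    if context.Error != nil {
        return true
    }
    return notifier.PassesFilter(context.Rule)
}

func main() {
    rule := &Rule{State: "alerting"}
    fmt.Println(shouldUseNotification(alwaysOff{}, &EvalContext{Firing: false, Rule: rule}))                           // true
    fmt.Println(shouldUseNotification(alwaysOff{}, &EvalContext{Firing: true, Error: errors.New("boom"), Rule: rule})) // true
    fmt.Println(shouldUseNotification(alwaysOff{}, &EvalContext{Firing: true, Rule: rule}))                            // false
}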

View File

@@ -6,6 +6,7 @@ import (
"fmt"
"github.com/grafana/grafana/pkg/models"
m "github.com/grafana/grafana/pkg/models"
. "github.com/smartystreets/goconvey/convey"
)
@@ -23,7 +24,7 @@ func (fn *FakeNotifier) NeedsImage() bool {
func (fn *FakeNotifier) Notify(alertResult *EvalContext) {}
func (fn *FakeNotifier) MatchSeverity(result models.AlertSeverityType) bool {
func (fn *FakeNotifier) PassesFilter(rule *Rule) bool {
return fn.FakeMatchResult
}
@@ -34,7 +35,7 @@ func TestAlertNotificationExtraction(t *testing.T) {
ctx := &EvalContext{
Firing: false,
Rule: &Rule{
Severity: models.AlertSeverityCritical,
State: m.AlertStateAlerting,
},
}
notifier := &FakeNotifier{FakeMatchResult: false}
@@ -42,12 +43,12 @@ func TestAlertNotificationExtraction(t *testing.T) {
So(shouldUseNotification(notifier, ctx), ShouldBeTrue)
})
Convey("exeuction error cannot be ignored", func() {
Convey("execution error cannot be ignored", func() {
ctx := &EvalContext{
Firing: true,
Error: fmt.Errorf("I used to be a programmer just like you"),
Rule: &Rule{
Severity: models.AlertSeverityCritical,
State: m.AlertStateOK,
},
}
notifier := &FakeNotifier{FakeMatchResult: false}
@@ -59,7 +60,7 @@ func TestAlertNotificationExtraction(t *testing.T) {
ctx := &EvalContext{
Firing: true,
Rule: &Rule{
Severity: models.AlertSeverityCritical,
State: models.AlertStateAlerting,
},
}
notifier := &FakeNotifier{FakeMatchResult: true}
@@ -70,9 +71,7 @@ func TestAlertNotificationExtraction(t *testing.T) {
Convey("firing alert that dont match", func() {
ctx := &EvalContext{
Firing: true,
Rule: &Rule{
Severity: models.AlertSeverityCritical,
},
Rule: &Rule{State: m.AlertStateOK},
}
notifier := &FakeNotifier{FakeMatchResult: false}

View File

@@ -2,35 +2,23 @@ package notifiers
import (
"github.com/grafana/grafana/pkg/components/simplejson"
"github.com/grafana/grafana/pkg/models"
"github.com/grafana/grafana/pkg/services/alerting"
)
type NotifierBase struct {
Name string
Type string
SeverityFilter models.AlertSeverityType
}
func NewNotifierBase(name, notifierType string, model *simplejson.Json) NotifierBase {
base := NotifierBase{Name: name, Type: notifierType}
severityFilter := models.AlertSeverityType(model.Get("severityFilter").MustString(""))
if severityFilter == models.AlertSeverityCritical || severityFilter == models.AlertSeverityWarning {
base.SeverityFilter = severityFilter
}
return base
}
func (n *NotifierBase) MatchSeverity(result models.AlertSeverityType) bool {
if !n.SeverityFilter.IsValid() {
func (n *NotifierBase) PassesFilter(rule *alerting.Rule) bool {
return true
}
return n.SeverityFilter == result
}
func (n *NotifierBase) GetType() string {
return n.Type
}
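Condensed, the notifier base after this commit keeps only the name and type, and its PassesFilter accepts every rule: the severityFilter parsing is gone and no state-based replacement is wired up yet (the commented-out test in base_test.go below hints at a future "states" setting). The snippet is a stand-alone restatement; the stub Rule type and main are illustrative only.

package main

import "fmt"

// Stub for alerting.Rule; only the filter call matters in this sketch.
type Rule struct{ State string }

// NotifierBase after this commit: the SeverityFilter field and the
// severityFilter parsing in the constructor are gone.
type NotifierBase struct {
    Name string
    Type string
}

func (n *NotifierBase) GetType() string { return n.Type }

// PassesFilter replaces MatchSeverity and, for now, lets every rule through.
func (n *NotifierBase) PassesFilter(rule *Rule) bool { return true }

func main() {
    base := &NotifierBase{Name: "ops", Type: "email"}
    fmt.Println(base.GetType(), base.PassesFilter(&Rule{State: "ok"})) // email true
}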

View File

@@ -3,34 +3,32 @@ package notifiers
import (
"testing"
"github.com/grafana/grafana/pkg/components/simplejson"
m "github.com/grafana/grafana/pkg/models"
. "github.com/smartystreets/goconvey/convey"
)
func TestBaseNotifier(t *testing.T) {
Convey("Parsing base notification severity", t, func() {
Convey("matches", func() {
json := `
{
"severityFilter": "critical"
}`
settingsJSON, _ := simplejson.NewJson([]byte(json))
not := NewNotifierBase("ops", "email", settingsJSON)
So(not.MatchSeverity(m.AlertSeverityCritical), ShouldBeTrue)
})
Convey("does not match", func() {
json := `
{
"severityFilter": "critical"
}`
settingsJSON, _ := simplejson.NewJson([]byte(json))
not := NewNotifierBase("ops", "email", settingsJSON)
So(not.MatchSeverity(m.AlertSeverityWarning), ShouldBeFalse)
})
})
// Convey("Parsing base notification state", t, func() {
//
// Convey("matches", func() {
// json := `
// {
// "states": "critical"
// }`
//
// settingsJSON, _ := simplejson.NewJson([]byte(json))
// not := NewNotifierBase("ops", "email", settingsJSON)
// So(not.MatchSeverity(m.AlertSeverityCritical), ShouldBeTrue)
// })
//
// Convey("does not match", func() {
// json := `
// {
// "severityFilter": "critical"
// }`
//
// settingsJSON, _ := simplejson.NewJson([]byte(json))
// not := NewNotifierBase("ops", "email", settingsJSON)
// So(not.MatchSeverity(m.AlertSeverityWarning), ShouldBeFalse)
// })
// })
}

View File

@@ -50,8 +50,7 @@ func (this *EmailNotifier) Notify(context *alerting.EvalContext) {
"Title": context.GetNotificationTitle(),
"State": context.Rule.State,
"Name": context.Rule.Name,
"Severity": context.Rule.Severity,
"SeverityColor": context.GetStateModel().Color,
"StateModel": context.GetStateModel(),
"Message": context.Rule.Message,
"RuleUrl": ruleUrl,
"ImageLink": context.ImagePublicUrl,

View File

@@ -45,7 +45,6 @@ func (this *WebhookNotifier) Notify(context *alerting.EvalContext) {
bodyJSON.Set("ruleId", context.Rule.Id)
bodyJSON.Set("ruleName", context.Rule.Name)
bodyJSON.Set("state", context.Rule.State)
bodyJSON.Set("severity", context.Rule.Severity)
bodyJSON.Set("evalMatches", context.EvalMatches)
ruleUrl, err := context.GetRuleUrl()

View File

@@ -34,11 +34,11 @@ func (handler *DefaultResultHandler) Handle(ctx *EvalContext) {
annotationData := simplejson.New()
if ctx.Error != nil {
handler.log.Error("Alert Rule Result Error", "ruleId", ctx.Rule.Id, "error", ctx.Error)
ctx.Rule.State = m.AlertStateExeuctionError
ctx.Rule.State = m.AlertStateExecError
exeuctionError = ctx.Error.Error()
annotationData.Set("errorMessage", exeuctionError)
} else if ctx.Firing {
ctx.Rule.State = m.AlertStateType(ctx.Rule.Severity)
ctx.Rule.State = m.AlertStateAlerting
annotationData = simplejson.NewFromAny(ctx.EvalMatches)
} else {
// handle no data case
@@ -90,17 +90,15 @@ func (handler *DefaultResultHandler) Handle(ctx *EvalContext) {
func countStateResult(state m.AlertStateType) {
switch state {
case m.AlertStateCritical:
metrics.M_Alerting_Result_State_Critical.Inc(1)
case m.AlertStateWarning:
metrics.M_Alerting_Result_State_Warning.Inc(1)
case m.AlertStateAlerting:
metrics.M_Alerting_Result_State_Alerting.Inc(1)
case m.AlertStateOK:
metrics.M_Alerting_Result_State_Ok.Inc(1)
case m.AlertStatePaused:
metrics.M_Alerting_Result_State_Paused.Inc(1)
case m.AlertStateUnknown:
metrics.M_Alerting_Result_State_Unknown.Inc(1)
case m.AlertStateExeuctionError:
metrics.M_Alerting_Result_State_ExecutionError.Inc(1)
case m.AlertStateNoData:
metrics.M_Alerting_Result_State_NoData.Inc(1)
case m.AlertStateExecError:
metrics.M_Alerting_Result_State_ExecError.Inc(1)
}
}
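The state transition in the result handler simplifies accordingly: an evaluation error forces execution_error, and a firing rule now goes straight to alerting instead of inheriting the rule's severity. The sketch below is a stand-alone condensation of that branch; the non-firing branch is truncated in the hunk above, so the fallback to OK here is a placeholder, not the commit's actual no-data handling.

package main

import (
    "errors"
    "fmt"
)

type AlertStateType string

const (
    AlertStateExecError AlertStateType = "execution_error"
    AlertStateAlerting  AlertStateType = "alerting"
    AlertStateOK        AlertStateType = "ok"
)

// nextState condenses the branch shown in DefaultResultHandler.Handle above.
func nextState(evalErr error, firing bool) AlertStateType {
    if evalErr != nil {
        return AlertStateExecError // previously AlertStateExeuctionError
    }
    if firing {
        return AlertStateAlerting // previously AlertStateType(rule.Severity)
    }
    // Placeholder: the handler's real no-data/OK handling sits outside the hunk.
    return AlertStateOK
}

func main() {
    fmt.Println(nextState(errors.New("boom"), false)) // execution_error
    fmt.Println(nextState(nil, true))                 // alerting
    fmt.Println(nextState(nil, false))                // ok (placeholder branch)
}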

View File

@@ -20,7 +20,6 @@ type Rule struct {
Message string
NoDataState m.AlertStateType
State m.AlertStateType
Severity m.AlertSeverityType
Conditions []Condition
Notifications []int64
}
@@ -66,9 +65,8 @@ func NewRuleFromDBAlert(ruleDef *m.Alert) (*Rule, error) {
model.Name = ruleDef.Name
model.Message = ruleDef.Message
model.Frequency = ruleDef.Frequency
model.Severity = ruleDef.Severity
model.State = ruleDef.State
model.NoDataState = m.AlertStateType(ruleDef.Settings.Get("noDataState").MustString("unknown"))
model.NoDataState = m.AlertStateType(ruleDef.Settings.Get("noDataState").MustString("no_data"))
for _, v := range ruleDef.Settings.Get("notifications").MustArray() {
jsonModel := simplejson.NewFromAny(v)

View File

@@ -4,11 +4,11 @@ import (
"github.com/grafana/grafana/pkg/bus"
"github.com/grafana/grafana/pkg/components/simplejson"
"github.com/grafana/grafana/pkg/log"
"github.com/grafana/grafana/pkg/models"
m "github.com/grafana/grafana/pkg/models"
)
type NotificationTestCommand struct {
Severity string
State m.AlertStateType
Name string
Type string
Settings *simplejson.Json
@@ -22,7 +22,7 @@ func init() {
func handleNotificationTestCommand(cmd *NotificationTestCommand) error {
notifier := NewRootNotifier()
model := &models.AlertNotification{
model := &m.AlertNotification{
Name: cmd.Name,
Type: cmd.Type,
Settings: cmd.Settings,
@@ -35,23 +35,12 @@ func handleNotificationTestCommand(cmd *NotificationTestCommand) error {
return err
}
severity := models.AlertSeverityType(cmd.Severity)
notifier.sendNotifications([]Notifier{notifiers}, createTestEvalContext(severity))
notifier.sendNotifications([]Notifier{notifiers}, createTestEvalContext(cmd.State))
return nil
}
func createTestEvalContext(severity models.AlertSeverityType) *EvalContext {
state := models.AlertStateOK
firing := false
if severity == models.AlertSeverityCritical {
state = models.AlertStateCritical
firing = true
}
if severity == models.AlertSeverityWarning {
state = models.AlertStateWarning
firing = true
}
func createTestEvalContext(state m.AlertStateType) *EvalContext {
testRule := &Rule{
DashboardId: 1,
@@ -59,23 +48,22 @@ func createTestEvalContext(severity models.AlertSeverityType) *EvalContext {
Name: "Test notification",
Message: "Someone is testing the alert notification within grafana.",
State: state,
Severity: severity,
}
ctx := NewEvalContext(testRule)
ctx.ImagePublicUrl = "http://grafana.org/assets/img/blog/mixed_styles.png"
ctx.IsTestRun = true
ctx.Firing = firing
ctx.Firing = state == m.AlertStateAlerting
ctx.Error = nil
ctx.EvalMatches = evalMatchesBasedOnSeverity(severity)
ctx.EvalMatches = evalMatchesBasedOnState(state)
return ctx
}
func evalMatchesBasedOnSeverity(severity models.AlertSeverityType) []*EvalMatch {
func evalMatchesBasedOnState(state m.AlertStateType) []*EvalMatch {
matches := make([]*EvalMatch, 0)
if severity == models.AlertSeverityOK {
if state == m.AlertStateOK {
return matches
}
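The notification test path follows the same pattern: callers now pass an alert state directly, and whether the test context is firing is derived from that state rather than from a critical/warning severity mapping. A minimal, stand-alone illustration of that derivation (names mirror the hunk above; the wrapper and main are illustrative only):

package main

import "fmt"

type AlertStateType string

const (
    AlertStateAlerting AlertStateType = "alerting"
    AlertStateOK       AlertStateType = "ok"
)

// firing mirrors the new createTestEvalContext above, where
// ctx.Firing = state == m.AlertStateAlerting.
func firing(state AlertStateType) bool {
    return state == AlertStateAlerting
}

func main() {
    fmt.Println(firing(AlertStateAlerting)) // true
    fmt.Println(firing(AlertStateOK))       // false
}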

View File

@@ -159,7 +159,7 @@ func upsertAlerts(existingAlerts []*m.Alert, cmd *m.SaveAlertsCommand, sess *xor
} else {
alert.Updated = time.Now()
alert.Created = time.Now()
alert.State = m.AlertStateUnknown
alert.State = m.AlertStateNoData
alert.NewStateDate = time.Now()
_, err := sess.Insert(alert)

View File

@@ -47,7 +47,7 @@ func TestAlertingDataAccess(t *testing.T) {
So(err2, ShouldBeNil)
So(alert.Name, ShouldEqual, "Alerting title")
So(alert.Message, ShouldEqual, "Alerting message")
So(alert.State, ShouldEqual, "unknown")
So(alert.State, ShouldEqual, "no_data")
So(alert.Frequency, ShouldEqual, 1)
})
@@ -77,7 +77,7 @@ func TestAlertingDataAccess(t *testing.T) {
So(query.Result[0].Name, ShouldEqual, "Name")
Convey("Alert state should not be updated", func() {
So(query.Result[0].State, ShouldEqual, "unknown")
So(query.Result[0].State, ShouldEqual, "no_data")
})
})

View File

@@ -38,9 +38,8 @@ var reducerTypes = [
var noDataModes = [
{text: 'OK', value: 'ok'},
{text: 'Critical', value: 'critical'},
{text: 'Warning', value: 'warning'},
{text: 'Unknown', value: 'unknown'},
{text: 'Alerting', value: 'alerting'},
{text: 'No Data', value: 'no_data'},
];
function createReducerPart(model) {
@@ -48,10 +47,6 @@ function createReducerPart(model) {
return new QueryPart(model, def);
}
var severityLevels = {
'critical': {text: 'Critical', iconClass: 'icon-gf icon-gf-critical', stateClass: 'alert-state-critical'},
'warning': {text: 'Warning', iconClass: 'icon-gf icon-gf-warning', stateClass: 'alert-state-warning'},
};
function getStateDisplayModel(state) {
switch (state) {
@@ -62,23 +57,16 @@ function getStateDisplayModel(state) {
stateClass: 'alert-state-ok'
};
}
case 'critical': {
case 'alerting': {
return {
text: 'CRITICAL',
text: 'ALERTING',
iconClass: 'icon-gf icon-gf-critical',
stateClass: 'alert-state-critical'
};
}
case 'warning': {
case 'no_data': {
return {
text: 'WARNING',
iconClass: 'icon-gf icon-gf-warning',
stateClass: 'alert-state-warning'
};
}
case 'unknown': {
return {
text: 'UNKNOWN',
text: 'NO DATA',
iconClass: "fa fa-question",
stateClass: 'alert-state-warning'
};
@@ -106,7 +94,6 @@ export default {
getStateDisplayModel: getStateDisplayModel,
conditionTypes: conditionTypes,
evalFunctions: evalFunctions,
severityLevels: severityLevels,
noDataModes: noDataModes,
reducerTypes: reducerTypes,
createReducerPart: createReducerPart,

View File

@@ -13,9 +13,8 @@ export class AlertListCtrl {
stateFilters = [
{text: 'All', value: null},
{text: 'OK', value: 'ok'},
{text: 'Unknown', value: 'unknown'},
{text: 'Warning', value: 'warning'},
{text: 'Critical', value: 'critical'},
{text: 'Alerting', value: 'alerting'},
{text: 'No Data', value: 'no_data'},
{text: 'Execution Error', value: 'execution_error'},
];

View File

@@ -17,7 +17,6 @@ export class AlertTabCtrl {
alert: any;
conditionModels: any;
evalFunctions: any;
severityLevels: any;
noDataModes: any;
addNotificationSegment;
notifications;
@@ -41,7 +40,6 @@ export class AlertTabCtrl {
this.subTabIndex = 0;
this.evalFunctions = alertDef.evalFunctions;
this.conditionTypes = alertDef.conditionTypes;
this.severityLevels = alertDef.severityLevels;
this.noDataModes = alertDef.noDataModes;
this.appSubUrl = config.appSubUrl;
}
@@ -155,8 +153,7 @@ export class AlertTabCtrl {
alert.conditions.push(this.buildDefaultCondition());
}
alert.noDataState = alert.noDataState || 'unknown';
alert.severity = alert.severity || 'critical';
alert.noDataState = alert.noDataState || 'no_data';
alert.frequency = alert.frequency || '60s';
alert.handler = alert.handler || 1;
alert.notifications = alert.notifications || [];
@@ -321,11 +318,6 @@ export class AlertTabCtrl {
this.panelCtrl.render();
}
severityChanged() {
ThresholdMapper.alertToGraphThresholds(this.panel);
this.panelCtrl.render();
}
evaluatorTypeChanged(evaluator) {
// ensure params array is correct length
switch (evaluator.type) {

View File

@@ -29,18 +29,8 @@
<div class="gf-form">
<span class="gf-form-label width-8">Name</span>
<input type="text" class="gf-form-input width-25" ng-model="ctrl.alert.name">
</div>
<div class="gf-form-inline">
<div class="gf-form">
<span class="gf-form-label width-8">Evaluate every</span>
<input class="gf-form-input max-width-7" type="text" ng-model="ctrl.alert.frequency"></input>
</div>
<div class="gf-form">
<span class="gf-form-label">Severity</span>
<div class="gf-form-select-wrapper width-13">
<select class="gf-form-input" ng-model="ctrl.alert.severity" ng-options="key as value.text for (key, value) in ctrl.severityLevels" ng-change="ctrl.severityChanged()">
</select>
</div>
<input class="gf-form-input max-width-5" type="text" ng-model="ctrl.alert.frequency"></input>
</div>
</div>
</div>

View File

@@ -62,7 +62,7 @@ export class ThresholdMapper {
for (var t of panel.thresholds) {
t.fill = true;
t.line = true;
t.colorMode = panel.alert.severity;
t.colorMode = 'critical';
}
var updated = true;