feat(alerting): more refactoring work in backend code
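In short: the job-centric AlertResult gives way to a rule-centric AlertResultContext, each AlertCondition now writes its outcome into that shared context, and the handler executes an AlertRule directly instead of an AlertJob, while the old per-series evaluation path (executeQuery/evaluateRule) and the notifier code are commented out for now. A minimal sketch of the new shape, using only types and signatures that appear in the hunks below (field layout abbreviated, nothing beyond what the diff introduces):

	// Sketch assembled from the hunks below; not additional API.
	type AlertResultContext struct {
		Triggered   bool
		Details     []*AlertResultDetail
		Error       error
		Description string
		StartTime   time.Time
		EndTime     time.Time
		Rule        *AlertRule
	}

	// Conditions write their outcome into the shared context.
	type AlertCondition interface {
		Eval(result *AlertResultContext)
	}

	// The handler evaluates a rule and reports on the context channel.
	type AlertingHandler interface {
		Execute(rule *AlertRule, resultChan chan *AlertResultContext)
	}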
@@ -13,6 +13,7 @@ type Alert struct {
 	PanelId int64
 	Name string
 	Description string
+	Severity string
 	State string
 	Handler int64
 	Enabled bool
@@ -6,31 +6,11 @@ import (
 	"strconv"
 
 	"github.com/grafana/grafana/pkg/components/simplejson"
-	"github.com/grafana/grafana/pkg/services/alerting/transformers"
 
 	m "github.com/grafana/grafana/pkg/models"
 )
 
 type AlertRule struct {
-	Id int64
-	OrgId int64
-	DashboardId int64
-	PanelId int64
-	Frequency int64
-	Name string
-	Description string
-	State string
-	Warning Level
-	Critical Level
-	Query AlertQuery
-	Transform string
-	TransformParams simplejson.Json
-	Transformer transformers.Transformer
-
-	NotificationGroups []int64
-}
-
-type AlertRule2 struct {
 	Id int64
 	OrgId int64
 	DashboardId int64
@@ -38,7 +18,7 @@ type AlertRule2 struct {
 	Frequency int64
 	Name string
 	Description string
-	State string
+	Severity string
 	Conditions []AlertCondition
 	Notifications []int64
 }
@@ -68,17 +48,13 @@ func getTimeDurationStringToSeconds(str string) int64 {
 }
 
 func NewAlertRuleFromDBModel(ruleDef *m.Alert) (*AlertRule, error) {
-	return nil, nil
-}
-
-func NewAlertRuleFromDBModel2(ruleDef *m.Alert) (*AlertRule2, error) {
-	model := &AlertRule2{}
+	model := &AlertRule{}
 	model.Id = ruleDef.Id
 	model.OrgId = ruleDef.OrgId
 	model.Name = ruleDef.Name
 	model.Description = ruleDef.Description
-	model.State = ruleDef.State
 	model.Frequency = ruleDef.Frequency
+	model.Severity = ruleDef.Severity
 
 	for _, v := range ruleDef.Settings.Get("notifications").MustArray() {
 		if id, ok := v.(int64); ok {
@@ -66,7 +66,7 @@ func TestAlertRuleModel(t *testing.T) {
 		Settings: alertJSON,
 	}
 
-	alertRule, err := NewAlertRuleFromDBModel2(alert)
+	alertRule, err := NewAlertRuleFromDBModel(alert)
 	So(err, ShouldBeNil)
 
 	So(alertRule.Conditions, ShouldHaveLength, 1)
@@ -13,7 +13,7 @@ type QueryCondition struct {
 	Evaluator AlertEvaluator
 }
 
-func (c *QueryCondition) Eval() {
+func (c *QueryCondition) Eval(context *AlertResultContext) {
 }
 
 func NewQueryCondition(model *simplejson.Json) (*QueryCondition, error) {
@@ -6,12 +6,11 @@ import (
 
 	"github.com/benbjohnson/clock"
 	"github.com/grafana/grafana/pkg/log"
-	"github.com/grafana/grafana/pkg/services/alerting/alertstates"
 )
 
 type Engine struct {
 	execQueue chan *AlertJob
-	resultQueue chan *AlertResult
+	resultQueue chan *AlertResultContext
 	clock clock.Clock
 	ticker *Ticker
 	scheduler Scheduler
@@ -26,7 +25,7 @@ func NewEngine() *Engine {
 	e := &Engine{
 		ticker: NewTicker(time.Now(), time.Second*0, clock.New()),
 		execQueue: make(chan *AlertJob, 1000),
-		resultQueue: make(chan *AlertResult, 1000),
+		resultQueue: make(chan *AlertResultContext, 1000),
 		scheduler: NewScheduler(),
 		handler: NewHandler(),
 		ruleReader: NewRuleReader(),
@@ -91,15 +90,14 @@ func (e *Engine) execDispatch() {
 func (e *Engine) executeJob(job *AlertJob) {
 	startTime := time.Now()
 
-	resultChan := make(chan *AlertResult, 1)
-	go e.handler.Execute(job, resultChan)
+	resultChan := make(chan *AlertResultContext, 1)
+	go e.handler.Execute(job.Rule, resultChan)
 
 	select {
 	case <-time.After(e.alertJobTimeout):
-		e.resultQueue <- &AlertResult{
-			State: alertstates.Pending,
+		e.resultQueue <- &AlertResultContext{
 			Error: fmt.Errorf("Timeout"),
-			AlertJob: job,
+			Rule: job.Rule,
 			StartTime: startTime,
 			EndTime: time.Now(),
 		}
@@ -110,6 +108,8 @@ func (e *Engine) executeJob(job *AlertJob) {
 		e.log.Debug("Job Execution done", "timeTakenMs", duration, "ruleId", job.Rule.Id)
 		e.resultQueue <- result
 	}
+
+	job.Running = false
 }
 
 func (e *Engine) resultHandler() {
@@ -120,25 +120,11 @@ func (e *Engine) resultHandler() {
 	}()
 
 	for result := range e.resultQueue {
-		e.log.Debug("Alert Rule Result", "ruleId", result.AlertJob.Rule.Id, "state", result.State, "retry", result.AlertJob.RetryCount)
+		e.log.Debug("Alert Rule Result", "ruleId", result.Rule.Id, "triggered", result.Triggered)
 
-		result.AlertJob.Running = false
-
 		if result.Error != nil {
-			result.AlertJob.IncRetry()
-
-			if result.AlertJob.Retryable() {
-				e.log.Error("Alert Rule Result Error", "ruleId", result.AlertJob.Rule.Id, "error", result.Error, "retry", result.AlertJob.RetryCount)
-				e.execQueue <- result.AlertJob
-			} else {
-				e.log.Error("Alert Rule Result Error After Max Retries", "ruleId", result.AlertJob.Rule.Id, "error", result.Error, "retry", result.AlertJob.RetryCount)
-
-				result.State = alertstates.Critical
-				result.Description = fmt.Sprintf("Failed to run check after %d retires, Error: %v", maxAlertExecutionRetries, result.Error)
-				e.responseHandler.Handle(result)
-			}
+			e.log.Error("Alert Rule Result Error", "ruleId", result.Rule.Id, "error", result.Error, "retry")
 		} else {
-			result.AlertJob.ResetRetry()
 			e.responseHandler.Handle(result)
 		}
 	}
@@ -39,6 +39,7 @@ func TestAlertRuleExtraction(t *testing.T) {
 				"handler": 1,
 				"enabled": true,
 				"frequency": "60s",
+				"severity": "critical",
 				"conditions": [
 					{
 						"type": "query",
@@ -63,6 +64,7 @@ func TestAlertRuleExtraction(t *testing.T) {
 				"handler": 0,
 				"enabled": true,
 				"frequency": "60s",
+				"severity": "warning",
 				"conditions": [
 					{
 						"type": "query",
@@ -122,6 +124,11 @@ func TestAlertRuleExtraction(t *testing.T) {
 			So(alerts[1].Handler, ShouldEqual, 0)
 		})
 
+		Convey("should extract Severity property", func() {
+			So(alerts[0].Severity, ShouldEqual, "critical")
+			So(alerts[1].Severity, ShouldEqual, "warning")
+		})
+
 		Convey("should extract frequency in seconds", func() {
 			So(alerts[0].Frequency, ShouldEqual, 60)
 			So(alerts[1].Frequency, ShouldEqual, 60)
@@ -1,14 +1,9 @@
 package alerting
 
 import (
-	"fmt"
 	"time"
 
-	"github.com/grafana/grafana/pkg/bus"
 	"github.com/grafana/grafana/pkg/log"
-	m "github.com/grafana/grafana/pkg/models"
-	"github.com/grafana/grafana/pkg/services/alerting/alertstates"
-	"github.com/grafana/grafana/pkg/tsdb"
 )
 
 var (
@@ -25,124 +20,119 @@ func NewHandler() *HandlerImpl {
 	}
 }
 
-func (e *HandlerImpl) Execute(job *AlertJob, resultQueue chan *AlertResult) {
-	startTime := time.Now()
-
-	timeSeries, err := e.executeQuery(job)
-	if err != nil {
-		resultQueue <- &AlertResult{
-			Error: err,
-			State: alertstates.Pending,
-			AlertJob: job,
-			StartTime: time.Now(),
-			EndTime: time.Now(),
-		}
+func (e *HandlerImpl) Execute(rule *AlertRule, resultQueue chan *AlertResultContext) {
+	resultQueue <- e.eval(rule)
+}
+
+func (e *HandlerImpl) eval(rule *AlertRule) *AlertResultContext {
+	result := &AlertResultContext{
+		StartTime: time.Now(),
+	}
+
+	for _, condition := range rule.Conditions {
+		condition.Eval(result)
 	}
 
-	result := e.evaluateRule(job.Rule, timeSeries)
-	result.AlertJob = job
-	result.StartTime = startTime
 	result.EndTime = time.Now()
-
-	resultQueue <- result
+	return result
 }
 
func (e *HandlerImpl) executeQuery(job *AlertJob) (tsdb.TimeSeriesSlice, error) {
|
// func (e *HandlerImpl) executeQuery(job *AlertJob) (tsdb.TimeSeriesSlice, error) {
|
||||||
getDsInfo := &m.GetDataSourceByIdQuery{
|
// getDsInfo := &m.GetDataSourceByIdQuery{
|
||||||
Id: job.Rule.Query.DatasourceId,
|
// Id: job.Rule.Query.DatasourceId,
|
||||||
OrgId: job.Rule.OrgId,
|
// OrgId: job.Rule.OrgId,
|
||||||
}
|
// }
|
||||||
|
//
|
||||||
if err := bus.Dispatch(getDsInfo); err != nil {
|
// if err := bus.Dispatch(getDsInfo); err != nil {
|
||||||
return nil, fmt.Errorf("Could not find datasource")
|
// return nil, fmt.Errorf("Could not find datasource")
|
||||||
}
|
// }
|
||||||
|
//
|
||||||
req := e.GetRequestForAlertRule(job.Rule, getDsInfo.Result)
|
// req := e.GetRequestForAlertRule(job.Rule, getDsInfo.Result)
|
||||||
result := make(tsdb.TimeSeriesSlice, 0)
|
// result := make(tsdb.TimeSeriesSlice, 0)
|
||||||
|
//
|
||||||
resp, err := tsdb.HandleRequest(req)
|
// resp, err := tsdb.HandleRequest(req)
|
||||||
if err != nil {
|
// if err != nil {
|
||||||
return nil, fmt.Errorf("Alerting: GetSeries() tsdb.HandleRequest() error %v", err)
|
// return nil, fmt.Errorf("Alerting: GetSeries() tsdb.HandleRequest() error %v", err)
|
||||||
}
|
// }
|
||||||
|
//
|
||||||
for _, v := range resp.Results {
|
// for _, v := range resp.Results {
|
||||||
if v.Error != nil {
|
// if v.Error != nil {
|
||||||
return nil, fmt.Errorf("Alerting: GetSeries() tsdb.HandleRequest() response error %v", v)
|
// return nil, fmt.Errorf("Alerting: GetSeries() tsdb.HandleRequest() response error %v", v)
|
||||||
}
|
// }
|
||||||
|
//
|
||||||
result = append(result, v.Series...)
|
// result = append(result, v.Series...)
|
||||||
}
|
// }
|
||||||
|
//
|
||||||
return result, nil
|
// return result, nil
|
||||||
}
|
// }
|
||||||
|
//
|
||||||
func (e *HandlerImpl) GetRequestForAlertRule(rule *AlertRule, datasource *m.DataSource) *tsdb.Request {
|
// func (e *HandlerImpl) GetRequestForAlertRule(rule *AlertRule, datasource *m.DataSource) *tsdb.Request {
|
||||||
e.log.Debug("GetRequest", "query", rule.Query.Query, "from", rule.Query.From, "datasourceId", datasource.Id)
|
// e.log.Debug("GetRequest", "query", rule.Query.Query, "from", rule.Query.From, "datasourceId", datasource.Id)
|
||||||
req := &tsdb.Request{
|
// req := &tsdb.Request{
|
||||||
TimeRange: tsdb.TimeRange{
|
// TimeRange: tsdb.TimeRange{
|
||||||
From: "-" + rule.Query.From,
|
// From: "-" + rule.Query.From,
|
||||||
To: rule.Query.To,
|
// To: rule.Query.To,
|
||||||
},
|
// },
|
||||||
Queries: []*tsdb.Query{
|
// Queries: []*tsdb.Query{
|
||||||
{
|
// {
|
||||||
RefId: "A",
|
// RefId: "A",
|
||||||
Query: rule.Query.Query,
|
// Query: rule.Query.Query,
|
||||||
DataSource: &tsdb.DataSourceInfo{
|
// DataSource: &tsdb.DataSourceInfo{
|
||||||
Id: datasource.Id,
|
// Id: datasource.Id,
|
||||||
Name: datasource.Name,
|
// Name: datasource.Name,
|
||||||
PluginId: datasource.Type,
|
// PluginId: datasource.Type,
|
||||||
Url: datasource.Url,
|
// Url: datasource.Url,
|
||||||
},
|
// },
|
||||||
},
|
// },
|
||||||
},
|
// },
|
||||||
}
|
// }
|
||||||
|
//
|
||||||
return req
|
// return req
|
||||||
}
|
// }
|
||||||
|
//
|
||||||
func (e *HandlerImpl) evaluateRule(rule *AlertRule, series tsdb.TimeSeriesSlice) *AlertResult {
|
// func (e *HandlerImpl) evaluateRule(rule *AlertRule, series tsdb.TimeSeriesSlice) *AlertResult {
|
||||||
e.log.Debug("Evaluating Alerting Rule", "seriesCount", len(series), "ruleName", rule.Name)
|
// e.log.Debug("Evaluating Alerting Rule", "seriesCount", len(series), "ruleName", rule.Name)
|
||||||
|
//
|
||||||
triggeredAlert := make([]*TriggeredAlert, 0)
|
// triggeredAlert := make([]*TriggeredAlert, 0)
|
||||||
|
//
|
||||||
for _, serie := range series {
|
// for _, serie := range series {
|
||||||
e.log.Debug("Evaluating series", "series", serie.Name)
|
// e.log.Debug("Evaluating series", "series", serie.Name)
|
||||||
transformedValue, _ := rule.Transformer.Transform(serie)
|
// transformedValue, _ := rule.Transformer.Transform(serie)
|
||||||
|
//
|
||||||
critResult := evalCondition(rule.Critical, transformedValue)
|
// critResult := evalCondition(rule.Critical, transformedValue)
|
||||||
condition2 := fmt.Sprintf("%v %s %v ", transformedValue, rule.Critical.Operator, rule.Critical.Value)
|
// condition2 := fmt.Sprintf("%v %s %v ", transformedValue, rule.Critical.Operator, rule.Critical.Value)
|
||||||
e.log.Debug("Alert execution Crit", "name", serie.Name, "condition", condition2, "result", critResult)
|
// e.log.Debug("Alert execution Crit", "name", serie.Name, "condition", condition2, "result", critResult)
|
||||||
if critResult {
|
// if critResult {
|
||||||
triggeredAlert = append(triggeredAlert, &TriggeredAlert{
|
// triggeredAlert = append(triggeredAlert, &TriggeredAlert{
|
||||||
State: alertstates.Critical,
|
// State: alertstates.Critical,
|
||||||
Value: transformedValue,
|
// Value: transformedValue,
|
||||||
Metric: serie.Name,
|
// Metric: serie.Name,
|
||||||
})
|
// })
|
||||||
continue
|
// continue
|
||||||
}
|
// }
|
||||||
|
//
|
||||||
warnResult := evalCondition(rule.Warning, transformedValue)
|
// warnResult := evalCondition(rule.Warning, transformedValue)
|
||||||
condition := fmt.Sprintf("%v %s %v ", transformedValue, rule.Warning.Operator, rule.Warning.Value)
|
// condition := fmt.Sprintf("%v %s %v ", transformedValue, rule.Warning.Operator, rule.Warning.Value)
|
||||||
e.log.Debug("Alert execution Warn", "name", serie.Name, "condition", condition, "result", warnResult)
|
// e.log.Debug("Alert execution Warn", "name", serie.Name, "condition", condition, "result", warnResult)
|
||||||
if warnResult {
|
// if warnResult {
|
||||||
triggeredAlert = append(triggeredAlert, &TriggeredAlert{
|
// triggeredAlert = append(triggeredAlert, &TriggeredAlert{
|
||||||
State: alertstates.Warn,
|
// State: alertstates.Warn,
|
||||||
Value: transformedValue,
|
// Value: transformedValue,
|
||||||
Metric: serie.Name,
|
// Metric: serie.Name,
|
||||||
})
|
// })
|
||||||
}
|
// }
|
||||||
}
|
// }
|
||||||
|
//
|
||||||
executionState := alertstates.Ok
|
// executionState := alertstates.Ok
|
||||||
for _, raised := range triggeredAlert {
|
// for _, raised := range triggeredAlert {
|
||||||
if raised.State == alertstates.Critical {
|
// if raised.State == alertstates.Critical {
|
||||||
executionState = alertstates.Critical
|
// executionState = alertstates.Critical
|
||||||
}
|
// }
|
||||||
|
//
|
||||||
if executionState != alertstates.Critical && raised.State == alertstates.Warn {
|
// if executionState != alertstates.Critical && raised.State == alertstates.Warn {
|
||||||
executionState = alertstates.Warn
|
// executionState = alertstates.Warn
|
||||||
}
|
// }
|
||||||
}
|
// }
|
||||||
|
//
|
||||||
return &AlertResult{State: executionState, TriggeredAlerts: triggeredAlert}
|
// return &AlertResult{State: executionState, TriggeredAlerts: triggeredAlert}
|
||||||
}
|
// }
|
||||||
|
|||||||
@@ -3,149 +3,171 @@ package alerting
|
|||||||
import (
|
import (
|
||||||
"testing"
|
"testing"
|
||||||
|
|
||||||
"github.com/grafana/grafana/pkg/services/alerting/alertstates"
|
"github.com/grafana/grafana/pkg/components/simplejson"
|
||||||
"github.com/grafana/grafana/pkg/services/alerting/transformers"
|
"github.com/grafana/grafana/pkg/models"
|
||||||
"github.com/grafana/grafana/pkg/tsdb"
|
|
||||||
. "github.com/smartystreets/goconvey/convey"
|
. "github.com/smartystreets/goconvey/convey"
|
||||||
)
|
)
|
||||||
|
|
||||||
func TestAlertingExecutor(t *testing.T) {
|
func TestAlertingExecutor(t *testing.T) {
|
||||||
Convey("Test alert execution", t, func() {
|
Convey("Test alert execution", t, func() {
|
||||||
executor := NewHandler()
|
handler := NewHandler()
|
||||||
|
|
||||||
Convey("single time serie", func() {
|
Convey("single time serie", func() {
|
||||||
Convey("Show return ok since avg is above 2", func() {
|
Convey("Show return ok since avg is above 2", func() {
|
||||||
rule := &AlertRule{
|
json := `
|
||||||
Critical: Level{Value: 10, Operator: ">"},
|
{
|
||||||
Transformer: transformers.NewAggregationTransformer("avg"),
|
"name": "name2",
|
||||||
}
|
"description": "desc2",
|
||||||
|
"handler": 0,
|
||||||
|
"enabled": true,
|
||||||
|
"frequency": "60s",
|
||||||
|
"conditions": [
|
||||||
|
{
|
||||||
|
"type": "query",
|
||||||
|
"query": {
|
||||||
|
"params": ["A", "5m", "now"],
|
||||||
|
"datasourceId": 1,
|
||||||
|
"model": {"target": "aliasByNode(statsd.fakesite.counters.session_start.mobile.count, 4)"}
|
||||||
|
},
|
||||||
|
"reducer": {"type": "avg", "params": []},
|
||||||
|
"evaluator": {"type": ">", "params": [100]}
|
||||||
|
}
|
||||||
|
]
|
||||||
|
}
|
||||||
|
`
|
||||||
|
|
||||||
timeSeries := []*tsdb.TimeSeries{
|
alertJSON, jsonErr := simplejson.NewJson([]byte(json))
|
||||||
tsdb.NewTimeSeries("test1", [][2]float64{{2, 0}}),
|
So(jsonErr, ShouldBeNil)
|
||||||
}
|
|
||||||
|
|
||||||
result := executor.evaluateRule(rule, timeSeries)
|
alert := &models.Alert{Settings: alertJSON}
|
||||||
So(result.State, ShouldEqual, alertstates.Ok)
|
rule, _ := NewAlertRuleFromDBModel(alert)
|
||||||
|
|
||||||
|
// timeSeries := []*tsdb.TimeSeries{
|
||||||
|
// tsdb.NewTimeSeries("test1", [][2]float64{{2, 0}}),
|
||||||
|
// }
|
||||||
|
|
||||||
|
result := handler.eval(rule)
|
||||||
|
So(result.Triggered, ShouldEqual, true)
|
||||||
})
|
})
|
||||||
|
|
||||||
Convey("Show return critical since below 2", func() {
|
// Convey("Show return critical since below 2", func() {
|
||||||
rule := &AlertRule{
|
// rule := &AlertRule{
|
||||||
Critical: Level{Value: 10, Operator: "<"},
|
// Critical: Level{Value: 10, Operator: "<"},
|
||||||
Transformer: transformers.NewAggregationTransformer("avg"),
|
// Transformer: transformers.NewAggregationTransformer("avg"),
|
||||||
}
|
// }
|
||||||
|
//
|
||||||
timeSeries := []*tsdb.TimeSeries{
|
// timeSeries := []*tsdb.TimeSeries{
|
||||||
tsdb.NewTimeSeries("test1", [][2]float64{{2, 0}}),
|
// tsdb.NewTimeSeries("test1", [][2]float64{{2, 0}}),
|
||||||
}
|
// }
|
||||||
|
//
|
||||||
result := executor.evaluateRule(rule, timeSeries)
|
// result := executor.evaluateRule(rule, timeSeries)
|
||||||
So(result.State, ShouldEqual, alertstates.Critical)
|
// So(result.State, ShouldEqual, alertstates.Critical)
|
||||||
})
|
// })
|
||||||
|
//
|
||||||
Convey("Show return critical since sum is above 10", func() {
|
// Convey("Show return critical since sum is above 10", func() {
|
||||||
rule := &AlertRule{
|
// rule := &AlertRule{
|
||||||
Critical: Level{Value: 10, Operator: ">"},
|
// Critical: Level{Value: 10, Operator: ">"},
|
||||||
Transformer: transformers.NewAggregationTransformer("sum"),
|
// Transformer: transformers.NewAggregationTransformer("sum"),
|
||||||
}
|
// }
|
||||||
|
//
|
||||||
timeSeries := []*tsdb.TimeSeries{
|
// timeSeries := []*tsdb.TimeSeries{
|
||||||
tsdb.NewTimeSeries("test1", [][2]float64{{9, 0}, {9, 0}}),
|
// tsdb.NewTimeSeries("test1", [][2]float64{{9, 0}, {9, 0}}),
|
||||||
}
|
// }
|
||||||
|
//
|
||||||
result := executor.evaluateRule(rule, timeSeries)
|
// result := executor.evaluateRule(rule, timeSeries)
|
||||||
So(result.State, ShouldEqual, alertstates.Critical)
|
// So(result.State, ShouldEqual, alertstates.Critical)
|
||||||
})
|
// })
|
||||||
|
//
|
||||||
Convey("Show return ok since avg is below 10", func() {
|
// Convey("Show return ok since avg is below 10", func() {
|
||||||
rule := &AlertRule{
|
// rule := &AlertRule{
|
||||||
Critical: Level{Value: 10, Operator: ">"},
|
// Critical: Level{Value: 10, Operator: ">"},
|
||||||
Transformer: transformers.NewAggregationTransformer("avg"),
|
// Transformer: transformers.NewAggregationTransformer("avg"),
|
||||||
}
|
// }
|
||||||
|
//
|
||||||
timeSeries := []*tsdb.TimeSeries{
|
// timeSeries := []*tsdb.TimeSeries{
|
||||||
tsdb.NewTimeSeries("test1", [][2]float64{{9, 0}, {9, 0}}),
|
// tsdb.NewTimeSeries("test1", [][2]float64{{9, 0}, {9, 0}}),
|
||||||
}
|
// }
|
||||||
|
//
|
||||||
result := executor.evaluateRule(rule, timeSeries)
|
// result := executor.evaluateRule(rule, timeSeries)
|
||||||
So(result.State, ShouldEqual, alertstates.Ok)
|
// So(result.State, ShouldEqual, alertstates.Ok)
|
||||||
})
|
// })
|
||||||
|
//
|
||||||
Convey("Show return ok since min is below 10", func() {
|
// Convey("Show return ok since min is below 10", func() {
|
||||||
rule := &AlertRule{
|
// rule := &AlertRule{
|
||||||
Critical: Level{Value: 10, Operator: ">"},
|
// Critical: Level{Value: 10, Operator: ">"},
|
||||||
Transformer: transformers.NewAggregationTransformer("avg"),
|
// Transformer: transformers.NewAggregationTransformer("avg"),
|
||||||
}
|
// }
|
||||||
|
//
|
||||||
timeSeries := []*tsdb.TimeSeries{
|
// timeSeries := []*tsdb.TimeSeries{
|
||||||
tsdb.NewTimeSeries("test1", [][2]float64{{11, 0}, {9, 0}}),
|
// tsdb.NewTimeSeries("test1", [][2]float64{{11, 0}, {9, 0}}),
|
||||||
}
|
// }
|
||||||
|
//
|
||||||
result := executor.evaluateRule(rule, timeSeries)
|
// result := executor.evaluateRule(rule, timeSeries)
|
||||||
So(result.State, ShouldEqual, alertstates.Ok)
|
// So(result.State, ShouldEqual, alertstates.Ok)
|
||||||
})
|
// })
|
||||||
|
//
|
||||||
Convey("Show return ok since max is above 10", func() {
|
// Convey("Show return ok since max is above 10", func() {
|
||||||
rule := &AlertRule{
|
// rule := &AlertRule{
|
||||||
Critical: Level{Value: 10, Operator: ">"},
|
// Critical: Level{Value: 10, Operator: ">"},
|
||||||
Transformer: transformers.NewAggregationTransformer("max"),
|
// Transformer: transformers.NewAggregationTransformer("max"),
|
||||||
}
|
// }
|
||||||
|
//
|
||||||
timeSeries := []*tsdb.TimeSeries{
|
// timeSeries := []*tsdb.TimeSeries{
|
||||||
tsdb.NewTimeSeries("test1", [][2]float64{{6, 0}, {11, 0}}),
|
// tsdb.NewTimeSeries("test1", [][2]float64{{6, 0}, {11, 0}}),
|
||||||
}
|
// }
|
||||||
|
//
|
||||||
result := executor.evaluateRule(rule, timeSeries)
|
// result := executor.evaluateRule(rule, timeSeries)
|
||||||
So(result.State, ShouldEqual, alertstates.Critical)
|
// So(result.State, ShouldEqual, alertstates.Critical)
|
||||||
})
|
// })
|
||||||
|
//
|
||||||
})
|
// })
|
||||||
|
//
|
||||||
Convey("muliple time series", func() {
|
// Convey("muliple time series", func() {
|
||||||
Convey("both are ok", func() {
|
// Convey("both are ok", func() {
|
||||||
rule := &AlertRule{
|
// rule := &AlertRule{
|
||||||
Critical: Level{Value: 10, Operator: ">"},
|
// Critical: Level{Value: 10, Operator: ">"},
|
||||||
Transformer: transformers.NewAggregationTransformer("avg"),
|
// Transformer: transformers.NewAggregationTransformer("avg"),
|
||||||
}
|
// }
|
||||||
|
//
|
||||||
timeSeries := []*tsdb.TimeSeries{
|
// timeSeries := []*tsdb.TimeSeries{
|
||||||
tsdb.NewTimeSeries("test1", [][2]float64{{2, 0}}),
|
// tsdb.NewTimeSeries("test1", [][2]float64{{2, 0}}),
|
||||||
tsdb.NewTimeSeries("test1", [][2]float64{{2, 0}}),
|
// tsdb.NewTimeSeries("test1", [][2]float64{{2, 0}}),
|
||||||
}
|
// }
|
||||||
|
//
|
||||||
result := executor.evaluateRule(rule, timeSeries)
|
// result := executor.evaluateRule(rule, timeSeries)
|
||||||
So(result.State, ShouldEqual, alertstates.Ok)
|
// So(result.State, ShouldEqual, alertstates.Ok)
|
||||||
})
|
// })
|
||||||
|
//
|
||||||
Convey("first serie is good, second is critical", func() {
|
// Convey("first serie is good, second is critical", func() {
|
||||||
rule := &AlertRule{
|
// rule := &AlertRule{
|
||||||
Critical: Level{Value: 10, Operator: ">"},
|
// Critical: Level{Value: 10, Operator: ">"},
|
||||||
Transformer: transformers.NewAggregationTransformer("avg"),
|
// Transformer: transformers.NewAggregationTransformer("avg"),
|
||||||
}
|
// }
|
||||||
|
//
|
||||||
timeSeries := []*tsdb.TimeSeries{
|
// timeSeries := []*tsdb.TimeSeries{
|
||||||
tsdb.NewTimeSeries("test1", [][2]float64{{2, 0}}),
|
// tsdb.NewTimeSeries("test1", [][2]float64{{2, 0}}),
|
||||||
tsdb.NewTimeSeries("test1", [][2]float64{{11, 0}}),
|
// tsdb.NewTimeSeries("test1", [][2]float64{{11, 0}}),
|
||||||
}
|
// }
|
||||||
|
//
|
||||||
result := executor.evaluateRule(rule, timeSeries)
|
// result := executor.evaluateRule(rule, timeSeries)
|
||||||
So(result.State, ShouldEqual, alertstates.Critical)
|
// So(result.State, ShouldEqual, alertstates.Critical)
|
||||||
})
|
// })
|
||||||
|
//
|
||||||
Convey("first serie is warn, second is critical", func() {
|
// Convey("first serie is warn, second is critical", func() {
|
||||||
rule := &AlertRule{
|
// rule := &AlertRule{
|
||||||
Critical: Level{Value: 10, Operator: ">"},
|
// Critical: Level{Value: 10, Operator: ">"},
|
||||||
Warning: Level{Value: 5, Operator: ">"},
|
// Warning: Level{Value: 5, Operator: ">"},
|
||||||
Transformer: transformers.NewAggregationTransformer("avg"),
|
// Transformer: transformers.NewAggregationTransformer("avg"),
|
||||||
}
|
// }
|
||||||
|
//
|
||||||
timeSeries := []*tsdb.TimeSeries{
|
// timeSeries := []*tsdb.TimeSeries{
|
||||||
tsdb.NewTimeSeries("test1", [][2]float64{{6, 0}}),
|
// tsdb.NewTimeSeries("test1", [][2]float64{{6, 0}}),
|
||||||
tsdb.NewTimeSeries("test1", [][2]float64{{11, 0}}),
|
// tsdb.NewTimeSeries("test1", [][2]float64{{11, 0}}),
|
||||||
}
|
// }
|
||||||
|
//
|
||||||
result := executor.evaluateRule(rule, timeSeries)
|
// result := executor.evaluateRule(rule, timeSeries)
|
||||||
So(result.State, ShouldEqual, alertstates.Critical)
|
// So(result.State, ShouldEqual, alertstates.Critical)
|
||||||
})
|
// })
|
||||||
})
|
})
|
||||||
})
|
})
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -3,7 +3,7 @@ package alerting
 import "time"
 
 type AlertingHandler interface {
-	Execute(rule *AlertJob, resultChan chan *AlertResult)
+	Execute(rule *AlertRule, resultChan chan *AlertResultContext)
 }
 
 type Scheduler interface {
@@ -12,11 +12,11 @@ type Scheduler interface {
 }
 
 type Notifier interface {
-	Notify(alertResult *AlertResult)
+	Notify(alertResult *AlertResultContext)
 }
 
 type AlertCondition interface {
-	Eval()
+	Eval(result *AlertResultContext)
 }
 
 type QueryReducer interface {
@@ -22,18 +22,17 @@ func (aj *AlertJob) IncRetry() {
 	aj.RetryCount++
 }
 
-type AlertResult struct {
-	State string
-	TriggeredAlerts []*TriggeredAlert
+type AlertResultContext struct {
+	Triggered bool
+	Details []*AlertResultDetail
 	Error error
 	Description string
 	StartTime time.Time
 	EndTime time.Time
-
-	AlertJob *AlertJob
+	Rule *AlertRule
 }
 
-type TriggeredAlert struct {
+type AlertResultDetail struct {
 	Value float64
 	Metric string
 	State string
@@ -1,207 +1,195 @@
|
|||||||
package alerting
|
package alerting
|
||||||
|
|
||||||
import (
|
// type NotifierImpl struct {
|
||||||
"fmt"
|
// log log.Logger
|
||||||
"strconv"
|
// getNotifications func(orgId int64, notificationGroups []int64) []*Notification
|
||||||
|
// }
|
||||||
|
//
|
||||||
|
// func NewNotifier() *NotifierImpl {
|
||||||
|
// log := log.New("alerting.notifier")
|
||||||
|
// return &NotifierImpl{
|
||||||
|
// log: log,
|
||||||
|
// getNotifications: buildGetNotifiers(log),
|
||||||
|
// }
|
||||||
|
// }
|
||||||
|
|
||||||
"github.com/grafana/grafana/pkg/bus"
|
// func (n NotifierImpl) ShouldDispath(alertResult *AlertResultContext, notifier *Notification) bool {
|
||||||
"github.com/grafana/grafana/pkg/components/simplejson"
|
// warn := alertResult.State == alertstates.Warn && notifier.SendWarning
|
||||||
"github.com/grafana/grafana/pkg/log"
|
// crit := alertResult.State == alertstates.Critical && notifier.SendCritical
|
||||||
m "github.com/grafana/grafana/pkg/models"
|
// return (warn || crit) || alertResult.State == alertstates.Ok
|
||||||
"github.com/grafana/grafana/pkg/services/alerting/alertstates"
|
// }
|
||||||
"github.com/grafana/grafana/pkg/setting"
|
//
|
||||||
)
|
// func (n *NotifierImpl) Notify(alertResult *AlertResultContext) {
|
||||||
|
// notifiers := n.getNotifications(alertResult.Rule.OrgId, alertResult.Rule.Notifications)
|
||||||
type NotifierImpl struct {
|
//
|
||||||
log log.Logger
|
// for _, notifier := range notifiers {
|
||||||
getNotifications func(orgId int64, notificationGroups []int64) []*Notification
|
// if n.ShouldDispath(alertResult, notifier) {
|
||||||
}
|
// n.log.Info("Sending notification", "state", alertResult.State, "type", notifier.Type)
|
||||||
|
// go notifier.Notifierr.Dispatch(alertResult)
|
||||||
func NewNotifier() *NotifierImpl {
|
// }
|
||||||
log := log.New("alerting.notifier")
|
// }
|
||||||
return &NotifierImpl{
|
// }
|
||||||
log: log,
|
//
|
||||||
getNotifications: buildGetNotifiers(log),
|
// type Notification struct {
|
||||||
}
|
// Name string
|
||||||
}
|
// Type string
|
||||||
|
// SendWarning bool
|
||||||
func (n NotifierImpl) ShouldDispath(alertResult *AlertResult, notifier *Notification) bool {
|
// SendCritical bool
|
||||||
warn := alertResult.State == alertstates.Warn && notifier.SendWarning
|
//
|
||||||
crit := alertResult.State == alertstates.Critical && notifier.SendCritical
|
// Notifierr NotificationDispatcher
|
||||||
return (warn || crit) || alertResult.State == alertstates.Ok
|
// }
|
||||||
}
|
//
|
||||||
|
// type EmailNotifier struct {
|
||||||
func (n *NotifierImpl) Notify(alertResult *AlertResult) {
|
// To string
|
||||||
notifiers := n.getNotifications(alertResult.AlertJob.Rule.OrgId, alertResult.AlertJob.Rule.NotificationGroups)
|
// log log.Logger
|
||||||
|
// }
|
||||||
for _, notifier := range notifiers {
|
//
|
||||||
if n.ShouldDispath(alertResult, notifier) {
|
// func (this *EmailNotifier) Dispatch(alertResult *AlertResult) {
|
||||||
n.log.Info("Sending notification", "state", alertResult.State, "type", notifier.Type)
|
// this.log.Info("Sending email")
|
||||||
go notifier.Notifierr.Dispatch(alertResult)
|
// grafanaUrl := fmt.Sprintf("%s:%s", setting.HttpAddr, setting.HttpPort)
|
||||||
}
|
// if setting.AppSubUrl != "" {
|
||||||
}
|
// grafanaUrl += "/" + setting.AppSubUrl
|
||||||
}
|
// }
|
||||||
|
//
|
||||||
type Notification struct {
|
// query := &m.GetDashboardsQuery{
|
||||||
Name string
|
// DashboardIds: []int64{alertResult.AlertJob.Rule.DashboardId},
|
||||||
Type string
|
// }
|
||||||
SendWarning bool
|
//
|
||||||
SendCritical bool
|
// if err := bus.Dispatch(query); err != nil {
|
||||||
|
// this.log.Error("Failed to load dashboard", "error", err)
|
||||||
Notifierr NotificationDispatcher
|
// return
|
||||||
}
|
// }
|
||||||
|
//
|
||||||
type EmailNotifier struct {
|
// if len(query.Result) != 1 {
|
||||||
To string
|
// this.log.Error("Can only support one dashboard", "result", len(query.Result))
|
||||||
log log.Logger
|
// return
|
||||||
}
|
// }
|
||||||
|
//
|
||||||
func (this *EmailNotifier) Dispatch(alertResult *AlertResult) {
|
// dashboard := query.Result[0]
|
||||||
this.log.Info("Sending email")
|
//
|
||||||
grafanaUrl := fmt.Sprintf("%s:%s", setting.HttpAddr, setting.HttpPort)
|
// panelId := strconv.Itoa(int(alertResult.AlertJob.Rule.PanelId))
|
||||||
if setting.AppSubUrl != "" {
|
//
|
||||||
grafanaUrl += "/" + setting.AppSubUrl
|
// //TODO: get from alertrule and transforms to seconds
|
||||||
}
|
// from := "1466169458375"
|
||||||
|
// to := "1466171258375"
|
||||||
query := &m.GetDashboardsQuery{
|
//
|
||||||
DashboardIds: []int64{alertResult.AlertJob.Rule.DashboardId},
|
// renderUrl := fmt.Sprintf("%s/render/dashboard-solo/db/%s?from=%s&to=%s&panelId=%s&width=1000&height=500", grafanaUrl, dashboard.Slug, from, to, panelId)
|
||||||
}
|
// cmd := &m.SendEmailCommand{
|
||||||
|
// Data: map[string]interface{}{
|
||||||
if err := bus.Dispatch(query); err != nil {
|
// "Name": "Name",
|
||||||
this.log.Error("Failed to load dashboard", "error", err)
|
// "State": alertResult.State,
|
||||||
return
|
// "Description": alertResult.Description,
|
||||||
}
|
// "TriggeredAlerts": alertResult.TriggeredAlerts,
|
||||||
|
// "DashboardLink": grafanaUrl + "/dashboard/db/" + dashboard.Slug,
|
||||||
if len(query.Result) != 1 {
|
// "AlertPageUrl": grafanaUrl + "/alerting",
|
||||||
this.log.Error("Can only support one dashboard", "result", len(query.Result))
|
// "DashboardImage": renderUrl,
|
||||||
return
|
// },
|
||||||
}
|
// To: []string{this.To},
|
||||||
|
// Template: "alert_notification.html",
|
||||||
dashboard := query.Result[0]
|
// }
|
||||||
|
//
|
||||||
panelId := strconv.Itoa(int(alertResult.AlertJob.Rule.PanelId))
|
// err := bus.Dispatch(cmd)
|
||||||
|
// if err != nil {
|
||||||
//TODO: get from alertrule and transforms to seconds
|
// this.log.Error("Could not send alert notification as email", "error", err)
|
||||||
from := "1466169458375"
|
// }
|
||||||
to := "1466171258375"
|
// }
|
||||||
|
//
|
||||||
renderUrl := fmt.Sprintf("%s/render/dashboard-solo/db/%s?from=%s&to=%s&panelId=%s&width=1000&height=500", grafanaUrl, dashboard.Slug, from, to, panelId)
|
// type WebhookNotifier struct {
|
||||||
cmd := &m.SendEmailCommand{
|
// Url string
|
||||||
Data: map[string]interface{}{
|
// User string
|
||||||
"Name": "Name",
|
// Password string
|
||||||
"State": alertResult.State,
|
// log log.Logger
|
||||||
"Description": alertResult.Description,
|
// }
|
||||||
"TriggeredAlerts": alertResult.TriggeredAlerts,
|
//
|
||||||
"DashboardLink": grafanaUrl + "/dashboard/db/" + dashboard.Slug,
|
// func (this *WebhookNotifier) Dispatch(alertResult *AlertResultContext) {
|
||||||
"AlertPageUrl": grafanaUrl + "/alerting",
|
// this.log.Info("Sending webhook")
|
||||||
"DashboardImage": renderUrl,
|
//
|
||||||
},
|
// bodyJSON := simplejson.New()
|
||||||
To: []string{this.To},
|
// bodyJSON.Set("name", alertResult.AlertJob.Rule.Name)
|
||||||
Template: "alert_notification.html",
|
// bodyJSON.Set("state", alertResult.State)
|
||||||
}
|
// bodyJSON.Set("trigged", alertResult.TriggeredAlerts)
|
||||||
|
//
|
||||||
err := bus.Dispatch(cmd)
|
// body, _ := bodyJSON.MarshalJSON()
|
||||||
if err != nil {
|
//
|
||||||
this.log.Error("Could not send alert notification as email", "error", err)
|
// cmd := &m.SendWebhook{
|
||||||
}
|
// Url: this.Url,
|
||||||
}
|
// User: this.User,
|
||||||
|
// Password: this.Password,
|
||||||
type WebhookNotifier struct {
|
// Body: string(body),
|
||||||
Url string
|
// }
|
||||||
User string
|
//
|
||||||
Password string
|
// bus.Dispatch(cmd)
|
||||||
log log.Logger
|
// }
|
||||||
}
|
//
|
||||||
|
// type NotificationDispatcher interface {
|
||||||
func (this *WebhookNotifier) Dispatch(alertResult *AlertResult) {
|
// Dispatch(alertResult *AlertResult)
|
||||||
this.log.Info("Sending webhook")
|
// }
|
||||||
|
//
|
||||||
bodyJSON := simplejson.New()
|
// func buildGetNotifiers(log log.Logger) func(orgId int64, notificationGroups []int64) []*Notification {
|
||||||
bodyJSON.Set("name", alertResult.AlertJob.Rule.Name)
|
// return func(orgId int64, notificationGroups []int64) []*Notification {
|
||||||
bodyJSON.Set("state", alertResult.State)
|
// query := &m.GetAlertNotificationQuery{
|
||||||
bodyJSON.Set("trigged", alertResult.TriggeredAlerts)
|
// OrgID: orgId,
|
||||||
|
// Ids: notificationGroups,
|
||||||
body, _ := bodyJSON.MarshalJSON()
|
// IncludeAlwaysExecute: true,
|
||||||
|
// }
|
||||||
cmd := &m.SendWebhook{
|
// err := bus.Dispatch(query)
|
||||||
Url: this.Url,
|
// if err != nil {
|
||||||
User: this.User,
|
// log.Error("Failed to read notifications", "error", err)
|
||||||
Password: this.Password,
|
// }
|
||||||
Body: string(body),
|
//
|
||||||
}
|
// var result []*Notification
|
||||||
|
// for _, notification := range query.Result {
|
||||||
bus.Dispatch(cmd)
|
// not, err := NewNotificationFromDBModel(notification)
|
||||||
}
|
// if err == nil {
|
||||||
|
// result = append(result, not)
|
||||||
type NotificationDispatcher interface {
|
// } else {
|
||||||
Dispatch(alertResult *AlertResult)
|
// log.Error("Failed to read notification model", "error", err)
|
||||||
}
|
// }
|
||||||
|
// }
|
||||||
func buildGetNotifiers(log log.Logger) func(orgId int64, notificationGroups []int64) []*Notification {
|
//
|
||||||
return func(orgId int64, notificationGroups []int64) []*Notification {
|
// return result
|
||||||
query := &m.GetAlertNotificationQuery{
|
// }
|
||||||
OrgID: orgId,
|
// }
|
||||||
Ids: notificationGroups,
|
//
|
||||||
IncludeAlwaysExecute: true,
|
// func NewNotificationFromDBModel(model *m.AlertNotification) (*Notification, error) {
|
||||||
}
|
// notifier, err := createNotifier(model.Type, model.Settings)
|
||||||
err := bus.Dispatch(query)
|
//
|
||||||
if err != nil {
|
// if err != nil {
|
||||||
log.Error("Failed to read notifications", "error", err)
|
// return nil, err
|
||||||
}
|
// }
|
||||||
|
//
|
||||||
var result []*Notification
|
// return &Notification{
|
||||||
for _, notification := range query.Result {
|
// Name: model.Name,
|
||||||
not, err := NewNotificationFromDBModel(notification)
|
// Type: model.Type,
|
||||||
if err == nil {
|
// Notifierr: notifier,
|
||||||
result = append(result, not)
|
// SendCritical: model.Settings.Get("sendCrit").MustBool(),
|
||||||
} else {
|
// SendWarning: model.Settings.Get("sendWarn").MustBool(),
|
||||||
log.Error("Failed to read notification model", "error", err)
|
// }, nil
|
||||||
}
|
// }
|
||||||
}
|
//
|
||||||
|
// var createNotifier = func(notificationType string, settings *simplejson.Json) (NotificationDispatcher, error) {
|
||||||
return result
|
// if notificationType == "email" {
|
||||||
}
|
// to := settings.Get("to").MustString()
|
||||||
}
|
//
|
||||||
|
// if to == "" {
|
||||||
func NewNotificationFromDBModel(model *m.AlertNotification) (*Notification, error) {
|
// return nil, fmt.Errorf("Could not find to propertie in settings")
|
||||||
notifier, err := createNotifier(model.Type, model.Settings)
|
// }
|
||||||
|
//
|
||||||
if err != nil {
|
// return &EmailNotifier{
|
||||||
return nil, err
|
// To: to,
|
||||||
}
|
// log: log.New("alerting.notification.email"),
|
||||||
|
// }, nil
|
||||||
return &Notification{
|
// }
|
||||||
Name: model.Name,
|
//
|
||||||
Type: model.Type,
|
// url := settings.Get("url").MustString()
|
||||||
Notifierr: notifier,
|
// if url == "" {
|
||||||
SendCritical: model.Settings.Get("sendCrit").MustBool(),
|
// return nil, fmt.Errorf("Could not find url propertie in settings")
|
||||||
SendWarning: model.Settings.Get("sendWarn").MustBool(),
|
// }
|
||||||
}, nil
|
//
|
||||||
}
|
// return &WebhookNotifier{
|
||||||
|
// Url: url,
|
||||||
var createNotifier = func(notificationType string, settings *simplejson.Json) (NotificationDispatcher, error) {
|
// User: settings.Get("user").MustString(),
|
||||||
if notificationType == "email" {
|
// Password: settings.Get("password").MustString(),
|
||||||
to := settings.Get("to").MustString()
|
// log: log.New("alerting.notification.webhook"),
|
||||||
|
// }, nil
|
||||||
if to == "" {
|
// }
|
||||||
return nil, fmt.Errorf("Could not find to propertie in settings")
|
|
||||||
}
|
|
||||||
|
|
||||||
return &EmailNotifier{
|
|
||||||
To: to,
|
|
||||||
log: log.New("alerting.notification.email"),
|
|
||||||
}, nil
|
|
||||||
}
|
|
||||||
|
|
||||||
url := settings.Get("url").MustString()
|
|
||||||
if url == "" {
|
|
||||||
return nil, fmt.Errorf("Could not find url propertie in settings")
|
|
||||||
}
|
|
||||||
|
|
||||||
return &WebhookNotifier{
|
|
||||||
Url: url,
|
|
||||||
User: settings.Get("user").MustString(),
|
|
||||||
Password: settings.Get("password").MustString(),
|
|
||||||
log: log.New("alerting.notification.webhook"),
|
|
||||||
}, nil
|
|
||||||
}
|
|
||||||
|
|||||||
@@ -1,125 +1,114 @@
|
|||||||
package alerting
|
package alerting
|
||||||
|
|
||||||
import (
|
// func TestAlertNotificationExtraction(t *testing.T) {
|
||||||
"testing"
|
// Convey("Notifier tests", t, func() {
|
||||||
|
// Convey("rules for sending notifications", func() {
|
||||||
"reflect"
|
// dummieNotifier := NotifierImpl{}
|
||||||
|
//
|
||||||
"github.com/grafana/grafana/pkg/components/simplejson"
|
// result := &AlertResult{
|
||||||
m "github.com/grafana/grafana/pkg/models"
|
// State: alertstates.Critical,
|
||||||
"github.com/grafana/grafana/pkg/services/alerting/alertstates"
|
// }
|
||||||
. "github.com/smartystreets/goconvey/convey"
|
//
|
||||||
)
|
// notifier := &Notification{
|
||||||
|
// Name: "Test Notifier",
|
||||||
func TestAlertNotificationExtraction(t *testing.T) {
|
// Type: "TestType",
|
||||||
Convey("Notifier tests", t, func() {
|
// SendCritical: true,
|
||||||
Convey("rules for sending notifications", func() {
|
// SendWarning: true,
|
||||||
dummieNotifier := NotifierImpl{}
|
// }
|
||||||
|
//
|
||||||
result := &AlertResult{
|
// Convey("Should send notification", func() {
|
||||||
State: alertstates.Critical,
|
// So(dummieNotifier.ShouldDispath(result, notifier), ShouldBeTrue)
|
||||||
}
|
// })
|
||||||
|
//
|
||||||
notifier := &Notification{
|
// Convey("warn:false and state:warn should not send", func() {
|
||||||
Name: "Test Notifier",
|
// result.State = alertstates.Warn
|
||||||
Type: "TestType",
|
// notifier.SendWarning = false
|
||||||
SendCritical: true,
|
// So(dummieNotifier.ShouldDispath(result, notifier), ShouldBeFalse)
|
||||||
SendWarning: true,
|
// })
|
||||||
}
|
// })
|
||||||
|
//
|
||||||
Convey("Should send notification", func() {
|
// Convey("Parsing alert notification from settings", func() {
|
||||||
So(dummieNotifier.ShouldDispath(result, notifier), ShouldBeTrue)
|
// Convey("Parsing email", func() {
|
||||||
})
|
// Convey("empty settings should return error", func() {
|
||||||
|
// json := `{ }`
|
||||||
Convey("warn:false and state:warn should not send", func() {
|
//
|
||||||
result.State = alertstates.Warn
|
// settingsJSON, _ := simplejson.NewJson([]byte(json))
|
||||||
notifier.SendWarning = false
|
// model := &m.AlertNotification{
|
||||||
So(dummieNotifier.ShouldDispath(result, notifier), ShouldBeFalse)
|
// Name: "ops",
|
||||||
})
|
// Type: "email",
|
||||||
})
|
// Settings: settingsJSON,
|
||||||
|
// }
|
||||||
Convey("Parsing alert notification from settings", func() {
|
//
|
||||||
Convey("Parsing email", func() {
|
// _, err := NewNotificationFromDBModel(model)
|
||||||
Convey("empty settings should return error", func() {
|
// So(err, ShouldNotBeNil)
|
||||||
json := `{ }`
|
// })
|
||||||
|
//
|
||||||
settingsJSON, _ := simplejson.NewJson([]byte(json))
|
// Convey("from settings", func() {
|
||||||
model := &m.AlertNotification{
|
// json := `
|
||||||
Name: "ops",
|
// {
|
||||||
Type: "email",
|
// "to": "ops@grafana.org"
|
||||||
Settings: settingsJSON,
|
// }`
|
||||||
}
|
//
|
||||||
|
// settingsJSON, _ := simplejson.NewJson([]byte(json))
|
||||||
_, err := NewNotificationFromDBModel(model)
|
// model := &m.AlertNotification{
|
||||||
So(err, ShouldNotBeNil)
|
// Name: "ops",
|
||||||
})
|
// Type: "email",
|
||||||
|
// Settings: settingsJSON,
|
||||||
Convey("from settings", func() {
|
// }
|
||||||
json := `
|
//
|
||||||
{
|
// not, err := NewNotificationFromDBModel(model)
|
||||||
"to": "ops@grafana.org"
|
//
|
||||||
}`
|
// So(err, ShouldBeNil)
|
||||||
|
// So(not.Name, ShouldEqual, "ops")
|
||||||
settingsJSON, _ := simplejson.NewJson([]byte(json))
|
// So(not.Type, ShouldEqual, "email")
|
||||||
model := &m.AlertNotification{
|
// So(reflect.TypeOf(not.Notifierr).Elem().String(), ShouldEqual, "alerting.EmailNotifier")
|
||||||
Name: "ops",
|
//
|
||||||
Type: "email",
|
// email := not.Notifierr.(*EmailNotifier)
|
||||||
Settings: settingsJSON,
|
// So(email.To, ShouldEqual, "ops@grafana.org")
|
||||||
}
|
// })
|
||||||
|
// })
|
||||||
not, err := NewNotificationFromDBModel(model)
|
//
|
||||||
|
// Convey("Parsing webhook", func() {
|
||||||
So(err, ShouldBeNil)
|
// Convey("empty settings should return error", func() {
|
||||||
So(not.Name, ShouldEqual, "ops")
|
// json := `{ }`
|
||||||
So(not.Type, ShouldEqual, "email")
|
//
|
||||||
So(reflect.TypeOf(not.Notifierr).Elem().String(), ShouldEqual, "alerting.EmailNotifier")
|
// settingsJSON, _ := simplejson.NewJson([]byte(json))
|
||||||
|
// model := &m.AlertNotification{
|
||||||
email := not.Notifierr.(*EmailNotifier)
|
// Name: "ops",
|
||||||
So(email.To, ShouldEqual, "ops@grafana.org")
|
// Type: "webhook",
|
||||||
})
|
// Settings: settingsJSON,
|
||||||
})
|
// }
|
||||||
|
//
|
||||||
Convey("Parsing webhook", func() {
|
// _, err := NewNotificationFromDBModel(model)
|
||||||
Convey("empty settings should return error", func() {
|
// So(err, ShouldNotBeNil)
|
||||||
json := `{ }`
|
// })
|
||||||
|
//
|
||||||
settingsJSON, _ := simplejson.NewJson([]byte(json))
|
// Convey("from settings", func() {
|
||||||
model := &m.AlertNotification{
|
// json := `
|
||||||
Name: "ops",
|
// {
|
||||||
Type: "webhook",
|
// "url": "http://localhost:3000",
|
||||||
Settings: settingsJSON,
|
// "username": "username",
|
||||||
}
|
// "password": "password"
|
||||||
|
// }`
|
||||||
_, err := NewNotificationFromDBModel(model)
|
//
|
||||||
So(err, ShouldNotBeNil)
|
// settingsJSON, _ := simplejson.NewJson([]byte(json))
|
||||||
})
|
// model := &m.AlertNotification{
|
||||||
|
// Name: "slack",
|
||||||
Convey("from settings", func() {
|
// Type: "webhook",
|
||||||
json := `
|
// Settings: settingsJSON,
|
||||||
{
|
// }
|
||||||
"url": "http://localhost:3000",
|
//
|
||||||
"username": "username",
|
// not, err := NewNotificationFromDBModel(model)
|
||||||
"password": "password"
|
//
|
||||||
}`
|
// So(err, ShouldBeNil)
|
||||||
|
// So(not.Name, ShouldEqual, "slack")
|
||||||
settingsJSON, _ := simplejson.NewJson([]byte(json))
|
// So(not.Type, ShouldEqual, "webhook")
|
||||||
model := &m.AlertNotification{
|
// So(reflect.TypeOf(not.Notifierr).Elem().String(), ShouldEqual, "alerting.WebhookNotifier")
|
||||||
Name: "slack",
|
//
|
||||||
Type: "webhook",
|
// webhook := not.Notifierr.(*WebhookNotifier)
|
||||||
Settings: settingsJSON,
|
// So(webhook.Url, ShouldEqual, "http://localhost:3000")
|
||||||
}
|
// })
|
||||||
|
// })
|
||||||
not, err := NewNotificationFromDBModel(model)
|
// })
|
||||||
|
// })
|
||||||
So(err, ShouldBeNil)
|
// }
|
||||||
So(not.Name, ShouldEqual, "slack")
|
|
||||||
So(not.Type, ShouldEqual, "webhook")
|
|
||||||
So(reflect.TypeOf(not.Notifierr).Elem().String(), ShouldEqual, "alerting.WebhookNotifier")
|
|
||||||
|
|
||||||
webhook := not.Notifierr.(*WebhookNotifier)
|
|
||||||
So(webhook.Url, ShouldEqual, "http://localhost:3000")
|
|
||||||
})
|
|
||||||
})
|
|
||||||
})
|
|
||||||
})
|
|
||||||
}
|
|
||||||
|
|||||||
@@ -1,16 +1,9 @@
 package alerting
 
-import (
-	"time"
-
-	"github.com/grafana/grafana/pkg/bus"
-	"github.com/grafana/grafana/pkg/components/simplejson"
-	"github.com/grafana/grafana/pkg/log"
-	m "github.com/grafana/grafana/pkg/models"
-)
+import "github.com/grafana/grafana/pkg/log"
 
 type ResultHandler interface {
-	Handle(result *AlertResult)
+	Handle(result *AlertResultContext)
 }
 
 type ResultHandlerImpl struct {
@@ -20,49 +13,50 @@ type ResultHandlerImpl struct {
 
 func NewResultHandler() *ResultHandlerImpl {
 	return &ResultHandlerImpl{
 		log: log.New("alerting.responseHandler"),
-		notifier: NewNotifier(),
+		//notifier: NewNotifier(),
 	}
 }
 
-func (handler *ResultHandlerImpl) Handle(result *AlertResult) {
-	if handler.shouldUpdateState(result) {
-		cmd := &m.UpdateAlertStateCommand{
-			AlertId: result.AlertJob.Rule.Id,
-			State: result.State,
-			Info: result.Description,
-			OrgId: result.AlertJob.Rule.OrgId,
-			TriggeredAlerts: simplejson.NewFromAny(result.TriggeredAlerts),
-		}
-
-		if err := bus.Dispatch(cmd); err != nil {
-			handler.log.Error("Failed to save state", "error", err)
-		}
-
-		handler.log.Debug("will notify about new state", "new state", result.State)
-		handler.notifier.Notify(result)
-	}
+func (handler *ResultHandlerImpl) Handle(result *AlertResultContext) {
+	// if handler.shouldUpdateState(result) {
+	// 	cmd := &m.UpdateAlertStateCommand{
+	// 		AlertId: result.Rule.Id,
+	// 		State: result.Rule.Severity,
+	// 		Info: result.Description,
+	// 		OrgId: result.Rule.OrgId,
+	// 		TriggeredAlerts: simplejson.NewFromAny(result.Details),
+	// 	}
+	//
+	// 	if err := bus.Dispatch(cmd); err != nil {
+	// 		handler.log.Error("Failed to save state", "error", err)
+	// 	}
+	//
+	// 	handler.log.Debug("will notify about new state", "new state", result.State)
+	// 	handler.notifier.Notify(result)
+	// }
 }
 
-func (handler *ResultHandlerImpl) shouldUpdateState(result *AlertResult) bool {
-	query := &m.GetLastAlertStateQuery{
-		AlertId: result.AlertJob.Rule.Id,
-		OrgId: result.AlertJob.Rule.OrgId,
-	}
-
-	if err := bus.Dispatch(query); err != nil {
-		log.Error2("Failed to read last alert state", "error", err)
-		return false
-	}
-
-	if query.Result == nil {
-		return true
-	}
-
-	lastExecution := query.Result.Created
-	asdf := result.StartTime.Add(time.Minute * -15)
-	olderThen15Min := lastExecution.Before(asdf)
-	changedState := query.Result.State != result.State
-
-	return changedState || olderThen15Min
+func (handler *ResultHandlerImpl) shouldUpdateState(result *AlertResultContext) bool {
+	// query := &m.GetLastAlertStateQuery{
+	// 	AlertId: result.AlertJob.Rule.Id,
+	// 	OrgId: result.AlertJob.Rule.OrgId,
+	// }
+	//
+	// if err := bus.Dispatch(query); err != nil {
+	// 	log.Error2("Failed to read last alert state", "error", err)
+	// 	return false
+	// }
+	//
+	// if query.Result == nil {
+	// 	return true
+	// }
+	//
+	// lastExecution := query.Result.Created
+	// asdf := result.StartTime.Add(time.Minute * -15)
+	// olderThen15Min := lastExecution.Before(asdf)
+	// changedState := query.Result.State != result.State
+	//
+	// return changedState || olderThen15Min
+	return false
 }
@@ -1,59 +1,49 @@
|
|||||||
package alerting
|
package alerting
|
||||||
|
|
||||||
import (
|
// func TestAlertResultHandler(t *testing.T) {
|
||||||
"testing"
|
// Convey("Test result Handler", t, func() {
|
||||||
"time"
|
// resultHandler := ResultHandlerImpl{}
|
||||||
|
// mockResult := &AlertResult{
|
||||||
"github.com/grafana/grafana/pkg/bus"
|
// State: alertstates.Ok,
|
||||||
m "github.com/grafana/grafana/pkg/models"
|
// AlertJob: &AlertJob{
|
||||||
"github.com/grafana/grafana/pkg/services/alerting/alertstates"
|
// Rule: &AlertRule{
|
||||||
. "github.com/smartystreets/goconvey/convey"
|
// Id: 1,
|
||||||
)
|
// OrgId: 1,
|
||||||
|
// },
|
||||||
func TestAlertResultHandler(t *testing.T) {
|
// },
|
||||||
Convey("Test result Handler", t, func() {
|
// }
|
||||||
resultHandler := ResultHandlerImpl{}
|
// mockAlertState := &m.AlertState{}
|
||||||
mockResult := &AlertResult{
|
// bus.ClearBusHandlers()
|
||||||
State: alertstates.Ok,
|
// bus.AddHandler("test", func(query *m.GetLastAlertStateQuery) error {
|
||||||
AlertJob: &AlertJob{
|
// query.Result = mockAlertState
|
||||||
Rule: &AlertRule{
|
// return nil
|
||||||
Id: 1,
|
// })
|
||||||
OrgId: 1,
|
//
|
||||||
},
|
// Convey("Should update", func() {
|
||||||
},
|
//
|
||||||
}
|
// Convey("when no earlier alert state", func() {
|
||||||
mockAlertState := &m.AlertState{}
|
// mockAlertState = nil
|
||||||
bus.ClearBusHandlers()
|
// So(resultHandler.shouldUpdateState(mockResult), ShouldBeTrue)
|
||||||
bus.AddHandler("test", func(query *m.GetLastAlertStateQuery) error {
|
// })
|
||||||
query.Result = mockAlertState
|
//
|
||||||
return nil
|
// Convey("alert state have changed", func() {
|
||||||
})
|
// mockAlertState = &m.AlertState{
|
||||||
|
// State: alertstates.Critical,
|
||||||
Convey("Should update", func() {
|
// }
|
||||||
|
// mockResult.State = alertstates.Ok
|
||||||
Convey("when no earlier alert state", func() {
|
// So(resultHandler.shouldUpdateState(mockResult), ShouldBeTrue)
|
||||||
mockAlertState = nil
|
// })
|
||||||
So(resultHandler.shouldUpdateState(mockResult), ShouldBeTrue)
|
//
|
||||||
})
|
// Convey("last alert state was 15min ago", func() {
|
||||||
|
// now := time.Now()
|
||||||
Convey("alert state have changed", func() {
|
// mockAlertState = &m.AlertState{
|
||||||
mockAlertState = &m.AlertState{
|
// State: alertstates.Critical,
|
||||||
State: alertstates.Critical,
|
// Created: now.Add(time.Minute * -30),
|
||||||
}
|
// }
|
||||||
mockResult.State = alertstates.Ok
|
// mockResult.State = alertstates.Critical
|
||||||
So(resultHandler.shouldUpdateState(mockResult), ShouldBeTrue)
|
// mockResult.StartTime = time.Now()
|
||||||
})
|
// So(resultHandler.shouldUpdateState(mockResult), ShouldBeTrue)
|
||||||
|
// })
|
||||||
Convey("last alert state was 15min ago", func() {
|
// })
|
||||||
now := time.Now()
|
// })
|
||||||
mockAlertState = &m.AlertState{
|
// }
|
||||||
State: alertstates.Critical,
|
|
||||||
Created: now.Add(time.Minute * -30),
|
|
||||||
}
|
|
||||||
mockResult.State = alertstates.Critical
|
|
||||||
mockResult.StartTime = time.Now()
|
|
||||||
So(resultHandler.shouldUpdateState(mockResult), ShouldBeTrue)
|
|
||||||
})
|
|
||||||
})
|
|
||||||
})
|
|
||||||
}
|
|
||||||
|
|||||||