refactor(tsdb): changed tsdb time series model to use null.Float instead of pointers

Torkel Ödegaard
2016-09-28 09:15:48 +02:00
parent 63caedb094
commit 8d5857661e
11 changed files with 102 additions and 90 deletions

View File

@@ -5,6 +5,7 @@ import (
 	"github.com/grafana/grafana/pkg/components/simplejson"
 	"github.com/grafana/grafana/pkg/services/alerting"
+	"gopkg.in/guregu/null.v3"
 )
 var (
@@ -13,13 +14,13 @@ var (
 )
 type AlertEvaluator interface {
-	Eval(reducedValue *float64) bool
+	Eval(reducedValue null.Float) bool
 }
 type NoDataEvaluator struct{}
-func (e *NoDataEvaluator) Eval(reducedValue *float64) bool {
-	return reducedValue == nil
+func (e *NoDataEvaluator) Eval(reducedValue null.Float) bool {
+	return reducedValue.Valid == false
 }
 type ThresholdEvaluator struct {
@@ -43,16 +44,16 @@ func newThresholdEvaludator(typ string, model *simplejson.Json) (*ThresholdEvalu
 	return defaultEval, nil
 }
-func (e *ThresholdEvaluator) Eval(reducedValue *float64) bool {
-	if reducedValue == nil {
+func (e *ThresholdEvaluator) Eval(reducedValue null.Float) bool {
+	if reducedValue.Valid == false {
 		return false
 	}
 	switch e.Type {
 	case "gt":
-		return *reducedValue > e.Threshold
+		return reducedValue.Float64 > e.Threshold
 	case "lt":
-		return *reducedValue < e.Threshold
+		return reducedValue.Float64 < e.Threshold
 	}
 	return false
@@ -86,16 +87,18 @@ func newRangedEvaluator(typ string, model *simplejson.Json) (*RangedEvaluator, e
 	return rangedEval, nil
 }
-func (e *RangedEvaluator) Eval(reducedValue *float64) bool {
-	if reducedValue == nil {
+func (e *RangedEvaluator) Eval(reducedValue null.Float) bool {
+	if reducedValue.Valid == false {
 		return false
 	}
+	floatValue := reducedValue.Float64
 	switch e.Type {
 	case "within_range":
-		return (e.Lower < *reducedValue && e.Upper > *reducedValue) || (e.Upper < *reducedValue && e.Lower > *reducedValue)
+		return (e.Lower < floatValue && e.Upper > floatValue) || (e.Upper < floatValue && e.Lower > floatValue)
 	case "outside_range":
-		return (e.Upper < *reducedValue && e.Lower < *reducedValue) || (e.Upper > *reducedValue && e.Lower > *reducedValue)
+		return (e.Upper < floatValue && e.Lower < floatValue) || (e.Upper > floatValue && e.Lower > floatValue)
 	}
 	return false
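
For readers not familiar with gopkg.in/guregu/null.v3: a null.Float wraps a Float64 value together with a Valid flag, so the nil-pointer checks above become Valid checks and the pointer dereferences become plain field reads. A minimal standalone sketch of that semantics, mirroring the "gt" branch of ThresholdEvaluator.Eval (the evalGreaterThan helper is illustrative only, not part of this commit):

package main

import (
	"fmt"

	"gopkg.in/guregu/null.v3"
)

// evalGreaterThan mirrors the "gt" case above: a null (invalid) reduced value
// never fires, otherwise the wrapped float is compared against the threshold.
func evalGreaterThan(reducedValue null.Float, threshold float64) bool {
	if !reducedValue.Valid {
		return false
	}
	return reducedValue.Float64 > threshold
}

func main() {
	fmt.Println(evalGreaterThan(null.FloatFrom(120), 100))    // true
	fmt.Println(evalGreaterThan(null.FloatFrom(90), 100))     // false
	fmt.Println(evalGreaterThan(null.FloatFromPtr(nil), 100)) // false: null never fires
}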

View File

@@ -3,6 +3,8 @@ package conditions
 import (
 	"testing"
+	"gopkg.in/guregu/null.v3"
 	"github.com/grafana/grafana/pkg/components/simplejson"
 	. "github.com/smartystreets/goconvey/convey"
 )
@@ -14,7 +16,7 @@ func evalutorScenario(json string, reducedValue float64, datapoints ...float64)
 	evaluator, err := NewAlertEvaluator(jsonModel)
 	So(err, ShouldBeNil)
-	return evaluator.Eval(&reducedValue)
+	return evaluator.Eval(null.FloatFrom(reducedValue))
 }
 func TestEvalutors(t *testing.T) {
@@ -51,6 +53,6 @@ func TestEvalutors(t *testing.T) {
 		evaluator, err := NewAlertEvaluator(jsonModel)
 		So(err, ShouldBeNil)
-		So(evaluator.Eval(nil), ShouldBeTrue)
+		So(evaluator.Eval(null.FloatFromPtr(nil)), ShouldBeTrue)
 	})
 }

View File

@@ -46,21 +46,21 @@ func (c *QueryCondition) Eval(context *alerting.EvalContext) {
 		reducedValue := c.Reducer.Reduce(series)
 		evalMatch := c.Evaluator.Eval(reducedValue)
-		if reducedValue == nil {
+		if reducedValue.Valid == false {
 			emptySerieCount++
 			continue
 		}
 		if context.IsTestRun {
			context.Logs = append(context.Logs, &alerting.ResultLogEntry{
-				Message: fmt.Sprintf("Condition[%d]: Eval: %v, Metric: %s, Value: %1.3f", c.Index, evalMatch, series.Name, *reducedValue),
+				Message: fmt.Sprintf("Condition[%d]: Eval: %v, Metric: %s, Value: %1.3f", c.Index, evalMatch, series.Name, reducedValue.Float64),
 			})
 		}
 		if evalMatch {
 			context.EvalMatches = append(context.EvalMatches, &alerting.EvalMatch{
 				Metric: series.Name,
-				Value: *reducedValue,
+				Value: reducedValue.Float64,
 			})
 		}
 	}

View File

@@ -3,6 +3,8 @@ package conditions
 import (
 	"testing"
+	null "gopkg.in/guregu/null.v3"
 	"github.com/grafana/grafana/pkg/bus"
 	"github.com/grafana/grafana/pkg/components/simplejson"
 	m "github.com/grafana/grafana/pkg/models"
@@ -41,9 +43,8 @@ func TestQueryCondition(t *testing.T) {
 			})
 			Convey("should fire when avg is above 100", func() {
-				one := float64(120)
-				two := float64(0)
-				ctx.series = tsdb.TimeSeriesSlice{tsdb.NewTimeSeries("test1", [][2]*float64{{&one, &two}})}
+				points := tsdb.NewTimeSeriesPointsFromArgs(120, 0)
+				ctx.series = tsdb.TimeSeriesSlice{tsdb.NewTimeSeries("test1", points)}
 				ctx.exec()
 				So(ctx.result.Error, ShouldBeNil)
@@ -51,9 +52,8 @@ func TestQueryCondition(t *testing.T) {
 			})
 			Convey("Should not fire when avg is below 100", func() {
-				one := float64(90)
-				two := float64(0)
-				ctx.series = tsdb.TimeSeriesSlice{tsdb.NewTimeSeries("test1", [][2]*float64{{&one, &two}})}
+				points := tsdb.NewTimeSeriesPointsFromArgs(90, 0)
+				ctx.series = tsdb.TimeSeriesSlice{tsdb.NewTimeSeries("test1", points)}
 				ctx.exec()
 				So(ctx.result.Error, ShouldBeNil)
@@ -61,11 +61,9 @@ func TestQueryCondition(t *testing.T) {
 			})
 			Convey("Should fire if only first serie matches", func() {
-				one := float64(120)
-				two := float64(0)
 				ctx.series = tsdb.TimeSeriesSlice{
-					tsdb.NewTimeSeries("test1", [][2]*float64{{&one, &two}}),
-					tsdb.NewTimeSeries("test2", [][2]*float64{{&two, &two}}),
+					tsdb.NewTimeSeries("test1", tsdb.NewTimeSeriesPointsFromArgs(120, 0)),
+					tsdb.NewTimeSeries("test2", tsdb.NewTimeSeriesPointsFromArgs(0, 0)),
 				}
 				ctx.exec()
@@ -76,8 +74,8 @@ func TestQueryCondition(t *testing.T) {
 			Convey("Empty series", func() {
 				Convey("Should set NoDataFound both series are empty", func() {
 					ctx.series = tsdb.TimeSeriesSlice{
-						tsdb.NewTimeSeries("test1", [][2]*float64{}),
-						tsdb.NewTimeSeries("test2", [][2]*float64{}),
+						tsdb.NewTimeSeries("test1", tsdb.NewTimeSeriesPointsFromArgs()),
+						tsdb.NewTimeSeries("test2", tsdb.NewTimeSeriesPointsFromArgs()),
 					}
 					ctx.exec()
@@ -86,10 +84,9 @@ func TestQueryCondition(t *testing.T) {
 				})
 				Convey("Should set NoDataFound both series contains null", func() {
-					one := float64(120)
 					ctx.series = tsdb.TimeSeriesSlice{
-						tsdb.NewTimeSeries("test1", [][2]*float64{{nil, &one}}),
-						tsdb.NewTimeSeries("test2", [][2]*float64{{nil, &one}}),
+						tsdb.NewTimeSeries("test1", tsdb.TimeSeriesPoints{tsdb.TimePoint{null.FloatFromPtr(nil), null.FloatFrom(0)}}),
+						tsdb.NewTimeSeries("test2", tsdb.TimeSeriesPoints{tsdb.TimePoint{null.FloatFromPtr(nil), null.FloatFrom(0)}}),
 					}
 					ctx.exec()
@@ -98,11 +95,9 @@ func TestQueryCondition(t *testing.T) {
 				})
 				Convey("Should not set NoDataFound if one serie is empty", func() {
-					one := float64(120)
-					two := float64(0)
 					ctx.series = tsdb.TimeSeriesSlice{
-						tsdb.NewTimeSeries("test1", [][2]*float64{}),
-						tsdb.NewTimeSeries("test2", [][2]*float64{{&one, &two}}),
+						tsdb.NewTimeSeries("test1", tsdb.NewTimeSeriesPointsFromArgs()),
+						tsdb.NewTimeSeries("test2", tsdb.NewTimeSeriesPointsFromArgs(120, 0)),
 					}
 					ctx.exec()
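
The tests above lean on new tsdb helpers (tsdb.TimePoint, tsdb.TimeSeriesPoints, tsdb.NewTimePoint, tsdb.NewTimeSeriesPointsFromArgs, tsdb.NewTimeSeries) that belong to this commit's model change but live in files not shown in this excerpt. A rough sketch of the shape those call sites imply, offered as an assumption rather than the committed code:

package tsdb

import "gopkg.in/guregu/null.v3"

// A point is a (value, timestamp) pair; index 0 is the value and index 1 the
// timestamp, matching how the reducer reads point[0] in the file that follows.
type TimePoint [2]null.Float
type TimeSeriesPoints []TimePoint

// Assumed series shape, inferred from the reducer test below.
type TimeSeries struct {
	Name   string
	Points TimeSeriesPoints
}
type TimeSeriesSlice []*TimeSeries

// NewTimePoint builds a point whose value and timestamp are both non-null.
func NewTimePoint(value float64, timestamp float64) TimePoint {
	return TimePoint{null.FloatFrom(value), null.FloatFrom(timestamp)}
}

// NewTimeSeriesPointsFromArgs turns a flat list of value/timestamp pairs into
// points, e.g. NewTimeSeriesPointsFromArgs(120, 0) yields one point with value 120.
func NewTimeSeriesPointsFromArgs(values ...float64) TimeSeriesPoints {
	points := make(TimeSeriesPoints, 0, len(values)/2)
	for i := 0; i+1 < len(values); i += 2 {
		points = append(points, NewTimePoint(values[i], values[i+1]))
	}
	return points
}

// NewTimeSeries pairs a name with its points (signature implied by the tests above).
func NewTimeSeries(name string, points TimeSeriesPoints) *TimeSeries {
	return &TimeSeries{Name: name, Points: points}
}

A null sample, as in the NoDataFound test above, is expressed by constructing a TimePoint directly with null.FloatFromPtr(nil) for the value slot.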

View File

@@ -4,19 +4,20 @@ import (
 	"math"
 	"github.com/grafana/grafana/pkg/tsdb"
+	"gopkg.in/guregu/null.v3"
 )
 type QueryReducer interface {
-	Reduce(timeSeries *tsdb.TimeSeries) *float64
+	Reduce(timeSeries *tsdb.TimeSeries) null.Float
 }
 type SimpleReducer struct {
 	Type string
 }
-func (s *SimpleReducer) Reduce(series *tsdb.TimeSeries) *float64 {
+func (s *SimpleReducer) Reduce(series *tsdb.TimeSeries) null.Float {
 	if len(series.Points) == 0 {
-		return nil
+		return null.FloatFromPtr(nil)
 	}
 	value := float64(0)
@@ -25,36 +26,36 @@ func (s *SimpleReducer) Reduce(series *tsdb.TimeSeries) *float64 {
 	switch s.Type {
 	case "avg":
 		for _, point := range series.Points {
-			if point[0] != nil {
-				value += *point[0]
+			if point[0].Valid {
+				value += point[0].Float64
 				allNull = false
 			}
 		}
 		value = value / float64(len(series.Points))
 	case "sum":
 		for _, point := range series.Points {
-			if point[0] != nil {
-				value += *point[0]
+			if point[0].Valid {
+				value += point[0].Float64
 				allNull = false
 			}
 		}
 	case "min":
 		value = math.MaxFloat64
 		for _, point := range series.Points {
-			if point[0] != nil {
+			if point[0].Valid {
 				allNull = false
-				if value > *point[0] {
-					value = *point[0]
+				if value > point[0].Float64 {
+					value = point[0].Float64
 				}
 			}
 		}
 	case "max":
 		value = -math.MaxFloat64
 		for _, point := range series.Points {
-			if point[0] != nil {
+			if point[0].Valid {
 				allNull = false
-				if value < *point[0] {
-					value = *point[0]
+				if value < point[0].Float64 {
+					value = point[0].Float64
 				}
 			}
 		}
@@ -64,10 +65,10 @@ func (s *SimpleReducer) Reduce(series *tsdb.TimeSeries) *float64 {
 	}
 	if allNull {
-		return nil
+		return null.FloatFromPtr(nil)
 	}
-	return &value
+	return null.FloatFrom(value)
 }
 func NewSimpleReducer(typ string) *SimpleReducer {
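
A usage sketch of the reworked reducer, assuming the point helpers sketched earlier and the grafana/pkg import paths (both assumptions, not shown in this excerpt): null samples contribute nothing to the sum, and a series with no points or only null points reduces to an invalid Float.

package main

import (
	"fmt"

	// Import paths are assumed from the repo layout, not confirmed by this excerpt.
	"github.com/grafana/grafana/pkg/services/alerting/conditions"
	"github.com/grafana/grafana/pkg/tsdb"
	"gopkg.in/guregu/null.v3"
)

func main() {
	points := tsdb.TimeSeriesPoints{
		tsdb.NewTimePoint(10, 1000),
		tsdb.TimePoint{null.FloatFromPtr(nil), null.FloatFrom(2000)}, // a null sample
		tsdb.NewTimePoint(20, 3000),
	}
	series := tsdb.NewTimeSeries("cpu", points)

	avg := conditions.NewSimpleReducer("avg").Reduce(series)
	if avg.Valid {
		// (10+20)/3 = 10: the null sample adds nothing to the sum but still
		// counts in the divisor of the "avg" branch above.
		fmt.Printf("avg=%.2f\n", avg.Float64)
	}

	empty := conditions.NewSimpleReducer("avg").Reduce(tsdb.NewTimeSeries("empty", tsdb.NewTimeSeriesPointsFromArgs()))
	fmt.Println(empty.Valid) // false: an empty series reduces to a null value
}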

View File

@@ -10,44 +10,41 @@ import (
 func TestSimpleReducer(t *testing.T) {
 	Convey("Test simple reducer by calculating", t, func() {
 		Convey("avg", func() {
-			result := *testReducer("avg", 1, 2, 3)
+			result := testReducer("avg", 1, 2, 3)
 			So(result, ShouldEqual, float64(2))
 		})
 		Convey("sum", func() {
-			result := *testReducer("sum", 1, 2, 3)
+			result := testReducer("sum", 1, 2, 3)
 			So(result, ShouldEqual, float64(6))
 		})
 		Convey("min", func() {
-			result := *testReducer("min", 3, 2, 1)
+			result := testReducer("min", 3, 2, 1)
 			So(result, ShouldEqual, float64(1))
 		})
 		Convey("max", func() {
-			result := *testReducer("max", 1, 2, 3)
+			result := testReducer("max", 1, 2, 3)
 			So(result, ShouldEqual, float64(3))
 		})
 		Convey("count", func() {
-			result := *testReducer("count", 1, 2, 3000)
+			result := testReducer("count", 1, 2, 3000)
 			So(result, ShouldEqual, float64(3))
 		})
 	})
 }
-func testReducer(typ string, datapoints ...float64) *float64 {
+func testReducer(typ string, datapoints ...float64) float64 {
 	reducer := NewSimpleReducer(typ)
-	var timeserie [][2]*float64
-	dummieTimestamp := float64(521452145)
+	series := &tsdb.TimeSeries{
+		Name: "test time serie",
+	}
 	for idx := range datapoints {
-		timeserie = append(timeserie, [2]*float64{&datapoints[idx], &dummieTimestamp})
+		series.Points = append(series.Points, tsdb.NewTimePoint(datapoints[idx], 1234134))
 	}
-	tsdb := &tsdb.TimeSeries{
-		Name: "test time serie",
-		Points: timeserie,
-	}
-	return reducer.Reduce(tsdb)
+	return reducer.Reduce(series).Float64
 }