feat(alerting): fixed s3 upload issue, progress on alerting on null/missing data, updated ini package to get support for line continuations

This commit is contained in:
Torkel Ödegaard
2016-09-07 10:11:00 +02:00
parent fc8f0721cd
commit 50b41130ca
23 changed files with 2423 additions and 1439 deletions

View File

@@ -19,9 +19,9 @@ func NewImageUploader() (ImageUploader, error) {
return nil, err
}
bucket := s3sec.Key("secret_key").String()
accessKey := s3sec.Key("access_key").String()
secretKey := s3sec.Key("secret_key").String()
bucket := s3sec.Key("bucket_url").MustString("")
accessKey := s3sec.Key("access_key").MustString("")
secretKey := s3sec.Key("secret_key").MustString("")
if bucket == "" {
return nil, fmt.Errorf("Could not find bucket setting for image.uploader.s3")

View File

@@ -3,7 +3,10 @@ package imguploader
import (
"io/ioutil"
"net/http"
"net/url"
"path"
"github.com/grafana/grafana/pkg/log"
"github.com/grafana/grafana/pkg/util"
"github.com/kr/s3/s3util"
)
@@ -12,6 +15,7 @@ type S3Uploader struct {
bucket string
secretKey string
accessKey string
log log.Logger
}
func NewS3Uploader(bucket, accessKey, secretKey string) *S3Uploader {
@@ -19,10 +23,11 @@ func NewS3Uploader(bucket, accessKey, secretKey string) *S3Uploader {
bucket: bucket,
accessKey: accessKey,
secretKey: secretKey,
log: log.New("s3uploader"),
}
}
func (u *S3Uploader) Upload(path string) (string, error) {
func (u *S3Uploader) Upload(imageDiskPath string) (string, error) {
s3util.DefaultConfig.AccessKey = u.accessKey
s3util.DefaultConfig.SecretKey = u.secretKey
@@ -31,15 +36,26 @@ func (u *S3Uploader) Upload(path string) (string, error) {
header.Add("x-amz-acl", "public-read")
header.Add("Content-Type", "image/png")
fullUrl := u.bucket + util.GetRandomString(20) + ".png"
writer, err := s3util.Create(fullUrl, header, nil)
var imageUrl *url.URL
var err error
if imageUrl, err = url.Parse(u.bucket); err != nil {
return "", err
}
// add image to url
imageUrl.Path = path.Join(imageUrl.Path, util.GetRandomString(20)+".png")
imageUrlString := imageUrl.String()
log.Debug("Uploading image to s3", "url", imageUrlString)
writer, err := s3util.Create(imageUrlString, header, nil)
if err != nil {
return "", err
}
defer writer.Close()
imgData, err := ioutil.ReadFile(path)
imgData, err := ioutil.ReadFile(imageDiskPath)
if err != nil {
return "", err
}
@@ -49,5 +65,5 @@ func (u *S3Uploader) Upload(path string) (string, error) {
return "", err
}
return fullUrl, nil
return imageUrlString, nil
}

View File

@@ -44,6 +44,10 @@ func newThresholdEvaludator(typ string, model *simplejson.Json) (*ThresholdEvalu
}
func (e *ThresholdEvaluator) Eval(reducedValue *float64) bool {
if reducedValue == nil {
return false
}
switch e.Type {
case "gt":
return *reducedValue > e.Threshold
@@ -83,6 +87,10 @@ func newRangedEvaluator(typ string, model *simplejson.Json) (*RangedEvaluator, e
}
func (e *RangedEvaluator) Eval(reducedValue *float64) bool {
if reducedValue == nil {
return false
}
switch e.Type {
case "within_range":
return (e.Lower < *reducedValue && e.Upper > *reducedValue) || (e.Upper < *reducedValue && e.Lower > *reducedValue)

View File

@@ -1,6 +1,10 @@
package conditions
import "github.com/grafana/grafana/pkg/tsdb"
import (
"math"
"github.com/grafana/grafana/pkg/tsdb"
)
type QueryReducer interface {
Reduce(timeSeries *tsdb.TimeSeries) *float64
@@ -15,41 +19,53 @@ func (s *SimpleReducer) Reduce(series *tsdb.TimeSeries) *float64 {
return nil
}
var value float64 = 0
value := float64(0)
allNull := true
switch s.Type {
case "avg":
for _, point := range series.Points {
value += point[0]
if point[0] != nil {
value += *point[0]
allNull = false
}
}
value = value / float64(len(series.Points))
case "sum":
for _, point := range series.Points {
value += point[0]
if point[0] != nil {
value += *point[0]
allNull = false
}
}
case "min":
for i, point := range series.Points {
if i == 0 {
value = point[0]
}
if value > point[0] {
value = point[0]
value = math.MaxFloat64
for _, point := range series.Points {
if point[0] != nil {
allNull = false
if value > *point[0] {
value = *point[0]
}
}
}
case "max":
value = -math.MaxFloat64
for _, point := range series.Points {
if value < point[0] {
value = point[0]
if point[0] != nil {
allNull = false
if value < *point[0] {
value = *point[0]
}
}
}
case "mean":
meanPosition := int64(len(series.Points) / 2)
value = series.Points[meanPosition][0]
case "count":
value = float64(len(series.Points))
}
if allNull {
return nil
}
return &value
}

View File

@@ -37,31 +37,36 @@ type StateDescription struct {
}
func (c *EvalContext) GetStateModel() *StateDescription {
if c.Error != nil {
return &StateDescription{
Color: "#D63232",
Text: "EXECUTION ERROR",
}
}
if !c.Firing {
switch c.Rule.State {
case m.AlertStateOK:
return &StateDescription{
Color: "#36a64f",
Text: "OK",
}
}
if c.Rule.Severity == m.AlertSeverityWarning {
case m.AlertStateUnknown:
return &StateDescription{
Color: "#888888",
Text: "UNKNOWN",
}
case m.AlertStateExeuctionError:
return &StateDescription{
Color: "#000",
Text: "EXECUTION_ERROR",
}
case m.AlertStateWarning:
return &StateDescription{
Color: "#fd821b",
Text: "WARNING",
}
} else {
case m.AlertStateCritical:
return &StateDescription{
Color: "#D63232",
Text: "CRITICAL",
}
default:
panic("Unknown rule state " + c.Rule.State)
}
}
func (a *EvalContext) GetDurationMs() float64 {

View File

@@ -54,6 +54,15 @@ func (e *DefaultEvalHandler) retry(context *EvalContext) {
}
func (e *DefaultEvalHandler) eval(context *EvalContext) {
defer func() {
if err := recover(); err != nil {
e.log.Error("Alerting rule eval panic", "error", err, "stack", log.Stack(1))
if panicErr, ok := err.(error); ok {
context.Error = panicErr
}
}
}()
for _, condition := range context.Rule.Conditions {
condition.Eval(context)

View File

@@ -1,6 +1,6 @@
package graphite
type TargetResponseDTO struct {
Target string `json:"target"`
DataPoints [][2]float64 `json:"datapoints"`
Target string `json:"target"`
DataPoints [][2]*float64 `json:"datapoints"`
}

View File

@@ -46,13 +46,13 @@ type QueryResult struct {
}
type TimeSeries struct {
Name string `json:"name"`
Points [][2]float64 `json:"points"`
Name string `json:"name"`
Points [][2]*float64 `json:"points"`
}
type TimeSeriesSlice []*TimeSeries
func NewTimeSeries(name string, points [][2]float64) *TimeSeries {
func NewTimeSeries(name string, points [][2]*float64) *TimeSeries {
return &TimeSeries{
Name: name,
Points: points,