Alerting: Logs should not be capitalized and the errors key should be "err" (#50333)
* Alerting: decapitalize log lines and use "err" as the key for errors. Found using the regexes (logger|log).(Warn|Debug|Info|Error)\([A-Z] and (logger|log).(Warn|Debug|Info|Error)\(.+"error"
This commit is contained in:
parent af841a79ae
commit 0cde283505
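The convention enforced throughout this diff is the same everywhere: log messages start lower-case, and the structured-logging key for errors is "err" rather than "error". A minimal before/after sketch, using the standard library's log/slog as a stand-in for Grafana's pkg/infra/log (both take a message followed by alternating key/value pairs; the message and error value here are illustrative, not taken from the diff):

package main

import (
    "errors"
    "log/slog"
    "os"
)

func main() {
    // Key/value-style logger, similar in shape to Grafana's pkg/infra/log.
    logger := slog.New(slog.NewTextHandler(os.Stderr, nil))
    err := errors.New("connection refused") // illustrative error value

    // Before this commit: capitalized message, "error" as the key.
    logger.Error("Failed to send notification", "error", err)

    // After this commit: lower-case message, "err" as the key.
    logger.Error("failed to send notification", "err", err)
}

Note that a handful of messages in the diff keep their capitalization (for example the Threema and Victorops notifiers) and only have the key renamed.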
@@ -45,7 +45,8 @@ Scopes must have an order to ensure consistency and ease of search, this helps u
 ## Grafana Alerting - main / unreleased
 
+- [ENHANCEMENT] Scheduler: Adds new metrics to track rules that might be scheduled.
 ## 9.0.0
 
 - [ENHANCEMENT] Scheduler: Ticker expose new metrics. In legacy, metrics are prefixed with `legacy_` #47828, #48190
 - `grafana_alerting_ticker_last_consumed_tick_timestamp_seconds`
 - `grafana_alerting_ticker_next_tick_timestamp_seconds`
@@ -53,15 +54,16 @@ Scopes must have an order to ensure consistency and ease of search, this helps u
 - [ENHANCEMENT] Create folder 'General Alerting' when Grafana starts from the scratch #48866
 - [ENHANCEMENT] Rule changes authorization logic to use UID folder scope instead of ID scope #48970
 - [ENHANCEMENT] Scheduler: ticker to support stopping #48142
 - [ENHANCEMENT] Scheduler: Adds new metrics to track rules that might be scheduled.
 - [FEATURE] Indicate whether routes are provisioned when GETting Alertmanager configuration #47857
 - [FEATURE] Indicate whether contact point is provisioned when GETting Alertmanager configuration #48323
 - [FEATURE] Indicate whether alert rule is provisioned when GETting the rule #48458
 - [FEATURE] Alert rules with associated panels will take screenshots. #49293 #49338 #49374 #49377 #49378 #49379 #49381 #49385 #49439 #49445
 - [BUGFIX] Migration: ignore alerts that do not belong to any existing organization\dashboard #49192
 - [BUGFIX] Allow anonymous access to alerts #49203
+- [BUGFIX] Allow anonymous access to alerts #49203
 - [BUGFIX] RBAC: replace create\update\delete actions for notification policies by alert.notifications:write #49185
 - [BUGFIX] Fix access to alerts for Viewer role with editor permissions in folder #49270
-- [BUGFIX] Alerting: Remove double quotes from double quoted matchers #xxxx
+- [BUGFIX] Alerting: Remove double quotes from double quoted matchers #50038
 - [BUGFIX] Alerting: rules API to not detect difference between nil and empty map (Annotations, Labels) #50192
 
 ## 8.5.3
@@ -139,7 +139,7 @@ func (srv PrometheusSrv) RouteGetRuleStatuses(c *models.ReqContext) response.Res
     }
 
     if len(namespaceMap) == 0 {
-        srv.log.Debug("User does not have access to any namespaces")
+        srv.log.Debug("user does not have access to any namespaces")
         return response.JSON(http.StatusOK, ruleResponse)
     }
 
@@ -257,7 +257,7 @@ func (srv RulerSrv) RouteGetRulesConfig(c *models.ReqContext) response.Response
     result := apimodels.NamespaceConfigResponse{}
 
     if len(namespaceMap) == 0 {
-        srv.log.Debug("User has no access to any namespaces")
+        srv.log.Debug("user has no access to any namespaces")
         return response.JSON(http.StatusOK, result)
     }
 
@@ -142,7 +142,7 @@ func (ng *AlertNG) init() error {
 
     appUrl, err := url.Parse(ng.Cfg.AppURL)
     if err != nil {
-        ng.Log.Error("Failed to parse application URL. Continue without it.", "error", err)
+        ng.Log.Error("Failed to parse application URL. Continue without it.", "err", err)
         appUrl = nil
     }
 
@@ -8,10 +8,11 @@ import (
     "net/url"
     "strings"
 
-    "github.com/grafana/grafana/pkg/infra/log"
-    "github.com/grafana/grafana/pkg/models"
     "github.com/prometheus/alertmanager/template"
     "github.com/prometheus/alertmanager/types"
+
+    "github.com/grafana/grafana/pkg/infra/log"
+    "github.com/grafana/grafana/pkg/models"
 )
 
 // GetDecryptedValueFn is a function that returns the decrypted value of
@@ -90,7 +91,7 @@ type AlertmanagerNotifier struct {
 
 // Notify sends alert notifications to Alertmanager.
 func (n *AlertmanagerNotifier) Notify(ctx context.Context, as ...*types.Alert) (bool, error) {
-    n.logger.Debug("Sending Alertmanager alert", "alertmanager", n.Name)
+    n.logger.Debug("sending Alertmanager alert", "alertmanager", n.Name)
     if len(as) == 0 {
         return true, nil
     }
@@ -110,7 +111,7 @@ func (n *AlertmanagerNotifier) Notify(ctx context.Context, as ...*types.Alert) (
         password: n.basicAuthPassword,
         body: body,
     }, n.logger); err != nil {
-        n.logger.Warn("Failed to send to Alertmanager", "error", err, "alertmanager", n.Name, "url", u.String())
+        n.logger.Warn("failed to send to Alertmanager", "err", err, "alertmanager", n.Name, "url", u.String())
         lastErr = err
         numErrs++
     }
@@ -118,7 +119,7 @@ func (n *AlertmanagerNotifier) Notify(ctx context.Context, as ...*types.Alert) (
 
     if numErrs == len(n.urls) {
         // All attempts to send alerts have failed
-        n.logger.Warn("All attempts to send to Alertmanager failed", "alertmanager", n.Name)
+        n.logger.Warn("all attempts to send to Alertmanager failed", "alertmanager", n.Name)
         return false, fmt.Errorf("failed to send alert to Alertmanager: %w", lastErr)
     }
 
@@ -79,7 +79,7 @@ type DingDingNotifier struct {
 
 // Notify sends the alert notification to dingding.
 func (dd *DingDingNotifier) Notify(ctx context.Context, as ...*types.Alert) (bool, error) {
-    dd.log.Info("Sending dingding")
+    dd.log.Info("sending dingding")
 
     ruleURL := joinUrlPath(dd.tmpl.ExternalURL.String(), "/alerting/list", dd.log)
 
@@ -185,7 +185,7 @@ func (d DiscordNotifier) Notify(ctx context.Context, as ...*types.Alert) (bool,
     }
 
     if err := d.ns.SendWebhookSync(ctx, cmd); err != nil {
-        d.log.Error("Failed to send notification to Discord", "error", err)
+        d.log.Error("failed to send notification to Discord", "err", err)
         return false, err
     }
     return true, nil
@@ -225,7 +225,7 @@ func (d DiscordNotifier) constructAttachments(ctx context.Context, as []*types.A
             url := fmt.Sprintf("attachment://%s", base)
             reader, err := openImage(image.Path)
             if err != nil && !errors.Is(err, ngmodels.ErrImageNotFound) {
-                d.log.Warn("failed to retrieve image data from store", "error", err)
+                d.log.Warn("failed to retrieve image data from store", "err", err)
                 return nil
             }
 
@@ -262,7 +262,7 @@ func (d DiscordNotifier) buildRequest(ctx context.Context, url string, body []by
     defer func() {
         if err := w.Close(); err != nil {
             // Shouldn't matter since we already close w explicitly on the non-error path
-            d.log.Warn("Failed to close multipart writer", "err", err)
+            d.log.Warn("failed to close multipart writer", "err", err)
         }
     }()
 
@@ -145,7 +145,7 @@ func (en *EmailNotifier) Notify(ctx context.Context, as ...*types.Alert) (bool,
                 cmd.EmbeddedFiles = []string{image.Path}
                 cmd.Data["EmbeddedImage"] = file.Name()
             } else {
-                en.log.Warn("failed to access email notification image attachment data", "error", err)
+                en.log.Warn("failed to access email notification image attachment data", "err", err)
             }
         }
         return nil
@@ -79,7 +79,7 @@ func NewGoogleChatNotifier(config *GoogleChatConfig, images ImageStore, ns notif
 
 // Notify send an alert notification to Google Chat.
 func (gcn *GoogleChatNotifier) Notify(ctx context.Context, as ...*types.Alert) (bool, error) {
-    gcn.log.Debug("Executing Google Chat notification")
+    gcn.log.Debug("executing Google Chat notification")
 
     var tmplErr error
     tmpl, _ := TmplText(ctx, gcn.tmpl, as, gcn.log, &tmplErr)
@@ -119,7 +119,7 @@ func (gcn *GoogleChatNotifier) Notify(ctx context.Context, as ...*types.Alert) (
             },
         })
     } else {
-        gcn.log.Warn("Grafana External URL setting is missing or invalid. Skipping 'open in grafana' button to prevent google from displaying empty alerts.", "ruleURL", ruleURL)
+        gcn.log.Warn("Grafana external URL setting is missing or invalid. Skipping 'open in grafana' button to prevent Google from displaying empty alerts.", "ruleURL", ruleURL)
     }
 
     // Add text paragraph widget for the build version and timestamp.
@@ -190,7 +190,7 @@ func (gcn *GoogleChatNotifier) SendResolved() bool {
 func (gcn *GoogleChatNotifier) isUrlAbsolute(urlToCheck string) bool {
     parsed, err := url.Parse(urlToCheck)
     if err != nil {
-        gcn.log.Warn("Could not parse URL", "urlToCheck", urlToCheck)
+        gcn.log.Warn("could not parse URL", "urlToCheck", urlToCheck)
         return false
     }
 
@@ -88,7 +88,7 @@ func (kn *KafkaNotifier) Notify(ctx context.Context, as ...*types.Alert) (bool,
         state = models.AlertStateOK
     }
 
-    kn.log.Debug("Notifying Kafka", "alert_state", state)
+    kn.log.Debug("notifying Kafka", "alert_state", state)
 
     var tmplErr error
     tmpl, _ := TmplText(ctx, kn.tmpl, as, kn.log, &tmplErr)
@@ -7,11 +7,12 @@ import (
     "net/url"
     "path"
 
+    "github.com/prometheus/alertmanager/template"
+    "github.com/prometheus/alertmanager/types"
+
     "github.com/grafana/grafana/pkg/infra/log"
     "github.com/grafana/grafana/pkg/models"
     "github.com/grafana/grafana/pkg/services/notifications"
-    "github.com/prometheus/alertmanager/template"
-    "github.com/prometheus/alertmanager/types"
 )
 
 var (
@@ -74,7 +75,7 @@ type LineNotifier struct {
 
 // Notify send an alert notification to LINE
 func (ln *LineNotifier) Notify(ctx context.Context, as ...*types.Alert) (bool, error) {
-    ln.log.Debug("Executing line notification", "notification", ln.Name)
+    ln.log.Debug("executing line notification", "notification", ln.Name)
 
     ruleURL := path.Join(ln.tmpl.ExternalURL.String(), "/alerting/list")
 
@@ -105,7 +106,7 @@ func (ln *LineNotifier) Notify(ctx context.Context, as ...*types.Alert) (bool, e
     }
 
     if err := ln.ns.SendWebhookSync(ctx, cmd); err != nil {
-        ln.log.Error("Failed to send notification to LINE", "error", err, "body", body)
+        ln.log.Error("failed to send notification to LINE", "err", err, "body", body)
         return false, err
     }
 
@@ -8,15 +8,16 @@ import (
     "net/http"
     "sort"
 
+    "github.com/prometheus/alertmanager/notify"
+    "github.com/prometheus/alertmanager/template"
+    "github.com/prometheus/alertmanager/types"
+    "github.com/prometheus/common/model"
+
     "github.com/grafana/grafana/pkg/components/simplejson"
     "github.com/grafana/grafana/pkg/infra/log"
     "github.com/grafana/grafana/pkg/models"
     ngmodels "github.com/grafana/grafana/pkg/services/ngalert/models"
     "github.com/grafana/grafana/pkg/services/notifications"
-    "github.com/prometheus/alertmanager/notify"
-    "github.com/prometheus/alertmanager/template"
-    "github.com/prometheus/alertmanager/types"
-    "github.com/prometheus/common/model"
 )
 
 const (
@@ -109,11 +110,11 @@ func NewOpsgenieNotifier(config *OpsgenieConfig, ns notifications.WebhookSender,
 
 // Notify sends an alert notification to Opsgenie
 func (on *OpsgenieNotifier) Notify(ctx context.Context, as ...*types.Alert) (bool, error) {
-    on.log.Debug("Executing Opsgenie notification", "notification", on.Name)
+    on.log.Debug("executing Opsgenie notification", "notification", on.Name)
 
     alerts := types.Alerts(as...)
     if alerts.Status() == model.AlertResolved && !on.SendResolved() {
-        on.log.Debug("Not sending a trigger to Opsgenie", "status", alerts.Status(), "auto resolve", on.SendResolved())
+        on.log.Debug("not sending a trigger to Opsgenie", "status", alerts.Status(), "auto resolve", on.SendResolved())
         return true, nil
     }
 
@@ -7,14 +7,15 @@ import (
     "fmt"
     "os"
 
-    "github.com/grafana/grafana/pkg/infra/log"
-    "github.com/grafana/grafana/pkg/models"
-    ngmodels "github.com/grafana/grafana/pkg/services/ngalert/models"
-    "github.com/grafana/grafana/pkg/services/notifications"
     "github.com/prometheus/alertmanager/notify"
     "github.com/prometheus/alertmanager/template"
     "github.com/prometheus/alertmanager/types"
     "github.com/prometheus/common/model"
+
+    "github.com/grafana/grafana/pkg/infra/log"
+    "github.com/grafana/grafana/pkg/models"
+    ngmodels "github.com/grafana/grafana/pkg/services/ngalert/models"
+    "github.com/grafana/grafana/pkg/services/notifications"
 )
 
 const (
@@ -113,7 +114,7 @@ func NewPagerdutyNotifier(config *PagerdutyConfig, ns notifications.WebhookSende
 func (pn *PagerdutyNotifier) Notify(ctx context.Context, as ...*types.Alert) (bool, error) {
     alerts := types.Alerts(as...)
     if alerts.Status() == model.AlertResolved && !pn.SendResolved() {
-        pn.log.Debug("Not sending a trigger to Pagerduty", "status", alerts.Status(), "auto resolve", pn.SendResolved())
+        pn.log.Debug("not sending a trigger to Pagerduty", "status", alerts.Status(), "auto resolve", pn.SendResolved())
         return true, nil
     }
 
@@ -127,7 +128,7 @@ func (pn *PagerdutyNotifier) Notify(ctx context.Context, as ...*types.Alert) (bo
         return false, fmt.Errorf("marshal json: %w", err)
     }
 
-    pn.log.Info("Notifying Pagerduty", "event_type", eventType)
+    pn.log.Info("notifying Pagerduty", "event_type", eventType)
     cmd := &models.SendWebhookSync{
         Url: PagerdutyEventAPIURL,
         Body: string(body),
@@ -8,12 +8,13 @@ import (
     "mime/multipart"
     "strconv"
 
-    "github.com/grafana/grafana/pkg/infra/log"
-    "github.com/grafana/grafana/pkg/models"
-    "github.com/grafana/grafana/pkg/services/notifications"
     "github.com/prometheus/alertmanager/template"
     "github.com/prometheus/alertmanager/types"
     "github.com/prometheus/common/model"
+
+    "github.com/grafana/grafana/pkg/infra/log"
+    "github.com/grafana/grafana/pkg/models"
+    "github.com/grafana/grafana/pkg/services/notifications"
 )
 
 var (
@@ -145,7 +146,7 @@ func (pn *PushoverNotifier) Notify(ctx context.Context, as ...*types.Alert) (boo
     }
 
     if err := pn.ns.SendWebhookSync(ctx, cmd); err != nil {
-        pn.log.Error("Failed to send pushover notification", "error", err, "webhook", pn.Name)
+        pn.log.Error("failed to send pushover notification", "err", err, "webhook", pn.Name)
         return false, err
     }
 
@@ -7,12 +7,13 @@ import (
     "fmt"
     "strings"
 
-    "github.com/grafana/grafana/pkg/infra/log"
-    "github.com/grafana/grafana/pkg/models"
-    "github.com/grafana/grafana/pkg/services/notifications"
     "github.com/prometheus/alertmanager/template"
     "github.com/prometheus/alertmanager/types"
     "github.com/prometheus/common/model"
+
+    "github.com/grafana/grafana/pkg/infra/log"
+    "github.com/grafana/grafana/pkg/models"
+    "github.com/grafana/grafana/pkg/services/notifications"
 )
 
 type SensuGoNotifier struct {
@@ -99,7 +100,7 @@ func NewSensuGoNotifier(config *SensuGoConfig, ns notifications.WebhookSender, t
 
 // Notify sends an alert notification to Sensu Go
 func (sn *SensuGoNotifier) Notify(ctx context.Context, as ...*types.Alert) (bool, error) {
-    sn.log.Debug("Sending Sensu Go result")
+    sn.log.Debug("sending Sensu Go result")
 
     var tmplErr error
     tmpl, _ := TmplText(ctx, sn.tmpl, as, sn.log, &tmplErr)
@@ -176,7 +177,7 @@ func (sn *SensuGoNotifier) Notify(ctx context.Context, as ...*types.Alert) (bool
         },
     }
     if err := sn.ns.SendWebhookSync(ctx, cmd); err != nil {
-        sn.log.Error("Failed to send Sensu Go event", "error", err, "sensugo", sn.Name)
+        sn.log.Error("failed to send Sensu Go event", "err", err, "sensugo", sn.Name)
         return false, err
     }
 
@@ -14,14 +14,15 @@ import (
     "strings"
     "time"
 
+    "github.com/prometheus/alertmanager/config"
+    "github.com/prometheus/alertmanager/template"
+    "github.com/prometheus/alertmanager/types"
+
     "github.com/grafana/grafana/pkg/infra/log"
     "github.com/grafana/grafana/pkg/models"
     ngmodels "github.com/grafana/grafana/pkg/services/ngalert/models"
     "github.com/grafana/grafana/pkg/services/notifications"
     "github.com/grafana/grafana/pkg/setting"
-    "github.com/prometheus/alertmanager/config"
-    "github.com/prometheus/alertmanager/template"
-    "github.com/prometheus/alertmanager/types"
 )
 
 var SlackAPIEndpoint = "https://slack.com/api/chat.postMessage"
@@ -199,7 +200,7 @@ func (sn *SlackNotifier) Notify(ctx context.Context, alerts ...*types.Alert) (bo
         return false, fmt.Errorf("marshal json: %w", err)
     }
 
-    sn.log.Debug("Sending Slack API request", "url", sn.URL.String(), "data", string(b))
+    sn.log.Debug("sending Slack API request", "url", sn.URL.String(), "data", string(b))
     request, err := http.NewRequestWithContext(ctx, http.MethodPost, sn.URL.String(), bytes.NewReader(b))
     if err != nil {
         return false, fmt.Errorf("failed to create HTTP request: %w", err)
@@ -212,7 +213,7 @@ func (sn *SlackNotifier) Notify(ctx context.Context, alerts ...*types.Alert) (bo
             panic("Token should be set when using the Slack chat API")
         }
     } else {
-        sn.log.Debug("Adding authorization header to HTTP request")
+        sn.log.Debug("adding authorization header to HTTP request")
         request.Header.Set("Authorization", fmt.Sprintf("Bearer %s", sn.Token))
     }
 
@@ -252,7 +253,7 @@ var sendSlackRequest = func(request *http.Request, logger log.Logger) (retErr er
     }
     defer func() {
         if err := resp.Body.Close(); err != nil {
-            logger.Warn("Failed to close response body", "err", err)
+            logger.Warn("failed to close response body", "err", err)
         }
     }()
 
@@ -285,7 +286,7 @@ var sendSlackRequest = func(request *http.Request, logger log.Logger) (retErr er
         return fmt.Errorf("failed to make Slack API request: %s", rslt.Err)
     }
 
-    logger.Debug("Sending Slack API request succeeded", "url", request.URL.String(), "statusCode", resp.Status)
+    logger.Debug("sending Slack API request succeeded", "url", request.URL.String(), "statusCode", resp.Status)
     return nil
 }
 
@@ -7,11 +7,12 @@ import (
     "fmt"
     "mime/multipart"
 
+    "github.com/prometheus/alertmanager/template"
+    "github.com/prometheus/alertmanager/types"
+
    "github.com/grafana/grafana/pkg/infra/log"
    "github.com/grafana/grafana/pkg/models"
    "github.com/grafana/grafana/pkg/services/notifications"
-    "github.com/prometheus/alertmanager/template"
-    "github.com/prometheus/alertmanager/types"
 )
 
 var (
@@ -95,7 +96,7 @@ func (tn *TelegramNotifier) Notify(ctx context.Context, as ...*types.Alert) (boo
     w := multipart.NewWriter(&body)
     defer func() {
         if err := w.Close(); err != nil {
-            tn.log.Warn("Failed to close writer", "err", err)
+            tn.log.Warn("failed to close writer", "err", err)
         }
     }()
     boundary := GetBoundary()
@@ -129,7 +130,7 @@ func (tn *TelegramNotifier) Notify(ctx context.Context, as ...*types.Alert) (boo
     }
 
     if err := tn.ns.SendWebhookSync(ctx, cmd); err != nil {
-        tn.log.Error("Failed to send webhook", "error", err, "webhook", tn.Name)
+        tn.log.Error("failed to send webhook", "err", err, "webhook", tn.Name)
         return false, err
     }
 
@@ -8,12 +8,13 @@ import (
     "path"
     "strings"
 
-    "github.com/grafana/grafana/pkg/infra/log"
-    "github.com/grafana/grafana/pkg/models"
-    "github.com/grafana/grafana/pkg/services/notifications"
     "github.com/prometheus/alertmanager/template"
     "github.com/prometheus/alertmanager/types"
     "github.com/prometheus/common/model"
+
+    "github.com/grafana/grafana/pkg/infra/log"
+    "github.com/grafana/grafana/pkg/models"
+    "github.com/grafana/grafana/pkg/services/notifications"
 )
 
 var (
@@ -101,7 +102,7 @@ func NewThreemaNotifier(config *ThreemaConfig, ns notifications.WebhookSender, t
 
 // Notify send an alert notification to Threema
 func (tn *ThreemaNotifier) Notify(ctx context.Context, as ...*types.Alert) (bool, error) {
-    tn.log.Debug("Sending threema alert notification", "from", tn.GatewayID, "to", tn.RecipientID)
+    tn.log.Debug("sending threema alert notification", "from", tn.GatewayID, "to", tn.RecipientID)
 
     var tmplErr error
     tmpl, _ := TmplText(ctx, tn.tmpl, as, tn.log, &tmplErr)
@@ -141,7 +142,7 @@ func (tn *ThreemaNotifier) Notify(ctx context.Context, as ...*types.Alert) (bool
         },
     }
     if err := tn.ns.SendWebhookSync(ctx, cmd); err != nil {
-        tn.log.Error("Failed to send threema notification", "error", err, "webhook", tn.Name)
+        tn.log.Error("Failed to send threema notification", "err", err, "webhook", tn.Name)
         return false, err
     }
 
@@ -79,7 +79,7 @@ func withStoredImage(ctx context.Context, l log.Logger, imageStore ImageStore, i
         }
     } else if err != nil {
         // Ignore errors. Don't log "ImageUnavailable", which means the storage doesn't exist.
-        l.Warn("failed to retrieve image url from store", "error", err)
+        l.Warn("failed to retrieve image url from store", "err", err)
     }
 
     err = imageFunc(index, img)
@@ -208,7 +208,7 @@ var sendHTTPRequest = func(ctx context.Context, url *url.URL, cfg httpCfg, logge
     }
     defer func() {
         if err := resp.Body.Close(); err != nil {
-            logger.Warn("Failed to close response body", "err", err)
+            logger.Warn("failed to close response body", "err", err)
         }
     }()
 
@@ -223,7 +223,7 @@ var sendHTTPRequest = func(ctx context.Context, url *url.URL, cfg httpCfg, logge
         return nil, fmt.Errorf("failed to send HTTP request - status code %d", resp.StatusCode)
     }
 
-    logger.Debug("Sending HTTP request succeeded", "url", request.URL.String(), "statusCode", resp.Status)
+    logger.Debug("sending HTTP request succeeded", "url", request.URL.String(), "statusCode", resp.Status)
     return respBody, nil
 }
 
@@ -88,7 +88,7 @@ type VictoropsNotifier struct {
 
 // Notify sends notification to Victorops via POST to URL endpoint
 func (vn *VictoropsNotifier) Notify(ctx context.Context, as ...*types.Alert) (bool, error) {
-    vn.log.Debug("Executing victorops notification", "notification", vn.Name)
+    vn.log.Debug("executing victorops notification", "notification", vn.Name)
 
     var tmplErr error
     tmpl, _ := TmplText(ctx, vn.tmpl, as, vn.log, &tmplErr)
@@ -139,7 +139,7 @@ func (vn *VictoropsNotifier) Notify(ctx context.Context, as ...*types.Alert) (bo
     }
 
     if err := vn.ns.SendWebhookSync(ctx, cmd); err != nil {
-        vn.log.Error("Failed to send Victorops notification", "error", err, "webhook", vn.Name)
+        vn.log.Error("Failed to send Victorops notification", "err", err, "webhook", vn.Name)
         return false, err
     }
 
@@ -6,12 +6,12 @@ import (
     "errors"
     "fmt"
 
+    "github.com/prometheus/alertmanager/template"
+    "github.com/prometheus/alertmanager/types"
+
     "github.com/grafana/grafana/pkg/infra/log"
     "github.com/grafana/grafana/pkg/models"
     "github.com/grafana/grafana/pkg/services/notifications"
-    "github.com/prometheus/alertmanager/template"
-    "github.com/prometheus/alertmanager/types"
 )
 
 type WeComConfig struct {
@@ -101,7 +101,7 @@ func (w *WeComNotifier) Notify(ctx context.Context, as ...*types.Alert) (bool, e
     }
 
     if err := w.ns.SendWebhookSync(ctx, cmd); err != nil {
-        w.log.Error("failed to send WeCom webhook", "error", err, "notification", w.Name)
+        w.log.Error("failed to send WeCom webhook", "err", err, "notification", w.Name)
         return false, err
     }
 
@@ -52,7 +52,7 @@ func (c *alertmanagerCrypto) LoadSecureSettings(ctx context.Context, orgId int64
     currentConfig, err := Load([]byte(query.Result.AlertmanagerConfiguration))
     // If the current config is un-loadable, treat it as if it never existed. Providing a new, valid config should be able to "fix" this state.
     if err != nil {
-        c.log.Warn("Last known alertmanager configuration was invalid. Overwriting...")
+        c.log.Warn("last known alertmanager configuration was invalid. Overwriting...")
     } else {
         currentReceiverMap = currentConfig.GetGrafanaReceiverMap()
     }
@@ -445,7 +445,7 @@ func (sch *schedule) schedulePeriodic(ctx context.Context) error {
                 time.AfterFunc(time.Duration(int64(i)*step), func() {
                     success := item.ruleInfo.eval(tick, item.version)
                     if !success {
-                        sch.log.Debug("Scheduled evaluation was canceled because evaluation routine was stopped", "uid", item.key.UID, "org", item.key.OrgID, "time", tick)
+                        sch.log.Debug("scheduled evaluation was canceled because evaluation routine was stopped", "uid", item.key.UID, "org", item.key.OrgID, "time", tick)
                     }
                 })
             }
@@ -606,12 +606,12 @@ func (sch *schedule) ruleRoutine(grafanaCtx context.Context, key models.AlertRul
                 return nil
             })
             if err != nil {
-                logger.Error("updating rule failed after all retries", "error", err)
+                logger.Error("updating rule failed after all retries", "err", err)
             }
         // evalCh - used by the scheduler to signal that evaluation is needed.
         case ctx, ok := <-evalCh:
             if !ok {
-                logger.Debug("Evaluation channel has been closed. Exiting")
+                logger.Debug("evaluation channel has been closed. Exiting")
                 return nil
             }
             if evalRunning {
@@ -353,7 +353,7 @@ func (st *Manager) annotateState(ctx context.Context, alertRule *ngModels.AlertR
 
     err = st.dashboardService.GetDashboard(ctx, query)
     if err != nil {
-        st.log.Error("error getting dashboard for alert annotation", "dashboardUID", dashUid, "alertRuleUID", alertRule.UID, "error", err.Error())
+        st.log.Error("error getting dashboard for alert annotation", "dashboardUID", dashUid, "alertRuleUID", alertRule.UID, "err", err.Error())
         return
     }
 
@@ -363,7 +363,7 @@ func (st *Manager) annotateState(ctx context.Context, alertRule *ngModels.AlertR
 
     annotationRepo := annotations.GetRepository()
     if err := annotationRepo.Save(item); err != nil {
-        st.log.Error("error saving alert annotation", "alertRuleUID", alertRule.UID, "error", err.Error())
+        st.log.Error("error saving alert annotation", "alertRuleUID", alertRule.UID, "err", err.Error())
         return
     }
 }
@@ -378,11 +378,11 @@ func (st *Manager) staleResultsHandler(ctx context.Context, alertRule *ngModels.
         ilbs := ngModels.InstanceLabels(s.Labels)
         _, labelsHash, err := ilbs.StringAndHash()
         if err != nil {
-            st.log.Error("unable to get labelsHash", "error", err.Error(), "orgID", s.OrgID, "alertRuleUID", s.AlertRuleUID)
+            st.log.Error("unable to get labelsHash", "err", err.Error(), "orgID", s.OrgID, "alertRuleUID", s.AlertRuleUID)
         }
 
         if err = st.instanceStore.DeleteAlertInstance(ctx, s.OrgID, s.AlertRuleUID, labelsHash); err != nil {
-            st.log.Error("unable to delete stale instance from database", "error", err.Error(), "orgID", s.OrgID, "alertRuleUID", s.AlertRuleUID, "cacheID", s.CacheId)
+            st.log.Error("unable to delete stale instance from database", "err", err.Error(), "orgID", s.OrgID, "alertRuleUID", s.AlertRuleUID, "cacheID", s.CacheId)
         }
 
         if s.State == eval.Alerting {
@@ -386,7 +386,7 @@ func (st DBstore) GetNamespaceByTitle(ctx context.Context, namespace string, org
     g := guardian.New(ctx, folder.Id, orgID, user)
     if canSave, err := g.CanSave(); err != nil || !canSave {
        if err != nil {
-            st.Logger.Error("checking can save permission has failed", "userId", user.UserId, "username", user.Login, "namespace", namespace, "orgId", orgID, "error", err)
+            st.Logger.Error("checking can save permission has failed", "userId", user.UserId, "username", user.Login, "namespace", namespace, "orgId", orgID, "err", err)
         }
         return nil, ngmodels.ErrCannotEditNamespace
     }