Mirror of https://github.com/grafana/grafana.git
Find-and-replace 'err' logs to 'error' to match log search conventions (#57309)
parent 3e6bdf0439
commit 3ddb28bad9
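
The change itself is mechanical: every structured log call that passed the error value under the ad-hoc key "err" now passes it under the key "error", so a single field name matches all error logs when searching. Below is a minimal sketch of the convention, using the standard library's log/slog as a stand-in for Grafana's own key/value logger (an assumption for illustration; the call sites in the diff go through srv.log, am.logger, and similar wrappers):

	package main

	import (
		"errors"
		"log/slog"
	)

	func main() {
		err := errors.New("connection refused")

		// Before: the error value was logged under the ad-hoc key "err".
		slog.Error("unable to obtain the org's Alertmanager", "err", err)

		// After: the error value is logged under the conventional key "error",
		// so a log search can filter on one field name instead of two.
		slog.Error("unable to obtain the org's Alertmanager", "error", err)
	}

With a consistent key, a query over the structured log output only needs to match the "error" field rather than both "err" and "error".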
@@ -53,7 +53,7 @@ func (srv AlertmanagerSrv) RouteGetAMStatus(c *models.ReqContext) response.Response {
 func (srv AlertmanagerSrv) RouteCreateSilence(c *models.ReqContext, postableSilence apimodels.PostableSilence) response.Response {
 	err := postableSilence.Validate(strfmt.Default)
 	if err != nil {
-		srv.log.Error("silence failed validation", "err", err)
+		srv.log.Error("silence failed validation", "error", err)
 		return ErrResp(http.StatusBadRequest, err, "silence failed validation")
 	}
 

@@ -98,7 +98,7 @@ func (srv AlertmanagerSrv) RouteDeleteAlertingConfig(c *models.ReqContext) response.Response {
 	}
 
 	if err := am.SaveAndApplyDefaultConfig(c.Req.Context()); err != nil {
-		srv.log.Error("unable to save and apply default alertmanager configuration", "err", err)
+		srv.log.Error("unable to save and apply default alertmanager configuration", "error", err)
 		return ErrResp(http.StatusInternalServerError, err, "failed to save and apply default Alertmanager configuration")
 	}
 

@@ -404,6 +404,6 @@ func (srv AlertmanagerSrv) AlertmanagerFor(orgID int64) (Alertmanager, *response.Response) {
 		return am, response.Error(http.StatusConflict, err.Error(), err)
 	}
 
-	srv.log.Error("unable to obtain the org's Alertmanager", "err", err)
+	srv.log.Error("unable to obtain the org's Alertmanager", "error", err)
 	return nil, response.Error(http.StatusInternalServerError, "unable to obtain org's Alertmanager", err)
 }

@@ -55,7 +55,7 @@ func (srv ConfigSrv) RouteGetNGalertConfig(c *models.ReqContext) response.Response {
 		}
 
 		msg := "failed to fetch admin configuration from the database"
-		srv.log.Error(msg, "err", err)
+		srv.log.Error(msg, "error", err)
 		return ErrResp(http.StatusInternalServerError, err, msg)
 	}
 

@@ -94,14 +94,14 @@ func (srv ConfigSrv) RoutePostNGalertConfig(c *models.ReqContext, body apimodels
 
 	if err := cfg.Validate(); err != nil {
 		msg := "failed to validate admin configuration"
-		srv.log.Error(msg, "err", err)
+		srv.log.Error(msg, "error", err)
 		return ErrResp(http.StatusBadRequest, err, msg)
 	}
 
 	cmd := store.UpdateAdminConfigurationCmd{AdminConfiguration: cfg}
 	if err := srv.store.UpdateAdminConfiguration(cmd); err != nil {
 		msg := "failed to save the admin configuration to the database"
-		srv.log.Error(msg, "err", err)
+		srv.log.Error(msg, "error", err)
 		return ErrResp(http.StatusBadRequest, err, msg)
 	}
 

@@ -115,7 +115,7 @@ func (srv ConfigSrv) RouteDeleteNGalertConfig(c *models.ReqContext) response.Response {
 
 	err := srv.store.DeleteAdminConfiguration(c.OrgID)
 	if err != nil {
-		srv.log.Error("unable to delete configuration", "err", err)
+		srv.log.Error("unable to delete configuration", "error", err)
 		return ErrResp(http.StatusInternalServerError, err, "")
 	}
 

@@ -150,7 +150,7 @@ func (srv ConfigSrv) RouteGetAlertingStatus(c *models.ReqContext) response.Response {
 	cfg, err := srv.store.GetAdminConfiguration(c.OrgID)
 	if err != nil && !errors.Is(err, store.ErrNoAdminConfiguration) {
 		msg := "failed to fetch configuration from the database"
-		srv.log.Error(msg, "err", err)
+		srv.log.Error(msg, "error", err)
 		return ErrResp(http.StatusInternalServerError, err, msg)
 	}
 	if cfg != nil {

@@ -284,7 +284,7 @@ func ruleToQuery(logger log.Logger, rule *ngmodels.AlertRule) string {
 		}
 
 		// For any other type of error, it is unexpected abort and return the whole JSON.
-		logger.Debug("failed to parse a query", "err", err)
+		logger.Debug("failed to parse a query", "error", err)
 		queryErr = err
 		break
 	}

@@ -147,7 +147,7 @@ func (s *ScreenshotImageService) NewImage(ctx context.Context, r *models.AlertRu
 	image := models.Image{Path: screenshot.Path}
 	if s.uploads != nil {
 		if image, err = s.uploads.Upload(ctx, image); err != nil {
-			s.logger.Warn("failed to upload image", "path", image.Path, "err", err)
+			s.logger.Warn("failed to upload image", "path", image.Path, "error", err)
 		}
 	}
 	if err := s.store.SaveImage(ctx, &image); err != nil {

@@ -164,7 +164,7 @@ func (ng *AlertNG) init() error {
 
 	appUrl, err := url.Parse(ng.Cfg.AppURL)
 	if err != nil {
-		ng.Log.Error("Failed to parse application URL. Continue without it.", "err", err)
+		ng.Log.Error("Failed to parse application URL. Continue without it.", "error", err)
 		appUrl = nil
 	}
 

@@ -249,7 +249,7 @@ func subscribeToFolderChanges(logger log.Logger, bus bus.Bus, dbStore api.RuleSt
 		logger.Debug("Got folder title updated event. updating rules in the folder", "folder_uid", evt.UID)
 		updated, err := dbStore.IncreaseVersionForAllRulesInNamespace(context.Background(), evt.OrgID, evt.UID)
 		if err != nil {
-			logger.Error("Failed to update alert rules in the folder after its title was changed", "err", err, "folder_uid", evt.UID, "folder", evt.Title)
+			logger.Error("Failed to update alert rules in the folder after its title was changed", "error", err, "folder_uid", evt.UID, "folder", evt.Title)
 			return
 		}
 		if len(updated) > 0 {

@@ -197,7 +197,7 @@ func newAlertmanager(ctx context.Context, orgID int64, cfg *setting.Cfg, store A
 		am.silences.Maintenance(silenceMaintenanceInterval, silencesFilePath, am.stopc, func() (int64, error) {
 			// Delete silences older than the retention period.
 			if _, err := am.silences.GC(); err != nil {
-				am.logger.Error("silence garbage collection", "err", err)
+				am.logger.Error("silence garbage collection", "error", err)
 				// Don't return here - we need to snapshot our state first.
 			}
 

@@ -116,7 +116,7 @@ func (moa *MultiOrgAlertmanager) ApplyAlertmanagerConfiguration(ctx context.Cont
 	}
 
 	if err := am.SaveAndApplyConfig(ctx, &config); err != nil {
-		moa.logger.Error("unable to save and apply alertmanager configuration", "err", err)
+		moa.logger.Error("unable to save and apply alertmanager configuration", "error", err)
 		return AlertmanagerConfigRejectedError{err}
 	}
 

@@ -34,13 +34,13 @@ func (am *Alertmanager) GetAlerts(active, silenced, inhibited bool, filter []str
 
 	matchers, err := parseFilter(filter)
 	if err != nil {
-		am.logger.Error("failed to parse matchers", "err", err)
+		am.logger.Error("failed to parse matchers", "error", err)
 		return nil, fmt.Errorf("%s: %w", err.Error(), ErrGetAlertsBadPayload)
 	}
 
 	receiverFilter, err := parseReceivers(receivers)
 	if err != nil {
-		am.logger.Error("failed to parse receiver regex", "err", err)
+		am.logger.Error("failed to parse receiver regex", "error", err)
 		return nil, fmt.Errorf("%s: %w", err.Error(), ErrGetAlertsBadPayload)
 	}
 

@@ -77,7 +77,7 @@ func (am *Alertmanager) GetAlerts(active, silenced, inhibited bool, filter []str
 	am.reloadConfigMtx.RUnlock()
 
 	if err != nil {
-		am.logger.Error("failed to iterate through the alerts", "err", err)
+		am.logger.Error("failed to iterate through the alerts", "error", err)
 		return nil, fmt.Errorf("%s: %w", err.Error(), ErrGetAlertsInternal)
 	}
 	sort.Slice(res, func(i, j int) bool {

@@ -90,13 +90,13 @@ func (am *Alertmanager) GetAlerts(active, silenced, inhibited bool, filter []str
 func (am *Alertmanager) GetAlertGroups(active, silenced, inhibited bool, filter []string, receivers string) (apimodels.AlertGroups, error) {
 	matchers, err := parseFilter(filter)
 	if err != nil {
-		am.logger.Error("msg", "failed to parse matchers", "err", err)
+		am.logger.Error("msg", "failed to parse matchers", "error", err)
 		return nil, fmt.Errorf("%s: %w", err.Error(), ErrGetAlertGroupsBadPayload)
 	}
 
 	receiverFilter, err := parseReceivers(receivers)
 	if err != nil {
-		am.logger.Error("msg", "failed to compile receiver regex", "err", err)
+		am.logger.Error("msg", "failed to compile receiver regex", "error", err)
 		return nil, fmt.Errorf("%s: %w", err.Error(), ErrGetAlertGroupsBadPayload)
 	}
 

@@ -126,7 +126,7 @@ func (n *AlertmanagerNotifier) Notify(ctx context.Context, as ...*types.Alert) (bool, error) {
 			password: n.basicAuthPassword,
 			body:     body,
 		}, n.logger); err != nil {
-			n.logger.Warn("failed to send to Alertmanager", "err", err, "alertmanager", n.Name, "url", u.String())
+			n.logger.Warn("failed to send to Alertmanager", "error", err, "alertmanager", n.Name, "url", u.String())
 			lastErr = err
 			numErrs++
 		}

@@ -123,13 +123,13 @@ func (dd *DingDingNotifier) Notify(ctx context.Context, as ...*types.Alert) (bool, error) {
 	}
 
 	if tmplErr != nil {
-		dd.log.Warn("failed to template DingDing message", "err", tmplErr.Error())
+		dd.log.Warn("failed to template DingDing message", "error", tmplErr.Error())
 		tmplErr = nil
 	}
 
 	u := tmpl(dd.URL)
 	if tmplErr != nil {
-		dd.log.Warn("failed to template DingDing URL", "err", tmplErr.Error(), "fallback", dd.URL)
+		dd.log.Warn("failed to template DingDing URL", "error", tmplErr.Error(), "fallback", dd.URL)
 		u = dd.URL
 	}
 

@@ -115,7 +115,7 @@ func (d DiscordNotifier) Notify(ctx context.Context, as ...*types.Alert) (bool, error) {
 	if d.Content != "" {
 		bodyJSON.Set("content", tmpl(d.Content))
 		if tmplErr != nil {
-			d.log.Warn("failed to template Discord notification content", "err", tmplErr.Error())
+			d.log.Warn("failed to template Discord notification content", "error", tmplErr.Error())
 			// Reset tmplErr for templating other fields.
 			tmplErr = nil
 		}

@@ -124,7 +124,7 @@ func (d DiscordNotifier) Notify(ctx context.Context, as ...*types.Alert) (bool, error) {
 	if d.AvatarURL != "" {
 		bodyJSON.Set("avatar_url", tmpl(d.AvatarURL))
 		if tmplErr != nil {
-			d.log.Warn("failed to template Discord Avatar URL", "err", tmplErr.Error(), "fallback", d.AvatarURL)
+			d.log.Warn("failed to template Discord Avatar URL", "error", tmplErr.Error(), "fallback", d.AvatarURL)
 			bodyJSON.Set("avatar_url", d.AvatarURL)
 			tmplErr = nil
 		}

@@ -164,13 +164,13 @@ func (d DiscordNotifier) Notify(ctx context.Context, as ...*types.Alert) (bool, error) {
 	bodyJSON.Set("embeds", embeds)
 
 	if tmplErr != nil {
-		d.log.Warn("failed to template Discord message", "err", tmplErr.Error())
+		d.log.Warn("failed to template Discord message", "error", tmplErr.Error())
 		tmplErr = nil
 	}
 
 	u := tmpl(d.WebhookURL)
 	if tmplErr != nil {
-		d.log.Warn("failed to template Discord URL", "err", tmplErr.Error(), "fallback", d.WebhookURL)
+		d.log.Warn("failed to template Discord URL", "error", tmplErr.Error(), "fallback", d.WebhookURL)
 		u = d.WebhookURL
 	}
 

@@ -185,7 +185,7 @@ func (d DiscordNotifier) Notify(ctx context.Context, as ...*types.Alert) (bool, error) {
 	}
 
 	if err := d.ns.SendWebhookSync(ctx, cmd); err != nil {
-		d.log.Error("failed to send notification to Discord", "err", err)
+		d.log.Error("failed to send notification to Discord", "error", err)
 		return false, err
 	}
 	return true, nil

@@ -220,7 +220,7 @@ func (d DiscordNotifier) constructAttachments(ctx context.Context, as []*types.A
 			url := fmt.Sprintf("attachment://%s", base)
 			reader, err := openImage(image.Path)
 			if err != nil && !errors.Is(err, ngmodels.ErrImageNotFound) {
-				d.log.Warn("failed to retrieve image data from store", "err", err)
+				d.log.Warn("failed to retrieve image data from store", "error", err)
 				return nil
 			}
 

@@ -257,7 +257,7 @@ func (d DiscordNotifier) buildRequest(ctx context.Context, url string, body []by
 	defer func() {
 		if err := w.Close(); err != nil {
 			// Shouldn't matter since we already close w explicitly on the non-error path
-			d.log.Warn("failed to close multipart writer", "err", err)
+			d.log.Warn("failed to close multipart writer", "error", err)
 		}
 	}()
 

@@ -105,7 +105,7 @@ func (en *EmailNotifier) Notify(ctx context.Context, alerts ...*types.Alert) (bool, error) {
 		u.RawQuery = "alertState=firing&view=state"
 		alertPageURL = u.String()
 	} else {
-		en.log.Debug("failed to parse external URL", "url", en.tmpl.ExternalURL.String(), "err", err.Error())
+		en.log.Debug("failed to parse external URL", "url", en.tmpl.ExternalURL.String(), "error", err.Error())
 	}
 
 	// Extend alerts data with images, if available.

@@ -120,7 +120,7 @@ func (en *EmailNotifier) Notify(ctx context.Context, alerts ...*types.Alert) (bool, error) {
 			data.Alerts[index].EmbeddedImage = filepath.Base(image.Path)
 			embeddedFiles = append(embeddedFiles, image.Path)
 		} else {
-			en.log.Warn("failed to get image file for email attachment", "file", image.Path, "err", err)
+			en.log.Warn("failed to get image file for email attachment", "file", image.Path, "error", err)
 		}
 	}
 	return nil

@@ -149,7 +149,7 @@ func (en *EmailNotifier) Notify(ctx context.Context, alerts ...*types.Alert) (bool, error) {
 	}
 
 	if tmplErr != nil {
-		en.log.Warn("failed to template email message", "err", tmplErr.Error())
+		en.log.Warn("failed to template email message", "error", tmplErr.Error())
 	}
 
 	if err := en.ns.SendEmailCommandHandlerSync(ctx, cmd); err != nil {

@@ -97,7 +97,7 @@ func (gcn *GoogleChatNotifier) Notify(ctx context.Context, as ...*types.Alert) (bool, error) {
 	}
 
 	if tmplErr != nil {
-		gcn.log.Warn("failed to template Google Chat message", "err", tmplErr.Error())
+		gcn.log.Warn("failed to template Google Chat message", "error", tmplErr.Error())
 		tmplErr = nil
 	}
 

@@ -151,13 +151,13 @@ func (gcn *GoogleChatNotifier) Notify(ctx context.Context, as ...*types.Alert) (bool, error) {
 	}
 
 	if tmplErr != nil {
-		gcn.log.Warn("failed to template GoogleChat message", "err", tmplErr.Error())
+		gcn.log.Warn("failed to template GoogleChat message", "error", tmplErr.Error())
 		tmplErr = nil
 	}
 
 	u := tmpl(gcn.URL)
 	if tmplErr != nil {
-		gcn.log.Warn("failed to template GoogleChat URL", "err", tmplErr.Error(), "fallback", gcn.URL)
+		gcn.log.Warn("failed to template GoogleChat URL", "error", tmplErr.Error(), "fallback", gcn.URL)
 		u = gcn.URL
 	}
 

@@ -140,7 +140,7 @@ func (kn *KafkaNotifier) Notify(ctx context.Context, as ...*types.Alert) (bool, error) {
 	topicURL := strings.TrimRight(kn.Endpoint, "/") + "/topics/" + tmpl(kn.Topic)
 
 	if tmplErr != nil {
-		kn.log.Warn("failed to template Kafka message", "err", tmplErr.Error())
+		kn.log.Warn("failed to template Kafka message", "error", tmplErr.Error())
 	}
 
 	cmd := &models.SendWebhookSync{

@@ -89,7 +89,7 @@ func (ln *LineNotifier) Notify(ctx context.Context, as ...*types.Alert) (bool, error) {
 		tmpl(DefaultMessageEmbed),
 	)
 	if tmplErr != nil {
-		ln.log.Warn("failed to template Line message", "err", tmplErr.Error())
+		ln.log.Warn("failed to template Line message", "error", tmplErr.Error())
 	}
 
 	form := url.Values{}

@@ -106,7 +106,7 @@ func (ln *LineNotifier) Notify(ctx context.Context, as ...*types.Alert) (bool, error) {
 	}
 
 	if err := ln.ns.SendWebhookSync(ctx, cmd); err != nil {
-		ln.log.Error("failed to send notification to LINE", "err", err, "body", body)
+		ln.log.Error("failed to send notification to LINE", "error", err, "body", body)
 		return false, err
 	}
 

@@ -225,7 +225,7 @@ func (on *OpsgenieNotifier) buildOpsgenieMessage(ctx context.Context, alerts mod
 
 	// Check for templating errors
 	if tmplErr != nil {
-		on.log.Warn("failed to template Opsgenie message", "err", tmplErr.Error())
+		on.log.Warn("failed to template Opsgenie message", "error", tmplErr.Error())
 		tmplErr = nil
 	}
 

@@ -272,7 +272,7 @@ func (on *OpsgenieNotifier) buildOpsgenieMessage(ctx context.Context, alerts mod
 	bodyJSON.Set("details", details)
 	apiURL = tmpl(on.APIUrl)
 	if tmplErr != nil {
-		on.log.Warn("failed to template Opsgenie URL", "err", tmplErr.Error(), "fallback", on.APIUrl)
+		on.log.Warn("failed to template Opsgenie URL", "error", tmplErr.Error(), "fallback", on.APIUrl)
 		apiURL = on.APIUrl
 	}
 

@@ -209,7 +209,7 @@ func (pn *PagerdutyNotifier) buildPagerdutyMessage(ctx context.Context, alerts m
 	}
 
 	if tmplErr != nil {
-		pn.log.Warn("failed to template PagerDuty message", "err", tmplErr.Error())
+		pn.log.Warn("failed to template PagerDuty message", "error", tmplErr.Error())
 	}
 
 	return msg, eventType, nil

@@ -156,7 +156,7 @@ func (pn *PushoverNotifier) Notify(ctx context.Context, as ...*types.Alert) (bool, error) {
 	}
 
 	if err := pn.ns.SendWebhookSync(ctx, cmd); err != nil {
-		pn.log.Error("failed to send pushover notification", "err", err, "webhook", pn.Name)
+		pn.log.Error("failed to send pushover notification", "error", err, "webhook", pn.Name)
 		return false, err
 	}
 

@@ -286,7 +286,7 @@ func (pn *PushoverNotifier) genPushoverBody(ctx context.Context, as ...*types.Al
 	}
 
 	if tmplErr != nil {
-		pn.log.Warn("failed to template pushover message", "err", tmplErr.Error())
+		pn.log.Warn("failed to template pushover message", "error", tmplErr.Error())
 	}
 
 	headers := map[string]string{

@@ -176,7 +176,7 @@ func (sn *SensuGoNotifier) Notify(ctx context.Context, as ...*types.Alert) (bool, error) {
 	}
 
 	if tmplErr != nil {
-		sn.log.Warn("failed to template sensugo message", "err", tmplErr.Error())
+		sn.log.Warn("failed to template sensugo message", "error", tmplErr.Error())
 	}
 
 	body, err := json.Marshal(bodyMsgType)

@@ -194,7 +194,7 @@ func (sn *SensuGoNotifier) Notify(ctx context.Context, as ...*types.Alert) (bool, error) {
 		},
 	}
 	if err := sn.ns.SendWebhookSync(ctx, cmd); err != nil {
-		sn.log.Error("failed to send Sensu Go event", "err", err, "sensugo", sn.Name)
+		sn.log.Error("failed to send Sensu Go event", "error", err, "sensugo", sn.Name)
 		return false, err
 	}
 

@@ -191,7 +191,7 @@ func (sn *SlackNotifier) Notify(ctx context.Context, alerts ...*types.Alert) (bool, error) {
 var sendSlackRequest = func(request *http.Request, logger log.Logger) (retErr error) {
 	defer func() {
 		if retErr != nil {
-			logger.Warn("failed to send slack request", "err", retErr)
+			logger.Warn("failed to send slack request", "error", retErr)
 		}
 	}()
 

@@ -215,7 +215,7 @@ var sendSlackRequest = func(request *http.Request, logger log.Logger) (retErr error) {
 	}
 	defer func() {
 		if err := resp.Body.Close(); err != nil {
-			logger.Warn("failed to close response body", "err", err)
+			logger.Warn("failed to close response body", "error", err)
 		}
 	}()
 

@@ -244,7 +244,7 @@ var sendSlackRequest = func(request *http.Request, logger log.Logger) (retErr error) {
 
 	if !rslt.Ok && rslt.Err != "" {
 		logger.Error("Sending Slack API request failed", "url", request.URL.String(), "statusCode", resp.Status,
-			"err", rslt.Err)
+			"error", rslt.Err)
 		return fmt.Errorf("failed to make Slack API request: %s", rslt.Err)
 	}
 

@@ -288,7 +288,7 @@ func (sn *SlackNotifier) buildSlackMessage(ctx context.Context, alrts []*types.A
 	}, alrts...)
 
 	if tmplErr != nil {
-		sn.log.Warn("failed to template Slack message", "err", tmplErr.Error())
+		sn.log.Warn("failed to template Slack message", "error", tmplErr.Error())
 	}
 
 	mentionsBuilder := strings.Builder{}

@@ -339,13 +339,13 @@ func (tn *TeamsNotifier) Notify(ctx context.Context, as ...*types.Alert) (bool, error) {
 
 	// This check for tmplErr must happen before templating the URL
 	if tmplErr != nil {
-		tn.log.Warn("failed to template Teams message", "err", tmplErr.Error())
+		tn.log.Warn("failed to template Teams message", "error", tmplErr.Error())
 		tmplErr = nil
 	}
 
 	u := tmpl(tn.URL)
 	if tmplErr != nil {
-		tn.log.Warn("failed to template Teams URL", "err", tmplErr.Error(), "fallback", tn.URL)
+		tn.log.Warn("failed to template Teams URL", "error", tmplErr.Error(), "fallback", tn.URL)
 		u = tn.URL
 	}
 

@@ -127,7 +127,7 @@ func (tn *TelegramNotifier) Notify(ctx context.Context, as ...*types.Alert) (bool, error) {
 		}
 		defer func() {
 			if err := f.Close(); err != nil {
-				tn.log.Warn("failed to close image", "err", err)
+				tn.log.Warn("failed to close image", "error", err)
 			}
 		}()
 		fw, err := w.CreateFormFile("photo", image.Path)

@@ -155,7 +155,7 @@ func (tn *TelegramNotifier) buildTelegramMessage(ctx context.Context, as []*type
 	var tmplErr error
 	defer func() {
 		if tmplErr != nil {
-			tn.log.Warn("failed to template Telegram message", "err", tmplErr)
+			tn.log.Warn("failed to template Telegram message", "error", tmplErr)
 		}
 	}()
 

@@ -76,7 +76,7 @@ func extendAlert(alert template.Alert, externalURL string, logger log.Logger) *ExtendedAlert {
 	}
 	u, err := url.Parse(externalURL)
 	if err != nil {
-		logger.Debug("failed to parse external URL while extending template data", "url", externalURL, "err", err.Error())
+		logger.Debug("failed to parse external URL while extending template data", "url", externalURL, "error", err.Error())
 		return extended
 	}
 	externalPath := u.Path

@@ -94,7 +94,7 @@ func extendAlert(alert template.Alert, externalURL string, logger log.Logger) *ExtendedAlert {
 	if alert.Annotations != nil {
 		if s, ok := alert.Annotations[ngmodels.ValuesAnnotation]; ok {
 			if err := json.Unmarshal([]byte(s), &extended.Values); err != nil {
-				logger.Warn("failed to unmarshal values annotation", "err", err)
+				logger.Warn("failed to unmarshal values annotation", "error", err)
 			}
 		}
 		// TODO: Remove in Grafana 10

@@ -141,7 +141,7 @@ func (tn *ThreemaNotifier) Notify(ctx context.Context, as ...*types.Alert) (bool, error) {
 	data.Set("text", message)
 
 	if tmplErr != nil {
-		tn.log.Warn("failed to template Threema message", "err", tmplErr.Error())
+		tn.log.Warn("failed to template Threema message", "error", tmplErr.Error())
 	}
 
 	cmd := &models.SendWebhookSync{

@@ -153,7 +153,7 @@ func (tn *ThreemaNotifier) Notify(ctx context.Context, as ...*types.Alert) (bool, error) {
 		},
 	}
 	if err := tn.ns.SendWebhookSync(ctx, cmd); err != nil {
-		tn.log.Error("Failed to send threema notification", "err", err, "webhook", tn.Name)
+		tn.log.Error("Failed to send threema notification", "error", err, "webhook", tn.Name)
 		return false, err
 	}
 

@@ -66,7 +66,7 @@ func getImage(ctx context.Context, l log.Logger, imageStore ImageStore, alert ty
 	if errors.Is(err, models.ErrImageNotFound) || errors.Is(err, ErrImagesUnavailable) {
 		return nil, nil
 	} else if err != nil {
-		l.Warn("failed to get image with token", "token", token, "err", err)
+		l.Warn("failed to get image with token", "token", token, "error", err)
 		return nil, err
 	} else {
 		return img, nil

@@ -228,7 +228,7 @@ var sendHTTPRequest = func(ctx context.Context, url *url.URL, cfg httpCfg, logge
 	}
 	defer func() {
 		if err := resp.Body.Close(); err != nil {
-			logger.Warn("failed to close response body", "err", err)
+			logger.Warn("failed to close response body", "error", err)
 		}
 	}()
 

@@ -250,7 +250,7 @@ var sendHTTPRequest = func(ctx context.Context, url *url.URL, cfg httpCfg, logge
 func joinUrlPath(base, additionalPath string, logger log.Logger) string {
 	u, err := url.Parse(base)
 	if err != nil {
-		logger.Debug("failed to parse URL while joining URL", "url", base, "err", err.Error())
+		logger.Debug("failed to parse URL while joining URL", "url", base, "error", err.Error())
 		return base
 	}
 

@@ -127,7 +127,7 @@ func (vn *VictoropsNotifier) Notify(ctx context.Context, as ...*types.Alert) (bool, error) {
 
 	if tmplErr != nil {
 		vn.log.Warn("failed to expand message template. "+
-			"", "err", tmplErr.Error())
+			"", "error", tmplErr.Error())
 		tmplErr = nil
 	}
 

@@ -145,7 +145,7 @@ func (vn *VictoropsNotifier) Notify(ctx context.Context, as ...*types.Alert) (bool, error) {
 
 	u := tmpl(vn.settings.URL)
 	if tmplErr != nil {
-		vn.log.Info("failed to expand URL template", "err", tmplErr.Error(), "fallback", vn.settings.URL)
+		vn.log.Info("failed to expand URL template", "error", tmplErr.Error(), "fallback", vn.settings.URL)
 		u = vn.settings.URL
 	}
 

@@ -159,7 +159,7 @@ func (vn *VictoropsNotifier) Notify(ctx context.Context, as ...*types.Alert) (bool, error) {
 	}
 
 	if err := vn.ns.SendWebhookSync(ctx, cmd); err != nil {
-		vn.log.Error("failed to send notification", "err", err, "webhook", vn.Name)
+		vn.log.Error("failed to send notification", "error", err, "webhook", vn.Name)
 		return false, err
 	}
 

@@ -97,7 +97,7 @@ func buildWebhookNotifier(factoryConfig FactoryConfig) (*WebhookNotifier, error) {
 	case string:
 		maxAlerts, err = strconv.Atoi(value)
 		if err != nil {
-			logger.Warn("failed to convert setting maxAlerts to integer. Using default", "err", err, "original", value)
+			logger.Warn("failed to convert setting maxAlerts to integer. Using default", "error", err, "original", value)
 			maxAlerts = 0
 		}
 	default:

@@ -177,7 +177,7 @@ func (wn *WebhookNotifier) Notify(ctx context.Context, as ...*types.Alert) (bool, error) {
 	}
 
 	if tmplErr != nil {
-		wn.log.Warn("failed to template webhook message", "err", tmplErr.Error())
+		wn.log.Warn("failed to template webhook message", "error", tmplErr.Error())
 		tmplErr = nil
 	}
 

@@ -180,7 +180,7 @@ func (w *WeComNotifier) Notify(ctx context.Context, as ...*types.Alert) (bool, error) {
 	}
 
 	if tmplErr != nil {
-		w.log.Warn("failed to template WeCom message", "err", tmplErr.Error())
+		w.log.Warn("failed to template WeCom message", "error", tmplErr.Error())
 	}
 
 	cmd := &models.SendWebhookSync{

@@ -189,7 +189,7 @@ func (w *WeComNotifier) Notify(ctx context.Context, as ...*types.Alert) (bool, error) {
 	}
 
 	if err = w.ns.SendWebhookSync(ctx, cmd); err != nil {
-		w.log.Error("failed to send WeCom webhook", "err", err, "notification", w.Name)
+		w.log.Error("failed to send WeCom webhook", "error", err, "notification", w.Name)
 		return false, err
 	}
 

@@ -54,7 +54,7 @@ func PersistTemplates(cfg *api.PostableUserConfig, path string) ([]string, bool, error) {
 	// Now that we have the list of _actual_ templates, let's remove the ones that we don't need.
 	existingFiles, err := os.ReadDir(path)
 	if err != nil {
-		cfglogger.Error("unable to read directory for deleting Alertmanager templates", "err", err, "path", path)
+		cfglogger.Error("unable to read directory for deleting Alertmanager templates", "error", err, "path", path)
 	}
 	for _, existingFile := range existingFiles {
 		p := filepath.Join(path, existingFile.Name())

@@ -63,7 +63,7 @@ func PersistTemplates(cfg *api.PostableUserConfig, path string) ([]string, bool, error) {
 			templatesChanged = true
 			err := os.Remove(p)
 			if err != nil {
-				cfglogger.Error("unable to delete template", "err", err, "file", p)
+				cfglogger.Error("unable to delete template", "error", err, "file", p)
 			}
 		}
 	}

@@ -97,7 +97,7 @@ func (fileStore *FileStore) WriteFileToDisk(fn string, content []byte) error {
 func (fileStore *FileStore) CleanUp() {
 	if err := os.RemoveAll(fileStore.workingDirPath); err != nil {
 		fileStore.logger.Warn("unable to delete the local working directory", "dir", fileStore.workingDirPath,
-			"err", err)
+			"error", err)
 		return
 	}
 	fileStore.logger.Info("successfully deleted working directory", "dir", fileStore.workingDirPath)

@@ -98,7 +98,7 @@ func NewMultiOrgAlertmanager(cfg *setting.Cfg, configStore AlertingStore, orgSto
 
 		err = peer.Join(cluster.DefaultReconnectInterval, cluster.DefaultReconnectTimeout)
 		if err != nil {
-			l.Error("msg", "unable to join gossip mesh while initializing cluster for high availability mode", "err", err)
+			l.Error("msg", "unable to join gossip mesh while initializing cluster for high availability mode", "error", err)
 		}
 		// Attempt to verify the number of peers for 30s every 2s. The risk here is what we send a notification "too soon".
 		// Which should _never_ happen given we share the notification log via the database so the risk of double notification is very low.

@@ -121,7 +121,7 @@ func (moa *MultiOrgAlertmanager) Run(ctx context.Context) error {
 			return nil
 		case <-time.After(moa.settings.UnifiedAlerting.AlertmanagerConfigPollInterval):
 			if err := moa.LoadAndSyncAlertmanagersForOrgs(ctx); err != nil {
-				moa.logger.Error("error while synchronizing Alertmanager orgs", "err", err)
+				moa.logger.Error("error while synchronizing Alertmanager orgs", "error", err)
 			}
 		}
 	}

@@ -164,7 +164,7 @@ func (moa *MultiOrgAlertmanager) SyncAlertmanagersForOrgs(ctx context.Context, o
 	orgsFound := make(map[int64]struct{}, len(orgIDs))
 	dbConfigs, err := moa.getLatestConfigs(ctx)
 	if err != nil {
-		moa.logger.Error("failed to load Alertmanager configurations", "err", err)
+		moa.logger.Error("failed to load Alertmanager configurations", "error", err)
 		return
 	}
 	moa.alertmanagersMtx.Lock()

@@ -184,7 +184,7 @@ func (moa *MultiOrgAlertmanager) SyncAlertmanagersForOrgs(ctx context.Context, o
 			m := metrics.NewAlertmanagerMetrics(moa.metrics.GetOrCreateOrgRegistry(orgID))
 			am, err := newAlertmanager(ctx, orgID, moa.settings, moa.configStore, moa.kvStore, moa.peer, moa.decryptFn, moa.ns, m)
 			if err != nil {
-				moa.logger.Error("unable to create Alertmanager for org", "org", orgID, "err", err)
+				moa.logger.Error("unable to create Alertmanager for org", "org", orgID, "error", err)
 			}
 			moa.alertmanagers[orgID] = am
 			alertmanager = am

@@ -207,7 +207,7 @@ func (moa *MultiOrgAlertmanager) SyncAlertmanagersForOrgs(ctx context.Context, o
 
 		err := alertmanager.ApplyConfig(dbConfig)
 		if err != nil {
-			moa.logger.Error("failed to apply Alertmanager config for org", "org", orgID, "id", dbConfig.ID, "err", err)
+			moa.logger.Error("failed to apply Alertmanager config for org", "org", orgID, "id", dbConfig.ID, "error", err)
 			continue
 		}
 		moa.alertmanagers[orgID] = alertmanager

@@ -247,7 +247,7 @@ func (moa *MultiOrgAlertmanager) cleanupOrphanLocalOrgState(ctx context.Context,
 	dataDir := filepath.Join(moa.settings.DataPath, workingDir)
 	files, err := os.ReadDir(dataDir)
 	if err != nil {
-		moa.logger.Error("failed to list local working directory", "dir", dataDir, "err", err)
+		moa.logger.Error("failed to list local working directory", "dir", dataDir, "error", err)
 		return
 	}
 	for _, file := range files {

@@ -257,7 +257,7 @@ func (moa *MultiOrgAlertmanager) cleanupOrphanLocalOrgState(ctx context.Context,
 		}
 		orgID, err := strconv.ParseInt(file.Name(), 10, 64)
 		if err != nil {
-			moa.logger.Error("unable to parse orgID from directory name", "name", file.Name(), "err", err)
+			moa.logger.Error("unable to parse orgID from directory name", "name", file.Name(), "error", err)
 			continue
 		}
 		_, exists := activeOrganizations[orgID]

@@ -276,7 +276,7 @@ func (moa *MultiOrgAlertmanager) cleanupOrphanLocalOrgState(ctx context.Context,
 	for _, fileName := range storedFiles {
 		keys, err := moa.kvStore.Keys(ctx, kvstore.AllOrganizations, KVNamespace, fileName)
 		if err != nil {
-			moa.logger.Error("failed to fetch items from kvstore", "err", err,
+			moa.logger.Error("failed to fetch items from kvstore", "error", err,
 				"namespace", KVNamespace, "key", fileName)
 		}
 		for _, key := range keys {

@@ -285,7 +285,7 @@ func (moa *MultiOrgAlertmanager) cleanupOrphanLocalOrgState(ctx context.Context,
 			}
 			err = moa.kvStore.Del(ctx, key.OrgId, key.Namespace, key.Key)
 			if err != nil {
-				moa.logger.Error("failed to delete item from kvstore", "err", err,
+				moa.logger.Error("failed to delete item from kvstore", "error", err,
 					"orgID", key.OrgId, "namespace", KVNamespace, "key", key.Key)
 			}
 		}

@@ -304,7 +304,7 @@ func (moa *MultiOrgAlertmanager) StopAndWait() {
 	if ok {
 		moa.settleCancel()
 		if err := p.Leave(10 * time.Second); err != nil {
-			moa.logger.Warn("unable to leave the gossip mesh", "err", err)
+			moa.logger.Warn("unable to leave the gossip mesh", "error", err)
 		}
 	}
 }

@@ -22,13 +22,13 @@ var (
 func (am *Alertmanager) ListSilences(filter []string) (apimodels.GettableSilences, error) {
 	matchers, err := parseFilter(filter)
 	if err != nil {
-		am.logger.Error("failed to parse matchers", "err", err)
+		am.logger.Error("failed to parse matchers", "error", err)
 		return nil, fmt.Errorf("%s: %w", ErrListSilencesBadPayload.Error(), err)
 	}
 
 	psils, _, err := am.silences.Query()
 	if err != nil {
-		am.logger.Error(ErrGetSilencesInternal.Error(), "err", err)
+		am.logger.Error(ErrGetSilencesInternal.Error(), "error", err)
 		return nil, fmt.Errorf("%s: %w", ErrGetSilencesInternal.Error(), err)
 	}
 

@@ -39,7 +39,7 @@ func (am *Alertmanager) ListSilences(filter []string) (apimodels.GettableSilences, error) {
 		}
 		silence, err := v2.GettableSilenceFromProto(ps)
 		if err != nil {
-			am.logger.Error("unmarshaling from protobuf failed", "err", err)
+			am.logger.Error("unmarshaling from protobuf failed", "error", err)
 			return apimodels.GettableSilences{}, fmt.Errorf("%s: failed to convert internal silence to API silence: %w",
 				ErrGetSilencesInternal.Error(), err)
 		}

@@ -59,13 +59,13 @@ func (am *Alertmanager) GetSilence(silenceID string) (apimodels.GettableSilence, error) {
 	}
 
 	if len(sils) == 0 {
-		am.logger.Error("failed to find silence", "err", err, "id", sils)
+		am.logger.Error("failed to find silence", "error", err, "id", sils)
 		return apimodels.GettableSilence{}, ErrSilenceNotFound
 	}
 
 	sil, err := v2.GettableSilenceFromProto(sils[0])
 	if err != nil {
-		am.logger.Error("unmarshaling from protobuf failed", "err", err)
+		am.logger.Error("unmarshaling from protobuf failed", "error", err)
 		return apimodels.GettableSilence{}, fmt.Errorf("%s: failed to convert internal silence to API silence: %w",
 			ErrGetSilencesInternal.Error(), err)
 	}

@@ -77,14 +77,14 @@ func (am *Alertmanager) GetSilence(silenceID string) (apimodels.GettableSilence, error) {
 func (am *Alertmanager) CreateSilence(ps *apimodels.PostableSilence) (string, error) {
 	sil, err := v2.PostableSilenceToProto(ps)
 	if err != nil {
-		am.logger.Error("marshaling to protobuf failed", "err", err)
+		am.logger.Error("marshaling to protobuf failed", "error", err)
 		return "", fmt.Errorf("%s: failed to convert API silence to internal silence: %w",
 			ErrCreateSilenceBadPayload.Error(), err)
 	}
 
 	if sil.StartsAt.After(sil.EndsAt) || sil.StartsAt.Equal(sil.EndsAt) {
 		msg := "start time must be before end time"
-		am.logger.Error(msg, "err", "starts_at", sil.StartsAt, "ends_at", sil.EndsAt)
+		am.logger.Error(msg, "error", "starts_at", sil.StartsAt, "ends_at", sil.EndsAt)
 		return "", fmt.Errorf("%s: %w", msg, ErrCreateSilenceBadPayload)
 	}
 

@@ -96,7 +96,7 @@ func (am *Alertmanager) CreateSilence(ps *apimodels.PostableSilence) (string, error) {
 
 	silenceID, err := am.silences.Set(sil)
 	if err != nil {
-		am.logger.Error("msg", "unable to save silence", "err", err)
+		am.logger.Error("msg", "unable to save silence", "error", err)
 		if errors.Is(err, silence.ErrNotFound) {
 			return "", ErrSilenceNotFound
 		}

@@ -68,7 +68,7 @@ func (ecp *ContactPointService) GetContactPoints(ctx context.Context, q ContactP
 		for k, v := range contactPoint.SecureSettings {
 			decryptedValue, err := ecp.decryptValue(v)
 			if err != nil {
-				ecp.log.Warn("decrypting value failed", "err", err.Error())
+				ecp.log.Warn("decrypting value failed", "error", err.Error())
 				continue
 			}
 			if decryptedValue == "" {

@@ -106,7 +106,7 @@ func (ecp *ContactPointService) getContactPointDecrypted(ctx context.Context, or
 	for k, v := range receiver.SecureSettings {
 		decryptedValue, err := ecp.decryptValue(v)
 		if err != nil {
-			ecp.log.Warn("decrypting value failed", "err", err.Error())
+			ecp.log.Warn("decrypting value failed", "error", err.Error())
 			continue
 		}
 		if decryptedValue == "" {

@@ -141,7 +141,7 @@ func (sch *schedule) Run(ctx context.Context) error {
 	defer t.Stop()
 
 	if err := sch.schedulePeriodic(ctx, t); err != nil {
-		sch.log.Error("failure while running the rule evaluation loop", "err", err)
+		sch.log.Error("failure while running the rule evaluation loop", "error", err)
 	}
 	return nil
 }

@@ -194,7 +194,7 @@ func (sch *schedule) schedulePeriodic(ctx context.Context, t *ticker.T) error {
 			tickNum := tick.Unix() / int64(sch.baseInterval.Seconds())
 
 			if err := sch.updateSchedulableAlertRules(ctx); err != nil {
-				sch.log.Error("scheduler failed to update alert rules", "err", err)
+				sch.log.Error("scheduler failed to update alert rules", "error", err)
 			}
 			alertRules, folderTitles := sch.schedulableAlertRules.all()
 

@@ -424,7 +424,7 @@ func (sch *schedule) ruleRoutine(grafanaCtx context.Context, key ngmodels.AlertR
 					return nil
 				})
 				if err != nil {
-					logger.Error("evaluation failed after all retries", "err", err)
+					logger.Error("evaluation failed after all retries", "error", err)
 				}
 			}()
 		case <-grafanaCtx.Done():

@@ -109,7 +109,7 @@ func (d *AlertsRouter) SyncAndApplyConfigFromDatabase() error {
 		if err != nil {
 			d.logger.Error("failed to get alertmanagers from datasources",
 				"org", cfg.OrgID,
-				"err", err)
+				"error", err)
 			continue
 		}
 		cfg.Alertmanagers = append(cfg.Alertmanagers, externalAlertmanagers...)

@@ -134,7 +134,7 @@ func (d *AlertsRouter) SyncAndApplyConfigFromDatabase() error {
 			if err != nil {
 				d.logger.Error("failed to parse alertmanager string",
 					"org", cfg.OrgID,
-					"err", err)
+					"error", err)
 				continue
 			}
 			redactedAMs = append(redactedAMs, parsedAM.Redacted())

@@ -153,7 +153,7 @@ func (d *AlertsRouter) SyncAndApplyConfigFromDatabase() error {
 			d.logger.Debug("applying new configuration to sender", "org", cfg.OrgID, "alertmanagers", redactedAMs)
 			err := existing.ApplyConfig(cfg)
 			if err != nil {
-				d.logger.Error("failed to apply configuration", "err", err, "org", cfg.OrgID)
+				d.logger.Error("failed to apply configuration", "error", err, "org", cfg.OrgID)
 				continue
 			}
 			d.externalAlertmanagersCfgHash[cfg.OrgID] = amHash

@@ -164,7 +164,7 @@ func (d *AlertsRouter) SyncAndApplyConfigFromDatabase() error {
 		d.logger.Info("creating new sender for the external alertmanagers", "org", cfg.OrgID, "alertmanagers", redactedAMs)
 		s, err := NewExternalAlertmanagerSender()
 		if err != nil {
-			d.logger.Error("unable to start the sender", "err", err, "org", cfg.OrgID)
+			d.logger.Error("unable to start the sender", "error", err, "org", cfg.OrgID)
 			continue
 		}
 

@@ -173,7 +173,7 @@ func (d *AlertsRouter) SyncAndApplyConfigFromDatabase() error {
 
 		err = s.ApplyConfig(cfg)
 		if err != nil {
-			d.logger.Error("failed to apply configuration", "err", err, "org", cfg.OrgID)
+			d.logger.Error("failed to apply configuration", "error", err, "org", cfg.OrgID)
 			continue
 		}
 

@@ -226,7 +226,7 @@ func (d *AlertsRouter) alertmanagersFromDatasources(orgID int64) ([]string, error) {
 			d.logger.Error("failed to build external alertmanager URL",
 				"org", ds.OrgId,
 				"uid", ds.Uid,
-				"err", err)
+				"error", err)
 			continue
 		}
 		alertmanagers = append(alertmanagers, amURL)

@@ -271,13 +271,13 @@ func (d *AlertsRouter) Send(key models.AlertRuleKey, alerts definitions.Postable
 		if err == nil {
 			localNotifierExist = true
 			if err := n.PutAlerts(alerts); err != nil {
-				logger.Error("failed to put alerts in the local notifier", "count", len(alerts.PostableAlerts), "err", err)
+				logger.Error("failed to put alerts in the local notifier", "count", len(alerts.PostableAlerts), "error", err)
 			}
 		} else {
 			if errors.Is(err, notifier.ErrNoAlertmanagerForOrg) {
 				logger.Debug("local notifier was not found")
 			} else {
-				logger.Error("local notifier is not available", "err", err)
+				logger.Error("local notifier is not available", "error", err)
 			}
 		}
 	}

@@ -327,7 +327,7 @@ func (d *AlertsRouter) Run(ctx context.Context) error {
 		select {
 		case <-time.After(d.adminConfigPollInterval):
 			if err := d.SyncAndApplyConfigFromDatabase(); err != nil {
-				d.logger.Error("unable to sync admin configuration", "err", err)
+				d.logger.Error("unable to sync admin configuration", "error", err)
 			}
 		case <-ctx.Done():
 			// Stop sending alerts to all external Alertmanager(s).

@@ -86,7 +86,7 @@ func (s *ExternalAlertmanager) Run() {
 
 	go func() {
 		if err := s.sdManager.Run(); err != nil {
-			s.logger.Error("failed to start the sender service discovery manager", "err", err)
+			s.logger.Error("failed to start the sender service discovery manager", "error", err)
 		}
 		s.wg.Done()
 	}()

@@ -197,14 +197,14 @@ func (s *ExternalAlertmanager) sanitizeLabelSet(lbls models.LabelSet) labels.Labels {
 	for _, k := range sortedKeys(lbls) {
 		sanitizedLabelName, err := s.sanitizeLabelName(k)
 		if err != nil {
-			s.logger.Error("alert sending to external Alertmanager(s) contains an invalid label/annotation name that failed to sanitize, skipping", "name", k, "err", err)
+			s.logger.Error("alert sending to external Alertmanager(s) contains an invalid label/annotation name that failed to sanitize, skipping", "name", k, "error", err)
			continue
 		}
 
 		// There can be label name collisions after we sanitize. We check for this and attempt to make the name unique again using a short hash of the original name.
 		if _, ok := set[sanitizedLabelName]; ok {
 			sanitizedLabelName = sanitizedLabelName + fmt.Sprintf("_%.3x", md5.Sum([]byte(k)))
-			s.logger.Warn("alert contains duplicate label/annotation name after sanitization, appending unique suffix", "name", k, "new_name", sanitizedLabelName, "err", err)
+			s.logger.Warn("alert contains duplicate label/annotation name after sanitization, appending unique suffix", "name", k, "new_name", sanitizedLabelName, "error", err)
 		}
 
 		set[sanitizedLabelName] = struct{}{}

@@ -96,7 +96,7 @@ func (rs *ruleStates) getOrCreate(ctx context.Context, log log.Logger, alertRule
 	il := ngModels.InstanceLabels(lbs)
 	id, err := il.StringKey()
 	if err != nil {
-		log.Error("error getting cacheId for entry", "err", err.Error())
+		log.Error("error getting cacheId for entry", "error", err.Error())
 	}
 
 	if state, ok := rs.states[id]; ok {

@@ -145,7 +145,7 @@ func (rs *ruleStates) expandRuleLabelsAndAnnotations(ctx context.Context, log lo
 		ev, err := expandTemplate(ctx, alertRule.Title, v, templateLabels, alertInstance, externalURL)
 		expanded[k] = ev
 		if err != nil {
-			log.Error("error in expanding template", "name", k, "value", v, "err", err.Error())
+			log.Error("error in expanding template", "name", k, "value", v, "error", err.Error())
 			// Store the original template on error.
 			expanded[k] = v
 		}

@@ -51,13 +51,13 @@ func (h *AnnotationStateHistorian) RecordState(ctx context.Context, rule *ngmode
 
 		panelId, err := strconv.ParseInt(panelUid, 10, 64)
 		if err != nil {
-			h.log.Error("error parsing panelUID for alert annotation", "panelUID", panelUid, "alertRuleUID", rule.UID, "err", err.Error())
+			h.log.Error("error parsing panelUID for alert annotation", "panelUID", panelUid, "alertRuleUID", rule.UID, "error", err.Error())
 			return
 		}
 
 		dashID, err := h.dashboards.getID(ctx, rule.OrgID, dashUid)
 		if err != nil {
-			h.log.Error("error getting dashboard for alert annotation", "dashboardUID", dashUid, "alertRuleUID", rule.UID, "err", err.Error())
+			h.log.Error("error getting dashboard for alert annotation", "dashboardUID", dashUid, "alertRuleUID", rule.UID, "error", err.Error())
 			return
 		}
 

@@ -66,7 +66,7 @@ func (h *AnnotationStateHistorian) RecordState(ctx context.Context, rule *ngmode
 	}
 
 	if err := h.annotations.Save(ctx, item); err != nil {
-		h.log.Error("error saving alert annotation", "alertRuleUID", rule.UID, "err", err.Error())
+		h.log.Error("error saving alert annotation", "alertRuleUID", rule.UID, "error", err.Error())
 		return
 	}
 }

@@ -72,7 +72,7 @@ func (st *Manager) Warm(ctx context.Context) {
 
 	orgIds, err := st.instanceStore.FetchOrgIds(ctx)
 	if err != nil {
-		st.log.Error("unable to fetch orgIds", "err", err.Error())
+		st.log.Error("unable to fetch orgIds", "error", err.Error())
 	}
 
 	statesCount := 0

@@ -259,7 +259,7 @@ func (st *Manager) setNextState(ctx context.Context, alertRule *ngModels.AlertRu
 			"alert_rule", alertRule.UID,
 			"dashboard", alertRule.DashboardUID,
 			"panel", alertRule.PanelID,
-			"err", err)
+			"error", err)
 	}
 
 	st.cache.set(currentState)

@@ -321,7 +321,7 @@ func (st *Manager) saveAlertStates(ctx context.Context, states ...*State) (saved
 		_, hash, err := labels.StringAndHash()
 		if err != nil {
 			debug = append(debug, debugInfo{s.OrgID, s.AlertRuleUID, s.State.String(), s.Labels.String()})
-			st.log.Error("failed to save alert instance with invalid labels", "orgID", s.OrgID, "ruleUID", s.AlertRuleUID, "err", err)
+			st.log.Error("failed to save alert instance with invalid labels", "orgID", s.OrgID, "ruleUID", s.AlertRuleUID, "error", err)
 			continue
 		}
 		fields := ngModels.AlertInstance{

@@ -344,7 +344,7 @@ func (st *Manager) saveAlertStates(ctx context.Context, states ...*State) (saved
 		for _, inst := range instances {
 			debug = append(debug, debugInfo{inst.RuleOrgID, inst.RuleUID, string(inst.CurrentState), data.Labels(inst.Labels).String()})
 		}
-		st.log.Error("failed to save alert states", "states", debug, "err", err)
+		st.log.Error("failed to save alert states", "states", debug, "error", err)
 		return 0, len(debug)
 	}
 

@@ -390,7 +390,7 @@ func (st *Manager) staleResultsHandler(ctx context.Context, evaluatedAt time.Tim
 			ilbs := ngModels.InstanceLabels(s.Labels)
 			_, labelsHash, err := ilbs.StringAndHash()
 			if err != nil {
-				st.log.Error("unable to get labelsHash", "err", err.Error(), "orgID", s.OrgID, "alertRuleUID", s.AlertRuleUID)
+				st.log.Error("unable to get labelsHash", "error", err.Error(), "orgID", s.OrgID, "alertRuleUID", s.AlertRuleUID)
 			}
 
 			toDelete = append(toDelete, ngModels.AlertInstanceKey{RuleOrgID: s.OrgID, RuleUID: s.AlertRuleUID, LabelsHash: labelsHash})

@@ -411,7 +411,7 @@ func (st *Manager) staleResultsHandler(ctx context.Context, evaluatedAt time.Tim
 	}
 
 	if err := st.instanceStore.DeleteAlertInstances(ctx, toDelete...); err != nil {
-		st.log.Error("unable to delete stale instances from database", "err", err.Error(),
+		st.log.Error("unable to delete stale instances from database", "error", err.Error(),
 			"orgID", alertRule.OrgID, "alertRuleUID", alertRule.UID, "count", len(toDelete))
 	}
 	return resolvedStates

@@ -340,7 +340,7 @@ func (st DBstore) GetNamespaceByTitle(ctx context.Context, namespace string, org
 	g := guardian.New(ctx, folder.Id, orgID, user)
 	if canSave, err := g.CanSave(); err != nil || !canSave {
 		if err != nil {
-			st.Logger.Error("checking can save permission has failed", "userId", user.UserID, "username", user.Login, "namespace", namespace, "orgId", orgID, "err", err)
+			st.Logger.Error("checking can save permission has failed", "userId", user.UserID, "username", user.Login, "namespace", namespace, "orgId", orgID, "error", err)
 		}
 		return nil, ngmodels.ErrCannotEditNamespace
 	}

@@ -83,7 +83,7 @@ func (st DBstore) SaveAlertmanagerConfigurationWithCallback(ctx context.Context,
 			return err
 		}
 		if _, err := st.deleteOldConfigurations(ctx, cmd.OrgID, ConfigRecordsLimit); err != nil {
-			st.Logger.Warn("failed to delete old am configs", "org", cmd.OrgID, "err", err)
+			st.Logger.Warn("failed to delete old am configs", "org", cmd.OrgID, "error", err)
 		}
 		if err := callback(); err != nil {
 			return err

@@ -125,7 +125,7 @@ func (st *DBstore) UpdateAlertmanagerConfiguration(ctx context.Context, cmd *mod
 			return ErrVersionLockedObjectNotFound
 		}
 		if _, err := st.deleteOldConfigurations(ctx, cmd.OrgID, ConfigRecordsLimit); err != nil {
-			st.Logger.Warn("failed to delete old am configs", "org", cmd.OrgID, "err", err)
+			st.Logger.Warn("failed to delete old am configs", "org", cmd.OrgID, "error", err)
 		}
 		return err
 	})