Alerting: Update alerting state history API to authorize access using RBAC (#89579)
* Add method CanReadAllRules to the rule authorization service.
* Add alias type Namespace for Folder in ngalert's models package. It implements the Namespacer interface that is used by the authz logic.
* Update the state history backends to authorize access to rules.
* Update Loki to add folder UIDs to the query.
* Update BuildLogQuery to drop the filter by folders if the query is too long and fall back to in-memory filtering (see the usage sketch below).
This commit is contained in:
parent 16b050f676
commit 06d5850396
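For orientation before the diff, here is a minimal standalone sketch (hypothetical names, not the Grafana code) of the fallback behaviour this commit gives BuildLogQuery: append a folder-UID regex matcher to the selector, and if the resulting query would exceed the maximum size, rebuild it without the folder filter and signal that via a boolean so the caller can filter streams in memory instead.

package main

import (
	"errors"
	"fmt"
	"regexp"
	"strings"
)

var errQueryTooLong = errors.New("query too long")

// buildLogQuery is an illustrative sketch, not the Grafana implementation.
// It returns the query, whether the folder filter was dropped, and an error.
func buildLogQuery(selector string, folderUIDs []string, maxSize int) (string, bool, error) {
	q := selector
	if len(folderUIDs) > 0 {
		quoted := make([]string, len(folderUIDs))
		for i, uid := range folderUIDs {
			quoted[i] = regexp.QuoteMeta(uid)
		}
		q = fmt.Sprintf("%s,folderUID=~`%s`", selector, strings.Join(quoted, "|"))
	}
	if len(q) > maxSize {
		if len(folderUIDs) > 0 {
			// Too long with the folder filter: retry without it and signal
			// that the caller must fall back to in-memory filtering.
			dropped, _, err := buildLogQuery(selector, nil, maxSize)
			return dropped, err == nil, err
		}
		return "", false, errQueryTooLong
	}
	return q, false, nil
}

func main() {
	q, droppedFolders, err := buildLogQuery(`orgID="1",from="state-history"`, []string{"folder-1", "folder-2"}, 60)
	fmt.Println(q, droppedFolders, err)
}

The commit's actual implementation lives in the state history Loki backend shown in the diff below; the sketch only mirrors the drop-and-report pattern.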
@@ -99,7 +99,7 @@ func (r *LokiHistorianStore) Get(ctx context.Context, query *annotations.ItemQue
 }
 }

-logQL, err := historian.BuildLogQuery(buildHistoryQuery(query, accessResources.Dashboards, rule.UID), r.client.MaxQuerySize())
+logQL, _, err := historian.BuildLogQuery(buildHistoryQuery(query, accessResources.Dashboards, rule.UID), nil, r.client.MaxQuerySize())
 if err != nil {
 grafanaErr := errutil.Error{}
 if errors.As(err, &grafanaErr) {
@@ -24,6 +24,7 @@ type FakeRuleService struct {
 HasAccessInFolderFunc func(context.Context, identity.Requester, models.Namespaced) (bool, error)
 AuthorizeAccessInFolderFunc func(context.Context, identity.Requester, models.Namespaced) error
 AuthorizeRuleChangesFunc func(context.Context, identity.Requester, *store.GroupDelta) error
+CanReadAllRulesFunc func(context.Context, identity.Requester) (bool, error)

 Calls []Call
 }

@@ -99,3 +100,11 @@ func (s *FakeRuleService) AuthorizeRuleChanges(ctx context.Context, user identit
 }
 return nil
 }
+
+func (s *FakeRuleService) CanReadAllRules(ctx context.Context, user identity.Requester) (bool, error) {
+s.Calls = append(s.Calls, Call{"CanReadAllRules", []interface{}{ctx, user}})
+if s.CanReadAllRulesFunc != nil {
+return s.CanReadAllRulesFunc(ctx, user)
+}
+return false, nil
+}
@@ -76,6 +76,14 @@ func (r *RuleService) getRulesQueryEvaluator(rules ...*models.AlertRule) accessc
 return accesscontrol.EvalAll(evals...)
 }

+// CanReadAllRules returns true when user has access to all folders and can read rules in them.
+func (r *RuleService) CanReadAllRules(ctx context.Context, user identity.Requester) (bool, error) {
+return r.HasAccess(ctx, user, accesscontrol.EvalAll(
+accesscontrol.EvalPermission(ruleRead, dashboards.ScopeFoldersProvider.GetResourceAllScope()),
+accesscontrol.EvalPermission(dashboards.ActionFoldersRead, dashboards.ScopeFoldersProvider.GetResourceAllScope()),
+))
+}
+
 // AuthorizeDatasourceAccessForRule checks that user has access to all data sources declared by the rule
 func (r *RuleService) AuthorizeDatasourceAccessForRule(ctx context.Context, user identity.Requester, rule *models.AlertRule) error {
 ds := r.getRulesQueryEvaluator(rule)
@@ -453,3 +453,54 @@ func Test_authorizeAccessToRuleGroup(t *testing.T) {
 require.ErrorIs(t, result, ErrAuthorizationBase)
 })
 }
+
+func TestCanReadAllRules(t *testing.T) {
+ac := &recordingAccessControlFake{}
+svc := RuleService{
+genericService{ac: ac},
+}
+
+testCases := []struct {
+permissions map[string][]string
+expected bool
+}{
+{
+permissions: map[string][]string{
+ruleRead: {dashboards.ScopeFoldersProvider.GetResourceAllScope()},
+dashboards.ActionFoldersRead: {dashboards.ScopeFoldersProvider.GetResourceAllScope()},
+},
+expected: true,
+},
+{
+permissions: make(map[string][]string),
+},
+{
+permissions: map[string][]string{
+ruleRead: {dashboards.ScopeFoldersProvider.GetResourceScopeUID("test")},
+dashboards.ActionFoldersRead: {dashboards.ScopeFoldersProvider.GetResourceAllScope()},
+},
+},
+{
+permissions: map[string][]string{
+ruleRead: {dashboards.ScopeFoldersProvider.GetResourceAllScope()},
+dashboards.ActionFoldersRead: {dashboards.ScopeFoldersProvider.GetResourceScopeUID("test")},
+},
+},
+{
+permissions: map[string][]string{
+ruleRead: {dashboards.ScopeFoldersProvider.GetResourceAllScope()},
+},
+},
+{
+permissions: map[string][]string{
+dashboards.ActionFoldersRead: {dashboards.ScopeFoldersProvider.GetResourceAllScope()},
+},
+},
+}
+
+for _, tc := range testCases {
+result, err := svc.CanReadAllRules(context.Background(), createUserWithPermissions(tc.permissions))
+assert.NoError(t, err)
+assert.Equalf(t, tc.expected, result, "permissions: %v", tc.permissions)
+}
+}
@@ -20,6 +20,7 @@ import (

 alertingModels "github.com/grafana/alerting/models"

+"github.com/grafana/grafana/pkg/services/folder"
 "github.com/grafana/grafana/pkg/services/quota"
 "github.com/grafana/grafana/pkg/setting"
 "github.com/grafana/grafana/pkg/util/cmputil"

@@ -274,6 +275,12 @@ type Namespaced interface {
 GetNamespaceUID() string
 }

+type Namespace folder.Folder
+
+func (n Namespace) GetNamespaceUID() string {
+return n.UID
+}
+
 // AlertRuleWithOptionals This is to avoid having to pass in additional arguments deep in the call stack. Alert rule
 // object is created in an early validation step without knowledge about current alert rule fields or if they need to be
 // overridden. This is done in a later step and, in that step, we did not have knowledge about if a field was optional
@@ -366,7 +366,7 @@ func (ng *AlertNG) init() error {
 // There are a set of feature toggles available that act as short-circuits for common configurations.
 // If any are set, override the config accordingly.
 ApplyStateHistoryFeatureToggles(&ng.Cfg.UnifiedAlerting.StateHistory, ng.FeatureToggles, ng.Log)
-history, err := configureHistorianBackend(initCtx, ng.Cfg.UnifiedAlerting.StateHistory, ng.annotationsRepo, ng.dashboardService, ng.store, ng.Metrics.GetHistorianMetrics(), ng.Log, ng.tracer)
+history, err := configureHistorianBackend(initCtx, ng.Cfg.UnifiedAlerting.StateHistory, ng.annotationsRepo, ng.dashboardService, ng.store, ng.Metrics.GetHistorianMetrics(), ng.Log, ng.tracer, ac.NewRuleService(ng.accesscontrol))
 if err != nil {
 return err
 }

@@ -531,7 +531,7 @@ type Historian interface {
 state.Historian
 }

-func configureHistorianBackend(ctx context.Context, cfg setting.UnifiedAlertingStateHistorySettings, ar annotations.Repository, ds dashboards.DashboardService, rs historian.RuleStore, met *metrics.Historian, l log.Logger, tracer tracing.Tracer) (Historian, error) {
+func configureHistorianBackend(ctx context.Context, cfg setting.UnifiedAlertingStateHistorySettings, ar annotations.Repository, ds dashboards.DashboardService, rs historian.RuleStore, met *metrics.Historian, l log.Logger, tracer tracing.Tracer, ac historian.AccessControl) (Historian, error) {
 if !cfg.Enabled {
 met.Info.WithLabelValues("noop").Set(0)
 return historian.NewNopHistorian(), nil

@@ -546,7 +546,7 @@ func configureHistorianBackend(ctx context.Context, cfg setting.UnifiedAlertingS
 if backend == historian.BackendTypeMultiple {
 primaryCfg := cfg
 primaryCfg.Backend = cfg.MultiPrimary
-primary, err := configureHistorianBackend(ctx, primaryCfg, ar, ds, rs, met, l, tracer)
+primary, err := configureHistorianBackend(ctx, primaryCfg, ar, ds, rs, met, l, tracer, ac)
 if err != nil {
 return nil, fmt.Errorf("multi-backend target \"%s\" was misconfigured: %w", cfg.MultiPrimary, err)
 }

@@ -555,7 +555,7 @@ func configureHistorianBackend(ctx context.Context, cfg setting.UnifiedAlertingS
 for _, b := range cfg.MultiSecondaries {
 secCfg := cfg
 secCfg.Backend = b
-sec, err := configureHistorianBackend(ctx, secCfg, ar, ds, rs, met, l, tracer)
+sec, err := configureHistorianBackend(ctx, secCfg, ar, ds, rs, met, l, tracer, ac)
 if err != nil {
 return nil, fmt.Errorf("multi-backend target \"%s\" was miconfigured: %w", b, err)
 }

@@ -568,7 +568,7 @@ func configureHistorianBackend(ctx context.Context, cfg setting.UnifiedAlertingS
 if backend == historian.BackendTypeAnnotations {
 store := historian.NewAnnotationStore(ar, ds, met)
 annotationBackendLogger := log.New("ngalert.state.historian", "backend", "annotations")
-return historian.NewAnnotationBackend(annotationBackendLogger, store, rs, met), nil
+return historian.NewAnnotationBackend(annotationBackendLogger, store, rs, met, ac), nil
 }
 if backend == historian.BackendTypeLoki {
 lcfg, err := historian.NewLokiConfig(cfg)

@@ -577,7 +577,7 @@ func configureHistorianBackend(ctx context.Context, cfg setting.UnifiedAlertingS
 }
 req := historian.NewRequester()
 lokiBackendLogger := log.New("ngalert.state.historian", "backend", "loki")
-backend := historian.NewRemoteLokiBackend(lokiBackendLogger, lcfg, req, met, tracer)
+backend := historian.NewRemoteLokiBackend(lokiBackendLogger, lcfg, req, met, tracer, rs, ac)

 testConnCtx, cancelFunc := context.WithTimeout(ctx, 10*time.Second)
 defer cancelFunc()
@@ -16,6 +16,7 @@ import (
 "github.com/grafana/grafana/pkg/infra/log"
 "github.com/grafana/grafana/pkg/infra/tracing"
 "github.com/grafana/grafana/pkg/services/folder"
+acfakes "github.com/grafana/grafana/pkg/services/ngalert/accesscontrol/fakes"
 "github.com/grafana/grafana/pkg/services/ngalert/metrics"
 "github.com/grafana/grafana/pkg/services/ngalert/models"
 "github.com/grafana/grafana/pkg/services/ngalert/tests/fakes"

@@ -67,8 +68,9 @@ func TestConfigureHistorianBackend(t *testing.T) {
 Enabled: true,
 Backend: "invalid-backend",
 }
+ac := &acfakes.FakeRuleService{}

-_, err := configureHistorianBackend(context.Background(), cfg, nil, nil, nil, met, logger, tracer)
+_, err := configureHistorianBackend(context.Background(), cfg, nil, nil, nil, met, logger, tracer, ac)

 require.ErrorContains(t, err, "unrecognized")
 })

@@ -82,8 +84,9 @@ func TestConfigureHistorianBackend(t *testing.T) {
 Backend: "multiple",
 MultiPrimary: "invalid-backend",
 }
+ac := &acfakes.FakeRuleService{}

-_, err := configureHistorianBackend(context.Background(), cfg, nil, nil, nil, met, logger, tracer)
+_, err := configureHistorianBackend(context.Background(), cfg, nil, nil, nil, met, logger, tracer, ac)

 require.ErrorContains(t, err, "multi-backend target")
 require.ErrorContains(t, err, "unrecognized")

@@ -99,8 +102,9 @@ func TestConfigureHistorianBackend(t *testing.T) {
 MultiPrimary: "annotations",
 MultiSecondaries: []string{"annotations", "invalid-backend"},
 }
+ac := &acfakes.FakeRuleService{}

-_, err := configureHistorianBackend(context.Background(), cfg, nil, nil, nil, met, logger, tracer)
+_, err := configureHistorianBackend(context.Background(), cfg, nil, nil, nil, met, logger, tracer, ac)

 require.ErrorContains(t, err, "multi-backend target")
 require.ErrorContains(t, err, "unrecognized")

@@ -117,8 +121,9 @@ func TestConfigureHistorianBackend(t *testing.T) {
 LokiReadURL: "http://gone.invalid",
 LokiWriteURL: "http://gone.invalid",
 }
+ac := &acfakes.FakeRuleService{}

-h, err := configureHistorianBackend(context.Background(), cfg, nil, nil, nil, met, logger, tracer)
+h, err := configureHistorianBackend(context.Background(), cfg, nil, nil, nil, met, logger, tracer, ac)

 require.NotNil(t, h)
 require.NoError(t, err)

@@ -133,8 +138,9 @@ func TestConfigureHistorianBackend(t *testing.T) {
 Enabled: true,
 Backend: "annotations",
 }
+ac := &acfakes.FakeRuleService{}

-h, err := configureHistorianBackend(context.Background(), cfg, nil, nil, nil, met, logger, tracer)
+h, err := configureHistorianBackend(context.Background(), cfg, nil, nil, nil, met, logger, tracer, ac)

 require.NotNil(t, h)
 require.NoError(t, err)

@@ -155,8 +161,9 @@ grafana_alerting_state_history_info{backend="annotations"} 1
 cfg := setting.UnifiedAlertingStateHistorySettings{
 Enabled: false,
 }
+ac := &acfakes.FakeRuleService{}

-h, err := configureHistorianBackend(context.Background(), cfg, nil, nil, nil, met, logger, tracer)
+h, err := configureHistorianBackend(context.Background(), cfg, nil, nil, nil, met, logger, tracer, ac)

 require.NotNil(t, h)
 require.NoError(t, err)
@@ -13,9 +13,11 @@ import (
 "github.com/grafana/grafana-plugin-sdk-go/data"
 "go.opentelemetry.io/otel/trace"

+"github.com/grafana/grafana/pkg/apimachinery/identity"
 "github.com/grafana/grafana/pkg/components/simplejson"
 "github.com/grafana/grafana/pkg/infra/log"
 "github.com/grafana/grafana/pkg/services/annotations"
+"github.com/grafana/grafana/pkg/services/folder"
 "github.com/grafana/grafana/pkg/services/ngalert/eval"
 "github.com/grafana/grafana/pkg/services/ngalert/metrics"
 ngmodels "github.com/grafana/grafana/pkg/services/ngalert/models"

@@ -23,6 +25,12 @@ import (
 history_model "github.com/grafana/grafana/pkg/services/ngalert/state/historian/model"
 )

+type AccessControl interface {
+CanReadAllRules(ctx context.Context, user identity.Requester) (bool, error)
+AuthorizeAccessInFolder(ctx context.Context, user identity.Requester, rule ngmodels.Namespaced) error
+HasAccessInFolder(ctx context.Context, user identity.Requester, rule ngmodels.Namespaced) (bool, error)
+}
+
 // AnnotationBackend is an implementation of state.Historian that uses Grafana Annotations as the backing datastore.
 type AnnotationBackend struct {
 store AnnotationStore

@@ -30,10 +38,12 @@ type AnnotationBackend struct {
 clock clock.Clock
 metrics *metrics.Historian
 log log.Logger
+ac AccessControl
 }

 type RuleStore interface {
 GetAlertRuleByUID(ctx context.Context, query *ngmodels.GetAlertRuleByUIDQuery) (*ngmodels.AlertRule, error)
+GetUserVisibleNamespaces(ctx context.Context, orgID int64, user identity.Requester) (map[string]*folder.Folder, error)
 }

 type AnnotationStore interface {

@@ -41,13 +51,20 @@ type AnnotationStore interface {
 Save(ctx context.Context, panel *PanelKey, annotations []annotations.Item, orgID int64, logger log.Logger) error
 }

-func NewAnnotationBackend(logger log.Logger, annotations AnnotationStore, rules RuleStore, metrics *metrics.Historian) *AnnotationBackend {
+func NewAnnotationBackend(
+logger log.Logger,
+annotations AnnotationStore,
+rules RuleStore,
+metrics *metrics.Historian,
+ac AccessControl,
+) *AnnotationBackend {
 return &AnnotationBackend{
 store: annotations,
 rules: rules,
 clock: clock.New(),
 metrics: metrics,
 log: logger,
+ac: ac,
 }
 }

@@ -107,6 +124,10 @@ func (h *AnnotationBackend) Query(ctx context.Context, query ngmodels.HistoryQue
 return nil, fmt.Errorf("no such rule exists")
 }

+if err := h.ac.AuthorizeAccessInFolder(ctx, query.SignedInUser, rule); err != nil {
+return nil, err
+}
+
 q := annotations.ItemQuery{
 AlertID: rule.ID,
 OrgID: query.OrgID,
@@ -4,28 +4,33 @@ import (
 "bytes"
 "context"
 "encoding/json"
+"errors"
 "math"
 "testing"
 "time"

 "github.com/prometheus/client_golang/prometheus"
 "github.com/prometheus/client_golang/prometheus/testutil"
+"github.com/stretchr/testify/assert"
 "github.com/stretchr/testify/mock"
 "github.com/stretchr/testify/require"

 "github.com/grafana/grafana-plugin-sdk-go/data"

+"github.com/grafana/grafana/pkg/apimachinery/identity"
 "github.com/grafana/grafana/pkg/components/simplejson"
 "github.com/grafana/grafana/pkg/infra/log"
 "github.com/grafana/grafana/pkg/services/annotations"
 "github.com/grafana/grafana/pkg/services/annotations/annotationstest"
 "github.com/grafana/grafana/pkg/services/dashboards"
+acfakes "github.com/grafana/grafana/pkg/services/ngalert/accesscontrol/fakes"
 "github.com/grafana/grafana/pkg/services/ngalert/eval"
 "github.com/grafana/grafana/pkg/services/ngalert/metrics"
 "github.com/grafana/grafana/pkg/services/ngalert/models"
 "github.com/grafana/grafana/pkg/services/ngalert/state"
 history_model "github.com/grafana/grafana/pkg/services/ngalert/state/historian/model"
 "github.com/grafana/grafana/pkg/services/ngalert/tests/fakes"
+"github.com/grafana/grafana/pkg/services/user"
 )

 func TestAnnotationHistorian(t *testing.T) {

@@ -48,6 +53,31 @@ func TestAnnotationHistorian(t *testing.T) {
 }
 })

+t.Run("alert annotations are authorized", func(t *testing.T) {
+anns := createTestAnnotationBackendSut(t)
+ac := &acfakes.FakeRuleService{}
+expectedErr := errors.New("test-error")
+ac.AuthorizeAccessInFolderFunc = func(ctx context.Context, requester identity.Requester, namespaced models.Namespaced) error {
+return expectedErr
+}
+anns.ac = ac
+
+items := []annotations.Item{createAnnotation()}
+require.NoError(t, anns.store.Save(context.Background(), nil, items, 1, log.NewNopLogger()))
+
+q := models.HistoryQuery{
+RuleUID: "my-rule",
+OrgID: 1,
+SignedInUser: &user.SignedInUser{Name: "test-user", OrgID: 1},
+}
+_, err := anns.Query(context.Background(), q)
+
+require.ErrorIs(t, err, expectedErr)
+assert.Len(t, ac.Calls, 1)
+assert.Equal(t, "AuthorizeAccessInFolder", ac.Calls[0].MethodName)
+assert.Equal(t, q.SignedInUser, ac.Calls[0].Arguments[1])
+})
+
 t.Run("annotation queries send expected item query", func(t *testing.T) {
 store := &interceptingAnnotationStore{}
 anns := createTestAnnotationSutWithStore(t, store)

@@ -132,7 +162,8 @@ func createTestAnnotationSutWithStore(t *testing.T, annotations AnnotationStore)
 models.RuleGen.With(models.RuleMuts.WithOrgID(1), withUID("my-rule")).GenerateRef(),
 }
 annotationBackendLogger := log.New("ngalert.state.historian", "backend", "annotations")
-return NewAnnotationBackend(annotationBackendLogger, annotations, rules, met)
+ac := &acfakes.FakeRuleService{}
+return NewAnnotationBackend(annotationBackendLogger, annotations, rules, met, ac)
 }

 func createTestAnnotationBackendSutWithMetrics(t *testing.T, met *metrics.Historian) *AnnotationBackend {

@@ -146,7 +177,8 @@ func createTestAnnotationBackendSutWithMetrics(t *testing.T, met *metrics.Histor
 dbs.On("GetDashboard", mock.Anything, mock.Anything).Return(&dashboards.Dashboard{}, nil)
 store := NewAnnotationStore(fakeAnnoRepo, dbs, met)
 annotationBackendLogger := log.New("ngalert.state.historian", "backend", "annotations")
-return NewAnnotationBackend(annotationBackendLogger, store, rules, met)
+ac := &acfakes.FakeRuleService{}
+return NewAnnotationBackend(annotationBackendLogger, store, rules, met, ac)
 }

 func createFailingAnnotationSut(t *testing.T, met *metrics.Historian) *AnnotationBackend {

@@ -159,7 +191,8 @@ func createFailingAnnotationSut(t *testing.T, met *metrics.Historian) *Annotatio
 dbs.On("GetDashboard", mock.Anything, mock.Anything).Return(&dashboards.Dashboard{}, nil)
 annotationBackendLogger := log.New("ngalert.state.historian", "backend", "annotations")
 store := NewAnnotationStore(fakeAnnoRepo, dbs, met)
-return NewAnnotationBackend(annotationBackendLogger, store, rules, met)
+ac := &acfakes.FakeRuleService{}
+return NewAnnotationBackend(annotationBackendLogger, store, rules, met, ac)
 }

 func createAnnotation() annotations.Item {
@@ -5,7 +5,9 @@ import (
 "encoding/json"
 "fmt"
 "math"
+"regexp"
 "sort"
+"strings"
 "time"

 "github.com/benbjohnson/clock"

@@ -16,6 +18,7 @@ import (
 "github.com/grafana/grafana/pkg/components/simplejson"
 "github.com/grafana/grafana/pkg/infra/log"
 "github.com/grafana/grafana/pkg/infra/tracing"
+"github.com/grafana/grafana/pkg/services/ngalert/accesscontrol"
 "github.com/grafana/grafana/pkg/services/ngalert/client"
 "github.com/grafana/grafana/pkg/services/ngalert/eval"
 "github.com/grafana/grafana/pkg/services/ngalert/metrics"

@@ -75,15 +78,19 @@ type RemoteLokiBackend struct {
 clock clock.Clock
 metrics *metrics.Historian
 log log.Logger
+ac AccessControl
+ruleStore RuleStore
 }

-func NewRemoteLokiBackend(logger log.Logger, cfg LokiConfig, req client.Requester, metrics *metrics.Historian, tracer tracing.Tracer) *RemoteLokiBackend {
+func NewRemoteLokiBackend(logger log.Logger, cfg LokiConfig, req client.Requester, metrics *metrics.Historian, tracer tracing.Tracer, ruleStore RuleStore, ac AccessControl) *RemoteLokiBackend {
 return &RemoteLokiBackend{
 client: NewLokiClient(cfg, req, metrics, logger, tracer),
 externalLabels: cfg.ExternalLabels,
 clock: clock.New(),
 metrics: metrics,
 log: logger,
+ac: ac,
+ruleStore: ruleStore,
 }
 }

@@ -133,11 +140,19 @@ func (h *RemoteLokiBackend) Record(ctx context.Context, rule history_model.RuleM

 // Query retrieves state history entries from an external Loki instance and formats the results into a dataframe.
 func (h *RemoteLokiBackend) Query(ctx context.Context, query models.HistoryQuery) (*data.Frame, error) {
-logQL, err := BuildLogQuery(query, h.client.MaxQuerySize())
+uids, err := h.getFolderUIDsForFilter(ctx, query)
 if err != nil {
 return nil, err
 }
+
+logQL, filterByFolderSkipped, err := BuildLogQuery(query, uids, h.client.MaxQuerySize())
+if err != nil {
+return nil, err
+}
+if filterByFolderSkipped {
+h.log.FromContext(ctx).Warn("Filter by folder skipped because it's too long. Use in-memory filtering", "folders", len(uids))
+}

 now := time.Now().UTC()
 if query.To.IsZero() {
 query.To = now

@@ -151,7 +166,7 @@ func (h *RemoteLokiBackend) Query(ctx context.Context, query models.HistoryQuery
 if err != nil {
 return nil, err
 }
-return merge(res, query.RuleUID)
+return merge(res, uids)
 }

 func buildSelectors(query models.HistoryQuery) ([]Selector, error) {

@@ -176,7 +191,12 @@ func buildSelectors(query models.HistoryQuery) ([]Selector, error) {
 }

 // merge will put all the results in one array sorted by timestamp.
-func merge(res QueryRes, ruleUID string) (*data.Frame, error) {
+func merge(res QueryRes, folderUIDToFilter []string) (*data.Frame, error) {
+filterByFolderUIDMap := make(map[string]struct{}, len(folderUIDToFilter))
+for _, uid := range folderUIDToFilter {
+filterByFolderUIDMap[uid] = struct{}{}
+}
+
 // Find the total number of elements in all arrays.
 totalLen := 0
 for _, arr := range res.Data.Result {

@@ -210,6 +230,18 @@ func merge(res QueryRes, ruleUID string) (*data.Frame, error) {
 if len(stream.Values) == pointers[i] {
 continue
 }
+// check if stream should be in the results
+if len(filterByFolderUIDMap) > 0 {
+folderLbl, ok := stream.Stream[FolderUIDLabel]
+if !ok {
+continue // skip entries without folder UID, only if needs filtering
+}
+_, ok = filterByFolderUIDMap[folderLbl]
+if !ok {
+continue
+}
+}
+
 curTime := stream.Values[pointers[i]].T.UnixNano()
 if pointers[i] < len(stream.Values) && curTime < minTime {
 minTime = curTime

@@ -365,7 +397,7 @@ func NewSelector(label, op, value string) (Selector, error) {
 return Selector{Label: label, Op: Operator(op), Value: value}, nil
 }

-func selectorString(selectors []Selector) string {
+func selectorString(selectors []Selector, folderUIDs []string) string {
 if len(selectors) == 0 {
 return "{}"
 }

@@ -374,8 +406,23 @@ func selectorString(selectors []Selector) string {
 for _, s := range selectors {
 query += fmt.Sprintf("%s%s%q,", s.Label, s.Op, s.Value)
 }
-// Remove the last comma, as we append one to every selector.
-query = query[:len(query)-1]
+
+if len(folderUIDs) > 0 {
+b := strings.Builder{}
+b.Grow(len(folderUIDs)*40 + len(FolderUIDLabel)) // rough estimate of the length
+b.WriteString(FolderUIDLabel)
+b.WriteString("=~`")
+b.WriteString(regexp.QuoteMeta(folderUIDs[0]))
+for _, uid := range folderUIDs[1:] {
+b.WriteString("|")
+b.WriteString(regexp.QuoteMeta(uid))
+}
+b.WriteString("`")
+query += b.String()
+} else {
+// Remove the last comma, as we append one to every selector.
+query = query[:len(query)-1]
+}
 return "{" + query + "}"
 }

@@ -388,14 +435,18 @@ func isValidOperator(op string) bool {
 }

 // BuildLogQuery converts models.HistoryQuery and a list of folder UIDs to a Loki query.
-// Returns a Loki query or error if log query cannot be constructed. If user-defined query exceeds maximum allowed size returns ErrLokiQueryTooLong
-func BuildLogQuery(query models.HistoryQuery, maxQuerySize int) (string, error) {
+// If query size exceeds the `maxQuerySize` then it re-builds query ignoring the folderUIDs. If it's still bigger - returns ErrQueryTooLong.
+// Returns a tuple:
+// - loki query
+// - true if filter by folder UID was not added to the query ignored
+// - error if log query cannot be constructed, and ErrQueryTooLong if user-defined query exceeds maximum allowed size
+func BuildLogQuery(query models.HistoryQuery, folderUIDs []string, maxQuerySize int) (string, bool, error) {
 selectors, err := buildSelectors(query)
 if err != nil {
-return "", fmt.Errorf("failed to build the provided selectors: %w", err)
+return "", false, fmt.Errorf("failed to build the provided selectors: %w", err)
 }

-logQL := selectorString(selectors)
+logQL := selectorString(selectors, folderUIDs)

 if queryHasLogFilters(query) {
 logQL = fmt.Sprintf("%s | json", logQL)

@@ -424,10 +475,22 @@ func BuildLogQuery(query models.HistoryQuery, maxQuerySize int) (string, error)
 logQL += labelFilters

 if len(logQL) > maxQuerySize {
+// if request is too long, try to drop filter by folder UIDs.
+if len(folderUIDs) > 0 {
+logQL, tooLong, err := BuildLogQuery(query, nil, maxQuerySize)
+if err != nil {
+return "", false, err
+}
+if tooLong {
+return "", false, NewErrLokiQueryTooLong(logQL, maxQuerySize)
+}
+return logQL, true, nil
+}
 // if the query is too long even without filter by folders, then fail
-return "", NewErrLokiQueryTooLong(logQL, maxQuerySize)
+return "", false, NewErrLokiQueryTooLong(logQL, maxQuerySize)
 }
-return logQL, nil
+
+return logQL, false, nil
 }

 func queryHasLogFilters(query models.HistoryQuery) bool {

@@ -436,3 +499,48 @@ func queryHasLogFilters(query models.HistoryQuery) bool {
 query.PanelID != 0 ||
 len(query.Labels) > 0
 }
+
+func (h *RemoteLokiBackend) getFolderUIDsForFilter(ctx context.Context, query models.HistoryQuery) ([]string, error) {
+bypass, err := h.ac.CanReadAllRules(ctx, query.SignedInUser)
+if err != nil {
+return nil, err
+}
+if bypass { // if user has access to all rules and folder, remove filter
+return nil, nil
+}
+// if there is a filter by rule UID, find that rule UID and make sure that user has access to it.
+if query.RuleUID != "" {
+rule, err := h.ruleStore.GetAlertRuleByUID(ctx, &models.GetAlertRuleByUIDQuery{
+UID: query.RuleUID,
+OrgID: query.OrgID,
+})
+if err != nil {
+return nil, fmt.Errorf("failed to fetch alert rule by UID: %w", err)
+}
+if rule == nil {
+return nil, models.ErrAlertRuleNotFound
+}
+return nil, h.ac.AuthorizeAccessInFolder(ctx, query.SignedInUser, rule)
+}
+// if no filter, then we need to get all namespaces user has access to
+folders, err := h.ruleStore.GetUserVisibleNamespaces(ctx, query.OrgID, query.SignedInUser)
+if err != nil {
+return nil, fmt.Errorf("failed to fetch folders that user can access: %w", err)
+}
+uids := make([]string, 0, len(folders))
+// now keep only UIDs of folder in which user can read rules.
+for _, f := range folders {
+hasAccess, err := h.ac.HasAccessInFolder(ctx, query.SignedInUser, models.Namespace(*f))
+if err != nil {
+return nil, err
+}
+if !hasAccess {
+continue
+}
+uids = append(uids, f.UID)
+}
+if len(uids) == 0 {
+return nil, accesscontrol.NewAuthorizationErrorGeneric("read rules in any folder")
+}
+return uids, nil
+}
@ -4,6 +4,7 @@ import (
|
|||||||
"bytes"
|
"bytes"
|
||||||
"context"
|
"context"
|
||||||
"encoding/json"
|
"encoding/json"
|
||||||
|
"errors"
|
||||||
"fmt"
|
"fmt"
|
||||||
"io"
|
"io"
|
||||||
"net/http"
|
"net/http"
|
||||||
@ -15,16 +16,24 @@ import (
|
|||||||
"github.com/grafana/grafana-plugin-sdk-go/data"
|
"github.com/grafana/grafana-plugin-sdk-go/data"
|
||||||
"github.com/prometheus/client_golang/prometheus"
|
"github.com/prometheus/client_golang/prometheus"
|
||||||
"github.com/prometheus/client_golang/prometheus/testutil"
|
"github.com/prometheus/client_golang/prometheus/testutil"
|
||||||
|
"github.com/stretchr/testify/assert"
|
||||||
"github.com/stretchr/testify/require"
|
"github.com/stretchr/testify/require"
|
||||||
|
|
||||||
|
"github.com/grafana/grafana/pkg/apimachinery/identity"
|
||||||
"github.com/grafana/grafana/pkg/infra/log"
|
"github.com/grafana/grafana/pkg/infra/log"
|
||||||
"github.com/grafana/grafana/pkg/infra/tracing"
|
"github.com/grafana/grafana/pkg/infra/tracing"
|
||||||
|
"github.com/grafana/grafana/pkg/services/accesscontrol"
|
||||||
|
"github.com/grafana/grafana/pkg/services/folder"
|
||||||
|
rulesAuthz "github.com/grafana/grafana/pkg/services/ngalert/accesscontrol"
|
||||||
|
acfakes "github.com/grafana/grafana/pkg/services/ngalert/accesscontrol/fakes"
|
||||||
"github.com/grafana/grafana/pkg/services/ngalert/client"
|
"github.com/grafana/grafana/pkg/services/ngalert/client"
|
||||||
"github.com/grafana/grafana/pkg/services/ngalert/eval"
|
"github.com/grafana/grafana/pkg/services/ngalert/eval"
|
||||||
"github.com/grafana/grafana/pkg/services/ngalert/metrics"
|
"github.com/grafana/grafana/pkg/services/ngalert/metrics"
|
||||||
"github.com/grafana/grafana/pkg/services/ngalert/models"
|
"github.com/grafana/grafana/pkg/services/ngalert/models"
|
||||||
"github.com/grafana/grafana/pkg/services/ngalert/state"
|
"github.com/grafana/grafana/pkg/services/ngalert/state"
|
||||||
history_model "github.com/grafana/grafana/pkg/services/ngalert/state/historian/model"
|
history_model "github.com/grafana/grafana/pkg/services/ngalert/state/historian/model"
|
||||||
|
"github.com/grafana/grafana/pkg/services/ngalert/tests/fakes"
|
||||||
|
"github.com/grafana/grafana/pkg/services/org"
|
||||||
)
|
)
|
||||||
|
|
||||||
func TestRemoteLokiBackend(t *testing.T) {
|
func TestRemoteLokiBackend(t *testing.T) {
|
||||||
@ -197,12 +206,17 @@ func TestRemoteLokiBackend(t *testing.T) {
|
|||||||
t.Run("selector string", func(t *testing.T) {
|
t.Run("selector string", func(t *testing.T) {
|
||||||
selectors := []Selector{{"name", "=", "Bob"}, {"age", "=~", "30"}}
|
selectors := []Selector{{"name", "=", "Bob"}, {"age", "=~", "30"}}
|
||||||
expected := "{name=\"Bob\",age=~\"30\"}"
|
expected := "{name=\"Bob\",age=~\"30\"}"
|
||||||
result := selectorString(selectors)
|
result := selectorString(selectors, nil)
|
||||||
|
require.Equal(t, expected, result)
|
||||||
|
|
||||||
|
selectors = []Selector{{"name", "=", "quoted\"string"}, {"age", "=~", "30"}}
|
||||||
|
expected = "{name=\"quoted\\\"string\",age=~\"30\",folderUID=~`some\\\\d\\.r\\$|normal_string`}"
|
||||||
|
result = selectorString(selectors, []string{`some\d.r$`, "normal_string"})
|
||||||
require.Equal(t, expected, result)
|
require.Equal(t, expected, result)
|
||||||
|
|
||||||
selectors = []Selector{}
|
selectors = []Selector{}
|
||||||
expected = "{}"
|
expected = "{}"
|
||||||
result = selectorString(selectors)
|
result = selectorString(selectors, nil)
|
||||||
require.Equal(t, expected, result)
|
require.Equal(t, expected, result)
|
||||||
})
|
})
|
||||||
|
|
||||||
@ -221,10 +235,12 @@ func TestRemoteLokiBackend(t *testing.T) {
|
|||||||
func TestBuildLogQuery(t *testing.T) {
|
func TestBuildLogQuery(t *testing.T) {
|
||||||
maxQuerySize := 110
|
maxQuerySize := 110
|
||||||
cases := []struct {
|
cases := []struct {
|
||||||
name string
|
name string
|
||||||
query models.HistoryQuery
|
query models.HistoryQuery
|
||||||
exp string
|
folderUIDs []string
|
||||||
expErr error
|
exp string
|
||||||
|
expErr error
|
||||||
|
expDropped bool
|
||||||
}{
|
}{
|
||||||
{
|
{
|
||||||
name: "default includes state history label and orgID label",
|
name: "default includes state history label and orgID label",
|
||||||
@ -282,8 +298,7 @@ func TestBuildLogQuery(t *testing.T) {
|
|||||||
"customlabel": "customvalue",
|
"customlabel": "customvalue",
|
||||||
},
|
},
|
||||||
},
|
},
|
||||||
exp: `{orgID="123",from="state-history"} | json | ruleUID="rule-uid" | labels_customlabel="customvalue"`,
|
exp: `{orgID="123",from="state-history"} | json | ruleUID="rule-uid" | labels_customlabel="customvalue"`},
|
||||||
},
|
|
||||||
{
|
{
|
||||||
name: "should return if query does not exceed max limit",
|
name: "should return if query does not exceed max limit",
|
||||||
query: models.HistoryQuery{
|
query: models.HistoryQuery{
|
||||||
@ -306,16 +321,38 @@ func TestBuildLogQuery(t *testing.T) {
|
|||||||
},
|
},
|
||||||
expErr: ErrLokiQueryTooLong,
|
expErr: ErrLokiQueryTooLong,
|
||||||
},
|
},
|
||||||
|
{
|
||||||
|
name: "filters by all namespaces",
|
||||||
|
query: models.HistoryQuery{
|
||||||
|
OrgID: 123,
|
||||||
|
},
|
||||||
|
folderUIDs: []string{"folder-1", "folder\\d"},
|
||||||
|
exp: `{orgID="123",from="state-history",folderUID=~` + "`folder-1|folder\\\\d`" + `}`,
|
||||||
|
},
|
||||||
|
{
|
||||||
|
name: "should drop folders if it's too long",
|
||||||
|
query: models.HistoryQuery{
|
||||||
|
OrgID: 123,
|
||||||
|
RuleUID: "rule-uid",
|
||||||
|
Labels: map[string]string{
|
||||||
|
"customlabel": "customvalue",
|
||||||
|
},
|
||||||
|
},
|
||||||
|
folderUIDs: []string{"folder-1", "folder-2", "folder\\d"},
|
||||||
|
exp: `{orgID="123",from="state-history"} | json | ruleUID="rule-uid" | labels_customlabel="customvalue"`,
|
||||||
|
expDropped: true,
|
||||||
|
},
|
||||||
}
|
}
|
||||||
|
|
||||||
for _, tc := range cases {
|
for _, tc := range cases {
|
||||||
t.Run(tc.name, func(t *testing.T) {
|
t.Run(tc.name, func(t *testing.T) {
|
||||||
res, err := BuildLogQuery(tc.query, maxQuerySize)
|
res, dropped, err := BuildLogQuery(tc.query, tc.folderUIDs, maxQuerySize)
|
||||||
if tc.expErr != nil {
|
if tc.expErr != nil {
|
||||||
require.ErrorIs(t, err, tc.expErr)
|
require.ErrorIs(t, err, tc.expErr)
|
||||||
return
|
return
|
||||||
}
|
}
|
||||||
require.LessOrEqual(t, len(res), maxQuerySize)
|
require.LessOrEqual(t, len(res), maxQuerySize)
|
||||||
|
require.Equal(t, tc.expDropped, dropped)
|
||||||
require.NoError(t, err)
|
require.NoError(t, err)
|
||||||
require.Equal(t, tc.exp, res)
|
require.Equal(t, tc.exp, res)
|
||||||
})
|
})
|
||||||
@ -324,10 +361,10 @@ func TestBuildLogQuery(t *testing.T) {
|
|||||||
|
|
||||||
func TestMerge(t *testing.T) {
|
func TestMerge(t *testing.T) {
|
||||||
testCases := []struct {
|
testCases := []struct {
|
||||||
name string
|
name string
|
||||||
res QueryRes
|
res QueryRes
|
||||||
ruleID string
|
expected *data.Frame
|
||||||
expectedTime []time.Time
|
folderUIDs []string
|
||||||
}{
|
}{
|
||||||
{
|
{
|
||||||
name: "Should return values from multiple streams in right order",
|
name: "Should return values from multiple streams in right order",
|
||||||
@ -336,28 +373,55 @@ func TestMerge(t *testing.T) {
|
|||||||
Result: []Stream{
|
Result: []Stream{
|
||||||
{
|
{
|
||||||
Stream: map[string]string{
|
Stream: map[string]string{
|
||||||
"current": "pending",
|
"from": "state-history",
|
||||||
|
"orgID": "1",
|
||||||
|
"group": "test-group-1",
|
||||||
|
"folderUID": "test-folder-1",
|
||||||
|
"extra": "label",
|
||||||
},
|
},
|
||||||
Values: []Sample{
|
Values: []Sample{
|
||||||
{time.Unix(0, 1), `{"schemaVersion": 1, "previous": "normal", "current": "pending", "values":{"a": "b"}}`},
|
{time.Unix(1, 0), `{"schemaVersion": 1, "previous": "normal", "current": "pending", "values":{"a": 1.5}, "ruleUID": "test-rule-1"}`},
|
||||||
},
|
},
|
||||||
},
|
},
|
||||||
{
|
{
|
||||||
Stream: map[string]string{
|
Stream: map[string]string{
|
||||||
"current": "firing",
|
"from": "state-history",
|
||||||
|
"orgID": "1",
|
||||||
|
"group": "test-group-2",
|
||||||
|
"folderUID": "test-folder-1",
|
||||||
},
|
},
|
||||||
Values: []Sample{
|
Values: []Sample{
|
||||||
{time.Unix(0, 2), `{"schemaVersion": 1, "previous": "pending", "current": "firing", "values":{"a": "b"}}`},
|
{time.Unix(2, 0), `{"schemaVersion": 1, "previous": "pending", "current": "firing", "values":{"a": 2.5}, "ruleUID": "test-rule-2"}`},
|
||||||
},
|
},
|
||||||
},
|
},
|
||||||
},
|
},
|
||||||
},
|
},
|
||||||
},
|
},
|
||||||
ruleID: "123456",
|
expected: data.NewFrame("states",
|
||||||
expectedTime: []time.Time{
|
data.NewField(dfTime, data.Labels{}, []time.Time{
|
||||||
time.Unix(0, 1),
|
time.Unix(1, 0),
|
||||||
time.Unix(0, 2),
|
time.Unix(2, 0),
|
||||||
},
|
}),
|
||||||
|
data.NewField(dfLine, data.Labels{}, []json.RawMessage{
|
||||||
|
toJson(LokiEntry{RuleUID: "test-rule-1", SchemaVersion: 1, Previous: "normal", Current: "pending", Values: jsonifyValues(map[string]float64{"a": 1.5})}),
|
||||||
|
toJson(LokiEntry{RuleUID: "test-rule-2", SchemaVersion: 1, Previous: "pending", Current: "firing", Values: jsonifyValues(map[string]float64{"a": 2.5})}),
|
||||||
|
}),
|
||||||
|
data.NewField(dfLabels, data.Labels{}, []json.RawMessage{
|
||||||
|
toJson(map[string]string{
|
||||||
|
StateHistoryLabelKey: "state-history",
|
||||||
|
OrgIDLabel: "1",
|
||||||
|
GroupLabel: "test-group-1",
|
||||||
|
FolderUIDLabel: "test-folder-1",
|
||||||
|
"extra": "label",
|
||||||
|
}),
|
||||||
|
toJson(map[string]string{
|
||||||
|
StateHistoryLabelKey: "state-history",
|
||||||
|
OrgIDLabel: "1",
|
||||||
|
GroupLabel: "test-group-2",
|
||||||
|
FolderUIDLabel: "test-folder-1",
|
||||||
|
}),
|
||||||
|
}),
|
||||||
|
),
|
||||||
},
|
},
|
||||||
{
|
{
|
||||||
name: "Should handle empty values",
|
name: "Should handle empty values",
|
||||||
@ -366,15 +430,18 @@ func TestMerge(t *testing.T) {
|
|||||||
Result: []Stream{
|
Result: []Stream{
|
||||||
{
|
{
|
||||||
Stream: map[string]string{
|
Stream: map[string]string{
|
||||||
"current": "normal",
|
"extra": "labels",
|
||||||
},
|
},
|
||||||
Values: []Sample{},
|
Values: []Sample{},
|
||||||
},
|
},
|
||||||
},
|
},
|
||||||
},
|
},
|
||||||
},
|
},
|
||||||
ruleID: "123456",
|
expected: data.NewFrame("states",
|
||||||
expectedTime: []time.Time{},
|
data.NewField(dfTime, data.Labels{}, []time.Time{}),
|
||||||
|
data.NewField(dfLine, data.Labels{}, []json.RawMessage{}),
|
||||||
|
data.NewField(dfLabels, data.Labels{}, []json.RawMessage{}),
|
||||||
|
),
|
||||||
},
|
},
|
||||||
{
|
{
|
||||||
name: "Should handle multiple values in one stream",
|
name: "Should handle multiple values in one stream",
|
||||||
@ -383,50 +450,188 @@ func TestMerge(t *testing.T) {
|
|||||||
Result: []Stream{
|
Result: []Stream{
|
||||||
{
|
{
|
||||||
Stream: map[string]string{
|
Stream: map[string]string{
|
||||||
"current": "normal",
|
"from": "state-history",
|
||||||
|
"orgID": "1",
|
||||||
|
"group": "test-group-1",
|
||||||
|
"folderUID": "test-folder-1",
|
||||||
},
|
},
|
||||||
Values: []Sample{
|
Values: []Sample{
|
||||||
{time.Unix(0, 1), `{"schemaVersion": 1, "previous": "firing", "current": "normal", "values":{"a": "b"}}`},
|
{time.Unix(1, 0), `{"schemaVersion": 1, "previous": "normal", "current": "pending", "values":{"a": 1.5}, "ruleUID": "test-rule-1"}`},
|
||||||
{time.Unix(0, 2), `{"schemaVersion": 1, "previous": "firing", "current": "normal", "values":{"a": "b"}}`},
|
{time.Unix(5, 0), `{"schemaVersion": 1, "previous": "pending", "current": "normal", "values":{"a": 0.5}, "ruleUID": "test-rule-2"}`},
|
||||||
},
|
},
|
||||||
},
|
},
|
||||||
{
|
{
|
||||||
Stream: map[string]string{
|
Stream: map[string]string{
|
||||||
"current": "firing",
|
"from": "state-history",
|
||||||
|
"orgID": "1",
|
||||||
|
"group": "test-group-2",
|
||||||
|
"folderUID": "test-folder-1",
|
||||||
},
|
},
|
||||||
Values: []Sample{
|
Values: []Sample{
|
||||||
{time.Unix(0, 3), `{"schemaVersion": 1, "previous": "pending", "current": "firing", "values":{"a": "b"}}`},
|
{time.Unix(2, 0), `{"schemaVersion": 1, "previous": "pending", "current": "firing", "values":{"a": 2.5}, "ruleUID": "test-rule-3"}`},
|
||||||
},
|
},
|
||||||
},
|
},
|
||||||
},
|
},
|
||||||
},
|
},
|
||||||
},
|
},
|
||||||
ruleID: "123456",
|
expected: data.NewFrame("states",
|
||||||
expectedTime: []time.Time{
|
data.NewField(dfTime, data.Labels{}, []time.Time{
|
||||||
time.Unix(0, 1),
|
time.Unix(1, 0),
|
||||||
time.Unix(0, 2),
|
time.Unix(2, 0),
|
||||||
time.Unix(0, 3),
|
time.Unix(5, 0),
|
||||||
|
}),
|
||||||
|
data.NewField(dfLine, data.Labels{}, []json.RawMessage{
|
||||||
|
toJson(LokiEntry{RuleUID: "test-rule-1", SchemaVersion: 1, Previous: "normal", Current: "pending", Values: jsonifyValues(map[string]float64{"a": 1.5})}),
|
||||||
|
toJson(LokiEntry{RuleUID: "test-rule-3", SchemaVersion: 1, Previous: "pending", Current: "firing", Values: jsonifyValues(map[string]float64{"a": 2.5})}),
|
||||||
|
toJson(LokiEntry{RuleUID: "test-rule-2", SchemaVersion: 1, Previous: "pending", Current: "normal", Values: jsonifyValues(map[string]float64{"a": 0.5})}),
|
||||||
|
}),
|
||||||
|
data.NewField(dfLabels, data.Labels{}, []json.RawMessage{
|
||||||
|
toJson(map[string]string{
|
||||||
|
StateHistoryLabelKey: "state-history",
|
||||||
|
OrgIDLabel: "1",
|
||||||
|
GroupLabel: "test-group-1",
|
||||||
|
FolderUIDLabel: "test-folder-1",
|
||||||
|
}),
|
||||||
|
toJson(map[string]string{
|
||||||
|
StateHistoryLabelKey: "state-history",
|
||||||
|
OrgIDLabel: "1",
|
||||||
|
GroupLabel: "test-group-2",
|
||||||
|
FolderUIDLabel: "test-folder-1",
|
||||||
|
}),
|
||||||
|
toJson(map[string]string{
|
||||||
|
StateHistoryLabelKey: "state-history",
|
||||||
|
OrgIDLabel: "1",
|
||||||
|
GroupLabel: "test-group-1",
|
||||||
|
FolderUIDLabel: "test-folder-1",
|
||||||
|
}),
|
||||||
|
}),
|
||||||
|
),
|
||||||
|
},
|
||||||
+		{
+			name: "should filter streams by folder UID",
+			folderUIDs: []string{"test-folder-1"},
+			res: QueryRes{
+				Data: QueryData{
+					Result: []Stream{
+						{
+							Stream: map[string]string{
+								"from": "state-history",
+								"orgID": "1",
+								"group": "test-group-1",
+								"folderUID": "test-folder-1",
+							},
+							Values: []Sample{
+								{time.Unix(1, 0), `{"schemaVersion": 1, "previous": "normal", "current": "pending", "values":{"a": 1.5}, "ruleUID": "test-rule-1"}`},
+								{time.Unix(5, 0), `{"schemaVersion": 1, "previous": "pending", "current": "normal", "values":{"a": 0.5}, "ruleUID": "test-rule-2"}`},
+							},
+						},
+						{
+							Stream: map[string]string{
+								"from": "state-history",
+								"orgID": "1",
+								"group": "test-group-2",
+								"folderUID": "test-folder-2",
+							},
+							Values: []Sample{
+								{time.Unix(2, 0), `{"schemaVersion": 1, "previous": "pending", "current": "firing", "values":{"a": 2.5}, "ruleUID": "test-rule-3"}`},
+							},
+						},
+					},
+				},
			},
+			expected: data.NewFrame("states",
+				data.NewField(dfTime, data.Labels{}, []time.Time{
+					time.Unix(1, 0),
+					time.Unix(5, 0),
+				}),
+				data.NewField(dfLine, data.Labels{}, []json.RawMessage{
+					toJson(LokiEntry{RuleUID: "test-rule-1", SchemaVersion: 1, Previous: "normal", Current: "pending", Values: jsonifyValues(map[string]float64{"a": 1.5})}),
+					toJson(LokiEntry{RuleUID: "test-rule-2", SchemaVersion: 1, Previous: "pending", Current: "normal", Values: jsonifyValues(map[string]float64{"a": 0.5})}),
+				}),
+				data.NewField(dfLabels, data.Labels{}, []json.RawMessage{
+					toJson(map[string]string{
+						StateHistoryLabelKey: "state-history",
+						OrgIDLabel: "1",
+						GroupLabel: "test-group-1",
+						FolderUIDLabel: "test-folder-1",
+					}),
+					toJson(map[string]string{
+						StateHistoryLabelKey: "state-history",
+						OrgIDLabel: "1",
+						GroupLabel: "test-group-1",
+						FolderUIDLabel: "test-folder-1",
+					}),
+				}),
+			),
+		},
+		{
+			name: "should skip streams without folder UID if filter is specified",
+			folderUIDs: []string{"test-folder-1"},
+			res: QueryRes{
+				Data: QueryData{
+					Result: []Stream{
+						{
+							Stream: map[string]string{
+								"group": "test-group-1",
+							},
+							Values: []Sample{
+								{time.Unix(1, 0), `{"schemaVersion": 1, "previous": "normal", "current": "pending", "values":{"a": 1.5}, "ruleUID": "test-rule-1"}`},
+								{time.Unix(5, 0), `{"schemaVersion": 1, "previous": "pending", "current": "normal", "values":{"a": 0.5}, "ruleUID": "test-rule-2"}`},
+							},
+						},
+					},
+				},
+			},
+			expected: data.NewFrame("states",
+				data.NewField(dfTime, data.Labels{}, []time.Time{}),
+				data.NewField(dfLine, data.Labels{}, []json.RawMessage{}),
+				data.NewField(dfLabels, data.Labels{}, []json.RawMessage{}),
+			),
+		},
+		{
+			name: "should return streams without folder UID if filter is not specified",
+			folderUIDs: []string{},
+			res: QueryRes{
+				Data: QueryData{
+					Result: []Stream{
+						{
+							Stream: map[string]string{
+								"group": "test-group-1",
+							},
+							Values: []Sample{
+								{time.Unix(1, 0), `{"schemaVersion": 1, "previous": "normal", "current": "pending", "values":{"a": 1.5}, "ruleUID": "test-rule-1"}`},
+							},
+						},
+					},
+				},
+			},
+			expected: data.NewFrame("states",
+				data.NewField(dfTime, data.Labels{}, []time.Time{
+					time.Unix(1, 0),
+				}),
+				data.NewField(dfLine, data.Labels{}, []json.RawMessage{
+					toJson(LokiEntry{RuleUID: "test-rule-1", SchemaVersion: 1, Previous: "normal", Current: "pending", Values: jsonifyValues(map[string]float64{"a": 1.5})}),
+				}),
+				data.NewField(dfLabels, data.Labels{}, []json.RawMessage{
+					toJson(map[string]string{
+						GroupLabel: "test-group-1",
+					}),
+				}),
+			),
		},
	}

	for _, tc := range testCases {
		t.Run(tc.name, func(t *testing.T) {
-			m, err := merge(tc.res, tc.ruleID)
+			expectedJson, err := tc.expected.MarshalJSON()
			require.NoError(t, err)
+			m, err := merge(tc.res, tc.folderUIDs)
+			require.NoError(t, err)
+			actualJson, err := m.MarshalJSON()
+			assert.NoError(t, err)

-			var dfTimeColumn *data.Field
-			for _, f := range m.Fields {
-				if f.Name == dfTime {
-					dfTimeColumn = f
-				}
-			}
-
-			require.NotNil(t, dfTimeColumn)
-
-			for i := 0; i < len(tc.expectedTime); i++ {
-				require.Equal(t, tc.expectedTime[i], dfTimeColumn.At(i))
-			}
+			assert.Equal(t, tc.expected.Rows(), m.Rows())
+			assert.JSONEq(t, string(expectedJson), string(actualJson))
		})
	}
}
@ -434,7 +639,7 @@ func TestMerge(t *testing.T) {
func TestRecordStates(t *testing.T) {
	t.Run("writes state transitions to loki", func(t *testing.T) {
		req := NewFakeRequester()
-		loki := createTestLokiBackend(req, metrics.NewHistorianMetrics(prometheus.NewRegistry(), metrics.Subsystem))
+		loki := createTestLokiBackend(t, req, metrics.NewHistorianMetrics(prometheus.NewRegistry(), metrics.Subsystem))
		rule := createTestRule()
		states := singleFromNormal(&state.State{
			State: eval.Alerting,
@ -450,8 +655,8 @@ func TestRecordStates(t *testing.T) {
	t.Run("emits expected write metrics", func(t *testing.T) {
		reg := prometheus.NewRegistry()
		met := metrics.NewHistorianMetrics(reg, metrics.Subsystem)
-		loki := createTestLokiBackend(NewFakeRequester(), met)
-		errLoki := createTestLokiBackend(NewFakeRequester().WithResponse(badResponse()), met) //nolint:bodyclose
+		loki := createTestLokiBackend(t, NewFakeRequester(), met)
+		errLoki := createTestLokiBackend(t, NewFakeRequester().WithResponse(badResponse()), met) //nolint:bodyclose
		rule := createTestRule()
		states := singleFromNormal(&state.State{
			State: eval.Alerting,
@ -486,7 +691,7 @@ grafana_alerting_state_history_writes_total{backend="loki",org="1"} 2

	t.Run("elides request if nothing to send", func(t *testing.T) {
		req := NewFakeRequester()
-		loki := createTestLokiBackend(req, metrics.NewHistorianMetrics(prometheus.NewRegistry(), metrics.Subsystem))
+		loki := createTestLokiBackend(t, req, metrics.NewHistorianMetrics(prometheus.NewRegistry(), metrics.Subsystem))
		rule := createTestRule()
		states := []state.StateTransition{}

@ -498,7 +703,7 @@ grafana_alerting_state_history_writes_total{backend="loki",org="1"} 2

	t.Run("succeeds with special chars in labels", func(t *testing.T) {
		req := NewFakeRequester()
-		loki := createTestLokiBackend(req, metrics.NewHistorianMetrics(prometheus.NewRegistry(), metrics.Subsystem))
+		loki := createTestLokiBackend(t, req, metrics.NewHistorianMetrics(prometheus.NewRegistry(), metrics.Subsystem))
		rule := createTestRule()
		states := singleFromNormal(&state.State{
			State: eval.Alerting,
@ -521,7 +726,7 @@ grafana_alerting_state_history_writes_total{backend="loki",org="1"} 2

	t.Run("adds external labels to log lines", func(t *testing.T) {
		req := NewFakeRequester()
-		loki := createTestLokiBackend(req, metrics.NewHistorianMetrics(prometheus.NewRegistry(), metrics.Subsystem))
+		loki := createTestLokiBackend(t, req, metrics.NewHistorianMetrics(prometheus.NewRegistry(), metrics.Subsystem))
		rule := createTestRule()
		states := singleFromNormal(&state.State{
			State: eval.Alerting,
@ -537,7 +742,153 @@ grafana_alerting_state_history_writes_total{backend="loki",org="1"} 2
	})
}

-func createTestLokiBackend(req client.Requester, met *metrics.Historian) *RemoteLokiBackend {
+func TestGetFolderUIDsForFilter(t *testing.T) {
+	orgID := int64(1)
+	rule := models.RuleGen.With(models.RuleMuts.WithNamespaceUID("folder-1")).GenerateRef()
+	folders := []string{
+		"folder-1",
+		"folder-2",
+		"folder-3",
+	}
+	usr := accesscontrol.BackgroundUser("test", 1, org.RoleNone, nil)
+
+	createLoki := func(ac AccessControl) *RemoteLokiBackend {
+		req := NewFakeRequester()
+		loki := createTestLokiBackend(t, req, metrics.NewHistorianMetrics(prometheus.NewRegistry(), metrics.Subsystem))
+		rules := fakes.NewRuleStore(t)
+		f := make([]*folder.Folder, 0, len(folders))
+		for _, uid := range folders {
+			f = append(f, &folder.Folder{UID: uid, OrgID: orgID})
+		}
+		rules.Folders = map[int64][]*folder.Folder{
+			orgID: f,
+		}
+		rules.Rules = map[int64][]*models.AlertRule{
+			orgID: {rule},
+		}
+		loki.ruleStore = rules
+		loki.ac = ac
+		return loki
+	}
+
+	t.Run("when rule UID is specified", func(t *testing.T) {
+		t.Run("should bypass authorization if user can read all rules", func(t *testing.T) {
+			ac := &acfakes.FakeRuleService{}
+			ac.CanReadAllRulesFunc = func(ctx context.Context, requester identity.Requester) (bool, error) {
+				return true, nil
+			}
+			result, err := createLoki(ac).getFolderUIDsForFilter(context.Background(), models.HistoryQuery{OrgID: orgID, RuleUID: rule.UID, SignedInUser: usr})
+			assert.NoError(t, err)
+			assert.Empty(t, result)
+
+			assert.Len(t, ac.Calls, 1)
+			assert.Equal(t, "CanReadAllRules", ac.Calls[0].MethodName)
+			assert.Equal(t, usr, ac.Calls[0].Arguments[1])
+
+			t.Run("even if rule does not exist", func(t *testing.T) {
+				result, err := createLoki(ac).getFolderUIDsForFilter(context.Background(), models.HistoryQuery{OrgID: orgID, RuleUID: "not-found", SignedInUser: usr})
+				assert.NoError(t, err)
+				assert.Empty(t, result)
+			})
+		})
+
+		t.Run("should authorize access to the rule", func(t *testing.T) {
+			ac := &acfakes.FakeRuleService{}
+			ac.CanReadAllRulesFunc = func(ctx context.Context, requester identity.Requester) (bool, error) {
+				return false, nil
+			}
+			ac.AuthorizeAccessInFolderFunc = func(ctx context.Context, requester identity.Requester, namespaced models.Namespaced) error {
+				return nil
+			}
+			loki := createLoki(ac)
+
+			result, err := loki.getFolderUIDsForFilter(context.Background(), models.HistoryQuery{OrgID: orgID, RuleUID: rule.UID, SignedInUser: usr})
+			assert.NoError(t, err)
+			assert.Empty(t, result)
+
+			assert.Len(t, ac.Calls, 2)
+			assert.Equal(t, "CanReadAllRules", ac.Calls[0].MethodName)
+			assert.Equal(t, usr, ac.Calls[0].Arguments[1])
+			assert.Equal(t, "AuthorizeAccessInFolder", ac.Calls[1].MethodName)
+			assert.Equal(t, usr, ac.Calls[1].Arguments[1])
+			assert.Equal(t, rule, ac.Calls[1].Arguments[2])
+
+			t.Run("should fail if unauthorized", func(t *testing.T) {
+				authzErr := errors.New("generic error")
+				ac.AuthorizeAccessInFolderFunc = func(ctx context.Context, requester identity.Requester, namespaced models.Namespaced) error {
+					return authzErr
+				}
+				result, err = loki.getFolderUIDsForFilter(context.Background(), models.HistoryQuery{OrgID: orgID, RuleUID: rule.UID, SignedInUser: usr})
+				require.ErrorIs(t, err, authzErr)
+			})
+
+			t.Run("should fail if rule does not exist", func(t *testing.T) {
+				result, err = loki.getFolderUIDsForFilter(context.Background(), models.HistoryQuery{OrgID: orgID, RuleUID: "not-found", SignedInUser: usr})
+				require.ErrorIs(t, err, models.ErrAlertRuleNotFound)
+			})
+		})
+	})
+
+	t.Run("when rule UID is empty", func(t *testing.T) {
+		t.Run("should bypass authorization if user can read all rules", func(t *testing.T) {
+			ac := &acfakes.FakeRuleService{}
+			ac.CanReadAllRulesFunc = func(ctx context.Context, requester identity.Requester) (bool, error) {
+				return true, nil
+			}
+			result, err := createLoki(ac).getFolderUIDsForFilter(context.Background(), models.HistoryQuery{OrgID: orgID, SignedInUser: usr})
+			assert.NoError(t, err)
+			assert.Empty(t, result)
+
+			assert.Len(t, ac.Calls, 1)
+			assert.Equal(t, "CanReadAllRules", ac.Calls[0].MethodName)
+			assert.Equal(t, usr, ac.Calls[0].Arguments[1])
+		})
+
+		t.Run("should return only folders user has access to", func(t *testing.T) {
+			ac := &acfakes.FakeRuleService{}
+			ac.CanReadAllRulesFunc = func(ctx context.Context, requester identity.Requester) (bool, error) {
+				return false, nil
+			}
+			ac.HasAccessInFolderFunc = func(ctx context.Context, requester identity.Requester, namespaced models.Namespaced) (bool, error) {
+				return true, nil
+			}
+			loki := createLoki(ac)

+			result, err := loki.getFolderUIDsForFilter(context.Background(), models.HistoryQuery{OrgID: orgID, SignedInUser: usr})
+			assert.NoError(t, err)
+			assert.EqualValues(t, folders, result)
+
+			assert.Len(t, ac.Calls, len(folders)+1)
+			assert.Equal(t, "CanReadAllRules", ac.Calls[0].MethodName)
+			assert.Equal(t, usr, ac.Calls[0].Arguments[1])
+			for i, folderUID := range folders {
+				assert.Equal(t, "HasAccessInFolder", ac.Calls[i+1].MethodName)
+				assert.Equal(t, usr, ac.Calls[i+1].Arguments[1])
+				assert.Equal(t, folderUID, ac.Calls[i+1].Arguments[2].(models.Namespaced).GetNamespaceUID())
+			}
+
+			t.Run("should fail if no folders to read", func(t *testing.T) {
+				loki := createLoki(ac)
+				loki.ruleStore = fakes.NewRuleStore(t)
+
+				result, err = loki.getFolderUIDsForFilter(context.Background(), models.HistoryQuery{OrgID: orgID, SignedInUser: usr})
+				require.ErrorIs(t, err, rulesAuthz.ErrAuthorizationBase)
+				require.Empty(t, result)
+			})
+
+			t.Run("should fail if no folders to read alert rules in", func(t *testing.T) {
+				ac.HasAccessInFolderFunc = func(ctx context.Context, requester identity.Requester, namespaced models.Namespaced) (bool, error) {
+					return false, nil
+				}
+				result, err = loki.getFolderUIDsForFilter(context.Background(), models.HistoryQuery{OrgID: orgID, SignedInUser: usr})
+				require.ErrorIs(t, err, rulesAuthz.ErrAuthorizationBase)
+				require.Empty(t, result)
+			})
+		})
+	})
+}
+
+func createTestLokiBackend(t *testing.T, req client.Requester, met *metrics.Historian) *RemoteLokiBackend {
	url, _ := url.Parse("http://some.url")
	cfg := LokiConfig{
		WritePathURL: url,
@ -546,7 +897,9 @@ func createTestLokiBackend(req client.Requester, met *metrics.Historian) *Remote
		ExternalLabels: map[string]string{"externalLabelKey": "externalLabelValue"},
	}
	lokiBackendLogger := log.New("ngalert.state.historian", "backend", "loki")
-	return NewRemoteLokiBackend(lokiBackendLogger, cfg, req, met, tracing.InitializeTracerForTest())
+	rules := fakes.NewRuleStore(t)
+	ac := &acfakes.FakeRuleService{}
+	return NewRemoteLokiBackend(lokiBackendLogger, cfg, req, met, tracing.InitializeTracerForTest(), rules, ac)
}

func singleFromNormal(st *state.State) []state.StateTransition {
@ -602,3 +955,11 @@ func readBody(t *testing.T, req *http.Request) []byte {
	require.NoError(t, err)
	return val
}
+
+func toJson[T any](entry T) json.RawMessage {
+	b, err := json.Marshal(entry)
+	if err != nil {
+		panic(err)
+	}
+	return b
+}
@ -12,6 +12,7 @@ import (
	"github.com/grafana/grafana/pkg/infra/log"
	"github.com/grafana/grafana/pkg/infra/tracing"
	"github.com/grafana/grafana/pkg/services/annotations"
+	"github.com/grafana/grafana/pkg/services/ngalert/accesscontrol/fakes"
	"github.com/grafana/grafana/pkg/services/ngalert/eval"
	"github.com/grafana/grafana/pkg/services/ngalert/metrics"
	"github.com/grafana/grafana/pkg/services/ngalert/models"
@ -25,7 +26,8 @@ func BenchmarkProcessEvalResults(b *testing.B) {
	metrics := metrics.NewHistorianMetrics(prometheus.NewRegistry(), metrics.Subsystem)
	store := historian.NewAnnotationStore(&as, nil, metrics)
	annotationBackendLogger := log.New("ngalert.state.historian", "backend", "annotations")
-	hist := historian.NewAnnotationBackend(annotationBackendLogger, store, nil, metrics)
+	ac := &fakes.FakeRuleService{}
+	hist := historian.NewAnnotationBackend(annotationBackendLogger, store, nil, metrics, ac)
	cfg := state.ManagerCfg{
		Historian: hist,
		Tracer: tracing.InitializeTracerForTest(),
@ -28,6 +28,7 @@ import (
	"github.com/grafana/grafana/pkg/services/annotations"
	"github.com/grafana/grafana/pkg/services/annotations/annotationstest"
	"github.com/grafana/grafana/pkg/services/dashboards"
+	acfakes "github.com/grafana/grafana/pkg/services/ngalert/accesscontrol/fakes"
	"github.com/grafana/grafana/pkg/services/ngalert/eval"
	"github.com/grafana/grafana/pkg/services/ngalert/metrics"
	"github.com/grafana/grafana/pkg/services/ngalert/models"
@ -235,7 +236,8 @@ func TestDashboardAnnotations(t *testing.T) {
	historianMetrics := metrics.NewHistorianMetrics(prometheus.NewRegistry(), metrics.Subsystem)
	store := historian.NewAnnotationStore(fakeAnnoRepo, &dashboards.FakeDashboardService{}, historianMetrics)
	annotationBackendLogger := log.New("ngalert.state.historian", "backend", "annotations")
-	hist := historian.NewAnnotationBackend(annotationBackendLogger, store, nil, historianMetrics)
+	ac := &acfakes.FakeRuleService{}
+	hist := historian.NewAnnotationBackend(annotationBackendLogger, store, nil, historianMetrics, ac)
	cfg := state.ManagerCfg{
		Metrics: metrics.NewNGAlert(prometheus.NewPedanticRegistry()).GetStateMetrics(),
		ExternalURL: nil,
@ -1386,7 +1388,8 @@ func TestProcessEvalResults(t *testing.T) {
	m := metrics.NewHistorianMetrics(prometheus.NewRegistry(), metrics.Subsystem)
	store := historian.NewAnnotationStore(fakeAnnoRepo, &dashboards.FakeDashboardService{}, m)
	annotationBackendLogger := log.New("ngalert.state.historian", "backend", "annotations")
-	hist := historian.NewAnnotationBackend(annotationBackendLogger, store, nil, m)
+	ac := &acfakes.FakeRuleService{}
+	hist := historian.NewAnnotationBackend(annotationBackendLogger, store, nil, m, ac)
	clk := clock.NewMock()
	cfg := state.ManagerCfg{
		Metrics: stateMetrics,