Search: Add search index configuration options (#55525)
* Search: externalize config
* Search: update config descriptions
* Search: fix value
* Search: fix
* update
* Search: revert config values
* Search: rename copy/paste
* Search: fix tests
This commit is contained in: parent 9db2720016, commit c3ca5405ce
@@ -1296,3 +1296,19 @@ scheduler_interval =
 [storage]
 # Allow uploading SVG files without sanitization.
 allow_unsanitized_svg_upload = false
+
+
+#################################### Search ################################################
+
+[search]
+# Defines the number of dashboards loaded at once in a batch during a full reindex.
+# This is a temporary setting that might be removed in the future.
+dashboard_loading_batch_size = 200
+
+# Defines the frequency of a full search reindex.
+# This is a temporary setting that might be removed in the future.
+full_reindex_interval = 5m
+
+# Defines the frequency of partial index updates based on recent changes such as dashboard updates.
+# This is a temporary setting that might be removed in the future.
+index_update_interval = 10s
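Both interval options take duration strings that the new settings reader (added below in pkg/setting/setting_search.go) parses with ini's MustDuration, which follows Go's time.ParseDuration syntax. A small standalone sketch, not part of the commit, of what that syntax accepts:

package main

import (
    "fmt"
    "time"
)

func main() {
    // "5m" and "10s" are the shipped defaults; compound values also parse.
    for _, v := range []string{"10s", "5m", "1h30m"} {
        d, err := time.ParseDuration(v)
        fmt.Println(v, "->", d, err)
    }
}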
@@ -16,6 +16,7 @@ import (
     "github.com/grafana/grafana/pkg/services/datasources"
     "github.com/grafana/grafana/pkg/services/featuremgmt"
     "github.com/grafana/grafana/pkg/services/user"
+    "github.com/grafana/grafana/pkg/setting"
     "github.com/stretchr/testify/require"
 )

@@ -82,7 +83,7 @@ var (
 )

 func service(t *testing.T) *StandardSearchService {
-    service, ok := ProvideService(nil, nil, nil, accesscontrolmock.New(), tracing.InitializeTracerForTest(), featuremgmt.WithFeatures(), nil).(*StandardSearchService)
+    service, ok := ProvideService(&setting.Cfg{Search: setting.SearchSettings{}}, nil, nil, accesscontrolmock.New(), tracing.InitializeTracerForTest(), featuremgmt.WithFeatures(), nil).(*StandardSearchService)
     require.True(t, ok)
     return service
 }
@@ -20,6 +20,7 @@ import (
     "github.com/grafana/grafana/pkg/services/searchV2/extract"
     "github.com/grafana/grafana/pkg/services/sqlstore"
     "github.com/grafana/grafana/pkg/services/store"
+    "github.com/grafana/grafana/pkg/setting"
     "go.opentelemetry.io/otel/attribute"

     "github.com/blugelabs/bluge"

@@ -98,9 +99,10 @@ type searchIndex struct {
     syncCh     chan chan struct{}
     tracer     tracing.Tracer
     features   featuremgmt.FeatureToggles
+    settings   setting.SearchSettings
 }

-func newSearchIndex(dashLoader dashboardLoader, evStore eventStore, extender DocumentExtender, folderIDs folderUIDLookup, tracer tracing.Tracer, features featuremgmt.FeatureToggles) *searchIndex {
+func newSearchIndex(dashLoader dashboardLoader, evStore eventStore, extender DocumentExtender, folderIDs folderUIDLookup, tracer tracing.Tracer, features featuremgmt.FeatureToggles, settings setting.SearchSettings) *searchIndex {
     return &searchIndex{
         loader:     dashLoader,
         eventStore: evStore,

@@ -113,6 +115,7 @@ func newSearchIndex(dashLoader dashboardLoader, evStore eventStore, extender Doc
         syncCh:     make(chan chan struct{}),
         tracer:     tracer,
         features:   features,
+        settings:   settings,
     }
 }

@@ -176,11 +179,13 @@ func (i *searchIndex) sync(ctx context.Context) error {
 }

 func (i *searchIndex) run(ctx context.Context, orgIDs []int64, reIndexSignalCh chan struct{}) error {
-    reIndexInterval := 5 * time.Minute
+    i.logger.Info("Initializing SearchV2", "dashboardLoadingBatchSize", i.settings.DashboardLoadingBatchSize, "fullReindexInterval", i.settings.FullReindexInterval, "indexUpdateInterval", i.settings.IndexUpdateInterval)
+
+    reIndexInterval := i.settings.FullReindexInterval
     fullReIndexTimer := time.NewTimer(reIndexInterval)
     defer fullReIndexTimer.Stop()

-    partialUpdateInterval := 5 * time.Second
+    partialUpdateInterval := i.settings.IndexUpdateInterval
     partialUpdateTimer := time.NewTimer(partialUpdateInterval)
     defer partialUpdateTimer.Stop()

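This hunk only swaps the hard-coded intervals for the configured ones; the loop that consumes the two timers sits outside the diff. As a rough, hypothetical sketch of how a pair of intervals like these typically drives periodic work (runIndexLoop and the printed placeholders are illustrative, not the actual searchIndex.run body):

package main

import (
    "context"
    "fmt"
    "time"
)

// Illustrative only: fullReindexInterval maps to full_reindex_interval,
// indexUpdateInterval to index_update_interval.
func runIndexLoop(ctx context.Context, fullReindexInterval, indexUpdateInterval time.Duration) error {
    fullReIndexTimer := time.NewTimer(fullReindexInterval)
    defer fullReIndexTimer.Stop()

    partialUpdateTimer := time.NewTimer(indexUpdateInterval)
    defer partialUpdateTimer.Stop()

    for {
        select {
        case <-partialUpdateTimer.C:
            fmt.Println("apply recent changes (partial index update)") // placeholder step
            partialUpdateTimer.Reset(indexUpdateInterval)
        case <-fullReIndexTimer.C:
            fmt.Println("rebuild the index from scratch (full reindex)") // placeholder step
            fullReIndexTimer.Reset(fullReindexInterval)
        case <-ctx.Done():
            return ctx.Err()
        }
    }
}

func main() {
    // Shrunk intervals so the demo finishes quickly.
    ctx, cancel := context.WithTimeout(context.Background(), time.Second)
    defer cancel()
    _ = runIndexLoop(ctx, 300*time.Millisecond, 100*time.Millisecond)
}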
@@ -786,12 +791,13 @@ func (i *searchIndex) updateDashboard(ctx context.Context, orgID int64, index *o
 }

 type sqlDashboardLoader struct {
     sql      *sqlstore.SQLStore
     logger   log.Logger
+    settings setting.SearchSettings
 }

-func newSQLDashboardLoader(sql *sqlstore.SQLStore) *sqlDashboardLoader {
-    return &sqlDashboardLoader{sql: sql, logger: log.New("sqlDashboardLoader")}
+func newSQLDashboardLoader(sql *sqlstore.SQLStore, settings setting.SearchSettings) *sqlDashboardLoader {
+    return &sqlDashboardLoader{sql: sql, logger: log.New("sqlDashboardLoader"), settings: settings}
 }

 func (l sqlDashboardLoader) LoadDashboards(ctx context.Context, orgID int64, dashboardUID string) ([]dashboard, error) {

@@ -800,7 +806,7 @@ func (l sqlDashboardLoader) LoadDashboards(ctx context.Context, orgID int64, das
     limit := 1

     if dashboardUID == "" {
-        limit = 200
+        limit = l.settings.DashboardLoadingBatchSize
         dashboards = make([]dashboard, 0, limit+1)

         // Add the root folder ID (does not exist in SQL).
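With dashboardUID empty, limit now comes from dashboard_loading_batch_size instead of the hard-coded 200. The query itself is outside this hunk; the following is a hypothetical sketch of the batch-loading pattern a configurable batch size enables (Row, fetchBatch and loadAll are illustrative stand-ins, not Grafana code):

package main

import "fmt"

type Row struct {
    ID    int64
    Title string
}

// fetchBatch stands in for a query like
// "WHERE id > lastID ORDER BY id LIMIT batchSize" over rows sorted by ID.
func fetchBatch(all []Row, lastID int64, batchSize int) []Row {
    out := make([]Row, 0, batchSize)
    for _, r := range all {
        if r.ID > lastID {
            out = append(out, r)
            if len(out) == batchSize {
                break
            }
        }
    }
    return out
}

// loadAll pages through the table in batches of batchSize, advancing a keyset cursor.
func loadAll(all []Row, batchSize int) []Row {
    var loaded []Row
    lastID := int64(0)
    for {
        batch := fetchBatch(all, lastID, batchSize)
        if len(batch) == 0 {
            break
        }
        loaded = append(loaded, batch...)
        lastID = batch[len(batch)-1].ID
    }
    return loaded
}

func main() {
    rows := []Row{{1, "a"}, {2, "b"}, {3, "c"}, {4, "d"}, {5, "e"}}
    // With a batch size of 2 all five rows are still loaded, just in three queries.
    fmt.Println(len(loadAll(rows, 2)))
}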
@@ -10,6 +10,7 @@ import (
     "github.com/grafana/grafana-plugin-sdk-go/data"
     "github.com/grafana/grafana/pkg/infra/tracing"
     "github.com/grafana/grafana/pkg/services/featuremgmt"
+    "github.com/grafana/grafana/pkg/setting"

     "github.com/grafana/grafana/pkg/infra/log"
     "github.com/grafana/grafana/pkg/services/searchV2/extract"

@@ -62,7 +63,7 @@ func initTestIndexFromDashesExtended(t *testing.T, dashboards []dashboard, exten
     dashboardLoader := &testDashboardLoader{
         dashboards: dashboards,
     }
-    index := newSearchIndex(dashboardLoader, &store.MockEntityEventsService{}, extender, func(ctx context.Context, folderId int64) (string, error) { return "x", nil }, tracing.InitializeTracerForTest(), featuremgmt.WithFeatures())
+    index := newSearchIndex(dashboardLoader, &store.MockEntityEventsService{}, extender, func(ctx context.Context, folderId int64) (string, error) { return "x", nil }, tracing.InitializeTracerForTest(), featuremgmt.WithFeatures(), setting.SearchSettings{})
     require.NotNil(t, index)
     numDashboards, err := index.buildOrgIndex(context.Background(), testOrgID)
     require.NoError(t, err)
@@ -81,12 +81,13 @@ func ProvideService(cfg *setting.Cfg, sql *sqlstore.SQLStore, entityEventStore s
             ac: ac,
         },
         dashboardIndex: newSearchIndex(
-            newSQLDashboardLoader(sql),
+            newSQLDashboardLoader(sql, cfg.Search),
             entityEventStore,
             extender.GetDocumentExtender(),
             newFolderIDLookup(sql),
             tracer,
             features,
+            cfg.Search,
         ),
         logger:   log.New("searchV2"),
         extender: extender,
@@ -455,6 +455,8 @@ type Cfg struct {

     Storage StorageSettings

+    Search SearchSettings
+
     // Access Control
     RBACEnabled         bool
     RBACPermissionCache bool

@@ -1028,6 +1030,7 @@ func (cfg *Cfg) Load(args CommandLineArgs) error {

     cfg.DashboardPreviews = readDashboardPreviewsSettings(iniFile)
     cfg.Storage = readStorageSettings(iniFile)
+    cfg.Search = readSearchSettings(iniFile)

     if VerifyEmailEnabled && !cfg.Smtp.Enabled {
         cfg.Logger.Warn("require_email_validation is enabled but smtp is disabled")
pkg/setting/setting_search.go (new file, 23 lines)
@@ -0,0 +1,23 @@
+package setting
+
+import (
+    "time"
+
+    "gopkg.in/ini.v1"
+)
+
+type SearchSettings struct {
+    FullReindexInterval       time.Duration
+    IndexUpdateInterval       time.Duration
+    DashboardLoadingBatchSize int
+}
+
+func readSearchSettings(iniFile *ini.File) SearchSettings {
+    s := SearchSettings{}
+
+    searchSection := iniFile.Section("search")
+    s.DashboardLoadingBatchSize = searchSection.Key("dashboard_loading_batch_size").MustInt(200)
+    s.FullReindexInterval = searchSection.Key("full_reindex_interval").MustDuration(5 * time.Minute)
+    s.IndexUpdateInterval = searchSection.Key("index_update_interval").MustDuration(10 * time.Second)
+    return s
+}
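The batch size and full-reindex defaults mirror the previously hard-coded values (200 and 5m), while the partial update interval default moves from the hard-coded 5s to 10s. A hypothetical test sketch, not part of this commit, exercising readSearchSettings with and without a [search] override (it assumes placement next to setting_search.go in package setting):

package setting

import (
    "testing"
    "time"

    "github.com/stretchr/testify/require"
    "gopkg.in/ini.v1"
)

// Hypothetical test: checks the documented defaults and an explicit override.
func TestReadSearchSettingsSketch(t *testing.T) {
    empty, err := ini.Load([]byte(""))
    require.NoError(t, err)
    s := readSearchSettings(empty)
    require.Equal(t, 200, s.DashboardLoadingBatchSize)
    require.Equal(t, 5*time.Minute, s.FullReindexInterval)
    require.Equal(t, 10*time.Second, s.IndexUpdateInterval)

    overridden, err := ini.Load([]byte("[search]\ndashboard_loading_batch_size = 50\nfull_reindex_interval = 1h\nindex_update_interval = 30s\n"))
    require.NoError(t, err)
    s = readSearchSettings(overridden)
    require.Equal(t, 50, s.DashboardLoadingBatchSize)
    require.Equal(t, time.Hour, s.FullReindexInterval)
    require.Equal(t, 30*time.Second, s.IndexUpdateInterval)
}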