UsageStats: Move stats collection to separate service (#47381)

* Remove specific stats from usage stats service

* Create statscollector service

* Refactor

* Update and move tests

The tests are mostly equivalent to before, but they have been split
between the two services, and the behavior-driven GoConvey legacy has
been removed to reduce their complexity.

* Collect featuremgmt metrics (copied over from #47407)

I removed the metrics registration from the feature manager in the merge
and re-added it in this commit. It is separated out to make the change
easier to review.
Emil Tullstedt 2022-04-08 12:41:26 +01:00 committed by GitHub
parent 87383b1c8b
commit 3df625e9f4
11 changed files with 964 additions and 783 deletions
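
Before the diff, here is a minimal, self-contained sketch in Go of the wiring described in the commit message, using assumed names: the collector owns the metric-gathering logic and hands the usage stats service a callback, instead of the usage stats service querying everything itself. The MetricsFunc signature, the RegisterMetricsFunc call, and the feature-usage loop mirror code visible in the diff; metricsRegistry, featureUsageProvider, Collector, and the fake types exist only for this sketch.

package main

import (
	"context"
	"fmt"
	"time"
)

// MetricsFunc mirrors the callback signature accepted by
// RegisterMetricsFunc in the diff below.
type MetricsFunc func(ctx context.Context) (map[string]interface{}, error)

// metricsRegistry is a stand-in for the usage stats service dependency;
// only the registration method matters for this sketch.
type metricsRegistry interface {
	RegisterMetricsFunc(fn MetricsFunc)
}

// featureUsageProvider is a stand-in for the feature manager's
// GetUsageStats method that the collector reads in the diff.
type featureUsageProvider interface {
	GetUsageStats(ctx context.Context) map[string]interface{}
}

// Collector approximates the new statscollector.Service: it owns the
// stat-gathering logic and reports through the usage stats service.
type Collector struct {
	features  featureUsageProvider
	startTime time.Time
}

// ProvideCollector registers the collector's collect function with the
// usage stats service, as statscollector.ProvideService does in the diff.
func ProvideCollector(reg metricsRegistry, features featureUsageProvider) *Collector {
	c := &Collector{features: features, startTime: time.Now()}
	reg.RegisterMetricsFunc(c.collect)
	return c
}

// collect builds the metrics map on demand; the database, plugin and
// OAuth lookups done by the real service are omitted here.
func (c *Collector) collect(ctx context.Context) (map[string]interface{}, error) {
	m := map[string]interface{}{
		"stats.uptime": int64(time.Since(c.startTime).Seconds()),
	}
	// Feature-toggle usage stats are folded into the same map, which is
	// the "Collect featuremgmt metrics" part of the commit message.
	for k, v := range c.features.GetUsageStats(ctx) {
		m[k] = v
	}
	return m, nil
}

// fakeRegistry and fakeFeatures are throwaway implementations so the
// sketch runs on its own.
type fakeRegistry struct{ fns []MetricsFunc }

func (r *fakeRegistry) RegisterMetricsFunc(fn MetricsFunc) { r.fns = append(r.fns, fn) }

type fakeFeatures struct{}

func (fakeFeatures) GetUsageStats(context.Context) map[string]interface{} {
	return map[string]interface{}{"stats.features.feature_1.count": 1}
}

func main() {
	reg := &fakeRegistry{}
	ProvideCollector(reg, fakeFeatures{})

	// The usage stats service would call every registered function while
	// assembling a report; invoke the callbacks directly here instead.
	for _, fn := range reg.fns {
		m, err := fn(context.Background())
		if err != nil {
			panic(err)
		}
		fmt.Println(m)
	}
}

The usage stats service's GetUsageReport then gathers whatever the registered callbacks return (gatherMetrics in the diff), so it keeps only registration, report assembly, and the send loop, while database, plugin, OAuth, and feature-toggle lookups move behind the collector.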


@ -2,6 +2,7 @@ package usagestats
import (
"context"
"strings"
"testing"
"github.com/stretchr/testify/require"
@ -29,8 +30,8 @@ func (usm *UsageStatsMock) GetUsageReport(ctx context.Context) (Report, error) {
return Report{Metrics: all}, nil
}
func (usm *UsageStatsMock) ShouldBeReported(_ context.Context, _ string) bool {
return true
func (usm *UsageStatsMock) ShouldBeReported(_ context.Context, s string) bool {
return !strings.HasPrefix(s, "unknown")
}
func (usm *UsageStatsMock) RegisterSendReportCallback(_ SendReportCallbackFunc) {}


@ -2,51 +2,35 @@ package service
import (
"context"
"fmt"
"time"
"github.com/grafana/grafana/pkg/api/routing"
"github.com/grafana/grafana/pkg/infra/kvstore"
"github.com/grafana/grafana/pkg/infra/log"
"github.com/grafana/grafana/pkg/infra/usagestats"
"github.com/grafana/grafana/pkg/login/social"
"github.com/grafana/grafana/pkg/plugins"
"github.com/grafana/grafana/pkg/services/featuremgmt"
"github.com/grafana/grafana/pkg/services/sqlstore"
"github.com/grafana/grafana/pkg/setting"
)
type UsageStats struct {
Cfg *setting.Cfg
SQLStore sqlstore.Store
pluginStore plugins.Store
SocialService social.Service
kvStore *kvstore.NamespacedKVStore
RouteRegister routing.RouteRegister
features *featuremgmt.FeatureManager
pluginStore plugins.Store
log log.Logger
oauthProviders map[string]bool
externalMetrics []usagestats.MetricsFunc
concurrentUserStatsCache memoConcurrentUserStats
startTime time.Time
sendReportCallbacks []usagestats.SendReportCallbackFunc
externalMetrics []usagestats.MetricsFunc
sendReportCallbacks []usagestats.SendReportCallbackFunc
}
func ProvideService(cfg *setting.Cfg, sqlStore *sqlstore.SQLStore, pluginStore plugins.Store,
socialService social.Service, kvStore kvstore.KVStore, routeRegister routing.RouteRegister, features *featuremgmt.FeatureManager,
) *UsageStats {
func ProvideService(cfg *setting.Cfg, pluginStore plugins.Store, kvStore kvstore.KVStore, routeRegister routing.RouteRegister) *UsageStats {
s := &UsageStats{
Cfg: cfg,
SQLStore: sqlStore,
features: features,
oauthProviders: socialService.GetOAuthProviders(),
RouteRegister: routeRegister,
pluginStore: pluginStore,
kvStore: kvstore.WithNamespace(kvStore, 0, "infra.usagestats"),
log: log.New("infra.usagestats"),
startTime: time.Now(),
Cfg: cfg,
RouteRegister: routeRegister,
pluginStore: pluginStore,
kvStore: kvstore.WithNamespace(kvStore, 0, "infra.usagestats"),
log: log.New("infra.usagestats"),
}
s.registerAPIEndpoints()
@ -55,8 +39,6 @@ func ProvideService(cfg *setting.Cfg, sqlStore *sqlstore.SQLStore, pluginStore p
}
func (uss *UsageStats) Run(ctx context.Context) error {
uss.updateTotalStats(ctx)
// try to load last sent time from kv store
lastSent := time.Now()
if val, ok, err := uss.kvStore.Get(ctx, "last_sent"); err != nil {
@ -77,10 +59,8 @@ func (uss *UsageStats) Run(ctx context.Context) error {
}
sendReportTicker := time.NewTicker(nextSendInterval)
updateStatsTicker := time.NewTicker(time.Minute * 30)
defer sendReportTicker.Stop()
defer updateStatsTicker.Stop()
for {
select {
@ -102,54 +82,21 @@ func (uss *UsageStats) Run(ctx context.Context) error {
for _, callback := range uss.sendReportCallbacks {
callback()
}
case <-updateStatsTicker.C:
uss.updateTotalStats(ctx)
case <-ctx.Done():
return ctx.Err()
}
}
}
type memoConcurrentUserStats struct {
stats *concurrentUsersStats
memoized time.Time
}
const concurrentUserStatsCacheLifetime = time.Hour
func (uss *UsageStats) GetConcurrentUsersStats(ctx context.Context) (*concurrentUsersStats, error) {
memoizationPeriod := time.Now().Add(-concurrentUserStatsCacheLifetime)
if !uss.concurrentUserStatsCache.memoized.Before(memoizationPeriod) {
return uss.concurrentUserStatsCache.stats, nil
}
uss.concurrentUserStatsCache.stats = &concurrentUsersStats{}
err := uss.SQLStore.WithDbSession(ctx, func(sess *sqlstore.DBSession) error {
// Retrieves concurrent users stats as a histogram. Buckets are accumulative and upper bound is inclusive.
rawSQL := `
SELECT
COUNT(CASE WHEN tokens <= 3 THEN 1 END) AS bucket_le_3,
COUNT(CASE WHEN tokens <= 6 THEN 1 END) AS bucket_le_6,
COUNT(CASE WHEN tokens <= 9 THEN 1 END) AS bucket_le_9,
COUNT(CASE WHEN tokens <= 12 THEN 1 END) AS bucket_le_12,
COUNT(CASE WHEN tokens <= 15 THEN 1 END) AS bucket_le_15,
COUNT(1) AS bucket_le_inf
FROM (select count(1) as tokens from user_auth_token group by user_id) uat;`
_, err := sess.SQL(rawSQL).Get(uss.concurrentUserStatsCache.stats)
if err != nil {
return err
}
return nil
})
if err != nil {
return nil, fmt.Errorf("failed to get concurrent users stats from database: %w", err)
}
uss.concurrentUserStatsCache.memoized = time.Now()
return uss.concurrentUserStatsCache.stats, nil
}
func (uss *UsageStats) RegisterSendReportCallback(c usagestats.SendReportCallbackFunc) {
uss.sendReportCallbacks = append(uss.sendReportCallbacks, c)
}
func (uss *UsageStats) ShouldBeReported(ctx context.Context, dsType string) bool {
ds, exists := uss.pluginStore.Plugin(ctx, dsType)
if !exists {
return false
}
return ds.Signature.IsValid() || ds.Signature.IsInternal()
}


@ -1,10 +0,0 @@
package service
type concurrentUsersStats struct {
BucketLE3 int32 `xorm:"bucket_le_3"`
BucketLE6 int32 `xorm:"bucket_le_6"`
BucketLE9 int32 `xorm:"bucket_le_9"`
BucketLE12 int32 `xorm:"bucket_le_12"`
BucketLE15 int32 `xorm:"bucket_le_15"`
BucketLEInf int32 `xorm:"bucket_le_inf"`
}


@ -4,17 +4,14 @@ import (
"bytes"
"context"
"encoding/json"
"fmt"
"net/http"
"runtime"
"strings"
"time"
"github.com/google/uuid"
"github.com/grafana/grafana/pkg/infra/metrics"
"github.com/grafana/grafana/pkg/infra/usagestats"
"github.com/grafana/grafana/pkg/models"
"github.com/grafana/grafana/pkg/plugins"
)
var usageStatsURL = "https://stats.grafana.org/grafana-usage-report"
@ -38,64 +35,7 @@ func (uss *UsageStats) GetUsageReport(ctx context.Context) (usagestats.Report, e
UsageStatsId: uss.GetUsageStatsId(ctx),
}
statsQuery := models.GetSystemStatsQuery{}
if err := uss.SQLStore.GetSystemStats(ctx, &statsQuery); err != nil {
uss.log.Error("Failed to get system stats", "error", err)
return report, err
}
metrics["stats.dashboards.count"] = statsQuery.Result.Dashboards
metrics["stats.users.count"] = statsQuery.Result.Users
metrics["stats.admins.count"] = statsQuery.Result.Admins
metrics["stats.editors.count"] = statsQuery.Result.Editors
metrics["stats.viewers.count"] = statsQuery.Result.Viewers
metrics["stats.orgs.count"] = statsQuery.Result.Orgs
metrics["stats.playlist.count"] = statsQuery.Result.Playlists
metrics["stats.plugins.apps.count"] = uss.appCount(ctx)
metrics["stats.plugins.panels.count"] = uss.panelCount(ctx)
metrics["stats.plugins.datasources.count"] = uss.dataSourceCount(ctx)
metrics["stats.alerts.count"] = statsQuery.Result.Alerts
metrics["stats.active_users.count"] = statsQuery.Result.ActiveUsers
metrics["stats.active_admins.count"] = statsQuery.Result.ActiveAdmins
metrics["stats.active_editors.count"] = statsQuery.Result.ActiveEditors
metrics["stats.active_viewers.count"] = statsQuery.Result.ActiveViewers
metrics["stats.active_sessions.count"] = statsQuery.Result.ActiveSessions
metrics["stats.monthly_active_users.count"] = statsQuery.Result.MonthlyActiveUsers
metrics["stats.daily_active_users.count"] = statsQuery.Result.DailyActiveUsers
metrics["stats.daily_active_admins.count"] = statsQuery.Result.DailyActiveAdmins
metrics["stats.daily_active_editors.count"] = statsQuery.Result.DailyActiveEditors
metrics["stats.daily_active_viewers.count"] = statsQuery.Result.DailyActiveViewers
metrics["stats.daily_active_sessions.count"] = statsQuery.Result.DailyActiveSessions
metrics["stats.datasources.count"] = statsQuery.Result.Datasources
metrics["stats.stars.count"] = statsQuery.Result.Stars
metrics["stats.folders.count"] = statsQuery.Result.Folders
metrics["stats.dashboard_permissions.count"] = statsQuery.Result.DashboardPermissions
metrics["stats.folder_permissions.count"] = statsQuery.Result.FolderPermissions
metrics["stats.provisioned_dashboards.count"] = statsQuery.Result.ProvisionedDashboards
metrics["stats.snapshots.count"] = statsQuery.Result.Snapshots
metrics["stats.teams.count"] = statsQuery.Result.Teams
metrics["stats.total_auth_token.count"] = statsQuery.Result.AuthTokens
metrics["stats.dashboard_versions.count"] = statsQuery.Result.DashboardVersions
metrics["stats.annotations.count"] = statsQuery.Result.Annotations
metrics["stats.alert_rules.count"] = statsQuery.Result.AlertRules
metrics["stats.library_panels.count"] = statsQuery.Result.LibraryPanels
metrics["stats.library_variables.count"] = statsQuery.Result.LibraryVariables
metrics["stats.dashboards_viewers_can_edit.count"] = statsQuery.Result.DashboardsViewersCanEdit
metrics["stats.dashboards_viewers_can_admin.count"] = statsQuery.Result.DashboardsViewersCanAdmin
metrics["stats.folders_viewers_can_edit.count"] = statsQuery.Result.FoldersViewersCanEdit
metrics["stats.folders_viewers_can_admin.count"] = statsQuery.Result.FoldersViewersCanAdmin
metrics["stats.api_keys.count"] = statsQuery.Result.APIKeys
ossEditionCount := 1
enterpriseEditionCount := 0
if uss.Cfg.IsEnterprise {
enterpriseEditionCount = 1
ossEditionCount = 0
}
metrics["stats.edition.oss.count"] = ossEditionCount
metrics["stats.edition.enterprise.count"] = enterpriseEditionCount
uss.registerExternalMetrics(ctx, metrics)
uss.gatherMetrics(ctx, metrics)
// must run after registration of external metrics
if v, ok := metrics["stats.valid_license.count"]; ok {
@ -104,141 +44,10 @@ func (uss *UsageStats) GetUsageReport(ctx context.Context) (usagestats.Report, e
metrics["stats.valid_license.count"] = 0
}
userCount := statsQuery.Result.Users
avgAuthTokensPerUser := statsQuery.Result.AuthTokens
if userCount != 0 {
avgAuthTokensPerUser /= userCount
}
metrics["stats.avg_auth_token_per_user.count"] = avgAuthTokensPerUser
dsStats := models.GetDataSourceStatsQuery{}
if err := uss.SQLStore.GetDataSourceStats(ctx, &dsStats); err != nil {
uss.log.Error("Failed to get datasource stats", "error", err)
return report, err
}
// send counters for each data source
// but ignore any custom data sources
// as sending that name could be sensitive information
dsOtherCount := 0
for _, dsStat := range dsStats.Result {
if uss.ShouldBeReported(ctx, dsStat.Type) {
metrics["stats.ds."+dsStat.Type+".count"] = dsStat.Count
} else {
dsOtherCount += dsStat.Count
}
}
metrics["stats.ds.other.count"] = dsOtherCount
esDataSourcesQuery := models.GetDataSourcesByTypeQuery{Type: models.DS_ES}
if err := uss.SQLStore.GetDataSourcesByType(ctx, &esDataSourcesQuery); err != nil {
uss.log.Error("Failed to get elasticsearch json data", "error", err)
return report, err
}
for _, data := range esDataSourcesQuery.Result {
esVersion, err := data.JsonData.Get("esVersion").Int()
if err != nil {
continue
}
statName := fmt.Sprintf("stats.ds.elasticsearch.v%d.count", esVersion)
count, _ := metrics[statName].(int64)
metrics[statName] = count + 1
}
metrics["stats.packaging."+uss.Cfg.Packaging+".count"] = 1
metrics["stats.distributor."+uss.Cfg.ReportingDistributor+".count"] = 1
// fetch datasource access stats
dsAccessStats := models.GetDataSourceAccessStatsQuery{}
if err := uss.SQLStore.GetDataSourceAccessStats(ctx, &dsAccessStats); err != nil {
uss.log.Error("Failed to get datasource access stats", "error", err)
return report, err
}
// send access counters for each data source
// but ignore any custom data sources
// as sending that name could be sensitive information
dsAccessOtherCount := make(map[string]int64)
for _, dsAccessStat := range dsAccessStats.Result {
if dsAccessStat.Access == "" {
continue
}
access := strings.ToLower(dsAccessStat.Access)
if uss.ShouldBeReported(ctx, dsAccessStat.Type) {
metrics["stats.ds_access."+dsAccessStat.Type+"."+access+".count"] = dsAccessStat.Count
} else {
old := dsAccessOtherCount[access]
dsAccessOtherCount[access] = old + dsAccessStat.Count
}
}
for access, count := range dsAccessOtherCount {
metrics["stats.ds_access.other."+access+".count"] = count
}
// get stats about alert notifier usage
anStats := models.GetAlertNotifierUsageStatsQuery{}
if err := uss.SQLStore.GetAlertNotifiersUsageStats(ctx, &anStats); err != nil {
uss.log.Error("Failed to get alert notification stats", "error", err)
return report, err
}
for _, stats := range anStats.Result {
metrics["stats.alert_notifiers."+stats.Type+".count"] = stats.Count
}
// Add stats about auth configuration
authTypes := map[string]bool{}
authTypes["anonymous"] = uss.Cfg.AnonymousEnabled
authTypes["basic_auth"] = uss.Cfg.BasicAuthEnabled
authTypes["ldap"] = uss.Cfg.LDAPEnabled
authTypes["auth_proxy"] = uss.Cfg.AuthProxyEnabled
for provider, enabled := range uss.oauthProviders {
authTypes["oauth_"+provider] = enabled
}
for authType, enabled := range authTypes {
enabledValue := 0
if enabled {
enabledValue = 1
}
metrics["stats.auth_enabled."+authType+".count"] = enabledValue
}
// Get concurrent users stats as histogram
concurrentUsersStats, err := uss.GetConcurrentUsersStats(ctx)
if err != nil {
uss.log.Error("Failed to get concurrent users stats", "error", err)
return report, err
}
// Histogram is cumulative and metric name has a postfix of le_"<upper inclusive bound>"
metrics["stats.auth_token_per_user_le_3"] = concurrentUsersStats.BucketLE3
metrics["stats.auth_token_per_user_le_6"] = concurrentUsersStats.BucketLE6
metrics["stats.auth_token_per_user_le_9"] = concurrentUsersStats.BucketLE9
metrics["stats.auth_token_per_user_le_12"] = concurrentUsersStats.BucketLE12
metrics["stats.auth_token_per_user_le_15"] = concurrentUsersStats.BucketLE15
metrics["stats.auth_token_per_user_le_inf"] = concurrentUsersStats.BucketLEInf
metrics["stats.uptime"] = int64(time.Since(uss.startTime).Seconds())
featureUsageStats := uss.features.GetUsageStats(ctx)
for name, val := range featureUsageStats {
metrics[name] = val
}
return report, nil
}
func (uss *UsageStats) registerExternalMetrics(ctx context.Context, metrics map[string]interface{}) {
func (uss *UsageStats) gatherMetrics(ctx context.Context, metrics map[string]interface{}) {
for _, fn := range uss.externalMetrics {
fnMetrics, err := fn(ctx)
if err != nil {
@ -295,55 +104,6 @@ var sendUsageStats = func(uss *UsageStats, data *bytes.Buffer) {
}()
}
func (uss *UsageStats) updateTotalStats(ctx context.Context) {
if !uss.Cfg.MetricsEndpointEnabled || uss.Cfg.MetricsEndpointDisableTotalStats {
return
}
statsQuery := models.GetSystemStatsQuery{}
if err := uss.SQLStore.GetSystemStats(ctx, &statsQuery); err != nil {
uss.log.Error("Failed to get system stats", "error", err)
return
}
metrics.MStatTotalDashboards.Set(float64(statsQuery.Result.Dashboards))
metrics.MStatTotalFolders.Set(float64(statsQuery.Result.Folders))
metrics.MStatTotalUsers.Set(float64(statsQuery.Result.Users))
metrics.MStatActiveUsers.Set(float64(statsQuery.Result.ActiveUsers))
metrics.MStatTotalPlaylists.Set(float64(statsQuery.Result.Playlists))
metrics.MStatTotalOrgs.Set(float64(statsQuery.Result.Orgs))
metrics.StatsTotalViewers.Set(float64(statsQuery.Result.Viewers))
metrics.StatsTotalActiveViewers.Set(float64(statsQuery.Result.ActiveViewers))
metrics.StatsTotalEditors.Set(float64(statsQuery.Result.Editors))
metrics.StatsTotalActiveEditors.Set(float64(statsQuery.Result.ActiveEditors))
metrics.StatsTotalAdmins.Set(float64(statsQuery.Result.Admins))
metrics.StatsTotalActiveAdmins.Set(float64(statsQuery.Result.ActiveAdmins))
metrics.StatsTotalDashboardVersions.Set(float64(statsQuery.Result.DashboardVersions))
metrics.StatsTotalAnnotations.Set(float64(statsQuery.Result.Annotations))
metrics.StatsTotalAlertRules.Set(float64(statsQuery.Result.AlertRules))
metrics.StatsTotalLibraryPanels.Set(float64(statsQuery.Result.LibraryPanels))
metrics.StatsTotalLibraryVariables.Set(float64(statsQuery.Result.LibraryVariables))
dsStats := models.GetDataSourceStatsQuery{}
if err := uss.SQLStore.GetDataSourceStats(ctx, &dsStats); err != nil {
uss.log.Error("Failed to get datasource stats", "error", err)
return
}
for _, dsStat := range dsStats.Result {
metrics.StatsTotalDataSources.WithLabelValues(dsStat.Type).Set(float64(dsStat.Count))
}
}
func (uss *UsageStats) ShouldBeReported(ctx context.Context, dsType string) bool {
ds, exists := uss.pluginStore.Plugin(ctx, dsType)
if !exists {
return false
}
return ds.Signature.IsValid() || ds.Signature.IsInternal()
}
func (uss *UsageStats) GetUsageStatsId(ctx context.Context) string {
anonId, ok, err := uss.kvStore.Get(ctx, "anonymous_id")
if err != nil {
@ -371,15 +131,3 @@ func (uss *UsageStats) GetUsageStatsId(ctx context.Context) string {
return anonId
}
func (uss *UsageStats) appCount(ctx context.Context) int {
return len(uss.pluginStore.Plugins(ctx, plugins.App))
}
func (uss *UsageStats) panelCount(ctx context.Context) int {
return len(uss.pluginStore.Plugins(ctx, plugins.Panel))
}
func (uss *UsageStats) dataSourceCount(ctx context.Context) int {
return len(uss.pluginStore.Plugins(ctx, plugins.DataSource))
}


@ -3,6 +3,7 @@ package service
import (
"bytes"
"context"
"encoding/json"
"errors"
"io/ioutil"
"net/http"
@ -11,19 +12,16 @@ import (
"testing"
"time"
"github.com/stretchr/testify/assert"
"github.com/stretchr/testify/require"
"github.com/grafana/grafana/pkg/api/routing"
"github.com/grafana/grafana/pkg/components/simplejson"
"github.com/grafana/grafana/pkg/infra/kvstore"
"github.com/grafana/grafana/pkg/infra/log"
"github.com/grafana/grafana/pkg/infra/usagestats"
"github.com/grafana/grafana/pkg/models"
"github.com/grafana/grafana/pkg/plugins"
"github.com/grafana/grafana/pkg/services/featuremgmt"
"github.com/grafana/grafana/pkg/services/sqlstore"
"github.com/grafana/grafana/pkg/services/sqlstore/mockstore"
"github.com/grafana/grafana/pkg/setting"
"github.com/stretchr/testify/assert"
"github.com/stretchr/testify/require"
)
// This is to ensure that the interface contract is held by the implementation
@ -38,419 +36,163 @@ func Test_InterfaceContractValidity(t *testing.T) {
}
func TestMetrics(t *testing.T) {
t.Run("When sending usage stats", func(t *testing.T) {
sqlStore := mockstore.NewSQLStoreMock()
uss := createService(t, setting.Cfg{}, sqlStore, false)
const metricName = "stats.test_metric.count"
sqlStore.ExpectedSystemStats = &models.SystemStats{
Dashboards: 1,
Datasources: 2,
Users: 3,
Admins: 31,
Editors: 32,
Viewers: 33,
ActiveUsers: 4,
ActiveAdmins: 21,
ActiveEditors: 22,
ActiveViewers: 23,
ActiveSessions: 24,
DailyActiveUsers: 25,
DailyActiveAdmins: 26,
DailyActiveEditors: 27,
DailyActiveViewers: 28,
DailyActiveSessions: 29,
Orgs: 5,
Playlists: 6,
Alerts: 7,
Stars: 8,
Folders: 9,
DashboardPermissions: 10,
FolderPermissions: 11,
ProvisionedDashboards: 12,
Snapshots: 13,
Teams: 14,
AuthTokens: 15,
DashboardVersions: 16,
Annotations: 17,
AlertRules: 18,
LibraryPanels: 19,
LibraryVariables: 20,
DashboardsViewersCanAdmin: 3,
DashboardsViewersCanEdit: 2,
FoldersViewersCanAdmin: 1,
FoldersViewersCanEdit: 5,
APIKeys: 2,
sqlStore := mockstore.NewSQLStoreMock()
uss := createService(t, setting.Cfg{}, sqlStore, false)
uss.RegisterMetricsFunc(func(context.Context) (map[string]interface{}, error) {
return map[string]interface{}{metricName: 1}, nil
})
err := uss.sendUsageStats(context.Background())
require.NoError(t, err)
t.Run("Given reporting not enabled and sending usage stats", func(t *testing.T) {
origSendUsageStats := sendUsageStats
t.Cleanup(func() {
sendUsageStats = origSendUsageStats
})
statsSent := false
sendUsageStats = func(uss *UsageStats, b *bytes.Buffer) {
statsSent = true
}
setupSomeDataSourcePlugins(t, uss)
uss.Cfg.ReportingEnabled = false
err := uss.sendUsageStats(context.Background())
require.NoError(t, err)
sqlStore.ExpectedDataSourceStats = []*models.DataSourceStats{
{
Type: models.DS_ES,
Count: 9,
},
{
Type: models.DS_PROMETHEUS,
Count: 10,
},
{
Type: "unknown_ds",
Count: 11,
},
{
Type: "unknown_ds2",
Count: 12,
},
require.False(t, statsSent)
})
t.Run("Given reporting enabled, stats should be gathered and sent to HTTP endpoint", func(t *testing.T) {
origCfg := uss.Cfg
t.Cleanup(func() {
uss.Cfg = origCfg
})
uss.Cfg = &setting.Cfg{
ReportingEnabled: true,
BuildVersion: "5.0.0",
AnonymousEnabled: true,
BasicAuthEnabled: true,
LDAPEnabled: true,
AuthProxyEnabled: true,
Packaging: "deb",
ReportingDistributor: "hosted-grafana",
}
sqlStore.ExpectedDataSources = []*models.DataSource{
{
JsonData: simplejson.NewFromAny(map[string]interface{}{
"esVersion": 2,
}),
},
{
JsonData: simplejson.NewFromAny(map[string]interface{}{
"esVersion": 2,
}),
},
{
JsonData: simplejson.NewFromAny(map[string]interface{}{
"esVersion": 70,
}),
},
}
sqlStore.ExpectedDataSourcesAccessStats = []*models.DataSourceAccessStats{
{
Type: models.DS_ES,
Access: "direct",
Count: 1,
},
{
Type: models.DS_ES,
Access: "proxy",
Count: 2,
},
{
Type: models.DS_PROMETHEUS,
Access: "proxy",
Count: 3,
},
{
Type: "unknown_ds",
Access: "proxy",
Count: 4,
},
{
Type: "unknown_ds2",
Access: "",
Count: 5,
},
{
Type: "unknown_ds3",
Access: "direct",
Count: 6,
},
{
Type: "unknown_ds4",
Access: "direct",
Count: 7,
},
{
Type: "unknown_ds5",
Access: "proxy",
Count: 8,
},
}
sqlStore.ExpectedNotifierUsageStats = []*models.NotifierUsageStats{
{
Type: "slack",
Count: 1,
},
{
Type: "webhook",
Count: 2,
},
}
uss.SQLStore = sqlStore
createConcurrentTokens(t, uss.SQLStore)
uss.oauthProviders = map[string]bool{
"github": true,
"gitlab": true,
"azuread": true,
"google": true,
"generic_oauth": true,
"grafana_com": true,
}
ch := make(chan httpResp)
ticker := time.NewTicker(2 * time.Second)
ts := httptest.NewServer(http.HandlerFunc(func(rw http.ResponseWriter, r *http.Request) {
buf, err := ioutil.ReadAll(r.Body)
if err != nil {
t.Logf("Fake HTTP handler received an error: %s", err.Error())
ch <- httpResp{
err: err,
}
return
}
require.NoError(t, err, "Failed to read response body, err=%v", err)
t.Logf("Fake HTTP handler received a response")
ch <- httpResp{
responseBuffer: bytes.NewBuffer(buf),
req: r,
}
}))
t.Cleanup(ts.Close)
t.Cleanup(func() {
close(ch)
})
usageStatsURL = ts.URL
err := uss.sendUsageStats(context.Background())
require.NoError(t, err)
t.Run("Given reporting not enabled and sending usage stats", func(t *testing.T) {
origSendUsageStats := sendUsageStats
t.Cleanup(func() {
sendUsageStats = origSendUsageStats
})
statsSent := false
sendUsageStats = func(uss *UsageStats, b *bytes.Buffer) {
statsSent = true
}
// Wait for fake HTTP server to receive a request
var resp httpResp
select {
case resp = <-ch:
require.NoError(t, resp.err, "Fake server experienced an error")
case <-ticker.C:
t.Fatalf("Timed out waiting for HTTP request")
}
uss.Cfg.ReportingEnabled = false
err := uss.sendUsageStats(context.Background())
require.NoError(t, err)
t.Logf("Received response from fake HTTP server: %+v\n", resp)
require.False(t, statsSent)
})
assert.NotNil(t, resp.req)
t.Run("Given reporting enabled, stats should be gathered and sent to HTTP endpoint", func(t *testing.T) {
origCfg := uss.Cfg
t.Cleanup(func() {
uss.Cfg = origCfg
})
uss.Cfg = &setting.Cfg{
ReportingEnabled: true,
BuildVersion: "5.0.0",
AnonymousEnabled: true,
BasicAuthEnabled: true,
LDAPEnabled: true,
AuthProxyEnabled: true,
Packaging: "deb",
ReportingDistributor: "hosted-grafana",
}
assert.Equal(t, http.MethodPost, resp.req.Method)
assert.Equal(t, "application/json", resp.req.Header.Get("Content-Type"))
ch := make(chan httpResp)
ticker := time.NewTicker(2 * time.Second)
ts := httptest.NewServer(http.HandlerFunc(func(rw http.ResponseWriter, r *http.Request) {
buf, err := ioutil.ReadAll(r.Body)
if err != nil {
t.Logf("Fake HTTP handler received an error: %s", err.Error())
ch <- httpResp{
err: err,
}
return
}
require.NoError(t, err, "Failed to read response body, err=%v", err)
t.Logf("Fake HTTP handler received a response")
ch <- httpResp{
responseBuffer: bytes.NewBuffer(buf),
req: r,
}
}))
t.Cleanup(ts.Close)
t.Cleanup(func() {
close(ch)
})
usageStatsURL = ts.URL
require.NotNil(t, resp.responseBuffer)
err := uss.sendUsageStats(context.Background())
require.NoError(t, err)
j := make(map[string]interface{})
err = json.Unmarshal(resp.responseBuffer.Bytes(), &j)
require.NoError(t, err)
// Wait for fake HTTP server to receive a request
var resp httpResp
select {
case resp = <-ch:
require.NoError(t, resp.err, "Fake server experienced an error")
case <-ticker.C:
t.Fatalf("Timed out waiting for HTTP request")
}
assert.Equal(t, "5_0_0", j["version"])
assert.Equal(t, runtime.GOOS, j["os"])
assert.Equal(t, runtime.GOARCH, j["arch"])
t.Logf("Received response from fake HTTP server: %+v\n", resp)
usageId := uss.GetUsageStatsId(context.Background())
assert.NotEmpty(t, usageId)
assert.NotNil(t, resp.req)
metrics, ok := j["metrics"].(map[string]interface{})
require.True(t, ok)
assert.EqualValues(t, 1, metrics[metricName])
})
}
assert.Equal(t, http.MethodPost, resp.req.Method)
assert.Equal(t, "application/json", resp.req.Header.Get("Content-Type"))
func TestGetUsageReport_IncludesMetrics(t *testing.T) {
sqlStore := mockstore.NewSQLStoreMock()
uss := createService(t, setting.Cfg{}, sqlStore, true)
metricName := "stats.test_metric.count"
require.NotNil(t, resp.responseBuffer)
j, err := simplejson.NewFromReader(resp.responseBuffer)
require.NoError(t, err)
assert.Equal(t, "5_0_0", j.Get("version").MustString())
assert.Equal(t, runtime.GOOS, j.Get("os").MustString())
assert.Equal(t, runtime.GOARCH, j.Get("arch").MustString())
usageId := uss.GetUsageStatsId(context.Background())
assert.NotEmpty(t, usageId)
metrics := j.Get("metrics")
assert.Equal(t, 15, metrics.Get("stats.total_auth_token.count").MustInt())
assert.Equal(t, 2, metrics.Get("stats.api_keys.count").MustInt())
assert.Equal(t, 5, metrics.Get("stats.avg_auth_token_per_user.count").MustInt())
assert.Equal(t, 16, metrics.Get("stats.dashboard_versions.count").MustInt())
assert.Equal(t, 17, metrics.Get("stats.annotations.count").MustInt())
assert.Equal(t, 18, metrics.Get("stats.alert_rules.count").MustInt())
assert.Equal(t, 19, metrics.Get("stats.library_panels.count").MustInt())
assert.Equal(t, 20, metrics.Get("stats.library_variables.count").MustInt())
assert.Equal(t, 0, metrics.Get("stats.live_users.count").MustInt())
assert.Equal(t, 0, metrics.Get("stats.live_clients.count").MustInt())
assert.Equal(t, 9, metrics.Get("stats.ds."+models.DS_ES+".count").MustInt())
assert.Equal(t, 10, metrics.Get("stats.ds."+models.DS_PROMETHEUS+".count").MustInt())
assert.Equal(t, 11+12, metrics.Get("stats.ds.other.count").MustInt())
assert.Equal(t, 1, metrics.Get("stats.ds_access."+models.DS_ES+".direct.count").MustInt())
assert.Equal(t, 2, metrics.Get("stats.ds_access."+models.DS_ES+".proxy.count").MustInt())
assert.Equal(t, 3, metrics.Get("stats.ds_access."+models.DS_PROMETHEUS+".proxy.count").MustInt())
assert.Equal(t, 6+7, metrics.Get("stats.ds_access.other.direct.count").MustInt())
assert.Equal(t, 4+8, metrics.Get("stats.ds_access.other.proxy.count").MustInt())
assert.Equal(t, 1, metrics.Get("stats.alert_notifiers.slack.count").MustInt())
assert.Equal(t, 2, metrics.Get("stats.alert_notifiers.webhook.count").MustInt())
assert.Equal(t, 1, metrics.Get("stats.auth_enabled.anonymous.count").MustInt())
assert.Equal(t, 1, metrics.Get("stats.auth_enabled.basic_auth.count").MustInt())
assert.Equal(t, 1, metrics.Get("stats.auth_enabled.ldap.count").MustInt())
assert.Equal(t, 1, metrics.Get("stats.auth_enabled.auth_proxy.count").MustInt())
assert.Equal(t, 1, metrics.Get("stats.auth_enabled.oauth_github.count").MustInt())
assert.Equal(t, 1, metrics.Get("stats.auth_enabled.oauth_gitlab.count").MustInt())
assert.Equal(t, 1, metrics.Get("stats.auth_enabled.oauth_google.count").MustInt())
assert.Equal(t, 1, metrics.Get("stats.auth_enabled.oauth_azuread.count").MustInt())
assert.Equal(t, 1, metrics.Get("stats.auth_enabled.oauth_generic_oauth.count").MustInt())
assert.Equal(t, 1, metrics.Get("stats.auth_enabled.oauth_grafana_com.count").MustInt())
assert.Equal(t, 1, metrics.Get("stats.packaging.deb.count").MustInt())
assert.Equal(t, 1, metrics.Get("stats.distributor.hosted-grafana.count").MustInt())
assert.LessOrEqual(t, 60, metrics.Get("stats.uptime").MustInt())
assert.Greater(t, 70, metrics.Get("stats.uptime").MustInt())
})
uss.RegisterMetricsFunc(func(context.Context) (map[string]interface{}, error) {
return map[string]interface{}{metricName: 1}, nil
})
t.Run("When updating total stats", func(t *testing.T) {
sqlStore := mockstore.NewSQLStoreMock()
uss := createService(t, setting.Cfg{}, sqlStore, false)
uss.Cfg.MetricsEndpointEnabled = true
uss.Cfg.MetricsEndpointDisableTotalStats = false
report, err := uss.GetUsageReport(context.Background())
require.NoError(t, err, "Expected no error")
sqlStore.ExpectedSystemStats = &models.SystemStats{}
metric := report.Metrics[metricName]
assert.Equal(t, 1, metric)
}
t.Run("When metrics is disabled and total stats is enabled, stats should not be updated", func(t *testing.T) {
uss.Cfg.MetricsEndpointEnabled = false
uss.Cfg.MetricsEndpointDisableTotalStats = false
uss.updateTotalStats(context.Background())
})
func TestRegisterMetrics(t *testing.T) {
const goodMetricName = "stats.test_external_metric.count"
t.Run("When metrics is enabled and total stats is disabled, stats should not be updated", func(t *testing.T) {
uss.Cfg.MetricsEndpointEnabled = true
uss.Cfg.MetricsEndpointDisableTotalStats = true
sqlStore := mockstore.NewSQLStoreMock()
uss := createService(t, setting.Cfg{}, sqlStore, false)
metrics := map[string]interface{}{"stats.test_metric.count": 1, "stats.test_metric_second.count": 2}
uss.updateTotalStats(context.Background())
})
t.Run("When metrics is disabled and total stats is disabled, stats should not be updated", func(t *testing.T) {
uss.Cfg.MetricsEndpointEnabled = false
uss.Cfg.MetricsEndpointDisableTotalStats = true
uss.updateTotalStats(context.Background())
})
t.Run("When metrics is enabled and total stats is enabled, stats should be updated", func(t *testing.T) {
uss.Cfg.MetricsEndpointEnabled = true
uss.Cfg.MetricsEndpointDisableTotalStats = false
uss.updateTotalStats(context.Background())
})
uss.RegisterMetricsFunc(func(context.Context) (map[string]interface{}, error) {
return map[string]interface{}{goodMetricName: 1}, nil
})
t.Run("When registering a metric", func(t *testing.T) {
sqlStore := mockstore.NewSQLStoreMock()
uss := createService(t, setting.Cfg{}, sqlStore, false)
metricName := "stats.test_metric.count"
{
extMetrics, err := uss.externalMetrics[0](context.Background())
require.NoError(t, err)
assert.Equal(t, map[string]interface{}{goodMetricName: 1}, extMetrics)
}
t.Run("Adds a new metric to the external metrics", func(t *testing.T) {
uss.RegisterMetricsFunc(func(context.Context) (map[string]interface{}, error) {
return map[string]interface{}{metricName: 1}, nil
})
uss.gatherMetrics(context.Background(), metrics)
assert.Equal(t, 1, metrics[goodMetricName])
metrics, err := uss.externalMetrics[0](context.Background())
require.NoError(t, err)
assert.Equal(t, map[string]interface{}{metricName: 1}, metrics)
})
})
t.Run("When getting usage report", func(t *testing.T) {
sqlStore := mockstore.NewSQLStoreMock()
uss := createService(t, setting.Cfg{}, sqlStore, true)
metricName := "stats.test_metric.count"
createConcurrentTokens(t, uss.SQLStore)
t.Run("Should include metrics for concurrent users", func(t *testing.T) {
report, err := uss.GetUsageReport(context.Background())
require.NoError(t, err)
assert.Equal(t, int32(1), report.Metrics["stats.auth_token_per_user_le_3"])
assert.Equal(t, int32(2), report.Metrics["stats.auth_token_per_user_le_6"])
assert.Equal(t, int32(3), report.Metrics["stats.auth_token_per_user_le_9"])
assert.Equal(t, int32(4), report.Metrics["stats.auth_token_per_user_le_12"])
assert.Equal(t, int32(5), report.Metrics["stats.auth_token_per_user_le_15"])
assert.Equal(t, int32(6), report.Metrics["stats.auth_token_per_user_le_inf"])
})
t.Run("Should include external metrics", func(t *testing.T) {
uss.RegisterMetricsFunc(func(context.Context) (map[string]interface{}, error) {
return map[string]interface{}{metricName: 1}, nil
})
report, err := uss.GetUsageReport(context.Background())
require.NoError(t, err, "Expected no error")
metric := report.Metrics[metricName]
assert.Equal(t, 1, metric)
})
t.Run("Should include feature usage stats", func(t *testing.T) {
report, err := uss.GetUsageReport(context.Background())
require.NoError(t, err, "Expected no error")
assert.Equal(t, 1, report.Metrics["stats.features.feature_1.count"])
assert.Equal(t, 1, report.Metrics["stats.features.feature_2.count"])
})
})
t.Run("When registering external metrics", func(t *testing.T) {
sqlStore := mockstore.NewSQLStoreMock()
uss := createService(t, setting.Cfg{}, sqlStore, false)
metrics := map[string]interface{}{"stats.test_metric.count": 1, "stats.test_metric_second.count": 2}
extMetricName := "stats.test_external_metric.count"
t.Run("do not add metrics that return an error when fetched", func(t *testing.T) {
const badMetricName = "stats.test_external_metric_error.count"
uss.RegisterMetricsFunc(func(context.Context) (map[string]interface{}, error) {
return map[string]interface{}{extMetricName: 1}, nil
return map[string]interface{}{badMetricName: 1}, errors.New("some error")
})
uss.gatherMetrics(context.Background(), metrics)
uss.registerExternalMetrics(context.Background(), metrics)
extErrorMetric := metrics[badMetricName]
extMetric := metrics[goodMetricName]
assert.Equal(t, 1, metrics[extMetricName])
t.Run("When loading a metric results to an error", func(t *testing.T) {
uss.RegisterMetricsFunc(func(context.Context) (map[string]interface{}, error) {
return map[string]interface{}{extMetricName: 1}, nil
})
extErrorMetricName := "stats.test_external_metric_error.count"
t.Run("Should not add it to metrics", func(t *testing.T) {
uss.RegisterMetricsFunc(func(context.Context) (map[string]interface{}, error) {
return map[string]interface{}{extErrorMetricName: 1}, errors.New("some error")
})
uss.registerExternalMetrics(context.Background(), metrics)
extErrorMetric := metrics[extErrorMetricName]
extMetric := metrics[extMetricName]
require.Nil(t, extErrorMetric, "Invalid metric should not be added")
assert.Equal(t, 1, extMetric)
assert.Len(t, metrics, 3, "Expected only one available metric")
})
})
require.Nil(t, extErrorMetric, "Invalid metric should not be added")
assert.Equal(t, 1, extMetric)
assert.Len(t, metrics, 3, "Expected only one available metric")
})
}
@ -479,27 +221,6 @@ func (pr fakePluginStore) Plugins(_ context.Context, pluginTypes ...plugins.Type
return result
}
func setupSomeDataSourcePlugins(t *testing.T, uss *UsageStats) {
t.Helper()
uss.pluginStore = &fakePluginStore{
plugins: map[string]plugins.PluginDTO{
models.DS_ES: {
Signature: "internal",
},
models.DS_PROMETHEUS: {
Signature: "internal",
},
models.DS_GRAPHITE: {
Signature: "internal",
},
models.DS_MYSQL: {
Signature: "internal",
},
},
}
}
type httpResp struct {
req *http.Request
responseBuffer *bytes.Buffer
@ -511,15 +232,11 @@ func createService(t *testing.T, cfg setting.Cfg, sqlStore sqlstore.Store, withD
if withDB {
sqlStore = sqlstore.InitTestDB(t)
}
return &UsageStats{
Cfg: &cfg,
SQLStore: sqlStore,
externalMetrics: make([]usagestats.MetricsFunc, 0),
pluginStore: &fakePluginStore{},
kvStore: kvstore.WithNamespace(kvstore.ProvideService(sqlStore), 0, "infra.usagestats"),
log: log.New("infra.usagestats"),
startTime: time.Now().Add(-1 * time.Minute),
RouteRegister: routing.NewRouteRegister(),
features: featuremgmt.WithFeatures("feature1", "feature2"),
}
return ProvideService(
&cfg,
&fakePluginStore{},
kvstore.ProvideService(sqlStore),
routing.NewRouteRegister(),
)
}


@ -0,0 +1,58 @@
package statscollector
import (
"context"
"fmt"
"time"
"github.com/grafana/grafana/pkg/services/sqlstore"
)
const concurrentUserStatsCacheLifetime = time.Hour
type concurrentUsersStats struct {
BucketLE3 int32 `xorm:"bucket_le_3"`
BucketLE6 int32 `xorm:"bucket_le_6"`
BucketLE9 int32 `xorm:"bucket_le_9"`
BucketLE12 int32 `xorm:"bucket_le_12"`
BucketLE15 int32 `xorm:"bucket_le_15"`
BucketLEInf int32 `xorm:"bucket_le_inf"`
}
type memoConcurrentUserStats struct {
stats *concurrentUsersStats
memoized time.Time
}
func (s *Service) concurrentUsers(ctx context.Context) (*concurrentUsersStats, error) {
memoizationPeriod := time.Now().Add(-concurrentUserStatsCacheLifetime)
if !s.concurrentUserStatsCache.memoized.Before(memoizationPeriod) {
return s.concurrentUserStatsCache.stats, nil
}
s.concurrentUserStatsCache.stats = &concurrentUsersStats{}
err := s.sqlstore.WithDbSession(ctx, func(sess *sqlstore.DBSession) error {
// Retrieves concurrent users stats as a histogram. Buckets are accumulative and upper bound is inclusive.
rawSQL := `
SELECT
COUNT(CASE WHEN tokens <= 3 THEN 1 END) AS bucket_le_3,
COUNT(CASE WHEN tokens <= 6 THEN 1 END) AS bucket_le_6,
COUNT(CASE WHEN tokens <= 9 THEN 1 END) AS bucket_le_9,
COUNT(CASE WHEN tokens <= 12 THEN 1 END) AS bucket_le_12,
COUNT(CASE WHEN tokens <= 15 THEN 1 END) AS bucket_le_15,
COUNT(1) AS bucket_le_inf
FROM (select count(1) as tokens from user_auth_token group by user_id) uat;`
_, err := sess.SQL(rawSQL).Get(s.concurrentUserStatsCache.stats)
if err != nil {
return err
}
return nil
})
if err != nil {
return nil, fmt.Errorf("failed to get concurrent users stats from database: %w", err)
}
s.concurrentUserStatsCache.memoized = time.Now()
return s.concurrentUserStatsCache.stats, nil
}


@ -1,4 +1,4 @@
package service
package statscollector
import (
"context"
@ -8,21 +8,35 @@ import (
"testing"
"time"
"github.com/grafana/grafana/pkg/infra/kvstore"
"github.com/grafana/grafana/pkg/infra/log"
"github.com/grafana/grafana/pkg/services/sqlstore"
"github.com/grafana/grafana/pkg/util"
"github.com/grafana/grafana/pkg/setting"
"github.com/stretchr/testify/assert"
"github.com/stretchr/testify/require"
"github.com/grafana/grafana/pkg/services/sqlstore"
"github.com/grafana/grafana/pkg/util"
)
func TestUsageStatsService_GetConcurrentUsersStats(t *testing.T) {
func TestConcurrentUsersMetrics(t *testing.T) {
sqlStore := sqlstore.InitTestDB(t)
uss := &UsageStats{
SQLStore: sqlStore,
kvStore: kvstore.WithNamespace(kvstore.ProvideService(sqlStore), 0, "infra.usagestats"),
log: log.New("infra.usagestats"),
}
s := createService(t, setting.NewCfg(), sqlStore)
createConcurrentTokens(t, sqlStore)
stats, err := s.collect(context.Background())
require.NoError(t, err)
assert.Equal(t, int32(1), stats["stats.auth_token_per_user_le_3"])
assert.Equal(t, int32(2), stats["stats.auth_token_per_user_le_6"])
assert.Equal(t, int32(3), stats["stats.auth_token_per_user_le_9"])
assert.Equal(t, int32(4), stats["stats.auth_token_per_user_le_12"])
assert.Equal(t, int32(5), stats["stats.auth_token_per_user_le_15"])
assert.Equal(t, int32(6), stats["stats.auth_token_per_user_le_inf"])
}
func TestConcurrentUsersStats(t *testing.T) {
sqlStore := sqlstore.InitTestDB(t)
s := createService(t, setting.NewCfg(), sqlStore)
createConcurrentTokens(t, sqlStore)
@ -31,10 +45,10 @@ func TestUsageStatsService_GetConcurrentUsersStats(t *testing.T) {
cancel()
})
actualResult, err := uss.GetConcurrentUsersStats(ctx)
actualResult, err := s.concurrentUsers(ctx)
require.NoError(t, err)
expectedCachedResult := &concurrentUsersStats{
expectedResult := &concurrentUsersStats{
BucketLE3: 1,
BucketLE6: 2,
BucketLE9: 3,
@ -42,14 +56,39 @@ func TestUsageStatsService_GetConcurrentUsersStats(t *testing.T) {
BucketLE15: 5,
BucketLEInf: 6,
}
assert.Equal(t, expectedCachedResult, actualResult)
assert.Equal(t, expectedResult, actualResult)
createToken(t, 8, sqlStore)
require.NoError(t, err)
actualResult, err = uss.GetConcurrentUsersStats(ctx)
// Cached, new token is not reflected yet.
actualResult, err = s.concurrentUsers(ctx)
require.NoError(t, err)
assert.Equal(t, expectedCachedResult, actualResult)
assert.Equal(t, expectedResult, actualResult)
// Bust cache
s.concurrentUserStatsCache = memoConcurrentUserStats{}
expectedResult = &concurrentUsersStats{
BucketLE3: 2,
BucketLE6: 3,
BucketLE9: 4,
BucketLE12: 5,
BucketLE15: 6,
BucketLEInf: 7,
}
actualResult, err = s.concurrentUsers(ctx)
require.NoError(t, err)
assert.Equal(t, expectedResult, actualResult)
}
func createConcurrentTokens(t *testing.T, sqlStore sqlstore.Store) {
t.Helper()
for u := 1; u <= 6; u++ {
for tkn := 1; tkn <= u*3; tkn++ {
createToken(t, u, sqlStore)
}
}
}
func createToken(t *testing.T, uID int, sqlStore sqlstore.Store) {
@ -83,15 +122,6 @@ func createToken(t *testing.T, uID int, sqlStore sqlstore.Store) {
require.NoError(t, err)
}
func createConcurrentTokens(t *testing.T, sqlStore sqlstore.Store) {
t.Helper()
for u := 1; u <= 6; u++ {
for tkn := 1; tkn <= u*3; tkn++ {
createToken(t, u, sqlStore)
}
}
}
type userAuthToken struct {
UserID int64 `xorm:"user_id"`
AuthToken string


@ -0,0 +1,321 @@
package statscollector
import (
"context"
"fmt"
"strings"
"time"
"github.com/grafana/grafana/pkg/services/featuremgmt"
"github.com/grafana/grafana/pkg/infra/usagestats"
"github.com/grafana/grafana/pkg/infra/log"
"github.com/grafana/grafana/pkg/infra/metrics"
"github.com/grafana/grafana/pkg/login/social"
"github.com/grafana/grafana/pkg/models"
"github.com/grafana/grafana/pkg/plugins"
"github.com/grafana/grafana/pkg/services/sqlstore"
"github.com/grafana/grafana/pkg/setting"
)
type Service struct {
cfg *setting.Cfg
sqlstore sqlstore.Store
plugins plugins.Store
social social.Service
usageStats usagestats.Service
features *featuremgmt.FeatureManager
log log.Logger
startTime time.Time
concurrentUserStatsCache memoConcurrentUserStats
}
func ProvideService(
usagestats usagestats.Service,
cfg *setting.Cfg,
store sqlstore.Store,
social social.Service,
plugins plugins.Store,
features *featuremgmt.FeatureManager,
) *Service {
s := &Service{
cfg: cfg,
sqlstore: store,
plugins: plugins,
social: social,
usageStats: usagestats,
features: features,
startTime: time.Now(),
log: log.New("infra.usagestats.collector"),
}
usagestats.RegisterMetricsFunc(s.collect)
return s
}
func (s *Service) Run(ctx context.Context) error {
s.updateTotalStats(ctx)
updateStatsTicker := time.NewTicker(time.Minute * 30)
defer updateStatsTicker.Stop()
for {
select {
case <-updateStatsTicker.C:
s.updateTotalStats(ctx)
case <-ctx.Done():
return ctx.Err()
}
}
}
func (s *Service) collect(ctx context.Context) (map[string]interface{}, error) {
m := map[string]interface{}{}
statsQuery := models.GetSystemStatsQuery{}
if err := s.sqlstore.GetSystemStats(ctx, &statsQuery); err != nil {
s.log.Error("Failed to get system stats", "error", err)
return nil, err
}
m["stats.dashboards.count"] = statsQuery.Result.Dashboards
m["stats.users.count"] = statsQuery.Result.Users
m["stats.admins.count"] = statsQuery.Result.Admins
m["stats.editors.count"] = statsQuery.Result.Editors
m["stats.viewers.count"] = statsQuery.Result.Viewers
m["stats.orgs.count"] = statsQuery.Result.Orgs
m["stats.playlist.count"] = statsQuery.Result.Playlists
m["stats.plugins.apps.count"] = s.appCount(ctx)
m["stats.plugins.panels.count"] = s.panelCount(ctx)
m["stats.plugins.datasources.count"] = s.dataSourceCount(ctx)
m["stats.alerts.count"] = statsQuery.Result.Alerts
m["stats.active_users.count"] = statsQuery.Result.ActiveUsers
m["stats.active_admins.count"] = statsQuery.Result.ActiveAdmins
m["stats.active_editors.count"] = statsQuery.Result.ActiveEditors
m["stats.active_viewers.count"] = statsQuery.Result.ActiveViewers
m["stats.active_sessions.count"] = statsQuery.Result.ActiveSessions
m["stats.monthly_active_users.count"] = statsQuery.Result.MonthlyActiveUsers
m["stats.daily_active_users.count"] = statsQuery.Result.DailyActiveUsers
m["stats.daily_active_admins.count"] = statsQuery.Result.DailyActiveAdmins
m["stats.daily_active_editors.count"] = statsQuery.Result.DailyActiveEditors
m["stats.daily_active_viewers.count"] = statsQuery.Result.DailyActiveViewers
m["stats.daily_active_sessions.count"] = statsQuery.Result.DailyActiveSessions
m["stats.datasources.count"] = statsQuery.Result.Datasources
m["stats.stars.count"] = statsQuery.Result.Stars
m["stats.folders.count"] = statsQuery.Result.Folders
m["stats.dashboard_permissions.count"] = statsQuery.Result.DashboardPermissions
m["stats.folder_permissions.count"] = statsQuery.Result.FolderPermissions
m["stats.provisioned_dashboards.count"] = statsQuery.Result.ProvisionedDashboards
m["stats.snapshots.count"] = statsQuery.Result.Snapshots
m["stats.teams.count"] = statsQuery.Result.Teams
m["stats.total_auth_token.count"] = statsQuery.Result.AuthTokens
m["stats.dashboard_versions.count"] = statsQuery.Result.DashboardVersions
m["stats.annotations.count"] = statsQuery.Result.Annotations
m["stats.alert_rules.count"] = statsQuery.Result.AlertRules
m["stats.library_panels.count"] = statsQuery.Result.LibraryPanels
m["stats.library_variables.count"] = statsQuery.Result.LibraryVariables
m["stats.dashboards_viewers_can_edit.count"] = statsQuery.Result.DashboardsViewersCanEdit
m["stats.dashboards_viewers_can_admin.count"] = statsQuery.Result.DashboardsViewersCanAdmin
m["stats.folders_viewers_can_edit.count"] = statsQuery.Result.FoldersViewersCanEdit
m["stats.folders_viewers_can_admin.count"] = statsQuery.Result.FoldersViewersCanAdmin
m["stats.api_keys.count"] = statsQuery.Result.APIKeys
ossEditionCount := 1
enterpriseEditionCount := 0
if s.cfg.IsEnterprise {
enterpriseEditionCount = 1
ossEditionCount = 0
}
m["stats.edition.oss.count"] = ossEditionCount
m["stats.edition.enterprise.count"] = enterpriseEditionCount
userCount := statsQuery.Result.Users
avgAuthTokensPerUser := statsQuery.Result.AuthTokens
if userCount != 0 {
avgAuthTokensPerUser /= userCount
}
m["stats.avg_auth_token_per_user.count"] = avgAuthTokensPerUser
dsStats := models.GetDataSourceStatsQuery{}
if err := s.sqlstore.GetDataSourceStats(ctx, &dsStats); err != nil {
s.log.Error("Failed to get datasource stats", "error", err)
return nil, err
}
// send counters for each data source
// but ignore any custom data sources
// as sending that name could be sensitive information
dsOtherCount := 0
for _, dsStat := range dsStats.Result {
if s.usageStats.ShouldBeReported(ctx, dsStat.Type) {
m["stats.ds."+dsStat.Type+".count"] = dsStat.Count
} else {
dsOtherCount += dsStat.Count
}
}
m["stats.ds.other.count"] = dsOtherCount
esDataSourcesQuery := models.GetDataSourcesByTypeQuery{Type: models.DS_ES}
if err := s.sqlstore.GetDataSourcesByType(ctx, &esDataSourcesQuery); err != nil {
s.log.Error("Failed to get elasticsearch json data", "error", err)
return nil, err
}
for _, data := range esDataSourcesQuery.Result {
esVersion, err := data.JsonData.Get("esVersion").Int()
if err != nil {
continue
}
statName := fmt.Sprintf("stats.ds.elasticsearch.v%d.count", esVersion)
count, _ := m[statName].(int64)
m[statName] = count + 1
}
m["stats.packaging."+s.cfg.Packaging+".count"] = 1
m["stats.distributor."+s.cfg.ReportingDistributor+".count"] = 1
// fetch datasource access stats
dsAccessStats := models.GetDataSourceAccessStatsQuery{}
if err := s.sqlstore.GetDataSourceAccessStats(ctx, &dsAccessStats); err != nil {
s.log.Error("Failed to get datasource access stats", "error", err)
return nil, err
}
// send access counters for each data source
// but ignore any custom data sources
// as sending that name could be sensitive information
dsAccessOtherCount := make(map[string]int64)
for _, dsAccessStat := range dsAccessStats.Result {
if dsAccessStat.Access == "" {
continue
}
access := strings.ToLower(dsAccessStat.Access)
if s.usageStats.ShouldBeReported(ctx, dsAccessStat.Type) {
m["stats.ds_access."+dsAccessStat.Type+"."+access+".count"] = dsAccessStat.Count
} else {
old := dsAccessOtherCount[access]
dsAccessOtherCount[access] = old + dsAccessStat.Count
}
}
for access, count := range dsAccessOtherCount {
m["stats.ds_access.other."+access+".count"] = count
}
// get stats about alert notifier usage
anStats := models.GetAlertNotifierUsageStatsQuery{}
if err := s.sqlstore.GetAlertNotifiersUsageStats(ctx, &anStats); err != nil {
s.log.Error("Failed to get alert notification stats", "error", err)
return nil, err
}
for _, stats := range anStats.Result {
m["stats.alert_notifiers."+stats.Type+".count"] = stats.Count
}
// Add stats about auth configuration
authTypes := map[string]bool{}
authTypes["anonymous"] = s.cfg.AnonymousEnabled
authTypes["basic_auth"] = s.cfg.BasicAuthEnabled
authTypes["ldap"] = s.cfg.LDAPEnabled
authTypes["auth_proxy"] = s.cfg.AuthProxyEnabled
for provider, enabled := range s.social.GetOAuthProviders() {
authTypes["oauth_"+provider] = enabled
}
for authType, enabled := range authTypes {
enabledValue := 0
if enabled {
enabledValue = 1
}
m["stats.auth_enabled."+authType+".count"] = enabledValue
}
// Get concurrent users stats as histogram
concurrentUsersStats, err := s.concurrentUsers(ctx)
if err != nil {
s.log.Error("Failed to get concurrent users stats", "error", err)
return nil, err
}
// Histogram is cumulative and metric name has a postfix of le_"<upper inclusive bound>"
m["stats.auth_token_per_user_le_3"] = concurrentUsersStats.BucketLE3
m["stats.auth_token_per_user_le_6"] = concurrentUsersStats.BucketLE6
m["stats.auth_token_per_user_le_9"] = concurrentUsersStats.BucketLE9
m["stats.auth_token_per_user_le_12"] = concurrentUsersStats.BucketLE12
m["stats.auth_token_per_user_le_15"] = concurrentUsersStats.BucketLE15
m["stats.auth_token_per_user_le_inf"] = concurrentUsersStats.BucketLEInf
m["stats.uptime"] = int64(time.Since(s.startTime).Seconds())
featureUsageStats := s.features.GetUsageStats(ctx)
for k, v := range featureUsageStats {
m[k] = v
}
return m, nil
}
func (s *Service) updateTotalStats(ctx context.Context) bool {
if !s.cfg.MetricsEndpointEnabled || s.cfg.MetricsEndpointDisableTotalStats {
return false
}
statsQuery := models.GetSystemStatsQuery{}
if err := s.sqlstore.GetSystemStats(ctx, &statsQuery); err != nil {
s.log.Error("Failed to get system stats", "error", err)
return false
}
metrics.MStatTotalDashboards.Set(float64(statsQuery.Result.Dashboards))
metrics.MStatTotalFolders.Set(float64(statsQuery.Result.Folders))
metrics.MStatTotalUsers.Set(float64(statsQuery.Result.Users))
metrics.MStatActiveUsers.Set(float64(statsQuery.Result.ActiveUsers))
metrics.MStatTotalPlaylists.Set(float64(statsQuery.Result.Playlists))
metrics.MStatTotalOrgs.Set(float64(statsQuery.Result.Orgs))
metrics.StatsTotalViewers.Set(float64(statsQuery.Result.Viewers))
metrics.StatsTotalActiveViewers.Set(float64(statsQuery.Result.ActiveViewers))
metrics.StatsTotalEditors.Set(float64(statsQuery.Result.Editors))
metrics.StatsTotalActiveEditors.Set(float64(statsQuery.Result.ActiveEditors))
metrics.StatsTotalAdmins.Set(float64(statsQuery.Result.Admins))
metrics.StatsTotalActiveAdmins.Set(float64(statsQuery.Result.ActiveAdmins))
metrics.StatsTotalDashboardVersions.Set(float64(statsQuery.Result.DashboardVersions))
metrics.StatsTotalAnnotations.Set(float64(statsQuery.Result.Annotations))
metrics.StatsTotalAlertRules.Set(float64(statsQuery.Result.AlertRules))
metrics.StatsTotalLibraryPanels.Set(float64(statsQuery.Result.LibraryPanels))
metrics.StatsTotalLibraryVariables.Set(float64(statsQuery.Result.LibraryVariables))
dsStats := models.GetDataSourceStatsQuery{}
if err := s.sqlstore.GetDataSourceStats(ctx, &dsStats); err != nil {
s.log.Error("Failed to get datasource stats", "error", err)
return true
}
for _, dsStat := range dsStats.Result {
metrics.StatsTotalDataSources.WithLabelValues(dsStat.Type).Set(float64(dsStat.Count))
}
return true
}
func (s *Service) appCount(ctx context.Context) int {
return len(s.plugins.Plugins(ctx, plugins.App))
}
func (s *Service) panelCount(ctx context.Context) int {
return len(s.plugins.Plugins(ctx, plugins.Panel))
}
func (s *Service) dataSourceCount(ctx context.Context) int {
return len(s.plugins.Plugins(ctx, plugins.DataSource))
}


@ -0,0 +1,362 @@
package statscollector
import (
"context"
"fmt"
"testing"
"time"
"github.com/grafana/grafana/pkg/services/featuremgmt"
"github.com/stretchr/testify/assert"
"github.com/stretchr/testify/require"
"github.com/grafana/grafana/pkg/components/simplejson"
"github.com/grafana/grafana/pkg/infra/usagestats"
"github.com/grafana/grafana/pkg/login/social"
"github.com/grafana/grafana/pkg/models"
"github.com/grafana/grafana/pkg/plugins"
"github.com/grafana/grafana/pkg/services/sqlstore"
"github.com/grafana/grafana/pkg/services/sqlstore/mockstore"
"github.com/grafana/grafana/pkg/setting"
)
func TestTotalStatsUpdate(t *testing.T) {
sqlStore := mockstore.NewSQLStoreMock()
s := createService(t, setting.NewCfg(), sqlStore)
s.cfg.MetricsEndpointEnabled = true
s.cfg.MetricsEndpointDisableTotalStats = false
sqlStore.ExpectedSystemStats = &models.SystemStats{}
tests := []struct {
MetricsEndpointEnabled bool
MetricsEndpointDisableTotalStats bool
ExpectedUpdate bool
}{
{
MetricsEndpointEnabled: false,
MetricsEndpointDisableTotalStats: false,
ExpectedUpdate: false,
},
{
MetricsEndpointEnabled: false,
MetricsEndpointDisableTotalStats: true,
ExpectedUpdate: false,
},
{
MetricsEndpointEnabled: true,
MetricsEndpointDisableTotalStats: true,
ExpectedUpdate: false,
},
{
MetricsEndpointEnabled: true,
MetricsEndpointDisableTotalStats: false,
ExpectedUpdate: true,
},
}
for _, tc := range tests {
tc := tc
t.Run(fmt.Sprintf(
"metricsEnabled(%v) * totalStatsDisabled(%v) = %v",
tc.MetricsEndpointEnabled,
tc.MetricsEndpointDisableTotalStats,
tc.ExpectedUpdate,
), func(t *testing.T) {
s.cfg.MetricsEndpointEnabled = tc.MetricsEndpointEnabled
s.cfg.MetricsEndpointDisableTotalStats = tc.MetricsEndpointDisableTotalStats
assert.Equal(t, tc.ExpectedUpdate, s.updateTotalStats(context.Background()))
})
}
}
func TestFeatureUsageStats(t *testing.T) {
store := mockstore.NewSQLStoreMock()
mockSystemStats(store)
s := createService(t, setting.NewCfg(), store)
m, err := s.collect(context.Background())
require.NoError(t, err, "Expected no error")
assert.Equal(t, 1, m["stats.features.feature_1.count"])
assert.Equal(t, 1, m["stats.features.feature_2.count"])
}
func TestCollectingUsageStats(t *testing.T) {
sqlStore := mockstore.NewSQLStoreMock()
s := createService(t, &setting.Cfg{
ReportingEnabled: true,
BuildVersion: "5.0.0",
AnonymousEnabled: true,
BasicAuthEnabled: true,
LDAPEnabled: true,
AuthProxyEnabled: true,
Packaging: "deb",
ReportingDistributor: "hosted-grafana",
}, sqlStore)
s.startTime = time.Now().Add(-1 * time.Minute)
mockSystemStats(sqlStore)
setupSomeDataSourcePlugins(t, s)
sqlStore.ExpectedDataSourceStats = []*models.DataSourceStats{
{
Type: models.DS_ES,
Count: 9,
},
{
Type: models.DS_PROMETHEUS,
Count: 10,
},
{
Type: "unknown_ds",
Count: 11,
},
{
Type: "unknown_ds2",
Count: 12,
},
}
sqlStore.ExpectedDataSources = []*models.DataSource{
{
JsonData: simplejson.NewFromAny(map[string]interface{}{
"esVersion": 2,
}),
},
{
JsonData: simplejson.NewFromAny(map[string]interface{}{
"esVersion": 2,
}),
},
{
JsonData: simplejson.NewFromAny(map[string]interface{}{
"esVersion": 70,
}),
},
}
sqlStore.ExpectedDataSourcesAccessStats = []*models.DataSourceAccessStats{
{
Type: models.DS_ES,
Access: "direct",
Count: 1,
},
{
Type: models.DS_ES,
Access: "proxy",
Count: 2,
},
{
Type: models.DS_PROMETHEUS,
Access: "proxy",
Count: 3,
},
{
Type: "unknown_ds",
Access: "proxy",
Count: 4,
},
{
Type: "unknown_ds2",
Access: "",
Count: 5,
},
{
Type: "unknown_ds3",
Access: "direct",
Count: 6,
},
{
Type: "unknown_ds4",
Access: "direct",
Count: 7,
},
{
Type: "unknown_ds5",
Access: "proxy",
Count: 8,
},
}
sqlStore.ExpectedNotifierUsageStats = []*models.NotifierUsageStats{
{
Type: "slack",
Count: 1,
},
{
Type: "webhook",
Count: 2,
},
}
createConcurrentTokens(t, sqlStore)
s.social = &mockSocial{
OAuthProviders: map[string]bool{
"github": true,
"gitlab": true,
"azuread": true,
"google": true,
"generic_oauth": true,
"grafana_com": true,
},
}
metrics, err := s.collect(context.Background())
require.NoError(t, err)
assert.EqualValues(t, 15, metrics["stats.total_auth_token.count"])
assert.EqualValues(t, 2, metrics["stats.api_keys.count"])
assert.EqualValues(t, 5, metrics["stats.avg_auth_token_per_user.count"])
assert.EqualValues(t, 16, metrics["stats.dashboard_versions.count"])
assert.EqualValues(t, 17, metrics["stats.annotations.count"])
assert.EqualValues(t, 18, metrics["stats.alert_rules.count"])
assert.EqualValues(t, 19, metrics["stats.library_panels.count"])
assert.EqualValues(t, 20, metrics["stats.library_variables.count"])
assert.EqualValues(t, 9, metrics["stats.ds."+models.DS_ES+".count"])
assert.EqualValues(t, 10, metrics["stats.ds."+models.DS_PROMETHEUS+".count"])
assert.EqualValues(t, 11+12, metrics["stats.ds.other.count"])
assert.EqualValues(t, 1, metrics["stats.ds_access."+models.DS_ES+".direct.count"])
assert.EqualValues(t, 2, metrics["stats.ds_access."+models.DS_ES+".proxy.count"])
assert.EqualValues(t, 3, metrics["stats.ds_access."+models.DS_PROMETHEUS+".proxy.count"])
assert.EqualValues(t, 6+7, metrics["stats.ds_access.other.direct.count"])
assert.EqualValues(t, 4+8, metrics["stats.ds_access.other.proxy.count"])
assert.EqualValues(t, 1, metrics["stats.alert_notifiers.slack.count"])
assert.EqualValues(t, 2, metrics["stats.alert_notifiers.webhook.count"])
assert.EqualValues(t, 1, metrics["stats.auth_enabled.anonymous.count"])
assert.EqualValues(t, 1, metrics["stats.auth_enabled.basic_auth.count"])
assert.EqualValues(t, 1, metrics["stats.auth_enabled.ldap.count"])
assert.EqualValues(t, 1, metrics["stats.auth_enabled.auth_proxy.count"])
assert.EqualValues(t, 1, metrics["stats.auth_enabled.oauth_github.count"])
assert.EqualValues(t, 1, metrics["stats.auth_enabled.oauth_gitlab.count"])
assert.EqualValues(t, 1, metrics["stats.auth_enabled.oauth_google.count"])
assert.EqualValues(t, 1, metrics["stats.auth_enabled.oauth_azuread.count"])
assert.EqualValues(t, 1, metrics["stats.auth_enabled.oauth_generic_oauth.count"])
assert.EqualValues(t, 1, metrics["stats.auth_enabled.oauth_grafana_com.count"])
assert.EqualValues(t, 1, metrics["stats.packaging.deb.count"])
assert.EqualValues(t, 1, metrics["stats.distributor.hosted-grafana.count"])
assert.InDelta(t, int64(65), metrics["stats.uptime"], 6)
}
func mockSystemStats(sqlStore *mockstore.SQLStoreMock) {
sqlStore.ExpectedSystemStats = &models.SystemStats{
Dashboards: 1,
Datasources: 2,
Users: 3,
Admins: 31,
Editors: 32,
Viewers: 33,
ActiveUsers: 4,
ActiveAdmins: 21,
ActiveEditors: 22,
ActiveViewers: 23,
ActiveSessions: 24,
DailyActiveUsers: 25,
DailyActiveAdmins: 26,
DailyActiveEditors: 27,
DailyActiveViewers: 28,
DailyActiveSessions: 29,
Orgs: 5,
Playlists: 6,
Alerts: 7,
Stars: 8,
Folders: 9,
DashboardPermissions: 10,
FolderPermissions: 11,
ProvisionedDashboards: 12,
Snapshots: 13,
Teams: 14,
AuthTokens: 15,
DashboardVersions: 16,
Annotations: 17,
AlertRules: 18,
LibraryPanels: 19,
LibraryVariables: 20,
DashboardsViewersCanAdmin: 3,
DashboardsViewersCanEdit: 2,
FoldersViewersCanAdmin: 1,
FoldersViewersCanEdit: 5,
APIKeys: 2,
}
}
type mockSocial struct {
social.Service
OAuthProviders map[string]bool
}
func (m *mockSocial) GetOAuthProviders() map[string]bool {
return m.OAuthProviders
}
type fakePluginStore struct {
plugins.Store
plugins map[string]plugins.PluginDTO
}
func (pr fakePluginStore) Plugin(_ context.Context, pluginID string) (plugins.PluginDTO, bool) {
p, exists := pr.plugins[pluginID]
return p, exists
}
func setupSomeDataSourcePlugins(t *testing.T, s *Service) {
t.Helper()
s.plugins = &fakePluginStore{
plugins: map[string]plugins.PluginDTO{
models.DS_ES: {
Signature: "internal",
},
models.DS_PROMETHEUS: {
Signature: "internal",
},
models.DS_GRAPHITE: {
Signature: "internal",
},
models.DS_MYSQL: {
Signature: "internal",
},
},
}
}
func (pr fakePluginStore) Plugins(_ context.Context, pluginTypes ...plugins.Type) []plugins.PluginDTO {
var result []plugins.PluginDTO
for _, v := range pr.plugins {
for _, t := range pluginTypes {
if v.Type == t {
result = append(result, v)
}
}
}
return result
}
func createService(t testing.TB, cfg *setting.Cfg, store sqlstore.Store) *Service {
t.Helper()
return ProvideService(
&usagestats.UsageStatsMock{},
cfg,
store,
&mockSocial{},
&fakePluginStore{},
featuremgmt.WithFeatures("feature1", "feature2"),
)
}


@ -6,6 +6,7 @@ import (
"github.com/grafana/grafana/pkg/infra/remotecache"
"github.com/grafana/grafana/pkg/infra/tracing"
uss "github.com/grafana/grafana/pkg/infra/usagestats/service"
"github.com/grafana/grafana/pkg/infra/usagestats/statscollector"
"github.com/grafana/grafana/pkg/models"
"github.com/grafana/grafana/pkg/plugins/manager"
"github.com/grafana/grafana/pkg/registry"
@ -32,9 +33,10 @@ func ProvideBackgroundServiceRegistry(
pushGateway *pushhttp.Gateway, notifications *notifications.NotificationService, pm *manager.PluginManager,
rendering *rendering.RenderingService, tokenService models.UserTokenBackgroundService, tracing tracing.Tracer,
provisioning *provisioning.ProvisioningServiceImpl, alerting *alerting.AlertEngine, usageStats *uss.UsageStats,
grafanaUpdateChecker *updatechecker.GrafanaService, pluginsUpdateChecker *updatechecker.PluginsService,
metrics *metrics.InternalMetricsService, secretsService *secretsManager.SecretsService,
remoteCache *remotecache.RemoteCache, thumbnailsService thumbs.Service, StorageService store.StorageService,
statsCollector *statscollector.Service, grafanaUpdateChecker *updatechecker.GrafanaService,
pluginsUpdateChecker *updatechecker.PluginsService, metrics *metrics.InternalMetricsService,
secretsService *secretsManager.SecretsService, remoteCache *remotecache.RemoteCache,
thumbnailsService thumbs.Service, StorageService store.StorageService,
// Need to make sure these are initialized, is there a better place to put them?
_ *dashboardsnapshots.Service, _ *alerting.AlertNotificationService,
_ serviceaccounts.Service, _ *guardian.Provider,
@ -56,11 +58,13 @@ func ProvideBackgroundServiceRegistry(
pluginsUpdateChecker,
metrics,
usageStats,
statsCollector,
tracing,
remoteCache,
secretsService,
StorageService,
thumbnailsService)
thumbnailsService,
)
}
// BackgroundServiceRegistry provides background services.


@ -5,6 +5,7 @@ package server
import (
"github.com/google/wire"
sdkhttpclient "github.com/grafana/grafana-plugin-sdk-go/backend/httpclient"
"github.com/grafana/grafana/pkg/api"
"github.com/grafana/grafana/pkg/api/avatar"
@ -21,6 +22,7 @@ import (
"github.com/grafana/grafana/pkg/infra/tracing"
"github.com/grafana/grafana/pkg/infra/usagestats"
uss "github.com/grafana/grafana/pkg/infra/usagestats/service"
"github.com/grafana/grafana/pkg/infra/usagestats/statscollector"
loginpkg "github.com/grafana/grafana/pkg/login"
"github.com/grafana/grafana/pkg/login/social"
"github.com/grafana/grafana/pkg/models"
@ -235,6 +237,7 @@ var wireBasicSet = wire.NewSet(
guardian.ProvideService,
avatar.ProvideAvatarCacheServer,
authproxy.ProvideAuthProxy,
statscollector.ProvideService,
)
var wireSet = wire.NewSet(