chore(perf): Pre-allocate where possible (enable prealloc linter) (#88952)

* chore(perf): Pre-allocate where possible (enable prealloc linter)

Signed-off-by: Dave Henderson <dave.henderson@grafana.com>

* fix TestAlertManagers_buildRedactedAMs

Signed-off-by: Dave Henderson <dave.henderson@grafana.com>

* prealloc a slice that appeared after rebase

Signed-off-by: Dave Henderson <dave.henderson@grafana.com>

---------

Signed-off-by: Dave Henderson <dave.henderson@grafana.com>
Dave Henderson 2024-06-14 14:16:36 -04:00 committed by GitHub
parent e53e6e7caa
commit 6262c56132
GPG Key ID: B5690EEEBB952194
43 changed files with 211 additions and 140 deletions
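Everything below applies two related idioms that the prealloc linter suggests: when the element count is known exactly, allocate the slice at full length with make and assign by index; when the loop may skip items, allocate zero length with the count as capacity and append. A minimal sketch of both forms (squares and positives are illustrative helpers, not code from this commit):

package main

import "fmt"

// Exact count known: allocate the full length once and assign by index.
func squares(nums []int) []int {
	out := make([]int, len(nums)) // single allocation, no growth
	for i, n := range nums {
		out[i] = n * n
	}
	return out
}

// Items may be filtered out: zero length, capacity as an upper bound, append.
func positives(nums []int) []int {
	out := make([]int, 0, len(nums))
	for _, n := range nums {
		if n <= 0 {
			continue
		}
		out = append(out, n)
	}
	return out
}

func main() {
	fmt.Println(squares([]int{1, 2, 3}))    // [1 4 9]
	fmt.Println(positives([]int{-1, 2, 3})) // [2 3]
}

Either way the backing array is allocated once, instead of growing through repeated append reallocations.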

View File

@@ -129,14 +129,20 @@ max-func-lines = 60
[linters]
disable-all = true
# try to keep this list sorted, please
enable = [
"asciicheck",
"bodyclose",
"depguard",
"dogsled",
"errcheck",
"errorlint",
"exhaustive",
"exportloopref",
# "gochecknoinits",
# "goconst",
# "gocritic", # Temporarily disabled on 2022-09-09, running into weird bug "ruleguard: execution error: used Run() with an empty rule set; forgot to call Load() first?"
"gocyclo",
"goimports",
"goprintffuncname",
"gosec",
@@ -145,19 +151,14 @@ enable = [
"ineffassign",
"misspell",
"nakedret",
"exportloopref",
"prealloc",
"revive",
"staticcheck",
"stylecheck",
"typecheck",
"unconvert",
"unused",
"whitespace",
"gocyclo",
"exhaustive",
"typecheck",
"asciicheck",
"errorlint",
"revive",
]
# Disabled linters (might want them later)

View File

@@ -156,8 +156,8 @@ func publishPackages(cfg packaging.PublishConfig) error {
pth = path.Join(pth, product)
baseArchiveURL := fmt.Sprintf("https://dl.grafana.com/%s", pth)
var builds []buildRepr
for _, ba := range packaging.ArtifactConfigs {
builds := make([]buildRepr, len(packaging.ArtifactConfigs))
for i, ba := range packaging.ArtifactConfigs {
u := ba.GetURL(baseArchiveURL, cfg)
sha256, err := getSHA256(u)
@@ -165,12 +165,12 @@ func publishPackages(cfg packaging.PublishConfig) error {
return err
}
builds = append(builds, buildRepr{
builds[i] = buildRepr{
OS: ba.Os,
URL: u,
SHA256: string(sha256),
Arch: ba.Arch,
})
}
}
r := releaseRepr{

View File

@@ -558,8 +558,9 @@ func (e *State) biSeriesSeries(labels data.Labels, op string, aSeries, bSeries S
func (e *State) walkFunc(node *parse.FuncNode) (Results, error) {
var res Results
var err error
var in []reflect.Value
for _, a := range node.Args {
in := make([]reflect.Value, len(node.Args))
for i, a := range node.Args {
var v any
switch t := a.(type) {
case *parse.StringNode:
@@ -580,7 +581,8 @@ func (e *State) walkFunc(node *parse.FuncNode) (Results, error) {
if err != nil {
return res, err
}
in = append(in, reflect.ValueOf(v))
in[i] = reflect.ValueOf(v)
}
f := reflect.ValueOf(node.F.F)

View File

@@ -444,7 +444,7 @@ func ReadLoggingConfig(modes []string, logsPath string, cfg *ini.File) error {
defaultLevelName, _ := getLogLevelFromConfig("log", "info", cfg)
defaultFilters := getFilters(util.SplitString(cfg.Section("log").Key("filters").String()))
var configLoggers []logWithFilters
configLoggers := make([]logWithFilters, 0, len(modes))
for _, mode := range modes {
mode = strings.TrimSpace(mode)
sec, err := cfg.GetSection("log." + mode)
@@ -505,6 +505,7 @@ func ReadLoggingConfig(modes []string, logsPath string, cfg *ini.File) error {
handler.filters = modeFilters
handler.maxLevel = leveloption
configLoggers = append(configLoggers, handler)
}
if len(configLoggers) > 0 {

View File

@@ -71,7 +71,7 @@ func (m *service) Run(ctx context.Context) error {
return nil
}
var svcs []services.Service
svcs := make([]services.Service, 0, len(m.serviceMap))
for _, s := range m.serviceMap {
svcs = append(svcs, s)
}

View File

@@ -93,6 +93,8 @@ func TestDefaultStaticDetectorsInspector(t *testing.T) {
plugin *plugins.Plugin
exp bool
}
//nolint:prealloc // just a test, and it'd require too much refactoring to preallocate
var tcs []tc
// Angular imports

View File

@@ -381,11 +381,13 @@ func TestFSPathSeparatorFiles(t *testing.T) {
}
func fileList(manifest *PluginManifest) []string {
var keys []string
keys := make([]string, 0, len(manifest.Files))
for k := range manifest.Files {
keys = append(keys, k)
}
sort.Strings(keys)
return keys
}
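Collecting map keys, as in fileList above, is a recurring case in this diff: the count is len(map), but ranging over a map yields no index, so the capacity-plus-append form is the natural fit. For what it's worth, on Go 1.23+ the standard library can produce the same result in one line; a sketch:

package main

import (
	"fmt"
	"maps"
	"slices"
)

func main() {
	files := map[string]string{"b": "2", "a": "1"}

	// The pattern used in fileList: preallocate capacity, append, sort.
	keys := make([]string, 0, len(files))
	for k := range files {
		keys = append(keys, k)
	}
	slices.Sort(keys)

	// Equivalent on Go 1.23+ via iterators.
	fmt.Println(keys, slices.Sorted(maps.Keys(files))) // [a b] [a b]
}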

View File

@@ -62,9 +62,10 @@ func DirAsLocalSources(pluginsPath string, class plugins.Class) ([]*LocalSource,
}
slices.Sort(pluginDirs)
var sources []*LocalSource
for _, dir := range pluginDirs {
sources = append(sources, NewLocalSource(class, []string{dir}))
sources := make([]*LocalSource, len(pluginDirs))
for i, dir := range pluginDirs {
sources[i] = NewLocalSource(class, []string{dir})
}
return sources, nil
}

View File

@@ -46,7 +46,7 @@ func TestDirAsLocalSources(t *testing.T) {
{
name: "Directory with no subdirectories",
pluginsPath: filepath.Join(testdataDir, "pluginRootWithDist", "datasource"),
expected: nil,
expected: []*LocalSource{},
},
{
name: "Directory with a symlink to a directory",

View File

@@ -38,22 +38,25 @@ func (s *Service) externalPluginSources() []plugins.PluginSource {
return []plugins.PluginSource{}
}
var srcs []plugins.PluginSource
for _, src := range localSrcs {
srcs = append(srcs, src)
srcs := make([]plugins.PluginSource, len(localSrcs))
for i, src := range localSrcs {
srcs[i] = src
}
return srcs
}
func (s *Service) pluginSettingSources() []plugins.PluginSource {
var sources []plugins.PluginSource
sources := make([]plugins.PluginSource, 0, len(s.cfg.PluginSettings))
for _, ps := range s.cfg.PluginSettings {
path, exists := ps["path"]
if !exists || path == "" {
continue
}
sources = append(sources, NewLocalSource(plugins.ClassExternal, []string{path}))
}
return sources
}

View File

@@ -197,9 +197,8 @@ type versionArg struct {
}
func createPluginVersions(versions ...versionArg) []Version {
var vs []Version
for _, version := range versions {
vs := make([]Version, len(versions))
for i, version := range versions {
ver := Version{
Version: version.version,
}
@@ -211,7 +210,8 @@ func createPluginVersions(versions ...versionArg) []Version {
}
}
}
vs = append(vs, ver)
vs[i] = ver
}
return vs

View File

@@ -326,7 +326,7 @@ func (e DatasourcePermissionsService) SetBuiltInRolePermission(ctx context.Conte
// if an OSS/unlicensed instance is upgraded to Enterprise/licensed.
// https://github.com/grafana/identity-access-team/issues/672
func (e DatasourcePermissionsService) SetPermissions(ctx context.Context, orgID int64, resourceID string, commands ...accesscontrol.SetResourcePermissionCommand) ([]accesscontrol.ResourcePermission, error) {
var dbCommands []resourcepermissions.SetResourcePermissionsCommand
dbCommands := make([]resourcepermissions.SetResourcePermissionsCommand, 0, len(commands))
for _, cmd := range commands {
// Only set query permissions for built-in roles; do not set permissions for data sources with * as UID, as this would grant wildcard permissions
if cmd.Permission != "Query" || cmd.BuiltinRole == "" || resourceID == "*" {

View File

@@ -420,21 +420,12 @@ func (s *Service) RunMigration(ctx context.Context, uid string) (*cloudmigration
}
func (s *Service) getMigrationDataJSON(ctx context.Context) (*cloudmigration.MigrateDataRequest, error) {
var migrationDataSlice []cloudmigration.MigrateDataRequestItem
// Data sources
dataSources, err := s.getDataSources(ctx)
if err != nil {
s.log.Error("Failed to get datasources", "err", err)
return nil, err
}
for _, ds := range dataSources {
migrationDataSlice = append(migrationDataSlice, cloudmigration.MigrateDataRequestItem{
Type: cloudmigration.DatasourceDataType,
RefID: ds.UID,
Name: ds.Name,
Data: ds,
})
}
// Dashboards
dashboards, err := s.getDashboards(ctx)
@@ -443,6 +434,26 @@ func (s *Service) getMigrationDataJSON(ctx context.Context) (*cloudmigration.Mig
return nil, err
}
// Folders
folders, err := s.getFolders(ctx)
if err != nil {
s.log.Error("Failed to get folders", "err", err)
return nil, err
}
migrationDataSlice := make(
[]cloudmigration.MigrateDataRequestItem, 0,
len(dataSources)+len(dashboards)+len(folders),
)
for _, ds := range dataSources {
migrationDataSlice = append(migrationDataSlice, cloudmigration.MigrateDataRequestItem{
Type: cloudmigration.DatasourceDataType,
RefID: ds.UID,
Name: ds.Name,
Data: ds,
})
}
for _, dashboard := range dashboards {
dashboard.Data.Del("id")
migrationDataSlice = append(migrationDataSlice, cloudmigration.MigrateDataRequestItem{
@@ -453,13 +464,6 @@ func (s *Service) getMigrationDataJSON(ctx context.Context) (*cloudmigration.Mig
})
}
// Folders
folders, err := s.getFolders(ctx)
if err != nil {
s.log.Error("Failed to get folders", "err", err)
return nil, err
}
for _, f := range folders {
migrationDataSlice = append(migrationDataSlice, cloudmigration.MigrateDataRequestItem{
Type: cloudmigration.FolderDataType,
@@ -468,6 +472,7 @@ func (s *Service) getMigrationDataJSON(ctx context.Context) (*cloudmigration.Mig
Data: f,
})
}
migrationData := &cloudmigration.MigrateDataRequest{
Items: migrationDataSlice,
}
@@ -521,9 +526,9 @@ func (s *Service) getFolders(ctx context.Context) ([]folder.Folder, error) {
return nil, err
}
var result []folder.Folder
for _, folder := range folders {
result = append(result, *folder)
result := make([]folder.Folder, len(folders))
for i, folder := range folders {
result[i] = *folder
}
return result, nil
@@ -535,10 +540,11 @@ func (s *Service) getDashboards(ctx context.Context) ([]dashboards.Dashboard, er
return nil, err
}
var result []dashboards.Dashboard
for _, dashboard := range dashs {
result = append(result, *dashboard)
result := make([]dashboards.Dashboard, len(dashs))
for i, dashboard := range dashs {
result[i] = *dashboard
}
return result, nil
}

View File

@@ -332,12 +332,14 @@ func setupRedisLiveEngine(g *GrafanaLive, node *centrifuge.Node) error {
redisShardConfigs := []centrifuge.RedisShardConfig{
{Address: redisAddress, Password: redisPassword},
}
var redisShards []*centrifuge.RedisShard
redisShards := make([]*centrifuge.RedisShard, 0, len(redisShardConfigs))
for _, redisConf := range redisShardConfigs {
redisShard, err := centrifuge.NewRedisShard(node, redisConf)
if err != nil {
return fmt.Errorf("error connecting to Live Redis: %v", err)
}
redisShards = append(redisShards, redisShard)
}
@@ -348,6 +350,7 @@ func setupRedisLiveEngine(g *GrafanaLive, node *centrifuge.Node) error {
if err != nil {
return fmt.Errorf("error creating Live Redis broker: %v", err)
}
node.SetBroker(broker)
presenceManager, err := centrifuge.NewRedisPresenceManager(node, centrifuge.RedisPresenceManagerConfig{
@@ -357,7 +360,9 @@ func setupRedisLiveEngine(g *GrafanaLive, node *centrifuge.Node) error {
if err != nil {
return fmt.Errorf("error creating Live Redis presence manager: %v", err)
}
node.SetPresenceManager(presenceManager)
return nil
}

View File

@@ -116,17 +116,21 @@ func (s *Manager) handleDatasourceEvent(orgID int64, dsUID string, resubmit bool
s.mu.RUnlock()
return nil
}
var resubmitRequests []streamRequest
var waitChannels []chan struct{}
resubmitRequests := make([]streamRequest, 0, len(dsStreams))
waitChannels := make([]chan struct{}, 0, len(dsStreams))
for channel := range dsStreams {
streamCtx, ok := s.streams[channel]
if !ok {
continue
}
streamCtx.cancelFn()
waitChannels = append(waitChannels, streamCtx.CloseCh)
resubmitRequests = append(resubmitRequests, streamCtx.streamRequest)
}
s.mu.RUnlock()
// Wait for all streams to stop.

View File

@@ -563,8 +563,8 @@ func toRuleGroup(log log.Logger, manager state.AlertInstanceManager, groupKey ng
// Returns the whole JSON model as a string if it fails to extract a minimum of 1 query.
func ruleToQuery(logger log.Logger, rule *ngmodels.AlertRule) string {
var queryErr error
var queries []string
queries := make([]string, 0, len(rule.Data))
for _, q := range rule.Data {
q, err := q.GetQuery()
if err != nil {

View File

@@ -46,156 +46,164 @@ func ContactPointToContactPointExport(cp definitions.ContactPoint) (notify.APIRe
// This is needed to keep the API models clean and convert from database model
j.RegisterExtension(&contactPointsExtension{})
var integration []*notify.GrafanaIntegrationConfig
contactPointsLength := len(cp.Alertmanager) + len(cp.Dingding) + len(cp.Discord) + len(cp.Email) +
len(cp.Googlechat) + len(cp.Kafka) + len(cp.Line) + len(cp.Opsgenie) +
len(cp.Pagerduty) + len(cp.OnCall) + len(cp.Pushover) + len(cp.Sensugo) +
len(cp.Sns) + len(cp.Slack) + len(cp.Teams) + len(cp.Telegram) +
len(cp.Threema) + len(cp.Victorops) + len(cp.Webhook) + len(cp.Wecom) +
len(cp.Webex)
integration := make([]*notify.GrafanaIntegrationConfig, 0, contactPointsLength)
var errs []error
for _, i := range cp.Alertmanager {
el, err := marshallIntegration(j, "prometheus-alertmanager", i, i.DisableResolveMessage)
integration = append(integration, el)
if err != nil {
errs = append(errs, err)
}
integration = append(integration, el)
}
for _, i := range cp.Dingding {
el, err := marshallIntegration(j, "dingding", i, i.DisableResolveMessage)
integration = append(integration, el)
if err != nil {
errs = append(errs, err)
}
integration = append(integration, el)
}
for _, i := range cp.Discord {
el, err := marshallIntegration(j, "discord", i, i.DisableResolveMessage)
integration = append(integration, el)
if err != nil {
errs = append(errs, err)
}
integration = append(integration, el)
}
for _, i := range cp.Email {
el, err := marshallIntegration(j, "email", i, i.DisableResolveMessage)
integration = append(integration, el)
if err != nil {
errs = append(errs, err)
}
integration = append(integration, el)
}
for _, i := range cp.Googlechat {
el, err := marshallIntegration(j, "googlechat", i, i.DisableResolveMessage)
integration = append(integration, el)
if err != nil {
errs = append(errs, err)
}
integration = append(integration, el)
}
for _, i := range cp.Kafka {
el, err := marshallIntegration(j, "kafka", i, i.DisableResolveMessage)
integration = append(integration, el)
if err != nil {
errs = append(errs, err)
}
integration = append(integration, el)
}
for _, i := range cp.Line {
el, err := marshallIntegration(j, "line", i, i.DisableResolveMessage)
integration = append(integration, el)
if err != nil {
errs = append(errs, err)
}
integration = append(integration, el)
}
for _, i := range cp.Opsgenie {
el, err := marshallIntegration(j, "opsgenie", i, i.DisableResolveMessage)
integration = append(integration, el)
if err != nil {
errs = append(errs, err)
}
integration = append(integration, el)
}
for _, i := range cp.Pagerduty {
el, err := marshallIntegration(j, "pagerduty", i, i.DisableResolveMessage)
integration = append(integration, el)
if err != nil {
errs = append(errs, err)
}
integration = append(integration, el)
}
for _, i := range cp.OnCall {
el, err := marshallIntegration(j, "oncall", i, i.DisableResolveMessage)
integration = append(integration, el)
if err != nil {
errs = append(errs, err)
}
integration = append(integration, el)
}
for _, i := range cp.Pushover {
el, err := marshallIntegration(j, "pushover", i, i.DisableResolveMessage)
integration = append(integration, el)
if err != nil {
errs = append(errs, err)
}
integration = append(integration, el)
}
for _, i := range cp.Sensugo {
el, err := marshallIntegration(j, "sensugo", i, i.DisableResolveMessage)
integration = append(integration, el)
if err != nil {
errs = append(errs, err)
}
integration = append(integration, el)
}
for _, i := range cp.Sns {
el, err := marshallIntegration(j, "sns", i, i.DisableResolveMessage)
integration = append(integration, el)
if err != nil {
errs = append(errs, err)
}
integration = append(integration, el)
}
for _, i := range cp.Slack {
el, err := marshallIntegration(j, "slack", i, i.DisableResolveMessage)
integration = append(integration, el)
if err != nil {
errs = append(errs, err)
}
integration = append(integration, el)
}
for _, i := range cp.Teams {
el, err := marshallIntegration(j, "teams", i, i.DisableResolveMessage)
integration = append(integration, el)
if err != nil {
errs = append(errs, err)
}
integration = append(integration, el)
}
for _, i := range cp.Telegram {
el, err := marshallIntegration(j, "telegram", i, i.DisableResolveMessage)
integration = append(integration, el)
if err != nil {
errs = append(errs, err)
}
integration = append(integration, el)
}
for _, i := range cp.Threema {
el, err := marshallIntegration(j, "threema", i, i.DisableResolveMessage)
integration = append(integration, el)
if err != nil {
errs = append(errs, err)
}
integration = append(integration, el)
}
for _, i := range cp.Victorops {
el, err := marshallIntegration(j, "victorops", i, i.DisableResolveMessage)
integration = append(integration, el)
if err != nil {
errs = append(errs, err)
}
integration = append(integration, el)
}
for _, i := range cp.Webhook {
el, err := marshallIntegration(j, "webhook", i, i.DisableResolveMessage)
integration = append(integration, el)
if err != nil {
errs = append(errs, err)
}
integration = append(integration, el)
}
for _, i := range cp.Wecom {
el, err := marshallIntegration(j, "wecom", i, i.DisableResolveMessage)
integration = append(integration, el)
if err != nil {
errs = append(errs, err)
}
integration = append(integration, el)
}
for _, i := range cp.Webex {
el, err := marshallIntegration(j, "webex", i, i.DisableResolveMessage)
integration = append(integration, el)
if err != nil {
errs = append(errs, err)
}
integration = append(integration, el)
}
if len(errs) > 0 {
return notify.APIReceiver{}, errors.Join(errs...)
}
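When the total comes from several inputs, as with the integration lists summed into contactPointsLength above, the capacity is simply the sum of the parts. A small illustration of the idea (merge is a hypothetical helper, not from this commit):

package main

import "fmt"

// merge concatenates slices with a single allocation by summing lengths first.
func merge(parts ...[]int) []int {
	total := 0
	for _, p := range parts {
		total += len(p)
	}
	out := make([]int, 0, total)
	for _, p := range parts {
		out = append(out, p...)
	}
	return out
}

func main() {
	fmt.Println(merge([]int{1, 2}, []int{3}, []int{4, 5})) // [1 2 3 4 5]
}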

View File

@@ -88,13 +88,14 @@ func (ecp *ContactPointService) GetContactPoints(ctx context.Context, q ContactP
}
}
var contactPoints []apimodels.EmbeddedContactPoint
for _, gr := range grafanaReceivers {
contactPoints := make([]apimodels.EmbeddedContactPoint, len(grafanaReceivers))
for i, gr := range grafanaReceivers {
contactPoint, err := GettableGrafanaReceiverToEmbeddedContactPoint(gr)
if err != nil {
return nil, err
}
contactPoints = append(contactPoints, contactPoint)
contactPoints[i] = contactPoint
}
sort.SliceStable(contactPoints, func(i, j int) bool {

View File

@@ -31,17 +31,19 @@ func (t *TemplateService) GetTemplates(ctx context.Context, orgID int64) ([]defi
return nil, err
}
var templates []definitions.NotificationTemplate
templates := make([]definitions.NotificationTemplate, 0, len(revision.cfg.TemplateFiles))
for name, tmpl := range revision.cfg.TemplateFiles {
tmpl := definitions.NotificationTemplate{
Name: name,
Template: tmpl,
}
provenance, err := t.provenanceStore.GetProvenance(ctx, &tmpl, orgID)
if err != nil {
return nil, err
}
tmpl.Provenance = definitions.Provenance(provenance)
templates = append(templates, tmpl)
}

View File

@@ -492,13 +492,14 @@ func (am *Alertmanager) GetReceivers(ctx context.Context) ([]apimodels.Receiver,
return []apimodels.Receiver{}, err
}
var rcvs []apimodels.Receiver
for _, rcv := range res.Payload {
rcvs = append(rcvs, apimodels.Receiver{
rcvs := make([]apimodels.Receiver, len(res.Payload))
for i, rcv := range res.Payload {
rcvs[i] = apimodels.Receiver{
Name: *rcv.Name,
Integrations: []apimodels.Integration{},
})
}
}
return rcvs, nil
}

View File

@@ -205,15 +205,17 @@ func (d *AlertsRouter) SyncAndApplyConfigFromDatabase(ctx context.Context) error
}
func buildRedactedAMs(l log.Logger, alertmanagers []ExternalAMcfg, ordId int64) []string {
var redactedAMs []string
redactedAMs := make([]string, 0, len(alertmanagers))
for _, am := range alertmanagers {
parsedAM, err := url.Parse(am.URL)
if err != nil {
l.Error("Failed to parse alertmanager string", "org", ordId, "error", err)
continue
}
redactedAMs = append(redactedAMs, parsedAM.Redacted())
}
return redactedAMs
}
@@ -225,9 +227,6 @@ func asSHA256(strings []string) string {
}
func (d *AlertsRouter) alertmanagersFromDatasources(orgID int64) ([]ExternalAMcfg, error) {
var (
alertmanagers []ExternalAMcfg
)
// We might have alertmanager datasources that are acting as external
// alertmanager, let's fetch them.
query := &datasources.GetDataSourcesByTypeQuery{
@@ -240,6 +239,9 @@ func (d *AlertsRouter) alertmanagersFromDatasources(orgID int64) ([]ExternalAMcf
if err != nil {
return nil, fmt.Errorf("failed to fetch datasources for org: %w", err)
}
alertmanagers := make([]ExternalAMcfg, 0, len(dataSources))
for _, ds := range dataSources {
if !ds.JsonData.Get(definitions.HandleGrafanaManagedAlerts).MustBool(false) {
continue
@@ -262,11 +264,13 @@ func (d *AlertsRouter) alertmanagersFromDatasources(orgID int64) ([]ExternalAMcf
"error", err)
continue
}
alertmanagers = append(alertmanagers, ExternalAMcfg{
URL: amURL,
Headers: headers,
})
}
return alertmanagers, nil
}

View File

@@ -712,7 +712,7 @@ func TestAlertManagers_buildRedactedAMs(t *testing.T) {
amUrls: []string{"1234://user:password@localhost:9094"},
errCalls: 1,
errLog: "Failed to parse alertmanager string",
expected: nil,
expected: []string{},
},
}
@@ -724,6 +724,7 @@ func TestAlertManagers_buildRedactedAMs(t *testing.T) {
URL: url,
})
}
require.Equal(t, tt.expected, buildRedactedAMs(&fakeLogger, cfgs, tt.orgId))
require.Equal(t, tt.errCalls, fakeLogger.ErrorLogs.Calls)
require.Equal(t, tt.errLog, fakeLogger.ErrorLogs.Message)
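This test change, like the TestDirAsLocalSources one above, follows from a real behavioral shift: var s []T with no appends stays nil, while make([]T, 0, n) yields a non-nil empty slice, and require.Equal (reflect.DeepEqual semantics for non-byte slices) distinguishes the two. A quick illustration:

package main

import (
	"fmt"
	"reflect"
)

func main() {
	var nilSlice []string              // nil, length 0
	emptySlice := make([]string, 0, 4) // non-nil, length 0

	fmt.Println(nilSlice == nil, emptySlice == nil) // true false
	// require.Equal compares this way, which is why the expected values
	// above change from nil to []string{} and []*LocalSource{}.
	fmt.Println(reflect.DeepEqual(nilSlice, emptySlice)) // false
}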

View File

@@ -141,9 +141,9 @@ func errorAlert(labels, annotations data.Labels, alertState *State, urlStr strin
func FromStateTransitionToPostableAlerts(firingStates []StateTransition, stateManager *Manager, appURL *url.URL) apimodels.PostableAlerts {
alerts := apimodels.PostableAlerts{PostableAlerts: make([]models.PostableAlert, 0, len(firingStates))}
var sentAlerts []*State
ts := time.Now()
sentAlerts := make([]*State, 0, len(firingStates))
for _, alertState := range firingStates {
if !alertState.NeedsSending(stateManager.ResendDelay) {
continue

View File

@@ -787,7 +787,8 @@ func (st DBstore) RenameReceiverInNotificationSettings(ctx context.Context, orgI
if len(rules) == 0 {
return 0, nil
}
var updates []ngmodels.UpdateRule
updates := make([]ngmodels.UpdateRule, 0, len(rules))
for _, rule := range rules {
r := ngmodels.CopyRule(rule)
for idx := range r.NotificationSettings {
@@ -795,6 +796,7 @@ func (st DBstore) RenameReceiverInNotificationSettings(ctx context.Context, orgI
r.NotificationSettings[idx].Receiver = newReceiver
}
}
updates = append(updates, ngmodels.UpdateRule{
Existing: rule,
New: *r,

View File

@@ -401,15 +401,15 @@ type pluginsSettingsServiceMock struct {
func (s *pluginsSettingsServiceMock) GetPluginSettings(_ context.Context, args *pluginsettings.GetArgs) ([]*pluginsettings.InfoDTO, error) {
s.getPluginSettingsArgs = append(s.getPluginSettingsArgs, args.OrgID)
var res []*pluginsettings.InfoDTO
for _, ps := range s.storedPluginSettings {
res = append(res, &pluginsettings.InfoDTO{
res := make([]*pluginsettings.InfoDTO, len(s.storedPluginSettings))
for i, ps := range s.storedPluginSettings {
res[i] = &pluginsettings.InfoDTO{
PluginID: ps.PluginID,
OrgID: ps.OrgID,
Enabled: ps.Enabled,
Pinned: ps.Pinned,
PluginVersion: ps.PluginVersion,
})
}
}
return res, s.err

View File

@@ -163,17 +163,23 @@ func (p *EnvVarsProvider) tracingEnvVars(plugin *plugins.Plugin) []string {
func (p *EnvVarsProvider) pluginSettingsEnvVars(pluginID string) []string {
const customConfigPrefix = "GF_PLUGIN"
var env []string
for k, v := range p.cfg.PluginSettings[pluginID] {
pluginSettings := p.cfg.PluginSettings[pluginID]
env := make([]string, 0, len(pluginSettings))
for k, v := range pluginSettings {
if k == "path" || strings.ToLower(k) == "id" {
continue
}
key := fmt.Sprintf("%s_%s", customConfigPrefix, strings.ToUpper(k))
if value := os.Getenv(key); value != "" {
v = value
}
env = append(env, fmt.Sprintf("%s=%s", key, v))
}
return env
}

View File

@@ -191,7 +191,9 @@ func getNonFolderDashboardDoc(dash dashboard, location string) *bluge.Document {
func getDashboardPanelDocs(dash dashboard, location string) []*bluge.Document {
dashURL := fmt.Sprintf("/d/%s/%s", dash.uid, slugify.Slugify(dash.summary.Name))
var docs []*bluge.Document
// pre-allocating a little bit more than necessary, possibly
docs := make([]*bluge.Document, 0, len(dash.summary.Nested))
for _, panel := range dash.summary.Nested {
if panel.Fields["type"] == "row" {
continue // skip rows
@@ -239,7 +241,7 @@ func getDashboardPanelDocs(dash dashboard, location string) []*bluge.Document {
}
// Names need to be indexed a few ways to support key features
func newSearchDocument(uid string, name string, descr string, url string) *bluge.Document {
func newSearchDocument(uid, name, descr, url string) *bluge.Document {
doc := bluge.NewDocument(uid)
if name != "" {

View File

@@ -51,9 +51,11 @@ func Test_punctuationCharFilter_Filter(t1 *testing.T) {
func TestNgramIndexAnalyzer(t *testing.T) {
stream := ngramIndexAnalyzer.Analyze([]byte("x-rays.and.xRays, and НемногоКириллицы"))
expectedTerms := []string{"x", "r", "ra", "ray", "rays", "a", "an", "and", "x", "r", "ra", "ray", "rays", "a", "an", "and", "н", "не", "нем", "немн", "немно", "немног", "немного", "к", "ки", "кир", "кири", "кирил", "кирилл", "кирилли"}
var actualTerms []string
actualTerms := make([]string, 0, len(stream))
for _, t := range stream {
actualTerms = append(actualTerms, string(t.Term))
}
require.Equal(t, expectedTerms, actualTerms)
}

View File

@@ -97,9 +97,8 @@ func (e externalAlertmanagerToDatasources) Exec(sess *xorm.Session, mg *migrator
}
func removeDuplicates(strs []string) []string {
var res []string
found := map[string]bool{}
found := make(map[string]bool, len(strs))
res := make([]string, 0, len(strs))
for _, str := range strs {
if found[str] {
continue

View File

@@ -435,8 +435,10 @@ func (f *accessControlDashboardPermissionFilter) nestedFoldersSelectors(permSele
}
func getAllowedUIDs(action string, user identity.Requester, scopePrefix string) []any {
var args []any
for _, uidScope := range user.GetPermissions()[action] {
uidScopes := user.GetPermissions()[action]
args := make([]any, 0, len(uidScopes))
for _, uidScope := range uidScopes {
if !strings.HasPrefix(uidScope, scopePrefix) {
continue
}

View File

@@ -101,7 +101,7 @@ func (api *Api) getAuthorizedList(ctx context.Context, identity identity.Request
return nil, err
}
var authorizedProviders []*models.SSOSettings
authorizedProviders := make([]*models.SSOSettings, 0, len(allProviders))
for _, provider := range allProviders {
ev := ac.EvalPermission(ac.ActionSettingsRead, ac.Scope("settings", "auth."+provider.Provider, "*"))
hasAccess, err := api.AccessControl.Evaluate(ctx, identity, ev)

View File

@@ -1986,19 +1986,23 @@ func (cfg *Cfg) readLiveSettings(iniFile *ini.File) error {
cfg.LiveHAEngineAddress = section.Key("ha_engine_address").MustString("127.0.0.1:6379")
cfg.LiveHAEnginePassword = section.Key("ha_engine_password").MustString("")
var originPatterns []string
allowedOrigins := section.Key("allowed_origins").MustString("")
for _, originPattern := range strings.Split(allowedOrigins, ",") {
origins := strings.Split(allowedOrigins, ",")
originPatterns := make([]string, 0, len(origins))
for _, originPattern := range origins {
originPattern = strings.TrimSpace(originPattern)
if originPattern == "" {
continue
}
originPatterns = append(originPatterns, originPattern)
}
_, err := GetAllowedOriginGlobs(originPatterns)
if err != nil {
return err
}
cfg.LiveAllowedOrigins = originPatterns
return nil
}

View File

@@ -75,7 +75,7 @@ func readSecureSocksDSProxySettings(iniFile *ini.File) (SecureSocksDSProxySettin
s.ClientKey = string(keyPEMBlock)
}
var rootCAs []string
rootCAs := make([]string, 0, len(s.RootCAFilePaths))
for _, rootCAFile := range s.RootCAFilePaths {
// nolint:gosec
// The gosec G304 warning can be ignored because `rootCAFile` comes from config ini, and we check below if

View File

@@ -88,9 +88,8 @@ type argJSONQuery struct {
}
func (e *AzureResourceGraphDatasource) buildQueries(queries []backend.DataQuery, dsInfo types.DatasourceInfo) ([]*AzureResourceGraphQuery, error) {
var azureResourceGraphQueries []*AzureResourceGraphQuery
for _, query := range queries {
azureResourceGraphQueries := make([]*AzureResourceGraphQuery, len(queries))
for i, query := range queries {
queryJSONModel := argJSONQuery{}
err := json.Unmarshal(query.JSON, &queryJSONModel)
if err != nil {
@@ -105,19 +104,18 @@ func (e *AzureResourceGraphDatasource) buildQueries(queries []backend.DataQuery,
}
interpolatedQuery, err := macros.KqlInterpolate(query, dsInfo, azureResourceGraphTarget.Query)
if err != nil {
return nil, err
}
azureResourceGraphQueries = append(azureResourceGraphQueries, &AzureResourceGraphQuery{
azureResourceGraphQueries[i] = &AzureResourceGraphQuery{
RefID: query.RefID,
ResultFormat: resultFormat,
JSON: query.JSON,
InterpolatedQuery: interpolatedQuery,
TimeRange: query.TimeRange,
QueryType: query.QueryType,
})
}
}
return azureResourceGraphQueries, nil

View File

@@ -129,8 +129,8 @@ type AzureMonitorDimensionFilterBackend struct {
}
func ConstructFiltersString(a dataquery.AzureMetricDimension) string {
var filterStrings []string
for _, filter := range a.Filters {
filterStrings := make([]string, len(a.Filters))
for i, filter := range a.Filters {
dimension := ""
operator := ""
if a.Dimension != nil {
@@ -139,11 +139,14 @@ func ConstructFiltersString(a dataquery.AzureMetricDimension) string {
if a.Operator != nil {
operator = *a.Operator
}
filterStrings = append(filterStrings, fmt.Sprintf("%v %v '%v'", dimension, operator, filter))
filterStrings[i] = fmt.Sprintf("%v %v '%v'", dimension, operator, filter)
}
if a.Operator != nil && *a.Operator == "eq" {
return strings.Join(filterStrings, " or ")
}
return strings.Join(filterStrings, " and ")
}

View File

@@ -7,7 +7,7 @@ import (
)
func valuesToListMetricRespone[T any](values []T) []resources.ResourceResponse[T] {
var response []resources.ResourceResponse[T]
response := make([]resources.ResourceResponse[T], 0, len(values))
for _, value := range values {
response = append(response, resources.ResourceResponse[T]{Value: value})
}

View File

@@ -84,15 +84,16 @@ func newFlowTestQueries(allJsonBytes []byte) ([]backend.DataQuery, error) {
return nil, fmt.Errorf("error unmarshaling query-json: %w", err)
}
var queries []backend.DataQuery
for _, jsonBytes := range jsonBytesArray {
queries := make([]backend.DataQuery, len(jsonBytesArray))
for i, jsonBytes := range jsonBytesArray {
// we need to extract some fields from the json-array
var jsonInfo queryDataTestQueryJSON
err = json.Unmarshal(jsonBytes, &jsonInfo)
if err != nil {
return nil, err
}
// we setup the DataQuery, with values loaded from the json
query := backend.DataQuery{
RefID: jsonInfo.RefID,
@@ -101,7 +102,8 @@ func newFlowTestQueries(allJsonBytes []byte) ([]backend.DataQuery, error) {
TimeRange: timeRange,
JSON: jsonBytes,
}
queries = append(queries, query)
queries[i] = query
}
return queries, nil
}

View File

@@ -873,16 +873,16 @@ func trimDatapoints(queryResult backend.DataResponse, target *Query) {
// we sort the label's pairs by the label-key,
// and return the label-values
func getSortedLabelValues(labels data.Labels) []string {
var keys []string
keys := make([]string, 0, len(labels))
for key := range labels {
keys = append(keys, key)
}
sort.Strings(keys)
var values []string
for _, key := range keys {
values = append(values, labels[key])
values := make([]string, len(keys))
for i, key := range keys {
values[i] = labels[key]
}
return values

View File

@@ -60,7 +60,7 @@ func TestNewQueryDataResponse(t *testing.T) {
newJSONArray(`[0, 1, 2]`, &arrow.TimestampType{}),
}
var arr []arrow.Array
arr := make([]arrow.Array, 0, len(strValues))
for _, v := range strValues {
tarr, _, err := array.FromJSON(
alloc,

View File

@@ -269,12 +269,12 @@ func transformRowsForTimeSeries(rows []models.Row, query models.Query) data.Fram
}
func newFrameWithTimeField(row models.Row, column string, colIndex int, query models.Query, frameName []byte) *data.Frame {
var timeArray []time.Time
var floatArray []*float64
var stringArray []*string
var boolArray []*bool
valType := util.Typeof(row.Values, colIndex)
timeArray := make([]time.Time, 0, len(row.Values))
for _, valuePair := range row.Values {
timestamp, timestampErr := util.ParseTimestamp(valuePair[0])
// we only add this row if the timestamp is valid

View File

@@ -136,10 +136,11 @@ func rootSpan(frame *BetterFrame) Row {
}
func fieldNames(frame *data.Frame) []string {
var names []string
for _, f := range frame.Fields {
names = append(names, f.Name)
names := make([]string, len(frame.Fields))
for i, f := range frame.Fields {
names[i] = f.Name
}
return names
}

View File

@@ -48,11 +48,13 @@ func SplitString(str string) []string {
return res
}
var result []string
matches := stringListItemMatcher.FindAllString(str, -1)
for _, match := range matches {
result = append(result, strings.Trim(match, "\""))
result := make([]string, len(matches))
for i, match := range matches {
result[i] = strings.Trim(match, "\"")
}
return result
}

View File

@@ -30,22 +30,26 @@ func TlsCiphersToIDs(names []string) ([]uint16, error) {
// no ciphers specified, use defaults
return nil, nil
}
var ids []uint16
var missing []string
ciphers := tls.CipherSuites()
var cipherMap = make(map[string]uint16, len(ciphers))
cipherMap := make(map[string]uint16, len(ciphers))
for _, cipher := range ciphers {
cipherMap[cipher.Name] = cipher.ID
}
missing := []string{}
ids := make([]uint16, 0, len(names))
for _, name := range names {
name = strings.ToUpper(name)
id, ok := cipherMap[name]
if !ok {
missing = append(missing, name)
continue
}
ids = append(ids, id)
}
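make accepts the same size hint for maps, as with cipherMap here and the found map in the duplicate-removal migration earlier: the hint sizes the map's buckets up front so inserts up to that count avoid incremental growth. A sketch mirroring that removeDuplicates helper:

package main

import "fmt"

// dedupe keeps first occurrences, sizing both the map and the slice up front.
func dedupe(strs []string) []string {
	found := make(map[string]bool, len(strs)) // size hint: no rehash growth
	res := make([]string, 0, len(strs))       // capacity is an upper bound
	for _, s := range strs {
		if found[s] {
			continue
		}
		found[s] = true
		res = append(res, s)
	}
	return res
}

func main() {
	fmt.Println(dedupe([]string{"a", "b", "a", "c", "b"})) // [a b c]
}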