chore(perf): Pre-allocate where possible (enable prealloc linter) (#88952)

* chore(perf): Pre-allocate where possible (enable prealloc linter)

Signed-off-by: Dave Henderson <dave.henderson@grafana.com>

* fix TestAlertManagers_buildRedactedAMs

Signed-off-by: Dave Henderson <dave.henderson@grafana.com>

* prealloc a slice that appeared after rebase

Signed-off-by: Dave Henderson <dave.henderson@grafana.com>

---------

Signed-off-by: Dave Henderson <dave.henderson@grafana.com>
Dave Henderson, 2024-06-14 14:16:36 -04:00, committed by GitHub
parent e53e6e7caa
commit 6262c56132
No known key found for this signature in database
GPG Key ID: B5690EEEBB952194
43 changed files with 211 additions and 140 deletions
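
Note: every hunk below replaces one of two shapes. A slice declared with var s []T and grown with append re-allocates its backing array as it fills, while make() sizes it once up front. As a minimal sketch (hypothetical helpers, not code from this commit): the indexed form fits loops where every input yields exactly one output, and the append-with-capacity form fits loops that may skip items with continue.

package main

import "fmt"

// squares maps inputs 1:1, so the slice can be fully sized and written by index.
func squares(nums []int) []int {
	out := make([]int, len(nums))
	for i, n := range nums {
		out[i] = n * n
	}
	return out
}

// evens filters, so only capacity is reserved; append fills just the kept
// items, avoiding the zero-valued holes that full sizing would leave behind.
func evens(nums []int) []int {
	out := make([]int, 0, len(nums))
	for _, n := range nums {
		if n%2 != 0 {
			continue
		}
		out = append(out, n)
	}
	return out
}

func main() {
	fmt.Println(squares([]int{1, 2, 3}))  // [1 4 9]
	fmt.Println(evens([]int{1, 2, 3, 4})) // [2 4]
}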

View File

@@ -129,14 +129,20 @@ max-func-lines = 60
 [linters]
 disable-all = true
+# try to keep this list sorted, please
 enable = [
+  "asciicheck",
   "bodyclose",
   "depguard",
   "dogsled",
   "errcheck",
+  "errorlint",
+  "exhaustive",
+  "exportloopref",
   # "gochecknoinits",
   # "goconst",
   # "gocritic", # Temporarily disabled on 2022-09-09, running into weird bug "ruleguard: execution error: used Run() with an empty rule set; forgot to call Load() first?"
+  "gocyclo",
   "goimports",
   "goprintffuncname",
   "gosec",
@@ -145,19 +151,14 @@ enable = [
   "ineffassign",
   "misspell",
   "nakedret",
-  "exportloopref",
+  "prealloc",
+  "revive",
   "staticcheck",
   "stylecheck",
   "typecheck",
   "unconvert",
   "unused",
   "whitespace",
-  "gocyclo",
-  "exhaustive",
-  "typecheck",
-  "asciicheck",
-  "errorlint",
-  "revive",
 ]
 # Disabled linters (might want them later)

View File

@@ -156,8 +156,8 @@ func publishPackages(cfg packaging.PublishConfig) error {
     pth = path.Join(pth, product)
     baseArchiveURL := fmt.Sprintf("https://dl.grafana.com/%s", pth)
-    var builds []buildRepr
-    for _, ba := range packaging.ArtifactConfigs {
+    builds := make([]buildRepr, len(packaging.ArtifactConfigs))
+    for i, ba := range packaging.ArtifactConfigs {
         u := ba.GetURL(baseArchiveURL, cfg)
         sha256, err := getSHA256(u)
@@ -165,12 +165,12 @@ func publishPackages(cfg packaging.PublishConfig) error {
             return err
         }
-        builds = append(builds, buildRepr{
+        builds[i] = buildRepr{
             OS:     ba.Os,
             URL:    u,
             SHA256: string(sha256),
             Arch:   ba.Arch,
-        })
+        }
     }
     r := releaseRepr{

View File

@@ -558,8 +558,9 @@ func (e *State) biSeriesSeries(labels data.Labels, op string, aSeries, bSeries S
 func (e *State) walkFunc(node *parse.FuncNode) (Results, error) {
     var res Results
     var err error
-    var in []reflect.Value
-    for _, a := range node.Args {
+    in := make([]reflect.Value, len(node.Args))
+    for i, a := range node.Args {
         var v any
         switch t := a.(type) {
         case *parse.StringNode:
@@ -580,7 +581,8 @@ func (e *State) walkFunc(node *parse.FuncNode) (Results, error) {
         if err != nil {
             return res, err
         }
-        in = append(in, reflect.ValueOf(v))
+
+        in[i] = reflect.ValueOf(v)
     }
     f := reflect.ValueOf(node.F.F)

View File

@@ -444,7 +444,7 @@ func ReadLoggingConfig(modes []string, logsPath string, cfg *ini.File) error {
     defaultLevelName, _ := getLogLevelFromConfig("log", "info", cfg)
     defaultFilters := getFilters(util.SplitString(cfg.Section("log").Key("filters").String()))
-    var configLoggers []logWithFilters
+    configLoggers := make([]logWithFilters, 0, len(modes))
     for _, mode := range modes {
         mode = strings.TrimSpace(mode)
         sec, err := cfg.GetSection("log." + mode)
@@ -505,6 +505,7 @@ func ReadLoggingConfig(modes []string, logsPath string, cfg *ini.File) error {
         handler.filters = modeFilters
         handler.maxLevel = leveloption
         configLoggers = append(configLoggers, handler)
+
     }
     if len(configLoggers) > 0 {

View File

@@ -71,7 +71,7 @@ func (m *service) Run(ctx context.Context) error {
         return nil
     }

-    var svcs []services.Service
+    svcs := make([]services.Service, 0, len(m.serviceMap))
     for _, s := range m.serviceMap {
         svcs = append(svcs, s)
     }

View File

@@ -93,6 +93,8 @@ func TestDefaultStaticDetectorsInspector(t *testing.T) {
         plugin *plugins.Plugin
         exp    bool
     }
+
+    //nolint:prealloc // just a test, and it'd require too much refactoring to preallocate
     var tcs []tc

     // Angular imports

View File

@@ -381,11 +381,13 @@ func TestFSPathSeparatorFiles(t *testing.T) {
 }

 func fileList(manifest *PluginManifest) []string {
-    var keys []string
+    keys := make([]string, 0, len(manifest.Files))
     for k := range manifest.Files {
         keys = append(keys, k)
     }
+
     sort.Strings(keys)
+
     return keys
 }

View File

@@ -62,9 +62,10 @@ func DirAsLocalSources(pluginsPath string, class plugins.Class) ([]*LocalSource,
     }

     slices.Sort(pluginDirs)
-    var sources []*LocalSource
-    for _, dir := range pluginDirs {
-        sources = append(sources, NewLocalSource(class, []string{dir}))
+
+    sources := make([]*LocalSource, len(pluginDirs))
+    for i, dir := range pluginDirs {
+        sources[i] = NewLocalSource(class, []string{dir})
     }
     return sources, nil
 }

View File

@@ -46,7 +46,7 @@ func TestDirAsLocalSources(t *testing.T) {
         {
             name:        "Directory with no subdirectories",
             pluginsPath: filepath.Join(testdataDir, "pluginRootWithDist", "datasource"),
-            expected:    nil,
+            expected:    []*LocalSource{},
         },
         {
             name: "Directory with a symlink to a directory",

View File

@@ -38,22 +38,25 @@ func (s *Service) externalPluginSources() []plugins.PluginSource {
         return []plugins.PluginSource{}
     }

-    var srcs []plugins.PluginSource
-    for _, src := range localSrcs {
-        srcs = append(srcs, src)
+
+    srcs := make([]plugins.PluginSource, len(localSrcs))
+    for i, src := range localSrcs {
+        srcs[i] = src
     }
+
     return srcs
 }

 func (s *Service) pluginSettingSources() []plugins.PluginSource {
-    var sources []plugins.PluginSource
+    sources := make([]plugins.PluginSource, 0, len(s.cfg.PluginSettings))
     for _, ps := range s.cfg.PluginSettings {
         path, exists := ps["path"]
         if !exists || path == "" {
             continue
         }
+
         sources = append(sources, NewLocalSource(plugins.ClassExternal, []string{path}))
     }
     return sources
 }

View File

@@ -197,9 +197,8 @@ type versionArg struct {
 }

 func createPluginVersions(versions ...versionArg) []Version {
-    var vs []Version
-    for _, version := range versions {
+    vs := make([]Version, len(versions))
+    for i, version := range versions {
         ver := Version{
             Version: version.version,
         }
@@ -211,7 +210,8 @@ func createPluginVersions(versions ...versionArg) []Version {
                 }
             }
         }
-        vs = append(vs, ver)
+
+        vs[i] = ver
     }

     return vs

View File

@@ -326,7 +326,7 @@ func (e DatasourcePermissionsService) SetBuiltInRolePermission(ctx context.Conte
 // if an OSS/unlicensed instance is upgraded to Enterprise/licensed.
 // https://github.com/grafana/identity-access-team/issues/672
 func (e DatasourcePermissionsService) SetPermissions(ctx context.Context, orgID int64, resourceID string, commands ...accesscontrol.SetResourcePermissionCommand) ([]accesscontrol.ResourcePermission, error) {
-    var dbCommands []resourcepermissions.SetResourcePermissionsCommand
+    dbCommands := make([]resourcepermissions.SetResourcePermissionsCommand, 0, len(commands))
     for _, cmd := range commands {
         // Only set query permissions for built-in roles; do not set permissions for data sources with * as UID, as this would grant wildcard permissions
         if cmd.Permission != "Query" || cmd.BuiltinRole == "" || resourceID == "*" {

View File

@@ -420,21 +420,12 @@ func (s *Service) RunMigration(ctx context.Context, uid string) (*cloudmigration
 }

 func (s *Service) getMigrationDataJSON(ctx context.Context) (*cloudmigration.MigrateDataRequest, error) {
-    var migrationDataSlice []cloudmigration.MigrateDataRequestItem
     // Data sources
     dataSources, err := s.getDataSources(ctx)
     if err != nil {
         s.log.Error("Failed to get datasources", "err", err)
         return nil, err
     }
-    for _, ds := range dataSources {
-        migrationDataSlice = append(migrationDataSlice, cloudmigration.MigrateDataRequestItem{
-            Type:  cloudmigration.DatasourceDataType,
-            RefID: ds.UID,
-            Name:  ds.Name,
-            Data:  ds,
-        })
-    }

     // Dashboards
     dashboards, err := s.getDashboards(ctx)
@@ -443,6 +434,26 @@ func (s *Service) getMigrationDataJSON(ctx context.Context) (*cloudmigration.Mig
         return nil, err
     }

+    // Folders
+    folders, err := s.getFolders(ctx)
+    if err != nil {
+        s.log.Error("Failed to get folders", "err", err)
+        return nil, err
+    }
+
+    migrationDataSlice := make(
+        []cloudmigration.MigrateDataRequestItem, 0,
+        len(dataSources)+len(dashboards)+len(folders),
+    )
+
+    for _, ds := range dataSources {
+        migrationDataSlice = append(migrationDataSlice, cloudmigration.MigrateDataRequestItem{
+            Type:  cloudmigration.DatasourceDataType,
+            RefID: ds.UID,
+            Name:  ds.Name,
+            Data:  ds,
+        })
+    }
+
     for _, dashboard := range dashboards {
         dashboard.Data.Del("id")
         migrationDataSlice = append(migrationDataSlice, cloudmigration.MigrateDataRequestItem{
@@ -453,13 +464,6 @@ func (s *Service) getMigrationDataJSON(ctx context.Context) (*cloudmigration.Mig
         })
     }

-    // Folders
-    folders, err := s.getFolders(ctx)
-    if err != nil {
-        s.log.Error("Failed to get folders", "err", err)
-        return nil, err
-    }
     for _, f := range folders {
         migrationDataSlice = append(migrationDataSlice, cloudmigration.MigrateDataRequestItem{
             Type:  cloudmigration.FolderDataType,
@@ -468,6 +472,7 @@ func (s *Service) getMigrationDataJSON(ctx context.Context) (*cloudmigration.Mig
             Data:  f,
         })
     }
+
     migrationData := &cloudmigration.MigrateDataRequest{
         Items: migrationDataSlice,
     }
@@ -521,9 +526,9 @@ func (s *Service) getFolders(ctx context.Context) ([]folder.Folder, error) {
         return nil, err
     }

-    var result []folder.Folder
-    for _, folder := range folders {
-        result = append(result, *folder)
+    result := make([]folder.Folder, len(folders))
+    for i, folder := range folders {
+        result[i] = *folder
     }

     return result, nil
@@ -535,10 +540,11 @@ func (s *Service) getDashboards(ctx context.Context) ([]dashboards.Dashboard, er
         return nil, err
     }

-    var result []dashboards.Dashboard
-    for _, dashboard := range dashs {
-        result = append(result, *dashboard)
+    result := make([]dashboards.Dashboard, len(dashs))
+    for i, dashboard := range dashs {
+        result[i] = *dashboard
     }

     return result, nil
 }

View File

@@ -332,12 +332,14 @@ func setupRedisLiveEngine(g *GrafanaLive, node *centrifuge.Node) error {
     redisShardConfigs := []centrifuge.RedisShardConfig{
         {Address: redisAddress, Password: redisPassword},
     }
-    var redisShards []*centrifuge.RedisShard
+
+    redisShards := make([]*centrifuge.RedisShard, 0, len(redisShardConfigs))
     for _, redisConf := range redisShardConfigs {
         redisShard, err := centrifuge.NewRedisShard(node, redisConf)
         if err != nil {
             return fmt.Errorf("error connecting to Live Redis: %v", err)
         }
+
         redisShards = append(redisShards, redisShard)
     }
@@ -348,6 +350,7 @@ func setupRedisLiveEngine(g *GrafanaLive, node *centrifuge.Node) error {
     if err != nil {
         return fmt.Errorf("error creating Live Redis broker: %v", err)
     }
+
     node.SetBroker(broker)

     presenceManager, err := centrifuge.NewRedisPresenceManager(node, centrifuge.RedisPresenceManagerConfig{
@@ -357,7 +360,9 @@ func setupRedisLiveEngine(g *GrafanaLive, node *centrifuge.Node) error {
     if err != nil {
         return fmt.Errorf("error creating Live Redis presence manager: %v", err)
     }
+
     node.SetPresenceManager(presenceManager)
+
     return nil
 }

View File

@@ -116,17 +116,21 @@ func (s *Manager) handleDatasourceEvent(orgID int64, dsUID string, resubmit bool
         s.mu.RUnlock()
         return nil
     }
-    var resubmitRequests []streamRequest
-    var waitChannels []chan struct{}
+
+    resubmitRequests := make([]streamRequest, 0, len(dsStreams))
+    waitChannels := make([]chan struct{}, 0, len(dsStreams))
     for channel := range dsStreams {
         streamCtx, ok := s.streams[channel]
         if !ok {
             continue
         }
+
         streamCtx.cancelFn()
+
         waitChannels = append(waitChannels, streamCtx.CloseCh)
         resubmitRequests = append(resubmitRequests, streamCtx.streamRequest)
     }
+
     s.mu.RUnlock()

     // Wait for all streams to stop.

View File

@@ -563,8 +563,8 @@ func toRuleGroup(log log.Logger, manager state.AlertInstanceManager, groupKey ng
 // Returns the whole JSON model as a string if it fails to extract a minimum of 1 query.
 func ruleToQuery(logger log.Logger, rule *ngmodels.AlertRule) string {
     var queryErr error
-    var queries []string
+    queries := make([]string, 0, len(rule.Data))
     for _, q := range rule.Data {
         q, err := q.GetQuery()
         if err != nil {

View File

@@ -46,156 +46,164 @@ func ContactPointToContactPointExport(cp definitions.ContactPoint) (notify.APIRe
     // This is needed to keep the API models clean and convert from database model
     j.RegisterExtension(&contactPointsExtension{})

-    var integration []*notify.GrafanaIntegrationConfig
+    contactPointsLength := len(cp.Alertmanager) + len(cp.Dingding) + len(cp.Discord) + len(cp.Email) +
+        len(cp.Googlechat) + len(cp.Kafka) + len(cp.Line) + len(cp.Opsgenie) +
+        len(cp.Pagerduty) + len(cp.OnCall) + len(cp.Pushover) + len(cp.Sensugo) +
+        len(cp.Sns) + len(cp.Slack) + len(cp.Teams) + len(cp.Telegram) +
+        len(cp.Threema) + len(cp.Victorops) + len(cp.Webhook) + len(cp.Wecom) +
+        len(cp.Webex)
+
+    integration := make([]*notify.GrafanaIntegrationConfig, 0, contactPointsLength)

     var errs []error
     for _, i := range cp.Alertmanager {
         el, err := marshallIntegration(j, "prometheus-alertmanager", i, i.DisableResolveMessage)
-        integration = append(integration, el)
         if err != nil {
             errs = append(errs, err)
         }
+        integration = append(integration, el)
     }
     for _, i := range cp.Dingding {
         el, err := marshallIntegration(j, "dingding", i, i.DisableResolveMessage)
-        integration = append(integration, el)
         if err != nil {
             errs = append(errs, err)
         }
+        integration = append(integration, el)
     }
     for _, i := range cp.Discord {
         el, err := marshallIntegration(j, "discord", i, i.DisableResolveMessage)
-        integration = append(integration, el)
         if err != nil {
             errs = append(errs, err)
         }
+        integration = append(integration, el)
     }
     for _, i := range cp.Email {
         el, err := marshallIntegration(j, "email", i, i.DisableResolveMessage)
-        integration = append(integration, el)
         if err != nil {
             errs = append(errs, err)
         }
+        integration = append(integration, el)
     }
     for _, i := range cp.Googlechat {
         el, err := marshallIntegration(j, "googlechat", i, i.DisableResolveMessage)
-        integration = append(integration, el)
         if err != nil {
             errs = append(errs, err)
         }
+        integration = append(integration, el)
     }
     for _, i := range cp.Kafka {
         el, err := marshallIntegration(j, "kafka", i, i.DisableResolveMessage)
-        integration = append(integration, el)
         if err != nil {
             errs = append(errs, err)
         }
+        integration = append(integration, el)
     }
     for _, i := range cp.Line {
         el, err := marshallIntegration(j, "line", i, i.DisableResolveMessage)
-        integration = append(integration, el)
         if err != nil {
             errs = append(errs, err)
         }
+        integration = append(integration, el)
     }
     for _, i := range cp.Opsgenie {
         el, err := marshallIntegration(j, "opsgenie", i, i.DisableResolveMessage)
-        integration = append(integration, el)
         if err != nil {
             errs = append(errs, err)
         }
+        integration = append(integration, el)
     }
     for _, i := range cp.Pagerduty {
         el, err := marshallIntegration(j, "pagerduty", i, i.DisableResolveMessage)
-        integration = append(integration, el)
         if err != nil {
             errs = append(errs, err)
         }
+        integration = append(integration, el)
     }
     for _, i := range cp.OnCall {
         el, err := marshallIntegration(j, "oncall", i, i.DisableResolveMessage)
-        integration = append(integration, el)
         if err != nil {
             errs = append(errs, err)
         }
+        integration = append(integration, el)
     }
     for _, i := range cp.Pushover {
         el, err := marshallIntegration(j, "pushover", i, i.DisableResolveMessage)
-        integration = append(integration, el)
         if err != nil {
             errs = append(errs, err)
         }
+        integration = append(integration, el)
     }
     for _, i := range cp.Sensugo {
         el, err := marshallIntegration(j, "sensugo", i, i.DisableResolveMessage)
-        integration = append(integration, el)
         if err != nil {
             errs = append(errs, err)
         }
+        integration = append(integration, el)
     }
     for _, i := range cp.Sns {
         el, err := marshallIntegration(j, "sns", i, i.DisableResolveMessage)
-        integration = append(integration, el)
         if err != nil {
             errs = append(errs, err)
         }
+        integration = append(integration, el)
     }
     for _, i := range cp.Slack {
         el, err := marshallIntegration(j, "slack", i, i.DisableResolveMessage)
-        integration = append(integration, el)
         if err != nil {
             errs = append(errs, err)
         }
+        integration = append(integration, el)
     }
     for _, i := range cp.Teams {
         el, err := marshallIntegration(j, "teams", i, i.DisableResolveMessage)
-        integration = append(integration, el)
         if err != nil {
             errs = append(errs, err)
         }
+        integration = append(integration, el)
     }
     for _, i := range cp.Telegram {
         el, err := marshallIntegration(j, "telegram", i, i.DisableResolveMessage)
-        integration = append(integration, el)
         if err != nil {
             errs = append(errs, err)
         }
+        integration = append(integration, el)
     }
     for _, i := range cp.Threema {
         el, err := marshallIntegration(j, "threema", i, i.DisableResolveMessage)
-        integration = append(integration, el)
         if err != nil {
             errs = append(errs, err)
         }
+        integration = append(integration, el)
     }
     for _, i := range cp.Victorops {
         el, err := marshallIntegration(j, "victorops", i, i.DisableResolveMessage)
-        integration = append(integration, el)
         if err != nil {
             errs = append(errs, err)
         }
+        integration = append(integration, el)
     }
     for _, i := range cp.Webhook {
         el, err := marshallIntegration(j, "webhook", i, i.DisableResolveMessage)
-        integration = append(integration, el)
         if err != nil {
             errs = append(errs, err)
         }
+        integration = append(integration, el)
     }
     for _, i := range cp.Wecom {
         el, err := marshallIntegration(j, "wecom", i, i.DisableResolveMessage)
-        integration = append(integration, el)
         if err != nil {
             errs = append(errs, err)
         }
+        integration = append(integration, el)
     }
     for _, i := range cp.Webex {
         el, err := marshallIntegration(j, "webex", i, i.DisableResolveMessage)
-        integration = append(integration, el)
         if err != nil {
             errs = append(errs, err)
         }
+        integration = append(integration, el)
     }
     if len(errs) > 0 {
         return notify.APIReceiver{}, errors.Join(errs...)
     }
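
Note: when one slice gathers items from many source collections, as above, the right capacity is the sum of their lengths, computed once before the loops. A sketch of the same summed-capacity idea with hypothetical names, not the Grafana code:

// merge pre-sizes the destination for both inputs so no append re-allocates.
func merge(as, bs []string) []string {
	out := make([]string, 0, len(as)+len(bs))
	out = append(out, as...)
	return append(out, bs...)
}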

View File

@@ -88,13 +88,14 @@ func (ecp *ContactPointService) GetContactPoints(ctx context.Context, q ContactP
         }
     }

-    var contactPoints []apimodels.EmbeddedContactPoint
-    for _, gr := range grafanaReceivers {
+    contactPoints := make([]apimodels.EmbeddedContactPoint, len(grafanaReceivers))
+    for i, gr := range grafanaReceivers {
         contactPoint, err := GettableGrafanaReceiverToEmbeddedContactPoint(gr)
         if err != nil {
             return nil, err
         }
-        contactPoints = append(contactPoints, contactPoint)
+
+        contactPoints[i] = contactPoint
     }
     sort.SliceStable(contactPoints, func(i, j int) bool {

View File

@@ -31,17 +31,19 @@ func (t *TemplateService) GetTemplates(ctx context.Context, orgID int64) ([]defi
         return nil, err
     }

-    var templates []definitions.NotificationTemplate
+    templates := make([]definitions.NotificationTemplate, 0, len(revision.cfg.TemplateFiles))
     for name, tmpl := range revision.cfg.TemplateFiles {
         tmpl := definitions.NotificationTemplate{
             Name:     name,
             Template: tmpl,
         }
+
         provenance, err := t.provenanceStore.GetProvenance(ctx, &tmpl, orgID)
         if err != nil {
             return nil, err
         }
+
         tmpl.Provenance = definitions.Provenance(provenance)
         templates = append(templates, tmpl)
     }

View File

@@ -492,13 +492,14 @@ func (am *Alertmanager) GetReceivers(ctx context.Context) ([]apimodels.Receiver,
         return []apimodels.Receiver{}, err
     }

-    var rcvs []apimodels.Receiver
-    for _, rcv := range res.Payload {
-        rcvs = append(rcvs, apimodels.Receiver{
+    rcvs := make([]apimodels.Receiver, len(res.Payload))
+    for i, rcv := range res.Payload {
+        rcvs[i] = apimodels.Receiver{
             Name:         *rcv.Name,
             Integrations: []apimodels.Integration{},
-        })
+        }
     }

     return rcvs, nil
 }

View File

@@ -205,15 +205,17 @@ func (d *AlertsRouter) SyncAndApplyConfigFromDatabase(ctx context.Context) error
 }

 func buildRedactedAMs(l log.Logger, alertmanagers []ExternalAMcfg, ordId int64) []string {
-    var redactedAMs []string
+    redactedAMs := make([]string, 0, len(alertmanagers))
     for _, am := range alertmanagers {
         parsedAM, err := url.Parse(am.URL)
         if err != nil {
             l.Error("Failed to parse alertmanager string", "org", ordId, "error", err)
             continue
         }
+
         redactedAMs = append(redactedAMs, parsedAM.Redacted())
     }
+
     return redactedAMs
 }
@@ -225,9 +227,6 @@ func asSHA256(strings []string) string {
 }

 func (d *AlertsRouter) alertmanagersFromDatasources(orgID int64) ([]ExternalAMcfg, error) {
-    var (
-        alertmanagers []ExternalAMcfg
-    )
     // We might have alertmanager datasources that are acting as external
     // alertmanager, let's fetch them.
     query := &datasources.GetDataSourcesByTypeQuery{
@@ -240,6 +239,9 @@ func (d *AlertsRouter) alertmanagersFromDatasources(orgID int64) ([]ExternalAMcf
     if err != nil {
         return nil, fmt.Errorf("failed to fetch datasources for org: %w", err)
     }
+
+    alertmanagers := make([]ExternalAMcfg, 0, len(dataSources))
+
     for _, ds := range dataSources {
         if !ds.JsonData.Get(definitions.HandleGrafanaManagedAlerts).MustBool(false) {
             continue
@@ -262,11 +264,13 @@ func (d *AlertsRouter) alertmanagersFromDatasources(orgID int64) ([]ExternalAMcf
             "error", err)
             continue
         }
+
         alertmanagers = append(alertmanagers, ExternalAMcfg{
             URL:     amURL,
             Headers: headers,
         })
     }
+
     return alertmanagers, nil
 }

View File

@@ -712,7 +712,7 @@ func TestAlertManagers_buildRedactedAMs(t *testing.T) {
             amUrls:   []string{"1234://user:password@localhost:9094"},
             errCalls: 1,
             errLog:   "Failed to parse alertmanager string",
-            expected: nil,
+            expected: []string{},
         },
     }
@@ -724,6 +724,7 @@ func TestAlertManagers_buildRedactedAMs(t *testing.T) {
                 URL: url,
             })
         }
+
         require.Equal(t, tt.expected, buildRedactedAMs(&fakeLogger, cfgs, tt.orgId))
         require.Equal(t, tt.errCalls, fakeLogger.ErrorLogs.Calls)
         require.Equal(t, tt.errLog, fakeLogger.ErrorLogs.Message)

View File

@@ -141,9 +141,9 @@ func errorAlert(labels, annotations data.Labels, alertState *State, urlStr strin
 func FromStateTransitionToPostableAlerts(firingStates []StateTransition, stateManager *Manager, appURL *url.URL) apimodels.PostableAlerts {
     alerts := apimodels.PostableAlerts{PostableAlerts: make([]models.PostableAlert, 0, len(firingStates))}
-    var sentAlerts []*State
     ts := time.Now()

+    sentAlerts := make([]*State, 0, len(firingStates))
     for _, alertState := range firingStates {
         if !alertState.NeedsSending(stateManager.ResendDelay) {
             continue

View File

@@ -787,7 +787,8 @@ func (st DBstore) RenameReceiverInNotificationSettings(ctx context.Context, orgI
     if len(rules) == 0 {
         return 0, nil
     }
-    var updates []ngmodels.UpdateRule
+
+    updates := make([]ngmodels.UpdateRule, 0, len(rules))
     for _, rule := range rules {
         r := ngmodels.CopyRule(rule)
         for idx := range r.NotificationSettings {
@@ -795,6 +796,7 @@ func (st DBstore) RenameReceiverInNotificationSettings(ctx context.Context, orgI
                 r.NotificationSettings[idx].Receiver = newReceiver
             }
         }
+
         updates = append(updates, ngmodels.UpdateRule{
             Existing: rule,
             New:      *r,

View File

@@ -401,15 +401,15 @@ type pluginsSettingsServiceMock struct {
 func (s *pluginsSettingsServiceMock) GetPluginSettings(_ context.Context, args *pluginsettings.GetArgs) ([]*pluginsettings.InfoDTO, error) {
     s.getPluginSettingsArgs = append(s.getPluginSettingsArgs, args.OrgID)

-    var res []*pluginsettings.InfoDTO
-    for _, ps := range s.storedPluginSettings {
-        res = append(res, &pluginsettings.InfoDTO{
+    res := make([]*pluginsettings.InfoDTO, len(s.storedPluginSettings))
+    for i, ps := range s.storedPluginSettings {
+        res[i] = &pluginsettings.InfoDTO{
             PluginID:      ps.PluginID,
             OrgID:         ps.OrgID,
             Enabled:       ps.Enabled,
             Pinned:        ps.Pinned,
             PluginVersion: ps.PluginVersion,
-        })
+        }
     }

     return res, s.err

View File

@@ -163,17 +163,23 @@ func (p *EnvVarsProvider) tracingEnvVars(plugin *plugins.Plugin) []string {
 func (p *EnvVarsProvider) pluginSettingsEnvVars(pluginID string) []string {
     const customConfigPrefix = "GF_PLUGIN"
-    var env []string
-    for k, v := range p.cfg.PluginSettings[pluginID] {
+
+    pluginSettings := p.cfg.PluginSettings[pluginID]
+
+    env := make([]string, 0, len(pluginSettings))
+    for k, v := range pluginSettings {
         if k == "path" || strings.ToLower(k) == "id" {
             continue
         }
+
         key := fmt.Sprintf("%s_%s", customConfigPrefix, strings.ToUpper(k))
         if value := os.Getenv(key); value != "" {
             v = value
         }
+
         env = append(env, fmt.Sprintf("%s=%s", key, v))
     }

     return env
 }

View File

@@ -191,7 +191,9 @@ func getNonFolderDashboardDoc(dash dashboard, location string) *bluge.Document {
 func getDashboardPanelDocs(dash dashboard, location string) []*bluge.Document {
     dashURL := fmt.Sprintf("/d/%s/%s", dash.uid, slugify.Slugify(dash.summary.Name))

-    var docs []*bluge.Document
+    // pre-allocating a little bit more than necessary, possibly
+    docs := make([]*bluge.Document, 0, len(dash.summary.Nested))
+
     for _, panel := range dash.summary.Nested {
         if panel.Fields["type"] == "row" {
             continue // skip rows
@@ -239,7 +241,7 @@ func getDashboardPanelDocs(dash dashboard, location string) []*bluge.Document {
 }

 // Names need to be indexed a few ways to support key features
-func newSearchDocument(uid string, name string, descr string, url string) *bluge.Document {
+func newSearchDocument(uid, name, descr, url string) *bluge.Document {
     doc := bluge.NewDocument(uid)

     if name != "" {

View File

@@ -51,9 +51,11 @@ func Test_punctuationCharFilter_Filter(t1 *testing.T) {
 func TestNgramIndexAnalyzer(t *testing.T) {
     stream := ngramIndexAnalyzer.Analyze([]byte("x-rays.and.xRays, and НемногоКириллицы"))
     expectedTerms := []string{"x", "r", "ra", "ray", "rays", "a", "an", "and", "x", "r", "ra", "ray", "rays", "a", "an", "and", "н", "не", "нем", "немн", "немно", "немног", "немного", "к", "ки", "кир", "кири", "кирил", "кирилл", "кирилли"}
-    var actualTerms []string
+
+    actualTerms := make([]string, 0, len(stream))
     for _, t := range stream {
         actualTerms = append(actualTerms, string(t.Term))
     }
+
     require.Equal(t, expectedTerms, actualTerms)
 }

View File

@@ -97,9 +97,8 @@ func (e externalAlertmanagerToDatasources) Exec(sess *xorm.Session, mg *migrator
 }

 func removeDuplicates(strs []string) []string {
-    var res []string
-    found := map[string]bool{}
+    found := make(map[string]bool, len(strs))
+    res := make([]string, 0, len(strs))
     for _, str := range strs {
         if found[str] {
             continue

View File

@@ -435,8 +435,10 @@ func (f *accessControlDashboardPermissionFilter) nestedFoldersSelectors(permSele
 }

 func getAllowedUIDs(action string, user identity.Requester, scopePrefix string) []any {
-    var args []any
-    for _, uidScope := range user.GetPermissions()[action] {
+    uidScopes := user.GetPermissions()[action]
+
+    args := make([]any, 0, len(uidScopes))
+    for _, uidScope := range uidScopes {
         if !strings.HasPrefix(uidScope, scopePrefix) {
             continue
         }

View File

@@ -101,7 +101,7 @@ func (api *Api) getAuthorizedList(ctx context.Context, identity identity.Request
         return nil, err
     }

-    var authorizedProviders []*models.SSOSettings
+    authorizedProviders := make([]*models.SSOSettings, 0, len(allProviders))
     for _, provider := range allProviders {
         ev := ac.EvalPermission(ac.ActionSettingsRead, ac.Scope("settings", "auth."+provider.Provider, "*"))
         hasAccess, err := api.AccessControl.Evaluate(ctx, identity, ev)

View File

@@ -1986,19 +1986,23 @@ func (cfg *Cfg) readLiveSettings(iniFile *ini.File) error {
     cfg.LiveHAEngineAddress = section.Key("ha_engine_address").MustString("127.0.0.1:6379")
     cfg.LiveHAEnginePassword = section.Key("ha_engine_password").MustString("")

-    var originPatterns []string
     allowedOrigins := section.Key("allowed_origins").MustString("")
-    for _, originPattern := range strings.Split(allowedOrigins, ",") {
+    origins := strings.Split(allowedOrigins, ",")
+
+    originPatterns := make([]string, 0, len(origins))
+    for _, originPattern := range origins {
         originPattern = strings.TrimSpace(originPattern)
         if originPattern == "" {
             continue
         }
+
         originPatterns = append(originPatterns, originPattern)
     }
+
     _, err := GetAllowedOriginGlobs(originPatterns)
     if err != nil {
         return err
     }
     cfg.LiveAllowedOrigins = originPatterns
     return nil
 }

View File

@@ -75,7 +75,7 @@ func readSecureSocksDSProxySettings(iniFile *ini.File) (SecureSocksDSProxySettin
         s.ClientKey = string(keyPEMBlock)
     }

-    var rootCAs []string
+    rootCAs := make([]string, 0, len(s.RootCAFilePaths))
     for _, rootCAFile := range s.RootCAFilePaths {
         // nolint:gosec
         // The gosec G304 warning can be ignored because `rootCAFile` comes from config ini, and we check below if

View File

@@ -88,9 +88,8 @@ type argJSONQuery struct {
 }

 func (e *AzureResourceGraphDatasource) buildQueries(queries []backend.DataQuery, dsInfo types.DatasourceInfo) ([]*AzureResourceGraphQuery, error) {
-    var azureResourceGraphQueries []*AzureResourceGraphQuery
-
-    for _, query := range queries {
+    azureResourceGraphQueries := make([]*AzureResourceGraphQuery, len(queries))
+    for i, query := range queries {
         queryJSONModel := argJSONQuery{}
         err := json.Unmarshal(query.JSON, &queryJSONModel)
         if err != nil {
@@ -105,19 +104,18 @@ func (e *AzureResourceGraphDatasource) buildQueries(queries []backend.DataQuery,
         }

         interpolatedQuery, err := macros.KqlInterpolate(query, dsInfo, azureResourceGraphTarget.Query)
         if err != nil {
             return nil, err
         }
-
-        azureResourceGraphQueries = append(azureResourceGraphQueries, &AzureResourceGraphQuery{
+        azureResourceGraphQueries[i] = &AzureResourceGraphQuery{
             RefID:             query.RefID,
             ResultFormat:      resultFormat,
             JSON:              query.JSON,
             InterpolatedQuery: interpolatedQuery,
             TimeRange:         query.TimeRange,
             QueryType:         query.QueryType,
-        })
+        }
     }

     return azureResourceGraphQueries, nil

View File

@@ -129,8 +129,8 @@ type AzureMonitorDimensionFilterBackend struct {
 }

 func ConstructFiltersString(a dataquery.AzureMetricDimension) string {
-    var filterStrings []string
-    for _, filter := range a.Filters {
+    filterStrings := make([]string, len(a.Filters))
+    for i, filter := range a.Filters {
         dimension := ""
         operator := ""
         if a.Dimension != nil {
@@ -139,11 +139,14 @@ func ConstructFiltersString(a dataquery.AzureMetricDimension) string {
         if a.Operator != nil {
             operator = *a.Operator
         }
-        filterStrings = append(filterStrings, fmt.Sprintf("%v %v '%v'", dimension, operator, filter))
+
+        filterStrings[i] = fmt.Sprintf("%v %v '%v'", dimension, operator, filter)
     }
+
     if a.Operator != nil && *a.Operator == "eq" {
         return strings.Join(filterStrings, " or ")
     }
+
     return strings.Join(filterStrings, " and ")
 }

View File

@@ -7,7 +7,7 @@ import (
 )

 func valuesToListMetricRespone[T any](values []T) []resources.ResourceResponse[T] {
-    var response []resources.ResourceResponse[T]
+    response := make([]resources.ResourceResponse[T], 0, len(values))
     for _, value := range values {
         response = append(response, resources.ResourceResponse[T]{Value: value})
     }

View File

@@ -84,15 +84,16 @@ func newFlowTestQueries(allJsonBytes []byte) ([]backend.DataQuery, error) {
         return nil, fmt.Errorf("error unmarshaling query-json: %w", err)
     }

-    var queries []backend.DataQuery
-    for _, jsonBytes := range jsonBytesArray {
+    queries := make([]backend.DataQuery, len(jsonBytesArray))
+    for i, jsonBytes := range jsonBytesArray {
+
         // we need to extract some fields from the json-array
         var jsonInfo queryDataTestQueryJSON
         err = json.Unmarshal(jsonBytes, &jsonInfo)
         if err != nil {
             return nil, err
         }

         // we setup the DataQuery, with values loaded from the json
         query := backend.DataQuery{
             RefID:         jsonInfo.RefID,
@@ -101,7 +102,8 @@ func newFlowTestQueries(allJsonBytes []byte) ([]backend.DataQuery, error) {
             TimeRange:     timeRange,
             JSON:          jsonBytes,
         }
-        queries = append(queries, query)
+
+        queries[i] = query
     }
     return queries, nil
 }

View File

@@ -873,16 +873,16 @@ func trimDatapoints(queryResult backend.DataResponse, target *Query) {
 // we sort the label's pairs by the label-key,
 // and return the label-values
 func getSortedLabelValues(labels data.Labels) []string {
-    var keys []string
+    keys := make([]string, 0, len(labels))
     for key := range labels {
         keys = append(keys, key)
     }
     sort.Strings(keys)

-    var values []string
-    for _, key := range keys {
-        values = append(values, labels[key])
+    values := make([]string, len(keys))
+    for i, key := range keys {
+        values[i] = labels[key]
     }

     return values

View File

@@ -60,7 +60,7 @@ func TestNewQueryDataResponse(t *testing.T) {
         newJSONArray(`[0, 1, 2]`, &arrow.TimestampType{}),
     }

-    var arr []arrow.Array
+    arr := make([]arrow.Array, 0, len(strValues))
     for _, v := range strValues {
         tarr, _, err := array.FromJSON(
             alloc,

View File

@@ -269,12 +269,12 @@ func transformRowsForTimeSeries(rows []models.Row, query models.Query) data.Fram
 }

 func newFrameWithTimeField(row models.Row, column string, colIndex int, query models.Query, frameName []byte) *data.Frame {
-    var timeArray []time.Time
     var floatArray []*float64
     var stringArray []*string
     var boolArray []*bool

     valType := util.Typeof(row.Values, colIndex)

+    timeArray := make([]time.Time, 0, len(row.Values))
     for _, valuePair := range row.Values {
         timestamp, timestampErr := util.ParseTimestamp(valuePair[0])
         // we only add this row if the timestamp is valid

View File

@@ -136,10 +136,11 @@ func rootSpan(frame *BetterFrame) Row {
 }

 func fieldNames(frame *data.Frame) []string {
-    var names []string
-    for _, f := range frame.Fields {
-        names = append(names, f.Name)
+    names := make([]string, len(frame.Fields))
+    for i, f := range frame.Fields {
+        names[i] = f.Name
     }
+
     return names
 }

View File

@@ -48,11 +48,13 @@ func SplitString(str string) []string {
         return res
     }

-    var result []string
     matches := stringListItemMatcher.FindAllString(str, -1)
-    for _, match := range matches {
-        result = append(result, strings.Trim(match, "\""))
+
+    result := make([]string, len(matches))
+    for i, match := range matches {
+        result[i] = strings.Trim(match, "\"")
     }

     return result
 }

View File

@@ -30,22 +30,26 @@ func TlsCiphersToIDs(names []string) ([]uint16, error) {
         // no ciphers specified, use defaults
         return nil, nil
     }
-    var ids []uint16
-    var missing []string

     ciphers := tls.CipherSuites()
-    var cipherMap = make(map[string]uint16, len(ciphers))
+    cipherMap := make(map[string]uint16, len(ciphers))
     for _, cipher := range ciphers {
         cipherMap[cipher.Name] = cipher.ID
     }

+    missing := []string{}
+    ids := make([]uint16, 0, len(names))
     for _, name := range names {
         name = strings.ToUpper(name)
+
         id, ok := cipherMap[name]
         if !ok {
             missing = append(missing, name)
             continue
         }
+
         ids = append(ids, id)
     }