Performance: Add preallocation for some slices (#61632)

Denis Limarev 2023-01-23 22:44:27 +06:00 committed by GitHub
parent 46cbf9d24c
commit 812c90ec69
13 changed files with 31 additions and 27 deletions
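
Every change below follows the same pattern: a slice that is filled with append inside a loop whose final size is known up front is now created with make(T, 0, n), so the backing array is allocated once instead of being grown and copied as the slice expands. The following is a minimal, self-contained sketch of the before/after shape; collectGrow, collectPrealloc and src are illustrative names, not code from this commit.

// Illustrative sketch only; not part of the commit.
package main

import "fmt"

// collectGrow appends into a nil slice, so append may have to allocate
// a larger backing array and copy the elements several times as it grows.
func collectGrow(src []string) []string {
	var out []string
	for _, s := range src {
		out = append(out, s)
	}
	return out
}

// collectPrealloc allocates the backing array once with the final capacity,
// which is the shape the commit moves every touched loop towards.
func collectPrealloc(src []string) []string {
	out := make([]string, 0, len(src))
	for _, s := range src {
		out = append(out, s)
	}
	return out
}

func main() {
	src := []string{"a", "b", "c"}
	fmt.Println(collectGrow(src), collectPrealloc(src))
}

Using length 0 with capacity len(src) keeps every append-based loop body unchanged. One general caveat with this kind of change: the result is an empty non-nil slice even when the input is empty, which encoding/json marshals as [] rather than null.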

View File

@@ -324,11 +324,10 @@ func printProperties(w io.Writer, s *schema) {
 	table.SetAutoWrapText(false)
 	// Buffer all property rows so that we can sort them before printing them.
-	var rows [][]string
+	rows := make([][]string, 0, len(s.Properties))
 	for k, p := range s.Properties {
 		// Generate relative links for objects and arrays of objects.
-		var propType []string
+		propType := make([]string, 0, len(p.Type))
 		for _, pt := range p.Type {
 			switch pt {
 			case PropertyTypeObject:
@@ -372,7 +371,7 @@ func printProperties(w io.Writer, s *schema) {
 		desc := p.Description
 		if len(p.Enum) > 0 {
-			var vals []string
+			vals := make([]string, 0, len(p.Enum))
 			for _, e := range p.Enum {
 				vals = append(vals, e.String())
 			}

View File

@@ -1777,8 +1777,8 @@ func (cfg *Cfg) readDataSourcesSettings() {
 }
 
 func GetAllowedOriginGlobs(originPatterns []string) ([]glob.Glob, error) {
-	var originGlobs []glob.Glob
 	allowedOrigins := originPatterns
+	originGlobs := make([]glob.Glob, 0, len(allowedOrigins))
 	for _, originPattern := range allowedOrigins {
 		g, err := glob.Compile(originPattern)
 		if err != nil {

View File

@@ -380,7 +380,7 @@ func queryModel(query backend.DataQuery) (grafanaQuery, error) {
 }
 
 func (s *Service) buildQueryExecutors(logger log.Logger, req *backend.QueryDataRequest) ([]cloudMonitoringQueryExecutor, error) {
-	var cloudMonitoringQueryExecutors []cloudMonitoringQueryExecutor
+	cloudMonitoringQueryExecutors := make([]cloudMonitoringQueryExecutor, 0, len(req.Queries))
 	startTime := req.Queries[0].TimeRange.From
 	endTime := req.Queries[0].TimeRange.To
 	durationSeconds := int(endTime.Sub(startTime).Seconds())

View File

@@ -248,7 +248,7 @@ func ParseMetricDataQueries(dataQueries []backend.DataQuery, startTime time.Time
 		metricDataQueries[query.RefID] = metricsDataQuery
 	}
-	var result []*CloudWatchQuery
+	result := make([]*CloudWatchQuery, 0, len(metricDataQueries))
 	for refId, mdq := range metricDataQueries {
 		cwQuery := &CloudWatchQuery{
 			logger: logger,

View File

@@ -46,8 +46,9 @@ func parseQuery(tsdbQuery []backend.DataQuery) ([]*Query, error) {
 func parseBucketAggs(model *simplejson.Json) ([]*BucketAgg, error) {
 	var err error
-	var result []*BucketAgg
-	for _, t := range model.Get("bucketAggs").MustArray() {
+	bucketAggs := model.Get("bucketAggs").MustArray()
+	result := make([]*BucketAgg, 0, len(bucketAggs))
+	for _, t := range bucketAggs {
 		aggJSON := simplejson.NewFromAny(t)
 		agg := &BucketAgg{}
@@ -71,8 +72,9 @@ func parseBucketAggs(model *simplejson.Json) ([]*BucketAgg, error) {
 func parseMetrics(model *simplejson.Json) ([]*MetricAgg, error) {
 	var err error
-	var result []*MetricAgg
-	for _, t := range model.Get("metrics").MustArray() {
+	metrics := model.Get("metrics").MustArray()
+	result := make([]*MetricAgg, 0, len(metrics))
+	for _, t := range metrics {
 		metricJSON := simplejson.NewFromAny(t)
 		metric := &MetricAgg{}

View File

@@ -101,7 +101,7 @@ func (s *Service) QueryData(ctx context.Context, req *backend.QueryDataRequest)
 	logger.Debug("Making a non-Flux type query")
 	var allRawQueries string
-	var queries []Query
+	queries := make([]Query, 0, len(req.Queries))
 	for _, reqQuery := range req.Queries {
 		query, err := s.queryParser.Parse(reqQuery)

View File

@@ -70,9 +70,10 @@ func (qp *InfluxdbQueryParser) Parse(query backend.DataQuery) (*Query, error) {
 }
 
 func (qp *InfluxdbQueryParser) parseSelects(model *simplejson.Json) ([]*Select, error) {
-	var result []*Select
+	selectObjs := model.Get("select").MustArray()
+	result := make([]*Select, 0, len(selectObjs))
-	for _, selectObj := range model.Get("select").MustArray() {
+	for _, selectObj := range selectObjs {
 		selectJson := simplejson.NewFromAny(selectObj)
 		var parts Select
@@ -93,8 +94,9 @@ func (qp *InfluxdbQueryParser) parseSelects(model *simplejson.Json) ([]*Select,
 }
 
 func (*InfluxdbQueryParser) parseTags(model *simplejson.Json) ([]*Tag, error) {
-	var result []*Tag
-	for _, t := range model.Get("tags").MustArray() {
+	tags := model.Get("tags").MustArray()
+	result := make([]*Tag, 0, len(tags))
+	for _, t := range tags {
 		tagJson := simplejson.NewFromAny(t)
 		tag := &Tag{}
 		var err error
@@ -159,8 +161,9 @@ func (*InfluxdbQueryParser) parseQueryPart(model *simplejson.Json) (*QueryPart,
 }
 
 func (qp *InfluxdbQueryParser) parseGroupBy(model *simplejson.Json) ([]*QueryPart, error) {
-	var result []*QueryPart
-	for _, groupObj := range model.Get("groupBy").MustArray() {
+	groupBy := model.Get("groupBy").MustArray()
+	result := make([]*QueryPart, 0, len(groupBy))
+	for _, groupObj := range groupBy {
 		groupJson := simplejson.NewFromAny(groupObj)
 		queryPart, err := qp.parseQueryPart(groupJson)
 		if err != nil {

View File

@@ -44,7 +44,7 @@ func (query *Query) Build(queryContext *backend.QueryDataRequest) (string, error
 }
 
 func (query *Query) renderTags() []string {
-	var res []string
+	res := make([]string, 0, len(query.Tags))
 	for i, tag := range query.Tags {
 		str := ""
@@ -91,7 +91,7 @@ func (query *Query) renderTimeFilter(queryContext *backend.QueryDataRequest) str
 func (query *Query) renderSelectors(queryContext *backend.QueryDataRequest) string {
 	res := "SELECT "
-	var selectors []string
+	selectors := make([]string, 0, len(query.Selects))
 	for _, sel := range query.Selects {
 		stk := ""
 		for _, s := range *sel {

View File

@@ -189,7 +189,7 @@ func formatFrameName(row Row, column string, query Query) string {
 }
 
 func buildFrameNameFromQuery(row Row, column string) string {
-	var tags []string
+	tags := make([]string, 0, len(row.Tags))
 	for k, v := range row.Tags {
 		tags = append(tags, fmt.Sprintf("%s: %s", k, v))
 	}

View File

@@ -178,7 +178,7 @@ func makeIdField(stringTimeField *data.Field, lineField *data.Field, labelsField
 }
 
 func formatNamePrometheusStyle(labels map[string]string) string {
-	var parts []string
+	parts := make([]string, 0, len(labels))
 	for k, v := range labels {
 		parts = append(parts, fmt.Sprintf("%s=%q", k, v))

View File

@@ -142,7 +142,7 @@ type Node struct {
 }
 
 func walkTree(tree *v1alpha1.FlamegraphRootNode, fn func(level int64, value int64, name string, self int64)) {
-	var stack []*Node
+	stack := make([]*Node, 0, len(tree.Children))
 	var childrenValue int64 = 0
 	for _, child := range tree.Children {
@@ -217,7 +217,7 @@ func normalizeUnit(unit string) string {
 }
 
 func seriesToDataFrame(seriesResp *connect.Response[v1alpha1.QueryRangeResponse], profileTypeID string) []*data.Frame {
-	var frames []*data.Frame
+	frames := make([]*data.Frame, 0, len(seriesResp.Msg.Series))
 	for _, series := range seriesResp.Msg.Series {
 		frame := data.NewFrame("series")

View File

@@ -28,7 +28,7 @@ func (d *ParcaDatasource) callProfileTypes(ctx context.Context, req *backend.Cal
 		return err
 	}
-	var types []*ProfileType
+	types := make([]*ProfileType, 0, len(res.Msg.Types))
 	for _, t := range res.Msg.Types {
 		var id string
 		if t.Delta {

View File

@@ -285,14 +285,14 @@ func walkTree(tree *ProfileTree, fn func(tree *ProfileTree)) {
 }
 
 func seriesToDataFrames(seriesResp *connect.Response[querierv1.SelectSeriesResponse], profileTypeID string) []*data.Frame {
-	var frames []*data.Frame
+	frames := make([]*data.Frame, 0, len(seriesResp.Msg.Series))
 	for _, series := range seriesResp.Msg.Series {
 		// We create separate data frames as the series may not have the same length
 		frame := data.NewFrame("series")
 		frame.Meta = &data.FrameMeta{PreferredVisualization: "graph"}
-		fields := data.Fields{}
+		fields := make(data.Fields, 0, 2)
 		timeField := data.NewField("time", nil, []time.Time{})
 		fields = append(fields, timeField)