Chore: Enable Go linter gocritic (#26224)

* Chore: Enable gocritic linter

Signed-off-by: Arve Knudsen <arve.knudsen@gmail.com>
Arve Knudsen 2020-07-16 14:39:01 +02:00 committed by GitHub
parent 317c7b269c
commit d4e4cb4c71
43 changed files with 184 additions and 171 deletions

View File

@ -815,7 +815,7 @@ jobs:
# To save memory, run in two batches
golangci-lint run -v -j 4 --config scripts/go/configs/ci/.golangci.toml -E deadcode -E depguard -E dogsled \
-E errcheck -E goconst -E golint -E gosec -E gosimple -E govet -E exportloopref -E whitespace ./pkg/...
golangci-lint run -v -j 4 --config scripts/go/configs/ci/.golangci.toml -E ineffassign \
golangci-lint run -v -j 4 --config scripts/go/configs/ci/.golangci.toml -E ineffassign -E gocritic \
-E rowserrcheck -E staticcheck -E structcheck -E typecheck -E unconvert -E unused -E varcheck ./pkg/...
./scripts/go/bin/revive -formatter stylish -config ./scripts/go/configs/revive.toml ./pkg/...
./scripts/go/bin/revive -formatter stylish -config ./scripts/go/configs/revive-strict.toml \

View File

@ -38,7 +38,7 @@ func formatShort(interval time.Duration) string {
result += fmt.Sprintf("%dm", mins)
}
remaining = remaining - (mins * time.Minute)
remaining -= (mins * time.Minute)
seconds := remaining / time.Second
if seconds > 0 {
result += fmt.Sprintf("%ds", seconds)

View File

@ -12,6 +12,8 @@ import (
"github.com/grafana/grafana/pkg/setting"
)
var regNonAlphaNumeric = regexp.MustCompile("[^a-zA-Z0-9]+")
type AnyId struct {
Id int64 `json:"id"`
}
@ -73,13 +75,7 @@ func GetGravatarUrlWithDefault(text string, defaultText string) string {
return GetGravatarUrl(text)
}
reg, err := regexp.Compile("[^a-zA-Z0-9]+")
if err != nil {
return ""
}
text = reg.ReplaceAllString(defaultText, "") + "@localhost"
text = regNonAlphaNumeric.ReplaceAllString(defaultText, "") + "@localhost"
return GetGravatarUrl(text)
}
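
The hunk above hoists the regular expression to a package-level regexp.MustCompile, dropping the per-call regexp.Compile whose error branch can never fire for a constant pattern; the dashboard-import change further down applies the same fix. A minimal standalone sketch of the pattern, with illustrative names rather than Grafana's:

package main

import (
	"fmt"
	"regexp"
)

// Compiled once at package initialization; MustCompile panics at startup on
// an invalid pattern instead of forcing every caller to handle an error that
// cannot occur for a constant expression.
var nonAlphaNumeric = regexp.MustCompile("[^a-zA-Z0-9]+")

func sanitize(s string) string {
	return nonAlphaNumeric.ReplaceAllString(s, "")
}

func main() {
	fmt.Println(sanitize("user.name+test") + "@localhost")
}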

View File

@ -159,19 +159,20 @@ func (proxy *DataSourceProxy) getDirector() func(req *http.Request) {
reqQueryVals := req.URL.Query()
if proxy.ds.Type == models.DS_INFLUXDB_08 {
switch proxy.ds.Type {
case models.DS_INFLUXDB_08:
req.URL.Path = util.JoinURLFragments(proxy.targetUrl.Path, "db/"+proxy.ds.Database+"/"+proxy.proxyPath)
reqQueryVals.Add("u", proxy.ds.User)
reqQueryVals.Add("p", proxy.ds.DecryptedPassword())
req.URL.RawQuery = reqQueryVals.Encode()
} else if proxy.ds.Type == models.DS_INFLUXDB {
case models.DS_INFLUXDB:
req.URL.Path = util.JoinURLFragments(proxy.targetUrl.Path, proxy.proxyPath)
req.URL.RawQuery = reqQueryVals.Encode()
if !proxy.ds.BasicAuth {
req.Header.Del("Authorization")
req.Header.Add("Authorization", util.GetBasicAuthHeader(proxy.ds.User, proxy.ds.DecryptedPassword()))
}
} else {
default:
req.URL.Path = util.JoinURLFragments(proxy.targetUrl.Path, proxy.proxyPath)
}
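
Replacing the if/else-if/else ladder on the datasource type with a switch is the shape of gocritic's ifElseChain suggestion, and the same rewrite recurs in many later hunks. A self-contained sketch under invented names:

package main

import "fmt"

// describe collapses an if/else-if/else ladder into a switch; with a value
// to switch on the cases list it, otherwise a tagless switch {} carries the
// boolean conditions, as in the RequestMetrics hunk further down.
func describe(dsType string) string {
	switch dsType {
	case "influxdb-08":
		return "legacy InfluxDB proxying"
	case "influxdb":
		return "InfluxDB proxying"
	default:
		return "plain proxying"
	}
}

func main() {
	fmt.Println(describe("influxdb"))
}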

View File

@ -51,9 +51,7 @@ func TestRouteSimpleRegister(t *testing.T) {
}
// Setup
rr := NewRouteRegister(func(name string) macaron.Handler {
return emptyHandler(name)
})
rr := NewRouteRegister(emptyHandler)
rr.Delete("/admin", emptyHandler("1"))
rr.Get("/down", emptyHandler("1"), emptyHandler("2"))
@ -199,10 +197,7 @@ func TestDuplicateRoutShouldPanic(t *testing.T) {
}
}()
rr := NewRouteRegister(func(name string) macaron.Handler {
return emptyHandler(name)
})
rr := NewRouteRegister(emptyHandler)
rr.Get("/api", emptyHandler("1"))
rr.Get("/api", emptyHandler("1"))
@ -220,9 +215,7 @@ func TestNamedMiddlewareRouteRegister(t *testing.T) {
}
// Setup
rr := NewRouteRegister(func(name string) macaron.Handler {
return emptyHandler(name)
})
rr := NewRouteRegister(emptyHandler)
rr.Delete("/admin", emptyHandler("1"))
rr.Get("/down", emptyHandler("1"), emptyHandler("2"))
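
Passing emptyHandler directly instead of wrapping it in a literal that only forwards its argument is gocritic's unlambda simplification. A standalone sketch, with functions invented for illustration:

package main

import "fmt"

// emptyHandler stands in for the test helper of the same idea: it returns a
// handler closed over its name.
func emptyHandler(name string) func() string {
	return func() string { return "handler " + name }
}

func main() {
	// Before: a literal that only forwards its argument to emptyHandler.
	wrapped := func(name string) func() string { return emptyHandler(name) }
	// After: the function value itself, which is what the tests now pass.
	direct := emptyHandler

	fmt.Println(wrapped("1")(), direct("2")())
}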

View File

@ -212,12 +212,10 @@ func (b *BasicDiff) Basic(lines []*JSONLine) []*BasicBlock {
// - if we're not recording a change, then we do nothing,
// since the BasicDiff doesn't report on unchanged JSON
// values.
} else {
if b.writing {
b.writing = false
b.Summary.LineEnd = line.LineNum
b.Block.Summaries = append(b.Block.Summaries, b.Summary)
}
} else if b.writing {
b.writing = false
b.Summary.LineEnd = line.LineNum
b.Block.Summaries = append(b.Block.Summaries, b.Summary)
}
}
}

View File

@ -201,9 +201,8 @@ func (f *JSONFormatter) processArray(array []interface{}, deltas []diff.Delta) e
// additional Added
for _, delta := range deltas {
switch delta.(type) {
case *diff.Added:
d := delta.(*diff.Added)
d, ok := delta.(*diff.Added)
if ok {
// skip items already processed
if int(d.Position.(diff.Index)) < len(array) {
continue
@ -226,9 +225,9 @@ func (f *JSONFormatter) processObject(object map[string]interface{}, deltas []di
// Added
for _, delta := range deltas {
switch delta := delta.(type) {
case *diff.Added:
f.printRecursive(delta.Position.String(), delta.Value, ChangeAdded)
d, ok := delta.(*diff.Added)
if ok {
f.printRecursive(d.Position.String(), d.Value, ChangeAdded)
}
}
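
Both changes in this file swap a single-case type switch for a comma-ok type assertion, which is what gocritic's singleCaseSwitch check recommends. A standalone sketch with made-up types:

package main

import "fmt"

// added stands in for *diff.Added in the formatter code above.
type added struct{ value string }

func describe(delta interface{}) string {
	// Before: switch delta.(type) { case *added: ... } with a second
	// assertion inside the case. After: one comma-ok assertion.
	if d, ok := delta.(*added); ok {
		return "added " + d.value
	}
	return "ignored"
}

func main() {
	fmt.Println(describe(&added{value: "panel"}))
	fmt.Println(describe(42))
}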

View File

@ -29,9 +29,9 @@ func NewFromReader(r io.Reader) (*Json, error) {
// Float64 coerces into a float64
func (j *Json) Float64() (float64, error) {
switch j.data.(type) {
switch n := j.data.(type) {
case json.Number:
return j.data.(json.Number).Float64()
return n.Float64()
case float32, float64:
return reflect.ValueOf(j.data).Float(), nil
case int, int8, int16, int32, int64:
@ -44,10 +44,13 @@ func (j *Json) Float64() (float64, error) {
// Int coerces into an int
func (j *Json) Int() (int, error) {
switch j.data.(type) {
switch n := j.data.(type) {
case json.Number:
i, err := j.data.(json.Number).Int64()
return int(i), err
i, err := n.Int64()
if err != nil {
return 0, err
}
return int(i), nil
case float32, float64:
return int(reflect.ValueOf(j.data).Float()), nil
case int, int8, int16, int32, int64:
@ -60,9 +63,9 @@ func (j *Json) Int() (int, error) {
// Int64 coerces into an int64
func (j *Json) Int64() (int64, error) {
switch j.data.(type) {
switch n := j.data.(type) {
case json.Number:
return j.data.(json.Number).Int64()
return n.Int64()
case float32, float64:
return int64(reflect.ValueOf(j.data).Float()), nil
case int, int8, int16, int32, int64:
@ -75,9 +78,9 @@ func (j *Json) Int64() (int64, error) {
// Uint64 coerces into an uint64
func (j *Json) Uint64() (uint64, error) {
switch j.data.(type) {
switch n := j.data.(type) {
case json.Number:
return strconv.ParseUint(j.data.(json.Number).String(), 10, 64)
return strconv.ParseUint(n.String(), 10, 64)
case float32, float64:
return uint64(reflect.ValueOf(j.data).Float()), nil
case int, int8, int16, int32, int64:
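
Binding the asserted value in the switch header (switch n := j.data.(type)) lets each case use n directly instead of repeating j.data.(json.Number), and the Int() case also gains an explicit error check before converting. A minimal sketch of the same shape; the function and cases are illustrative:

package main

import (
	"encoding/json"
	"fmt"
)

// toInt mirrors the shape of Json.Int() after the change: the switch header
// binds the asserted value, so each case uses n directly, and the
// json.Number branch checks the conversion error before narrowing to int.
func toInt(v interface{}) (int, error) {
	switch n := v.(type) {
	case json.Number:
		i, err := n.Int64()
		if err != nil {
			return 0, err
		}
		return int(i), nil
	case float64:
		return int(n), nil
	case int:
		return n, nil
	default:
		return 0, fmt.Errorf("unsupported type %T", v)
	}
}

func main() {
	fmt.Println(toInt(json.Number("42")))
}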

View File

@ -70,7 +70,7 @@ func (uss *UsageStatsService) sendUsageStats(oauthProviders map[string]bool) {
userCount := statsQuery.Result.Users
avgAuthTokensPerUser := statsQuery.Result.AuthTokens
if userCount != 0 {
avgAuthTokensPerUser = avgAuthTokensPerUser / userCount
avgAuthTokensPerUser /= userCount
}
metrics["stats.avg_auth_token_per_user.count"] = avgAuthTokensPerUser

View File

@ -43,11 +43,12 @@ func RequestMetrics(handler string) macaron.Handler {
duration := time.Since(now).Nanoseconds() / int64(time.Millisecond)
metrics.MHttpRequestSummary.WithLabelValues(handler, code, method).Observe(float64(duration))
if strings.HasPrefix(req.RequestURI, "/api/datasources/proxy") {
switch {
case strings.HasPrefix(req.RequestURI, "/api/datasources/proxy"):
countProxyRequests(status)
} else if strings.HasPrefix(req.RequestURI, "/api/") {
case strings.HasPrefix(req.RequestURI, "/api/"):
countApiRequests(status)
} else {
default:
countPageRequests(status)
}
}

View File

@ -48,7 +48,7 @@ func (r *RoleType) UnmarshalJSON(data []byte) error {
*r = RoleType(str)
if !(*r).IsValid() {
if !r.IsValid() {
if (*r) != "" {
return fmt.Errorf("JSON validation error: invalid role value: %s", *r)
}

View File

@ -43,11 +43,11 @@ type User struct {
func (u *User) NameOrFallback() string {
if u.Name != "" {
return u.Name
} else if u.Login != "" {
return u.Login
} else {
return u.Email
}
if u.Login != "" {
return u.Login
}
return u.Email
}
// ---------------------
@ -192,11 +192,11 @@ func (u *SignedInUser) ShouldUpdateLastSeenAt() bool {
func (u *SignedInUser) NameOrFallback() string {
if u.Name != "" {
return u.Name
} else if u.Login != "" {
return u.Login
} else {
return u.Email
}
if u.Login != "" {
return u.Login
}
return u.Email
}
type UpdateUserLastSeenAtCommand struct {

View File

@ -11,6 +11,8 @@ import (
"github.com/grafana/grafana/pkg/services/dashboards"
)
var varRegex = regexp.MustCompile(`(\$\{.+\})`)
type ImportDashboardCommand struct {
Dashboard *simplejson.Json
Path string
@ -109,7 +111,6 @@ type DashTemplateEvaluator struct {
inputs []ImportDashboardInput
variables map[string]string
result *simplejson.Json
varRegex *regexp.Regexp
}
func (this *DashTemplateEvaluator) findInput(varName string, varType string) *ImportDashboardInput {
@ -125,7 +126,6 @@ func (this *DashTemplateEvaluator) findInput(varName string, varType string) *Im
func (this *DashTemplateEvaluator) Eval() (*simplejson.Json, error) {
this.result = simplejson.New()
this.variables = make(map[string]string)
this.varRegex, _ = regexp.Compile(`(\$\{.+\})`)
// check that we have all inputs we need
for _, inputDef := range this.template.Get("__inputs").MustArray() {
@ -149,7 +149,7 @@ func (this *DashTemplateEvaluator) evalValue(source *simplejson.Json) interface{
switch v := sourceValue.(type) {
case string:
interpolated := this.varRegex.ReplaceAllStringFunc(v, func(match string) string {
interpolated := varRegex.ReplaceAllStringFunc(v, func(match string) string {
replacement, exists := this.variables[match]
if exists {
return replacement

View File

@ -36,7 +36,7 @@ func (s *queryReducer) Reduce(series *tsdb.TimeSeries) null.Float {
}
}
if validPointsCount > 0 {
value = value / float64(validPointsCount)
value /= float64(validPointsCount)
}
case "sum":
for _, point := range series.Points {

View File

@ -214,11 +214,12 @@ func (e *AlertEngine) processJob(attemptID int, attemptChan chan int, cancelChan
evalContext.Ctx = resultHandleCtx
evalContext.Rule.State = evalContext.GetNewState()
if err := e.resultHandler.handle(evalContext); err != nil {
if xerrors.Is(err, context.Canceled) {
switch {
case xerrors.Is(err, context.Canceled):
e.log.Debug("Result handler returned context.Canceled")
} else if xerrors.Is(err, context.DeadlineExceeded) {
case xerrors.Is(err, context.DeadlineExceeded):
e.log.Debug("Result handler returned context.DeadlineExceeded")
} else {
default:
e.log.Error("Failed to handle result", "err", err)
}
}

View File

@ -16,6 +16,7 @@ import (
)
// for stubbing in tests
//nolint: gocritic
var newImageUploaderProvider = func() (imguploader.ImageUploader, error) {
return imguploader.NewImageUploader()
}
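
The //nolint: gocritic directive keeps this forwarding function literal as a package-level variable, presumably because the linter would otherwise suggest replacing it with the function it wraps; a variable is kept so tests can reassign it. A standalone sketch of that stubbing pattern, with all names invented:

package main

import (
	"fmt"
	"os"
)

// Kept as a function variable rather than a plain declaration so tests can
// reassign it; the forwarding literal is what the //nolint comments in this
// commit protect from the linter's simplification.
var openFile = func(name string) (*os.File, error) {
	return os.Open(name)
}

func main() {
	// What a test does with such a variable: swap in a stub, restore after.
	orig := openFile
	openFile = func(string) (*os.File, error) { return os.CreateTemp("", "stub-*") }
	defer func() { openFile = orig }()

	f, err := openFile("ignored")
	if err != nil {
		fmt.Println("stub failed:", err)
		return
	}
	defer os.Remove(f.Name())
	defer f.Close()
	fmt.Println("got stubbed file:", f.Name())
}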

View File

@ -85,7 +85,7 @@ func (kn *KafkaNotifier) Notify(evalContext *alerting.EvalContext) error {
customData := triggMetrString
for _, evt := range evalContext.EvalMatches {
customData = customData + fmt.Sprintf("%s: %v\n", evt.Metric, evt.Value)
customData += fmt.Sprintf("%s: %v\n", evt.Metric, evt.Value)
}
kn.log.Info("Notifying Kafka", "alert_state", state)

View File

@ -67,13 +67,11 @@ type LineNotifier struct {
// Notify send an alert notification to LINE
func (ln *LineNotifier) Notify(evalContext *alerting.EvalContext) error {
ln.log.Info("Executing line notification", "ruleId", evalContext.Rule.ID, "notification", ln.Name)
var err error
switch evalContext.Rule.State {
case models.AlertStateAlerting:
err = ln.createAlert(evalContext)
if evalContext.Rule.State == models.AlertStateAlerting {
return ln.createAlert(evalContext)
}
return err
return nil
}
func (ln *LineNotifier) createAlert(evalContext *alerting.EvalContext) error {

View File

@ -142,7 +142,7 @@ func (on *OpsGenieNotifier) createAlert(evalContext *alerting.EvalContext) error
customData := triggMetrString
for _, evt := range evalContext.EvalMatches {
customData = customData + fmt.Sprintf("%s: %v\n", evt.Metric, evt.Value)
customData += fmt.Sprintf("%s: %v\n", evt.Metric, evt.Value)
}
bodyJSON := simplejson.New()

View File

@ -130,11 +130,12 @@ func (sn *SensuNotifier) Notify(evalContext *alerting.EvalContext) error {
bodyJSON.Set("output", "Grafana Metric Condition Met")
bodyJSON.Set("evalMatches", evalContext.EvalMatches)
if evalContext.Rule.State == "alerting" {
switch evalContext.Rule.State {
case "alerting":
bodyJSON.Set("status", 2)
} else if evalContext.Rule.State == "no_data" {
case "no_data":
bodyJSON.Set("status", 1)
} else {
default:
bodyJSON.Set("status", 0)
}

View File

@ -125,16 +125,16 @@ func (tn *TelegramNotifier) buildMessageLinkedImage(evalContext *alerting.EvalCo
ruleURL, err := evalContext.GetRuleURL()
if err == nil {
message = message + fmt.Sprintf("URL: %s\n", ruleURL)
message += fmt.Sprintf("URL: %s\n", ruleURL)
}
if evalContext.ImagePublicURL != "" {
message = message + fmt.Sprintf("Image: %s\n", evalContext.ImagePublicURL)
message += fmt.Sprintf("Image: %s\n", evalContext.ImagePublicURL)
}
metrics := generateMetricsMessage(evalContext)
if metrics != "" {
message = message + fmt.Sprintf("\n<i>Metrics:</i>%s", metrics)
message += fmt.Sprintf("\n<i>Metrics:</i>%s", metrics)
}
return tn.generateTelegramCmd(message, "text", "sendMessage", func(w *multipart.Writer) {

View File

@ -177,10 +177,10 @@ func (notifier *ThreemaNotifier) Notify(evalContext *alerting.EvalContext) error
evalContext.Rule.Name, evalContext.Rule.Message)
ruleURL, err := evalContext.GetRuleURL()
if err == nil {
message = message + fmt.Sprintf("*URL:* %s\n", ruleURL)
message += fmt.Sprintf("*URL:* %s\n", ruleURL)
}
if notifier.NeedsImage() && evalContext.ImagePublicURL != "" {
message = message + fmt.Sprintf("*Image:* %s\n", evalContext.ImagePublicURL)
message += fmt.Sprintf("*Image:* %s\n", evalContext.ImagePublicURL)
}
data.Set("text", message)

View File

@ -100,11 +100,12 @@ func (handler *defaultResultHandler) handle(evalContext *EvalContext) error {
}
if err := handler.notifier.SendIfNeeded(evalContext); err != nil {
if xerrors.Is(err, context.Canceled) {
switch {
case xerrors.Is(err, context.Canceled):
handler.log.Debug("handler.notifier.SendIfNeeded returned context.Canceled")
} else if xerrors.Is(err, context.DeadlineExceeded) {
case xerrors.Is(err, context.DeadlineExceeded):
handler.log.Debug("handler.notifier.SendIfNeeded returned context.DeadlineExceeded")
} else {
default:
handler.log.Error("handler.notifier.SendIfNeeded failed", "err", err)
}
}

View File

@ -89,7 +89,7 @@ func getTimeDurationStringToSeconds(str string) (int64, error) {
matches := valueFormatRegex.FindAllString(str, 1)
if len(matches) <= 0 {
if len(matches) == 0 {
return 0, ErrFrequencyCouldNotBeParsed
}

View File

@ -190,7 +190,7 @@ func (f permissionFlags) String() string {
r = append(r, "<no access>")
}
return strings.Join(r[:], ", ")
return strings.Join(r, ", ")
}
func (sc *scenarioContext) reportSuccess() {
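
Dropping the redundant [:] on a value that is already a slice is gocritic's unslice fix; string(body[:]) in the InfluxDB test later in this commit gets the same treatment. A tiny standalone sketch:

package main

import "fmt"

func main() {
	body := []byte("lint")
	// body[:] is the same slice as body, so the slice expression is dropped,
	// as in strings.Join(r, ", ") above and string(body) further down.
	fmt.Println(string(body[:]) == string(body)) // true
}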

View File

@ -173,11 +173,12 @@ func (server *Server) Login(query *models.LoginUserQuery) (
var authAndBind bool
// Check if we can use a search user
if server.shouldAdminBind() {
switch {
case server.shouldAdminBind():
if err := server.AdminBind(); err != nil {
return nil, err
}
} else if server.shouldSingleBind() {
case server.shouldSingleBind():
authAndBind = true
err = server.UserBind(
server.singleBindDN(query.Username),
@ -186,7 +187,7 @@ func (server *Server) Login(query *models.LoginUserQuery) (
if err != nil {
return nil, err
}
} else {
default:
err := server.Connection.UnauthenticatedBind(server.Config.BindDN)
if err != nil {
return nil, err
@ -368,7 +369,7 @@ func (server *Server) getSearchRequest(
-1,
)
search = search + query
search += query
}
filter := fmt.Sprintf("(|%s)", search)

View File

@ -54,13 +54,14 @@ func TestLDAPHelpers(t *testing.T) {
result := getUsersIteration(logins, func(previous, current int) error {
i++
if i == 1 {
switch i {
case 1:
So(previous, ShouldEqual, 0)
So(current, ShouldEqual, 500)
} else if i == 2 {
case 2:
So(previous, ShouldEqual, 500)
So(current, ShouldEqual, 1000)
} else {
default:
So(previous, ShouldEqual, 1000)
So(current, ShouldEqual, 1500)
}

View File

@ -469,13 +469,13 @@ type mockLDAP struct {
// Login test fn
func (mock *mockLDAP) Login(*models.LoginUserQuery) (*models.ExternalUserInfo, error) {
mock.loginCalledTimes = mock.loginCalledTimes + 1
mock.loginCalledTimes++
return mock.loginReturn, mock.loginErrReturn
}
// Users test fn
func (mock *mockLDAP) Users([]string) ([]*models.ExternalUserInfo, error) {
mock.usersCalledTimes = mock.usersCalledTimes + 1
mock.usersCalledTimes++
if mock.usersCalledTimes == 1 {
return mock.usersFirstReturn, mock.usersErrReturn
@ -491,13 +491,13 @@ func (mock *mockLDAP) UserBind(string, string) error {
// Dial test fn
func (mock *mockLDAP) Dial() error {
mock.dialCalledTimes = mock.dialCalledTimes + 1
mock.dialCalledTimes++
return mock.dialErrReturn
}
// Close test fn
func (mock *mockLDAP) Close() {
mock.closeCalledTimes = mock.closeCalledTimes + 1
mock.closeCalledTimes++
}
func (mock *mockLDAP) Bind() error {

View File

@ -99,7 +99,7 @@ func validateDefaultUniqueness(datasources []*configs) error {
}
if ds.IsDefault {
defaultCount[ds.OrgID] = defaultCount[ds.OrgID] + 1
defaultCount[ds.OrgID]++
if defaultCount[ds.OrgID] > 1 {
return ErrInvalidConfigToManyDefault
}

View File

@ -59,13 +59,14 @@ func (rs *RenderingService) Init() error {
}
// set value used for domain attribute of renderKey cookie
if rs.Cfg.RendererUrl != "" {
switch {
case rs.Cfg.RendererUrl != "":
// RendererCallbackUrl has already been passed, it won't generate an error.
u, _ := url.Parse(rs.Cfg.RendererCallbackUrl)
rs.domain = u.Hostname()
} else if setting.HttpAddr != setting.DEFAULT_HTTP_ADDR {
case setting.HttpAddr != setting.DEFAULT_HTTP_ADDR:
rs.domain = setting.HttpAddr
} else {
default:
rs.domain = "localhost"
}
@ -132,19 +133,23 @@ func (rs *RenderingService) renderUnavailableImage() *RenderResult {
func (rs *RenderingService) Render(ctx context.Context, opts Opts) (*RenderResult, error) {
startTime := time.Now()
result, err := rs.render(ctx, opts)
elapsedTime := time.Since(startTime).Milliseconds()
if err == ErrTimeout {
metrics.MRenderingRequestTotal.WithLabelValues("timeout").Inc()
metrics.MRenderingSummary.WithLabelValues("timeout").Observe(float64(elapsedTime))
} else if err != nil {
metrics.MRenderingRequestTotal.WithLabelValues("failure").Inc()
metrics.MRenderingSummary.WithLabelValues("failure").Observe(float64(elapsedTime))
} else {
metrics.MRenderingRequestTotal.WithLabelValues("success").Inc()
metrics.MRenderingSummary.WithLabelValues("success").Observe(float64(elapsedTime))
result, err := rs.render(ctx, opts)
if err != nil {
if err == ErrTimeout {
metrics.MRenderingRequestTotal.WithLabelValues("timeout").Inc()
metrics.MRenderingSummary.WithLabelValues("timeout").Observe(float64(elapsedTime))
} else {
metrics.MRenderingRequestTotal.WithLabelValues("failure").Inc()
metrics.MRenderingSummary.WithLabelValues("failure").Observe(float64(elapsedTime))
}
return nil, err
}
return result, err
metrics.MRenderingRequestTotal.WithLabelValues("success").Inc()
metrics.MRenderingSummary.WithLabelValues("success").Observe(float64(elapsedTime))
return result, nil
}
func (rs *RenderingService) render(ctx context.Context, opts Opts) (*RenderResult, error) {

View File

@ -102,11 +102,12 @@ func SearchDashboardSnapshots(query *models.GetDashboardSnapshotsQuery) error {
}
// admins can see all snapshots, everyone else can only see their own snapshots
if query.SignedInUser.OrgRole == models.ROLE_ADMIN {
switch {
case query.SignedInUser.OrgRole == models.ROLE_ADMIN:
sess.Where("org_id = ?", query.OrgId)
} else if !query.SignedInUser.IsAnonymous {
case !query.SignedInUser.IsAnonymous:
sess.Where("org_id = ? AND user_id = ?", query.OrgId, query.SignedInUser.UserId)
} else {
default:
query.Result = snapshots
return nil
}

View File

@ -116,7 +116,8 @@ func UpdateOrgQuota(cmd *models.UpdateOrgQuotaCmd) error {
}
} else {
//update existing quota entry in the DB.
if _, err := sess.ID(quota.Id).Update(&quota); err != nil {
_, err := sess.ID(quota.Id).Update(&quota)
if err != nil {
return err
}
}
@ -218,7 +219,8 @@ func UpdateUserQuota(cmd *models.UpdateUserQuotaCmd) error {
}
} else {
//update existing quota entry in the DB.
if _, err := sess.ID(quota.Id).Update(&quota); err != nil {
_, err := sess.ID(quota.Id).Update(&quota)
if err != nil {
return err
}
}

View File

@ -8,12 +8,15 @@ func EnsureTagsExist(sess *DBSession, tags []*models.Tag) ([]*models.Tag, error)
var existingTag models.Tag
// check if it exists
if exists, err := sess.Table("tag").Where("`key`=? AND `value`=?", tag.Key, tag.Value).Get(&existingTag); err != nil {
exists, err := sess.Table("tag").Where("`key`=? AND `value`=?", tag.Key, tag.Value).Get(&existingTag)
if err != nil {
return nil, err
} else if exists {
}
if exists {
tag.Id = existingTag.Id
} else {
if _, err := sess.Table("tag").Insert(tag); err != nil {
_, err := sess.Table("tag").Insert(tag)
if err != nil {
return nil, err
}
}

View File

@ -395,11 +395,12 @@ func GetSignedInUser(query *models.GetSignedInUserQuery) error {
LEFT OUTER JOIN org on org.id = org_user.org_id `
sess := x.Table("user")
if query.UserId > 0 {
switch {
case query.UserId > 0:
sess.SQL(rawSql+"WHERE u.id=?", query.UserId)
} else if query.Login != "" {
case query.Login != "":
sess.SQL(rawSql+"WHERE u.login=?", query.Login)
} else if query.Email != "" {
case query.Email != "":
sess.SQL(rawSql+"WHERE u.email=?", query.Email)
}

View File

@ -208,7 +208,6 @@ func TestAppInsightsPluginRoutes(t *testing.T) {
})
}
}
func TestInsightsDimensionsUnmarshalJSON(t *testing.T) {
a := []byte(`"foo"`)
b := []byte(`["foo"]`)

View File

@ -314,16 +314,17 @@ func reverse(s string) string {
func interpolateFilterWildcards(value string) string {
matches := strings.Count(value, "*")
if matches == 2 && strings.HasSuffix(value, "*") && strings.HasPrefix(value, "*") {
switch {
case matches == 2 && strings.HasSuffix(value, "*") && strings.HasPrefix(value, "*"):
value = strings.Replace(value, "*", "", -1)
value = fmt.Sprintf(`has_substring("%s")`, value)
} else if matches == 1 && strings.HasPrefix(value, "*") {
case matches == 1 && strings.HasPrefix(value, "*"):
value = strings.Replace(value, "*", "", 1)
value = fmt.Sprintf(`ends_with("%s")`, value)
} else if matches == 1 && strings.HasSuffix(value, "*") {
case matches == 1 && strings.HasSuffix(value, "*"):
value = reverse(strings.Replace(reverse(value), "*", "", 1))
value = fmt.Sprintf(`starts_with("%s")`, value)
} else if matches != 0 {
case matches != 0:
value = string(wildcardRegexRe.ReplaceAllFunc([]byte(value), func(in []byte) []byte {
return []byte(strings.Replace(string(in), string(in), `\\`+string(in), 1))
}))
@ -339,19 +340,21 @@ func buildFilterString(metricType string, filterParts []string) string {
filterString := ""
for i, part := range filterParts {
mod := i % 4
if part == "AND" {
switch {
case part == "AND":
filterString += " "
} else if mod == 2 {
case mod == 2:
operator := filterParts[i-1]
if operator == "=~" || operator == "!=~" {
switch {
case operator == "=~" || operator == "!=~":
filterString = reverse(strings.Replace(reverse(filterString), "~", "", 1))
filterString += fmt.Sprintf(`monitoring.regex.full_match("%s")`, part)
} else if strings.Contains(part, "*") {
case strings.Contains(part, "*"):
filterString += interpolateFilterWildcards(part)
} else {
default:
filterString += fmt.Sprintf(`"%s"`, part)
}
} else {
default:
filterString += part
}
}
@ -398,11 +401,12 @@ func calculateAlignmentPeriod(alignmentPeriod string, intervalMs int64, duration
if alignmentPeriod == "cloud-monitoring-auto" || alignmentPeriod == "stackdriver-auto" { // legacy
alignmentPeriodValue := int(math.Max(float64(durationSeconds), 60.0))
if alignmentPeriodValue < 60*60*23 {
switch {
case alignmentPeriodValue < 60*60*23:
alignmentPeriod = "+60s"
} else if alignmentPeriodValue < 60*60*24*6 {
case alignmentPeriodValue < 60*60*24*6:
alignmentPeriod = "+300s"
} else {
default:
alignmentPeriod = "+3600s"
}
}
@ -735,11 +739,12 @@ func calcBucketBound(bucketOptions cloudMonitoringBucketOptions, n int) string {
return bucketBound
}
if bucketOptions.LinearBuckets != nil {
switch {
case bucketOptions.LinearBuckets != nil:
bucketBound = strconv.FormatInt(bucketOptions.LinearBuckets.Offset+(bucketOptions.LinearBuckets.Width*int64(n-1)), 10)
} else if bucketOptions.ExponentialBuckets != nil {
case bucketOptions.ExponentialBuckets != nil:
bucketBound = strconv.FormatInt(int64(bucketOptions.ExponentialBuckets.Scale*math.Pow(bucketOptions.ExponentialBuckets.GrowthFactor, float64(n-1))), 10)
} else if bucketOptions.ExplicitBuckets != nil {
case bucketOptions.ExplicitBuckets != nil:
bucketBound = fmt.Sprintf("%g", bucketOptions.ExplicitBuckets.Bounds[n])
}
return bucketBound

View File

@ -29,18 +29,21 @@ var credsCacheLock sync.RWMutex
// Session factory.
// Stubbable by tests.
//nolint:gocritic
var newSession = func(cfgs ...*aws.Config) (*session.Session, error) {
return session.NewSession(cfgs...)
}
// STS service factory.
// Stubbable by tests.
//nolint:gocritic
var newSTSService = func(p client.ConfigProvider, cfgs ...*aws.Config) stsiface.STSAPI {
return sts.New(p, cfgs...)
}
// EC2Metadata service factory.
// Stubbable by tests.
//nolint:gocritic
var newEC2Metadata = func(p client.ConfigProvider, cfgs ...*aws.Config) *ec2metadata.EC2Metadata {
return ec2metadata.New(p, cfgs...)
}

View File

@ -246,11 +246,12 @@ func (rp *responseParser) processMetrics(esAgg *simplejson.Json, target *Query,
bucket := simplejson.NewFromAny(v)
key := castToNullFloat(bucket.Get("key"))
var value null.Float
if statName == "std_deviation_bounds_upper" {
switch statName {
case "std_deviation_bounds_upper":
value = castToNullFloat(bucket.GetPath(metric.ID, "std_deviation_bounds", "upper"))
} else if statName == "std_deviation_bounds_lower" {
case "std_deviation_bounds_lower":
value = castToNullFloat(bucket.GetPath(metric.ID, "std_deviation_bounds", "lower"))
} else {
default:
value = castToNullFloat(bucket.GetPath(metric.ID, statName))
}
newSeries.Points = append(newSeries.Points, tsdb.TimePoint{value, key})
@ -349,11 +350,12 @@ func (rp *responseParser) processAggregationDocs(esAgg *simplejson.Json, aggDef
}
var value null.Float
if statName == "std_deviation_bounds_upper" {
switch statName {
case "std_deviation_bounds_upper":
value = castToNullFloat(bucket.GetPath(metric.ID, "std_deviation_bounds", "upper"))
} else if statName == "std_deviation_bounds_lower" {
case "std_deviation_bounds_lower":
value = castToNullFloat(bucket.GetPath(metric.ID, "std_deviation_bounds", "lower"))
} else {
default:
value = castToNullFloat(bucket.GetPath(metric.ID, statName))
}
@ -568,11 +570,12 @@ func getErrorFromElasticResponse(response *es.SearchResponse) *tsdb.QueryResult
reason := json.Get("reason").MustString()
rootCauseReason := json.Get("root_cause").GetIndex(0).Get("reason").MustString()
if rootCauseReason != "" {
switch {
case rootCauseReason != "":
result.ErrorString = rootCauseReason
} else if reason != "" {
case reason != "":
result.ErrorString = reason
} else {
default:
result.ErrorString = "Unknown elasticsearch error response"
}

View File

@ -57,7 +57,7 @@ func TestInfluxDB(t *testing.T) {
testBodyValues := url.Values{}
testBodyValues.Add("q", query)
testBody := testBodyValues.Encode()
So(string(body[:]), ShouldEqual, testBody)
So(string(body), ShouldEqual, testBody)
})
})

View File

@ -63,11 +63,12 @@ func (query *Query) renderTags() []string {
// quote value unless regex or number
var textValue string
if tag.Operator == "=~" || tag.Operator == "!~" {
switch tag.Operator {
case "=~", "!~":
textValue = tag.Value
} else if tag.Operator == "<" || tag.Operator == ">" {
case "<", ">":
textValue = tag.Value
} else {
default:
textValue = fmt.Sprintf("'%s'", strings.Replace(tag.Value, `\`, `\\`, -1))
}

View File

@ -55,6 +55,8 @@ var engineCache = engineCacheType{
var sqlIntervalCalculator = tsdb.NewIntervalCalculator(nil)
//nolint:gocritic
// NewXormEngine is an xorm.Engine factory, that can be stubbed by tests.
var NewXormEngine = func(driverName string, connectionString string) (*xorm.Engine, error) {
return xorm.NewEngine(driverName, connectionString)
}
@ -481,7 +483,7 @@ func ConvertSqlTimeColumnToEpochMs(values tsdb.RowValues, timeIndex int) {
values[timeIndex] = float64(value.UnixNano()) / float64(time.Millisecond)
case *time.Time:
if value != nil {
values[timeIndex] = float64((*value).UnixNano()) / float64(time.Millisecond)
values[timeIndex] = float64(value.UnixNano()) / float64(time.Millisecond)
}
case int64:
values[timeIndex] = int64(tsdb.EpochPrecisionToMs(float64(value)))

View File

@ -114,29 +114,22 @@ func init() {
})
registerScenario(&Scenario{
Id: "predictable_pulse",
Name: "Predictable Pulse",
Handler: func(query *tsdb.Query, context *tsdb.TsdbQuery) *tsdb.QueryResult {
return getPredictablePulse(query, context)
},
Id: "predictable_pulse",
Name: "Predictable Pulse",
Handler: getPredictablePulse,
Description: PredictablePulseDesc,
})
registerScenario(&Scenario{
Id: "predictable_csv_wave",
Name: "Predictable CSV Wave",
Handler: func(query *tsdb.Query, context *tsdb.TsdbQuery) *tsdb.QueryResult {
return getPredictableCSVWave(query, context)
},
Id: "predictable_csv_wave",
Name: "Predictable CSV Wave",
Handler: getPredictableCSVWave,
})
registerScenario(&Scenario{
Id: "random_walk_table",
Name: "Random Walk Table",
Handler: func(query *tsdb.Query, context *tsdb.TsdbQuery) *tsdb.QueryResult {
return getRandomWalkTable(query, context)
},
Id: "random_walk_table",
Name: "Random Walk Table",
Handler: getRandomWalkTable,
})
registerScenario(&Scenario{
@ -455,7 +448,7 @@ func getPredictablePulse(query *tsdb.Query, context *tsdb.TsdbQuery) *tsdb.Query
return queryRes
}
timeStep = timeStep * 1000 // Seconds to Milliseconds
timeStep *= 1000 // Seconds to Milliseconds
onFor := func(mod int64) (null.Float, error) { // How many items in the cycle should get the on value
var i int64
for i = 0; i < onCount; i++ {
@ -505,7 +498,7 @@ func getPredictableCSVWave(query *tsdb.Query, context *tsdb.TsdbQuery) *tsdb.Que
values[i] = val
}
timeStep = timeStep * 1000 // Seconds to Milliseconds
timeStep *= 1000 // Seconds to Milliseconds
valuesLen := int64(len(values))
getValue := func(mod int64) (null.Float, error) {
var i int64

View File

@ -18,7 +18,7 @@ enable = [
"errcheck",
# "gochecknoinits",
"goconst",
# "gocritic",
"gocritic",
"goimports",
"golint",
# "goprintffuncname",