sql: removed unnecessary error-check (#81808)

* sql: removed unnecessary error-check

* updated tests
Gábor Farkas 2024-02-07 08:41:49 +01:00 committed by GitHub
parent 7b8c7b623c
commit 8616f2e80a
5 changed files with 15 additions and 24 deletions
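
In short, the global sqleng.Interpolate hook only ever returned a nil error, so the error return value is dropped and every caller's dead error-handling path goes with it. A condensed before/after sketch of the signature change (paraphrased from the hunks below, not part of the diff itself):

// before
var Interpolate = func(query backend.DataQuery, timeRange backend.TimeRange, timeInterval string, sql string) (string, error) {
	// $__interval / $__unixEpoch* substitutions
	return sql, nil // the error was always nil
}

// after
var Interpolate = func(query backend.DataQuery, timeRange backend.TimeRange, timeInterval string, sql string) string {
	// same substitutions, nothing to propagate
	return sql
}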

View File

@@ -139,8 +139,8 @@ func TestIntegrationPostgresSnapshots(t *testing.T) {
 		sqleng.Interpolate = origInterpolate
 	})
-	sqleng.Interpolate = func(query backend.DataQuery, timeRange backend.TimeRange, timeInterval string, sql string) (string, error) {
-		return sql, nil
+	sqleng.Interpolate = func(query backend.DataQuery, timeRange backend.TimeRange, timeInterval string, sql string) string {
+		return sql
 	}
 	cfg := setting.NewCfg()

View File

@@ -187,8 +187,8 @@ func TestIntegrationPostgres(t *testing.T) {
 	t.Cleanup(func() {
 		sqleng.Interpolate = origInterpolate
 	})
-	sqleng.Interpolate = func(query backend.DataQuery, timeRange backend.TimeRange, timeInterval string, sql string) (string, error) {
-		return sql, nil
+	sqleng.Interpolate = func(query backend.DataQuery, timeRange backend.TimeRange, timeInterval string, sql string) string {
+		return sql
 	}
 	cfg := setting.NewCfg()

View File

@@ -42,8 +42,8 @@ func TestIntegrationMySQL(t *testing.T) {
 		sqleng.Interpolate = origInterpolate
 	})
-	sqleng.Interpolate = func(query backend.DataQuery, timeRange backend.TimeRange, timeInterval string, sql string) (string, error) {
-		return sql, nil
+	sqleng.Interpolate = func(query backend.DataQuery, timeRange backend.TimeRange, timeInterval string, sql string) string {
+		return sql
 	}
 	dsInfo := sqleng.DataSourceInfo{

View File

@@ -246,14 +246,10 @@ func (e *DataSourceHandler) executeQuery(query backend.DataQuery, wg *sync.WaitG
 	}
 	// global substitutions
-	interpolatedQuery, err := Interpolate(query, timeRange, e.dsInfo.JsonData.TimeInterval, queryJson.RawSql)
-	if err != nil {
-		errAppendDebug("interpolation failed", e.TransformQueryError(logger, err), interpolatedQuery)
-		return
-	}
+	interpolatedQuery := Interpolate(query, timeRange, e.dsInfo.JsonData.TimeInterval, queryJson.RawSql)
 	// data source specific substitutions
-	interpolatedQuery, err = e.macroEngine.Interpolate(&query, timeRange, interpolatedQuery)
+	interpolatedQuery, err := e.macroEngine.Interpolate(&query, timeRange, interpolatedQuery)
 	if err != nil {
 		errAppendDebug("interpolation failed", e.TransformQueryError(logger, err), interpolatedQuery)
 		return
@@ -372,7 +368,7 @@ func (e *DataSourceHandler) executeQuery(query backend.DataQuery, wg *sync.WaitG
 }
 // Interpolate provides global macros/substitutions for all sql datasources.
-var Interpolate = func(query backend.DataQuery, timeRange backend.TimeRange, timeInterval string, sql string) (string, error) {
+var Interpolate = func(query backend.DataQuery, timeRange backend.TimeRange, timeInterval string, sql string) string {
 	interval := query.Interval
 	sql = strings.ReplaceAll(sql, "$__interval_ms", strconv.FormatInt(interval.Milliseconds(), 10))
@@ -380,7 +376,7 @@ var Interpolate = func(query backend.DataQuery, timeRange backend.TimeRange, tim
 	sql = strings.ReplaceAll(sql, "$__unixEpochFrom()", fmt.Sprintf("%d", timeRange.From.UTC().Unix()))
 	sql = strings.ReplaceAll(sql, "$__unixEpochTo()", fmt.Sprintf("%d", timeRange.To.UTC().Unix()))
-	return sql, nil
+	return sql
 }
 func (e *DataSourceHandler) newProcessCfg(query backend.DataQuery, queryContext context.Context,

View File

@@ -28,22 +28,19 @@ func TestSQLEngine(t *testing.T) {
 		t.Run("interpolate 10 minutes $__interval", func(t *testing.T) {
 			query := backend.DataQuery{JSON: []byte("{}"), MaxDataPoints: 1500, Interval: time.Minute * 10}
-			sql, err := Interpolate(query, timeRange, "", text)
-			require.NoError(t, err)
+			sql := Interpolate(query, timeRange, "", text)
 			require.Equal(t, "10m $__timeGroupAlias(time,10m) 600000", sql)
 		})
 		t.Run("interpolate 4seconds $__interval", func(t *testing.T) {
 			query := backend.DataQuery{JSON: []byte("{}"), MaxDataPoints: 1500, Interval: time.Second * 4}
-			sql, err := Interpolate(query, timeRange, "", text)
-			require.NoError(t, err)
+			sql := Interpolate(query, timeRange, "", text)
 			require.Equal(t, "4s $__timeGroupAlias(time,4s) 4000", sql)
 		})
 		t.Run("interpolate 200 milliseconds $__interval", func(t *testing.T) {
 			query := backend.DataQuery{JSON: []byte("{}"), MaxDataPoints: 1500, Interval: time.Millisecond * 200}
-			sql, err := Interpolate(query, timeRange, "", text)
-			require.NoError(t, err)
+			sql := Interpolate(query, timeRange, "", text)
 			require.Equal(t, "200ms $__timeGroupAlias(time,200ms) 200", sql)
 		})
 	})
@@ -55,14 +52,12 @@ func TestSQLEngine(t *testing.T) {
 		query := backend.DataQuery{JSON: []byte("{}"), MaxDataPoints: 1500, Interval: time.Second * 60}
 		t.Run("interpolate __unixEpochFrom function", func(t *testing.T) {
-			sql, err := Interpolate(query, timeRange, "", "select $__unixEpochFrom()")
-			require.NoError(t, err)
+			sql := Interpolate(query, timeRange, "", "select $__unixEpochFrom()")
 			require.Equal(t, fmt.Sprintf("select %d", from.Unix()), sql)
 		})
 		t.Run("interpolate __unixEpochTo function", func(t *testing.T) {
-			sql, err := Interpolate(query, timeRange, "", "select $__unixEpochTo()")
-			require.NoError(t, err)
+			sql := Interpolate(query, timeRange, "", "select $__unixEpochTo()")
 			require.Equal(t, fmt.Sprintf("select %d", to.Unix()), sql)
 		})
 	})