SQL data sources: Convert to return data frames (#32257)

Convert SQL data sources to return data frames.

Co-authored-by: Marcus Efraimsson <marcus.efraimsson@gmail.com>
Co-authored-by: Arve Knudsen <arve.knudsen@gmail.com>
Co-authored-by: Will Browne <will.browne@grafana.com>
Co-authored-by: Hugo Häggmark <hugo.haggmark@gmail.com>
This commit is contained in:
ying-jeanne 2021-05-05 22:46:07 +08:00 committed by GitHub
parent 06c24476dc
commit bd66c8dde3
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23
36 changed files with 4947 additions and 3711 deletions

View File

@ -510,6 +510,14 @@ func IsTestDbPostgres() bool {
return false
}
// IsTestDBMSSQL reports whether the GRAFANA_TEST_DB environment variable
// selects MSSQL as the database to run integration tests against.
func IsTestDBMSSQL() bool {
	db, ok := os.LookupEnv("GRAFANA_TEST_DB")
	return ok && db == migrator.MSSQL
}
type DatabaseConfig struct {
Type string
Host string

View File

@ -69,7 +69,7 @@ func (ic *intervalCalculator) Calculate(timerange plugins.DataTimeRange, minInte
func GetIntervalFrom(dsInfo *models.DataSource, queryModel *simplejson.Json, defaultInterval time.Duration) (time.Duration, error) {
interval := queryModel.Get("interval").MustString("")
if interval == "" && dsInfo.JsonData != nil {
if interval == "" && dsInfo != nil && dsInfo.JsonData != nil {
dsInterval := dsInfo.JsonData.Get("timeInterval").MustString("")
if dsInterval != "" {
interval = dsInterval

View File

@ -1,13 +1,15 @@
package mssql
import (
"database/sql"
"fmt"
"net/url"
"reflect"
"regexp"
"strconv"
"strings"
"github.com/grafana/grafana-plugin-sdk-go/data"
"github.com/grafana/grafana-plugin-sdk-go/data/sqlutil"
"github.com/grafana/grafana/pkg/setting"
"github.com/grafana/grafana/pkg/util"
@ -16,7 +18,6 @@ import (
"github.com/grafana/grafana/pkg/models"
"github.com/grafana/grafana/pkg/plugins"
"github.com/grafana/grafana/pkg/tsdb/sqleng"
"xorm.io/core"
)
var logger = log.New("tsdb.mssql")
@ -115,49 +116,6 @@ type mssqlQueryResultTransformer struct {
log log.Logger
}
// TransformQueryResult scans the current row of rows into a slice of values,
// then post-processes the []byte values that denisenkom/go-mssqldb leaves
// unconverted: MONEY/SMALLMONEY/DECIMAL become float64, UNIQUEIDENTIFIER
// becomes its canonical string form, and anything else falls back to string.
// Conversion failures are logged at debug level and leave the raw value as-is.
func (t *mssqlQueryResultTransformer) TransformQueryResult(columnTypes []*sql.ColumnType, rows *core.Rows) (
plugins.DataRowValues, error) {
values := make([]interface{}, len(columnTypes))
valuePtrs := make([]interface{}, len(columnTypes))
for i := range columnTypes {
// debug output on large tables causes high memory utilization/leak
// t.log.Debug("type", "type", stype)
valuePtrs[i] = &values[i]
}
// Scan into pointers so the driver fills the values slice in place.
if err := rows.Scan(valuePtrs...); err != nil {
return nil, err
}
// convert types not handled by denisenkom/go-mssqldb
// unhandled types are returned as []byte
for i := 0; i < len(columnTypes); i++ {
if value, ok := values[i].([]byte); ok {
switch columnTypes[i].DatabaseTypeName() {
case "MONEY", "SMALLMONEY", "DECIMAL":
if v, err := strconv.ParseFloat(string(value), 64); err == nil {
values[i] = v
} else {
// Best-effort conversion: keep the raw bytes on failure.
t.log.Debug("Rows", "Error converting numeric to float", value)
}
case "UNIQUEIDENTIFIER":
// The driver returns the raw 16-byte GUID; render it as a string.
uuid := &mssql.UniqueIdentifier{}
if err := uuid.Scan(value); err == nil {
values[i] = uuid.String()
} else {
t.log.Debug("Rows", "Error converting uniqueidentifier to string", value)
}
default:
t.log.Debug("Rows", "Unknown database type", columnTypes[i].DatabaseTypeName(), "value", value)
values[i] = string(value)
}
}
}
return values, nil
}
func (t *mssqlQueryResultTransformer) TransformQueryError(err error) error {
// go-mssql overrides source error, so we currently match on string
// ref https://github.com/denisenkom/go-mssqldb/blob/045585d74f9069afe2e115b6235eb043c8047043/tds.go#L904
@ -168,3 +126,85 @@ func (t *mssqlQueryResultTransformer) TransformQueryError(err error) error {
return err
}
func (t *mssqlQueryResultTransformer) GetConverterList() []sqlutil.StringConverter {
return []sqlutil.StringConverter{
{
Name: "handle MONEY",
InputScanKind: reflect.Slice,
InputTypeName: "MONEY",
ConversionFunc: func(in *string) (*string, error) { return in, nil },
Replacer: &sqlutil.StringFieldReplacer{
OutputFieldType: data.FieldTypeNullableFloat64,
ReplaceFunc: func(in *string) (interface{}, error) {
if in == nil {
return nil, nil
}
v, err := strconv.ParseFloat(*in, 64)
if err != nil {
return nil, err
}
return &v, nil
},
},
},
{
Name: "handle SMALLMONEY",
InputScanKind: reflect.Slice,
InputTypeName: "SMALLMONEY",
ConversionFunc: func(in *string) (*string, error) { return in, nil },
Replacer: &sqlutil.StringFieldReplacer{
OutputFieldType: data.FieldTypeNullableFloat64,
ReplaceFunc: func(in *string) (interface{}, error) {
if in == nil {
return nil, nil
}
v, err := strconv.ParseFloat(*in, 64)
if err != nil {
return nil, err
}
return &v, nil
},
},
},
{
Name: "handle DECIMAL",
InputScanKind: reflect.Slice,
InputTypeName: "DECIMAL",
ConversionFunc: func(in *string) (*string, error) { return in, nil },
Replacer: &sqlutil.StringFieldReplacer{
OutputFieldType: data.FieldTypeNullableFloat64,
ReplaceFunc: func(in *string) (interface{}, error) {
if in == nil {
return nil, nil
}
v, err := strconv.ParseFloat(*in, 64)
if err != nil {
return nil, err
}
return &v, nil
},
},
},
{
Name: "handle UNIQUEIDENTIFIER",
InputScanKind: reflect.Slice,
InputTypeName: "UNIQUEIDENTIFIER",
ConversionFunc: func(in *string) (*string, error) { return in, nil },
Replacer: &sqlutil.StringFieldReplacer{
OutputFieldType: data.FieldTypeNullableString,
ReplaceFunc: func(in *string) (interface{}, error) {
if in == nil {
return nil, nil
}
uuid := &mssql.UniqueIdentifier{}
if err := uuid.Scan([]byte(*in)); err != nil {
return nil, err
}
v := uuid.String()
return &v, nil
},
},
},
}
}

File diff suppressed because it is too large Load Diff

View File

@ -1,15 +1,17 @@
package mysql
import (
"database/sql"
"errors"
"fmt"
"net/url"
"reflect"
"strconv"
"strings"
"time"
"github.com/VividCortex/mysqlerr"
"github.com/grafana/grafana-plugin-sdk-go/data"
"github.com/grafana/grafana-plugin-sdk-go/data/sqlutil"
"github.com/grafana/grafana/pkg/setting"
"github.com/go-sql-driver/mysql"
@ -17,7 +19,12 @@ import (
"github.com/grafana/grafana/pkg/models"
"github.com/grafana/grafana/pkg/plugins"
"github.com/grafana/grafana/pkg/tsdb/sqleng"
"xorm.io/core"
)
const (
dateFormat = "2006-01-02"
dateTimeFormat1 = "2006-01-02 15:04:05"
dateTimeFormat2 = "2006-01-02T15:04:05Z"
)
func characterEscape(s string, escapeChar string) string {
@ -77,66 +84,6 @@ type mysqlQueryResultTransformer struct {
log log.Logger
}
// TransformQueryResult scans the current row of rows into typed scan targets
// derived from each column's ScanType (BIT columns are forced to []byte),
// then unwraps driver wrapper types (sql.RawBytes, mysql.NullTime,
// sql.NullInt64, sql.NullFloat64) into plain Go values, with invalid NULL
// wrappers becoming nil. DECIMAL columns, which arrive as strings, are
// additionally parsed into float64 (nil on parse failure).
func (t *mysqlQueryResultTransformer) TransformQueryResult(columnTypes []*sql.ColumnType, rows *core.Rows) (
plugins.DataRowValues, error) {
values := make([]interface{}, len(columnTypes))
for i := range values {
scanType := columnTypes[i].ScanType()
// Allocate a pointer of the driver-reported scan type for each column.
values[i] = reflect.New(scanType).Interface()
if columnTypes[i].DatabaseTypeName() == "BIT" {
values[i] = new([]byte)
}
}
if err := rows.Scan(values...); err != nil {
return nil, err
}
// Unwrap the scanned pointer/wrapper types into plain values.
for i := 0; i < len(columnTypes); i++ {
typeName := reflect.ValueOf(values[i]).Type().String()
switch typeName {
case "*sql.RawBytes":
values[i] = string(*values[i].(*sql.RawBytes))
case "*mysql.NullTime":
sqlTime := (*values[i].(*mysql.NullTime))
if sqlTime.Valid {
values[i] = sqlTime.Time
} else {
values[i] = nil
}
case "*sql.NullInt64":
nullInt64 := (*values[i].(*sql.NullInt64))
if nullInt64.Valid {
values[i] = nullInt64.Int64
} else {
values[i] = nil
}
case "*sql.NullFloat64":
nullFloat64 := (*values[i].(*sql.NullFloat64))
if nullFloat64.Valid {
values[i] = nullFloat64.Float64
} else {
values[i] = nil
}
}
// DECIMAL arrives as a string after the unwrap above; parse it to float64.
if columnTypes[i].DatabaseTypeName() == "DECIMAL" {
f, err := strconv.ParseFloat(values[i].(string), 64)
if err == nil {
values[i] = f
} else {
values[i] = nil
}
}
}
return values, nil
}
func (t *mysqlQueryResultTransformer) TransformQueryError(err error) error {
var driverErr *mysql.MySQLError
if errors.As(err, &driverErr) {
@ -151,3 +98,199 @@ func (t *mysqlQueryResultTransformer) TransformQueryError(err error) error {
}
var errQueryFailed = errors.New("query failed - please inspect Grafana server log for details")
func (t *mysqlQueryResultTransformer) GetConverterList() []sqlutil.StringConverter {
// For the MySQL driver , we have these possible data types:
// https://www.w3schools.com/sql/sql_datatypes.asp#:~:text=In%20MySQL%20there%20are%20three,numeric%2C%20and%20date%20and%20time.
// Since by default, we convert all into String, we need only to handle the Numeric data types
return []sqlutil.StringConverter{
{
Name: "handle DOUBLE",
InputScanKind: reflect.Struct,
InputTypeName: "DOUBLE",
ConversionFunc: func(in *string) (*string, error) { return in, nil },
Replacer: &sqlutil.StringFieldReplacer{
OutputFieldType: data.FieldTypeNullableFloat64,
ReplaceFunc: func(in *string) (interface{}, error) {
if in == nil {
return nil, nil
}
v, err := strconv.ParseFloat(*in, 64)
if err != nil {
return nil, err
}
return &v, nil
},
},
},
{
Name: "handle BIGINT",
InputScanKind: reflect.Struct,
InputTypeName: "BIGINT",
ConversionFunc: func(in *string) (*string, error) { return in, nil },
Replacer: &sqlutil.StringFieldReplacer{
OutputFieldType: data.FieldTypeNullableInt64,
ReplaceFunc: func(in *string) (interface{}, error) {
if in == nil {
return nil, nil
}
v, err := strconv.ParseInt(*in, 10, 64)
if err != nil {
return nil, err
}
return &v, nil
},
},
},
{
Name: "handle DECIMAL",
InputScanKind: reflect.Slice,
InputTypeName: "DECIMAL",
ConversionFunc: func(in *string) (*string, error) { return in, nil },
Replacer: &sqlutil.StringFieldReplacer{
OutputFieldType: data.FieldTypeNullableFloat64,
ReplaceFunc: func(in *string) (interface{}, error) {
if in == nil {
return nil, nil
}
v, err := strconv.ParseFloat(*in, 64)
if err != nil {
return nil, err
}
return &v, nil
},
},
},
{
Name: "handle DATETIME",
InputScanKind: reflect.Struct,
InputTypeName: "DATETIME",
ConversionFunc: func(in *string) (*string, error) { return in, nil },
Replacer: &sqlutil.StringFieldReplacer{
OutputFieldType: data.FieldTypeNullableTime,
ReplaceFunc: func(in *string) (interface{}, error) {
if in == nil {
return nil, nil
}
v, err := time.Parse(dateTimeFormat1, *in)
if err == nil {
return &v, nil
}
v, err = time.Parse(dateTimeFormat2, *in)
if err == nil {
return &v, nil
}
return nil, err
},
},
},
{
Name: "handle DATE",
InputScanKind: reflect.Struct,
InputTypeName: "DATE",
ConversionFunc: func(in *string) (*string, error) { return in, nil },
Replacer: &sqlutil.StringFieldReplacer{
OutputFieldType: data.FieldTypeNullableTime,
ReplaceFunc: func(in *string) (interface{}, error) {
if in == nil {
return nil, nil
}
v, err := time.Parse(dateFormat, *in)
if err == nil {
return &v, nil
}
v, err = time.Parse(dateTimeFormat1, *in)
if err == nil {
return &v, nil
}
v, err = time.Parse(dateTimeFormat2, *in)
if err == nil {
return &v, nil
}
return nil, err
},
},
},
{
Name: "handle TIMESTAMP",
InputScanKind: reflect.Struct,
InputTypeName: "TIMESTAMP",
ConversionFunc: func(in *string) (*string, error) { return in, nil },
Replacer: &sqlutil.StringFieldReplacer{
OutputFieldType: data.FieldTypeNullableTime,
ReplaceFunc: func(in *string) (interface{}, error) {
if in == nil {
return nil, nil
}
v, err := time.Parse(dateTimeFormat1, *in)
if err == nil {
return &v, nil
}
v, err = time.Parse(dateTimeFormat2, *in)
if err == nil {
return &v, nil
}
return nil, err
},
},
},
{
Name: "handle YEAR",
InputScanKind: reflect.Struct,
InputTypeName: "YEAR",
ConversionFunc: func(in *string) (*string, error) { return in, nil },
Replacer: &sqlutil.StringFieldReplacer{
OutputFieldType: data.FieldTypeNullableInt64,
ReplaceFunc: func(in *string) (interface{}, error) {
if in == nil {
return nil, nil
}
v, err := strconv.ParseInt(*in, 10, 64)
if err != nil {
return nil, err
}
return &v, nil
},
},
},
{
Name: "handle INT",
InputScanKind: reflect.Struct,
InputTypeName: "INT",
ConversionFunc: func(in *string) (*string, error) { return in, nil },
Replacer: &sqlutil.StringFieldReplacer{
OutputFieldType: data.FieldTypeNullableInt64,
ReplaceFunc: func(in *string) (interface{}, error) {
if in == nil {
return nil, nil
}
v, err := strconv.ParseInt(*in, 10, 64)
if err != nil {
return nil, err
}
return &v, nil
},
},
},
{
Name: "handle FLOAT",
InputScanKind: reflect.Struct,
InputTypeName: "FLOAT",
ConversionFunc: func(in *string) (*string, error) { return in, nil },
Replacer: &sqlutil.StringFieldReplacer{
OutputFieldType: data.FieldTypeNullableFloat64,
ReplaceFunc: func(in *string) (interface{}, error) {
if in == nil {
return nil, nil
}
v, err := strconv.ParseFloat(*in, 64)
if err != nil {
return nil, err
}
return &v, nil
},
},
},
}
}

File diff suppressed because it is too large Load Diff

View File

@ -1,11 +1,13 @@
package postgres
import (
"database/sql"
"fmt"
"reflect"
"strconv"
"strings"
"github.com/grafana/grafana-plugin-sdk-go/data"
"github.com/grafana/grafana-plugin-sdk-go/data/sqlutil"
"github.com/grafana/grafana/pkg/registry"
"github.com/grafana/grafana/pkg/setting"
"github.com/grafana/grafana/pkg/util/errutil"
@ -14,7 +16,6 @@ import (
"github.com/grafana/grafana/pkg/models"
"github.com/grafana/grafana/pkg/plugins"
"github.com/grafana/grafana/pkg/tsdb/sqleng"
"xorm.io/core"
)
func init() {
@ -114,7 +115,6 @@ func (s *PostgresService) generateConnectionString(datasource *models.DataSource
connStr += fmt.Sprintf(" sslmode='%s'", escape(tlsSettings.Mode))
// Attach root certificate if provided
// Attach root certificate if provided
if tlsSettings.RootCertFile != "" {
s.logger.Debug("Setting server root certificate", "tlsRootCert", tlsSettings.RootCertFile)
@ -137,43 +137,68 @@ type postgresQueryResultTransformer struct {
log log.Logger
}
// TransformQueryResult scans the current row of rows into a slice of values,
// then post-processes the []byte values that lib/pq leaves unconverted:
// NUMERIC becomes float64 (raw bytes kept, with a debug log, on parse
// failure); UNKNOWN/CIDR/INET/MACADDR and any other unrecognized type fall
// back to string.
func (t *postgresQueryResultTransformer) TransformQueryResult(columnTypes []*sql.ColumnType, rows *core.Rows) (
plugins.DataRowValues, error) {
values := make([]interface{}, len(columnTypes))
valuePtrs := make([]interface{}, len(columnTypes))
for i := 0; i < len(columnTypes); i++ {
valuePtrs[i] = &values[i]
}
// Scan into pointers so the driver fills the values slice in place.
if err := rows.Scan(valuePtrs...); err != nil {
return nil, err
}
// convert types not handled by lib/pq
// unhandled types are returned as []byte
for i := 0; i < len(columnTypes); i++ {
if value, ok := values[i].([]byte); ok {
switch columnTypes[i].DatabaseTypeName() {
case "NUMERIC":
if v, err := strconv.ParseFloat(string(value), 64); err == nil {
values[i] = v
} else {
// Best-effort conversion: keep the raw bytes on failure.
t.log.Debug("Rows", "Error converting numeric to float", value)
}
case "UNKNOWN", "CIDR", "INET", "MACADDR":
// char literals have type UNKNOWN
values[i] = string(value)
default:
t.log.Debug("Rows", "Unknown database type", columnTypes[i].DatabaseTypeName(), "value", value)
values[i] = string(value)
}
}
}
return values, nil
}
// TransformQueryError passes driver errors through unchanged; no
// postgres-specific rewriting is applied here.
func (t *postgresQueryResultTransformer) TransformQueryError(err error) error {
return err
}
func (t *postgresQueryResultTransformer) GetConverterList() []sqlutil.StringConverter {
return []sqlutil.StringConverter{
{
Name: "handle FLOAT4",
InputScanKind: reflect.Interface,
InputTypeName: "FLOAT4",
ConversionFunc: func(in *string) (*string, error) { return in, nil },
Replacer: &sqlutil.StringFieldReplacer{
OutputFieldType: data.FieldTypeNullableFloat64,
ReplaceFunc: func(in *string) (interface{}, error) {
if in == nil {
return nil, nil
}
v, err := strconv.ParseFloat(*in, 64)
if err != nil {
return nil, err
}
return &v, nil
},
},
},
{
Name: "handle FLOAT8",
InputScanKind: reflect.Interface,
InputTypeName: "FLOAT8",
ConversionFunc: func(in *string) (*string, error) { return in, nil },
Replacer: &sqlutil.StringFieldReplacer{
OutputFieldType: data.FieldTypeNullableFloat64,
ReplaceFunc: func(in *string) (interface{}, error) {
if in == nil {
return nil, nil
}
v, err := strconv.ParseFloat(*in, 64)
if err != nil {
return nil, err
}
return &v, nil
},
},
},
{
Name: "handle NUMERIC",
InputScanKind: reflect.Interface,
InputTypeName: "NUMERIC",
ConversionFunc: func(in *string) (*string, error) { return in, nil },
Replacer: &sqlutil.StringFieldReplacer{
OutputFieldType: data.FieldTypeNullableFloat64,
ReplaceFunc: func(in *string) (interface{}, error) {
if in == nil {
return nil, nil
}
v, err := strconv.ParseFloat(*in, 64)
if err != nil {
return nil, err
}
return &v, nil
},
},
},
}
}

View File

@ -10,6 +10,7 @@ import (
"testing"
"time"
"github.com/grafana/grafana-plugin-sdk-go/data"
"github.com/grafana/grafana/pkg/components/securejsondata"
"github.com/grafana/grafana/pkg/components/simplejson"
"github.com/grafana/grafana/pkg/infra/log"
@ -151,10 +152,9 @@ func TestGenerateConnectionString(t *testing.T) {
// devenv/README.md for setup instructions.
func TestPostgres(t *testing.T) {
// change to true to run the PostgreSQL tests
runPostgresTests := false
// runPostgresTests := true
const runPostgresTests = false
if !sqlstore.IsTestDbPostgres() && !runPostgresTests {
if !(sqlstore.IsTestDbPostgres() || runPostgresTests) {
t.Skip()
}
@ -213,7 +213,7 @@ func TestPostgres(t *testing.T) {
c12_date date,
c13_time time without time zone,
c14_timetz time with time zone,
time date,
c15_interval interval
);
`
@ -226,7 +226,7 @@ func TestPostgres(t *testing.T) {
4.5,6.7,1.1,1.2,
'char10','varchar10','text',
now(),now(),now(),now(),now(),'15m'::interval
now(),now(),now(),now(),now(),now(),'15m'::interval
);
`
_, err = sess.Exec(sql)
@ -250,32 +250,36 @@ func TestPostgres(t *testing.T) {
queryResult := resp.Results["A"]
require.NoError(t, queryResult.Error)
column := queryResult.Tables[0].Rows[0]
require.Equal(t, int64(1), column[0].(int64))
require.Equal(t, int64(2), column[1].(int64))
require.Equal(t, int64(3), column[2].(int64))
frames, _ := queryResult.Dataframes.Decoded()
require.Len(t, frames, 1)
require.Len(t, frames[0].Fields, 17)
require.Equal(t, float64(4.5), column[3].(float64))
require.Equal(t, float64(6.7), column[4].(float64))
require.Equal(t, float64(1.1), column[5].(float64))
require.Equal(t, float64(1.2), column[6].(float64))
require.Equal(t, int16(1), *frames[0].Fields[0].At(0).(*int16))
require.Equal(t, int32(2), *frames[0].Fields[1].At(0).(*int32))
require.Equal(t, int64(3), *frames[0].Fields[2].At(0).(*int64))
require.Equal(t, "char10 ", column[7].(string))
require.Equal(t, "varchar10", column[8].(string))
require.Equal(t, "text", column[9].(string))
require.Equal(t, float64(4.5), *frames[0].Fields[3].At(0).(*float64))
require.Equal(t, float64(6.7), *frames[0].Fields[4].At(0).(*float64))
require.Equal(t, float64(1.1), *frames[0].Fields[5].At(0).(*float64))
require.Equal(t, float64(1.2), *frames[0].Fields[6].At(0).(*float64))
_, ok := column[10].(time.Time)
require.True(t, ok)
_, ok = column[11].(time.Time)
require.True(t, ok)
_, ok = column[12].(time.Time)
require.True(t, ok)
_, ok = column[13].(time.Time)
require.True(t, ok)
_, ok = column[14].(time.Time)
require.True(t, ok)
require.Equal(t, "char10 ", *frames[0].Fields[7].At(0).(*string))
require.Equal(t, "varchar10", *frames[0].Fields[8].At(0).(*string))
require.Equal(t, "text", *frames[0].Fields[9].At(0).(*string))
require.Equal(t, "00:15:00", column[15].(string))
_, ok := frames[0].Fields[10].At(0).(*time.Time)
require.True(t, ok)
_, ok = frames[0].Fields[11].At(0).(*time.Time)
require.True(t, ok)
_, ok = frames[0].Fields[12].At(0).(*time.Time)
require.True(t, ok)
_, ok = frames[0].Fields[13].At(0).(*time.Time)
require.True(t, ok)
_, ok = frames[0].Fields[14].At(0).(*time.Time)
require.True(t, ok)
_, ok = frames[0].Fields[15].At(0).(*time.Time)
require.True(t, ok)
require.Equal(t, "00:15:00", *frames[0].Fields[16].At(0).(*string))
})
})
@ -335,26 +339,27 @@ func TestPostgres(t *testing.T) {
queryResult := resp.Results["A"]
require.NoError(t, queryResult.Error)
points := queryResult.Series[0].Points
frames, _ := queryResult.Dataframes.Decoded()
require.Len(t, frames, 1)
require.Equal(t, 4, frames[0].Fields[0].Len())
// without fill this should result in 4 buckets
require.Len(t, points, 4)
dt := fromStart
for i := 0; i < 2; i++ {
aValue := points[i][0].Float64
aTime := time.Unix(int64(points[i][1].Float64)/1000, 0)
aValue := *frames[0].Fields[1].At(i).(*float64)
aTime := *frames[0].Fields[0].At(i).(*time.Time)
require.Equal(t, float64(15), aValue)
require.Equal(t, dt, aTime)
require.Equal(t, int64(0), aTime.Unix()%300)
dt = dt.Add(5 * time.Minute)
}
// adjust for 10 minute gap between first and second set of points
dt = dt.Add(10 * time.Minute)
for i := 2; i < 4; i++ {
aValue := points[i][0].Float64
aTime := time.Unix(int64(points[i][1].Float64)/1000, 0)
aValue := *frames[0].Fields[1].At(i).(*float64)
aTime := *frames[0].Fields[0].At(i).(*time.Time)
require.Equal(t, float64(20), aValue)
require.Equal(t, dt, aTime)
dt = dt.Add(5 * time.Minute)
@ -388,10 +393,12 @@ func TestPostgres(t *testing.T) {
resp, err := exe.DataQuery(context.Background(), nil, query)
require.NoError(t, err)
queryResult := resp.Results["A"]
frames, _ := queryResult.Dataframes.Decoded()
require.NoError(t, queryResult.Error)
require.Equal(t,
"SELECT floor(extract(epoch from time)/60)*60 AS time, avg(value) as value FROM metric GROUP BY 1 ORDER BY 1",
queryResult.Meta.Get(sqleng.MetaKeyExecutedQueryString).MustString())
frames[0].Meta.ExecutedQueryString)
})
t.Run("When doing a metric query using timeGroup with NULL fill enabled", func(t *testing.T) {
@ -416,35 +423,36 @@ func TestPostgres(t *testing.T) {
queryResult := resp.Results["A"]
require.NoError(t, queryResult.Error)
points := queryResult.Series[0].Points
require.Len(t, points, 7)
frames, _ := queryResult.Dataframes.Decoded()
require.Equal(t, 1, len(frames))
require.Equal(t, 7, frames[0].Fields[0].Len())
dt := fromStart
for i := 0; i < 2; i++ {
aValue := points[i][0].Float64
aTime := time.Unix(int64(points[i][1].Float64)/1000, 0)
aValue := *frames[0].Fields[1].At(i).(*float64)
aTime := *frames[0].Fields[0].At(i).(*time.Time)
require.Equal(t, float64(15), aValue)
require.Equal(t, dt, aTime)
require.True(t, aTime.Equal(dt))
dt = dt.Add(5 * time.Minute)
}
// check for NULL values inserted by fill
require.False(t, points[2][0].Valid)
require.False(t, points[3][0].Valid)
require.Nil(t, frames[0].Fields[1].At(2))
require.Nil(t, frames[0].Fields[1].At(3))
// adjust for 10 minute gap between first and second set of points
dt = dt.Add(10 * time.Minute)
for i := 4; i < 6; i++ {
aValue := points[i][0].Float64
aTime := time.Unix(int64(points[i][1].Float64)/1000, 0)
aValue := *frames[0].Fields[1].At(i).(*float64)
aTime := *frames[0].Fields[0].At(i).(*time.Time)
require.Equal(t, float64(20), aValue)
require.Equal(t, dt, aTime)
require.True(t, aTime.Equal(dt))
dt = dt.Add(5 * time.Minute)
}
// check for NULL values inserted by fill
require.False(t, points[6][0].Valid)
require.Nil(t, frames[0].Fields[1].At(6))
})
t.Run("When doing a metric query using timeGroup with value fill enabled", func(t *testing.T) {
@ -469,8 +477,9 @@ func TestPostgres(t *testing.T) {
queryResult := resp.Results["A"]
require.NoError(t, queryResult.Error)
points := queryResult.Series[0].Points
require.Equal(t, float64(1.5), points[3][0].Float64)
frames, _ := queryResult.Dataframes.Decoded()
require.Equal(t, 1, len(frames))
require.Equal(t, 1.5, *frames[0].Fields[1].At(3).(*float64))
})
})
@ -496,10 +505,11 @@ func TestPostgres(t *testing.T) {
queryResult := resp.Results["A"]
require.NoError(t, queryResult.Error)
points := queryResult.Series[0].Points
require.Equal(t, float64(15.0), points[2][0].Float64)
require.Equal(t, float64(15.0), points[3][0].Float64)
require.Equal(t, float64(20.0), points[6][0].Float64)
frames, _ := queryResult.Dataframes.Decoded()
require.Equal(t, 1, len(frames))
require.Equal(t, float64(15.0), *frames[0].Fields[1].At(2).(*float64))
require.Equal(t, float64(15.0), *frames[0].Fields[1].At(3).(*float64))
require.Equal(t, float64(20.0), *frames[0].Fields[1].At(6).(*float64))
})
t.Run("Given a table with metrics having multiple values and measurements", func(t *testing.T) {
@ -570,7 +580,7 @@ func TestPostgres(t *testing.T) {
require.NoError(t, err)
t.Run(
"When doing a metric query using epoch (int64) as time column and value column (int64) should return metric with time in milliseconds",
"When doing a metric query using epoch (int64) as time column and value column (int64) should return metric with time in time.Time",
func(t *testing.T) {
query := plugins.DataQuery{
Queries: []plugins.DataSubQuery{
@ -589,11 +599,12 @@ func TestPostgres(t *testing.T) {
queryResult := resp.Results["A"]
require.NoError(t, queryResult.Error)
require.Equal(t, 1, len(queryResult.Series))
require.Equal(t, float64(tInitial.UnixNano()/1e6), queryResult.Series[0].Points[0][1].Float64)
frames, _ := queryResult.Dataframes.Decoded()
require.Len(t, frames, 1)
require.True(t, tInitial.Equal(*frames[0].Fields[0].At(0).(*time.Time)))
})
t.Run("When doing a metric query using epoch (int64 nullable) as time column and value column (int64 nullable,) should return metric with time in milliseconds",
t.Run("When doing a metric query using epoch (int64 nullable) as time column and value column (int64 nullable,) should return metric with time in time.Time",
func(t *testing.T) {
query := plugins.DataQuery{
Queries: []plugins.DataSubQuery{
@ -612,11 +623,12 @@ func TestPostgres(t *testing.T) {
queryResult := resp.Results["A"]
require.NoError(t, queryResult.Error)
require.Len(t, queryResult.Series, 1)
require.Equal(t, float64(tInitial.UnixNano()/1e6), queryResult.Series[0].Points[0][1].Float64)
frames, _ := queryResult.Dataframes.Decoded()
require.Len(t, frames, 1)
require.True(t, tInitial.Equal(*frames[0].Fields[0].At(0).(*time.Time)))
})
t.Run("When doing a metric query using epoch (float64) as time column and value column (float64), should return metric with time in milliseconds",
t.Run("When doing a metric query using epoch (float64) as time column and value column (float64), should return metric with time in time.Time",
func(t *testing.T) {
query := plugins.DataQuery{
Queries: []plugins.DataSubQuery{
@ -635,11 +647,12 @@ func TestPostgres(t *testing.T) {
queryResult := resp.Results["A"]
require.NoError(t, queryResult.Error)
require.Len(t, queryResult.Series, 1)
require.Equal(t, float64(tInitial.UnixNano()/1e6), queryResult.Series[0].Points[0][1].Float64)
frames, _ := queryResult.Dataframes.Decoded()
require.Len(t, frames, 1)
require.True(t, tInitial.Equal(*frames[0].Fields[0].At(0).(*time.Time)))
})
t.Run("When doing a metric query using epoch (float64 nullable) as time column and value column (float64 nullable), should return metric with time in milliseconds",
t.Run("When doing a metric query using epoch (float64 nullable) as time column and value column (float64 nullable), should return metric with time in time.Time",
func(t *testing.T) {
query := plugins.DataQuery{
Queries: []plugins.DataSubQuery{
@ -658,11 +671,12 @@ func TestPostgres(t *testing.T) {
queryResult := resp.Results["A"]
require.NoError(t, queryResult.Error)
require.Len(t, queryResult.Series, 1)
require.Equal(t, float64(tInitial.UnixNano()/1e6), queryResult.Series[0].Points[0][1].Float64)
frames, _ := queryResult.Dataframes.Decoded()
require.Equal(t, 1, len(frames))
require.True(t, tInitial.Equal(*frames[0].Fields[0].At(0).(*time.Time)))
})
t.Run("When doing a metric query using epoch (int32) as time column and value column (int32), should return metric with time in milliseconds",
t.Run("When doing a metric query using epoch (int32) as time column and value column (int32), should return metric with time in time.Time",
func(t *testing.T) {
query := plugins.DataQuery{
Queries: []plugins.DataSubQuery{
@ -681,11 +695,12 @@ func TestPostgres(t *testing.T) {
queryResult := resp.Results["A"]
require.NoError(t, queryResult.Error)
require.Len(t, queryResult.Series, 1)
require.Equal(t, float64(tInitial.UnixNano()/1e6), queryResult.Series[0].Points[0][1].Float64)
frames, _ := queryResult.Dataframes.Decoded()
require.Equal(t, 1, len(frames))
require.True(t, tInitial.Equal(*frames[0].Fields[0].At(0).(*time.Time)))
})
t.Run("When doing a metric query using epoch (int32 nullable) as time column and value column (int32 nullable), should return metric with time in milliseconds",
t.Run("When doing a metric query using epoch (int32 nullable) as time column and value column (int32 nullable), should return metric with time in time.Time",
func(t *testing.T) {
query := plugins.DataQuery{
Queries: []plugins.DataSubQuery{
@ -704,11 +719,12 @@ func TestPostgres(t *testing.T) {
queryResult := resp.Results["A"]
require.NoError(t, queryResult.Error)
require.Len(t, queryResult.Series, 1)
require.Equal(t, float64(tInitial.UnixNano()/1e6), queryResult.Series[0].Points[0][1].Float64)
frames, _ := queryResult.Dataframes.Decoded()
require.Equal(t, 1, len(frames))
require.True(t, tInitial.Equal(*frames[0].Fields[0].At(0).(*time.Time)))
})
t.Run("When doing a metric query using epoch (float32) as time column and value column (float32), should return metric with time in milliseconds",
t.Run("When doing a metric query using epoch (float32) as time column and value column (float32), should return metric with time in time.Time",
func(t *testing.T) {
query := plugins.DataQuery{
Queries: []plugins.DataSubQuery{
@ -727,11 +743,13 @@ func TestPostgres(t *testing.T) {
queryResult := resp.Results["A"]
require.NoError(t, queryResult.Error)
require.Len(t, queryResult.Series, 1)
require.Equal(t, float64(float32(tInitial.Unix()))*1e3, queryResult.Series[0].Points[0][1].Float64)
frames, _ := queryResult.Dataframes.Decoded()
require.Equal(t, 1, len(frames))
aTime := time.Unix(0, int64(float64(float32(tInitial.Unix()))*1e3)*int64(time.Millisecond))
require.True(t, aTime.Equal(*frames[0].Fields[0].At(0).(*time.Time)))
})
t.Run("When doing a metric query using epoch (float32 nullable) as time column and value column (float32 nullable), should return metric with time in milliseconds",
t.Run("When doing a metric query using epoch (float32 nullable) as time column and value column (float32 nullable), should return metric with time in time.Time",
func(t *testing.T) {
query := plugins.DataQuery{
Queries: []plugins.DataSubQuery{
@ -750,8 +768,10 @@ func TestPostgres(t *testing.T) {
queryResult := resp.Results["A"]
require.NoError(t, queryResult.Error)
require.Len(t, queryResult.Series, 1)
require.Equal(t, float64(float32(tInitial.Unix()))*1e3, queryResult.Series[0].Points[0][1].Float64)
frames, _ := queryResult.Dataframes.Decoded()
require.Equal(t, 1, len(frames))
aTime := time.Unix(0, int64(float64(float32(tInitial.Unix()))*1e3)*int64(time.Millisecond))
require.True(t, aTime.Equal(*frames[0].Fields[0].At(0).(*time.Time)))
})
t.Run("When doing a metric query grouping by time and select metric column should return correct series", func(t *testing.T) {
@ -772,9 +792,11 @@ func TestPostgres(t *testing.T) {
queryResult := resp.Results["A"]
require.NoError(t, queryResult.Error)
require.Len(t, queryResult.Series, 2)
require.Equal(t, "Metric A - value one", queryResult.Series[0].Name)
require.Equal(t, "Metric B - value one", queryResult.Series[1].Name)
frames, _ := queryResult.Dataframes.Decoded()
require.Equal(t, 1, len(frames))
require.Equal(t, 3, len(frames[0].Fields))
require.Equal(t, data.Labels{"metric": "Metric A - value one"}, frames[0].Fields[1].Labels)
require.Equal(t, data.Labels{"metric": "Metric B - value one"}, frames[0].Fields[2].Labels)
})
t.Run("When doing a metric query with metric column and multiple value columns", func(t *testing.T) {
@ -795,11 +817,18 @@ func TestPostgres(t *testing.T) {
queryResult := resp.Results["A"]
require.NoError(t, queryResult.Error)
require.Len(t, queryResult.Series, 4)
require.Equal(t, "Metric A valueOne", queryResult.Series[0].Name)
require.Equal(t, "Metric A valueTwo", queryResult.Series[1].Name)
require.Equal(t, "Metric B valueOne", queryResult.Series[2].Name)
require.Equal(t, "Metric B valueTwo", queryResult.Series[3].Name)
frames, err := queryResult.Dataframes.Decoded()
require.NoError(t, err)
require.Equal(t, 1, len(frames))
require.Equal(t, 5, len(frames[0].Fields))
require.Equal(t, "valueOne", frames[0].Fields[1].Name)
require.Equal(t, data.Labels{"metric": "Metric A"}, frames[0].Fields[1].Labels)
require.Equal(t, "valueOne", frames[0].Fields[2].Name)
require.Equal(t, data.Labels{"metric": "Metric B"}, frames[0].Fields[2].Labels)
require.Equal(t, "valueTwo", frames[0].Fields[3].Name)
require.Equal(t, data.Labels{"metric": "Metric A"}, frames[0].Fields[3].Labels)
require.Equal(t, "valueTwo", frames[0].Fields[4].Name)
require.Equal(t, data.Labels{"metric": "Metric B"}, frames[0].Fields[4].Labels)
})
t.Run("When doing a metric query grouping by time should return correct series", func(t *testing.T) {
@ -820,9 +849,11 @@ func TestPostgres(t *testing.T) {
queryResult := resp.Results["A"]
require.NoError(t, queryResult.Error)
require.Len(t, queryResult.Series, 2)
require.Equal(t, "valueOne", queryResult.Series[0].Name)
require.Equal(t, "valueTwo", queryResult.Series[1].Name)
frames, _ := queryResult.Dataframes.Decoded()
require.Equal(t, 1, len(frames))
require.Equal(t, 3, len(frames[0].Fields))
require.Equal(t, "valueOne", frames[0].Fields[1].Name)
require.Equal(t, "valueTwo", frames[0].Fields[2].Name)
})
t.Run("When doing a query with timeFrom,timeTo,unixEpochFrom,unixEpochTo macros", func(t *testing.T) {
@ -850,9 +881,11 @@ func TestPostgres(t *testing.T) {
require.NoError(t, err)
queryResult := resp.Results["A"]
require.NoError(t, queryResult.Error)
frames, _ := queryResult.Dataframes.Decoded()
require.Len(t, frames, 1)
require.Equal(t,
"SELECT time FROM metric_values WHERE time > '2018-03-15T12:55:00Z' OR time < '2018-03-15T12:55:00Z' OR 1 < 1521118500 OR 1521118800 > 1 ORDER BY 1",
queryResult.Meta.Get(sqleng.MetaKeyExecutedQueryString).MustString())
frames[0].Meta.ExecutedQueryString)
})
})
@ -910,7 +943,10 @@ func TestPostgres(t *testing.T) {
resp, err := exe.DataQuery(context.Background(), nil, query)
queryResult := resp.Results["Deploys"]
require.NoError(t, err)
require.Len(t, queryResult.Tables[0].Rows, 3)
frames, _ := queryResult.Dataframes.Decoded()
require.Len(t, frames, 1)
require.Len(t, frames[0].Fields, 3)
})
t.Run("When doing an annotation query of ticket events should return expected result", func(t *testing.T) {
@ -933,7 +969,10 @@ func TestPostgres(t *testing.T) {
resp, err := exe.DataQuery(context.Background(), nil, query)
queryResult := resp.Results["Tickets"]
require.NoError(t, err)
require.Len(t, queryResult.Tables[0].Rows, 3)
frames, _ := queryResult.Dataframes.Decoded()
require.Equal(t, 1, len(frames))
require.Equal(t, 3, len(frames[0].Fields))
})
t.Run("When doing an annotation query with a time column in datetime format", func(t *testing.T) {
@ -960,14 +999,15 @@ func TestPostgres(t *testing.T) {
require.NoError(t, err)
queryResult := resp.Results["A"]
require.NoError(t, queryResult.Error)
require.Len(t, queryResult.Tables[0].Rows, 1)
columns := queryResult.Tables[0].Rows[0]
frames, _ := queryResult.Dataframes.Decoded()
require.Equal(t, 1, len(frames))
require.Equal(t, 3, len(frames[0].Fields))
//Should be in milliseconds
require.Equal(t, float64(dt.UnixNano()/1e6), columns[0].(float64))
// Should be in time.Time
require.Equal(t, dt.Unix(), (*frames[0].Fields[0].At(0).(*time.Time)).Unix())
})
t.Run("When doing an annotation query with a time column in epoch second format should return ms", func(t *testing.T) {
t.Run("When doing an annotation query with a time column in epoch second format should return time.Time", func(t *testing.T) {
dt := time.Date(2018, 3, 14, 21, 20, 6, 527e6, time.UTC)
query := plugins.DataQuery{
@ -975,7 +1015,7 @@ func TestPostgres(t *testing.T) {
{
Model: simplejson.NewFromAny(map[string]interface{}{
"rawSql": fmt.Sprintf(`SELECT
%d as time,
%d as time,
'message' as text,
'tag1,tag2' as tags
`, dt.Unix()),
@ -990,14 +1030,16 @@ func TestPostgres(t *testing.T) {
require.NoError(t, err)
queryResult := resp.Results["A"]
require.NoError(t, queryResult.Error)
require.Len(t, queryResult.Tables[0].Rows, 1)
columns := queryResult.Tables[0].Rows[0]
//Should be in milliseconds
require.Equal(t, dt.Unix()*1000, columns[0].(int64))
frames, _ := queryResult.Dataframes.Decoded()
require.Equal(t, 1, len(frames))
require.Equal(t, 3, len(frames[0].Fields))
// Should be in time.Time
require.Equal(t, dt.Unix(), (*frames[0].Fields[0].At(0).(*time.Time)).Unix())
})
t.Run("When doing an annotation query with a time column in epoch second format (t *testing.Tint) should return ms", func(t *testing.T) {
t.Run("When doing an annotation query with a time column in epoch second format (t *testing.Tint) should return time.Time", func(t *testing.T) {
dt := time.Date(2018, 3, 14, 21, 20, 6, 527e6, time.UTC)
query := plugins.DataQuery{
@ -1005,7 +1047,7 @@ func TestPostgres(t *testing.T) {
{
Model: simplejson.NewFromAny(map[string]interface{}{
"rawSql": fmt.Sprintf(`SELECT
cast(%d as bigint) as time,
cast(%d as bigint) as time,
'message' as text,
'tag1,tag2' as tags
`, dt.Unix()),
@ -1020,14 +1062,16 @@ func TestPostgres(t *testing.T) {
require.NoError(t, err)
queryResult := resp.Results["A"]
require.NoError(t, queryResult.Error)
require.Len(t, queryResult.Tables[0].Rows, 1)
columns := queryResult.Tables[0].Rows[0]
//Should be in milliseconds
require.Equal(t, dt.Unix()*1000, columns[0].(int64))
frames, _ := queryResult.Dataframes.Decoded()
require.Equal(t, 1, len(frames))
require.Equal(t, 3, len(frames[0].Fields))
// Should be in time.Time
require.Equal(t, dt.Unix(), (*frames[0].Fields[0].At(0).(*time.Time)).Unix())
})
t.Run("When doing an annotation query with a time column in epoch millisecond format should return ms", func(t *testing.T) {
t.Run("When doing an annotation query with a time column in epoch millisecond format should return time.Time", func(t *testing.T) {
dt := time.Date(2018, 3, 14, 21, 20, 6, 527e6, time.UTC)
query := plugins.DataQuery{
@ -1050,11 +1094,13 @@ func TestPostgres(t *testing.T) {
require.NoError(t, err)
queryResult := resp.Results["A"]
require.NoError(t, queryResult.Error)
require.Len(t, queryResult.Tables[0].Rows, 1)
columns := queryResult.Tables[0].Rows[0]
//Should be in milliseconds
require.Equal(t, dt.Unix()*1000, columns[0].(int64))
frames, _ := queryResult.Dataframes.Decoded()
require.Equal(t, 1, len(frames))
require.Equal(t, 3, len(frames[0].Fields))
// Should be in time.Time
require.Equal(t, dt.Unix(), (*frames[0].Fields[0].At(0).(*time.Time)).Unix())
})
t.Run("When doing an annotation query with a time column holding a bigint null value should return nil", func(t *testing.T) {
@ -1078,11 +1124,13 @@ func TestPostgres(t *testing.T) {
require.NoError(t, err)
queryResult := resp.Results["A"]
require.NoError(t, queryResult.Error)
require.Len(t, queryResult.Tables[0].Rows, 1)
columns := queryResult.Tables[0].Rows[0]
//Should be in milliseconds
require.Nil(t, columns[0])
frames, _ := queryResult.Dataframes.Decoded()
require.Equal(t, 1, len(frames))
require.Equal(t, 3, len(frames[0].Fields))
// Should be in time.Time
require.Nil(t, frames[0].Fields[0].At(0))
})
t.Run("When doing an annotation query with a time column holding a timestamp null value should return nil", func(t *testing.T) {
@ -1091,10 +1139,10 @@ func TestPostgres(t *testing.T) {
{
Model: simplejson.NewFromAny(map[string]interface{}{
"rawSql": `SELECT
cast(null as timestamp) as time,
'message' as text,
'tag1,tag2' as tags
`,
cast(null as timestamp) as time,
'message' as text,
'tag1,tag2' as tags
`,
"format": "table",
}),
RefID: "A",
@ -1106,11 +1154,13 @@ func TestPostgres(t *testing.T) {
require.NoError(t, err)
queryResult := resp.Results["A"]
require.NoError(t, queryResult.Error)
require.Len(t, queryResult.Tables[0].Rows, 1)
columns := queryResult.Tables[0].Rows[0]
//Should be in milliseconds
assert.Nil(t, columns[0])
frames, _ := queryResult.Dataframes.Decoded()
require.Equal(t, 1, len(frames))
require.Equal(t, 3, len(frames[0].Fields))
// Should be in time.Time
assert.Nil(t, frames[0].Fields[0].At(0))
})
})
}

136
pkg/tsdb/sqleng/resample.go Normal file
View File

@ -0,0 +1,136 @@
package sqleng
import (
"fmt"
"time"
"github.com/grafana/grafana-plugin-sdk-go/data"
)
// getRowFillValues builds one output row — a value per field of f — for the
// resampled timestamp currentTime. Uses data.FillMissing settings to fill in
// values that are missing, which normally happens when the selected query
// interval doesn't match the intervals of the data returned from the query.
func getRowFillValues(f *data.Frame, tsSchema data.TimeSeriesSchema, currentTime time.Time,
	fillMissing *data.FillMissing, intermediateRows []int, lastSeenRowIdx int) []interface{} {
	row := make([]interface{}, 0, len(f.Fields))
	for fieldIdx, field := range f.Fields {
		// The time index gets the new resampled timestamp, matching the
		// field's concrete element type (time.Time vs *time.Time).
		if fieldIdx == tsSchema.TimeIndex {
			if field.Type() == data.FieldTypeTime {
				row = append(row, currentTime)
			} else {
				row = append(row, &currentTime)
			}
			continue
		}

		isValueField := false
		for _, valueIdx := range tsSchema.ValueIndices {
			if valueIdx == fieldIdx {
				isValueField = true
				break
			}
		}

		// For a value field: use the last source value inside this interval
		// if any exist, otherwise fill according to the fillMissing mode.
		// For any other field (e.g. a string label — a wide frame should
		// not have one): carry over the last seen value, if there is one.
		var cell interface{}
		switch {
		case isValueField && len(intermediateRows) > 0:
			// Instead of taking the last seen value we could aggregate the
			// intermediate values (avg, sum, min or max) for each field.
			cell = f.At(fieldIdx, intermediateRows[len(intermediateRows)-1])
		case isValueField:
			if filled, err := data.GetMissing(fillMissing, field, lastSeenRowIdx); err == nil {
				cell = filled
			}
		case lastSeenRowIdx >= 0:
			cell = f.At(fieldIdx, lastSeenRowIdx)
		}
		row = append(row, cell)
	}
	return row
}
// resample resamples the provided time-series data.Frame so that its rows
// align to qm.Interval ticks within qm.TimeRange. This is needed when the
// selected query interval doesn't match the intervals of the time-series
// field in the data.Frame. Source rows that fall inside one tick are
// collapsed to the last of them; ticks with no source rows are filled
// according to qm.FillMissing (see getRowFillValues).
//
// Returns the input frame unchanged together with an error when the frame
// is not a time series, and unchanged without error when no interval is set.
func resample(f *data.Frame, qm dataQueryModel) (*data.Frame, error) {
	tsSchema := f.TimeSeriesSchema()
	if tsSchema.Type == data.TimeSeriesTypeNot {
		return f, fmt.Errorf("can not fill missing, not timeseries frame")
	}

	if qm.Interval == 0 {
		return f, nil
	}

	// The source row count is invariant during resampling; compute it once
	// instead of on every inner-loop iteration.
	rowLen, err := f.RowLen()
	if err != nil {
		return f, err
	}

	// Build an empty frame with the same field schema (type, name, labels)
	// to hold the resampled rows.
	newFields := make([]*data.Field, 0, len(f.Fields))
	for _, field := range f.Fields {
		newField := data.NewFieldFromFieldType(field.Type(), 0)
		newField.Name = field.Name
		newField.Labels = field.Labels
		newFields = append(newFields, newField)
	}
	resampledFrame := data.NewFrame(f.Name, newFields...)
	resampledFrame.Meta = f.Meta

	resampledRowidx := 0
	lastSeenRowIdx := -1
	timeField := f.Fields[tsSchema.TimeIndex]

	for currentTime := qm.TimeRange.From; !currentTime.After(qm.TimeRange.To); currentTime = currentTime.Add(qm.Interval) {
		initialRowIdx := 0
		if lastSeenRowIdx >= 0 {
			// Continue scanning after the last consumed source row. This must
			// be >= 0, not > 0: otherwise row 0 would be re-scanned and its
			// value re-emitted on every subsequent tick, ignoring the
			// fill-missing mode.
			initialRowIdx = lastSeenRowIdx + 1
		}
		// Collect the source rows belonging to the current tick.
		intermediateRows := make([]int, 0)
		for initialRowIdx < rowLen {
			t, ok := timeField.ConcreteAt(initialRowIdx)
			if !ok {
				return f, fmt.Errorf("time point is nil")
			}
			tv, ok := t.(time.Time)
			if !ok {
				// Guard the assertion instead of panicking on an unexpected
				// time-field element type.
				return f, fmt.Errorf("unexpected type of time point: %T", t)
			}
			if tv.After(currentTime) {
				// The row is past this tick, but if it lies before the next
				// tick it is still consumed by this one.
				nextTime := currentTime.Add(qm.Interval)
				if tv.Before(nextTime) {
					intermediateRows = append(intermediateRows, initialRowIdx)
					lastSeenRowIdx = initialRowIdx
					initialRowIdx++
				}
				break
			}
			intermediateRows = append(intermediateRows, initialRowIdx)
			lastSeenRowIdx = initialRowIdx
			initialRowIdx++
		}

		// No intermediate points; set values following fill missing mode.
		fieldVals := getRowFillValues(f, tsSchema, currentTime, qm.FillMissing, intermediateRows, lastSeenRowIdx)

		resampledFrame.InsertRow(resampledRowidx, fieldVals...)
		resampledRowidx++
	}

	return resampledFrame, nil
}

View File

@ -0,0 +1,309 @@
package sqleng
import (
"testing"
"time"
"github.com/google/go-cmp/cmp"
"github.com/grafana/grafana-plugin-sdk-go/backend"
"github.com/grafana/grafana-plugin-sdk-go/data"
"github.com/stretchr/testify/require"
"github.com/xorcare/pointer"
)
// TestResampleWide verifies resample against wide-format frames: one case per
// fill-missing mode (null, value, previous), one with a larger interval that
// collapses multiple source rows into a single tick, and one where source
// rows fall outside the query time range.
func TestResampleWide(t *testing.T) {
	tests := []struct {
		name        string
		input       *data.Frame          // source frame to resample
		fillMissing *data.FillMissing    // fill mode for ticks without source rows
		timeRange   backend.TimeRange    // query window the output is aligned to
		interval    time.Duration        // resampling tick size
		output      *data.Frame          // expected resampled frame
	}{
		{
			// Ticks without data become nil; rows inside a tick collapse to the last one.
			name:        "interval 1s; fill null",
			fillMissing: &data.FillMissing{Mode: data.FillModeNull},
			timeRange: backend.TimeRange{
				From: time.Date(2020, 1, 2, 3, 4, 18, 0, time.UTC),
				To:   time.Date(2020, 1, 2, 3, 4, 26, 0, time.UTC),
			},
			interval: time.Second,
			input: data.NewFrame("wide_test",
				data.NewField("Time", nil, []time.Time{
					time.Date(2020, 1, 2, 3, 4, 19, 0, time.UTC),
					time.Date(2020, 1, 2, 3, 4, 20, 0, time.UTC),
					time.Date(2020, 1, 2, 3, 4, 24, 0, time.UTC),
				}),
				data.NewField("Values Ints", nil, []*int64{
					pointer.Int64(10),
					pointer.Int64(12),
					pointer.Int64(15),
				}),
				data.NewField(`Values Floats`, data.Labels{"Animal Factor": "sloth"}, []*float64{
					pointer.Float64(10.5),
					pointer.Float64(12.5),
					pointer.Float64(15.0),
				})),
			output: data.NewFrame("wide_test",
				data.NewField("Time", nil, []time.Time{
					time.Date(2020, 1, 2, 3, 4, 18, 0, time.UTC),
					time.Date(2020, 1, 2, 3, 4, 19, 0, time.UTC),
					time.Date(2020, 1, 2, 3, 4, 20, 0, time.UTC),
					time.Date(2020, 1, 2, 3, 4, 21, 0, time.UTC),
					time.Date(2020, 1, 2, 3, 4, 22, 0, time.UTC),
					time.Date(2020, 1, 2, 3, 4, 23, 0, time.UTC),
					time.Date(2020, 1, 2, 3, 4, 24, 0, time.UTC),
					time.Date(2020, 1, 2, 3, 4, 25, 0, time.UTC),
					time.Date(2020, 1, 2, 3, 4, 26, 0, time.UTC),
				}),
				data.NewField("Values Ints", nil, []*int64{
					nil,
					pointer.Int64(10),
					pointer.Int64(12),
					nil,
					nil,
					nil,
					pointer.Int64(15),
					nil,
					nil,
				}),
				data.NewField(`Values Floats`, data.Labels{"Animal Factor": "sloth"}, []*float64{
					nil,
					pointer.Float64(10.5),
					pointer.Float64(12.5),
					nil,
					nil,
					nil,
					pointer.Float64(15.0),
					nil,
					nil,
				})),
		},
		{
			// Ticks without data get the constant fill value (-1).
			name:        "interval 1s; fill value",
			fillMissing: &data.FillMissing{Mode: data.FillModeValue, Value: -1},
			timeRange: backend.TimeRange{
				From: time.Date(2020, 1, 2, 3, 4, 18, 0, time.UTC),
				To:   time.Date(2020, 1, 2, 3, 4, 26, 0, time.UTC),
			},
			interval: time.Second,
			input: data.NewFrame("wide_test",
				data.NewField("Time", nil, []time.Time{
					time.Date(2020, 1, 2, 3, 4, 19, 0, time.UTC),
					time.Date(2020, 1, 2, 3, 4, 20, 0, time.UTC),
					time.Date(2020, 1, 2, 3, 4, 24, 0, time.UTC),
				}),
				data.NewField("Values Ints", nil, []*int64{
					pointer.Int64(10),
					pointer.Int64(12),
					pointer.Int64(15),
				}),
				data.NewField(`Values Floats`, data.Labels{"Animal Factor": "sloth"}, []*float64{
					pointer.Float64(10.5),
					pointer.Float64(12.5),
					pointer.Float64(15.0),
				})),
			output: data.NewFrame("wide_test",
				data.NewField("Time", nil, []time.Time{
					time.Date(2020, 1, 2, 3, 4, 18, 0, time.UTC),
					time.Date(2020, 1, 2, 3, 4, 19, 0, time.UTC),
					time.Date(2020, 1, 2, 3, 4, 20, 0, time.UTC),
					time.Date(2020, 1, 2, 3, 4, 21, 0, time.UTC),
					time.Date(2020, 1, 2, 3, 4, 22, 0, time.UTC),
					time.Date(2020, 1, 2, 3, 4, 23, 0, time.UTC),
					time.Date(2020, 1, 2, 3, 4, 24, 0, time.UTC),
					time.Date(2020, 1, 2, 3, 4, 25, 0, time.UTC),
					time.Date(2020, 1, 2, 3, 4, 26, 0, time.UTC),
				}),
				data.NewField("Values Ints", nil, []*int64{
					pointer.Int64(-1),
					pointer.Int64(10),
					pointer.Int64(12),
					pointer.Int64(-1),
					pointer.Int64(-1),
					pointer.Int64(-1),
					pointer.Int64(15),
					pointer.Int64(-1),
					pointer.Int64(-1),
				}),
				data.NewField(`Values Floats`, data.Labels{"Animal Factor": "sloth"}, []*float64{
					pointer.Float64(-1),
					pointer.Float64(10.5),
					pointer.Float64(12.5),
					pointer.Float64(-1),
					pointer.Float64(-1),
					pointer.Float64(-1),
					pointer.Float64(15.0),
					pointer.Float64(-1),
					pointer.Float64(-1),
				})),
		},
		{
			// Ticks without data repeat the previously seen value; the first
			// tick has no previous value, so it stays nil.
			name:        "interval 1s; fill previous",
			fillMissing: &data.FillMissing{Mode: data.FillModePrevious},
			timeRange: backend.TimeRange{
				From: time.Date(2020, 1, 2, 3, 4, 18, 0, time.UTC),
				To:   time.Date(2020, 1, 2, 3, 4, 26, 0, time.UTC),
			},
			interval: time.Second,
			input: data.NewFrame("wide_test",
				data.NewField("Time", nil, []time.Time{
					time.Date(2020, 1, 2, 3, 4, 19, 0, time.UTC),
					time.Date(2020, 1, 2, 3, 4, 20, 0, time.UTC),
					time.Date(2020, 1, 2, 3, 4, 24, 0, time.UTC),
				}),
				data.NewField("Values Ints", nil, []*int64{
					pointer.Int64(10),
					pointer.Int64(12),
					pointer.Int64(15),
				}),
				data.NewField(`Values Floats`, data.Labels{"Animal Factor": "sloth"}, []*float64{
					pointer.Float64(10.5),
					pointer.Float64(12.5),
					pointer.Float64(15.0),
				})),
			output: data.NewFrame("wide_test",
				data.NewField("Time", nil, []time.Time{
					time.Date(2020, 1, 2, 3, 4, 18, 0, time.UTC),
					time.Date(2020, 1, 2, 3, 4, 19, 0, time.UTC),
					time.Date(2020, 1, 2, 3, 4, 20, 0, time.UTC),
					time.Date(2020, 1, 2, 3, 4, 21, 0, time.UTC),
					time.Date(2020, 1, 2, 3, 4, 22, 0, time.UTC),
					time.Date(2020, 1, 2, 3, 4, 23, 0, time.UTC),
					time.Date(2020, 1, 2, 3, 4, 24, 0, time.UTC),
					time.Date(2020, 1, 2, 3, 4, 25, 0, time.UTC),
					time.Date(2020, 1, 2, 3, 4, 26, 0, time.UTC),
				}),
				data.NewField("Values Ints", nil, []*int64{
					nil,
					pointer.Int64(10),
					pointer.Int64(12),
					pointer.Int64(12),
					pointer.Int64(12),
					pointer.Int64(12),
					pointer.Int64(15),
					pointer.Int64(15),
					pointer.Int64(15),
				}),
				data.NewField(`Values Floats`, data.Labels{"Animal Factor": "sloth"}, []*float64{
					nil,
					pointer.Float64(10.5),
					pointer.Float64(12.5),
					pointer.Float64(12.5),
					pointer.Float64(12.5),
					pointer.Float64(12.5),
					pointer.Float64(15.0),
					pointer.Float64(15.0),
					pointer.Float64(15.0),
				})),
		},
		{
			// Two source rows (18s and 19s) fall inside the first 2s tick;
			// the tick takes the last of them (12 / 12.5).
			name:        "interval 2s; fill null",
			fillMissing: &data.FillMissing{Mode: data.FillModeNull},
			timeRange: backend.TimeRange{
				From: time.Date(2020, 1, 2, 3, 4, 18, 0, time.UTC),
				To:   time.Date(2020, 1, 2, 3, 4, 26, 0, time.UTC),
			},
			interval: 2 * time.Second,
			input: data.NewFrame("wide_test",
				data.NewField("Time", nil, []time.Time{
					time.Date(2020, 1, 2, 3, 4, 18, 0, time.UTC),
					time.Date(2020, 1, 2, 3, 4, 19, 0, time.UTC),
					time.Date(2020, 1, 2, 3, 4, 24, 0, time.UTC),
				}),
				data.NewField("Values Ints", nil, []*int64{
					pointer.Int64(10),
					pointer.Int64(12),
					pointer.Int64(15),
				}),
				data.NewField(`Values Floats`, data.Labels{"Animal Factor": "sloth"}, []*float64{
					pointer.Float64(10.5),
					pointer.Float64(12.5),
					pointer.Float64(15.0),
				})),
			output: data.NewFrame("wide_test",
				data.NewField("Time", nil, []time.Time{
					time.Date(2020, 1, 2, 3, 4, 18, 0, time.UTC),
					time.Date(2020, 1, 2, 3, 4, 20, 0, time.UTC),
					time.Date(2020, 1, 2, 3, 4, 22, 0, time.UTC),
					time.Date(2020, 1, 2, 3, 4, 24, 0, time.UTC),
					time.Date(2020, 1, 2, 3, 4, 26, 0, time.UTC),
				}),
				data.NewField("Values Ints", nil, []*int64{
					pointer.Int64(12),
					nil,
					nil,
					pointer.Int64(15),
					nil,
				}),
				data.NewField(`Values Floats`, data.Labels{"Animal Factor": "sloth"}, []*float64{
					pointer.Float64(12.5),
					nil,
					nil,
					pointer.Float64(15.0),
					nil,
				})),
		},
		{
			// The 19s source row precedes the 20s window start; it is absorbed
			// into the first tick, and the output is clipped to the time range.
			name:        "interval 1s; fill null; rows outside timerange window",
			fillMissing: &data.FillMissing{Mode: data.FillModeNull},
			timeRange: backend.TimeRange{
				From: time.Date(2020, 1, 2, 3, 4, 20, 0, time.UTC),
				To:   time.Date(2020, 1, 2, 3, 4, 24, 0, time.UTC),
			},
			interval: time.Second,
			input: data.NewFrame("wide_test",
				data.NewField("Time", nil, []time.Time{
					time.Date(2020, 1, 2, 3, 4, 19, 0, time.UTC),
					time.Date(2020, 1, 2, 3, 4, 20, 0, time.UTC),
					time.Date(2020, 1, 2, 3, 4, 24, 0, time.UTC),
				}),
				data.NewField("Values Ints", nil, []*int64{
					pointer.Int64(10),
					pointer.Int64(12),
					pointer.Int64(15),
				}),
				data.NewField(`Values Floats`, data.Labels{"Animal Factor": "sloth"}, []*float64{
					pointer.Float64(10.5),
					pointer.Float64(12.5),
					pointer.Float64(15.0),
				})),
			output: data.NewFrame("wide_test",
				data.NewField("Time", nil, []time.Time{
					time.Date(2020, 1, 2, 3, 4, 20, 0, time.UTC),
					time.Date(2020, 1, 2, 3, 4, 21, 0, time.UTC),
					time.Date(2020, 1, 2, 3, 4, 22, 0, time.UTC),
					time.Date(2020, 1, 2, 3, 4, 23, 0, time.UTC),
					time.Date(2020, 1, 2, 3, 4, 24, 0, time.UTC),
				}),
				data.NewField("Values Ints", nil, []*int64{
					pointer.Int64(12),
					nil,
					nil,
					nil,
					pointer.Int64(15),
				}),
				data.NewField(`Values Floats`, data.Labels{"Animal Factor": "sloth"}, []*float64{
					pointer.Float64(12.5),
					nil,
					nil,
					nil,
					pointer.Float64(15.0),
				})),
		},
	}
	for _, tt := range tests {
		t.Run(tt.name, func(t *testing.T) {
			frame, err := resample(tt.input, dataQueryModel{
				FillMissing: tt.fillMissing,
				TimeRange:   tt.timeRange,
				Interval:    tt.interval,
			})
			require.NoError(t, err)
			// Frame comparison ignores internal representation differences
			// (e.g. unexported fields) via the SDK's canonical compare options.
			if diff := cmp.Diff(tt.output, frame, data.FrameTestCompareOptions()...); diff != "" {
				t.Errorf("Result mismatch (-want +got):\n%s", diff)
			}
		})
	}
}

File diff suppressed because it is too large Load Diff

View File

@ -7,19 +7,20 @@ import (
"testing"
"time"
"github.com/grafana/grafana/pkg/components/null"
"github.com/grafana/grafana-plugin-sdk-go/data"
"github.com/grafana/grafana-plugin-sdk-go/data/sqlutil"
"github.com/grafana/grafana/pkg/components/simplejson"
"github.com/grafana/grafana/pkg/infra/log"
"github.com/grafana/grafana/pkg/models"
"github.com/grafana/grafana/pkg/plugins"
"github.com/stretchr/testify/assert"
"github.com/stretchr/testify/require"
"github.com/xorcare/pointer"
"xorm.io/core"
)
func TestSQLEngine(t *testing.T) {
dt := time.Date(2018, 3, 14, 21, 20, 6, int(527345*time.Microsecond), time.UTC)
earlyDt := time.Date(1970, 3, 14, 21, 20, 6, int(527345*time.Microsecond), time.UTC)
t.Run("Given a time range between 2018-04-12 00:00 and 2018-04-12 00:05", func(t *testing.T) {
from := time.Date(2018, 4, 12, 18, 0, 0, 0, time.UTC)
@ -58,56 +59,48 @@ func TestSQLEngine(t *testing.T) {
})
})
t.Run("Given row values with time.Time as time columns", func(t *testing.T) {
var nilPointer *time.Time
fixtures := make([]interface{}, 5)
fixtures[0] = dt
fixtures[1] = &dt
fixtures[2] = earlyDt
fixtures[3] = &earlyDt
fixtures[4] = nilPointer
for i := range fixtures {
ConvertSqlTimeColumnToEpochMs(fixtures, i)
}
expected := float64(dt.UnixNano()) / float64(time.Millisecond)
expectedEarly := float64(earlyDt.UnixNano()) / float64(time.Millisecond)
require.Equal(t, expected, fixtures[0].(float64))
require.Equal(t, expected, fixtures[1].(float64))
require.Equal(t, expectedEarly, fixtures[2].(float64))
require.Equal(t, expectedEarly, fixtures[3].(float64))
require.Nil(t, fixtures[4])
})
t.Run("Given row values with int64 as time columns", func(t *testing.T) {
tSeconds := dt.Unix()
tMilliseconds := dt.UnixNano() / 1e6
tNanoSeconds := dt.UnixNano()
var nilPointer *int64
fixtures := make([]interface{}, 7)
fixtures[0] = tSeconds
fixtures[1] = &tSeconds
fixtures[2] = tMilliseconds
fixtures[3] = &tMilliseconds
fixtures[4] = tNanoSeconds
fixtures[5] = &tNanoSeconds
fixtures[6] = nilPointer
originFrame := data.NewFrame("",
data.NewField("time1", nil, []int64{
tSeconds,
}),
data.NewField("time2", nil, []*int64{
pointer.Int64(tSeconds),
}),
data.NewField("time3", nil, []int64{
tMilliseconds,
}),
data.NewField("time4", nil, []*int64{
pointer.Int64(tMilliseconds),
}),
data.NewField("time5", nil, []int64{
tNanoSeconds,
}),
data.NewField("time6", nil, []*int64{
pointer.Int64(tNanoSeconds),
}),
data.NewField("time7", nil, []*int64{
nilPointer,
}),
)
for i := range fixtures {
ConvertSqlTimeColumnToEpochMs(fixtures, i)
for i := 0; i < len(originFrame.Fields); i++ {
err := convertSQLTimeColumnToEpochMS(originFrame, i)
require.NoError(t, err)
}
require.Equal(t, tSeconds*1e3, fixtures[0].(int64))
require.Equal(t, tSeconds*1e3, fixtures[1].(int64))
require.Equal(t, tMilliseconds, fixtures[2].(int64))
require.Equal(t, tMilliseconds, fixtures[3].(int64))
require.Equal(t, tMilliseconds, fixtures[4].(int64))
require.Equal(t, tMilliseconds, fixtures[5].(int64))
require.Nil(t, fixtures[6])
require.Equal(t, dt.Unix(), (*originFrame.Fields[0].At(0).(*time.Time)).Unix())
require.Equal(t, dt.Unix(), (*originFrame.Fields[1].At(0).(*time.Time)).Unix())
require.Equal(t, dt.Unix(), (*originFrame.Fields[2].At(0).(*time.Time)).Unix())
require.Equal(t, dt.Unix(), (*originFrame.Fields[3].At(0).(*time.Time)).Unix())
require.Equal(t, dt.Unix(), (*originFrame.Fields[4].At(0).(*time.Time)).Unix())
require.Equal(t, dt.Unix(), (*originFrame.Fields[5].At(0).(*time.Time)).Unix())
require.Nil(t, originFrame.Fields[6].At(0))
})
t.Run("Given row values with uint64 as time columns", func(t *testing.T) {
@ -116,62 +109,91 @@ func TestSQLEngine(t *testing.T) {
tNanoSeconds := uint64(dt.UnixNano())
var nilPointer *uint64
fixtures := make([]interface{}, 7)
fixtures[0] = tSeconds
fixtures[1] = &tSeconds
fixtures[2] = tMilliseconds
fixtures[3] = &tMilliseconds
fixtures[4] = tNanoSeconds
fixtures[5] = &tNanoSeconds
fixtures[6] = nilPointer
originFrame := data.NewFrame("",
data.NewField("time1", nil, []uint64{
tSeconds,
}),
data.NewField("time2", nil, []*uint64{
pointer.Uint64(tSeconds),
}),
data.NewField("time3", nil, []uint64{
tMilliseconds,
}),
data.NewField("time4", nil, []*uint64{
pointer.Uint64(tMilliseconds),
}),
data.NewField("time5", nil, []uint64{
tNanoSeconds,
}),
data.NewField("time6", nil, []*uint64{
pointer.Uint64(tNanoSeconds),
}),
data.NewField("time7", nil, []*uint64{
nilPointer,
}),
)
for i := range fixtures {
ConvertSqlTimeColumnToEpochMs(fixtures, i)
for i := 0; i < len(originFrame.Fields); i++ {
err := convertSQLTimeColumnToEpochMS(originFrame, i)
require.NoError(t, err)
}
require.Equal(t, int64(tSeconds*1e3), fixtures[0].(int64))
require.Equal(t, int64(tSeconds*1e3), fixtures[1].(int64))
require.Equal(t, int64(tMilliseconds), fixtures[2].(int64))
require.Equal(t, int64(tMilliseconds), fixtures[3].(int64))
require.Equal(t, int64(tMilliseconds), fixtures[4].(int64))
require.Equal(t, int64(tMilliseconds), fixtures[5].(int64))
require.Nil(t, fixtures[6])
require.Equal(t, dt.Unix(), (*originFrame.Fields[0].At(0).(*time.Time)).Unix())
require.Equal(t, dt.Unix(), (*originFrame.Fields[1].At(0).(*time.Time)).Unix())
require.Equal(t, dt.Unix(), (*originFrame.Fields[2].At(0).(*time.Time)).Unix())
require.Equal(t, dt.Unix(), (*originFrame.Fields[3].At(0).(*time.Time)).Unix())
require.Equal(t, dt.Unix(), (*originFrame.Fields[4].At(0).(*time.Time)).Unix())
require.Equal(t, dt.Unix(), (*originFrame.Fields[5].At(0).(*time.Time)).Unix())
require.Nil(t, originFrame.Fields[6].At(0))
})
t.Run("Given row values with int32 as time columns", func(t *testing.T) {
tSeconds := int32(dt.Unix())
var nilInt *int32
fixtures := make([]interface{}, 3)
fixtures[0] = tSeconds
fixtures[1] = &tSeconds
fixtures[2] = nilInt
for i := range fixtures {
ConvertSqlTimeColumnToEpochMs(fixtures, i)
originFrame := data.NewFrame("",
data.NewField("time1", nil, []int32{
tSeconds,
}),
data.NewField("time2", nil, []*int32{
pointer.Int32(tSeconds),
}),
data.NewField("time7", nil, []*int32{
nilInt,
}),
)
for i := 0; i < 3; i++ {
err := convertSQLTimeColumnToEpochMS(originFrame, i)
require.NoError(t, err)
}
require.Equal(t, dt.Unix()*1e3, fixtures[0].(int64))
require.Equal(t, dt.Unix()*1e3, fixtures[1].(int64))
require.Nil(t, fixtures[2])
require.Equal(t, dt.Unix(), (*originFrame.Fields[0].At(0).(*time.Time)).Unix())
require.Equal(t, dt.Unix(), (*originFrame.Fields[1].At(0).(*time.Time)).Unix())
require.Nil(t, originFrame.Fields[2].At(0))
})
t.Run("Given row values with uint32 as time columns", func(t *testing.T) {
tSeconds := uint32(dt.Unix())
var nilInt *uint32
fixtures := make([]interface{}, 3)
fixtures[0] = tSeconds
fixtures[1] = &tSeconds
fixtures[2] = nilInt
for i := range fixtures {
ConvertSqlTimeColumnToEpochMs(fixtures, i)
originFrame := data.NewFrame("",
data.NewField("time1", nil, []uint32{
tSeconds,
}),
data.NewField("time2", nil, []*uint32{
pointer.Uint32(tSeconds),
}),
data.NewField("time7", nil, []*uint32{
nilInt,
}),
)
for i := 0; i < len(originFrame.Fields); i++ {
err := convertSQLTimeColumnToEpochMS(originFrame, i)
require.NoError(t, err)
}
require.Equal(t, dt.Unix()*1e3, fixtures[0].(int64))
require.Equal(t, dt.Unix()*1e3, fixtures[1].(int64))
require.Nil(t, fixtures[2])
require.Equal(t, dt.Unix(), (*originFrame.Fields[0].At(0).(*time.Time)).Unix())
require.Equal(t, dt.Unix(), (*originFrame.Fields[1].At(0).(*time.Time)).Unix())
require.Nil(t, originFrame.Fields[2].At(0))
})
t.Run("Given row values with float64 as time columns", func(t *testing.T) {
@ -180,137 +202,192 @@ func TestSQLEngine(t *testing.T) {
tNanoSeconds := float64(dt.UnixNano())
var nilPointer *float64
fixtures := make([]interface{}, 7)
fixtures[0] = tSeconds
fixtures[1] = &tSeconds
fixtures[2] = tMilliseconds
fixtures[3] = &tMilliseconds
fixtures[4] = tNanoSeconds
fixtures[5] = &tNanoSeconds
fixtures[6] = nilPointer
originFrame := data.NewFrame("",
data.NewField("time1", nil, []float64{
tSeconds,
}),
data.NewField("time2", nil, []*float64{
pointer.Float64(tSeconds),
}),
data.NewField("time3", nil, []float64{
tMilliseconds,
}),
data.NewField("time4", nil, []*float64{
pointer.Float64(tMilliseconds),
}),
data.NewField("time5", nil, []float64{
tNanoSeconds,
}),
data.NewField("time6", nil, []*float64{
pointer.Float64(tNanoSeconds),
}),
data.NewField("time7", nil, []*float64{
nilPointer,
}),
)
for i := range fixtures {
ConvertSqlTimeColumnToEpochMs(fixtures, i)
for i := 0; i < len(originFrame.Fields); i++ {
err := convertSQLTimeColumnToEpochMS(originFrame, i)
require.NoError(t, err)
}
require.Equal(t, tMilliseconds, fixtures[0].(float64))
require.Equal(t, tMilliseconds, fixtures[1].(float64))
require.Equal(t, tMilliseconds, fixtures[2].(float64))
require.Equal(t, tMilliseconds, fixtures[3].(float64))
require.Equal(t, tMilliseconds, fixtures[4].(float64))
require.Equal(t, tMilliseconds, fixtures[5].(float64))
require.Nil(t, fixtures[6])
require.Equal(t, dt.Unix(), (*originFrame.Fields[0].At(0).(*time.Time)).Unix())
require.Equal(t, dt.Unix(), (*originFrame.Fields[1].At(0).(*time.Time)).Unix())
require.Equal(t, dt.Unix(), (*originFrame.Fields[2].At(0).(*time.Time)).Unix())
require.Equal(t, dt.Unix(), (*originFrame.Fields[3].At(0).(*time.Time)).Unix())
require.Equal(t, dt.Unix(), (*originFrame.Fields[4].At(0).(*time.Time)).Unix())
require.Equal(t, dt.Unix(), (*originFrame.Fields[5].At(0).(*time.Time)).Unix())
require.Nil(t, originFrame.Fields[6].At(0))
})
t.Run("Given row values with float32 as time columns", func(t *testing.T) {
tSeconds := float32(dt.Unix())
var nilInt *float32
fixtures := make([]interface{}, 3)
fixtures[0] = tSeconds
fixtures[1] = &tSeconds
fixtures[2] = nilInt
for i := range fixtures {
ConvertSqlTimeColumnToEpochMs(fixtures, i)
originFrame := data.NewFrame("",
data.NewField("time1", nil, []float32{
tSeconds,
}),
data.NewField("time2", nil, []*float32{
pointer.Float32(tSeconds),
}),
data.NewField("time7", nil, []*float32{
nilInt,
}),
)
for i := 0; i < len(originFrame.Fields); i++ {
err := convertSQLTimeColumnToEpochMS(originFrame, i)
require.NoError(t, err)
}
require.Equal(t, float64(tSeconds)*1e3, fixtures[0].(float64))
require.Equal(t, float64(tSeconds)*1e3, fixtures[1].(float64))
require.Nil(t, fixtures[2])
require.Equal(t, int64(tSeconds), (*originFrame.Fields[0].At(0).(*time.Time)).Unix())
require.Equal(t, int64(tSeconds), (*originFrame.Fields[1].At(0).(*time.Time)).Unix())
require.Nil(t, originFrame.Fields[2].At(0))
})
t.Run("Given row with value columns", func(t *testing.T) {
intValue := 1
int64Value := int64(1)
int32Value := int32(1)
int16Value := int16(1)
int8Value := int8(1)
float64Value := float64(1)
float32Value := float32(1)
uintValue := uint(1)
uint64Value := uint64(1)
uint32Value := uint32(1)
uint16Value := uint16(1)
uint8Value := uint8(1)
testCases := []struct {
name string
value interface{}
}{
{"intValue", intValue},
{"&intValue", &intValue},
{"int64Value", int64Value},
{"&int64Value", &int64Value},
{"int32Value", int32Value},
{"&int32Value", &int32Value},
{"int16Value", int16Value},
{"&int16Value", &int16Value},
{"int8Value", int8Value},
{"&int8Value", &int8Value},
{"float64Value", float64Value},
{"&float64Value", &float64Value},
{"float32Value", float32Value},
{"&float32Value", &float32Value},
{"uintValue", uintValue},
{"&uintValue", &uintValue},
{"uint64Value", uint64Value},
{"&uint64Value", &uint64Value},
{"uint32Value", uint32Value},
{"&uint32Value", &uint32Value},
{"uint16Value", uint16Value},
{"&uint16Value", &uint16Value},
{"uint8Value", uint8Value},
{"&uint8Value", &uint8Value},
}
for _, tc := range testCases {
t.Run(tc.name, func(t *testing.T) {
value, err := ConvertSqlValueColumnToFloat("col", tc.value)
require.NoError(t, err)
require.True(t, value.Valid)
require.Equal(t, null.FloatFrom(1).Float64, value.Float64)
})
t.Run("Given row with value columns, would be converted to float64", func(t *testing.T) {
originFrame := data.NewFrame("",
data.NewField("value1", nil, []int64{
int64(1),
}),
data.NewField("value2", nil, []*int64{
pointer.Int64(1),
}),
data.NewField("value3", nil, []int32{
int32(1),
}),
data.NewField("value4", nil, []*int32{
pointer.Int32(1),
}),
data.NewField("value5", nil, []int16{
int16(1),
}),
data.NewField("value6", nil, []*int16{
pointer.Int16(1),
}),
data.NewField("value7", nil, []int8{
int8(1),
}),
data.NewField("value8", nil, []*int8{
pointer.Int8(1),
}),
data.NewField("value9", nil, []float64{
float64(1),
}),
data.NewField("value10", nil, []*float64{
pointer.Float64(1),
}),
data.NewField("value11", nil, []float32{
float32(1),
}),
data.NewField("value12", nil, []*float32{
pointer.Float32(1),
}),
data.NewField("value13", nil, []uint64{
uint64(1),
}),
data.NewField("value14", nil, []*uint64{
pointer.Uint64(1),
}),
data.NewField("value15", nil, []uint32{
uint32(1),
}),
data.NewField("value16", nil, []*uint32{
pointer.Uint32(1),
}),
data.NewField("value17", nil, []uint16{
uint16(1),
}),
data.NewField("value18", nil, []*uint16{
pointer.Uint16(1),
}),
data.NewField("value19", nil, []uint8{
uint8(1),
}),
data.NewField("value20", nil, []*uint8{
pointer.Uint8(1),
}),
)
for i := 0; i < len(originFrame.Fields); i++ {
_, err := convertSQLValueColumnToFloat(originFrame, i)
require.NoError(t, err)
if i == 8 {
require.Equal(t, float64(1), originFrame.Fields[i].At(0).(float64))
} else {
require.NotNil(t, originFrame.Fields[i].At(0).(*float64))
require.Equal(t, float64(1), *originFrame.Fields[i].At(0).(*float64))
}
}
})
t.Run("Given row with nil value columns", func(t *testing.T) {
var intNilPointer *int
var int64NilPointer *int64
var int32NilPointer *int32
var int16NilPointer *int16
var int8NilPointer *int8
var float64NilPointer *float64
var float32NilPointer *float32
var uintNilPointer *uint
var uint64NilPointer *uint64
var uint32NilPointer *uint32
var uint16NilPointer *uint16
var uint8NilPointer *uint8
testCases := []struct {
name string
value interface{}
}{
{"intNilPointer", intNilPointer},
{"int64NilPointer", int64NilPointer},
{"int32NilPointer", int32NilPointer},
{"int16NilPointer", int16NilPointer},
{"int8NilPointer", int8NilPointer},
{"float64NilPointer", float64NilPointer},
{"float32NilPointer", float32NilPointer},
{"uintNilPointer", uintNilPointer},
{"uint64NilPointer", uint64NilPointer},
{"uint32NilPointer", uint32NilPointer},
{"uint16NilPointer", uint16NilPointer},
{"uint8NilPointer", uint8NilPointer},
}
for _, tc := range testCases {
t.Run(tc.name, func(t *testing.T) {
value, err := ConvertSqlValueColumnToFloat("col", tc.value)
originFrame := data.NewFrame("",
data.NewField("value1", nil, []*int64{
int64NilPointer,
}),
data.NewField("value2", nil, []*int32{
int32NilPointer,
}),
data.NewField("value3", nil, []*int16{
int16NilPointer,
}),
data.NewField("value4", nil, []*int8{
int8NilPointer,
}),
data.NewField("value5", nil, []*float64{
float64NilPointer,
}),
data.NewField("value6", nil, []*float32{
float32NilPointer,
}),
data.NewField("value7", nil, []*uint64{
uint64NilPointer,
}),
data.NewField("value8", nil, []*uint32{
uint32NilPointer,
}),
data.NewField("value9", nil, []*uint16{
uint16NilPointer,
}),
data.NewField("value10", nil, []*uint8{
uint8NilPointer,
}),
)
for i := 0; i < len(originFrame.Fields); i++ {
t.Run("", func(t *testing.T) {
_, err := convertSQLValueColumnToFloat(originFrame, i)
require.NoError(t, err)
require.False(t, value.Valid)
require.Nil(t, originFrame.Fields[i].At(0))
})
}
})
@ -352,3 +429,7 @@ func (t *testQueryResultTransformer) TransformQueryError(err error) error {
t.transformQueryErrorWasCalled = true
return err
}
// GetConverterList returns no custom string converters (a nil slice),
// so the SQL engine falls back to its default value conversions for
// this test transformer.
func (t *testQueryResultTransformer) GetConverterList() []sqlutil.StringConverter {
	var converters []sqlutil.StringConverter
	return converters
}

51
pkg/tsdb/sqleng/trim.go Normal file
View File

@ -0,0 +1,51 @@
package sqleng
import (
"fmt"
"time"
"github.com/grafana/grafana-plugin-sdk-go/data"
)
// trim drops every row of f whose timestamp lies outside qm.TimeRange,
// mutating the frame in place via Frame.DeleteRow.
//
// The frame must be a time series frame; otherwise an error is returned.
// A nil value in the time field is reported as an error, since such a row
// cannot be classified as inside or outside the range.
func trim(f *data.Frame, qm dataQueryModel) error {
	schema := f.TimeSeriesSchema()
	if schema.Type == data.TimeSeriesTypeNot {
		return fmt.Errorf("can not trim non-timeseries frame")
	}

	timeField := f.Fields[schema.TimeIndex]
	if timeField.Len() == 0 {
		// Empty frame: nothing to trim.
		return nil
	}

	// Walk backwards from the final row, deleting rows whose timestamps
	// fall after the end of the range.
	for idx := timeField.Len() - 1; idx >= 0; idx-- {
		point, ok := timeField.ConcreteAt(idx)
		if !ok {
			return fmt.Errorf("time point is nil")
		}
		if !point.(time.Time).After(qm.TimeRange.To) {
			break
		}
		f.DeleteRow(idx)
	}

	// Then shave leading rows whose timestamps fall before the start of
	// the range.
	for timeField.Len() > 0 {
		point, ok := timeField.ConcreteAt(0)
		if !ok {
			return fmt.Errorf("time point is nil")
		}
		if !point.(time.Time).Before(qm.TimeRange.From) {
			break
		}
		f.DeleteRow(0)
	}

	return nil
}

View File

@ -0,0 +1,171 @@
package sqleng
import (
"testing"
"time"
"github.com/google/go-cmp/cmp"
"github.com/grafana/grafana-plugin-sdk-go/backend"
"github.com/grafana/grafana-plugin-sdk-go/data"
"github.com/stretchr/testify/require"
"github.com/xorcare/pointer"
)
// TestTrimWide verifies that trim removes wide-frame rows whose timestamps
// fall outside the query time range, while leaving in-range rows — including
// their nil gaps — untouched.
func TestTrimWide(t *testing.T) {
	tests := []struct {
		name      string
		input     *data.Frame
		timeRange backend.TimeRange
		output    *data.Frame
	}{
		{
			// Input spans 04:18–04:26 but the range is 04:20–04:24, so the
			// two leading and two trailing rows must be dropped. Range
			// boundaries are inclusive: the 04:20 and 04:24 rows survive.
			name: "needs trimming",
			timeRange: backend.TimeRange{
				From: time.Date(2020, 1, 2, 3, 4, 20, 0, time.UTC),
				To:   time.Date(2020, 1, 2, 3, 4, 24, 0, time.UTC),
			},
			input: data.NewFrame("wide_test",
				data.NewField("Time", nil, []time.Time{
					time.Date(2020, 1, 2, 3, 4, 18, 0, time.UTC),
					time.Date(2020, 1, 2, 3, 4, 19, 0, time.UTC),
					time.Date(2020, 1, 2, 3, 4, 20, 0, time.UTC),
					time.Date(2020, 1, 2, 3, 4, 21, 0, time.UTC),
					time.Date(2020, 1, 2, 3, 4, 22, 0, time.UTC),
					time.Date(2020, 1, 2, 3, 4, 23, 0, time.UTC),
					time.Date(2020, 1, 2, 3, 4, 24, 0, time.UTC),
					time.Date(2020, 1, 2, 3, 4, 25, 0, time.UTC),
					time.Date(2020, 1, 2, 3, 4, 26, 0, time.UTC),
				}),
				data.NewField("Values Ints", nil, []*int64{
					nil,
					pointer.Int64(10),
					pointer.Int64(12),
					nil,
					nil,
					nil,
					pointer.Int64(15),
					nil,
					nil,
				}),
				data.NewField(`Values Floats`, data.Labels{"Animal Factor": "sloth"}, []*float64{
					nil,
					pointer.Float64(10.5),
					pointer.Float64(12.5),
					nil,
					nil,
					nil,
					pointer.Float64(15.0),
					nil,
					nil,
				})),
			// Expected: only the five rows from 04:20 through 04:24 remain,
			// with their value columns (nils included) intact.
			output: data.NewFrame("wide_test",
				data.NewField("Time", nil, []time.Time{
					time.Date(2020, 1, 2, 3, 4, 20, 0, time.UTC),
					time.Date(2020, 1, 2, 3, 4, 21, 0, time.UTC),
					time.Date(2020, 1, 2, 3, 4, 22, 0, time.UTC),
					time.Date(2020, 1, 2, 3, 4, 23, 0, time.UTC),
					time.Date(2020, 1, 2, 3, 4, 24, 0, time.UTC),
				}),
				data.NewField("Values Ints", nil, []*int64{
					pointer.Int64(12),
					nil,
					nil,
					nil,
					pointer.Int64(15),
				}),
				data.NewField(`Values Floats`, data.Labels{"Animal Factor": "sloth"}, []*float64{
					pointer.Float64(12.5),
					nil,
					nil,
					nil,
					pointer.Float64(15.0),
				})),
		},
		{
			// Range 04:15–04:30 fully encloses the data (04:18–04:26), so
			// the frame must pass through unchanged.
			name: "does not need trimming",
			timeRange: backend.TimeRange{
				From: time.Date(2020, 1, 2, 3, 4, 15, 0, time.UTC),
				To:   time.Date(2020, 1, 2, 3, 4, 30, 0, time.UTC),
			},
			input: data.NewFrame("wide_test",
				data.NewField("Time", nil, []time.Time{
					time.Date(2020, 1, 2, 3, 4, 18, 0, time.UTC),
					time.Date(2020, 1, 2, 3, 4, 19, 0, time.UTC),
					time.Date(2020, 1, 2, 3, 4, 20, 0, time.UTC),
					time.Date(2020, 1, 2, 3, 4, 21, 0, time.UTC),
					time.Date(2020, 1, 2, 3, 4, 22, 0, time.UTC),
					time.Date(2020, 1, 2, 3, 4, 23, 0, time.UTC),
					time.Date(2020, 1, 2, 3, 4, 24, 0, time.UTC),
					time.Date(2020, 1, 2, 3, 4, 25, 0, time.UTC),
					time.Date(2020, 1, 2, 3, 4, 26, 0, time.UTC),
				}),
				data.NewField("Values Ints", nil, []*int64{
					nil,
					pointer.Int64(10),
					pointer.Int64(12),
					nil,
					nil,
					nil,
					pointer.Int64(15),
					nil,
					nil,
				}),
				data.NewField(`Values Floats`, data.Labels{"Animal Factor": "sloth"}, []*float64{
					nil,
					pointer.Float64(10.5),
					pointer.Float64(12.5),
					nil,
					nil,
					nil,
					pointer.Float64(15.0),
					nil,
					nil,
				})),
			// Expected output is identical to the input.
			output: data.NewFrame("wide_test",
				data.NewField("Time", nil, []time.Time{
					time.Date(2020, 1, 2, 3, 4, 18, 0, time.UTC),
					time.Date(2020, 1, 2, 3, 4, 19, 0, time.UTC),
					time.Date(2020, 1, 2, 3, 4, 20, 0, time.UTC),
					time.Date(2020, 1, 2, 3, 4, 21, 0, time.UTC),
					time.Date(2020, 1, 2, 3, 4, 22, 0, time.UTC),
					time.Date(2020, 1, 2, 3, 4, 23, 0, time.UTC),
					time.Date(2020, 1, 2, 3, 4, 24, 0, time.UTC),
					time.Date(2020, 1, 2, 3, 4, 25, 0, time.UTC),
					time.Date(2020, 1, 2, 3, 4, 26, 0, time.UTC),
				}),
				data.NewField("Values Ints", nil, []*int64{
					nil,
					pointer.Int64(10),
					pointer.Int64(12),
					nil,
					nil,
					nil,
					pointer.Int64(15),
					nil,
					nil,
				}),
				data.NewField(`Values Floats`, data.Labels{"Animal Factor": "sloth"}, []*float64{
					nil,
					pointer.Float64(10.5),
					pointer.Float64(12.5),
					nil,
					nil,
					nil,
					pointer.Float64(15.0),
					nil,
					nil,
				})),
		},
	}
	for _, tt := range tests {
		t.Run(tt.name, func(t *testing.T) {
			// trim mutates tt.input in place; compare against the expected
			// frame using the SDK's frame comparison options.
			err := trim(tt.input, dataQueryModel{
				TimeRange: tt.timeRange,
			})
			require.NoError(t, err)
			if diff := cmp.Diff(tt.output, tt.input, data.FrameTestCompareOptions()...); diff != "" {
				t.Errorf("Result mismatch (-want +got):\n%s", diff)
			}
		})
	}
}

View File

@ -1,29 +1,31 @@
import { map as _map, filter } from 'lodash';
import { Observable, of } from 'rxjs';
import { map as _map } from 'lodash';
import { of } from 'rxjs';
import { catchError, map, mapTo } from 'rxjs/operators';
import { getBackendSrv } from '@grafana/runtime';
import { ScopedVars } from '@grafana/data';
import { BackendDataSourceResponse, DataSourceWithBackend, FetchResponse, getBackendSrv } from '@grafana/runtime';
import { AnnotationEvent, DataSourceInstanceSettings, ScopedVars, MetricFindValue } from '@grafana/data';
import ResponseParser, { MssqlResponse } from './response_parser';
import ResponseParser from './response_parser';
import { getTemplateSrv, TemplateSrv } from 'app/features/templating/template_srv';
import { MssqlQueryForInterpolation, MssqlQuery, MssqlOptions } from './types';
import { getTimeSrv, TimeSrv } from 'app/features/dashboard/services/TimeSrv';
import { MssqlQueryForInterpolation } from './types';
export class MssqlDatasource {
export class MssqlDatasource extends DataSourceWithBackend<MssqlQuery, MssqlOptions> {
id: any;
name: any;
responseParser: ResponseParser;
interval: string;
constructor(
instanceSettings: any,
instanceSettings: DataSourceInstanceSettings<MssqlOptions>,
private readonly templateSrv: TemplateSrv = getTemplateSrv(),
private readonly timeSrv: TimeSrv = getTimeSrv()
) {
super(instanceSettings);
this.name = instanceSettings.name;
this.id = instanceSettings.id;
this.responseParser = new ResponseParser();
this.interval = (instanceSettings.jsonData || {}).timeInterval || '1m';
const settingsData = instanceSettings.jsonData || ({} as MssqlOptions);
this.interval = settingsData.timeInterval || '1m';
}
interpolateVariable(value: any, variable: any) {
@ -68,38 +70,16 @@ export class MssqlDatasource {
return expandedQueries;
}
query(options: any): Observable<MssqlResponse> {
const queries = filter(options.targets, (item) => {
return item.hide !== true;
}).map((item) => {
return {
refId: item.refId,
intervalMs: options.intervalMs,
maxDataPoints: options.maxDataPoints,
datasourceId: this.id,
rawSql: this.templateSrv.replace(item.rawSql, options.scopedVars, this.interpolateVariable),
format: item.format,
};
});
if (queries.length === 0) {
return of({ data: [] });
}
return getBackendSrv()
.fetch({
url: '/api/tsdb/query',
method: 'POST',
data: {
from: options.range.from.valueOf().toString(),
to: options.range.to.valueOf().toString(),
queries: queries,
},
})
.pipe(map(this.responseParser.processQueryResult));
applyTemplateVariables(target: MssqlQuery, scopedVars: ScopedVars): Record<string, any> {
return {
refId: target.refId,
datasourceId: this.id,
rawSql: this.templateSrv.replace(target.rawSql, scopedVars, this.interpolateVariable),
format: target.format,
};
}
annotationQuery(options: any) {
async annotationQuery(options: any): Promise<AnnotationEvent[]> {
if (!options.annotation.rawQuery) {
return Promise.reject({ message: 'Query missing in annotation definition' });
}
@ -112,25 +92,33 @@ export class MssqlDatasource {
};
return getBackendSrv()
.fetch({
url: '/api/tsdb/query',
.fetch<BackendDataSourceResponse>({
url: '/api/ds/query',
method: 'POST',
data: {
from: options.range.from.valueOf().toString(),
to: options.range.to.valueOf().toString(),
queries: [query],
},
requestId: options.annotation.name,
})
.pipe(map((data: any) => this.responseParser.transformAnnotationResponse(options, data)))
.pipe(
map(
async (res: FetchResponse<BackendDataSourceResponse>) =>
await this.responseParser.transformAnnotationResponse(options, res.data)
)
)
.toPromise();
}
metricFindQuery(query: string, optionalOptions: { variable: { name: string } }) {
metricFindQuery(query: string, optionalOptions: any): Promise<MetricFindValue[]> {
let refId = 'tempvar';
if (optionalOptions && optionalOptions.variable && optionalOptions.variable.name) {
refId = optionalOptions.variable.name;
}
const range = this.timeSrv.timeRange();
const interpolatedQuery = {
refId: refId,
datasourceId: this.id,
@ -138,27 +126,29 @@ export class MssqlDatasource {
format: 'table',
};
const range = this.timeSrv.timeRange();
const data = {
queries: [interpolatedQuery],
from: range.from.valueOf().toString(),
to: range.to.valueOf().toString(),
};
return getBackendSrv()
.fetch({
url: '/api/tsdb/query',
.fetch<BackendDataSourceResponse>({
url: '/api/ds/query',
method: 'POST',
data: data,
data: {
from: range.from.valueOf().toString(),
to: range.to.valueOf().toString(),
queries: [interpolatedQuery],
},
requestId: refId,
})
.pipe(map((data: any) => this.responseParser.parseMetricFindQueryResult(refId, data)))
.pipe(
map((rsp) => {
return this.responseParser.transformMetricFindResponse(rsp);
})
)
.toPromise();
}
testDatasource() {
testDatasource(): Promise<any> {
return getBackendSrv()
.fetch({
url: '/api/tsdb/query',
url: '/api/ds/query',
method: 'POST',
data: {
from: '5m',
@ -189,8 +179,8 @@ export class MssqlDatasource {
.toPromise();
}
targetContainsTemplate(target: any) {
const rawSql = target.rawSql.replace('$__', '');
targetContainsTemplate(query: MssqlQuery): boolean {
const rawSql = query.rawSql.replace('$__', '');
return this.templateSrv.variableExists(rawSql);
}
}

View File

@ -1,6 +1,8 @@
import { MssqlDatasource } from './datasource';
import { MssqlQueryCtrl } from './query_ctrl';
import { MssqlConfigCtrl } from './config_ctrl';
import { MssqlQuery } from './types';
import { DataSourcePlugin } from '@grafana/data';
const defaultQuery = `SELECT
<time_column> as time,
@ -16,18 +18,16 @@ const defaultQuery = `SELECT
class MssqlAnnotationsQueryCtrl {
static templateUrl = 'partials/annotations.editor.html';
annotation: any;
declare annotation: any;
/** @ngInject */
constructor() {
constructor($scope: any) {
this.annotation = $scope.ctrl.annotation;
this.annotation.rawQuery = this.annotation.rawQuery || defaultQuery;
}
}
export {
MssqlDatasource,
MssqlDatasource as Datasource,
MssqlQueryCtrl as QueryCtrl,
MssqlConfigCtrl as ConfigCtrl,
MssqlAnnotationsQueryCtrl as AnnotationsQueryCtrl,
};
export const plugin = new DataSourcePlugin<MssqlDatasource, MssqlQuery>(MssqlDatasource)
.setQueryCtrl(MssqlQueryCtrl)
.setConfigCtrl(MssqlConfigCtrl)
.setAnnotationQueryCtrl(MssqlAnnotationsQueryCtrl);

View File

@ -1,13 +1,7 @@
import { QueryCtrl } from 'app/plugins/sdk';
import { auto } from 'angular';
import { PanelEvents, QueryResultMeta } from '@grafana/data';
export interface MssqlQuery {
refId: string;
format: string;
alias: string;
rawSql: string;
}
import { MssqlQuery } from './types';
const defaultQuery = `SELECT
$__timeEpoch(<time_column>),

View File

@ -1,73 +1,42 @@
import { map } from 'lodash';
import { MetricFindValue } from '@grafana/data';
interface TableResponse extends Record<string, any> {
type: string;
refId: string;
meta: any;
}
interface SeriesResponse extends Record<string, any> {
target: string;
refId: string;
meta: any;
datapoints: [any[]];
}
export interface MssqlResponse {
data: Array<TableResponse | SeriesResponse>;
}
import { AnnotationEvent, DataFrame, FieldType, MetricFindValue } from '@grafana/data';
import { BackendDataSourceResponse, toDataQueryResponse, FetchResponse } from '@grafana/runtime';
export default class ResponseParser {
processQueryResult(res: any): MssqlResponse {
const data: any[] = [];
transformMetricFindResponse(raw: FetchResponse<BackendDataSourceResponse>): MetricFindValue[] {
const frames = toDataQueryResponse(raw).data as DataFrame[];
if (!res.data.results) {
return { data };
}
for (const key in res.data.results) {
const queryRes = res.data.results[key];
if (queryRes.series) {
for (const series of queryRes.series) {
data.push({
target: series.name,
datapoints: series.points,
refId: queryRes.refId,
meta: queryRes.meta,
});
}
}
if (queryRes.tables) {
for (const table of queryRes.tables) {
table.type = 'table';
table.refId = queryRes.refId;
table.meta = queryRes.meta;
data.push(table);
}
}
}
return { data: data };
}
parseMetricFindQueryResult(refId: string, results: any): MetricFindValue[] {
if (!results || results.data.length === 0 || results.data.results[refId].meta.rowCount === 0) {
if (!frames || !frames.length) {
return [];
}
const columns = results.data.results[refId].tables[0].columns;
const rows = results.data.results[refId].tables[0].rows;
const textColIndex = this.findColIndex(columns, '__text');
const valueColIndex = this.findColIndex(columns, '__value');
const frame = frames[0];
if (columns.length === 2 && textColIndex !== -1 && valueColIndex !== -1) {
return this.transformToKeyValueList(rows, textColIndex, valueColIndex);
const values: MetricFindValue[] = [];
const textField = frame.fields.find((f) => f.name === '__text');
const valueField = frame.fields.find((f) => f.name === '__value');
if (textField && valueField) {
for (let i = 0; i < textField.values.length; i++) {
values.push({ text: '' + textField.values.get(i), value: '' + valueField.values.get(i) });
}
} else {
const textFields = frame.fields.filter((f) => f.type === FieldType.string);
if (textFields) {
values.push(
...textFields
.flatMap((f) => f.values.toArray())
.map((v) => ({
text: '' + v,
}))
);
}
}
return this.transformToSimpleList(rows);
return Array.from(new Set(values.map((v) => v.text))).map((text) => ({
text,
value: values.find((v) => v.text === text)?.value,
}));
}
transformToKeyValueList(rows: any, textColIndex: number, valueColIndex: number): MetricFindValue[] {
@ -117,41 +86,34 @@ export default class ResponseParser {
return false;
}
transformAnnotationResponse(options: any, data: any) {
const table = data.data.results[options.annotation.name].tables[0];
async transformAnnotationResponse(options: any, data: BackendDataSourceResponse): Promise<AnnotationEvent[]> {
const frames = toDataQueryResponse({ data: data }).data as DataFrame[];
const frame = frames[0];
const timeField = frame.fields.find((f) => f.name === 'time');
let timeColumnIndex = -1;
let timeEndColumnIndex = -1;
let textColumnIndex = -1;
let tagsColumnIndex = -1;
for (let i = 0; i < table.columns.length; i++) {
if (table.columns[i].text === 'time') {
timeColumnIndex = i;
} else if (table.columns[i].text === 'timeend') {
timeEndColumnIndex = i;
} else if (table.columns[i].text === 'text') {
textColumnIndex = i;
} else if (table.columns[i].text === 'tags') {
tagsColumnIndex = i;
}
}
if (timeColumnIndex === -1) {
if (!timeField) {
return Promise.reject({ message: 'Missing mandatory time column (with time column alias) in annotation query.' });
}
const list = [];
for (let i = 0; i < table.rows.length; i++) {
const row = table.rows[i];
const timeEnd =
timeEndColumnIndex !== -1 && row[timeEndColumnIndex] ? Math.floor(row[timeEndColumnIndex]) : undefined;
const timeEndField = frame.fields.find((f) => f.name === 'timeend');
const textField = frame.fields.find((f) => f.name === 'text');
const tagsField = frame.fields.find((f) => f.name === 'tags');
const list: AnnotationEvent[] = [];
for (let i = 0; i < frame.length; i++) {
const timeEnd = timeEndField && timeEndField.values.get(i) ? Math.floor(timeEndField.values.get(i)) : undefined;
list.push({
annotation: options.annotation,
time: Math.floor(row[timeColumnIndex]),
time: Math.floor(timeField.values.get(i)),
timeEnd,
text: row[textColumnIndex],
tags: row[tagsColumnIndex] ? row[tagsColumnIndex].trim().split(/\s*,\s*/) : [],
text: textField && textField.values.get(i) ? textField.values.get(i) : '',
tags:
tagsField && tagsField.values.get(i)
? tagsField.values
.get(i)
.trim()
.split(/\s*,\s*/)
: [],
});
}

View File

@ -1,12 +1,12 @@
import { of } from 'rxjs';
import { dateTime } from '@grafana/data';
import { dataFrameToJSON, dateTime, MetricFindValue, MutableDataFrame } from '@grafana/data';
import { MssqlDatasource } from '../datasource';
import { TimeSrvStub } from 'test/specs/helpers';
import { TemplateSrv } from 'app/features/templating/template_srv';
import { backendSrv } from 'app/core/services/backend_srv';
import { initialCustomVariableModelState } from '../../../../features/variables/custom/reducer';
import { createFetchResponse } from 'test/helpers/createFetchResponse';
import { TimeSrvStub } from 'test/specs/helpers';
jest.mock('@grafana/runtime', () => ({
...((jest.requireActual('@grafana/runtime') as unknown) as object),
@ -47,16 +47,16 @@ describe('MSSQLDatasource', () => {
const response = {
results: {
MyAnno: {
refId: annotationName,
tables: [
{
columns: [{ text: 'time' }, { text: 'text' }, { text: 'tags' }],
rows: [
[1521545610656, 'some text', 'TagA,TagB'],
[1521546251185, 'some text2', ' TagB , TagC'],
[1521546501378, 'some text3'],
],
},
frames: [
dataFrameToJSON(
new MutableDataFrame({
fields: [
{ name: 'time', values: [1521545610656, 1521546251185, 1521546501378] },
{ name: 'text', values: ['some text', 'some text2', 'some text3'] },
{ name: 'tags', values: ['TagA,TagB', ' TagB , TagC', null] },
],
})
),
],
},
},
@ -85,24 +85,20 @@ describe('MSSQLDatasource', () => {
});
describe('When performing metricFindQuery', () => {
let results: any;
let results: MetricFindValue[];
const query = 'select * from atable';
const response = {
results: {
tempvar: {
meta: {
rowCount: 3,
},
refId: 'tempvar',
tables: [
{
columns: [{ text: 'title' }, { text: 'text' }],
rows: [
['aTitle', 'some text'],
['aTitle2', 'some text2'],
['aTitle3', 'some text3'],
],
},
frames: [
dataFrameToJSON(
new MutableDataFrame({
fields: [
{ name: 'title', values: ['aTitle', 'aTitle2', 'aTitle3'] },
{ name: 'text', values: ['some text', 'some text2', 'some text3'] },
],
})
),
],
},
},
@ -111,7 +107,7 @@ describe('MSSQLDatasource', () => {
beforeEach(() => {
fetchMock.mockImplementation(() => of(createFetchResponse(response)));
return ctx.ds.metricFindQuery(query).then((data: any) => {
return ctx.ds.metricFindQuery(query).then((data: MetricFindValue[]) => {
results = data;
});
});
@ -129,19 +125,15 @@ describe('MSSQLDatasource', () => {
const response = {
results: {
tempvar: {
meta: {
rowCount: 3,
},
refId: 'tempvar',
tables: [
{
columns: [{ text: '__value' }, { text: '__text' }],
rows: [
['value1', 'aTitle'],
['value2', 'aTitle2'],
['value3', 'aTitle3'],
],
},
frames: [
dataFrameToJSON(
new MutableDataFrame({
fields: [
{ name: '__value', values: ['value1', 'value2', 'value3'] },
{ name: '__text', values: ['aTitle', 'aTitle2', 'aTitle3'] },
],
})
),
],
},
},
@ -170,19 +162,15 @@ describe('MSSQLDatasource', () => {
const response = {
results: {
tempvar: {
meta: {
rowCount: 3,
},
refId: 'tempvar',
tables: [
{
columns: [{ text: '__text' }, { text: '__value' }],
rows: [
['aTitle', 'same'],
['aTitle', 'same'],
['aTitle', 'diff'],
],
},
frames: [
dataFrameToJSON(
new MutableDataFrame({
fields: [
{ name: '__text', values: ['aTitle', 'aTitle', 'aTitle'] },
{ name: '__value', values: ['same', 'same', 'diff'] },
],
})
),
],
},
},
@ -207,15 +195,12 @@ describe('MSSQLDatasource', () => {
const response = {
results: {
tempvar: {
meta: {
rowCount: 1,
},
refId: 'tempvar',
tables: [
{
columns: [{ text: 'title' }],
rows: [['aTitle']],
},
frames: [
dataFrameToJSON(
new MutableDataFrame({
fields: [{ name: 'test', values: ['aTitle'] }],
})
),
],
},
},
@ -227,10 +212,9 @@ describe('MSSQLDatasource', () => {
beforeEach(() => {
ctx.timeSrv.setTime(time);
fetchMock.mockImplementation(() => of(createFetchResponse(response)));
return ctx.ds.metricFindQuery(query);
return ctx.ds.metricFindQuery(query, { range: time });
});
it('should pass timerange to datasourceRequest', () => {

View File

@ -1,7 +1,21 @@
import { DataQuery, DataSourceJsonData } from '@grafana/data';
export interface MssqlQueryForInterpolation {
alias?: any;
format?: any;
rawSql?: any;
refId?: any;
refId: any;
hide?: any;
}
export type ResultFormat = 'time_series' | 'table';
export interface MssqlQuery extends DataQuery {
alias?: string;
format?: ResultFormat;
rawSql?: any;
}
export interface MssqlOptions extends DataSourceJsonData {
timeInterval: string;
}

View File

@ -1,32 +1,34 @@
import { map as _map, filter } from 'lodash';
import { Observable, of } from 'rxjs';
import { map as _map } from 'lodash';
import { of } from 'rxjs';
import { catchError, map, mapTo } from 'rxjs/operators';
import { getBackendSrv } from '@grafana/runtime';
import { ScopedVars } from '@grafana/data';
import MysqlQuery from 'app/plugins/datasource/mysql/mysql_query';
import ResponseParser, { MysqlResponse } from './response_parser';
import { MysqlMetricFindValue, MysqlQueryForInterpolation } from './types';
import { getBackendSrv, DataSourceWithBackend, FetchResponse, BackendDataSourceResponse } from '@grafana/runtime';
import { DataSourceInstanceSettings, ScopedVars, MetricFindValue, AnnotationEvent } from '@grafana/data';
import MySQLQueryModel from 'app/plugins/datasource/mysql/mysql_query_model';
import ResponseParser from './response_parser';
import { MysqlQueryForInterpolation, MySQLOptions, MySQLQuery } from './types';
import { getTemplateSrv, TemplateSrv } from 'app/features/templating/template_srv';
import { getTimeSrv, TimeSrv } from 'app/features/dashboard/services/TimeSrv';
import { getSearchFilterScopedVar } from '../../../features/variables/utils';
import { getTimeSrv, TimeSrv } from 'app/features/dashboard/services/TimeSrv';
export class MysqlDatasource {
export class MysqlDatasource extends DataSourceWithBackend<MySQLQuery, MySQLOptions> {
id: any;
name: any;
responseParser: ResponseParser;
queryModel: MysqlQuery;
queryModel: MySQLQueryModel;
interval: string;
constructor(
instanceSettings: any,
instanceSettings: DataSourceInstanceSettings<MySQLOptions>,
private readonly templateSrv: TemplateSrv = getTemplateSrv(),
private readonly timeSrv: TimeSrv = getTimeSrv()
) {
super(instanceSettings);
this.name = instanceSettings.name;
this.id = instanceSettings.id;
this.responseParser = new ResponseParser();
this.queryModel = new MysqlQuery({});
this.interval = (instanceSettings.jsonData || {}).timeInterval || '1m';
this.queryModel = new MySQLQueryModel({});
const settingsData = instanceSettings.jsonData || ({} as MySQLOptions);
this.interval = settingsData.timeInterval || '1m';
}
interpolateVariable = (value: string | string[] | number, variable: any) => {
@ -68,40 +70,24 @@ export class MysqlDatasource {
return expandedQueries;
}
query(options: any): Observable<MysqlResponse> {
const queries = filter(options.targets, (target) => {
return target.hide !== true;
}).map((target) => {
const queryModel = new MysqlQuery(target, this.templateSrv, options.scopedVars);
return {
refId: target.refId,
intervalMs: options.intervalMs,
maxDataPoints: options.maxDataPoints,
datasourceId: this.id,
rawSql: queryModel.render(this.interpolateVariable as any),
format: target.format,
};
});
if (queries.length === 0) {
return of({ data: [] });
filterQuery(query: MySQLQuery): boolean {
if (query.hide) {
return false;
}
return getBackendSrv()
.fetch({
url: '/api/tsdb/query',
method: 'POST',
data: {
from: options.range.from.valueOf().toString(),
to: options.range.to.valueOf().toString(),
queries: queries,
},
})
.pipe(map(this.responseParser.processQueryResult));
return true;
}
annotationQuery(options: any) {
applyTemplateVariables(target: MySQLQuery, scopedVars: ScopedVars): Record<string, any> {
const queryModel = new MySQLQueryModel(target, this.templateSrv, scopedVars);
return {
refId: target.refId,
datasourceId: this.id,
rawSql: queryModel.render(this.interpolateVariable as any),
format: target.format,
};
}
async annotationQuery(options: any): Promise<AnnotationEvent[]> {
if (!options.annotation.rawQuery) {
return Promise.reject({
message: 'Query missing in annotation definition',
@ -116,20 +102,26 @@ export class MysqlDatasource {
};
return getBackendSrv()
.fetch({
url: '/api/tsdb/query',
.fetch<BackendDataSourceResponse>({
url: '/api/ds/query',
method: 'POST',
data: {
from: options.range.from.valueOf().toString(),
to: options.range.to.valueOf().toString(),
queries: [query],
},
requestId: options.annotation.name,
})
.pipe(map((data: any) => this.responseParser.transformAnnotationResponse(options, data)))
.pipe(
map(
async (res: FetchResponse<BackendDataSourceResponse>) =>
await this.responseParser.transformAnnotationResponse(options, res.data)
)
)
.toPromise();
}
metricFindQuery(query: string, optionalOptions: any): Promise<MysqlMetricFindValue[]> {
metricFindQuery(query: string, optionalOptions: any): Promise<MetricFindValue[]> {
let refId = 'tempvar';
if (optionalOptions && optionalOptions.variable && optionalOptions.variable.name) {
refId = optionalOptions.variable.name;
@ -149,33 +141,30 @@ export class MysqlDatasource {
};
const range = this.timeSrv.timeRange();
const data = {
queries: [interpolatedQuery],
from: range.from.valueOf().toString(),
to: range.to.valueOf().toString(),
};
if (optionalOptions && optionalOptions.range && optionalOptions.range.from) {
data['from'] = optionalOptions.range.from.valueOf().toString();
}
if (optionalOptions && optionalOptions.range && optionalOptions.range.to) {
data['to'] = optionalOptions.range.to.valueOf().toString();
}
return getBackendSrv()
.fetch({
url: '/api/tsdb/query',
.fetch<BackendDataSourceResponse>({
url: '/api/ds/query',
method: 'POST',
data: data,
data: {
from: range.from.valueOf().toString(),
to: range.to.valueOf().toString(),
queries: [interpolatedQuery],
},
requestId: refId,
})
.pipe(map((data: any) => this.responseParser.parseMetricFindQueryResult(refId, data)))
.pipe(
map((rsp) => {
return this.responseParser.transformMetricFindResponse(rsp);
})
)
.toPromise();
}
testDatasource() {
testDatasource(): Promise<any> {
return getBackendSrv()
.fetch({
url: '/api/tsdb/query',
url: '/api/ds/query',
method: 'POST',
data: {
from: '5m',
@ -212,7 +201,7 @@ export class MysqlDatasource {
if (target.rawQuery) {
rawSql = target.rawSql;
} else {
const query = new MysqlQuery(target);
const query = new MySQLQueryModel(target);
rawSql = query.buildQuery();
}

View File

@ -5,6 +5,8 @@ import {
createResetHandler,
PasswordFieldEnum,
} from '../../../features/datasources/utils/passwordHandlers';
import { MySQLQuery } from './types';
import { DataSourcePlugin } from '@grafana/data';
class MysqlConfigCtrl {
static templateUrl = 'partials/config.html';
@ -31,10 +33,11 @@ const defaultQuery = `SELECT
class MysqlAnnotationsQueryCtrl {
static templateUrl = 'partials/annotations.editor.html';
annotation: any;
declare annotation: any;
/** @ngInject */
constructor() {
constructor($scope: any) {
this.annotation = $scope.ctrl.annotation;
this.annotation.rawQuery = this.annotation.rawQuery || defaultQuery;
}
}
@ -46,3 +49,8 @@ export {
MysqlConfigCtrl as ConfigCtrl,
MysqlAnnotationsQueryCtrl as AnnotationsQueryCtrl,
};
export const plugin = new DataSourcePlugin<MysqlDatasource, MySQLQuery>(MysqlDatasource)
.setQueryCtrl(MysqlQueryCtrl)
.setConfigCtrl(MysqlConfigCtrl)
.setAnnotationQueryCtrl(MysqlAnnotationsQueryCtrl);

View File

@ -2,7 +2,7 @@ import { find, map } from 'lodash';
import { TemplateSrv } from '@grafana/runtime';
import { ScopedVars } from '@grafana/data';
export default class MysqlQuery {
export default class MySQLQueryModel {
target: any;
templateSrv: any;
scopedVars: any;

View File

@ -3,7 +3,7 @@ import appEvents from 'app/core/app_events';
import { MysqlMetaQuery } from './meta_query';
import { QueryCtrl } from 'app/plugins/sdk';
import { SqlPart } from 'app/core/components/sql_part/sql_part';
import MysqlQuery from './mysql_query';
import MySQLQueryModel from './mysql_query_model';
import sqlPart from './sql_part';
import { auto } from 'angular';
import { PanelEvents, QueryResultMeta } from '@grafana/data';
@ -27,7 +27,7 @@ export class MysqlQueryCtrl extends QueryCtrl {
lastQueryError?: string;
showHelp!: boolean;
queryModel: MysqlQuery;
queryModel: MySQLQueryModel;
metaBuilder: MysqlMetaQuery;
lastQueryMeta?: QueryResultMeta;
tableSegment: any;
@ -50,7 +50,7 @@ export class MysqlQueryCtrl extends QueryCtrl {
super($scope, $injector);
this.target = this.target;
this.queryModel = new MysqlQuery(this.target, templateSrv, this.panel.scopedVars);
this.queryModel = new MySQLQueryModel(this.target, templateSrv, this.panel.scopedVars);
this.metaBuilder = new MysqlMetaQuery(this.target, this.queryModel);
this.updateProjection();

View File

@ -1,91 +1,57 @@
import { map } from 'lodash';
import { MysqlMetricFindValue } from './types';
interface TableResponse extends Record<string, any> {
type: string;
refId: string;
meta: any;
}
interface SeriesResponse extends Record<string, any> {
target: string;
refId: string;
meta: any;
datapoints: [any[]];
}
export interface MysqlResponse {
data: Array<TableResponse | SeriesResponse>;
}
import { AnnotationEvent, DataFrame, FieldType, MetricFindValue } from '@grafana/data';
import { BackendDataSourceResponse, FetchResponse, toDataQueryResponse } from '@grafana/runtime';
export default class ResponseParser {
processQueryResult(res: any): MysqlResponse {
const data: any[] = [];
transformMetricFindResponse(raw: FetchResponse<BackendDataSourceResponse>): MetricFindValue[] {
const frames = toDataQueryResponse(raw).data as DataFrame[];
if (!res.data.results) {
return { data: data };
}
for (const key in res.data.results) {
const queryRes = res.data.results[key];
if (queryRes.series) {
for (const series of queryRes.series) {
data.push({
target: series.name,
datapoints: series.points,
refId: queryRes.refId,
meta: queryRes.meta,
});
}
}
if (queryRes.tables) {
for (const table of queryRes.tables) {
table.type = 'table';
table.refId = queryRes.refId;
table.meta = queryRes.meta;
data.push(table);
}
}
}
return { data: data };
}
parseMetricFindQueryResult(refId: string, results: any): MysqlMetricFindValue[] {
if (!results || results.data.length === 0 || results.data.results[refId].meta.rowCount === 0) {
if (!frames || !frames.length) {
return [];
}
const columns = results.data.results[refId].tables[0].columns;
const rows = results.data.results[refId].tables[0].rows;
const textColIndex = this.findColIndex(columns, '__text');
const valueColIndex = this.findColIndex(columns, '__value');
const frame = frames[0];
if (columns.length === 2 && textColIndex !== -1 && valueColIndex !== -1) {
return this.transformToKeyValueList(rows, textColIndex, valueColIndex);
const values: MetricFindValue[] = [];
const textField = frame.fields.find((f) => f.name === '__text');
const valueField = frame.fields.find((f) => f.name === '__value');
if (textField && valueField) {
for (let i = 0; i < textField.values.length; i++) {
values.push({ text: '' + textField.values.get(i), value: '' + valueField.values.get(i) });
}
} else {
const textFields = frame.fields.filter((f) => f.type === FieldType.string);
if (textFields) {
values.push(
...textFields
.flatMap((f) => f.values.toArray())
.map((v) => ({
text: '' + v,
}))
);
}
}
return this.transformToSimpleList(rows);
return Array.from(new Set(values.map((v) => v.text))).map((text) => ({
text,
value: values.find((v) => v.text === text)?.value,
}));
}
transformToKeyValueList(rows: any, textColIndex: number, valueColIndex: number) {
transformToKeyValueList(rows: any, textColIndex: number, valueColIndex: number): MetricFindValue[] {
const res = [];
for (let i = 0; i < rows.length; i++) {
if (!this.containsKey(res, rows[i][textColIndex])) {
res.push({
text: rows[i][textColIndex],
value: rows[i][valueColIndex],
});
res.push({ text: rows[i][textColIndex], value: rows[i][valueColIndex] });
}
}
return res;
}
transformToSimpleList(rows: any) {
transformToSimpleList(rows: any): MetricFindValue[] {
const res = [];
for (let i = 0; i < rows.length; i++) {
@ -120,47 +86,38 @@ export default class ResponseParser {
return false;
}
transformAnnotationResponse(options: any, data: any) {
const table = data.data.results[options.annotation.name].tables[0];
async transformAnnotationResponse(options: any, data: BackendDataSourceResponse): Promise<AnnotationEvent[]> {
const frames = toDataQueryResponse({ data: data }).data as DataFrame[];
const frame = frames[0];
const timeField = frame.fields.find((f) => f.name === 'time' || f.name === 'time_sec');
let timeColumnIndex = -1;
let timeEndColumnIndex = -1;
let textColumnIndex = -1;
let tagsColumnIndex = -1;
for (let i = 0; i < table.columns.length; i++) {
if (table.columns[i].text === 'time_sec' || table.columns[i].text === 'time') {
timeColumnIndex = i;
} else if (table.columns[i].text === 'timeend') {
timeEndColumnIndex = i;
} else if (table.columns[i].text === 'title') {
throw {
message: 'The title column for annotations is deprecated, now only a column named text is returned',
};
} else if (table.columns[i].text === 'text') {
textColumnIndex = i;
} else if (table.columns[i].text === 'tags') {
tagsColumnIndex = i;
}
if (!timeField) {
throw new Error('Missing mandatory time column (with time column alias) in annotation query');
}
if (timeColumnIndex === -1) {
throw {
message: 'Missing mandatory time column (with time_sec column alias) in annotation query.',
};
if (frame.fields.find((f) => f.name === 'title')) {
throw new Error('The title column for annotations is deprecated, now only a column named text is returned');
}
const list = [];
for (let i = 0; i < table.rows.length; i++) {
const row = table.rows[i];
const timeEnd =
timeEndColumnIndex !== -1 && row[timeEndColumnIndex] ? Math.floor(row[timeEndColumnIndex]) : undefined;
const timeEndField = frame.fields.find((f) => f.name === 'timeend');
const textField = frame.fields.find((f) => f.name === 'text');
const tagsField = frame.fields.find((f) => f.name === 'tags');
const list: AnnotationEvent[] = [];
for (let i = 0; i < frame.length; i++) {
const timeEnd = timeEndField && timeEndField.values.get(i) ? Math.floor(timeEndField.values.get(i)) : undefined;
list.push({
annotation: options.annotation,
time: Math.floor(row[timeColumnIndex]),
time: Math.floor(timeField.values.get(i)),
timeEnd,
text: row[textColumnIndex] ? row[textColumnIndex].toString() : '',
tags: row[tagsColumnIndex] ? row[tagsColumnIndex].trim().split(/\s*,\s*/) : [],
text: textField && textField.values.get(i) ? textField.values.get(i) : '',
tags:
tagsField && tagsField.values.get(i)
? tagsField.values
.get(i)
.trim()
.split(/\s*,\s*/)
: [],
});
}

View File

@ -1,22 +1,32 @@
import { of } from 'rxjs';
import { dateTime, toUtc } from '@grafana/data';
import {
dataFrameToJSON,
DataQueryRequest,
DataSourceInstanceSettings,
dateTime,
MutableDataFrame,
toUtc,
} from '@grafana/data';
import { MysqlDatasource } from '../datasource';
import { backendSrv } from 'app/core/services/backend_srv'; // will use the version in __mocks__
import { TemplateSrv } from 'app/features/templating/template_srv';
import { initialCustomVariableModelState } from '../../../../features/variables/custom/reducer';
import { FetchResponse } from '@grafana/runtime';
jest.mock('@grafana/runtime', () => ({
...((jest.requireActual('@grafana/runtime') as unknown) as object),
getBackendSrv: () => backendSrv,
}));
import { FetchResponse, setBackendSrv } from '@grafana/runtime';
import { MySQLOptions, MySQLQuery } from './../types';
describe('MySQLDatasource', () => {
const fetchMock = jest.spyOn(backendSrv, 'fetch');
const setupTextContext = (response: any) => {
const instanceSettings = { name: 'mysql' };
jest.clearAllMocks();
setBackendSrv(backendSrv);
const fetchMock = jest.spyOn(backendSrv, 'fetch');
const instanceSettings = ({
jsonData: {
defaultProject: 'testproject',
},
} as unknown) as DataSourceInstanceSettings<MySQLOptions>;
const templateSrv: TemplateSrv = new TemplateSrv();
const variable = { ...initialCustomVariableModelState };
const raw = {
from: toUtc('2018-04-25 10:00'),
to: toUtc('2018-04-25 11:00'),
@ -28,19 +38,44 @@ describe('MySQLDatasource', () => {
raw: raw,
}),
};
const variable = { ...initialCustomVariableModelState };
jest.clearAllMocks();
fetchMock.mockImplementation((options) => of(createFetchResponse(response)));
const ds = new MysqlDatasource(instanceSettings, templateSrv, timeSrvMock);
return { ds, variable, templateSrv };
return { ds, variable, templateSrv, fetchMock };
};
describe('When performing annotationQuery', () => {
const annotationName = 'MyAnno';
describe('When performing a query with hidden target', () => {
it('should return empty result and backendSrv.fetch should not be called', async () => {
const options = ({
range: {
from: dateTime(1432288354),
to: dateTime(1432288401),
},
targets: [
{
format: 'table',
rawQuery: true,
rawSql: 'select time, metric, value from grafana_metric',
refId: 'A',
datasource: 'gdev-ds',
hide: true,
},
],
} as unknown) as DataQueryRequest<MySQLQuery>;
const { ds, fetchMock } = setupTextContext({});
await expect(ds.query(options)).toEmitValuesWith((received) => {
expect(received[0]).toEqual({ data: [] });
expect(fetchMock).not.toHaveBeenCalled();
});
});
});
describe('When performing annotationQuery', () => {
let results: any;
const annotationName = 'MyAnno';
const options = {
annotation: {
name: annotationName,
@ -51,38 +86,37 @@ describe('MySQLDatasource', () => {
to: dateTime(1432288401),
},
};
const response = {
results: {
MyAnno: {
refId: annotationName,
tables: [
{
columns: [{ text: 'time_sec' }, { text: 'text' }, { text: 'tags' }],
rows: [
[1432288355, 'some text', 'TagA,TagB'],
[1432288390, 'some text2', ' TagB , TagC'],
[1432288400, 'some text3'],
],
},
frames: [
dataFrameToJSON(
new MutableDataFrame({
fields: [
{ name: 'time_sec', values: [1432288355, 1432288390, 1432288400] },
{ name: 'text', values: ['some text', 'some text2', 'some text3'] },
{ name: 'tags', values: ['TagA,TagB', ' TagB , TagC', null] },
],
})
),
],
},
},
};
it('should return annotation list', async () => {
beforeEach(async () => {
const { ds } = setupTextContext(response);
const results = await ds.annotationQuery(options);
const data = await ds.annotationQuery(options);
results = data;
});
it('should return annotation list', async () => {
expect(results.length).toBe(3);
expect(results[0].text).toBe('some text');
expect(results[0].tags[0]).toBe('TagA');
expect(results[0].tags[1]).toBe('TagB');
expect(results[1].tags[0]).toBe('TagB');
expect(results[1].tags[1]).toBe('TagC');
expect(results[2].tags.length).toBe(0);
});
});
@ -92,19 +126,19 @@ describe('MySQLDatasource', () => {
const response = {
results: {
tempvar: {
meta: {
rowCount: 3,
},
refId: 'tempvar',
tables: [
{
columns: [{ text: 'title' }, { text: 'text' }],
rows: [
['aTitle', 'some text'],
['aTitle2', 'some text2'],
['aTitle3', 'some text3'],
],
},
frames: [
dataFrameToJSON(
new MutableDataFrame({
fields: [
{ name: 'title', values: ['aTitle', 'aTitle2', 'aTitle3'] },
{ name: 'text', values: ['some text', 'some text2', 'some text3'] },
],
meta: {
executedQueryString: 'select * from atable',
},
})
),
],
},
},
@ -125,26 +159,26 @@ describe('MySQLDatasource', () => {
const response = {
results: {
tempvar: {
meta: {
rowCount: 3,
},
refId: 'tempvar',
tables: [
{
columns: [{ text: 'title' }, { text: 'text' }],
rows: [
['aTitle', 'some text'],
['aTitle2', 'some text2'],
['aTitle3', 'some text3'],
],
},
frames: [
dataFrameToJSON(
new MutableDataFrame({
fields: [
{ name: 'title', values: ['aTitle', 'aTitle2', 'aTitle3'] },
{ name: 'text', values: ['some text', 'some text2', 'some text3'] },
],
meta: {
executedQueryString: 'select * from atable',
},
})
),
],
},
},
};
it('should return list of all column values', async () => {
const { ds } = setupTextContext(response);
const { ds, fetchMock } = setupTextContext(response);
const results = await ds.metricFindQuery(query, { searchFilter: 'aTit' });
expect(fetchMock).toBeCalledTimes(1);
@ -160,26 +194,26 @@ describe('MySQLDatasource', () => {
const response = {
results: {
tempvar: {
meta: {
rowCount: 3,
},
refId: 'tempvar',
tables: [
{
columns: [{ text: 'title' }, { text: 'text' }],
rows: [
['aTitle', 'some text'],
['aTitle2', 'some text2'],
['aTitle3', 'some text3'],
],
},
frames: [
dataFrameToJSON(
new MutableDataFrame({
fields: [
{ name: 'title', values: ['aTitle', 'aTitle2', 'aTitle3'] },
{ name: 'text', values: ['some text', 'some text2', 'some text3'] },
],
meta: {
executedQueryString: 'select * from atable',
},
})
),
],
},
},
};
it('should return list of all column values', async () => {
const { ds } = setupTextContext(response);
const { ds, fetchMock } = setupTextContext(response);
const results = await ds.metricFindQuery(query, {});
expect(fetchMock).toBeCalledTimes(1);
@ -193,19 +227,19 @@ describe('MySQLDatasource', () => {
const response = {
results: {
tempvar: {
meta: {
rowCount: 3,
},
refId: 'tempvar',
tables: [
{
columns: [{ text: '__value' }, { text: '__text' }],
rows: [
['value1', 'aTitle'],
['value2', 'aTitle2'],
['value3', 'aTitle3'],
],
},
frames: [
dataFrameToJSON(
new MutableDataFrame({
fields: [
{ name: '__value', values: ['value1', 'value2', 'value3'] },
{ name: '__text', values: ['aTitle', 'aTitle2', 'aTitle3'] },
],
meta: {
executedQueryString: 'select * from atable',
},
})
),
],
},
},
@ -228,19 +262,19 @@ describe('MySQLDatasource', () => {
const response = {
results: {
tempvar: {
meta: {
rowCount: 3,
},
refId: 'tempvar',
tables: [
{
columns: [{ text: '__text' }, { text: '__value' }],
rows: [
['aTitle', 'same'],
['aTitle', 'same'],
['aTitle', 'diff'],
],
},
frames: [
dataFrameToJSON(
new MutableDataFrame({
fields: [
{ name: '__text', values: ['aTitle', 'aTitle', 'aTitle'] },
{ name: '__value', values: ['same', 'same', 'diff'] },
],
meta: {
executedQueryString: 'select * from atable',
},
})
),
],
},
},

View File

@ -1,13 +1,20 @@
import { MetricFindValue } from '@grafana/data';
import { DataQuery, DataSourceJsonData } from '@grafana/data';
export interface MysqlQueryForInterpolation {
alias?: any;
format?: any;
rawSql?: any;
refId?: any;
refId: any;
hide?: any;
}
export interface MysqlMetricFindValue extends MetricFindValue {
value?: string;
export interface MySQLOptions extends DataSourceJsonData {
timeInterval: string;
}
export type ResultFormat = 'time_series' | 'table';
export interface MySQLQuery extends DataQuery {
alias?: string;
format?: ResultFormat;
rawSql?: any;
}

View File

@ -1,36 +1,37 @@
import { map as _map, filter } from 'lodash';
import { Observable, of } from 'rxjs';
import { map as _map } from 'lodash';
import { map } from 'rxjs/operators';
import { getBackendSrv } from '@grafana/runtime';
import { DataQueryResponse, ScopedVars } from '@grafana/data';
import { BackendDataSourceResponse, DataSourceWithBackend, FetchResponse, getBackendSrv } from '@grafana/runtime';
import { AnnotationEvent, DataSourceInstanceSettings, MetricFindValue, ScopedVars } from '@grafana/data';
import ResponseParser from './response_parser';
import PostgresQuery from 'app/plugins/datasource/postgres/postgres_query';
import PostgresQueryModel from 'app/plugins/datasource/postgres/postgres_query_model';
import { getTemplateSrv, TemplateSrv } from 'app/features/templating/template_srv';
import { getTimeSrv, TimeSrv } from 'app/features/dashboard/services/TimeSrv';
//Types
import { PostgresMetricFindValue, PostgresQueryForInterpolation } from './types';
import { PostgresOptions, PostgresQuery, PostgresQueryForInterpolation } from './types';
import { getSearchFilterScopedVar } from '../../../features/variables/utils';
export class PostgresDatasource {
export class PostgresDatasource extends DataSourceWithBackend<PostgresQuery, PostgresOptions> {
id: any;
name: any;
jsonData: any;
responseParser: ResponseParser;
queryModel: PostgresQuery;
queryModel: PostgresQueryModel;
interval: string;
constructor(
instanceSettings: { name: any; id?: any; jsonData?: any },
instanceSettings: DataSourceInstanceSettings<PostgresOptions>,
private readonly templateSrv: TemplateSrv = getTemplateSrv(),
private readonly timeSrv: TimeSrv = getTimeSrv()
) {
super(instanceSettings);
this.name = instanceSettings.name;
this.id = instanceSettings.id;
this.jsonData = instanceSettings.jsonData;
this.responseParser = new ResponseParser();
this.queryModel = new PostgresQuery({});
this.interval = (instanceSettings.jsonData || {}).timeInterval || '1m';
this.queryModel = new PostgresQueryModel({});
const settingsData = instanceSettings.jsonData || ({} as PostgresOptions);
this.interval = settingsData.timeInterval || '1m';
}
interpolateVariable = (value: string | string[], variable: { multi: any; includeAll: any }) => {
@ -71,40 +72,21 @@ export class PostgresDatasource {
return expandedQueries;
}
query(options: any): Observable<DataQueryResponse> {
const queries = filter(options.targets, (target) => {
return target.hide !== true;
}).map((target) => {
const queryModel = new PostgresQuery(target, this.templateSrv, options.scopedVars);
return {
refId: target.refId,
intervalMs: options.intervalMs,
maxDataPoints: options.maxDataPoints,
datasourceId: this.id,
rawSql: queryModel.render(this.interpolateVariable),
format: target.format,
};
});
if (queries.length === 0) {
return of({ data: [] });
}
return getBackendSrv()
.fetch({
url: '/api/tsdb/query',
method: 'POST',
data: {
from: options.range.from.valueOf().toString(),
to: options.range.to.valueOf().toString(),
queries: queries,
},
})
.pipe(map(this.responseParser.processQueryResult));
filterQuery(query: PostgresQuery): boolean {
return !query.hide;
}
annotationQuery(options: any) {
applyTemplateVariables(target: PostgresQuery, scopedVars: ScopedVars): Record<string, any> {
const queryModel = new PostgresQueryModel(target, this.templateSrv, scopedVars);
return {
refId: target.refId,
datasourceId: this.id,
rawSql: queryModel.render(this.interpolateVariable as any),
format: target.format,
};
}
async annotationQuery(options: any): Promise<AnnotationEvent[]> {
if (!options.annotation.rawQuery) {
return Promise.reject({
message: 'Query missing in annotation definition',
@ -119,23 +101,26 @@ export class PostgresDatasource {
};
return getBackendSrv()
.fetch({
url: '/api/tsdb/query',
.fetch<BackendDataSourceResponse>({
url: '/api/ds/query',
method: 'POST',
data: {
from: options.range.from.valueOf().toString(),
to: options.range.to.valueOf().toString(),
queries: [query],
},
requestId: options.annotation.name,
})
.pipe(map((data: any) => this.responseParser.transformAnnotationResponse(options, data)))
.pipe(
map(
async (res: FetchResponse<BackendDataSourceResponse>) =>
await this.responseParser.transformAnnotationResponse(options, res.data)
)
)
.toPromise();
}
metricFindQuery(
query: string,
optionalOptions: { variable?: any; searchFilter?: string }
): Promise<PostgresMetricFindValue[]> {
metricFindQuery(query: string, optionalOptions: any): Promise<MetricFindValue[]> {
let refId = 'tempvar';
if (optionalOptions && optionalOptions.variable && optionalOptions.variable.name) {
refId = optionalOptions.variable.name;
@ -155,33 +140,37 @@ export class PostgresDatasource {
};
const range = this.timeSrv.timeRange();
const data = {
queries: [interpolatedQuery],
from: range.from.valueOf().toString(),
to: range.to.valueOf().toString(),
};
return getBackendSrv()
.fetch({
url: '/api/tsdb/query',
.fetch<BackendDataSourceResponse>({
url: '/api/ds/query',
method: 'POST',
data: data,
data: {
from: range.from.valueOf().toString(),
to: range.to.valueOf().toString(),
queries: [interpolatedQuery],
},
requestId: refId,
})
.pipe(map((data: any) => this.responseParser.parseMetricFindQueryResult(refId, data)))
.pipe(
map((rsp) => {
return this.responseParser.transformMetricFindResponse(rsp);
})
)
.toPromise();
}
getVersion() {
getVersion(): Promise<any> {
return this.metricFindQuery("SELECT current_setting('server_version_num')::int/100", {});
}
getTimescaleDBVersion() {
getTimescaleDBVersion(): Promise<any> {
return this.metricFindQuery("SELECT extversion FROM pg_extension WHERE extname = 'timescaledb'", {});
}
testDatasource() {
testDatasource(): Promise<any> {
return this.metricFindQuery('SELECT 1', {})
.then((res: any) => {
.then(() => {
return { status: 'success', message: 'Database Connection OK' };
})
.catch((err: any) => {
@ -200,7 +189,7 @@ export class PostgresDatasource {
if (target.rawQuery) {
rawSql = target.rawSql;
} else {
const query = new PostgresQuery(target);
const query = new PostgresQueryModel(target);
rawSql = query.buildQuery();
}

View File

@ -1,4 +1,4 @@
import QueryModel from './postgres_query';
import QueryModel from './postgres_query_model';
export class PostgresMetaQuery {
constructor(private target: { table: string; timeColumn: string }, private queryModel: QueryModel) {}

View File

@ -1,6 +1,8 @@
import { PostgresDatasource } from './datasource';
import { PostgresQueryCtrl } from './query_ctrl';
import { PostgresConfigCtrl } from './config_ctrl';
import { PostgresQuery } from './types';
import { DataSourcePlugin } from '@grafana/data';
const defaultQuery = `SELECT
extract(epoch from time_column) AS time,
@ -24,10 +26,7 @@ class PostgresAnnotationsQueryCtrl {
}
}
export {
PostgresDatasource,
PostgresDatasource as Datasource,
PostgresQueryCtrl as QueryCtrl,
PostgresConfigCtrl as ConfigCtrl,
PostgresAnnotationsQueryCtrl as AnnotationsQueryCtrl,
};
export const plugin = new DataSourcePlugin<PostgresDatasource, PostgresQuery>(PostgresDatasource)
.setQueryCtrl(PostgresQueryCtrl)
.setConfigCtrl(PostgresConfigCtrl)
.setAnnotationQueryCtrl(PostgresAnnotationsQueryCtrl);

View File

@ -2,7 +2,7 @@ import { find, map } from 'lodash';
import { TemplateSrv } from '@grafana/runtime';
import { ScopedVars } from '@grafana/data';
export default class PostgresQuery {
export default class PostgresQueryModel {
target: any;
templateSrv: any;
scopedVars: any;

View File

@ -3,7 +3,7 @@ import appEvents from 'app/core/app_events';
import { PostgresMetaQuery } from './meta_query';
import { QueryCtrl } from 'app/plugins/sdk';
import { SqlPart } from 'app/core/components/sql_part/sql_part';
import PostgresQuery from './postgres_query';
import PostgresQueryModel from './postgres_query_model';
import sqlPart from './sql_part';
import { auto } from 'angular';
import { PanelEvents, QueryResultMeta } from '@grafana/data';
@ -24,7 +24,7 @@ export class PostgresQueryCtrl extends QueryCtrl {
static templateUrl = 'partials/query.editor.html';
formats: any[];
queryModel: PostgresQuery;
queryModel: PostgresQueryModel;
metaBuilder: PostgresMetaQuery;
lastQueryMeta?: QueryResultMeta;
lastQueryError?: string;
@ -48,7 +48,7 @@ export class PostgresQueryCtrl extends QueryCtrl {
) {
super($scope, $injector);
this.target = this.target;
this.queryModel = new PostgresQuery(this.target, templateSrv, this.panel.scopedVars);
this.queryModel = new PostgresQueryModel(this.target, templateSrv, this.panel.scopedVars);
this.metaBuilder = new PostgresMetaQuery(this.target, this.queryModel);
this.updateProjection();

View File

@ -1,55 +1,42 @@
import { AnnotationEvent, DataFrame, FieldType, MetricFindValue } from '@grafana/data';
import { BackendDataSourceResponse, FetchResponse, toDataQueryResponse } from '@grafana/runtime';
import { map } from 'lodash';
export default class ResponseParser {
processQueryResult(res: any) {
const data: any[] = [];
transformMetricFindResponse(raw: FetchResponse<BackendDataSourceResponse>): MetricFindValue[] {
const frames = toDataQueryResponse(raw).data as DataFrame[];
if (!res.data.results) {
return { data: data };
}
for (const key in res.data.results) {
const queryRes = res.data.results[key];
if (queryRes.series) {
for (const series of queryRes.series) {
data.push({
target: series.name,
datapoints: series.points,
refId: queryRes.refId,
meta: queryRes.meta,
});
}
}
if (queryRes.tables) {
for (const table of queryRes.tables) {
table.type = 'table';
table.refId = queryRes.refId;
table.meta = queryRes.meta;
data.push(table);
}
}
}
return { data: data };
}
parseMetricFindQueryResult(refId: string, results: any) {
if (!results || results.data.length === 0 || results.data.results[refId].meta.rowCount === 0) {
if (!frames || !frames.length) {
return [];
}
const columns = results.data.results[refId].tables[0].columns;
const rows = results.data.results[refId].tables[0].rows;
const textColIndex = this.findColIndex(columns, '__text');
const valueColIndex = this.findColIndex(columns, '__value');
const frame = frames[0];
if (columns.length === 2 && textColIndex !== -1 && valueColIndex !== -1) {
return this.transformToKeyValueList(rows, textColIndex, valueColIndex);
const values: MetricFindValue[] = [];
const textField = frame.fields.find((f) => f.name === '__text');
const valueField = frame.fields.find((f) => f.name === '__value');
if (textField && valueField) {
for (let i = 0; i < textField.values.length; i++) {
values.push({ text: '' + textField.values.get(i), value: '' + valueField.values.get(i) });
}
} else {
const textFields = frame.fields.filter((f) => f.type === FieldType.string);
if (textFields) {
values.push(
...textFields
.flatMap((f) => f.values.toArray())
.map((v) => ({
text: '' + v,
}))
);
}
}
return this.transformToSimpleList(rows);
return Array.from(new Set(values.map((v) => v.text))).map((text) => ({
text,
value: values.find((v) => v.text === text)?.value,
}));
}
transformToKeyValueList(rows: any, textColIndex: number, valueColIndex: number) {
@ -102,45 +89,34 @@ export default class ResponseParser {
return false;
}
transformAnnotationResponse(options: any, data: any) {
const table = data.data.results[options.annotation.name].tables[0];
async transformAnnotationResponse(options: any, data: BackendDataSourceResponse): Promise<AnnotationEvent[]> {
const frames = toDataQueryResponse({ data: data }).data as DataFrame[];
const frame = frames[0];
const timeField = frame.fields.find((f) => f.name === 'time');
let timeColumnIndex = -1;
let timeEndColumnIndex = -1;
const titleColumnIndex = -1;
let textColumnIndex = -1;
let tagsColumnIndex = -1;
for (let i = 0; i < table.columns.length; i++) {
if (table.columns[i].text === 'time') {
timeColumnIndex = i;
} else if (table.columns[i].text === 'timeend') {
timeEndColumnIndex = i;
} else if (table.columns[i].text === 'text') {
textColumnIndex = i;
} else if (table.columns[i].text === 'tags') {
tagsColumnIndex = i;
}
if (!timeField) {
throw new Error('Missing mandatory time column (with time column alias) in annotation query');
}
if (timeColumnIndex === -1) {
return Promise.reject({
message: 'Missing mandatory time column in annotation query.',
});
}
const timeEndField = frame.fields.find((f) => f.name === 'timeend');
const textField = frame.fields.find((f) => f.name === 'text');
const tagsField = frame.fields.find((f) => f.name === 'tags');
const list = [];
for (let i = 0; i < table.rows.length; i++) {
const row = table.rows[i];
const timeEnd =
timeEndColumnIndex !== -1 && row[timeEndColumnIndex] ? Math.floor(row[timeEndColumnIndex]) : undefined;
const list: AnnotationEvent[] = [];
for (let i = 0; i < frame.length; i++) {
const timeEnd = timeEndField && timeEndField.values.get(i) ? Math.floor(timeEndField.values.get(i)) : undefined;
list.push({
annotation: options.annotation,
time: Math.floor(row[timeColumnIndex]),
time: Math.floor(timeField.values.get(i)),
timeEnd,
title: row[titleColumnIndex],
text: row[textColumnIndex],
tags: row[tagsColumnIndex] ? row[tagsColumnIndex].trim().split(/\s*,\s*/) : [],
text: textField && textField.values.get(i) ? textField.values.get(i) : '',
tags:
tagsField && tagsField.values.get(i)
? tagsField.values
.get(i)
.trim()
.split(/\s*,\s*/)
: [],
});
}

View File

@ -1,25 +1,47 @@
import { of } from 'rxjs';
import { TestScheduler } from 'rxjs/testing';
import { FetchResponse } from '@grafana/runtime';
import { dateTime, toUtc } from '@grafana/data';
import {
dataFrameToJSON,
DataQueryRequest,
DataSourceInstanceSettings,
dateTime,
MutableDataFrame,
toUtc,
} from '@grafana/data';
import { PostgresDatasource } from '../datasource';
import { backendSrv } from 'app/core/services/backend_srv'; // will use the version in __mocks__
import { TemplateSrv } from 'app/features/templating/template_srv';
import { initialCustomVariableModelState } from '../../../../features/variables/custom/reducer';
import { TimeSrv } from '../../../../features/dashboard/services/TimeSrv';
import { PostgresOptions, PostgresQuery } from '../types';
jest.mock('@grafana/runtime', () => ({
...((jest.requireActual('@grafana/runtime') as unknown) as object),
getBackendSrv: () => backendSrv,
}));
jest.mock('@grafana/runtime/src/services', () => ({
...((jest.requireActual('@grafana/runtime/src/services') as unknown) as object),
getBackendSrv: () => backendSrv,
getDataSourceSrv: () => {
return {
getInstanceSettings: () => ({ id: 8674 }),
};
},
}));
describe('PostgreSQLDatasource', () => {
const fetchMock = jest.spyOn(backendSrv, 'fetch');
const setupTestContext = (data: any) => {
jest.clearAllMocks();
fetchMock.mockImplementation(() => of(createFetchResponse(data)));
const instanceSettings = ({
jsonData: {
defaultProject: 'testproject',
},
} as unknown) as DataSourceInstanceSettings<PostgresOptions>;
const templateSrv: TemplateSrv = new TemplateSrv();
const raw = {
from: toUtc('2018-04-25 10:00'),
@ -33,7 +55,7 @@ describe('PostgreSQLDatasource', () => {
}),
} as unknown) as TimeSrv;
const variable = { ...initialCustomVariableModelState };
const ds = new PostgresDatasource({ name: 'dsql' }, templateSrv, timeSrvMock);
const ds = new PostgresDatasource(instanceSettings, templateSrv, timeSrvMock);
return { ds, templateSrv, timeSrvMock, variable };
};
@ -80,42 +102,66 @@ describe('PostgreSQLDatasource', () => {
},
],
};
const data = {
const response = {
results: {
A: {
refId: 'A',
meta: {
executedQueryString: 'select time, metric from grafana_metric',
rowCount: 0,
},
series: [
{
name: 'America',
points: [[30.226249741223704, 1599643351085]],
},
frames: [
dataFrameToJSON(
new MutableDataFrame({
fields: [
{ name: 'time', values: [1599643351085] },
{ name: 'metric', values: [30.226249741223704], labels: { metric: 'America' } },
],
meta: {
executedQueryString: 'select time, metric from grafana_metric',
},
})
),
],
tables: null,
},
},
};
const values = { a: createFetchResponse(data) };
const values = { a: createFetchResponse(response) };
const marble = '-a|';
const expectedMarble = '-a|';
const expectedValues = {
a: {
data: [
{
datapoints: [[30.226249741223704, 1599643351085]],
fields: [
{
config: {},
entities: {},
name: 'time',
type: 'time',
values: {
buffer: [1599643351085],
},
},
{
config: {},
entities: {},
labels: {
metric: 'America',
},
name: 'metric',
type: 'number',
values: {
buffer: [30.226249741223704],
},
},
],
length: 1,
meta: {
executedQueryString: 'select time, metric from grafana_metric',
rowCount: 0,
},
name: undefined,
refId: 'A',
target: 'America',
},
],
state: 'Done',
},
};
@ -140,63 +186,73 @@ describe('PostgreSQLDatasource', () => {
},
],
};
const data = {
const response = {
results: {
A: {
refId: 'A',
meta: {
executedQueryString: 'select time, metric, value from grafana_metric',
rowCount: 1,
},
series: null,
tables: [
{
columns: [
{
text: 'time',
frames: [
dataFrameToJSON(
new MutableDataFrame({
fields: [
{ name: 'time', values: [1599643351085] },
{ name: 'metric', values: ['America'] },
{ name: 'value', values: [30.226249741223704] },
],
meta: {
executedQueryString: 'select time, metric, value from grafana_metric',
},
{
text: 'metric',
},
{
text: 'value',
},
],
rows: [[1599643351085, 'America', 30.226249741223704]],
},
})
),
],
},
},
};
const values = { a: createFetchResponse(data) };
const values = { a: createFetchResponse(response) };
const marble = '-a|';
const expectedMarble = '-a|';
const expectedValues = {
a: {
data: [
{
columns: [
fields: [
{
text: 'time',
config: {},
entities: {},
name: 'time',
type: 'time',
values: {
buffer: [1599643351085],
},
},
{
text: 'metric',
config: {},
entities: {},
name: 'metric',
type: 'string',
values: {
buffer: ['America'],
},
},
{
text: 'value',
config: {},
entities: {},
name: 'value',
type: 'number',
values: {
buffer: [30.226249741223704],
},
},
],
rows: [[1599643351085, 'America', 30.226249741223704]],
type: 'table',
refId: 'A',
length: 1,
meta: {
executedQueryString: 'select time, metric, value from grafana_metric',
rowCount: 1,
},
name: undefined,
refId: 'A',
},
],
state: 'Done',
},
};
@ -206,7 +262,7 @@ describe('PostgreSQLDatasource', () => {
describe('When performing a query with hidden target', () => {
it('should return empty result and backendSrv.fetch should not be called', async () => {
const options = {
const options = ({
range: {
from: dateTime(1432288354),
to: dateTime(1432288401),
@ -221,7 +277,7 @@ describe('PostgreSQLDatasource', () => {
hide: true,
},
],
};
} as unknown) as DataQueryRequest<PostgresQuery>;
const { ds } = setupTestContext({});
@ -233,40 +289,42 @@ describe('PostgreSQLDatasource', () => {
});
describe('When performing annotationQuery', () => {
it('should return annotation list', async () => {
const annotationName = 'MyAnno';
const options = {
annotation: {
name: annotationName,
rawQuery: 'select time, title, text, tags from table;',
},
range: {
from: dateTime(1432288354),
to: dateTime(1432288401),
},
};
const data = {
results: {
MyAnno: {
refId: annotationName,
tables: [
{
columns: [{ text: 'time' }, { text: 'text' }, { text: 'tags' }],
rows: [
[1432288355, 'some text', 'TagA,TagB'],
[1432288390, 'some text2', ' TagB , TagC'],
[1432288400, 'some text3'],
let results: any;
const annotationName = 'MyAnno';
const options = {
annotation: {
name: annotationName,
rawQuery: 'select time, title, text, tags from table;',
},
range: {
from: dateTime(1432288354),
to: dateTime(1432288401),
},
};
const response = {
results: {
MyAnno: {
frames: [
dataFrameToJSON(
new MutableDataFrame({
fields: [
{ name: 'time', values: [1432288355, 1432288390, 1432288400] },
{ name: 'text', values: ['some text', 'some text2', 'some text3'] },
{ name: 'tags', values: ['TagA,TagB', ' TagB , TagC', null] },
],
},
],
},
})
),
],
},
};
},
};
const { ds } = setupTestContext(data);
const results = await ds.annotationQuery(options);
beforeEach(async () => {
const { ds } = setupTestContext(response);
results = await ds.annotationQuery(options);
});
it('should return annotation list', async () => {
expect(results.length).toBe(3);
expect(results[0].text).toBe('some text');
@ -283,29 +341,28 @@ describe('PostgreSQLDatasource', () => {
describe('When performing metricFindQuery', () => {
it('should return list of all column values', async () => {
const query = 'select * from atable';
const data = {
const response = {
results: {
tempvar: {
meta: {
rowCount: 3,
},
refId: 'tempvar',
tables: [
{
columns: [{ text: 'title' }, { text: 'text' }],
rows: [
['aTitle', 'some text'],
['aTitle2', 'some text2'],
['aTitle3', 'some text3'],
],
},
frames: [
dataFrameToJSON(
new MutableDataFrame({
fields: [
{ name: 'title', values: ['aTitle', 'aTitle2', 'aTitle3'] },
{ name: 'text', values: ['some text', 'some text2', 'some text3'] },
],
meta: {
executedQueryString: 'select * from atable',
},
})
),
],
},
},
};
const { ds } = setupTestContext(data);
const { ds } = setupTestContext(response);
const results = await ds.metricFindQuery(query, {});
expect(results.length).toBe(6);
@ -317,29 +374,28 @@ describe('PostgreSQLDatasource', () => {
describe('When performing metricFindQuery with $__searchFilter and a searchFilter is given', () => {
it('should return list of all column values', async () => {
const query = "select title from atable where title LIKE '$__searchFilter'";
const data = {
const response = {
results: {
tempvar: {
meta: {
rowCount: 3,
},
refId: 'tempvar',
tables: [
{
columns: [{ text: 'title' }, { text: 'text' }],
rows: [
['aTitle', 'some text'],
['aTitle2', 'some text2'],
['aTitle3', 'some text3'],
],
},
frames: [
dataFrameToJSON(
new MutableDataFrame({
fields: [
{ name: 'title', values: ['aTitle', 'aTitle2', 'aTitle3'] },
{ name: 'text', values: ['some text', 'some text2', 'some text3'] },
],
meta: {
executedQueryString: 'select * from atable',
},
})
),
],
},
},
};
const { ds } = setupTestContext(data);
const { ds } = setupTestContext(response);
const results = await ds.metricFindQuery(query, { searchFilter: 'aTit' });
expect(fetchMock).toBeCalledTimes(1);
@ -348,10 +404,10 @@ describe('PostgreSQLDatasource', () => {
);
expect(results).toEqual([
{ text: 'aTitle' },
{ text: 'some text' },
{ text: 'aTitle2' },
{ text: 'some text2' },
{ text: 'aTitle3' },
{ text: 'some text' },
{ text: 'some text2' },
{ text: 'some text3' },
]);
});
@ -360,39 +416,38 @@ describe('PostgreSQLDatasource', () => {
describe('When performing metricFindQuery with $__searchFilter but no searchFilter is given', () => {
it('should return list of all column values', async () => {
const query = "select title from atable where title LIKE '$__searchFilter'";
const data = {
const response = {
results: {
tempvar: {
meta: {
rowCount: 3,
},
refId: 'tempvar',
tables: [
{
columns: [{ text: 'title' }, { text: 'text' }],
rows: [
['aTitle', 'some text'],
['aTitle2', 'some text2'],
['aTitle3', 'some text3'],
],
},
frames: [
dataFrameToJSON(
new MutableDataFrame({
fields: [
{ name: 'title', values: ['aTitle', 'aTitle2', 'aTitle3'] },
{ name: 'text', values: ['some text', 'some text2', 'some text3'] },
],
meta: {
executedQueryString: 'select * from atable',
},
})
),
],
},
},
};
const { ds } = setupTestContext(data);
const { ds } = setupTestContext(response);
const results = await ds.metricFindQuery(query, {});
expect(fetchMock).toBeCalledTimes(1);
expect(fetchMock.mock.calls[0][0].data.queries[0].rawSql).toBe("select title from atable where title LIKE '%'");
expect(results).toEqual([
{ text: 'aTitle' },
{ text: 'some text' },
{ text: 'aTitle2' },
{ text: 'some text2' },
{ text: 'aTitle3' },
{ text: 'some text' },
{ text: 'some text2' },
{ text: 'some text3' },
]);
});
@ -401,29 +456,27 @@ describe('PostgreSQLDatasource', () => {
describe('When performing metricFindQuery with key, value columns', () => {
it('should return list of as text, value', async () => {
const query = 'select * from atable';
const data = {
const response = {
results: {
tempvar: {
meta: {
rowCount: 3,
},
refId: 'tempvar',
tables: [
{
columns: [{ text: '__value' }, { text: '__text' }],
rows: [
['value1', 'aTitle'],
['value2', 'aTitle2'],
['value3', 'aTitle3'],
],
},
frames: [
dataFrameToJSON(
new MutableDataFrame({
fields: [
{ name: '__value', values: ['value1', 'value2', 'value3'] },
{ name: '__text', values: ['aTitle', 'aTitle2', 'aTitle3'] },
],
meta: {
executedQueryString: 'select * from atable',
},
})
),
],
},
},
};
const { ds } = setupTestContext(data);
const { ds } = setupTestContext(response);
const results = await ds.metricFindQuery(query, {});
expect(results).toEqual([
@ -437,29 +490,27 @@ describe('PostgreSQLDatasource', () => {
describe('When performing metricFindQuery with key, value columns and with duplicate keys', () => {
it('should return list of unique keys', async () => {
const query = 'select * from atable';
const data = {
const response = {
results: {
tempvar: {
meta: {
rowCount: 3,
},
refId: 'tempvar',
tables: [
{
columns: [{ text: '__text' }, { text: '__value' }],
rows: [
['aTitle', 'same'],
['aTitle', 'same'],
['aTitle', 'diff'],
],
},
frames: [
dataFrameToJSON(
new MutableDataFrame({
fields: [
{ name: '__text', values: ['aTitle', 'aTitle', 'aTitle'] },
{ name: '__value', values: ['same', 'same', 'diff'] },
],
meta: {
executedQueryString: 'select * from atable',
},
})
),
],
},
},
};
const { ds } = setupTestContext(data);
const { ds } = setupTestContext(response);
const results = await ds.metricFindQuery(query, {});
expect(results).toEqual([{ text: 'aTitle', value: 'same' }]);

View File

@ -1,4 +1,4 @@
import PostgresQuery from '../postgres_query';
import PostgresQueryModel from '../postgres_query_model';
import { TemplateSrv } from 'app/features/templating/template_srv';
describe('PostgresQuery', () => {
@ -9,17 +9,17 @@ describe('PostgresQuery', () => {
describe('When initializing', () => {
it('should not be in SQL mode', () => {
const query = new PostgresQuery({}, templateSrv);
const query = new PostgresQueryModel({}, templateSrv);
expect(query.target.rawQuery).toBe(false);
});
it('should be in SQL mode for pre query builder queries', () => {
const query = new PostgresQuery({ rawSql: 'SELECT 1' }, templateSrv);
const query = new PostgresQueryModel({ rawSql: 'SELECT 1' }, templateSrv);
expect(query.target.rawQuery).toBe(true);
});
});
describe('When generating time column SQL', () => {
const query = new PostgresQuery({}, templateSrv);
const query = new PostgresQueryModel({}, templateSrv);
query.target.timeColumn = 'time';
expect(query.buildTimeColumn()).toBe('time AS "time"');
@ -28,17 +28,20 @@ describe('PostgresQuery', () => {
});
describe('When generating time column SQL with group by time', () => {
let query = new PostgresQuery(
let query = new PostgresQueryModel(
{ timeColumn: 'time', group: [{ type: 'time', params: ['5m', 'none'] }] },
templateSrv
);
expect(query.buildTimeColumn()).toBe('$__timeGroupAlias(time,5m)');
expect(query.buildTimeColumn(false)).toBe('$__timeGroup(time,5m)');
query = new PostgresQuery({ timeColumn: 'time', group: [{ type: 'time', params: ['5m', 'NULL'] }] }, templateSrv);
query = new PostgresQueryModel(
{ timeColumn: 'time', group: [{ type: 'time', params: ['5m', 'NULL'] }] },
templateSrv
);
expect(query.buildTimeColumn()).toBe('$__timeGroupAlias(time,5m,NULL)');
query = new PostgresQuery(
query = new PostgresQueryModel(
{ timeColumn: 'time', timeColumnType: 'int4', group: [{ type: 'time', params: ['5m', 'none'] }] },
templateSrv
);
@ -47,7 +50,7 @@ describe('PostgresQuery', () => {
});
describe('When generating metric column SQL', () => {
const query = new PostgresQuery({}, templateSrv);
const query = new PostgresQueryModel({}, templateSrv);
query.target.metricColumn = 'host';
expect(query.buildMetricColumn()).toBe('host AS metric');
@ -56,7 +59,7 @@ describe('PostgresQuery', () => {
});
describe('When generating value column SQL', () => {
const query = new PostgresQuery({}, templateSrv);
const query = new PostgresQueryModel({}, templateSrv);
let column = [{ type: 'column', params: ['value'] }];
expect(query.buildValueColumn(column)).toBe('value');
@ -84,7 +87,7 @@ describe('PostgresQuery', () => {
});
describe('When generating value column SQL with metric column', () => {
const query = new PostgresQuery({}, templateSrv);
const query = new PostgresQueryModel({}, templateSrv);
query.target.metricColumn = 'host';
let column = [{ type: 'column', params: ['value'] }];
@ -124,7 +127,7 @@ describe('PostgresQuery', () => {
});
describe('When generating WHERE clause', () => {
const query = new PostgresQuery({ where: [] }, templateSrv);
const query = new PostgresQueryModel({ where: [] }, templateSrv);
expect(query.buildWhereClause()).toBe('');
@ -143,7 +146,7 @@ describe('PostgresQuery', () => {
});
describe('When generating GROUP BY clause', () => {
const query = new PostgresQuery({ group: [], metricColumn: 'none' }, templateSrv);
const query = new PostgresQueryModel({ group: [], metricColumn: 'none' }, templateSrv);
expect(query.buildGroupClause()).toBe('');
query.target.group = [{ type: 'time', params: ['5m'] }];
@ -160,7 +163,7 @@ describe('PostgresQuery', () => {
where: [],
};
let result = 'SELECT\n t AS "time",\n value\nFROM table\nORDER BY 1';
const query = new PostgresQuery(target, templateSrv);
const query = new PostgresQueryModel(target, templateSrv);
expect(query.buildQuery()).toBe(result);

View File

@ -1,13 +1,21 @@
import { MetricFindValue } from '@grafana/data';
import { DataQuery, DataSourceJsonData } from '@grafana/data';
export interface PostgresQueryForInterpolation {
alias?: any;
format?: any;
rawSql?: any;
refId?: any;
refId: any;
hide?: any;
}
export interface PostgresMetricFindValue extends MetricFindValue {
value?: string;
export interface PostgresOptions extends DataSourceJsonData {
timeInterval: string;
}
export type ResultFormat = 'time_series' | 'table';
export interface PostgresQuery extends DataQuery {
alias?: string;
format?: ResultFormat;
rawSql?: any;
}