Mirror of https://github.com/grafana/grafana.git (synced 2025-02-25 18:55:37 -06:00)
mysql: fix precision for the time column in table/annotation query mode
Use the ConvertSqlTimeColumnToEpochMs function to convert any native datetime data type or epoch time to millisecond precision. Refactored the mysql implementation to make it more similar to the postgres and mssql implementations. Added a $__timeEpoch macro function with the same implementation as $__time. Added the possibility to use a time column named time in addition to the currently supported time_sec. Added tests and updated existing ones. Added a test dashboard.
This commit is contained in:
parent
66c03f84f5
commit
f5654f88e2
docker/blocks/mysql_tests
pkg/tsdb/mysql
public/app/plugins/datasource/mysql
docker/blocks/mysql_tests/dashboard.json: new file, 2350 lines (file diff suppressed because it is too large)
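As a quick illustration of the conversion the commit message describes, here is a simplified, hypothetical sketch (not the actual shared tsdb helper): whatever native type the time column has, it ends up as epoch milliseconds. The sample value reuses a datetime from the tests below.

package main

import (
    "fmt"
    "time"
)

// toEpochMs is a trimmed-down stand-in for the shared ConvertSqlTimeColumnToEpochMs
// helper: native datetime values and epoch timestamps all become milliseconds.
func toEpochMs(value interface{}) (float64, error) {
    switch v := value.(type) {
    case time.Time:
        return float64(v.UnixNano() / 1e6), nil
    case int64: // epoch seconds
        return float64(v * 1000), nil
    case float64: // epoch seconds
        return v * 1000, nil
    default:
        return 0, fmt.Errorf("unsupported time column type %T", value)
    }
}

func main() {
    ms, _ := toEpochMs(time.Date(2018, 1, 1, 0, 1, 1, 123456000, time.UTC))
    fmt.Println(ms) // 1.514764861123e+12
}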
@@ -68,7 +68,7 @@ func replaceAllStringSubmatchFunc(re *regexp.Regexp, str string, repl func([]str
func (m *MySqlMacroEngine) evaluateMacro(name string, args []string) (string, error) {
switch name {
case "__time":
case "__timeEpoch", "__time":
if len(args) == 0 {
return "", fmt.Errorf("missing time column argument for macro %v", name)
}
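For reference, a hypothetical before/after of what the new alias means for a raw query (table and column names are examples; the expansion shown follows the macro help text updated later in this diff):

package main

import "fmt"

func main() {
    // As typed in the query editor (hypothetical column and table names).
    raw := "SELECT $__timeEpoch(time_date_time), value FROM metric ORDER BY 1"

    // __timeEpoch now takes the same branch as __time, so the interpolated
    // SQL comes out in the familiar UNIX_TIMESTAMP(...) as time_sec form.
    expanded := "SELECT UNIX_TIMESTAMP(time_date_time) as time_sec, value FROM metric ORDER BY 1"

    fmt.Println(raw)
    fmt.Println(expanded)
}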
@@ -81,7 +81,7 @@ func (e MysqlQueryEndpoint) transformToTable(query *tsdb.Query, rows *core.Rows,
// check if there is a column named time
for i, col := range columnNames {
switch col {
case "time_sec":
case "time", "time_sec":
timeIndex = i
}
}
@@ -96,13 +96,10 @@ func (e MysqlQueryEndpoint) transformToTable(query *tsdb.Query, rows *core.Rows,
return err
}
// for annotations, convert to epoch
if timeIndex != -1 {
switch value := values[timeIndex].(type) {
case time.Time:
values[timeIndex] = float64(value.UnixNano() / 1e9)
}
}
// converts column named time to unix timestamp in milliseconds to make
// native mysql datetime types and epoch dates work in
// annotation and table queries.
tsdb.ConvertSqlTimeColumnToEpochMs(values, timeIndex)
table.Rows = append(table.Rows, values)
}
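As a small, self-contained sketch of the precision change in this hunk (the sample timestamp reuses the 2018-03-14 21:20:06.527 value from the tests): the removed branch produced whole seconds, which the frontend then multiplied by 1000, while the new conversion keeps millisecond precision using the same arithmetic as the time series code below.

package main

import (
    "fmt"
    "time"
)

func main() {
    v := time.Date(2018, 3, 14, 21, 20, 6, 527e6, time.UTC) // sample value with 527 ms

    oldEpoch := float64(v.UnixNano() / 1e9) // previous behaviour: whole seconds
    newEpoch := float64(v.UnixNano() / 1e6) // converted value: epoch milliseconds

    fmt.Println(oldEpoch) // 1.521062406e+09
    fmt.Println(newEpoch) // 1.521062406527e+12
}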
@@ -185,9 +182,37 @@ func (e MysqlQueryEndpoint) transformToTimeSeries(query *tsdb.Query, rows *core.
return err
}
rowData := NewStringStringScan(columnNames)
columnTypes, err := rows.ColumnTypes()
if err != nil {
return err
}
rowLimit := 1000000
rowCount := 0
timeIndex := -1
metricIndex := -1
// check columns of resultset: a column named time is mandatory
// the first text column is treated as metric name unless a column named metric is present
for i, col := range columnNames {
switch col {
case "time", "time_sec":
timeIndex = i
case "metric":
metricIndex = i
default:
if metricIndex == -1 {
switch columnTypes[i].DatabaseTypeName() {
case "CHAR", "VARCHAR", "TINYTEXT", "TEXT", "MEDIUMTEXT", "LONGTEXT":
metricIndex = i
}
}
}
}
if timeIndex == -1 {
return fmt.Errorf("Found no column named time or time_sec")
}
fillMissing := query.Model.Get("fill").MustBool(false)
var fillInterval float64
@@ -198,53 +223,90 @@ func (e MysqlQueryEndpoint) transformToTimeSeries(query *tsdb.Query, rows *core.
fillValue.Float64 = query.Model.Get("fillValue").MustFloat64()
fillValue.Valid = true
}
}
for ; rows.Next(); rowCount++ {
for rows.Next() {
var timestamp float64
var value null.Float
var metric string
if rowCount > rowLimit {
return fmt.Errorf("MySQL query row limit exceeded, limit %d", rowLimit)
return fmt.Errorf("PostgreSQL query row limit exceeded, limit %d", rowLimit)
}
err := rowData.Update(rows.Rows)
values, err := e.getTypedRowData(rows)
if err != nil {
e.log.Error("MySQL response parsing", "error", err)
return fmt.Errorf("MySQL response parsing error %v", err)
return err
}
if rowData.metric == "" {
rowData.metric = "Unknown"
switch columnValue := values[timeIndex].(type) {
case int64:
timestamp = float64(columnValue * 1000)
case float64:
timestamp = columnValue * 1000
case time.Time:
timestamp = float64(columnValue.UnixNano() / 1e6)
default:
return fmt.Errorf("Invalid type for column time, must be of type timestamp or unix timestamp, got: %T %v", columnValue, columnValue)
}
if !rowData.time.Valid {
return fmt.Errorf("Found row with no time value")
}
series, exist := pointsBySeries[rowData.metric]
if exist == false {
series = &tsdb.TimeSeries{Name: rowData.metric}
pointsBySeries[rowData.metric] = series
seriesByQueryOrder.PushBack(rowData.metric)
}
if fillMissing {
var intervalStart float64
if exist == false {
intervalStart = float64(tsdbQuery.TimeRange.MustGetFrom().UnixNano() / 1e6)
if metricIndex >= 0 {
if columnValue, ok := values[metricIndex].(string); ok == true {
metric = columnValue
} else {
intervalStart = series.Points[len(series.Points)-1][1].Float64 + fillInterval
}
// align interval start
intervalStart = math.Floor(intervalStart/fillInterval) * fillInterval
for i := intervalStart; i < rowData.time.Float64; i += fillInterval {
series.Points = append(series.Points, tsdb.TimePoint{fillValue, null.FloatFrom(i)})
rowCount++
return fmt.Errorf("Column metric must be of type char,varchar or text, got: %T %v", values[metricIndex], values[metricIndex])
}
}
series.Points = append(series.Points, tsdb.TimePoint{rowData.value, rowData.time})
for i, col := range columnNames {
if i == timeIndex || i == metricIndex {
continue
}
switch columnValue := values[i].(type) {
case int64:
value = null.FloatFrom(float64(columnValue))
case float64:
value = null.FloatFrom(columnValue)
case nil:
value.Valid = false
default:
return fmt.Errorf("Value column must have numeric datatype, column: %s type: %T value: %v", col, columnValue, columnValue)
}
if metricIndex == -1 {
metric = col
}
series, exist := pointsBySeries[metric]
if exist == false {
series = &tsdb.TimeSeries{Name: metric}
pointsBySeries[metric] = series
seriesByQueryOrder.PushBack(metric)
}
if fillMissing {
var intervalStart float64
if exist == false {
intervalStart = float64(tsdbQuery.TimeRange.MustGetFrom().UnixNano() / 1e6)
} else {
intervalStart = series.Points[len(series.Points)-1][1].Float64 + fillInterval
}
// align interval start
intervalStart = math.Floor(intervalStart/fillInterval) * fillInterval
for i := intervalStart; i < timestamp; i += fillInterval {
series.Points = append(series.Points, tsdb.TimePoint{fillValue, null.FloatFrom(i)})
rowCount++
}
}
series.Points = append(series.Points, tsdb.TimePoint{value, null.FloatFrom(timestamp)})
e.log.Debug("Rows", "metric", metric, "time", timestamp, "value", value)
rowCount++
}
}
for elem := seriesByQueryOrder.Front(); elem != nil; elem = elem.Next() {
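To make the fill-missing logic above concrete, a standalone sketch with invented numbers: a 5-minute fill interval, the previous point at 13:02 and the next row at 13:20 (all epoch milliseconds). The interval start is aligned down to the nearest interval boundary before filler points are emitted.

package main

import (
    "fmt"
    "math"
)

func main() {
    // Invented example values mirroring the fill branch above.
    const fillInterval = 5 * 60 * 1000.0 // 5 minutes in ms
    lastPoint := 1521032520000.0         // 2018-03-14 13:02:00 UTC
    rowTimestamp := 1521033600000.0      // 2018-03-14 13:20:00 UTC

    intervalStart := lastPoint + fillInterval
    // align interval start, as the endpoint does
    intervalStart = math.Floor(intervalStart/fillInterval) * fillInterval

    for i := intervalStart; i < rowTimestamp; i += fillInterval {
        fmt.Println("fill point at", int64(i)) // 13:05, 13:10 and 13:15
    }
}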
@@ -269,62 +331,3 @@ func (e MysqlQueryEndpoint) transformToTimeSeries(query *tsdb.Query, rows *core.
result.Meta.Set("rowCount", rowCount)
return nil
}
type stringStringScan struct {
rowPtrs []interface{}
rowValues []string
columnNames []string
columnCount int
time null.Float
value null.Float
metric string
}
func NewStringStringScan(columnNames []string) *stringStringScan {
s := &stringStringScan{
columnCount: len(columnNames),
columnNames: columnNames,
rowPtrs: make([]interface{}, len(columnNames)),
rowValues: make([]string, len(columnNames)),
}
for i := 0; i < s.columnCount; i++ {
s.rowPtrs[i] = new(sql.RawBytes)
}
return s
}
func (s *stringStringScan) Update(rows *sql.Rows) error {
if err := rows.Scan(s.rowPtrs...); err != nil {
return err
}
s.time = null.FloatFromPtr(nil)
s.value = null.FloatFromPtr(nil)
for i := 0; i < s.columnCount; i++ {
if rb, ok := s.rowPtrs[i].(*sql.RawBytes); ok {
s.rowValues[i] = string(*rb)
switch s.columnNames[i] {
case "time_sec":
if sec, err := strconv.ParseInt(s.rowValues[i], 10, 64); err == nil {
s.time = null.FloatFrom(float64(sec * 1000))
}
case "value":
if value, err := strconv.ParseFloat(s.rowValues[i], 64); err == nil {
s.value = null.FloatFrom(value)
}
case "metric":
s.metric = s.rowValues[i]
}
*rb = nil // reset pointer to discard current value to avoid a bug
} else {
return fmt.Errorf("Cannot convert index %d column %s to type *sql.RawBytes", i, s.columnNames[i])
}
}
return nil
}
@@ -1,6 +1,8 @@
package mysql
import (
"fmt"
"math/rand"
"testing"
"time"
@@ -14,8 +16,12 @@ import (
// To run this test, remove the Skip from SkipConvey
// and set up a MySQL db named grafana_tests and a user/password grafana/password
// Use the docker/blocks/mysql_tests/docker-compose.yaml to spin up a
// preconfigured MySQL server suitable for running these tests.
// Thers's also a dashboard.json in same directory that you can import to Grafana
// once you've created a datasource for the test server/database.
func TestMySQL(t *testing.T) {
SkipConvey("MySQL", t, func() {
Convey("MySQL", t, func() {
x := InitMySQLTestDB(t)
endpoint := &MysqlQueryEndpoint{
@@ -29,110 +35,621 @@ func TestMySQL(t *testing.T) {
sess := x.NewSession()
defer sess.Close()
sql := "CREATE TABLE `mysql_types` ("
sql += "`atinyint` tinyint(1) NOT NULL,"
sql += "`avarchar` varchar(3) NOT NULL,"
sql += "`achar` char(3),"
sql += "`amediumint` mediumint NOT NULL,"
sql += "`asmallint` smallint NOT NULL,"
sql += "`abigint` bigint NOT NULL,"
sql += "`aint` int(11) NOT NULL,"
sql += "`adouble` double(10,2),"
sql += "`anewdecimal` decimal(10,2),"
sql += "`afloat` float(10,2) NOT NULL,"
sql += "`atimestamp` timestamp NOT NULL,"
sql += "`adatetime` datetime NOT NULL,"
sql += "`atime` time NOT NULL,"
// sql += "`ayear` year," // Crashes xorm when running cleandb
sql += "`abit` bit(1),"
sql += "`atinytext` tinytext,"
sql += "`atinyblob` tinyblob,"
sql += "`atext` text,"
sql += "`ablob` blob,"
sql += "`amediumtext` mediumtext,"
sql += "`amediumblob` mediumblob,"
sql += "`alongtext` longtext,"
sql += "`alongblob` longblob,"
sql += "`aenum` enum('val1', 'val2'),"
sql += "`aset` set('a', 'b', 'c', 'd'),"
sql += "`adate` date,"
sql += "`time_sec` datetime(6),"
sql += "`aintnull` int(11),"
sql += "`afloatnull` float(10,2),"
sql += "`avarcharnull` varchar(3),"
sql += "`adecimalnull` decimal(10,2)"
sql += ") ENGINE=InnoDB DEFAULT CHARSET=latin1;"
_, err := sess.Exec(sql)
So(err, ShouldBeNil)
fromStart := time.Date(2018, 3, 15, 13, 0, 0, 0, time.Local)
sql = "INSERT INTO `mysql_types` "
sql += "(`atinyint`, `avarchar`, `achar`, `amediumint`, `asmallint`, `abigint`, `aint`, `adouble`, "
sql += "`anewdecimal`, `afloat`, `adatetime`, `atimestamp`, `atime`, `abit`, `atinytext`, "
sql += "`atinyblob`, `atext`, `ablob`, `amediumtext`, `amediumblob`, `alongtext`, `alongblob`, "
sql += "`aenum`, `aset`, `adate`, `time_sec`) "
sql += "VALUES(1, 'abc', 'def', 1, 10, 100, 1420070400, 1.11, "
sql += "2.22, 3.33, now(), current_timestamp(), '11:11:11', 1, 'tinytext', "
sql += "'tinyblob', 'text', 'blob', 'mediumtext', 'mediumblob', 'longtext', 'longblob', "
sql += "'val2', 'a,b', curdate(), '2018-01-01 00:01:01.123456');"
_, err = sess.Exec(sql)
So(err, ShouldBeNil)
Convey("Query with Table format should map MySQL column types to Go types", func() {
query := &tsdb.TsdbQuery{
Queries: []*tsdb.Query{
{
Model: simplejson.NewFromAny(map[string]interface{}{
"rawSql": "SELECT * FROM mysql_types",
"format": "table",
}),
RefId: "A",
},
},
Convey("Given a table with different native data types", func() {
if exists, err := sess.IsTableExist("mysql_types"); err != nil || exists {
So(err, ShouldBeNil)
sess.DropTable("mysql_types")
}
resp, err := endpoint.Query(nil, nil, query)
queryResult := resp.Results["A"]
sql := "CREATE TABLE `mysql_types` ("
sql += "`atinyint` tinyint(1) NOT NULL,"
sql += "`avarchar` varchar(3) NOT NULL,"
sql += "`achar` char(3),"
sql += "`amediumint` mediumint NOT NULL,"
sql += "`asmallint` smallint NOT NULL,"
sql += "`abigint` bigint NOT NULL,"
sql += "`aint` int(11) NOT NULL,"
sql += "`adouble` double(10,2),"
sql += "`anewdecimal` decimal(10,2),"
sql += "`afloat` float(10,2) NOT NULL,"
sql += "`atimestamp` timestamp NOT NULL,"
sql += "`adatetime` datetime NOT NULL,"
sql += "`atime` time NOT NULL,"
sql += "`ayear` year," // Crashes xorm when running cleandb
sql += "`abit` bit(1),"
sql += "`atinytext` tinytext,"
sql += "`atinyblob` tinyblob,"
sql += "`atext` text,"
sql += "`ablob` blob,"
sql += "`amediumtext` mediumtext,"
sql += "`amediumblob` mediumblob,"
sql += "`alongtext` longtext,"
sql += "`alongblob` longblob,"
sql += "`aenum` enum('val1', 'val2'),"
sql += "`aset` set('a', 'b', 'c', 'd'),"
sql += "`adate` date,"
sql += "`time_sec` datetime(6),"
sql += "`aintnull` int(11),"
sql += "`afloatnull` float(10,2),"
sql += "`avarcharnull` varchar(3),"
sql += "`adecimalnull` decimal(10,2)"
sql += ") ENGINE=InnoDB DEFAULT CHARSET=latin1;"
_, err := sess.Exec(sql)
So(err, ShouldBeNil)
column := queryResult.Tables[0].Rows[0]
sql = "INSERT INTO `mysql_types` "
sql += "(`atinyint`, `avarchar`, `achar`, `amediumint`, `asmallint`, `abigint`, `aint`, `adouble`, "
sql += "`anewdecimal`, `afloat`, `adatetime`, `atimestamp`, `atime`, `ayear`, `abit`, `atinytext`, "
sql += "`atinyblob`, `atext`, `ablob`, `amediumtext`, `amediumblob`, `alongtext`, `alongblob`, "
sql += "`aenum`, `aset`, `adate`, `time_sec`) "
sql += "VALUES(1, 'abc', 'def', 1, 10, 100, 1420070400, 1.11, "
sql += "2.22, 3.33, now(), current_timestamp(), '11:11:11', '2018', 1, 'tinytext', "
sql += "'tinyblob', 'text', 'blob', 'mediumtext', 'mediumblob', 'longtext', 'longblob', "
sql += "'val2', 'a,b', curdate(), '2018-01-01 00:01:01.123456');"
_, err = sess.Exec(sql)
So(err, ShouldBeNil)
So(*column[0].(*int8), ShouldEqual, 1)
So(column[1].(string), ShouldEqual, "abc")
So(column[2].(string), ShouldEqual, "def")
So(*column[3].(*int32), ShouldEqual, 1)
So(*column[4].(*int16), ShouldEqual, 10)
So(*column[5].(*int64), ShouldEqual, 100)
So(*column[6].(*int32), ShouldEqual, 1420070400)
So(column[7].(float64), ShouldEqual, 1.11)
So(column[8].(float64), ShouldEqual, 2.22)
So(*column[9].(*float32), ShouldEqual, 3.33)
_, offset := time.Now().Zone()
So(column[10].(time.Time), ShouldHappenWithin, time.Duration(10*time.Second), time.Now().Add(time.Duration(offset)*time.Second))
So(column[11].(time.Time), ShouldHappenWithin, time.Duration(10*time.Second), time.Now().Add(time.Duration(offset)*time.Second))
So(column[12].(string), ShouldEqual, "11:11:11")
So(*column[13].(*[]byte), ShouldHaveSameTypeAs, []byte{1})
So(column[14].(string), ShouldEqual, "tinytext")
So(column[15].(string), ShouldEqual, "tinyblob")
So(column[16].(string), ShouldEqual, "text")
So(column[17].(string), ShouldEqual, "blob")
So(column[18].(string), ShouldEqual, "mediumtext")
So(column[19].(string), ShouldEqual, "mediumblob")
So(column[20].(string), ShouldEqual, "longtext")
So(column[21].(string), ShouldEqual, "longblob")
So(column[22].(string), ShouldEqual, "val2")
So(column[23].(string), ShouldEqual, "a,b")
So(column[24].(time.Time).Format("2006-01-02T00:00:00Z"), ShouldEqual, time.Now().Format("2006-01-02T00:00:00Z"))
So(column[25].(float64), ShouldEqual, 1514764861)
So(column[26], ShouldEqual, nil)
So(column[27], ShouldEqual, nil)
So(column[28], ShouldEqual, "")
So(column[29], ShouldEqual, nil)
Convey("Query with Table format should map MySQL column types to Go types", func() {
query := &tsdb.TsdbQuery{
Queries: []*tsdb.Query{
{
Model: simplejson.NewFromAny(map[string]interface{}{
"rawSql": "SELECT * FROM mysql_types",
"format": "table",
}),
RefId: "A",
},
},
}
resp, err := endpoint.Query(nil, nil, query)
So(err, ShouldBeNil)
queryResult := resp.Results["A"]
So(queryResult.Error, ShouldBeNil)
column := queryResult.Tables[0].Rows[0]
So(*column[0].(*int8), ShouldEqual, 1)
So(column[1].(string), ShouldEqual, "abc")
So(column[2].(string), ShouldEqual, "def")
So(*column[3].(*int32), ShouldEqual, 1)
So(*column[4].(*int16), ShouldEqual, 10)
So(*column[5].(*int64), ShouldEqual, 100)
So(*column[6].(*int32), ShouldEqual, 1420070400)
So(column[7].(float64), ShouldEqual, 1.11)
So(column[8].(float64), ShouldEqual, 2.22)
So(*column[9].(*float32), ShouldEqual, 3.33)
_, offset := time.Now().Zone()
So(column[10].(time.Time), ShouldHappenWithin, time.Duration(10*time.Second), time.Now().Add(time.Duration(offset)*time.Second))
So(column[11].(time.Time), ShouldHappenWithin, time.Duration(10*time.Second), time.Now().Add(time.Duration(offset)*time.Second))
So(column[12].(string), ShouldEqual, "11:11:11")
So(column[13].(int64), ShouldEqual, 2018)
So(*column[14].(*[]byte), ShouldHaveSameTypeAs, []byte{1})
So(column[15].(string), ShouldEqual, "tinytext")
So(column[16].(string), ShouldEqual, "tinyblob")
So(column[17].(string), ShouldEqual, "text")
So(column[18].(string), ShouldEqual, "blob")
So(column[19].(string), ShouldEqual, "mediumtext")
So(column[20].(string), ShouldEqual, "mediumblob")
So(column[21].(string), ShouldEqual, "longtext")
So(column[22].(string), ShouldEqual, "longblob")
So(column[23].(string), ShouldEqual, "val2")
So(column[24].(string), ShouldEqual, "a,b")
So(column[25].(time.Time).Format("2006-01-02T00:00:00Z"), ShouldEqual, time.Now().Format("2006-01-02T00:00:00Z"))
So(column[26].(float64), ShouldEqual, float64(1514764861000))
So(column[27], ShouldEqual, nil)
So(column[28], ShouldEqual, nil)
So(column[29], ShouldEqual, "")
So(column[30], ShouldEqual, nil)
})
})
Convey("Given a table with metrics that lacks data for some series ", func() {
type metric struct {
Time time.Time
Value int64
}
if exist, err := sess.IsTableExist(metric{}); err != nil || exist {
So(err, ShouldBeNil)
sess.DropTable(metric{})
}
err := sess.CreateTable(metric{})
So(err, ShouldBeNil)
series := []*metric{}
firstRange := genTimeRangeByInterval(fromStart, 10*time.Minute, 10*time.Second)
secondRange := genTimeRangeByInterval(fromStart.Add(20*time.Minute), 10*time.Minute, 10*time.Second)
for _, t := range firstRange {
series = append(series, &metric{
Time: t,
Value: 15,
})
}
for _, t := range secondRange {
series = append(series, &metric{
Time: t,
Value: 20,
})
}
for _, s := range series {
_, err = sess.Insert(s)
So(err, ShouldBeNil)
}
Convey("When doing a metric query using timeGroup", func() {
query := &tsdb.TsdbQuery{
Queries: []*tsdb.Query{
{
Model: simplejson.NewFromAny(map[string]interface{}{
"rawSql": "SELECT $__timeGroup(time, '5m') as time_sec, avg(value) as value FROM metric GROUP BY 1 ORDER BY 1",
"format": "time_series",
}),
RefId: "A",
},
},
}
resp, err := endpoint.Query(nil, nil, query)
So(err, ShouldBeNil)
queryResult := resp.Results["A"]
So(queryResult.Error, ShouldBeNil)
points := queryResult.Series[0].Points
So(len(points), ShouldEqual, 6)
dt := fromStart
for i := 0; i < 3; i++ {
aValue := points[i][0].Float64
aTime := time.Unix(int64(points[i][1].Float64)/1000, 0)
So(aValue, ShouldEqual, 15)
So(aTime, ShouldEqual, dt)
dt = dt.Add(5 * time.Minute)
}
// adjust for 5 minute gap
dt = dt.Add(5 * time.Minute)
for i := 3; i < 6; i++ {
aValue := points[i][0].Float64
aTime := time.Unix(int64(points[i][1].Float64)/1000, 0)
So(aValue, ShouldEqual, 20)
So(aTime, ShouldEqual, dt)
dt = dt.Add(5 * time.Minute)
}
})
Convey("When doing a metric query using timeGroup with NULL fill enabled", func() {
query := &tsdb.TsdbQuery{
Queries: []*tsdb.Query{
{
Model: simplejson.NewFromAny(map[string]interface{}{
"rawSql": "SELECT $__timeGroup(time, '5m', NULL) as time_sec, avg(value) as value FROM metric GROUP BY 1 ORDER BY 1",
"format": "time_series",
}),
RefId: "A",
},
},
TimeRange: &tsdb.TimeRange{
From: fmt.Sprintf("%v", fromStart.Unix()*1000),
To: fmt.Sprintf("%v", fromStart.Add(34*time.Minute).Unix()*1000),
},
}
resp, err := endpoint.Query(nil, nil, query)
So(err, ShouldBeNil)
queryResult := resp.Results["A"]
So(queryResult.Error, ShouldBeNil)
points := queryResult.Series[0].Points
So(len(points), ShouldEqual, 7)
dt := fromStart
for i := 0; i < 3; i++ {
aValue := points[i][0].Float64
aTime := time.Unix(int64(points[i][1].Float64)/1000, 0)
So(aValue, ShouldEqual, 15)
So(aTime, ShouldEqual, dt)
dt = dt.Add(5 * time.Minute)
}
So(points[3][0].Valid, ShouldBeFalse)
// adjust for 5 minute gap
dt = dt.Add(5 * time.Minute)
for i := 4; i < 7; i++ {
aValue := points[i][0].Float64
aTime := time.Unix(int64(points[i][1].Float64)/1000, 0)
So(aValue, ShouldEqual, 20)
So(aTime, ShouldEqual, dt)
dt = dt.Add(5 * time.Minute)
}
})
Convey("When doing a metric query using timeGroup with float fill enabled", func() {
query := &tsdb.TsdbQuery{
Queries: []*tsdb.Query{
{
Model: simplejson.NewFromAny(map[string]interface{}{
"rawSql": "SELECT $__timeGroup(time, '5m', 1.5) as time_sec, avg(value) as value FROM metric GROUP BY 1 ORDER BY 1",
"format": "time_series",
}),
RefId: "A",
},
},
TimeRange: &tsdb.TimeRange{
From: fmt.Sprintf("%v", fromStart.Unix()*1000),
To: fmt.Sprintf("%v", fromStart.Add(34*time.Minute).Unix()*1000),
},
}
resp, err := endpoint.Query(nil, nil, query)
So(err, ShouldBeNil)
queryResult := resp.Results["A"]
So(queryResult.Error, ShouldBeNil)
points := queryResult.Series[0].Points
So(points[3][0].Float64, ShouldEqual, 1.5)
})
})
Convey("Given a table with metrics having multiple values and measurements", func() {
type metric_values struct {
Time time.Time
Measurement string
ValueOne int64 `xorm:"integer 'valueOne'"`
ValueTwo int64 `xorm:"integer 'valueTwo'"`
}
if exist, err := sess.IsTableExist(metric_values{}); err != nil || exist {
So(err, ShouldBeNil)
sess.DropTable(metric_values{})
}
err := sess.CreateTable(metric_values{})
So(err, ShouldBeNil)
rand.Seed(time.Now().Unix())
rnd := func(min, max int64) int64 {
return rand.Int63n(max-min) + min
}
series := []*metric_values{}
for _, t := range genTimeRangeByInterval(fromStart.Add(-30*time.Minute), 90*time.Minute, 5*time.Minute) {
series = append(series, &metric_values{
Time: t,
Measurement: "Metric A",
ValueOne: rnd(0, 100),
ValueTwo: rnd(0, 100),
})
series = append(series, &metric_values{
Time: t,
Measurement: "Metric B",
ValueOne: rnd(0, 100),
ValueTwo: rnd(0, 100),
})
}
for _, s := range series {
_, err := sess.Insert(s)
So(err, ShouldBeNil)
}
Convey("When doing a metric query grouping by time and select metric column should return correct series", func() {
query := &tsdb.TsdbQuery{
Queries: []*tsdb.Query{
{
Model: simplejson.NewFromAny(map[string]interface{}{
"rawSql": `SELECT $__time(time), CONCAT(measurement, ' - value one') as metric, valueOne FROM metric_values ORDER BY 1`,
"format": "time_series",
}),
RefId: "A",
},
},
}
resp, err := endpoint.Query(nil, nil, query)
So(err, ShouldBeNil)
queryResult := resp.Results["A"]
So(queryResult.Error, ShouldBeNil)
So(len(queryResult.Series), ShouldEqual, 2)
So(queryResult.Series[0].Name, ShouldEqual, "Metric B - value one")
So(queryResult.Series[1].Name, ShouldEqual, "Metric A - value one")
})
Convey("When doing a metric query grouping by time should return correct series", func() {
query := &tsdb.TsdbQuery{
Queries: []*tsdb.Query{
{
Model: simplejson.NewFromAny(map[string]interface{}{
"rawSql": `SELECT $__time(time), valueOne, valueTwo FROM metric_values ORDER BY 1`,
"format": "time_series",
}),
RefId: "A",
},
},
}
resp, err := endpoint.Query(nil, nil, query)
So(err, ShouldBeNil)
queryResult := resp.Results["A"]
So(queryResult.Error, ShouldBeNil)
So(len(queryResult.Series), ShouldEqual, 2)
So(queryResult.Series[0].Name, ShouldEqual, "valueOne")
So(queryResult.Series[1].Name, ShouldEqual, "valueTwo")
})
})
Convey("Given a table with event data", func() {
type event struct {
TimeSec int64
Description string
Tags string
}
if exist, err := sess.IsTableExist(event{}); err != nil || exist {
So(err, ShouldBeNil)
sess.DropTable(event{})
}
err := sess.CreateTable(event{})
So(err, ShouldBeNil)
events := []*event{}
for _, t := range genTimeRangeByInterval(fromStart.Add(-20*time.Minute), 60*time.Minute, 25*time.Minute) {
events = append(events, &event{
TimeSec: t.Unix(),
Description: "Someone deployed something",
Tags: "deploy",
})
events = append(events, &event{
TimeSec: t.Add(5 * time.Minute).Unix(),
Description: "New support ticket registered",
Tags: "ticket",
})
}
for _, e := range events {
_, err = sess.Insert(e)
So(err, ShouldBeNil)
}
Convey("When doing an annotation query of deploy events should return expected result", func() {
query := &tsdb.TsdbQuery{
Queries: []*tsdb.Query{
{
Model: simplejson.NewFromAny(map[string]interface{}{
"rawSql": `SELECT time_sec, description as text, tags FROM event WHERE $__unixEpochFilter(time_sec) AND tags='deploy' ORDER BY 1 ASC`,
"format": "table",
}),
RefId: "Deploys",
},
},
TimeRange: &tsdb.TimeRange{
From: fmt.Sprintf("%v", fromStart.Add(-20*time.Minute).Unix()*1000),
To: fmt.Sprintf("%v", fromStart.Add(40*time.Minute).Unix()*1000),
},
}
resp, err := endpoint.Query(nil, nil, query)
queryResult := resp.Results["Deploys"]
So(err, ShouldBeNil)
So(len(queryResult.Tables[0].Rows), ShouldEqual, 3)
})
Convey("When doing an annotation query of ticket events should return expected result", func() {
query := &tsdb.TsdbQuery{
Queries: []*tsdb.Query{
{
Model: simplejson.NewFromAny(map[string]interface{}{
"rawSql": `SELECT time_sec, description as text, tags FROM event WHERE $__unixEpochFilter(time_sec) AND tags='ticket' ORDER BY 1 ASC`,
"format": "table",
}),
RefId: "Tickets",
},
},
TimeRange: &tsdb.TimeRange{
From: fmt.Sprintf("%v", fromStart.Add(-20*time.Minute).Unix()*1000),
To: fmt.Sprintf("%v", fromStart.Add(40*time.Minute).Unix()*1000),
},
}
resp, err := endpoint.Query(nil, nil, query)
queryResult := resp.Results["Tickets"]
So(err, ShouldBeNil)
So(len(queryResult.Tables[0].Rows), ShouldEqual, 3)
})
Convey("When doing an annotation query with a time column in datetime format", func() {
dt := time.Date(2018, 3, 14, 21, 20, 6, 0, time.UTC)
dtFormat := "2006-01-02 15:04:05.999999999"
query := &tsdb.TsdbQuery{
Queries: []*tsdb.Query{
{
Model: simplejson.NewFromAny(map[string]interface{}{
"rawSql": fmt.Sprintf(`SELECT
CAST('%s' as datetime) as time_sec,
'message' as text,
'tag1,tag2' as tags
`, dt.Format(dtFormat)),
"format": "table",
}),
RefId: "A",
},
},
}
resp, err := endpoint.Query(nil, nil, query)
So(err, ShouldBeNil)
queryResult := resp.Results["A"]
So(queryResult.Error, ShouldBeNil)
So(len(queryResult.Tables[0].Rows), ShouldEqual, 1)
columns := queryResult.Tables[0].Rows[0]
//Should be in milliseconds
So(columns[0].(float64), ShouldEqual, float64(dt.Unix()*1000))
})
Convey("When doing an annotation query with a time column in epoch second format should return ms", func() {
dt := time.Date(2018, 3, 14, 21, 20, 6, 527e6, time.UTC)
query := &tsdb.TsdbQuery{
Queries: []*tsdb.Query{
{
Model: simplejson.NewFromAny(map[string]interface{}{
"rawSql": fmt.Sprintf(`SELECT
%d as time_sec,
'message' as text,
'tag1,tag2' as tags
`, dt.Unix()),
"format": "table",
}),
RefId: "A",
},
},
}
resp, err := endpoint.Query(nil, nil, query)
So(err, ShouldBeNil)
queryResult := resp.Results["A"]
So(queryResult.Error, ShouldBeNil)
So(len(queryResult.Tables[0].Rows), ShouldEqual, 1)
columns := queryResult.Tables[0].Rows[0]
//Should be in milliseconds
So(columns[0].(int64), ShouldEqual, dt.Unix()*1000)
})
Convey("When doing an annotation query with a time column in epoch second format (signed integer) should return ms", func() {
dt := time.Date(2018, 3, 14, 21, 20, 6, 0, time.Local)
query := &tsdb.TsdbQuery{
Queries: []*tsdb.Query{
{
Model: simplejson.NewFromAny(map[string]interface{}{
"rawSql": fmt.Sprintf(`SELECT
CAST('%d' as signed integer) as time_sec,
'message' as text,
'tag1,tag2' as tags
`, dt.Unix()),
"format": "table",
}),
RefId: "A",
},
},
}
resp, err := endpoint.Query(nil, nil, query)
So(err, ShouldBeNil)
queryResult := resp.Results["A"]
So(queryResult.Error, ShouldBeNil)
So(len(queryResult.Tables[0].Rows), ShouldEqual, 1)
columns := queryResult.Tables[0].Rows[0]
//Should be in milliseconds
So(columns[0].(int64), ShouldEqual, int64(dt.Unix()*1000))
})
Convey("When doing an annotation query with a time column in epoch millisecond format should return ms", func() {
dt := time.Date(2018, 3, 14, 21, 20, 6, 527e6, time.UTC)
query := &tsdb.TsdbQuery{
Queries: []*tsdb.Query{
{
Model: simplejson.NewFromAny(map[string]interface{}{
"rawSql": fmt.Sprintf(`SELECT
%d as time_sec,
'message' as text,
'tag1,tag2' as tags
`, dt.Unix()*1000),
"format": "table",
}),
RefId: "A",
},
},
}
resp, err := endpoint.Query(nil, nil, query)
So(err, ShouldBeNil)
queryResult := resp.Results["A"]
So(queryResult.Error, ShouldBeNil)
So(len(queryResult.Tables[0].Rows), ShouldEqual, 1)
columns := queryResult.Tables[0].Rows[0]
//Should be in milliseconds
So(columns[0].(int64), ShouldEqual, dt.Unix()*1000)
})
Convey("When doing an annotation query with a time column holding a unsigned integer null value should return nil", func() {
query := &tsdb.TsdbQuery{
Queries: []*tsdb.Query{
{
Model: simplejson.NewFromAny(map[string]interface{}{
"rawSql": `SELECT
cast(null as unsigned integer) as time_sec,
'message' as text,
'tag1,tag2' as tags
`,
"format": "table",
}),
RefId: "A",
},
},
}
resp, err := endpoint.Query(nil, nil, query)
So(err, ShouldBeNil)
queryResult := resp.Results["A"]
So(queryResult.Error, ShouldBeNil)
So(len(queryResult.Tables[0].Rows), ShouldEqual, 1)
columns := queryResult.Tables[0].Rows[0]
//Should be in milliseconds
So(columns[0], ShouldBeNil)
})
Convey("When doing an annotation query with a time column holding a DATETIME null value should return nil", func() {
query := &tsdb.TsdbQuery{
Queries: []*tsdb.Query{
{
Model: simplejson.NewFromAny(map[string]interface{}{
"rawSql": `SELECT
cast(null as DATETIME) as time_sec,
'message' as text,
'tag1,tag2' as tags
`,
"format": "table",
}),
RefId: "A",
},
},
}
resp, err := endpoint.Query(nil, nil, query)
So(err, ShouldBeNil)
queryResult := resp.Results["A"]
So(queryResult.Error, ShouldBeNil)
So(len(queryResult.Tables[0].Rows), ShouldEqual, 1)
columns := queryResult.Tables[0].Rows[0]
//Should be in milliseconds
So(columns[0], ShouldBeNil)
})
})
})
}
func InitMySQLTestDB(t *testing.T) *xorm.Engine {
x, err := xorm.NewEngine(sqlutil.TestDB_Mysql.DriverName, sqlutil.TestDB_Mysql.ConnStr+"&parseTime=true")
x.DatabaseTZ = time.Local
x.TZLocation = time.Local
// x.ShowSQL()
@@ -140,7 +657,18 @@ func InitMySQLTestDB(t *testing.T) *xorm.Engine {
t.Fatalf("Failed to init mysql db %v", err)
}
sqlutil.CleanDB(x)
return x
}
func genTimeRangeByInterval(from time.Time, duration time.Duration, interval time.Duration) []time.Time {
durationSec := int64(duration.Seconds())
intervalSec := int64(interval.Seconds())
timeRange := []time.Time{}
for i := int64(0); i < durationSec; i += intervalSec {
timeRange = append(timeRange, from)
from = from.Add(time.Duration(int64(time.Second) * intervalSec))
}
return timeRange
}
@@ -18,15 +18,16 @@
<div class="gf-form" ng-show="ctrl.showHelp">
<pre class="gf-form-pre alert alert-info"><h6>Annotation Query Format</h6>
An annotation is an event that is overlayed on top of graphs. The query can have up to four columns per row, the time_sec column is mandatory. Annotation rendering is expensive so it is important to limit the number of rows returned.
An annotation is an event that is overlayed on top of graphs. The query can have up to three columns per row, the <i>time</i> or <i>time_sec</i> column is mandatory. Annotation rendering is expensive so it is important to limit the number of rows returned.
- column with alias: <b>time_sec</b> for the annotation event. Format is UTC in seconds, use UNIX_TIMESTAMP(column)
- column with alias: <b>time</b> or <i>time_sec</i> for the annotation event time. Use epoch time or any native date data type.
- column with alias: <b>text</b> for the annotation text
- column with alias: <b>tags</b> for annotation tags. This is a comma separated string of tags e.g. 'tag1,tag2'
Macros:
- $__time(column) -> UNIX_TIMESTAMP(column) as time_sec
- $__time(column) -> UNIX_TIMESTAMP(column) as time (or as time_sec)
- $__timeEpoch(column) -> UNIX_TIMESTAMP(column) as time (or as time_sec)
- $__timeFilter(column) -> UNIX_TIMESTAMP(time_date_time) > 1492750877 AND UNIX_TIMESTAMP(time_date_time) < 1492750877
- $__unixEpochFilter(column) -> time_unix_epoch > 1492750877 AND time_unix_epoch < 1492750877
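For example, an annotation query in the spirit of the updated help text; the event table and its columns are the ones created by the tests in this commit, and aliasing the column as time instead of time_sec works as well.

package main

import "fmt"

func main() {
    // Annotation query sketch: a time_sec (or time) column plus text and tags.
    const annotationSQL = "SELECT time_sec, description as text, tags FROM event " +
        "WHERE $__unixEpochFilter(time_sec) AND tags='deploy' ORDER BY 1 ASC"
    fmt.Println(annotationSQL)
}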
@@ -38,15 +38,16 @@
<div class="gf-form" ng-show="ctrl.showHelp">
<pre class="gf-form-pre alert alert-info">Time series:
- return column named time_sec (UTC in seconds), use UNIX_TIMESTAMP(column)
- return column named value for the time point value
- return column named metric to represent the series name
- return column named time or time_sec (in UTC), as a unix time stamp or any sql native date data type. You can use the macros below.
- return column(s) with numeric datatype as values
- (Optional: return column named <i>metric</i> to represent the series name. If no column named metric is found the column name of the value column is used as series name)
Table:
- return any set of columns
Macros:
- $__time(column) -> UNIX_TIMESTAMP(column) as time_sec
- $__timeEpoch(column) -> UNIX_TIMESTAMP(column) as time_sec
- $__timeFilter(column) -> UNIX_TIMESTAMP(time_date_time) ≥ 1492750877 AND UNIX_TIMESTAMP(time_date_time) ≤ 1492750877
- $__unixEpochFilter(column) -> time_unix_epoch > 1492750877 AND time_unix_epoch < 1492750877
- $__timeGroup(column,'5m') -> cast(cast(UNIX_TIMESTAMP(column)/(300) as signed)*300 as signed)
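And, matching the time series help above, two example queries taken from the tests in this commit (the second adds an explicit metric column to name each series; the table and column names are just those of the test schema):

package main

import "fmt"

func main() {
    // 5-minute buckets, single value column; the column name becomes the series name.
    const grouped = "SELECT $__timeGroup(time, '5m') as time_sec, avg(value) as value " +
        "FROM metric GROUP BY 1 ORDER BY 1"

    // Same idea with an explicit metric column naming each series.
    const withMetric = "SELECT $__time(time), CONCAT(measurement, ' - value one') as metric, valueOne " +
        "FROM metric_values ORDER BY 1"

    fmt.Println(grouped)
    fmt.Println(withMetric)
}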
@@ -113,7 +113,7 @@ export default class ResponseParser {
let tagsColumnIndex = -1;
for (let i = 0; i < table.columns.length; i++) {
if (table.columns[i].text === 'time_sec') {
if (table.columns[i].text === 'time_sec' || table.columns[i].text === 'time') {
timeColumnIndex = i;
} else if (table.columns[i].text === 'title') {
return this.$q.reject({
@@ -137,7 +137,7 @@ export default class ResponseParser {
const row = table.rows[i];
list.push({
annotation: options.annotation,
time: Math.floor(row[timeColumnIndex]) * 1000,
time: Math.floor(row[timeColumnIndex]),
text: row[textColumnIndex] ? row[textColumnIndex].toString() : '',
tags: row[tagsColumnIndex] ? row[tagsColumnIndex].trim().split(/\s*,\s*/) : [],
});