
postgres: fix precision for the time column in table/annotation query mode

Use the ConvertSqlTimeColumnToEpochMs function to convert any native
datetime data type or epoch time to millisecond precision.
Added tests and updated existing ones due to timezone issues when
running Postgres on UTC and the dev environment on non-UTC.
Added test dashboard.
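
The core of the change is normalizing whatever the driver returns for the time column into epoch milliseconds. The sketch below only illustrates that idea and is not Grafana's actual ConvertSqlTimeColumnToEpochMs implementation; the helper name, the seconds-vs-milliseconds threshold, and the sample row are assumptions made up for this example.

package main

import (
	"fmt"
	"time"
)

// convertTimeColumnToEpochMs normalizes the value at timeIndex to epoch
// milliseconds, whether the SQL driver returned a native time.Time or an
// epoch number in seconds or milliseconds. NULL values are left untouched.
// Illustrative sketch only; not the real tsdb.ConvertSqlTimeColumnToEpochMs.
func convertTimeColumnToEpochMs(values []interface{}, timeIndex int) {
	if timeIndex < 0 || timeIndex >= len(values) {
		return
	}
	switch v := values[timeIndex].(type) {
	case time.Time:
		values[timeIndex] = float64(v.UnixNano()) / float64(time.Millisecond)
	case int64:
		// Assumed heuristic: anything above ~1e12 is already milliseconds.
		if v > 1e12 {
			values[timeIndex] = float64(v)
		} else {
			values[timeIndex] = float64(v * 1000)
		}
	case float64:
		if v > 1e12 {
			values[timeIndex] = v
		} else {
			values[timeIndex] = v * 1000
		}
	case nil:
		// Leave NULL time values as nil, matching the null-value tests below.
	}
}

func main() {
	row := []interface{}{time.Date(2018, 3, 14, 21, 20, 6, 527e6, time.UTC), "message"}
	convertTimeColumnToEpochMs(row, 0)
	fmt.Println(row[0]) // 1.521062406527e+12 (epoch milliseconds)
}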
Marcus Efraimsson 2018-03-22 15:27:12 +01:00
parent b69ebee066
commit 66c03f84f5
5 changed files with 2930 additions and 84 deletions
docker/blocks/postgres_tests
pkg/tsdb/postgres
public/app/plugins/datasource/postgres

File diff suppressed because it is too large

View File

@@ -63,7 +63,6 @@ func (e *PostgresQueryEndpoint) Query(ctx context.Context, dsInfo *models.DataSo
}
func (e PostgresQueryEndpoint) transformToTable(query *tsdb.Query, rows *core.Rows, result *tsdb.QueryResult, tsdbQuery *tsdb.TsdbQuery) error {
columnNames, err := rows.Columns()
if err != nil {
return err
@@ -100,14 +99,10 @@ func (e PostgresQueryEndpoint) transformToTable(query *tsdb.Query, rows *core.Ro
return err
}
// convert column named time to unix timestamp to make
// native datetime postgres types work in annotation queries
if timeIndex != -1 {
switch value := values[timeIndex].(type) {
case time.Time:
values[timeIndex] = float64(value.UnixNano() / 1e9)
}
}
// converts column named time to unix timestamp in milliseconds to make
// native postgres datetime types and epoch dates work in
// annotation and table queries.
tsdb.ConvertSqlTimeColumnToEpochMs(values, timeIndex)
table.Rows = append(table.Rows, values)
}
@@ -118,7 +113,6 @@ func (e PostgresQueryEndpoint) transformToTable(query *tsdb.Query, rows *core.Ro
}
func (e PostgresQueryEndpoint) getTypedRowData(rows *core.Rows) (tsdb.RowValues, error) {
types, err := rows.ColumnTypes()
if err != nil {
return nil, err
@@ -209,7 +203,6 @@ func (e PostgresQueryEndpoint) transformToTimeSeries(query *tsdb.Query, rows *co
fillValue.Float64 = query.Model.Get("fillValue").MustFloat64()
fillValue.Valid = true
}
}
for rows.Next() {

View File

@@ -1,6 +1,8 @@
package postgres
import (
"fmt"
"math/rand"
"testing"
"time"
@@ -14,7 +16,11 @@ import (
)
// To run this test, remove the Skip from SkipConvey
// and set up a PostgreSQL db named grafanatest and a user/password grafanatest/grafanatest
// and set up a PostgreSQL db named grafanatest and a user/password grafanatest/grafanatest!
// Use the docker/blocks/postgres_tests/docker-compose.yaml to spin up a
// preconfigured Postgres server suitable for running these tests.
// There's also a dashboard.json in the same directory that you can import to Grafana
// once you've created a datasource for the test server/database.
func TestPostgres(t *testing.T) {
SkipConvey("PostgreSQL", t, func() {
x := InitPostgresTestDB(t)
@@ -30,88 +36,599 @@ func TestPostgres(t *testing.T) {
sess := x.NewSession()
defer sess.Close()
sql := `
CREATE TABLE postgres_types(
c00_smallint smallint,
c01_integer integer,
c02_bigint bigint,
c03_real real,
c04_double double precision,
c05_decimal decimal(10,2),
c06_numeric numeric(10,2),
c07_char char(10),
c08_varchar varchar(10),
c09_text text,
c10_timestamp timestamp without time zone,
c11_timestamptz timestamp with time zone,
c12_date date,
c13_time time without time zone,
c14_timetz time with time zone,
c15_interval interval
);
`
_, err := sess.Exec(sql)
So(err, ShouldBeNil)
sql = `
INSERT INTO postgres_types VALUES(
1,2,3,
4.5,6.7,1.1,1.2,
'char10','varchar10','text',
now(),now(),now(),now(),now(),'15m'::interval
);
`
_, err = sess.Exec(sql)
So(err, ShouldBeNil)
Convey("Query with Table format should map PostgreSQL column types to Go types", func() {
query := &tsdb.TsdbQuery{
Queries: []*tsdb.Query{
{
Model: simplejson.NewFromAny(map[string]interface{}{
"rawSql": "SELECT * FROM postgres_types",
"format": "table",
}),
RefId: "A",
},
},
}
resp, err := endpoint.Query(nil, nil, query)
queryResult := resp.Results["A"]
column := queryResult.Tables[0].Rows[0]
So(column[0].(int64), ShouldEqual, 1)
So(column[1].(int64), ShouldEqual, 2)
So(column[2].(int64), ShouldEqual, 3)
So(column[3].(float64), ShouldEqual, 4.5)
So(column[4].(float64), ShouldEqual, 6.7)
// libpq doesnt properly convert decimal, numeric and char to go types but returns []uint8 instead
// So(column[5].(float64), ShouldEqual, 1.1)
// So(column[6].(float64), ShouldEqual, 1.2)
// So(column[7].(string), ShouldEqual, "char")
So(column[8].(string), ShouldEqual, "varchar10")
So(column[9].(string), ShouldEqual, "text")
So(column[10].(time.Time), ShouldHaveSameTypeAs, time.Now())
So(column[11].(time.Time), ShouldHaveSameTypeAs, time.Now())
So(column[12].(time.Time), ShouldHaveSameTypeAs, time.Now())
So(column[13].(time.Time), ShouldHaveSameTypeAs, time.Now())
So(column[14].(time.Time), ShouldHaveSameTypeAs, time.Now())
// libpq doesnt properly convert interval to go types but returns []uint8 instead
// So(column[15].(time.Time), ShouldHaveSameTypeAs, time.Now())
fromStart := time.Date(2018, 3, 15, 13, 0, 0, 0, time.UTC).In(time.Local)
Convey("Given a table with different native data types", func() {
sql := `
DROP TABLE IF EXISTS postgres_types;
CREATE TABLE postgres_types(
c00_smallint smallint,
c01_integer integer,
c02_bigint bigint,
c03_real real,
c04_double double precision,
c05_decimal decimal(10,2),
c06_numeric numeric(10,2),
c07_char char(10),
c08_varchar varchar(10),
c09_text text,
c10_timestamp timestamp without time zone,
c11_timestamptz timestamp with time zone,
c12_date date,
c13_time time without time zone,
c14_timetz time with time zone,
c15_interval interval
);
`
_, err := sess.Exec(sql)
So(err, ShouldBeNil)
sql = `
INSERT INTO postgres_types VALUES(
1,2,3,
4.5,6.7,1.1,1.2,
'char10','varchar10','text',
now(),now(),now(),now(),now(),'15m'::interval
);
`
_, err = sess.Exec(sql)
So(err, ShouldBeNil)
Convey("When doing a table query should map Postgres column types to Go types", func() {
query := &tsdb.TsdbQuery{
Queries: []*tsdb.Query{
{
Model: simplejson.NewFromAny(map[string]interface{}{
"rawSql": "SELECT * FROM postgres_types",
"format": "table",
}),
RefId: "A",
},
},
}
resp, err := endpoint.Query(nil, nil, query)
So(err, ShouldBeNil)
queryResult := resp.Results["A"]
So(queryResult.Error, ShouldBeNil)
column := queryResult.Tables[0].Rows[0]
So(column[0].(int64), ShouldEqual, 1)
So(column[1].(int64), ShouldEqual, 2)
So(column[2].(int64), ShouldEqual, 3)
So(column[3].(float64), ShouldEqual, 4.5)
So(column[4].(float64), ShouldEqual, 6.7)
So(column[5].(float64), ShouldEqual, 1.1)
So(column[6].(float64), ShouldEqual, 1.2)
So(column[7].(string), ShouldEqual, "char10 ")
So(column[8].(string), ShouldEqual, "varchar10")
So(column[9].(string), ShouldEqual, "text")
So(column[10].(time.Time), ShouldHaveSameTypeAs, time.Now())
So(column[11].(time.Time), ShouldHaveSameTypeAs, time.Now())
So(column[12].(time.Time), ShouldHaveSameTypeAs, time.Now())
So(column[13].(time.Time), ShouldHaveSameTypeAs, time.Now())
So(column[14].(time.Time), ShouldHaveSameTypeAs, time.Now())
So(column[15].(string), ShouldEqual, "00:15:00")
})
})
Convey("Given a table with metrics that lacks data for some series ", func() {
sql := `
DROP TABLE IF EXISTS metric;
CREATE TABLE metric (
time timestamp,
value integer
)
`
_, err := sess.Exec(sql)
So(err, ShouldBeNil)
type metric struct {
Time time.Time
Value int64
}
series := []*metric{}
firstRange := genTimeRangeByInterval(fromStart, 10*time.Minute, 10*time.Second)
secondRange := genTimeRangeByInterval(fromStart.Add(20*time.Minute), 10*time.Minute, 10*time.Second)
for _, t := range firstRange {
series = append(series, &metric{
Time: t,
Value: 15,
})
}
for _, t := range secondRange {
series = append(series, &metric{
Time: t,
Value: 20,
})
}
for _, s := range series {
_, err = sess.Insert(s)
So(err, ShouldBeNil)
}
Convey("When doing a metric query using timeGroup", func() {
query := &tsdb.TsdbQuery{
Queries: []*tsdb.Query{
{
Model: simplejson.NewFromAny(map[string]interface{}{
"rawSql": "SELECT $__timeGroup(time, '5m'), avg(value) as value FROM metric GROUP BY 1 ORDER BY 1",
"format": "time_series",
}),
RefId: "A",
},
},
}
resp, err := endpoint.Query(nil, nil, query)
So(err, ShouldBeNil)
queryResult := resp.Results["A"]
So(queryResult.Error, ShouldBeNil)
points := queryResult.Series[0].Points
So(len(points), ShouldEqual, 6)
dt := fromStart
for i := 0; i < 3; i++ {
aValue := points[i][0].Float64
aTime := time.Unix(int64(points[i][1].Float64)/1000, 0)
So(aValue, ShouldEqual, 15)
So(aTime, ShouldEqual, dt)
dt = dt.Add(5 * time.Minute)
}
// adjust for 5 minute gap
dt = dt.Add(5 * time.Minute)
for i := 3; i < 6; i++ {
aValue := points[i][0].Float64
aTime := time.Unix(int64(points[i][1].Float64)/1000, 0)
So(aValue, ShouldEqual, 20)
So(aTime, ShouldEqual, dt)
dt = dt.Add(5 * time.Minute)
}
})
Convey("When doing a metric query using timeGroup with NULL fill enabled", func() {
query := &tsdb.TsdbQuery{
Queries: []*tsdb.Query{
{
Model: simplejson.NewFromAny(map[string]interface{}{
"rawSql": "SELECT $__timeGroup(time, '5m', NULL), avg(value) as value FROM metric GROUP BY 1 ORDER BY 1",
"format": "time_series",
}),
RefId: "A",
},
},
TimeRange: &tsdb.TimeRange{
From: fmt.Sprintf("%v", fromStart.Unix()*1000),
To: fmt.Sprintf("%v", fromStart.Add(34*time.Minute).Unix()*1000),
},
}
resp, err := endpoint.Query(nil, nil, query)
So(err, ShouldBeNil)
queryResult := resp.Results["A"]
So(queryResult.Error, ShouldBeNil)
points := queryResult.Series[0].Points
So(len(points), ShouldEqual, 7)
dt := fromStart
for i := 0; i < 3; i++ {
aValue := points[i][0].Float64
aTime := time.Unix(int64(points[i][1].Float64)/1000, 0)
So(aValue, ShouldEqual, 15)
So(aTime, ShouldEqual, dt)
dt = dt.Add(5 * time.Minute)
}
So(points[3][0].Valid, ShouldBeFalse)
// adjust for 5 minute gap
dt = dt.Add(5 * time.Minute)
for i := 4; i < 7; i++ {
aValue := points[i][0].Float64
aTime := time.Unix(int64(points[i][1].Float64)/1000, 0)
So(aValue, ShouldEqual, 20)
So(aTime, ShouldEqual, dt)
dt = dt.Add(5 * time.Minute)
}
})
Convey("When doing a metric query using timeGroup with float fill enabled", func() {
query := &tsdb.TsdbQuery{
Queries: []*tsdb.Query{
{
Model: simplejson.NewFromAny(map[string]interface{}{
"rawSql": "SELECT $__timeGroup(time, '5m', 1.5), avg(value) as value FROM metric GROUP BY 1 ORDER BY 1",
"format": "time_series",
}),
RefId: "A",
},
},
TimeRange: &tsdb.TimeRange{
From: fmt.Sprintf("%v", fromStart.Unix()*1000),
To: fmt.Sprintf("%v", fromStart.Add(34*time.Minute).Unix()*1000),
},
}
resp, err := endpoint.Query(nil, nil, query)
So(err, ShouldBeNil)
queryResult := resp.Results["A"]
So(queryResult.Error, ShouldBeNil)
points := queryResult.Series[0].Points
So(points[3][0].Float64, ShouldEqual, 1.5)
})
})
Convey("Given a table with metrics having multiple values and measurements", func() {
type metric_values struct {
Time time.Time
Measurement string
ValueOne int64 `xorm:"integer 'valueOne'"`
ValueTwo int64 `xorm:"integer 'valueTwo'"`
}
if exist, err := sess.IsTableExist(metric_values{}); err != nil || exist {
So(err, ShouldBeNil)
sess.DropTable(metric_values{})
}
err := sess.CreateTable(metric_values{})
So(err, ShouldBeNil)
rand.Seed(time.Now().Unix())
rnd := func(min, max int64) int64 {
return rand.Int63n(max-min) + min
}
series := []*metric_values{}
for _, t := range genTimeRangeByInterval(fromStart.Add(-30*time.Minute), 90*time.Minute, 5*time.Minute) {
series = append(series, &metric_values{
Time: t,
Measurement: "Metric A",
ValueOne: rnd(0, 100),
ValueTwo: rnd(0, 100),
})
series = append(series, &metric_values{
Time: t,
Measurement: "Metric B",
ValueOne: rnd(0, 100),
ValueTwo: rnd(0, 100),
})
}
for _, s := range series {
_, err := sess.Insert(s)
So(err, ShouldBeNil)
}
Convey("When doing a metric query grouping by time and select metric column should return correct series", func() {
query := &tsdb.TsdbQuery{
Queries: []*tsdb.Query{
{
Model: simplejson.NewFromAny(map[string]interface{}{
"rawSql": `SELECT $__timeEpoch(time), measurement || ' - value one' as metric, "valueOne" FROM metric_values ORDER BY 1`,
"format": "time_series",
}),
RefId: "A",
},
},
}
resp, err := endpoint.Query(nil, nil, query)
So(err, ShouldBeNil)
queryResult := resp.Results["A"]
So(queryResult.Error, ShouldBeNil)
So(len(queryResult.Series), ShouldEqual, 2)
So(queryResult.Series[0].Name, ShouldEqual, "Metric A - value one")
So(queryResult.Series[1].Name, ShouldEqual, "Metric B - value one")
})
Convey("When doing a metric query grouping by time should return correct series", func() {
query := &tsdb.TsdbQuery{
Queries: []*tsdb.Query{
{
Model: simplejson.NewFromAny(map[string]interface{}{
"rawSql": `SELECT $__timeEpoch(time), "valueOne", "valueTwo" FROM metric_values ORDER BY 1`,
"format": "time_series",
}),
RefId: "A",
},
},
}
resp, err := endpoint.Query(nil, nil, query)
So(err, ShouldBeNil)
queryResult := resp.Results["A"]
So(queryResult.Error, ShouldBeNil)
So(len(queryResult.Series), ShouldEqual, 2)
So(queryResult.Series[0].Name, ShouldEqual, "valueOne")
So(queryResult.Series[1].Name, ShouldEqual, "valueTwo")
})
})
Convey("Given a table with event data", func() {
type event struct {
TimeSec int64
Description string
Tags string
}
if exist, err := sess.IsTableExist(event{}); err != nil || exist {
So(err, ShouldBeNil)
sess.DropTable(event{})
}
err := sess.CreateTable(event{})
So(err, ShouldBeNil)
events := []*event{}
for _, t := range genTimeRangeByInterval(fromStart.Add(-20*time.Minute), 60*time.Minute, 25*time.Minute) {
events = append(events, &event{
TimeSec: t.Unix(),
Description: "Someone deployed something",
Tags: "deploy",
})
events = append(events, &event{
TimeSec: t.Add(5 * time.Minute).Unix(),
Description: "New support ticket registered",
Tags: "ticket",
})
}
for _, e := range events {
_, err = sess.Insert(e)
So(err, ShouldBeNil)
}
Convey("When doing an annotation query of deploy events should return expected result", func() {
query := &tsdb.TsdbQuery{
Queries: []*tsdb.Query{
{
Model: simplejson.NewFromAny(map[string]interface{}{
"rawSql": `SELECT "time_sec" as time, description as text, tags FROM event WHERE $__unixEpochFilter(time_sec) AND tags='deploy' ORDER BY 1 ASC`,
"format": "table",
}),
RefId: "Deploys",
},
},
TimeRange: &tsdb.TimeRange{
From: fmt.Sprintf("%v", fromStart.Add(-20*time.Minute).Unix()*1000),
To: fmt.Sprintf("%v", fromStart.Add(40*time.Minute).Unix()*1000),
},
}
resp, err := endpoint.Query(nil, nil, query)
queryResult := resp.Results["Deploys"]
So(err, ShouldBeNil)
So(len(queryResult.Tables[0].Rows), ShouldEqual, 3)
})
Convey("When doing an annotation query of ticket events should return expected result", func() {
query := &tsdb.TsdbQuery{
Queries: []*tsdb.Query{
{
Model: simplejson.NewFromAny(map[string]interface{}{
"rawSql": `SELECT "time_sec" as time, description as text, tags FROM event WHERE $__unixEpochFilter(time_sec) AND tags='ticket' ORDER BY 1 ASC`,
"format": "table",
}),
RefId: "Tickets",
},
},
TimeRange: &tsdb.TimeRange{
From: fmt.Sprintf("%v", fromStart.Add(-20*time.Minute).Unix()*1000),
To: fmt.Sprintf("%v", fromStart.Add(40*time.Minute).Unix()*1000),
},
}
resp, err := endpoint.Query(nil, nil, query)
queryResult := resp.Results["Tickets"]
So(err, ShouldBeNil)
So(len(queryResult.Tables[0].Rows), ShouldEqual, 3)
})
Convey("When doing an annotation query with a time column in datetime format", func() {
dt := time.Date(2018, 3, 14, 21, 20, 6, 527e6, time.UTC)
dtFormat := "2006-01-02 15:04:05.999999999"
query := &tsdb.TsdbQuery{
Queries: []*tsdb.Query{
{
Model: simplejson.NewFromAny(map[string]interface{}{
"rawSql": fmt.Sprintf(`SELECT
CAST('%s' AS TIMESTAMP) as time,
'message' as text,
'tag1,tag2' as tags
`, dt.Format(dtFormat)),
"format": "table",
}),
RefId: "A",
},
},
}
resp, err := endpoint.Query(nil, nil, query)
So(err, ShouldBeNil)
queryResult := resp.Results["A"]
So(queryResult.Error, ShouldBeNil)
So(len(queryResult.Tables[0].Rows), ShouldEqual, 1)
columns := queryResult.Tables[0].Rows[0]
//Should be in milliseconds
So(columns[0].(float64), ShouldEqual, float64(dt.Unix()*1000))
})
Convey("When doing an annotation query with a time column in epoch second format should return ms", func() {
dt := time.Date(2018, 3, 14, 21, 20, 6, 527e6, time.UTC)
query := &tsdb.TsdbQuery{
Queries: []*tsdb.Query{
{
Model: simplejson.NewFromAny(map[string]interface{}{
"rawSql": fmt.Sprintf(`SELECT
%d as time,
'message' as text,
'tag1,tag2' as tags
`, dt.Unix()),
"format": "table",
}),
RefId: "A",
},
},
}
resp, err := endpoint.Query(nil, nil, query)
So(err, ShouldBeNil)
queryResult := resp.Results["A"]
So(queryResult.Error, ShouldBeNil)
So(len(queryResult.Tables[0].Rows), ShouldEqual, 1)
columns := queryResult.Tables[0].Rows[0]
//Should be in milliseconds
So(columns[0].(int64), ShouldEqual, int64(dt.Unix()*1000))
})
Convey("When doing an annotation query with a time column in epoch second format (int) should return ms", func() {
dt := time.Date(2018, 3, 14, 21, 20, 6, 527e6, time.UTC)
query := &tsdb.TsdbQuery{
Queries: []*tsdb.Query{
{
Model: simplejson.NewFromAny(map[string]interface{}{
"rawSql": fmt.Sprintf(`SELECT
cast(%d as bigint) as time,
'message' as text,
'tag1,tag2' as tags
`, dt.Unix()),
"format": "table",
}),
RefId: "A",
},
},
}
resp, err := endpoint.Query(nil, nil, query)
So(err, ShouldBeNil)
queryResult := resp.Results["A"]
So(queryResult.Error, ShouldBeNil)
So(len(queryResult.Tables[0].Rows), ShouldEqual, 1)
columns := queryResult.Tables[0].Rows[0]
//Should be in milliseconds
So(columns[0].(int64), ShouldEqual, int64(dt.Unix()*1000))
})
Convey("When doing an annotation query with a time column in epoch millisecond format should return ms", func() {
dt := time.Date(2018, 3, 14, 21, 20, 6, 527e6, time.UTC)
query := &tsdb.TsdbQuery{
Queries: []*tsdb.Query{
{
Model: simplejson.NewFromAny(map[string]interface{}{
"rawSql": fmt.Sprintf(`SELECT
%d as time,
'message' as text,
'tag1,tag2' as tags
`, dt.Unix()*1000),
"format": "table",
}),
RefId: "A",
},
},
}
resp, err := endpoint.Query(nil, nil, query)
So(err, ShouldBeNil)
queryResult := resp.Results["A"]
So(queryResult.Error, ShouldBeNil)
So(len(queryResult.Tables[0].Rows), ShouldEqual, 1)
columns := queryResult.Tables[0].Rows[0]
//Should be in milliseconds
So(columns[0].(int64), ShouldEqual, dt.Unix()*1000)
})
Convey("When doing an annotation query with a time column holding a bigint null value should return nil", func() {
query := &tsdb.TsdbQuery{
Queries: []*tsdb.Query{
{
Model: simplejson.NewFromAny(map[string]interface{}{
"rawSql": `SELECT
cast(null as bigint) as time,
'message' as text,
'tag1,tag2' as tags
`,
"format": "table",
}),
RefId: "A",
},
},
}
resp, err := endpoint.Query(nil, nil, query)
So(err, ShouldBeNil)
queryResult := resp.Results["A"]
So(queryResult.Error, ShouldBeNil)
So(len(queryResult.Tables[0].Rows), ShouldEqual, 1)
columns := queryResult.Tables[0].Rows[0]
//Should be in milliseconds
So(columns[0], ShouldBeNil)
})
Convey("When doing an annotation query with a time column holding a timestamp null value should return nil", func() {
query := &tsdb.TsdbQuery{
Queries: []*tsdb.Query{
{
Model: simplejson.NewFromAny(map[string]interface{}{
"rawSql": `SELECT
cast(null as timestamp) as time,
'message' as text,
'tag1,tag2' as tags
`,
"format": "table",
}),
RefId: "A",
},
},
}
resp, err := endpoint.Query(nil, nil, query)
So(err, ShouldBeNil)
queryResult := resp.Results["A"]
So(queryResult.Error, ShouldBeNil)
So(len(queryResult.Tables[0].Rows), ShouldEqual, 1)
columns := queryResult.Tables[0].Rows[0]
//Should be in milliseconds
So(columns[0], ShouldBeNil)
})
})
})
}
func InitPostgresTestDB(t *testing.T) *xorm.Engine {
x, err := xorm.NewEngine(sqlutil.TestDB_Postgres.DriverName, sqlutil.TestDB_Postgres.ConnStr)
x.DatabaseTZ = time.UTC
x.TZLocation = time.UTC
// x.ShowSQL()
@@ -119,7 +636,18 @@ func InitPostgresTestDB(t *testing.T) *xorm.Engine {
t.Fatalf("Failed to init postgres db %v", err)
}
sqlutil.CleanDB(x)
return x
}
func genTimeRangeByInterval(from time.Time, duration time.Duration, interval time.Duration) []time.Time {
durationSec := int64(duration.Seconds())
intervalSec := int64(interval.Seconds())
timeRange := []time.Time{}
for i := int64(0); i < durationSec; i += intervalSec {
timeRange = append(timeRange, from)
from = from.Add(time.Duration(int64(time.Second) * intervalSec))
}
return timeRange
}

View File

@@ -18,15 +18,16 @@
<div class="gf-form" ng-show="ctrl.showHelp">
<pre class="gf-form-pre alert alert-info"><h6>Annotation Query Format</h6>
An annotation is an event that is overlayed on top of graphs. The query can have up to four columns per row, the time column is mandatory. Annotation rendering is expensive so it is important to limit the number of rows returned.
An annotation is an event that is overlayed on top of graphs. The query can have up to three columns per row, the time column is mandatory. Annotation rendering is expensive so it is important to limit the number of rows returned.
- column with alias: <b>time</b> for the annotation event. Format is UTC in seconds, use extract(epoch from column) as "time"
- column with alias: <b>time</b> for the annotation event time. Use epoch time or any native date data type.
- column with alias: <b>text</b> for the annotation text
- column with alias: <b>tags</b> for annotation tags. This is a comma separated string of tags e.g. 'tag1,tag2'
Macros:
- $__time(column) -&gt; column as "time"
- $__timeEpoch -&gt; extract(epoch from column) as "time"
- $__timeFilter(column) -&gt; column &ge; to_timestamp(1492750877) AND column &le; to_timestamp(1492750877)
- $__unixEpochFilter(column) -&gt; column &gt; 1492750877 AND column &lt; 1492750877
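
As a rough illustration of the updated help text (mirroring how the tests above embed SQL in Go raw strings), an annotation query may now return the time column either as a native timestamp or as an epoch value. The event table, time_sec column, and macros come from the tests and macro list above; the my_events table and created_at column are made up for the native-timestamp case.

package main

import "fmt"

func main() {
	// Native timestamp column (hypothetical my_events.created_at): since this commit
	// there is no need to wrap it in extract(epoch from ...); the backend converts it
	// to epoch milliseconds.
	nativeTimeQuery := `SELECT created_at as time, description as text, tags
FROM my_events WHERE $__timeFilter(created_at) ORDER BY 1`

	// Epoch-seconds column, as used by the event table in the tests above: still
	// supported and likewise normalized to milliseconds.
	epochTimeQuery := `SELECT time_sec as time, description as text, tags
FROM event WHERE $__unixEpochFilter(time_sec) ORDER BY 1`

	fmt.Println(nativeTimeQuery)
	fmt.Println(epochTimeQuery)
}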

View File

@@ -134,7 +134,7 @@ export default class ResponseParser {
const row = table.rows[i];
list.push({
annotation: options.annotation,
time: Math.floor(row[timeColumnIndex]) * 1000,
time: Math.floor(row[timeColumnIndex]),
title: row[titleColumnIndex],
text: row[textColumnIndex],
tags: row[tagsColumnIndex] ? row[tagsColumnIndex].trim().split(/\s*,\s*/) : [],