feat(influxdb): add query part model
Commit d0e6a9559e (parent 946e0bf32e) in grafana/grafana (https://github.com/grafana/grafana.git)
Changes to the existing InfluxDB executor:

@@ -12,10 +12,14 @@ import (
 type InfluxDBExecutor struct {
     *tsdb.DataSourceInfo
+    QueryParser *InfluxdbQueryParser
 }

 func NewInfluxDBExecutor(dsInfo *tsdb.DataSourceInfo) tsdb.Executor {
-    return &InfluxDBExecutor{dsInfo}
+    return &InfluxDBExecutor{
+        DataSourceInfo: dsInfo,
+        QueryParser:    &InfluxdbQueryParser{},
+    }
 }

 var (

@@ -41,7 +45,7 @@ func (e *InfluxDBExecutor) Execute(ctx context.Context, queries tsdb.QuerySlice,
     result := &tsdb.BatchResult{}
     for _, v := range queries {

-        query, err := ParseQuery(v.Model)
+        query, err := e.QueryParser.Parse(v.Model)

         if err != nil {
             result.Error = err
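Taken together, these two hunks move query parsing out of the executor's Execute loop and into an injectable InfluxdbQueryParser (added in pkg/tsdb/influxdb/parser.go below). A minimal sketch, not part of the commit, of why that separation is convenient: the parser can now be exercised on its own, for example to pre-validate a batch of panel models; parseAll is a hypothetical helper.

package influxdb

import "github.com/grafana/grafana/pkg/components/simplejson"

// parseAll is a hypothetical helper (not in this commit): it runs the new
// InfluxdbQueryParser over several raw panel models without touching the
// executor, stopping at the first model that fails to parse.
func parseAll(parser *InfluxdbQueryParser, models []*simplejson.Json) ([]*Query, error) {
    queries := make([]*Query, 0, len(models))
    for _, m := range models {
        q, err := parser.Parse(m)
        if err != nil {
            return nil, err
        }
        queries = append(queries, q)
    }
    return queries, nil
}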
@ -1,13 +1,12 @@
|
||||
package influxdb
|
||||
|
||||
import "github.com/grafana/grafana/pkg/components/simplejson"
|
||||
|
||||
type InfluxDBQuery struct {
|
||||
type Query struct {
|
||||
Measurement string
|
||||
Policy string
|
||||
ResultFormat string
|
||||
Tags []Tag
|
||||
GroupBy []GroupBy
|
||||
Tags []*Tag
|
||||
GroupBy []*QueryPart
|
||||
Selects []*Select
|
||||
}
|
||||
|
||||
type Tag struct {
|
||||
@ -16,84 +15,13 @@ type Tag struct {
|
||||
Value string
|
||||
}
|
||||
|
||||
type GroupBy struct {
|
||||
type QueryPart struct {
|
||||
Type string
|
||||
Params []string
|
||||
}
|
||||
|
||||
type Select []QueryPart
|
||||
|
||||
type InfluxDbSelect struct {
|
||||
Type string
|
||||
}
|
||||
|
||||
func ParseQuery(model *simplejson.Json) (*InfluxDBQuery, error) {
|
||||
measurement, err := model.Get("measurement").String()
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
policy := model.Get("policy").MustString("default")
|
||||
|
||||
resultFormat, err := model.Get("resultFormat").String()
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
var tags []Tag
|
||||
var groupBy []GroupBy
|
||||
|
||||
for _, g := range model.Get("groupBy").MustArray() {
|
||||
group := simplejson.NewFromAny(g)
|
||||
|
||||
typ, err := group.Get("type").String()
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
var params []string
|
||||
for _, p := range group.Get("params").MustArray() {
|
||||
param := simplejson.NewFromAny(p)
|
||||
|
||||
pv, err := param.String()
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
params = append(params, pv)
|
||||
}
|
||||
|
||||
gap := GroupBy{
|
||||
Type: typ,
|
||||
Params: params,
|
||||
}
|
||||
|
||||
groupBy = append(groupBy, gap)
|
||||
}
|
||||
|
||||
for _, t := range model.Get("tags").MustArray() {
|
||||
tag := simplejson.NewFromAny(t)
|
||||
|
||||
key, err := tag.Get("key").String()
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
operator, err := tag.Get("operator").String()
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
value, err := tag.Get("value").String()
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
tags = append(tags, Tag{Key: key, Operator: operator, Value: value})
|
||||
}
|
||||
|
||||
return &InfluxDBQuery{
|
||||
Measurement: measurement,
|
||||
Policy: policy,
|
||||
ResultFormat: resultFormat,
|
||||
GroupBy: groupBy,
|
||||
Tags: tags,
|
||||
}, nil
|
||||
}
|
||||
|
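The reshaped model replaces the old GroupBy and InfluxDbSelect types with a single QueryPart, and a Select is simply an ordered list of parts. A small sketch, not taken from the commit, of what a hand-built value of the new model looks like (the literal values are illustrative only):

package influxdb

// newExampleQuery is illustrative only: it shows the new shape of Query, where
// group-by clauses and select expressions are both plain QueryPart values
// rather than the removed GroupBy/InfluxDbSelect types.
func newExampleQuery() *Query {
    return &Query{
        Measurement:  "cpu",
        Policy:       "default",
        ResultFormat: "time_series",
        Tags:         []*Tag{{Key: "hostname", Operator: "=", Value: "server1"}},
        GroupBy:      []*QueryPart{{Type: "time", Params: []string{"$interval"}}},
        Selects: []*Select{{
            {Type: "field", Params: []string{"value"}},
            {Type: "mean"},
        }},
    }
}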
pkg/tsdb/influxdb/parser.go (new file, 128 lines)

package influxdb

import "github.com/grafana/grafana/pkg/components/simplejson"

type InfluxdbQueryParser struct{}

func (qp *InfluxdbQueryParser) Parse(model *simplejson.Json) (*Query, error) {
    policy := model.Get("policy").MustString("default")

    measurement, err := model.Get("measurement").String()
    if err != nil {
        return nil, err
    }

    resultFormat, err := model.Get("resultFormat").String()
    if err != nil {
        return nil, err
    }

    tags, err := qp.parseTags(model)
    if err != nil {
        return nil, err
    }

    groupBys, err := qp.parseGroupBy(model)
    if err != nil {
        return nil, err
    }

    selects, err := qp.parseSelects(model)
    if err != nil {
        return nil, err
    }

    return &Query{
        Measurement:  measurement,
        Policy:       policy,
        ResultFormat: resultFormat,
        GroupBy:      groupBys,
        Tags:         tags,
        Selects:      selects,
    }, nil
}

func (qp *InfluxdbQueryParser) parseSelects(model *simplejson.Json) ([]*Select, error) {
    var result []*Select

    for _, selectObj := range model.Get("select").MustArray() {
        selectJson := simplejson.NewFromAny(selectObj)
        var parts Select

        for _, partObj := range selectJson.MustArray() {
            part := simplejson.NewFromAny(partObj)
            queryPart, err := qp.parseQueryPart(part)
            if err != nil {
                return nil, err
            }

            parts = append(parts, *queryPart)
        }

        result = append(result, &parts)
    }

    return result, nil
}

func (*InfluxdbQueryParser) parseTags(model *simplejson.Json) ([]*Tag, error) {
    var result []*Tag
    for _, t := range model.Get("tags").MustArray() {
        tagJson := simplejson.NewFromAny(t)

        key, err := tagJson.Get("key").String()
        if err != nil {
            return nil, err
        }

        operator, err := tagJson.Get("operator").String()
        if err != nil {
            return nil, err
        }

        value, err := tagJson.Get("value").String()
        if err != nil {
            return nil, err
        }

        result = append(result, &Tag{Key: key, Operator: operator, Value: value})
    }

    return result, nil
}

func (*InfluxdbQueryParser) parseQueryPart(model *simplejson.Json) (*QueryPart, error) {
    typ, err := model.Get("type").String()
    if err != nil {
        return nil, err
    }

    var params []string
    for _, paramObj := range model.Get("params").MustArray() {
        param := simplejson.NewFromAny(paramObj)

        pv, err := param.String()
        if err != nil {
            return nil, err
        }
        params = append(params, pv)
    }

    return &QueryPart{Type: typ, Params: params}, nil
}

func (qp *InfluxdbQueryParser) parseGroupBy(model *simplejson.Json) ([]*QueryPart, error) {
    var result []*QueryPart

    for _, groupObj := range model.Get("groupBy").MustArray() {
        groupJson := simplejson.NewFromAny(groupObj)
        queryPart, err := qp.parseQueryPart(groupJson)

        if err != nil {
            return nil, err
        }
        result = append(result, queryPart)
    }

    return result, nil
}
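As a quick orientation to the parser's API, here is a sketch, not included in the commit, of how Parse turns a raw JSON panel model into the new Query; it could live in a _test.go file as a Go example. The JSON payload is a trimmed-down, hypothetical model, and "policy" is omitted on purpose to show the fallback to "default".

package influxdb

import (
    "fmt"

    "github.com/grafana/grafana/pkg/components/simplejson"
)

// ExampleInfluxdbQueryParser_Parse is a sketch only; the model below is
// hypothetical and much smaller than a real panel model.
func ExampleInfluxdbQueryParser_Parse() {
    raw := []byte(`{
        "measurement": "logins.count",
        "resultFormat": "time_series",
        "groupBy": [{"type": "time", "params": ["$interval"]}],
        "select": [[
            {"type": "field", "params": ["value"]},
            {"type": "mean", "params": []}
        ]],
        "tags": [{"key": "datacenter", "operator": "=", "value": "America"}]
    }`)

    model, err := simplejson.NewJson(raw)
    if err != nil {
        panic(err)
    }

    query, err := (&InfluxdbQueryParser{}).Parse(model)
    if err != nil {
        panic(err)
    }

    fmt.Println(query.Measurement, query.Policy, len(query.GroupBy), len(*query.Selects[0]))
    // Output: logins.count default 1 2
}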
pkg/tsdb/influxdb/parser_test.go (new file, 93 lines)

package influxdb

import (
    "testing"

    "github.com/grafana/grafana/pkg/components/simplejson"
    . "github.com/smartystreets/goconvey/convey"
)

func TestInfluxdbQueryParser(t *testing.T) {
    Convey("Influxdb query parser", t, func() {

        parser := &InfluxdbQueryParser{}

        Convey("converting metric name", func() {
            json := `
            {
                "dsType": "influxdb",
                "groupBy": [
                    {
                        "params": ["$interval"],
                        "type": "time"
                    },
                    {
                        "type": "tag",
                        "params": ["datacenter"]
                    },
                    {
                        "params": ["null"],
                        "type": "fill"
                    }
                ],
                "measurement": "logins.count",
                "policy": "default",
                "refId": "B",
                "resultFormat": "time_series",
                "select": [
                    [
                        {
                            "params": ["value"],
                            "type": "field"
                        },
                        {
                            "params": [],
                            "type": "count"
                        }
                    ],
                    [
                        {
                            "params": ["value"],
                            "type": "field"
                        },
                        {
                            "params": [],
                            "type": "mean"
                        }
                    ]
                ],
                "tags": [
                    {
                        "key": "datacenter",
                        "operator": "=",
                        "value": "America"
                    }
                ]
            }
            `

            modelJson, err := simplejson.NewJson([]byte(json))
            So(err, ShouldBeNil)

            res, err := parser.Parse(modelJson)
            So(err, ShouldBeNil)
            So(len(res.GroupBy), ShouldEqual, 3)
            So(len(res.Selects), ShouldEqual, 2)
            So(len(res.Tags), ShouldEqual, 1)
        })
    })
}
pkg/tsdb/influxdb/query_builder.go (new file, 10 lines)

package influxdb

import "fmt"

type QueryBuild struct{}

func (*QueryBuild) Build(query *Query) (string, error) {
    return "", fmt.Errorf("query is not valid")
}
pkg/tsdb/influxdb/query_builder_test.go (new file, 57 lines)

package influxdb

import (
    "testing"

    . "github.com/smartystreets/goconvey/convey"
)

func TestInfluxdbQueryBuilder(t *testing.T) {
    Convey("Influxdb query builder", t, func() {

        builder := &QueryBuild{}

        Convey("can build query", func() {
            //query := &Query{}
            //res, err := builder.Build(query)
            //So(err, ShouldBeNil)
        })

        Convey("empty queries should return error", func() {
            query := &Query{}

            res, err := builder.Build(query)
            So(err, ShouldNotBeNil)
            So(res, ShouldEqual, "")
        })
    })
}

/*
describe('render series with mesurement only', function() {
  it('should generate correct query', function() {
    var query = new InfluxQuery({
      measurement: 'cpu',
    }, templateSrv, {});

    var queryText = query.render();
    expect(queryText).to.be('SELECT mean("value") FROM "cpu" WHERE $timeFilter GROUP BY time($interval) fill(null)');
  });
});
*/

/*
describe('series with tags OR condition', function() {
  it('should generate correct query', function() {
    var query = new InfluxQuery({
      measurement: 'cpu',
      groupBy: [{type: 'time', params: ['auto']}],
      tags: [{key: 'hostname', value: 'server1'}, {key: 'hostname', value: 'server2', condition: "OR"}]
    }, templateSrv, {});

    var queryText = query.render();
    expect(queryText).to.be('SELECT mean("value") FROM "cpu" WHERE "hostname" = \'server1\' OR "hostname" = \'server2\' AND ' +
      '$timeFilter GROUP BY time($interval)');
  });
});
*/
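Build is deliberately left as a stub in this commit, and the commented-out frontend tests above only hint at the strings it will eventually have to produce. Purely as a sketch under those assumptions (a hard-coded mean("value") select, no tag rendering), a build of the measurement-only case might look roughly like this; buildSketch is hypothetical and not the project's implementation.

package influxdb

import (
    "fmt"
    "strings"
)

// buildSketch is hypothetical: it covers only the measurement-only case the
// first commented-out test above describes, hard-codes the select expression,
// and ignores tags entirely.
func buildSketch(query *Query) (string, error) {
    if query.Measurement == "" {
        return "", fmt.Errorf("query is not valid")
    }

    var groupBy []string
    for _, part := range query.GroupBy {
        // e.g. {Type: "time", Params: ["$interval"]} renders as time($interval)
        groupBy = append(groupBy, fmt.Sprintf("%s(%s)", part.Type, strings.Join(part.Params, ", ")))
    }

    res := fmt.Sprintf(`SELECT mean("value") FROM "%s" WHERE $timeFilter`, query.Measurement)
    if len(groupBy) > 0 {
        res += " GROUP BY " + strings.Join(groupBy, " ")
    }
    return res, nil
}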
pkg/tsdb/influxdb/query_part.go (new file, 5 lines)

package influxdb

type Selector interface {
    Render(input string) string
}
pkg/tsdb/influxdb/query_part_test.go (new file, 63 lines)

package influxdb

import (
    "testing"

    . "github.com/smartystreets/goconvey/convey"
)

func TestInfluxdbQueryPart(t *testing.T) {
    Convey("Influxdb query part builder", t, func() {

        Convey("can build query", func() {
        })

        Convey("empty queries should return error", func() {
        })
    })
}

/*
describe('series with mesurement only', () => {
  it('should handle nested function parts', () => {
    var part = queryPart.create({
      type: 'derivative',
      params: ['10s'],
    });

    expect(part.text).to.be('derivative(10s)');
    expect(part.render('mean(value)')).to.be('derivative(mean(value), 10s)');
  });

  it('should nest spread function', () => {
    var part = queryPart.create({
      type: 'spread'
    });

    expect(part.text).to.be('spread()');
    expect(part.render('value')).to.be('spread(value)');
  });

  it('should handle suffirx parts', () => {
    var part = queryPart.create({
      type: 'math',
      params: ['/ 100'],
    });

    expect(part.text).to.be('math(/ 100)');
    expect(part.render('mean(value)')).to.be('mean(value) / 100');
  });

  it('should handle alias parts', () => {
    var part = queryPart.create({
      type: 'alias',
      params: ['test'],
    });

    expect(part.text).to.be('alias(test)');
    expect(part.render('mean(value)')).to.be('mean(value) AS "test"');
  });

});
*/
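The new Selector interface in query_part.go and the commented-out frontend tests above point at the same idea: a query part knows how to wrap whatever expression has been rendered so far. As a sketch, not part of this commit, a function-style part could satisfy Selector like this; functionSelector is a hypothetical name.

package influxdb

import (
    "fmt"
    "strings"
)

// functionSelector is hypothetical: it implements Selector by wrapping its
// input in a function call and appending any extra parameters.
type functionSelector struct {
    fn     string
    params []string
}

func (s *functionSelector) Render(input string) string {
    args := append([]string{input}, s.params...)
    return fmt.Sprintf("%s(%s)", s.fn, strings.Join(args, ", "))
}

With that sketch, (&functionSelector{fn: "derivative", params: []string{"10s"}}).Render("mean(value)") yields derivative(mean(value), 10s), matching the nested-part behaviour the frontend test above expects, and (&functionSelector{fn: "spread"}).Render("value") yields spread(value). Suffix parts such as math and alias would need a different Render strategy.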