Merge branch 'master' into alerting_mqe
@@ -101,6 +101,7 @@ func sendUsageStats() {
     metrics["stats.plugins.apps.count"] = len(plugins.Apps)
     metrics["stats.plugins.panels.count"] = len(plugins.Panels)
     metrics["stats.plugins.datasources.count"] = len(plugins.DataSources)
+    metrics["stats.alerts.count"] = statsQuery.Result.AlertCount

     dsStats := m.GetDataSourceStatsQuery{}
     if err := bus.Dispatch(&dsStats); err != nil {
@@ -5,6 +5,7 @@ type SystemStats struct {
     UserCount     int64
     OrgCount      int64
     PlaylistCount int64
+    AlertCount    int64
 }

 type DataSourceStats struct {
@@ -29,6 +30,7 @@ type AdminStats struct {
     DataSourceCount int `json:"data_source_count"`
     PlaylistCount   int `json:"playlist_count"`
     StarredDbCount  int `json:"starred_db_count"`
+    AlertCount      int `json:"alert_count"`
 }

 type GetAdminStatsQuery struct {
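Note: a minimal sketch (not part of the diff) of how the new AlertCount field feeds the usage-stats map above. It assumes the stats query exposes its result on a Result field, as the GetSystemStatsQuery signature and the stats.alerts.count line suggest; error handling is abbreviated.

    statsQuery := m.GetSystemStatsQuery{}
    if err := bus.Dispatch(&statsQuery); err != nil {
        return // error handling elided in this sketch
    }
    metrics["stats.alerts.count"] = statsQuery.Result.AlertCount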
@@ -26,11 +26,32 @@ type Rule struct {
 }

 type ValidationError struct {
-    Reason string
+    Reason      string
+    Err         error
+    Alertid     int64
+    DashboardId int64
+    PanelId     int64
 }

 func (e ValidationError) Error() string {
-    return e.Reason
+    extraInfo := ""
+    if e.Alertid != 0 {
+        extraInfo = fmt.Sprintf("%s AlertId: %v", extraInfo, e.Alertid)
+    }
+
+    if e.PanelId != 0 {
+        extraInfo = fmt.Sprintf("%s PanelId: %v ", extraInfo, e.PanelId)
+    }
+
+    if e.DashboardId != 0 {
+        extraInfo = fmt.Sprintf("%s DashboardId: %v", extraInfo, e.DashboardId)
+    }
+
+    if e.Err != nil {
+        return fmt.Sprintf("%s %s%s", e.Err.Error(), e.Reason, extraInfo)
+    }
+
+    return fmt.Sprintf("Failed to extract alert.Reason: %s %s", e.Reason, extraInfo)
 }

 var (
@@ -83,7 +104,7 @@ func NewRuleFromDBAlert(ruleDef *m.Alert) (*Rule, error) {
     for _, v := range ruleDef.Settings.Get("notifications").MustArray() {
         jsonModel := simplejson.NewFromAny(v)
         if id, err := jsonModel.Get("id").Int64(); err != nil {
-            return nil, ValidationError{Reason: "Invalid notification schema"}
+            return nil, ValidationError{Reason: "Invalid notification schema", DashboardId: model.DashboardId, Alertid: model.Id, PanelId: model.PanelId}
         } else {
             model.Notifications = append(model.Notifications, id)
         }
@@ -93,10 +114,10 @@ func NewRuleFromDBAlert(ruleDef *m.Alert) (*Rule, error) {
         conditionModel := simplejson.NewFromAny(condition)
         conditionType := conditionModel.Get("type").MustString()
         if factory, exist := conditionFactories[conditionType]; !exist {
-            return nil, ValidationError{Reason: "Unknown alert condition: " + conditionType}
+            return nil, ValidationError{Reason: "Unknown alert condition: " + conditionType, DashboardId: model.DashboardId, Alertid: model.Id, PanelId: model.PanelId}
         } else {
             if queryCondition, err := factory(conditionModel, index); err != nil {
-                return nil, err
+                return nil, ValidationError{Err: err, DashboardId: model.DashboardId, Alertid: model.Id, PanelId: model.PanelId}
             } else {
                 model.Conditions = append(model.Conditions, queryCondition)
             }
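Note: a rough illustration (not from the commit) of what the enriched ValidationError reads like once the ids are populated; the condition name is invented and the spacing follows the Sprintf calls above.

    err := ValidationError{
        Reason:      "Unknown alert condition: foo", // hypothetical condition type
        Alertid:     7,
        PanelId:     3,
        DashboardId: 42,
    }
    // err.Error() now carries the ids, roughly:
    // "Failed to extract alert.Reason: Unknown alert condition: foo  AlertId: 7 PanelId: 3  DashboardId: 42"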
@@ -39,7 +39,11 @@ func GetSystemStats(query *m.GetSystemStatsQuery) error {
     (
     SELECT COUNT(*)
     FROM ` + dialect.Quote("playlist") + `
-    ) AS playlist_count
+    ) AS playlist_count,
+    (
+    SELECT COUNT(*)
+    FROM ` + dialect.Quote("alert") + `
+    ) AS alert_count
     `

     var stats m.SystemStats
@@ -85,7 +89,11 @@ func GetAdminStats(query *m.GetAdminStatsQuery) error {
     (
     SELECT COUNT(DISTINCT ` + dialect.Quote("dashboard_id") + ` )
     FROM ` + dialect.Quote("star") + `
-    ) AS starred_db_count
+    ) AS starred_db_count,
+    (
+    SELECT COUNT(*)
+    FROM ` + dialect.Quote("alert") + `
+    ) AS alert_count
     `

     var stats m.AdminStats
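Note: both SQL hunks only append one more COUNT(*) sub-select to the existing stats statement. A sketch of the resulting shape, with the dialect.Quote() calls and the other columns elided:

    var rawSql = `SELECT
        (
            SELECT COUNT(*)
            FROM playlist
        ) AS playlist_count,
        (
            SELECT COUNT(*)
            FROM alert
        ) AS alert_count
    `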
@@ -18,7 +18,6 @@ import (
 type InfluxDBExecutor struct {
     *tsdb.DataSourceInfo
     QueryParser    *InfluxdbQueryParser
-    QueryBuilder   *QueryBuilder
     ResponseParser *ResponseParser
 }

@@ -26,7 +25,6 @@ func NewInfluxDBExecutor(dsInfo *tsdb.DataSourceInfo) tsdb.Executor {
     return &InfluxDBExecutor{
         DataSourceInfo: dsInfo,
         QueryParser:    &InfluxdbQueryParser{},
-        QueryBuilder:   &QueryBuilder{},
         ResponseParser: &ResponseParser{},
     }
 }
@@ -51,11 +49,16 @@ func (e *InfluxDBExecutor) Execute(ctx context.Context, queries tsdb.QuerySlice,
         return result.WithError(err)
     }

-    if setting.Env == setting.DEV {
-        glog.Debug("Influxdb query", "raw query", query)
+    rawQuery, err := query.Build(context)
+    if err != nil {
+        return result.WithError(err)
     }

-    req, err := e.createRequest(query)
+    if setting.Env == setting.DEV {
+        glog.Debug("Influxdb query", "raw query", rawQuery)
+    }
+
+    req, err := e.createRequest(rawQuery)
     if err != nil {
         return result.WithError(err)
     }
@@ -79,29 +82,28 @@ func (e *InfluxDBExecutor) Execute(ctx context.Context, queries tsdb.QuerySlice,
         return result.WithError(err)
     }

     if response.Err != nil {
         return result.WithError(response.Err)
     }

     result.QueryResults = make(map[string]*tsdb.QueryResult)
-    result.QueryResults["A"] = e.ResponseParser.Parse(&response)
+    result.QueryResults["A"] = e.ResponseParser.Parse(&response, query)

     return result
 }

-func (e *InfluxDBExecutor) getQuery(queries tsdb.QuerySlice, context *tsdb.QueryContext) (string, error) {
+func (e *InfluxDBExecutor) getQuery(queries tsdb.QuerySlice, context *tsdb.QueryContext) (*Query, error) {
     for _, v := range queries {

         query, err := e.QueryParser.Parse(v.Model, e.DataSourceInfo)
         if err != nil {
-            return "", err
+            return nil, err
         }

-        rawQuery, err := e.QueryBuilder.Build(query, context)
-        if err != nil {
-            return "", err
-        }
-
-        return rawQuery, nil
+        return query, nil
     }

-    return "", fmt.Errorf("query request contains no queries")
+    return nil, fmt.Errorf("query request contains no queries")
 }

 func (e *InfluxDBExecutor) createRequest(query string) (*http.Request, error) {
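Note: taken together, the executor hunks move query rendering out of the executor: getQuery now returns the parsed *Query, and Execute renders and logs the raw string itself before building the HTTP request. A condensed sketch of the new flow, with error handling trimmed and names taken from the diff:

    query, err := e.getQuery(queries, context) // *Query instead of a pre-rendered string
    if err != nil {
        return result.WithError(err)
    }

    rawQuery, err := query.Build(context) // rendering now lives on Query
    if err != nil {
        return result.WithError(err)
    }

    req, err := e.createRequest(rawQuery) // createRequest still takes the rendered string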
@@ -12,7 +12,8 @@ type InfluxdbQueryParser struct{}
 func (qp *InfluxdbQueryParser) Parse(model *simplejson.Json, dsInfo *tsdb.DataSourceInfo) (*Query, error) {
     policy := model.Get("policy").MustString("default")
     rawQuery := model.Get("query").MustString("")
-    interval := model.Get("interval").MustString("")
+    useRawQuery := model.Get("rawQuery").MustBool(false)
+    alias := model.Get("alias").MustString("")

     measurement := model.Get("measurement").MustString("")

@@ -36,7 +37,8 @@ func (qp *InfluxdbQueryParser) Parse(model *simplejson.Json, dsInfo *tsdb.DataSo
         return nil, err
     }

-    if interval == "" {
+    interval := model.Get("interval").MustString("")
+    if interval == "" && dsInfo.JsonData != nil {
         dsInterval := dsInfo.JsonData.Get("timeInterval").MustString("")
         if dsInterval != "" {
             interval = dsInterval
@@ -52,6 +54,8 @@ func (qp *InfluxdbQueryParser) Parse(model *simplejson.Json, dsInfo *tsdb.DataSo
         Selects:     selects,
         RawQuery:    rawQuery,
         Interval:    interval,
+        Alias:       alias,
+        UseRawQuery: useRawQuery,
     }, nil
 }
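Note: a sketch (not from the commit) of the panel JSON the parser above now understands; the rawQuery and alias keys map onto the new UseRawQuery and Alias fields. The JSON values are invented and dsInfo stands in for the data source passed by the executor.

    func exampleParse(dsInfo *tsdb.DataSourceInfo) (*Query, error) {
        model, err := simplejson.NewJson([]byte(`{
            "measurement": "cpu",
            "alias":       "serie alias",
            "rawQuery":    true,
            "query":       "SELECT mean(\"value\") FROM \"cpu\" WHERE $timeFilter"
        }`))
        if err != nil {
            return nil, err
        }
        // Alias and UseRawQuery end up on the returned Query, per the parser above.
        return (&InfluxdbQueryParser{}).Parse(model, dsInfo)
    }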
@@ -90,6 +90,7 @@ func TestInfluxdbQueryParser(t *testing.T) {
         }
-    ]
+    ],
+    "alias": "serie alias",
     "tags": [
         {
             "key": "datacenter",
@@ -115,6 +116,7 @@ func TestInfluxdbQueryParser(t *testing.T) {
     So(len(res.Selects), ShouldEqual, 3)
     So(len(res.Tags), ShouldEqual, 2)
     So(res.Interval, ShouldEqual, ">20s")
+    So(res.Alias, ShouldEqual, "serie alias")
 })

 Convey("can part raw query json model", func() {
@@ -8,6 +8,8 @@ type Query struct {
     GroupBy     []*QueryPart
     Selects     []*Select
     RawQuery    string
+    UseRawQuery bool
+    Alias       string

     Interval string
 }
@@ -5,31 +5,36 @@ import (
     "strconv"
     "strings"

+    "regexp"
+
     "github.com/grafana/grafana/pkg/tsdb"
 )

-type QueryBuilder struct{}
+var (
+    regexpOperatorPattern    *regexp.Regexp = regexp.MustCompile(`^\/.*\/$`)
+    regexpMeasurementPattern *regexp.Regexp = regexp.MustCompile(`^\/.*\/$`)
+)

-func (qb *QueryBuilder) Build(query *Query, queryContext *tsdb.QueryContext) (string, error) {
-    if query.RawQuery != "" {
+func (query *Query) Build(queryContext *tsdb.QueryContext) (string, error) {
+    if query.UseRawQuery && query.RawQuery != "" {
         q := query.RawQuery

-        q = strings.Replace(q, "$timeFilter", qb.renderTimeFilter(query, queryContext), 1)
+        q = strings.Replace(q, "$timeFilter", query.renderTimeFilter(queryContext), 1)
         q = strings.Replace(q, "$interval", tsdb.CalculateInterval(queryContext.TimeRange), 1)

         return q, nil
     }

-    res := qb.renderSelectors(query, queryContext)
-    res += qb.renderMeasurement(query)
-    res += qb.renderWhereClause(query)
-    res += qb.renderTimeFilter(query, queryContext)
-    res += qb.renderGroupBy(query, queryContext)
+    res := query.renderSelectors(queryContext)
+    res += query.renderMeasurement()
+    res += query.renderWhereClause()
+    res += query.renderTimeFilter(queryContext)
+    res += query.renderGroupBy(queryContext)

     return res, nil
 }

-func (qb *QueryBuilder) renderTags(query *Query) []string {
+func (query *Query) renderTags() []string {
     var res []string
     for i, tag := range query.Tags {
         str := ""
@@ -43,24 +48,34 @@ func (qb *QueryBuilder) renderTags(query *Query) []string {
             str += " "
         }

-        value := tag.Value
-        nValue, err := strconv.ParseFloat(tag.Value, 64)
-
-        if tag.Operator == "=~" || tag.Operator == "!~" {
-            value = fmt.Sprintf("%s", value)
-        } else if err == nil {
-            value = fmt.Sprintf("%v", nValue)
-        } else {
-            value = fmt.Sprintf("'%s'", value)
+        //If the operator is missing we fall back to sensible defaults
+        if tag.Operator == "" {
+            if regexpOperatorPattern.Match([]byte(tag.Value)) {
+                tag.Operator = "=~"
+            } else {
+                tag.Operator = "="
+            }
         }

-        res = append(res, fmt.Sprintf(`%s"%s" %s %s`, str, tag.Key, tag.Operator, value))
+        textValue := ""
+        numericValue, err := strconv.ParseFloat(tag.Value, 64)
+
+        // quote value unless regex or number
+        if tag.Operator == "=~" || tag.Operator == "!~" {
+            textValue = tag.Value
+        } else if err == nil {
+            textValue = fmt.Sprintf("%v", numericValue)
+        } else {
+            textValue = fmt.Sprintf("'%s'", tag.Value)
+        }
+
+        res = append(res, fmt.Sprintf(`%s"%s" %s %s`, str, tag.Key, tag.Operator, textValue))
     }

     return res
 }

-func (qb *QueryBuilder) renderTimeFilter(query *Query, queryContext *tsdb.QueryContext) string {
+func (query *Query) renderTimeFilter(queryContext *tsdb.QueryContext) string {
     from := "now() - " + queryContext.TimeRange.From
     to := ""

@@ -71,7 +86,7 @@ func (qb *QueryBuilder) renderTimeFilter(query *Query, queryContext *tsdb.QueryC
     return fmt.Sprintf("time > %s%s", from, to)
 }

-func (qb *QueryBuilder) renderSelectors(query *Query, queryContext *tsdb.QueryContext) string {
+func (query *Query) renderSelectors(queryContext *tsdb.QueryContext) string {
     res := "SELECT "

     var selectors []string
@@ -87,19 +102,26 @@ func (qb *QueryBuilder) renderSelectors(query *Query, queryContext *tsdb.QueryCo
     return res + strings.Join(selectors, ", ")
 }

-func (qb *QueryBuilder) renderMeasurement(query *Query) string {
+func (query *Query) renderMeasurement() string {
     policy := ""
     if query.Policy == "" || query.Policy == "default" {
         policy = ""
     } else {
         policy = `"` + query.Policy + `".`
     }
-    return fmt.Sprintf(` FROM %s"%s"`, policy, query.Measurement)
+
+    measurement := query.Measurement
+
+    if !regexpMeasurementPattern.Match([]byte(measurement)) {
+        measurement = fmt.Sprintf(`"%s"`, measurement)
+    }
+
+    return fmt.Sprintf(` FROM %s%s`, policy, measurement)
 }

-func (qb *QueryBuilder) renderWhereClause(query *Query) string {
+func (query *Query) renderWhereClause() string {
     res := " WHERE "
-    conditions := qb.renderTags(query)
+    conditions := query.renderTags()
     res += strings.Join(conditions, " ")
     if len(conditions) > 0 {
         res += " AND "
@@ -108,7 +130,7 @@ func (qb *QueryBuilder) renderWhereClause(query *Query) string {
     return res
 }

-func (qb *QueryBuilder) renderGroupBy(query *Query, queryContext *tsdb.QueryContext) string {
+func (query *Query) renderGroupBy(queryContext *tsdb.QueryContext) string {
     groupBy := ""
     for i, group := range query.GroupBy {
         if i == 0 {
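Note: the net effect of the renderTags rewrite is that a missing operator is inferred from the value (regex-looking values such as /value/ get =~, anything else =) and that values are quoted unless they are regexes or parse as numbers. A small sketch mirroring the query builder tests below; the helper is illustrative only.

    // Illustrative helper, not part of the commit.
    func renderSingleTag(t *Tag) string {
        q := &Query{Tags: []*Tag{t}}
        return strings.Join(q.renderTags(), "")
    }

    // renderSingleTag(&Tag{Key: "key", Value: "/value/"}) -> `"key" =~ /value/`
    // renderSingleTag(&Tag{Key: "key", Value: "value"})   -> `"key" = 'value'`
    // renderSingleTag(&Tag{Key: "key", Value: "10001"})   -> `"key" = 10001`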
@@ -12,7 +12,6 @@ import (
 func TestInfluxdbQueryBuilder(t *testing.T) {

     Convey("Influxdb query builder", t, func() {
-        builder := QueryBuilder{}

         qp1, _ := NewQueryPart("field", []string{"value"})
         qp2, _ := NewQueryPart("mean", []string{})
@@ -37,7 +36,7 @@ func TestInfluxdbQueryBuilder(t *testing.T) {
             Interval: "10s",
         }

-        rawQuery, err := builder.Build(query, queryContext)
+        rawQuery, err := query.Build(queryContext)
         So(err, ShouldBeNil)
         So(rawQuery, ShouldEqual, `SELECT mean("value") FROM "policy"."cpu" WHERE time > now() - 5m GROUP BY time(10s) fill(null)`)
     })
@@ -51,23 +50,22 @@ func TestInfluxdbQueryBuilder(t *testing.T) {
             Interval: "5s",
         }

-        rawQuery, err := builder.Build(query, queryContext)
+        rawQuery, err := query.Build(queryContext)
         So(err, ShouldBeNil)
         So(rawQuery, ShouldEqual, `SELECT mean("value") FROM "cpu" WHERE "hostname" = 'server1' OR "hostname" = 'server2' AND time > now() - 5m GROUP BY time(5s), "datacenter" fill(null)`)
     })

     Convey("can render time range", func() {
-        query := Query{}
-        builder := &QueryBuilder{}
         Convey("render from: 2h to now-1h", func() {
+            query := Query{}
             queryContext := &tsdb.QueryContext{TimeRange: tsdb.NewTimeRange("2h", "now-1h")}
-            So(builder.renderTimeFilter(&query, queryContext), ShouldEqual, "time > now() - 2h and time < now() - 1h")
+            So(query.renderTimeFilter(queryContext), ShouldEqual, "time > now() - 2h and time < now() - 1h")
         })

         Convey("render from: 10m", func() {
             queryContext := &tsdb.QueryContext{TimeRange: tsdb.NewTimeRange("10m", "now")}
-            So(builder.renderTimeFilter(&query, queryContext), ShouldEqual, "time > now() - 10m")
+            So(query.renderTimeFilter(queryContext), ShouldEqual, "time > now() - 10m")
         })
     })

@@ -79,29 +77,60 @@ func TestInfluxdbQueryBuilder(t *testing.T) {
             GroupBy:  []*QueryPart{groupBy1, groupBy3},
             Interval: "10s",
             RawQuery: "Raw query",
+            UseRawQuery: true,
         }

-        rawQuery, err := builder.Build(query, queryContext)
+        rawQuery, err := query.Build(queryContext)
         So(err, ShouldBeNil)
         So(rawQuery, ShouldEqual, `Raw query`)
     })

-    Convey("can render regex tags", func() {
-        query := &Query{Tags: []*Tag{&Tag{Operator: "=~", Value: "value", Key: "key"}}}
+    Convey("can render normal tags without operator", func() {
+        query := &Query{Tags: []*Tag{&Tag{Operator: "", Value: `value`, Key: "key"}}}

-        So(strings.Join(builder.renderTags(query), ""), ShouldEqual, `"key" =~ value`)
+        So(strings.Join(query.renderTags(), ""), ShouldEqual, `"key" = 'value'`)
     })

+    Convey("can render regex tags without operator", func() {
+        query := &Query{Tags: []*Tag{&Tag{Operator: "", Value: `/value/`, Key: "key"}}}
+
+        So(strings.Join(query.renderTags(), ""), ShouldEqual, `"key" =~ /value/`)
+    })
+
+    Convey("can render regex tags", func() {
+        query := &Query{Tags: []*Tag{&Tag{Operator: "=~", Value: `/value/`, Key: "key"}}}
+
+        So(strings.Join(query.renderTags(), ""), ShouldEqual, `"key" =~ /value/`)
+    })
+
     Convey("can render number tags", func() {
-        query := &Query{Tags: []*Tag{&Tag{Operator: "=", Value: "1", Key: "key"}}}
+        query := &Query{Tags: []*Tag{&Tag{Operator: "=", Value: "10001", Key: "key"}}}

-        So(strings.Join(builder.renderTags(query), ""), ShouldEqual, `"key" = 1`)
+        So(strings.Join(query.renderTags(), ""), ShouldEqual, `"key" = 10001`)
     })

+    Convey("can render number tags with decimals", func() {
+        query := &Query{Tags: []*Tag{&Tag{Operator: "=", Value: "10001.1", Key: "key"}}}
+
+        So(strings.Join(query.renderTags(), ""), ShouldEqual, `"key" = 10001.1`)
+    })
+
     Convey("can render string tags", func() {
         query := &Query{Tags: []*Tag{&Tag{Operator: "=", Value: "value", Key: "key"}}}

-        So(strings.Join(builder.renderTags(query), ""), ShouldEqual, `"key" = 'value'`)
+        So(strings.Join(query.renderTags(), ""), ShouldEqual, `"key" = 'value'`)
     })

+    Convey("can render regular measurement", func() {
+        query := &Query{Measurement: `apa`, Policy: "policy"}
+
+        So(query.renderMeasurement(), ShouldEqual, ` FROM "policy"."apa"`)
+    })
+
+    Convey("can render regexp measurement", func() {
+        query := &Query{Measurement: `/apa/`, Policy: "policy"}
+
+        So(query.renderMeasurement(), ShouldEqual, ` FROM "policy"./apa/`)
+    })
     })
 }
@@ -3,6 +3,8 @@ package influxdb
 import (
     "encoding/json"
     "fmt"
+    "regexp"
+    "strconv"
     "strings"

     "github.com/grafana/grafana/pkg/tsdb"
@@ -11,17 +13,25 @@ import (

 type ResponseParser struct{}

-func (rp *ResponseParser) Parse(response *Response) *tsdb.QueryResult {
+var (
+    legendFormat *regexp.Regexp
+)
+
+func init() {
+    legendFormat = regexp.MustCompile(`\[\[(\w+?)*\]\]*|\$\s*(\w+?)*`)
+}
+
+func (rp *ResponseParser) Parse(response *Response, query *Query) *tsdb.QueryResult {
     queryRes := tsdb.NewQueryResult()

     for _, result := range response.Results {
-        queryRes.Series = append(queryRes.Series, rp.transformRows(result.Series, queryRes)...)
+        queryRes.Series = append(queryRes.Series, rp.transformRows(result.Series, queryRes, query)...)
     }

     return queryRes
 }

-func (rp *ResponseParser) transformRows(rows []Row, queryResult *tsdb.QueryResult) tsdb.TimeSeriesSlice {
+func (rp *ResponseParser) transformRows(rows []Row, queryResult *tsdb.QueryResult, query *Query) tsdb.TimeSeriesSlice {
     var result tsdb.TimeSeriesSlice

     for _, row := range rows {
@@ -38,7 +48,7 @@ func (rp *ResponseParser) transformRows(rows []Row, queryResult *tsdb.QueryResul
             }
         }
         result = append(result, &tsdb.TimeSeries{
-            Name:   rp.formatSerieName(row, column),
+            Name:   rp.formatSerieName(row, column, query),
             Points: points,
         })
     }
@@ -47,7 +57,48 @@ func (rp *ResponseParser) transformRows(rows []Row, queryResult *tsdb.QueryResul
     return result
 }

-func (rp *ResponseParser) formatSerieName(row Row, column string) string {
+func (rp *ResponseParser) formatSerieName(row Row, column string, query *Query) string {
+    if query.Alias == "" {
+        return rp.buildSerieNameFromQuery(row, column)
+    }
+
+    nameSegment := strings.Split(row.Name, ".")
+
+    result := legendFormat.ReplaceAllFunc([]byte(query.Alias), func(in []byte) []byte {
+        aliasFormat := string(in)
+        aliasFormat = strings.Replace(aliasFormat, "[[", "", 1)
+        aliasFormat = strings.Replace(aliasFormat, "]]", "", 1)
+        aliasFormat = strings.Replace(aliasFormat, "$", "", 1)
+
+        if aliasFormat == "m" || aliasFormat == "measurement" {
+            return []byte(query.Measurement)
+        }
+        if aliasFormat == "col" {
+            return []byte(column)
+        }
+
+        pos, err := strconv.Atoi(aliasFormat)
+        if err == nil && len(nameSegment) >= pos {
+            return []byte(nameSegment[pos])
+        }
+
+        if !strings.HasPrefix(aliasFormat, "tag_") {
+            return in
+        }
+
+        tagKey := strings.Replace(aliasFormat, "tag_", "", 1)
+        tagValue, exist := row.Tags[tagKey]
+        if exist {
+            return []byte(tagValue)
+        }
+
+        return in
+    })
+
+    return string(result)
+}
+
+func (rp *ResponseParser) buildSerieNameFromQuery(row Row, column string) string {
     var tags []string

     for k, v := range row.Tags {
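Note: formatSerieName understands both $name and [[name]] placeholders: $m / $measurement expand to the query measurement, $col to the result column, $tag_<key> to a row tag, and a bare number selects a dot-separated segment of the series name; unknown placeholders are left untouched. An illustrative call, with values chosen to match the alias tests below:

    // Illustration only; row carries Name "cpu.upc", column "mean", tag datacenter=America.
    func exampleAlias(rp *ResponseParser, row Row) string {
        query := &Query{Alias: "alias $m $col $tag_datacenter $1", Measurement: "cpu.upc"}
        // Returns "alias cpu.upc mean America upc"; the [[...]] forms behave identically.
        return rp.formatSerieName(row, "mean", query)
    }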
@@ -4,56 +4,161 @@ import (
"encoding/json"
"testing"

"github.com/grafana/grafana/pkg/setting"
. "github.com/smartystreets/goconvey/convey"
)

func TestInfluxdbResponseParser(t *testing.T) {
Convey("Influxdb response parser", t, func() {
Convey("Response parser", func() {
parser := &ResponseParser{}

parser := &ResponseParser{}
setting.NewConfigContext(&setting.CommandLineArgs{
HomePath: "../../../",
})

response := &Response{
Results: []Result{
Result{
Series: []Row{
{
Name: "cpu",
Columns: []string{"time", "mean", "sum"},
Tags: map[string]string{"datacenter": "America"},
Values: [][]interface{}{
{json.Number("111"), json.Number("222"), json.Number("333")},
{json.Number("111"), json.Number("222"), json.Number("333")},
{json.Number("111"), json.Number("null"), json.Number("333")},
response := &Response{
Results: []Result{
Result{
Series: []Row{
{
Name: "cpu",
Columns: []string{"time", "mean", "sum"},
Tags: map[string]string{"datacenter": "America"},
Values: [][]interface{}{
{json.Number("111"), json.Number("222"), json.Number("333")},
{json.Number("111"), json.Number("222"), json.Number("333")},
{json.Number("111"), json.Number("null"), json.Number("333")},
},
},
},
},
},
},
}
}

result := parser.Parse(response)
query := &Query{}

Convey("can parse all series", func() {
So(len(result.Series), ShouldEqual, 2)
result := parser.Parse(response, query)

Convey("can parse all series", func() {
So(len(result.Series), ShouldEqual, 2)
})

Convey("can parse all points", func() {
So(len(result.Series[0].Points), ShouldEqual, 3)
So(len(result.Series[1].Points), ShouldEqual, 3)
})

Convey("can parse multi row result", func() {
So(result.Series[0].Points[1][0].Float64, ShouldEqual, float64(222))
So(result.Series[1].Points[1][0].Float64, ShouldEqual, float64(333))
})

Convey("can parse null points", func() {
So(result.Series[0].Points[2][0].Valid, ShouldBeFalse)
})

Convey("can format serie names", func() {
So(result.Series[0].Name, ShouldEqual, "cpu.mean { datacenter: America }")
So(result.Series[1].Name, ShouldEqual, "cpu.sum { datacenter: America }")
})
})

Convey("can parse all points", func() {
So(len(result.Series[0].Points), ShouldEqual, 3)
So(len(result.Series[1].Points), ShouldEqual, 3)
})
Convey("Response parser with alias", func() {
parser := &ResponseParser{}

Convey("can parse multi row result", func() {
So(result.Series[0].Points[1][0].Float64, ShouldEqual, float64(222))
So(result.Series[1].Points[1][0].Float64, ShouldEqual, float64(333))
})
response := &Response{
Results: []Result{
Result{
Series: []Row{
{
Name: "cpu.upc",
Columns: []string{"time", "mean", "sum"},
Tags: map[string]string{"datacenter": "America"},
Values: [][]interface{}{
{json.Number("111"), json.Number("222"), json.Number("333")},
},
},
},
},
},
}

Convey("can parse null points", func() {
So(result.Series[0].Points[2][0].Valid, ShouldBeFalse)
})
Convey("$ alias", func() {
Convey("simple alias", func() {
query := &Query{Alias: "serie alias"}
result := parser.Parse(response, query)

Convey("can format serie names", func() {
So(result.Series[0].Name, ShouldEqual, "cpu.mean { datacenter: America }")
So(result.Series[1].Name, ShouldEqual, "cpu.sum { datacenter: America }")
So(result.Series[0].Name, ShouldEqual, "serie alias")
})

Convey("measurement alias", func() {
query := &Query{Alias: "alias $m $measurement", Measurement: "10m"}
result := parser.Parse(response, query)

So(result.Series[0].Name, ShouldEqual, "alias 10m 10m")
})

Convey("column alias", func() {
query := &Query{Alias: "alias $col", Measurement: "10m"}
result := parser.Parse(response, query)

So(result.Series[0].Name, ShouldEqual, "alias mean")
So(result.Series[1].Name, ShouldEqual, "alias sum")
})

Convey("tag alias", func() {
query := &Query{Alias: "alias $tag_datacenter"}
result := parser.Parse(response, query)

So(result.Series[0].Name, ShouldEqual, "alias America")
})

Convey("segment alias", func() {
query := &Query{Alias: "alias $1"}
result := parser.Parse(response, query)

So(result.Series[0].Name, ShouldEqual, "alias upc")
})

Convey("segment position out of bound", func() {
query := &Query{Alias: "alias $5"}
result := parser.Parse(response, query)

So(result.Series[0].Name, ShouldEqual, "alias $5")
})
})

Convey("[[]] alias", func() {
Convey("simple alias", func() {
query := &Query{Alias: "serie alias"}
result := parser.Parse(response, query)

So(result.Series[0].Name, ShouldEqual, "serie alias")
})

Convey("measurement alias", func() {
query := &Query{Alias: "alias [[m]] [[measurement]]", Measurement: "10m"}
result := parser.Parse(response, query)

So(result.Series[0].Name, ShouldEqual, "alias 10m 10m")
})

Convey("column alias", func() {
query := &Query{Alias: "alias [[col]]", Measurement: "10m"}
result := parser.Parse(response, query)

So(result.Series[0].Name, ShouldEqual, "alias mean")
So(result.Series[1].Name, ShouldEqual, "alias sum")
})

Convey("tag alias", func() {
query := &Query{Alias: "alias [[tag_datacenter]]"}
result := parser.Parse(response, query)

So(result.Series[0].Name, ShouldEqual, "alias America")
})
})
})
})
}
@@ -24,12 +24,14 @@ func NewPrometheusExecutor(dsInfo *tsdb.DataSourceInfo) tsdb.Executor {
 }

 var (
-    plog log.Logger
+    plog         log.Logger
+    legendFormat *regexp.Regexp
 )

 func init() {
     plog = log.New("tsdb.prometheus")
     tsdb.RegisterExecutor("prometheus", NewPrometheusExecutor)
+    legendFormat = regexp.MustCompile(`\{\{\s*(.+?)\s*\}\}`)
 }

 func (e *PrometheusExecutor) getClient() (prometheus.QueryAPI, error) {
@@ -79,13 +81,11 @@ func (e *PrometheusExecutor) Execute(ctx context.Context, queries tsdb.QuerySlic
 }

 func formatLegend(metric pmodel.Metric, query *PrometheusQuery) string {
-    reg, _ := regexp.Compile(`\{\{\s*(.+?)\s*\}\}`)
-
     if query.LegendFormat == "" {
         return metric.String()
     }

-    result := reg.ReplaceAllFunc([]byte(query.LegendFormat), func(in []byte) []byte {
+    result := legendFormat.ReplaceAllFunc([]byte(query.LegendFormat), func(in []byte) []byte {
         labelName := strings.Replace(string(in), "{{", "", 1)
         labelName = strings.Replace(labelName, "}}", "", 1)
         labelName = strings.TrimSpace(labelName)
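Note: the Prometheus change mirrors the InfluxDB one above: the legend regexp is compiled once in init() instead of on every formatLegend call, and the per-call regexp.Compile disappears. A sketch of the unchanged observable behavior; the label and format values are invented.

    // Illustration only.
    func exampleLegend() string {
        metric := pmodel.Metric{"instance": "node-1:9100"}
        query := &PrometheusQuery{LegendFormat: "{{ instance }} cpu"}
        return formatLegend(metric, query) // roughly "node-1:9100 cpu"
    }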