Merge branch 'master' into alerting_mqe

CHANGELOG.md
@@ -1,4 +1,11 @@
-# 4.0-pre (unreleased)
+# 4.0-beta2 (unreleased)
+
+### Bugfixes
+* **Graph Panel**: Log base scale on right Y-axis had no effect, max value calc was not applied, [#6534](https://github.com/grafana/grafana/issues/6534)
+* **Graph Panel**: Bar width if bars was only used in series override, [#6528](https://github.com/grafana/grafana/issues/6528)
+* **UI/Browser**: Fixed issue with page/view header gradient border not showing in Safari, [#6530](https://github.com/grafana/grafana/issues/6530)
+
 # 4.0-beta1 (2016-11-09)

 ### Enhancements
 * **Login**: Adds option to disable username/password logins, closes [#4674](https://github.com/grafana/grafana/issues/4674)
@@ -24,7 +31,7 @@
 * **SystemD**: Change systemd description, closes [#5971](https://github.com/grafana/grafana/pull/5971)
 * **lodash upgrade**: Upgraded lodash from 2.4.2 to 4.15.0, this contains a number of breaking changes that could affect plugins, closes [#6021](https://github.com/grafana/grafana/pull/6021)

-### Bugfixes
+### Bug fixes
 * **Table Panel**: Fixed problem when switching to Mixed datasource in metrics tab, fixes [#5999](https://github.com/grafana/grafana/pull/5999)
 * **Playlist**: Fixed problem with play order not matching order defined in playlist, fixes [#5467](https://github.com/grafana/grafana/pull/5467)
 * **Graph panel**: Fixed problem with auto decimals on y axis when datamin=datamax, fixes [#6070](https://github.com/grafana/grafana/pull/6070)
@@ -5,7 +5,7 @@ machine:
   GOPATH: "/home/ubuntu/.go_workspace"
   ORG_PATH: "github.com/grafana"
   REPO_PATH: "${ORG_PATH}/grafana"
-  GODIST: "go1.7.1.linux-amd64.tar.gz"
+  GODIST: "go1.7.3.linux-amd64.tar.gz"
 post:
   - mkdir -p download
   - test -e download/$GODIST || curl -o download/$GODIST https://storage.googleapis.com/golang/$GODIST
@@ -229,7 +229,7 @@ auth_url = https://accounts.google.com/o/oauth2/auth
 token_url = https://accounts.google.com/o/oauth2/token
 api_url = https://www.googleapis.com/oauth2/v1/userinfo
 allowed_domains =
 hosted_domain =

 #################################### Grafana.net Auth ####################
 [auth.grafananet]
@@ -390,21 +390,6 @@ global_api_key = -1
 global_session = -1

-#################################### Alerting ############################
-# docs about alerting can be found in /docs/sources/alerting/
-#                __.-/|
-#                \`o_O'
-#                  =( )=  +----------------------------+
-#                    U|   | Alerting is still in alpha |
-#          /\  /\  / |    +----------------------------+
-#         ) /^\) ^\/ _)\     |
-#         )   /^\/   _) \    |
-#         )   _ /  / _)  \___|_
-#     /\  )/\/ ||  | )_)\___,|))
-#    <  >      |(,,) )__)    |
-#     ||      /    \)___)\
-#     | \____(      )___) )____
-#      \______(_______;;;)__;;;)
 [alerting]
 # Makes it possible to turn off alert rule execution.
 execute_alerts = true
@@ -339,21 +339,6 @@
 ;path = /var/lib/grafana/dashboards

-#################################### Alerting ######################################
-# docs about alerting can be found in /docs/sources/alerting/
-#                __.-/|
-#                \`o_O'
-#                  =( )=  +----------------------------+
-#                    U|   | Alerting is still in alpha |
-#          /\  /\  / |    +----------------------------+
-#         ) /^\) ^\/ _)\     |
-#         )   /^\/   _) \    |
-#         )   _ /  / _)  \___|_
-#     /\  )/\/ ||  | )_)\___,|))
-#    <  >      |(,,) )__)    |
-#     ||      /    \)___)\
-#     | \____(      )___) )____
-#      \______(_______;;;)__;;;)
 [alerting]
 # Makes it possible to turn off alert rule execution.
 ;execute_alerts = true
@@ -118,7 +118,7 @@ SHOW TAG VALUES WITH KEY = "hostname" WHERE region =~ /$region/

 > Always use `regex values` or `regex wildcard` for All format or multi select format.

-![](/img/v2/influxdb_templating_query.png)
+![](/img/v4/templating_simple_ex1.png)

 ## Annotations
 Annotations allow you to overlay rich event information on top of graphs.
@@ -4,7 +4,7 @@ description = "Feature & improvement highlights for Grafana v3.1"
 keywords = ["grafana", "new", "documentation", "3.1"]
 type = "docs"
 [menu.docs]
-name = "Version 3.1 (Latest)"
+name = "Version 3.1"
 identifier = "v3.1"
 parent = "whatsnew"
 weight = 1
@@ -4,7 +4,7 @@ description = "Feature & improvement highlights for Grafana v4.0"
 keywords = ["grafana", "new", "documentation", "4.0"]
 type = "docs"
 [menu.docs]
-name = "Version 4.0"
+name = "Version 4.0 (Latest)"
 identifier = "v4.0"
 parent = "whatsnew"
 weight = -1
@@ -85,6 +85,34 @@ page_keywords: grafana, admin, http, api, documentation, orgs, organisation
      }
    }

## Create Organisation

`POST /api/org`

**Example Request**:

    POST /api/org HTTP/1.1
    Accept: application/json
    Content-Type: application/json
    Authorization: Bearer eyJrIjoiT0tTcG1pUlY2RnVKZTFVaDFsNFZXdE9ZWmNrMkZYbk

    {
      "name":"New Org."
    }

**Example Response**:

    HTTP/1.1 200
    Content-Type: application/json

    {
      "orgId":"1",
      "message":"Organization created"
    }
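
A minimal Go client sketch (not part of the docs above) that exercises the create call; the host, port, and token are placeholders:

    package main

    import (
    	"bytes"
    	"fmt"
    	"net/http"
    )

    func main() {
    	body := bytes.NewBufferString(`{"name":"New Org."}`)
    	req, _ := http.NewRequest("POST", "http://localhost:3000/api/org", body)
    	req.Header.Set("Content-Type", "application/json")
    	req.Header.Set("Authorization", "Bearer YOUR_API_KEY") // placeholder token

    	resp, err := http.DefaultClient.Do(req)
    	if err != nil {
    		panic(err)
    	}
    	defer resp.Body.Close()
    	// Expect "200 OK" with {"orgId":..., "message":"Organization created"}
    	fmt.Println(resp.Status)
    }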

## Update current Organisation

`PUT /api/org`
@@ -413,7 +413,7 @@ Set to `true` to enable LDAP integration (default: `false`)
 ### config_file
 Path to the LDAP specific configuration file (default: `/etc/grafana/ldap.toml`)

-> For details on LDAP Configuration, go to the [LDAP Integration](ldap.md) page.
+> For details on LDAP Configuration, go to the [LDAP Integration]({{< relref "ldap.md" >}}) page.

<hr>
@@ -101,6 +101,7 @@ func sendUsageStats() {
 	metrics["stats.plugins.apps.count"] = len(plugins.Apps)
 	metrics["stats.plugins.panels.count"] = len(plugins.Panels)
 	metrics["stats.plugins.datasources.count"] = len(plugins.DataSources)
+	metrics["stats.alerts.count"] = statsQuery.Result.AlertCount

 	dsStats := m.GetDataSourceStatsQuery{}
 	if err := bus.Dispatch(&dsStats); err != nil {
@@ -5,6 +5,7 @@ type SystemStats struct {
 	UserCount     int64
 	OrgCount      int64
 	PlaylistCount int64
+	AlertCount    int64
 }

 type DataSourceStats struct {
@@ -29,6 +30,7 @@ type AdminStats struct {
 	DataSourceCount int `json:"data_source_count"`
 	PlaylistCount   int `json:"playlist_count"`
 	StarredDbCount  int `json:"starred_db_count"`
+	AlertCount      int `json:"alert_count"`
 }

 type GetAdminStatsQuery struct {
@@ -26,11 +26,32 @@ type Rule struct {
 }

 type ValidationError struct {
 	Reason      string
+	Err         error
+	Alertid     int64
+	DashboardId int64
+	PanelId     int64
 }

 func (e ValidationError) Error() string {
-	return e.Reason
+	extraInfo := ""
+	if e.Alertid != 0 {
+		extraInfo = fmt.Sprintf("%s AlertId: %v", extraInfo, e.Alertid)
+	}
+
+	if e.PanelId != 0 {
+		extraInfo = fmt.Sprintf("%s PanelId: %v ", extraInfo, e.PanelId)
+	}
+
+	if e.DashboardId != 0 {
+		extraInfo = fmt.Sprintf("%s DashboardId: %v", extraInfo, e.DashboardId)
+	}
+
+	if e.Err != nil {
+		return fmt.Sprintf("%s %s%s", e.Err.Error(), e.Reason, extraInfo)
+	}
+
+	return fmt.Sprintf("Failed to extract alert.Reason: %s %s", e.Reason, extraInfo)
 }
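
For context, a small standalone sketch (not in the diff) of what the enriched ValidationError now reports; the IDs below are made up:

    package main

    import "fmt"

    // Trimmed copy of the ValidationError above, kept only to show the output shape.
    type ValidationError struct {
    	Reason      string
    	Alertid     int64
    	DashboardId int64
    	PanelId     int64
    }

    func (e ValidationError) Error() string {
    	extraInfo := ""
    	if e.Alertid != 0 {
    		extraInfo = fmt.Sprintf("%s AlertId: %v", extraInfo, e.Alertid)
    	}
    	if e.PanelId != 0 {
    		extraInfo = fmt.Sprintf("%s PanelId: %v ", extraInfo, e.PanelId)
    	}
    	if e.DashboardId != 0 {
    		extraInfo = fmt.Sprintf("%s DashboardId: %v", extraInfo, e.DashboardId)
    	}
    	return fmt.Sprintf("Failed to extract alert.Reason: %s %s", e.Reason, extraInfo)
    }

    func main() {
    	err := ValidationError{Reason: "Invalid notification schema", Alertid: 7, DashboardId: 3, PanelId: 2}
    	// Prints: Failed to extract alert.Reason: Invalid notification schema  AlertId: 7 PanelId: 2  DashboardId: 3
    	fmt.Println(err.Error())
    }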

var (
@@ -83,7 +104,7 @@ func NewRuleFromDBAlert(ruleDef *m.Alert) (*Rule, error) {
 	for _, v := range ruleDef.Settings.Get("notifications").MustArray() {
 		jsonModel := simplejson.NewFromAny(v)
 		if id, err := jsonModel.Get("id").Int64(); err != nil {
-			return nil, ValidationError{Reason: "Invalid notification schema"}
+			return nil, ValidationError{Reason: "Invalid notification schema", DashboardId: model.DashboardId, Alertid: model.Id, PanelId: model.PanelId}
 		} else {
 			model.Notifications = append(model.Notifications, id)
 		}
@@ -93,10 +114,10 @@ func NewRuleFromDBAlert(ruleDef *m.Alert) (*Rule, error) {
 		conditionModel := simplejson.NewFromAny(condition)
 		conditionType := conditionModel.Get("type").MustString()
 		if factory, exist := conditionFactories[conditionType]; !exist {
-			return nil, ValidationError{Reason: "Unknown alert condition: " + conditionType}
+			return nil, ValidationError{Reason: "Unknown alert condition: " + conditionType, DashboardId: model.DashboardId, Alertid: model.Id, PanelId: model.PanelId}
 		} else {
 			if queryCondition, err := factory(conditionModel, index); err != nil {
-				return nil, err
+				return nil, ValidationError{Err: err, DashboardId: model.DashboardId, Alertid: model.Id, PanelId: model.PanelId}
 			} else {
 				model.Conditions = append(model.Conditions, queryCondition)
 			}
@@ -39,7 +39,11 @@ func GetSystemStats(query *m.GetSystemStatsQuery) error {
 		(
 		SELECT COUNT(*)
 		FROM ` + dialect.Quote("playlist") + `
-		) AS playlist_count
+		) AS playlist_count,
+		(
+		SELECT COUNT(*)
+		FROM ` + dialect.Quote("alert") + `
+		) AS alert_count
 		`

 	var stats m.SystemStats
@@ -85,7 +89,11 @@ func GetAdminStats(query *m.GetAdminStatsQuery) error {
 		(
 		SELECT COUNT(DISTINCT ` + dialect.Quote("dashboard_id") + ` )
 		FROM ` + dialect.Quote("star") + `
-		) AS starred_db_count
+		) AS starred_db_count,
+		(
+		SELECT COUNT(*)
+		FROM ` + dialect.Quote("alert") + `
+		) AS alert_count
 		`

 	var stats m.AdminStats
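
For illustration only (not from the diff), a sketch of the shape of the statement these functions now assemble; the real code quotes identifiers through dialect.Quote and selects several more counts:

    package main

    import "fmt"

    func main() {
    	// Assumed subset of the full query: each table gets its own scalar subquery,
    	// so the whole stats snapshot comes back in a single row.
    	rawSql := `SELECT
    	  (SELECT COUNT(*) FROM playlist) AS playlist_count,
    	  (SELECT COUNT(*) FROM alert) AS alert_count`
    	fmt.Println(rawSql)
    }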
@@ -18,7 +18,6 @@ import (
 type InfluxDBExecutor struct {
 	*tsdb.DataSourceInfo
 	QueryParser    *InfluxdbQueryParser
-	QueryBuilder   *QueryBuilder
 	ResponseParser *ResponseParser
 }

@@ -26,7 +25,6 @@ func NewInfluxDBExecutor(dsInfo *tsdb.DataSourceInfo) tsdb.Executor {
 	return &InfluxDBExecutor{
 		DataSourceInfo: dsInfo,
 		QueryParser:    &InfluxdbQueryParser{},
-		QueryBuilder:   &QueryBuilder{},
 		ResponseParser: &ResponseParser{},
 	}
 }
@@ -51,11 +49,16 @@ func (e *InfluxDBExecutor) Execute(ctx context.Context, queries tsdb.QuerySlice,
 		return result.WithError(err)
 	}

-	if setting.Env == setting.DEV {
-		glog.Debug("Influxdb query", "raw query", query)
+	rawQuery, err := query.Build(context)
+	if err != nil {
+		return result.WithError(err)
 	}

-	req, err := e.createRequest(query)
+	if setting.Env == setting.DEV {
+		glog.Debug("Influxdb query", "raw query", rawQuery)
+	}
+
+	req, err := e.createRequest(rawQuery)
 	if err != nil {
 		return result.WithError(err)
 	}
@@ -79,29 +82,28 @@ func (e *InfluxDBExecutor) Execute(ctx context.Context, queries tsdb.QuerySlice,
 		return result.WithError(err)
 	}

 	if response.Err != nil {
 		return result.WithError(response.Err)
 	}

 	result.QueryResults = make(map[string]*tsdb.QueryResult)
-	result.QueryResults["A"] = e.ResponseParser.Parse(&response)
+	result.QueryResults["A"] = e.ResponseParser.Parse(&response, query)

 	return result
 }

-func (e *InfluxDBExecutor) getQuery(queries tsdb.QuerySlice, context *tsdb.QueryContext) (string, error) {
+func (e *InfluxDBExecutor) getQuery(queries tsdb.QuerySlice, context *tsdb.QueryContext) (*Query, error) {
 	for _, v := range queries {
 		query, err := e.QueryParser.Parse(v.Model, e.DataSourceInfo)
 		if err != nil {
-			return "", err
+			return nil, err
 		}

-		rawQuery, err := e.QueryBuilder.Build(query, context)
-		if err != nil {
-			return "", err
-		}
-
-		return rawQuery, nil
+		return query, nil
 	}

-	return "", fmt.Errorf("query request contains no queries")
+	return nil, fmt.Errorf("query request contains no queries")
 }

 func (e *InfluxDBExecutor) createRequest(query string) (*http.Request, error) {
@@ -12,7 +12,8 @@ type InfluxdbQueryParser struct{}
 func (qp *InfluxdbQueryParser) Parse(model *simplejson.Json, dsInfo *tsdb.DataSourceInfo) (*Query, error) {
 	policy := model.Get("policy").MustString("default")
 	rawQuery := model.Get("query").MustString("")
-	interval := model.Get("interval").MustString("")
+	useRawQuery := model.Get("rawQuery").MustBool(false)
+	alias := model.Get("alias").MustString("")

 	measurement := model.Get("measurement").MustString("")

@@ -36,7 +37,8 @@ func (qp *InfluxdbQueryParser) Parse(model *simplejson.Json, dsInfo *tsdb.DataSo
 		return nil, err
 	}

-	if interval == "" {
+	interval := model.Get("interval").MustString("")
+	if interval == "" && dsInfo.JsonData != nil {
 		dsInterval := dsInfo.JsonData.Get("timeInterval").MustString("")
 		if dsInterval != "" {
 			interval = dsInterval
@@ -52,6 +54,8 @@ func (qp *InfluxdbQueryParser) Parse(model *simplejson.Json, dsInfo *tsdb.DataSo
 		Selects:     selects,
 		RawQuery:    rawQuery,
 		Interval:    interval,
+		Alias:       alias,
+		UseRawQuery: useRawQuery,
 	}, nil
 }
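
A standalone sketch (assuming the bitly/go-simplejson API that grafana's vendored simplejson mirrors) of the new interval fallback: the panel's own interval wins, otherwise the datasource's timeInterval is used:

    package main

    import (
    	"fmt"

    	simplejson "github.com/bitly/go-simplejson"
    )

    func pickInterval(model, dsJSON *simplejson.Json) string {
    	interval := model.Get("interval").MustString("")
    	if interval == "" && dsJSON != nil {
    		if dsInterval := dsJSON.Get("timeInterval").MustString(""); dsInterval != "" {
    			interval = dsInterval
    		}
    	}
    	return interval
    }

    func main() {
    	model := simplejson.New()
    	dsJSON := simplejson.New()
    	dsJSON.Set("timeInterval", ">20s")
    	fmt.Println(pickInterval(model, dsJSON)) // ">20s": falls back to the datasource default
    }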
@@ -90,6 +90,7 @@ func TestInfluxdbQueryParser(t *testing.T) {
 				}
-			]
+			],
+			"alias": "serie alias",
 			"tags": [
 				{
 					"key": "datacenter",
@@ -115,6 +116,7 @@ func TestInfluxdbQueryParser(t *testing.T) {
 			So(len(res.Selects), ShouldEqual, 3)
 			So(len(res.Tags), ShouldEqual, 2)
 			So(res.Interval, ShouldEqual, ">20s")
+			So(res.Alias, ShouldEqual, "serie alias")
 		})

 		Convey("can part raw query json model", func() {
@@ -8,6 +8,8 @@ type Query struct {
 	GroupBy     []*QueryPart
 	Selects     []*Select
 	RawQuery    string
+	UseRawQuery bool
+	Alias       string

 	Interval string
 }
@@ -5,31 +5,36 @@ import (
 	"strconv"
 	"strings"

+	"regexp"
+
 	"github.com/grafana/grafana/pkg/tsdb"
 )

-type QueryBuilder struct{}
+var (
+	regexpOperatorPattern    *regexp.Regexp = regexp.MustCompile(`^\/.*\/$`)
+	regexpMeasurementPattern *regexp.Regexp = regexp.MustCompile(`^\/.*\/$`)
+)

-func (qb *QueryBuilder) Build(query *Query, queryContext *tsdb.QueryContext) (string, error) {
-	if query.RawQuery != "" {
+func (query *Query) Build(queryContext *tsdb.QueryContext) (string, error) {
+	if query.UseRawQuery && query.RawQuery != "" {
 		q := query.RawQuery

-		q = strings.Replace(q, "$timeFilter", qb.renderTimeFilter(query, queryContext), 1)
+		q = strings.Replace(q, "$timeFilter", query.renderTimeFilter(queryContext), 1)
 		q = strings.Replace(q, "$interval", tsdb.CalculateInterval(queryContext.TimeRange), 1)

 		return q, nil
 	}

-	res := qb.renderSelectors(query, queryContext)
-	res += qb.renderMeasurement(query)
-	res += qb.renderWhereClause(query)
-	res += qb.renderTimeFilter(query, queryContext)
-	res += qb.renderGroupBy(query, queryContext)
+	res := query.renderSelectors(queryContext)
+	res += query.renderMeasurement()
+	res += query.renderWhereClause()
+	res += query.renderTimeFilter(queryContext)
+	res += query.renderGroupBy(queryContext)

 	return res, nil
 }

-func (qb *QueryBuilder) renderTags(query *Query) []string {
+func (query *Query) renderTags() []string {
 	var res []string
 	for i, tag := range query.Tags {
 		str := ""
@@ -43,24 +48,34 @@ func (qb *QueryBuilder) renderTags(query *Query) []string {
 			str += " "
 		}

-		value := tag.Value
-		nValue, err := strconv.ParseFloat(tag.Value, 64)
-
-		if tag.Operator == "=~" || tag.Operator == "!~" {
-			value = fmt.Sprintf("%s", value)
-		} else if err == nil {
-			value = fmt.Sprintf("%v", nValue)
-		} else {
-			value = fmt.Sprintf("'%s'", value)
+		// If the operator is missing we fall back to sensible defaults
+		if tag.Operator == "" {
+			if regexpOperatorPattern.Match([]byte(tag.Value)) {
+				tag.Operator = "=~"
+			} else {
+				tag.Operator = "="
+			}
 		}

-		res = append(res, fmt.Sprintf(`%s"%s" %s %s`, str, tag.Key, tag.Operator, value))
+		textValue := ""
+		numericValue, err := strconv.ParseFloat(tag.Value, 64)
+
+		// quote value unless regex or number
+		if tag.Operator == "=~" || tag.Operator == "!~" {
+			textValue = tag.Value
+		} else if err == nil {
+			textValue = fmt.Sprintf("%v", numericValue)
+		} else {
+			textValue = fmt.Sprintf("'%s'", tag.Value)
+		}
+
+		res = append(res, fmt.Sprintf(`%s"%s" %s %s`, str, tag.Key, tag.Operator, textValue))
 	}

 	return res
 }
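
A minimal standalone sketch (not part of the diff) of the new operator fallback: values wrapped in slashes are treated as regexes and matched with =~, everything else defaults to =:

    package main

    import (
    	"fmt"
    	"regexp"
    )

    var regexpOperatorPattern = regexp.MustCompile(`^\/.*\/$`)

    func defaultOperator(value string) string {
    	if regexpOperatorPattern.MatchString(value) {
    		return "=~"
    	}
    	return "="
    }

    func main() {
    	fmt.Println(defaultOperator("/value/")) // "=~": slash-delimited values regex-match
    	fmt.Println(defaultOperator("value"))   // "=":  plain values use exact match
    }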

-func (qb *QueryBuilder) renderTimeFilter(query *Query, queryContext *tsdb.QueryContext) string {
+func (query *Query) renderTimeFilter(queryContext *tsdb.QueryContext) string {
 	from := "now() - " + queryContext.TimeRange.From
 	to := ""
@@ -71,7 +86,7 @@ func (qb *QueryBuilder) renderTimeFilter(query *Query, queryContext *tsdb.QueryC
 	return fmt.Sprintf("time > %s%s", from, to)
 }

-func (qb *QueryBuilder) renderSelectors(query *Query, queryContext *tsdb.QueryContext) string {
+func (query *Query) renderSelectors(queryContext *tsdb.QueryContext) string {
 	res := "SELECT "

 	var selectors []string
@@ -87,19 +102,26 @@ func (qb *QueryBuilder) renderSelectors(query *Query, queryContext *tsdb.QueryCo
 	return res + strings.Join(selectors, ", ")
 }

-func (qb *QueryBuilder) renderMeasurement(query *Query) string {
+func (query *Query) renderMeasurement() string {
 	policy := ""
 	if query.Policy == "" || query.Policy == "default" {
 		policy = ""
 	} else {
 		policy = `"` + query.Policy + `".`
 	}
-	return fmt.Sprintf(` FROM %s"%s"`, policy, query.Measurement)
+
+	measurement := query.Measurement
+
+	if !regexpMeasurementPattern.Match([]byte(measurement)) {
+		measurement = fmt.Sprintf(`"%s"`, measurement)
+	}
+
+	return fmt.Sprintf(` FROM %s%s`, policy, measurement)
 }
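
The same slash convention now applies to measurements; a quick runnable sketch (assuming the behaviour of the hunk above) of the resulting FROM clauses:

    package main

    import (
    	"fmt"
    	"regexp"
    )

    var regexpMeasurementPattern = regexp.MustCompile(`^\/.*\/$`)

    // renderFrom mirrors the new renderMeasurement: regex measurements stay bare,
    // plain names get double-quoted.
    func renderFrom(policy, measurement string) string {
    	p := ""
    	if policy != "" && policy != "default" {
    		p = `"` + policy + `".`
    	}
    	if !regexpMeasurementPattern.MatchString(measurement) {
    		measurement = fmt.Sprintf(`"%s"`, measurement)
    	}
    	return fmt.Sprintf(` FROM %s%s`, p, measurement)
    }

    func main() {
    	fmt.Println(renderFrom("policy", "cpu"))     //  FROM "policy"."cpu"
    	fmt.Println(renderFrom("policy", "/cpu.*/")) //  FROM "policy"./cpu.*/
    }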

-func (qb *QueryBuilder) renderWhereClause(query *Query) string {
+func (query *Query) renderWhereClause() string {
 	res := " WHERE "
-	conditions := qb.renderTags(query)
+	conditions := query.renderTags()
 	res += strings.Join(conditions, " ")
 	if len(conditions) > 0 {
 		res += " AND "
@@ -108,7 +130,7 @@ func (qb *QueryBuilder) renderWhereClause(query *Query) string {
 	return res
 }

-func (qb *QueryBuilder) renderGroupBy(query *Query, queryContext *tsdb.QueryContext) string {
+func (query *Query) renderGroupBy(queryContext *tsdb.QueryContext) string {
 	groupBy := ""
 	for i, group := range query.GroupBy {
 		if i == 0 {
@@ -12,7 +12,6 @@ import (
 func TestInfluxdbQueryBuilder(t *testing.T) {

 	Convey("Influxdb query builder", t, func() {
-		builder := QueryBuilder{}

 		qp1, _ := NewQueryPart("field", []string{"value"})
 		qp2, _ := NewQueryPart("mean", []string{})
@@ -37,7 +36,7 @@ func TestInfluxdbQueryBuilder(t *testing.T) {
 				Interval: "10s",
 			}

-			rawQuery, err := builder.Build(query, queryContext)
+			rawQuery, err := query.Build(queryContext)
 			So(err, ShouldBeNil)
 			So(rawQuery, ShouldEqual, `SELECT mean("value") FROM "policy"."cpu" WHERE time > now() - 5m GROUP BY time(10s) fill(null)`)
 		})
@@ -51,23 +50,22 @@ func TestInfluxdbQueryBuilder(t *testing.T) {
 				Interval: "5s",
 			}

-			rawQuery, err := builder.Build(query, queryContext)
+			rawQuery, err := query.Build(queryContext)
 			So(err, ShouldBeNil)
 			So(rawQuery, ShouldEqual, `SELECT mean("value") FROM "cpu" WHERE "hostname" = 'server1' OR "hostname" = 'server2' AND time > now() - 5m GROUP BY time(5s), "datacenter" fill(null)`)
 		})

 		Convey("can render time range", func() {
-			query := Query{}
-			builder := &QueryBuilder{}
 			Convey("render from: 2h to now-1h", func() {
+				query := Query{}
 				queryContext := &tsdb.QueryContext{TimeRange: tsdb.NewTimeRange("2h", "now-1h")}
-				So(builder.renderTimeFilter(&query, queryContext), ShouldEqual, "time > now() - 2h and time < now() - 1h")
+				So(query.renderTimeFilter(queryContext), ShouldEqual, "time > now() - 2h and time < now() - 1h")
 			})

 			Convey("render from: 10m", func() {
 				queryContext := &tsdb.QueryContext{TimeRange: tsdb.NewTimeRange("10m", "now")}
-				So(builder.renderTimeFilter(&query, queryContext), ShouldEqual, "time > now() - 10m")
+				So(query.renderTimeFilter(queryContext), ShouldEqual, "time > now() - 10m")
 			})
 		})

@@ -79,29 +77,60 @@ func TestInfluxdbQueryBuilder(t *testing.T) {
 				GroupBy:     []*QueryPart{groupBy1, groupBy3},
 				Interval:    "10s",
 				RawQuery:    "Raw query",
+				UseRawQuery: true,
 			}

-			rawQuery, err := builder.Build(query, queryContext)
+			rawQuery, err := query.Build(queryContext)
 			So(err, ShouldBeNil)
 			So(rawQuery, ShouldEqual, `Raw query`)
 		})

-		Convey("can render regex tags", func() {
-			query := &Query{Tags: []*Tag{&Tag{Operator: "=~", Value: "value", Key: "key"}}}
+		Convey("can render normal tags without operator", func() {
+			query := &Query{Tags: []*Tag{&Tag{Operator: "", Value: `value`, Key: "key"}}}

-			So(strings.Join(builder.renderTags(query), ""), ShouldEqual, `"key" =~ value`)
+			So(strings.Join(query.renderTags(), ""), ShouldEqual, `"key" = 'value'`)
 		})

+		Convey("can render regex tags without operator", func() {
+			query := &Query{Tags: []*Tag{&Tag{Operator: "", Value: `/value/`, Key: "key"}}}
+
+			So(strings.Join(query.renderTags(), ""), ShouldEqual, `"key" =~ /value/`)
+		})
+
+		Convey("can render regex tags", func() {
+			query := &Query{Tags: []*Tag{&Tag{Operator: "=~", Value: `/value/`, Key: "key"}}}
+
+			So(strings.Join(query.renderTags(), ""), ShouldEqual, `"key" =~ /value/`)
+		})
+
 		Convey("can render number tags", func() {
-			query := &Query{Tags: []*Tag{&Tag{Operator: "=", Value: "1", Key: "key"}}}
+			query := &Query{Tags: []*Tag{&Tag{Operator: "=", Value: "10001", Key: "key"}}}

-			So(strings.Join(builder.renderTags(query), ""), ShouldEqual, `"key" = 1`)
+			So(strings.Join(query.renderTags(), ""), ShouldEqual, `"key" = 10001`)
 		})

+		Convey("can render number tags with decimals", func() {
+			query := &Query{Tags: []*Tag{&Tag{Operator: "=", Value: "10001.1", Key: "key"}}}
+
+			So(strings.Join(query.renderTags(), ""), ShouldEqual, `"key" = 10001.1`)
+		})
+
 		Convey("can render string tags", func() {
 			query := &Query{Tags: []*Tag{&Tag{Operator: "=", Value: "value", Key: "key"}}}

-			So(strings.Join(builder.renderTags(query), ""), ShouldEqual, `"key" = 'value'`)
+			So(strings.Join(query.renderTags(), ""), ShouldEqual, `"key" = 'value'`)
 		})

+		Convey("can render regular measurement", func() {
+			query := &Query{Measurement: `apa`, Policy: "policy"}
+
+			So(query.renderMeasurement(), ShouldEqual, ` FROM "policy"."apa"`)
+		})
+
+		Convey("can render regexp measurement", func() {
+			query := &Query{Measurement: `/apa/`, Policy: "policy"}
+
+			So(query.renderMeasurement(), ShouldEqual, ` FROM "policy"./apa/`)
+		})
 	})
 }
@@ -3,6 +3,8 @@ package influxdb
 import (
 	"encoding/json"
 	"fmt"
+	"regexp"
+	"strconv"
 	"strings"

 	"github.com/grafana/grafana/pkg/tsdb"
@@ -11,17 +13,25 @@ import (

 type ResponseParser struct{}

-func (rp *ResponseParser) Parse(response *Response) *tsdb.QueryResult {
+var (
+	legendFormat *regexp.Regexp
+)
+
+func init() {
+	legendFormat = regexp.MustCompile(`\[\[(\w+?)*\]\]*|\$\s*(\w+?)*`)
+}
+
+func (rp *ResponseParser) Parse(response *Response, query *Query) *tsdb.QueryResult {
 	queryRes := tsdb.NewQueryResult()

 	for _, result := range response.Results {
-		queryRes.Series = append(queryRes.Series, rp.transformRows(result.Series, queryRes)...)
+		queryRes.Series = append(queryRes.Series, rp.transformRows(result.Series, queryRes, query)...)
 	}

 	return queryRes
 }

-func (rp *ResponseParser) transformRows(rows []Row, queryResult *tsdb.QueryResult) tsdb.TimeSeriesSlice {
+func (rp *ResponseParser) transformRows(rows []Row, queryResult *tsdb.QueryResult, query *Query) tsdb.TimeSeriesSlice {
 	var result tsdb.TimeSeriesSlice

 	for _, row := range rows {
@@ -38,7 +48,7 @@ func (rp *ResponseParser) transformRows(rows []Row, queryResult *tsdb.QueryResul
 			}
 		}
 		result = append(result, &tsdb.TimeSeries{
-			Name:   rp.formatSerieName(row, column),
+			Name:   rp.formatSerieName(row, column, query),
 			Points: points,
 		})
 	}
@@ -47,7 +57,48 @@ func (rp *ResponseParser) transformRows(rows []Row, queryResult *tsdb.QueryResul
 	return result
 }

-func (rp *ResponseParser) formatSerieName(row Row, column string) string {
+func (rp *ResponseParser) formatSerieName(row Row, column string, query *Query) string {
+	if query.Alias == "" {
+		return rp.buildSerieNameFromQuery(row, column)
+	}
+
+	nameSegment := strings.Split(row.Name, ".")
+
+	result := legendFormat.ReplaceAllFunc([]byte(query.Alias), func(in []byte) []byte {
+		aliasFormat := string(in)
+		aliasFormat = strings.Replace(aliasFormat, "[[", "", 1)
+		aliasFormat = strings.Replace(aliasFormat, "]]", "", 1)
+		aliasFormat = strings.Replace(aliasFormat, "$", "", 1)
+
+		if aliasFormat == "m" || aliasFormat == "measurement" {
+			return []byte(query.Measurement)
+		}
+		if aliasFormat == "col" {
+			return []byte(column)
+		}
+
+		pos, err := strconv.Atoi(aliasFormat)
+		if err == nil && len(nameSegment) >= pos {
+			return []byte(nameSegment[pos])
+		}
+
+		if !strings.HasPrefix(aliasFormat, "tag_") {
+			return in
+		}
+
+		tagKey := strings.Replace(aliasFormat, "tag_", "", 1)
+		tagValue, exist := row.Tags[tagKey]
+		if exist {
+			return []byte(tagValue)
+		}
+
+		return in
+	})
+
+	return string(result)
+}
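
A standalone sketch (not part of the diff) of how the [[...]] and $... alias placeholders expand, using the same regex as the init() above; the label map stands in for row.Tags:

    package main

    import (
    	"fmt"
    	"regexp"
    	"strings"
    )

    var legendFormat = regexp.MustCompile(`\[\[(\w+?)*\]\]*|\$\s*(\w+?)*`)

    func expandAlias(alias, measurement, column string, tags map[string]string) string {
    	return string(legendFormat.ReplaceAllFunc([]byte(alias), func(in []byte) []byte {
    		f := string(in)
    		f = strings.Replace(f, "[[", "", 1)
    		f = strings.Replace(f, "]]", "", 1)
    		f = strings.Replace(f, "$", "", 1)

    		switch {
    		case f == "m" || f == "measurement":
    			return []byte(measurement)
    		case f == "col":
    			return []byte(column)
    		case strings.HasPrefix(f, "tag_"):
    			if v, ok := tags[strings.TrimPrefix(f, "tag_")]; ok {
    				return []byte(v)
    			}
    		}
    		return in // unknown placeholder: leave it as typed
    	}))
    }

    func main() {
    	tags := map[string]string{"datacenter": "America"}
    	fmt.Println(expandAlias("alias $m $col", "cpu", "mean", tags))            // alias cpu mean
    	fmt.Println(expandAlias("alias [[tag_datacenter]]", "cpu", "mean", tags)) // alias America
    }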

 func (rp *ResponseParser) buildSerieNameFromQuery(row Row, column string) string {
 	var tags []string

 	for k, v := range row.Tags {
@@ -4,56 +4,161 @@ import (
 	"encoding/json"
 	"testing"

 	"github.com/grafana/grafana/pkg/setting"
 	. "github.com/smartystreets/goconvey/convey"
 )

 func TestInfluxdbResponseParser(t *testing.T) {
 	Convey("Influxdb response parser", t, func() {
 		Convey("Response parser", func() {
 			parser := &ResponseParser{}

 			setting.NewConfigContext(&setting.CommandLineArgs{
 				HomePath: "../../../",
 			})

 			response := &Response{
 				Results: []Result{
 					Result{
 						Series: []Row{
 							{
 								Name:    "cpu",
 								Columns: []string{"time", "mean", "sum"},
 								Tags:    map[string]string{"datacenter": "America"},
 								Values: [][]interface{}{
 									{json.Number("111"), json.Number("222"), json.Number("333")},
 									{json.Number("111"), json.Number("222"), json.Number("333")},
 									{json.Number("111"), json.Number("null"), json.Number("333")},
 								},
 							},
 						},
 					},
 				},
 			}

 			query := &Query{}

 			result := parser.Parse(response, query)

 			Convey("can parse all series", func() {
 				So(len(result.Series), ShouldEqual, 2)
 			})

 			Convey("can parse all points", func() {
 				So(len(result.Series[0].Points), ShouldEqual, 3)
 				So(len(result.Series[1].Points), ShouldEqual, 3)
 			})

 			Convey("can parse multi row result", func() {
 				So(result.Series[0].Points[1][0].Float64, ShouldEqual, float64(222))
 				So(result.Series[1].Points[1][0].Float64, ShouldEqual, float64(333))
 			})

 			Convey("can parse null points", func() {
 				So(result.Series[0].Points[2][0].Valid, ShouldBeFalse)
 			})

 			Convey("can format serie names", func() {
 				So(result.Series[0].Name, ShouldEqual, "cpu.mean { datacenter: America }")
 				So(result.Series[1].Name, ShouldEqual, "cpu.sum { datacenter: America }")
 			})
 		})

 		Convey("Response parser with alias", func() {
 			parser := &ResponseParser{}

 			response := &Response{
 				Results: []Result{
 					Result{
 						Series: []Row{
 							{
 								Name:    "cpu.upc",
 								Columns: []string{"time", "mean", "sum"},
 								Tags:    map[string]string{"datacenter": "America"},
 								Values: [][]interface{}{
 									{json.Number("111"), json.Number("222"), json.Number("333")},
 								},
 							},
 						},
 					},
 				},
 			}

 			Convey("$ alias", func() {
 				Convey("simple alias", func() {
 					query := &Query{Alias: "serie alias"}
 					result := parser.Parse(response, query)

 					So(result.Series[0].Name, ShouldEqual, "serie alias")
 				})

 				Convey("measurement alias", func() {
 					query := &Query{Alias: "alias $m $measurement", Measurement: "10m"}
 					result := parser.Parse(response, query)

 					So(result.Series[0].Name, ShouldEqual, "alias 10m 10m")
 				})

 				Convey("column alias", func() {
 					query := &Query{Alias: "alias $col", Measurement: "10m"}
 					result := parser.Parse(response, query)

 					So(result.Series[0].Name, ShouldEqual, "alias mean")
 					So(result.Series[1].Name, ShouldEqual, "alias sum")
 				})

 				Convey("tag alias", func() {
 					query := &Query{Alias: "alias $tag_datacenter"}
 					result := parser.Parse(response, query)

 					So(result.Series[0].Name, ShouldEqual, "alias America")
 				})

 				Convey("segment alias", func() {
 					query := &Query{Alias: "alias $1"}
 					result := parser.Parse(response, query)

 					So(result.Series[0].Name, ShouldEqual, "alias upc")
 				})

 				Convey("segment position out of bound", func() {
 					query := &Query{Alias: "alias $5"}
 					result := parser.Parse(response, query)

 					So(result.Series[0].Name, ShouldEqual, "alias $5")
 				})
 			})

 			Convey("[[]] alias", func() {
 				Convey("simple alias", func() {
 					query := &Query{Alias: "serie alias"}
 					result := parser.Parse(response, query)

 					So(result.Series[0].Name, ShouldEqual, "serie alias")
 				})

 				Convey("measurement alias", func() {
 					query := &Query{Alias: "alias [[m]] [[measurement]]", Measurement: "10m"}
 					result := parser.Parse(response, query)

 					So(result.Series[0].Name, ShouldEqual, "alias 10m 10m")
 				})

 				Convey("column alias", func() {
 					query := &Query{Alias: "alias [[col]]", Measurement: "10m"}
 					result := parser.Parse(response, query)

 					So(result.Series[0].Name, ShouldEqual, "alias mean")
 					So(result.Series[1].Name, ShouldEqual, "alias sum")
 				})

 				Convey("tag alias", func() {
 					query := &Query{Alias: "alias [[tag_datacenter]]"}
 					result := parser.Parse(response, query)

 					So(result.Series[0].Name, ShouldEqual, "alias America")
 				})
 			})
 		})
 	})
 }
@@ -24,12 +24,14 @@ func NewPrometheusExecutor(dsInfo *tsdb.DataSourceInfo) tsdb.Executor {
 }

 var (
-	plog log.Logger
+	plog         log.Logger
+	legendFormat *regexp.Regexp
 )

 func init() {
 	plog = log.New("tsdb.prometheus")
 	tsdb.RegisterExecutor("prometheus", NewPrometheusExecutor)
+	legendFormat = regexp.MustCompile(`\{\{\s*(.+?)\s*\}\}`)
 }

 func (e *PrometheusExecutor) getClient() (prometheus.QueryAPI, error) {
@@ -79,13 +81,11 @@ func (e *PrometheusExecutor) Execute(ctx context.Context, queries tsdb.QuerySlic
 }

 func formatLegend(metric pmodel.Metric, query *PrometheusQuery) string {
-	reg, _ := regexp.Compile(`\{\{\s*(.+?)\s*\}\}`)
-
 	if query.LegendFormat == "" {
 		return metric.String()
 	}

-	result := reg.ReplaceAllFunc([]byte(query.LegendFormat), func(in []byte) []byte {
+	result := legendFormat.ReplaceAllFunc([]byte(query.LegendFormat), func(in []byte) []byte {
 		labelName := strings.Replace(string(in), "{{", "", 1)
 		labelName = strings.Replace(labelName, "}}", "", 1)
 		labelName = strings.TrimSpace(labelName)
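
A standalone sketch (not part of the diff) of the same legend expansion, with the regex compiled once at package scope instead of on every call; a plain map stands in for pmodel.Metric:

    package main

    import (
    	"fmt"
    	"regexp"
    	"strings"
    )

    // Compiled once instead of per formatLegend call.
    var legendFormat = regexp.MustCompile(`\{\{\s*(.+?)\s*\}\}`)

    func formatLegend(format string, labels map[string]string) string {
    	return string(legendFormat.ReplaceAllFunc([]byte(format), func(in []byte) []byte {
    		name := strings.TrimSpace(strings.Trim(string(in), "{}"))
    		if v, ok := labels[name]; ok {
    			return []byte(v)
    		}
    		return in
    	}))
    }

    func main() {
    	labels := map[string]string{"app": "api", "instance": "10.0.0.1:9090"}
    	fmt.Println(formatLegend("{{app}} on {{ instance }}", labels)) // api on 10.0.0.1:9090
    }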
@@ -10,7 +10,7 @@ import "./directives/grafana_version_check";
 import "./directives/metric_segment";
 import "./directives/misc";
 import "./directives/ng_model_on_blur";
-import "./directives/password_strenght";
+import "./directives/password_strength";
 import "./directives/spectrum_picker";
 import "./directives/tags";
 import "./directives/value_select_dropdown";
@@ -102,6 +102,7 @@ export default class TimeSeries {
     this.stats.min = Number.MAX_VALUE;
     this.stats.avg = null;
     this.stats.current = null;
+    this.stats.timeStep = Number.MAX_VALUE;
     this.allIsNull = true;
     this.allIsZero = true;

@@ -110,11 +111,22 @@ export default class TimeSeries {
     var currentTime;
     var currentValue;
     var nonNulls = 0;
+    var previousTime;

     for (var i = 0; i < this.datapoints.length; i++) {
       currentValue = this.datapoints[i][0];
       currentTime = this.datapoints[i][1];

+      // Due to missing values we could have different timeStep all along the series
+      // so we have to find the minimum one (could occur with aggregators such as ZimSum)
+      if (previousTime !== undefined) {
+        let timeStep = currentTime - previousTime;
+        if (timeStep < this.stats.timeStep) {
+          this.stats.timeStep = timeStep;
+        }
+      }
+      previousTime = currentTime;
+
       if (currentValue === null) {
         if (ignoreNulls) { continue; }
         if (nullAsZero) {
@@ -145,10 +157,6 @@ export default class TimeSeries {
       result.push([currentTime, currentValue]);
     }

-    if (this.datapoints.length >= 2) {
-      this.stats.timeStep = this.datapoints[1][1] - this.datapoints[0][1];
-    }
-
    if (this.stats.max === -Number.MAX_VALUE) { this.stats.max = null; }
    if (this.stats.min === Number.MAX_VALUE) { this.stats.min = null; }
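
The TypeScript above replaces "gap between the first two points" with a minimum over all consecutive gaps; a sketch of the same scan in Go, for illustration only:

    package main

    import (
    	"fmt"
    	"math"
    )

    func minTimeStep(timestamps []float64) float64 {
    	min := math.MaxFloat64
    	for i := 1; i < len(timestamps); i++ {
    		if step := timestamps[i] - timestamps[i-1]; step < min {
    			min = step
    		}
    	}
    	return min
    }

    func main() {
    	// One sample missing at t=20: the true step is 10 even though a naive
    	// first-gap calculation on other orderings could overestimate it.
    	fmt.Println(minTimeStep([]float64{0, 10, 30, 40})) // 10
    }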
@@ -46,6 +46,10 @@
 		<td>Total starred dashboards</td>
 		<td>{{ctrl.stats.starred_db_count}}</td>
 	</tr>
+	<tr>
+		<td>Total alerts</td>
+		<td>{{ctrl.stats.alert_count}}</td>
+	</tr>
 	</tbody>
 </table>
</div>
@@ -78,10 +78,10 @@ function (angular, _, moment, dateMath, kbn, CloudWatchAnnotationQuery) {
       } else {
         period = kbn.interval_to_seconds(templateSrv.replace(target.period, options.scopedVars));
       }
-      if (query.period < 60) {
+      if (period < 60) {
         period = 60;
       }
-      if (range / query.period >= 1440) {
+      if (range / period >= 1440) {
        period = Math.ceil(range / 1440 / 60) * 60;
       }
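
The old code read query.period instead of the locally computed period, so neither clamp ever fired. A sketch of the corrected arithmetic in Go, for illustration:

    package main

    import (
    	"fmt"
    	"math"
    )

    func clampPeriod(period, rangeSeconds float64) float64 {
    	if period < 60 {
    		period = 60 // CloudWatch's minimum period
    	}
    	if rangeSeconds/period >= 1440 {
    		// Too many datapoints requested: round the period up to a whole minute.
    		period = math.Ceil(rangeSeconds/1440/60) * 60
    	}
    	return period
    }

    func main() {
    	// A 30-day range with a 60s period would yield 43200 points; clamp to 1800s.
    	fmt.Println(clampPeriod(60, 30*24*3600)) // 1800
    }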
@@ -4,16 +4,15 @@
 <div class="gf-form">
-	<span class="gf-form-label"><i class="fa fa-wrench"></i></span>
 	<span class="gf-form-label width-11">Group by time interval</span>
-	<input type="text" class="gf-form-input" ng-model="ctrl.panelCtrl.panel.interval" ng-blur="ctrl.panelCtrl.refresh();"
+	<input type="text" class="gf-form-input width-16" ng-model="ctrl.panelCtrl.panel.interval" ng-blur="ctrl.panelCtrl.refresh();"
 		spellcheck='false' placeholder="example: >10s">
-	<span class="gf-form-label"><i class="fa fa-question-circle" bs-tooltip="'Set a low limit by having a greater sign: example: >60s'" data-placement="right"></i></span>
+	<info-popover mode="right-absolute">
+		Set a low limit by having a greater sign: example: >60s
+	</info-popover>
 </div>
</div>
<div class="gf-form-inline">
 <div class="gf-form">
 	<!--span class="gf-form-label">
 		<i class="fa fa-info-circle"></i>
 	</span-->
 	<span class="gf-form-label width-10">
 		<a ng-click="ctrl.panelCtrl.toggleEditorHelp(1);" bs-tooltip="'click to show helpful info'" data-placement="bottom">
 			<i class="fa fa-info-circle"></i>
@@ -11,6 +11,7 @@
 	<span class="gf-form-label width-8">Max items</span>
 	<input type="text" class="gf-form-input max-width-15" ng-model="ctrl.panel.limit" ng-change="ctrl.onRender()" />
 </div>
+<gf-form-switch class="gf-form" label="Alerts from this dashboard" label-class="width-18" checked="ctrl.panel.onlyAlertsOnDashboard" on-change="ctrl.updateStateFilter()"></gf-form-switch>
</div>
<div class="section gf-form-group">
 <h5 class="section-heading">State filter</h5>
@@ -25,7 +25,8 @@ class AlertListPanel extends PanelCtrl {
   panelDefaults = {
     show: 'current',
     limit: 10,
-    stateFilter: []
+    stateFilter: [],
+    onlyAlertsOnDashboard: false
   };

@@ -71,9 +72,13 @@ class AlertListPanel extends PanelCtrl {
     var params: any = {
       limit: this.panel.limit,
       type: 'alert',
-      newState: this.panel.stateFilter
+      newState: this.panel.stateFilter,
     };

+    if (this.panel.onlyAlertsOnDashboard) {
+      params.dashboardId = this.dashboard.id;
+    }
+
     params.from = dateMath.parse(this.dashboard.time.from).unix() * 1000;
     params.to = dateMath.parse(this.dashboard.time.to).unix() * 1000;

@@ -93,6 +98,10 @@ class AlertListPanel extends PanelCtrl {
       state: this.panel.stateFilter
     };

+    if (this.panel.onlyAlertsOnDashboard) {
+      params.dashboardId = this.dashboard.id;
+    }
+
     this.backendSrv.get(`/api/alerts`, params)
       .then(res => {
         this.currentAlerts = _.map(res, al => {
@@ -183,8 +183,10 @@ module.directive('grafanaGraph', function($rootScope, timeSrv) {
       }
     }

+    // Series could have different timeSteps,
+    // let's find the smallest one so that bars are correctly rendered.
     function getMinTimeStepOfSeries(data) {
-      var min = 100000000000;
+      var min = Number.MAX_VALUE;

       for (let i = 0; i < data.length; i++) {
         if (!data[i].stats.timeStep) {
@@ -295,9 +297,7 @@ module.directive('grafanaGraph', function($rootScope, timeSrv) {
           break;
         }
         default: {
-          if (panel.bars) {
-            options.series.bars.barWidth = getMinTimeStepOfSeries(data) / 1.5;
-          }
+          options.series.bars.barWidth = getMinTimeStepOfSeries(data) / 1.5;
           addTimeAxis(options);
           break;
         }
@@ -460,7 +460,7 @@ module.directive('grafanaGraph', function($rootScope, timeSrv) {
       show: panel.yaxes[0].show,
       index: 1,
       logBase: panel.yaxes[0].logBase || 1,
-      max: 100, // correct later
+      max: null
     };

     options.yaxes.push(defaults);
@@ -472,6 +472,8 @@ module.directive('grafanaGraph', function($rootScope, timeSrv) {
       secondY.logBase = panel.yaxes[1].logBase || 1;
       secondY.position = 'right';
       options.yaxes.push(secondY);
+
+      applyLogScale(options.yaxes[1], data);
       configureAxisMode(options.yaxes[1], panel.percentage && panel.stack ? "percent" : panel.yaxes[1].format);
     }
@@ -21,7 +21,9 @@ function ($) {
     var initial = last*ps;
     var len = series.datapoints.points.length;
     for (var j = initial; j < len; j += ps) {
-      if (series.datapoints.points[j] > posX) {
+      // Special case of a non stepped line, highlight the very last point just before a null point
+      if ((series.datapoints.points[initial] != null && series.datapoints.points[j] == null && ! series.lines.steps)
+        //normal case
+        || series.datapoints.points[j] > posX) {
         return Math.max(j - ps, 0)/ps;
       }
@@ -58,11 +60,13 @@ function ($) {
     series = seriesList[i];

     if (!series.data.length || (panel.legend.hideEmpty && series.allIsNull)) {
+      // Init value & yaxis so that it does not brake series sorting
       results.push({ hidden: true, value: 0, yaxis: 0 });
       continue;
     }

     if (!series.data.length || (panel.legend.hideZero && series.allIsZero)) {
+      // Init value & yaxis so that it does not brake series sorting
       results.push({ hidden: true, value: 0, yaxis: 0 });
       continue;
     }
@@ -71,6 +75,7 @@ function ($) {
     hoverDistance = pos.x - series.data[hoverIndex][0];
     pointTime = series.data[hoverIndex][0];

+    // Take the closest point before the cursor, or if it does not exist, the closest after
     if (! minDistance
       || (hoverDistance >=0 && (hoverDistance < minDistance || minDistance < 0))
       || (hoverDistance < 0 && hoverDistance > minDistance)) {
@@ -99,6 +104,7 @@ function ($) {
       hoverIndex = this.findHoverIndexFromDataPoints(pos.x, series, hoverIndex);
     }

+    // Be sure we have a yaxis so that it does not brake series sorting
     yaxis = 0;
     if (series.yaxis) {
       yaxis = series.yaxis.n;
@@ -116,7 +122,7 @@ function ($) {
     });
   }

-  // Find point which closer to pointer
+  // Time of the point closer to pointer
   results.time = minTime;

   return results;
@@ -189,7 +195,7 @@ function ($) {
   }

   var highlightClass = '';
-  if (item && i === item.seriesIndex) {
+  if (item && hoverInfo.index === item.seriesIndex) {
     highlightClass = 'graph-tooltip-list-item--highlight';
   }
@@ -335,21 +335,24 @@
 }

 @mixin left-brand-border-gradient() {
 	border: none;
 	border-image: linear-gradient(rgba(255,213,0,1) 0%, rgba(255,68,0,1) 99%, rgba(255,68,0,1) 100%);
 	border-image-slice: 1;
 	border-style: solid;
 	border-top: 0;
 	border-right: 0;
 	border-bottom: 0;
 	border-left: 2px solid transparent;
 	border-left-width: 2px;
 }

 @mixin brand-bottom-border() {
 	border-image: $brand-gradient;
 	border-image-slice: 1;
 	border-style: solid;
 	border-top: 0;
 	border-right: 0;
 	border-left: 0;
 	border-bottom: 1px solid transparent;
 	border-bottom-width: 1px;
 }
public/vendor/flot/jquery.flot.js
@@ -1210,7 +1210,7 @@ Licensed under the MIT license.
 				// middle point has same y
 				points[k + 1] = points[k - ps + 1] || 0;

-				// if series has null values, let's give the last correct value a nice step
+				// if series has null values, let's give the last !null value a nice step
 				if(nullify)
 					points[k] = p[0];
public/vendor/flot/jquery.flot.stack.js
@@ -78,9 +78,12 @@ charts or filled areas).
 			i = 0, j = 0, l, m;

 		while (true) {
+			// browse all points from the current series and from the previous series
 			if (i >= points.length && j >= otherpoints.length)
 				break;

+			// newpoints will replace current series with
+			// as many points as different timestamps we have in the 2 (current & previous) series
 			l = newpoints.length;
 			px = points[i + keyOffset];
 			py = points[i + accumulateOffset];
@@ -89,30 +92,32 @@ charts or filled areas).
 			bottom = 0;

 			if (i < points.length && px == null) {
-				// ignore point
+				// let's ignore null points from current series, nothing to do with them
 				i += ps;
 			}
 			else if (j < otherpoints.length && qx == null) {
-				// ignore point
+				// let's ignore null points from previous series, nothing to do with them
 				j += otherps;
 			}
 			else if (i >= points.length) {
-				// take the remaining points from the previous series
+				// no more points in the current series, simply take the remaining points
+				// from the previous series so that next series will correctly stack
 				for (m = 0; m < ps; ++m)
 					newpoints.push(otherpoints[j + m]);
 				bottom = qy;
 				j += otherps;
 			}
 			else if (j >= otherpoints.length) {
-				// take the remaining points from the current series
+				// no more points in the previous series, of course let's take
+				// the remaining points from the current series
 				for (m = 0; m < ps; ++m)
 					newpoints.push(points[i + m]);
 				i += ps;
 			}
 			else {
-				// cases where we actually got two points
+				// next available points from current and previous series have the same timestamp
 				if (px == qx) {
-					// take the point from the current series and skip the previous' one
+					// so take the point from the current series and skip the previous' one
 					for (m = 0; m < ps; ++m)
 						newpoints.push(points[i + m]);
@@ -122,8 +127,9 @@ charts or filled areas).
 					i += ps;
 					j += otherps;
 				}
+				// next available point with the smallest timestamp is from the previous series
 				else if (px > qx) {
-					// take the point from the previous series so that the next series can stack over it
+					// so take the point from the previous series so that next series will correctly stack
 					for (m = 0; m < ps; ++m)
 						newpoints.push(otherpoints[j + m]);
@@ -135,8 +141,9 @@ charts or filled areas).

 					j += otherps;
 				}
-				else { // px < qx
-					// take the point from the current series
+				// (px < qx) next available point with the smallest timestamp is from the current series
+				else {
+					// so of course let's take the point from the current series
 					for (m = 0; m < ps; ++m)
 						newpoints.push(points[i + m]);
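
A sketch (in Go, for illustration only) of the merge the stack plugin performs: walk two timestamp-sorted series with two cursors, so the stacked output has a point for every timestamp seen in either input:

    package main

    import "fmt"

    type point struct{ t, v float64 }

    func mergeTimestamps(cur, prev []point) []float64 {
    	var out []float64
    	i, j := 0, 0
    	for i < len(cur) || j < len(prev) {
    		switch {
    		case i >= len(cur):
    			out = append(out, prev[j].t) // current series exhausted
    			j++
    		case j >= len(prev):
    			out = append(out, cur[i].t) // previous series exhausted
    			i++
    		case cur[i].t == prev[j].t:
    			out = append(out, cur[i].t) // same timestamp: one merged point
    			i++
    			j++
    		case cur[i].t > prev[j].t:
    			out = append(out, prev[j].t) // previous series leads
    			j++
    		default:
    			out = append(out, cur[i].t) // current series leads
    			i++
    		}
    	}
    	return out
    }

    func main() {
    	cur := []point{{10, 1}, {30, 2}}
    	prev := []point{{10, 5}, {20, 6}, {30, 7}}
    	fmt.Println(mergeTimestamps(cur, prev)) // [10 20 30]
    }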