Merge branch 'master' into develop

This commit is contained in:
Torkel Ödegaard 2017-09-22 08:42:07 +02:00
commit 18337f610d
418 changed files with 24139 additions and 3267 deletions

1
.gitignore vendored
View File

@ -25,6 +25,7 @@ public/css/*.min.css
.idea/ .idea/
*.iml *.iml
*.tmp *.tmp
.DS_Store
.vscode/ .vscode/
/data/* /data/*

View File

@ -13,10 +13,17 @@
* **GCS**: Adds support for Google Cloud Storage [#8370](https://github.com/grafana/grafana/issues/8370) thx [@chuhlomin](https://github.com/chuhlomin) * **GCS**: Adds support for Google Cloud Storage [#8370](https://github.com/grafana/grafana/issues/8370) thx [@chuhlomin](https://github.com/chuhlomin)
* **Prometheus**: Adds /metrics endpoint for exposing Grafana metrics. [#9187](https://github.com/grafana/grafana/pull/9187) * **Prometheus**: Adds /metrics endpoint for exposing Grafana metrics. [#9187](https://github.com/grafana/grafana/pull/9187)
* **Graph**: Add support for local formating in axis. [#1395](https://github.com/grafana/grafana/issues/1395), thx [@m0nhawk](https://github.com/m0nhawk) * **Graph**: Add support for local formating in axis. [#1395](https://github.com/grafana/grafana/issues/1395), thx [@m0nhawk](https://github.com/m0nhawk)
* **Jaeger**: Add support for open tracing using jaeger in Grafana. [#9213](https://github.com/grafana/grafana/pull/9213)
* **Unit types**: New date & time unit types added, useful in singlestat to show dates & times. [#3678](https://github.com/grafana/grafana/issues/3678), [#6710](https://github.com/grafana/grafana/issues/6710), [#2764](https://github.com/grafana/grafana/issues/6710)
## Breaking changes ## Breaking changes
* **Metrics**: The metric structure for internal metrics about Grafana published to graphite has changed. This might break dashboards for internal metrics. * **Metrics**: The metric structure for internal metrics about Grafana published to graphite has changed. This might break dashboards for internal metrics.
# 4.5.2 (unreleased)
## Fixes
* **Metrics**: dont write NaN values to graphite [#9279](https://github.com/grafana/grafana/issues/9279)
# 4.5.1 (2017-09-15) # 4.5.1 (2017-09-15)
## Fixes ## Fixes
@ -49,6 +56,7 @@
### Breaking change ### Breaking change
* **InfluxDB/Elasticsearch**: The panel & data source option named "Group by time interval" is now named "Min time interval" and does now always define a lower limit for the auto group by time. Without having to use `>` prefix (that prefix still works). This should in theory have close to zero actual impact on existing dashboards. It does mean that if you used this setting to define a hard group by time interval of, say "1d", if you zoomed to a time range wide enough the time range could increase above the "1d" range as the setting is now always considered a lower limit. * **InfluxDB/Elasticsearch**: The panel & data source option named "Group by time interval" is now named "Min time interval" and does now always define a lower limit for the auto group by time. Without having to use `>` prefix (that prefix still works). This should in theory have close to zero actual impact on existing dashboards. It does mean that if you used this setting to define a hard group by time interval of, say "1d", if you zoomed to a time range wide enough the time range could increase above the "1d" range as the setting is now always considered a lower limit.
* **Elasticsearch**: Elasticsearch metric queries without date histogram now return table formated data making table panel much easier to use for this use case. Should not break/change existing dashboards with stock panels but external panel plugins can be affected.
## Changes ## Changes

View File

@ -452,6 +452,23 @@ url = https://grafana.com
[grafana_com] [grafana_com]
url = https://grafana.com url = https://grafana.com
#################################### Distributed tracing ############
[tracing.jaeger]
# jaeger destination (ex localhost:6831)
address =
# tag that will always be included in when creating new spans. ex (tag1:value1,tag2:value2)
always_included_tag =
# Type specifies the type of the sampler: const, probabilistic, rateLimiting, or remote
sampler_type = const
# jaeger samplerconfig param
# for "const" sampler, 0 or 1 for always false/true respectively
# for "probabilistic" sampler, a probability between 0 and 1
# for "rateLimiting" sampler, the number of spans per second
# for "remote" sampler, param is the same as for "probabilistic"
# and indicates the initial sampling rate before the actual one
# is received from the mothership
sampler_param = 1
#################################### External Image Storage ############## #################################### External Image Storage ##############
[external_image_storage] [external_image_storage]
# You can choose between (s3, webdav, gcs) # You can choose between (s3, webdav, gcs)

View File

@ -391,6 +391,23 @@
;address = ;address =
;prefix = prod.grafana.%(instance_name)s. ;prefix = prod.grafana.%(instance_name)s.
#################################### Distributed tracing ############
[tracing.jaeger]
# Enable by setting the address sending traces to jaeger (ex localhost:6831)
;address = localhost:6831
# Tag that will always be included in when creating new spans. ex (tag1:value1,tag2:value2)
;always_included_tag = tag1:value1
# Type specifies the type of the sampler: const, probabilistic, rateLimiting, or remote
;sampler_type = const
# jaeger samplerconfig param
# for "const" sampler, 0 or 1 for always false/true respectively
# for "probabilistic" sampler, a probability between 0 and 1
# for "rateLimiting" sampler, the number of spans per second
# for "remote" sampler, param is the same as for "probabilistic"
# and indicates the initial sampling rate before the actual one
# is received from the mothership
;sampler_param = 1
#################################### Grafana.com integration ########################## #################################### Grafana.com integration ##########################
# Url used to to import dashboards directly from Grafana.com # Url used to to import dashboards directly from Grafana.com
[grafana_com] [grafana_com]

6
docker/blocks/jaeger/fig Normal file
View File

@ -0,0 +1,6 @@
jaeger:
image: jaegertracing/all-in-one:latest
ports:
- "localhost:6831:6831/udp"
- "16686:16686"

View File

@ -1,2 +1,3 @@
FROM prom/prometheus FROM prom/prometheus
ADD prometheus.yml /etc/prometheus/ ADD prometheus.yml /etc/prometheus/
ADD alert.rules /etc/prometheus/

View File

@ -0,0 +1,10 @@
# Alert Rules
ALERT AppCrash
IF process_open_fds > 0
FOR 15s
LABELS { severity="critical" }
ANNOTATIONS {
summary = "Number of open fds > 0",
description = "Just testing"
}

View File

@ -18,3 +18,8 @@ fake-prometheus-data:
environment: environment:
FD_DATASOURCE: prom FD_DATASOURCE: prom
alertmanager:
image: quay.io/prometheus/alertmanager
net: host
ports:
- "9093:9093"

View File

@ -6,9 +6,18 @@ global:
# Load and evaluate rules in this file every 'evaluation_interval' seconds. # Load and evaluate rules in this file every 'evaluation_interval' seconds.
rule_files: rule_files:
- "alert.rules"
# - "first.rules" # - "first.rules"
# - "second.rules" # - "second.rules"
alerting:
alertmanagers:
- scheme: http
static_configs:
- targets:
- "127.0.0.1:9093"
# A scrape configuration containing exactly one endpoint to scrape: # A scrape configuration containing exactly one endpoint to scrape:
# Here it's Prometheus itself. # Here it's Prometheus itself.
scrape_configs: scrape_configs:

View File

@ -308,15 +308,15 @@ options are `Editor` and `Admin`.
## [auth.github] ## [auth.github]
You need to create a GitHub application (you find this under the GitHub You need to create a GitHub OAuth application (you find this under the GitHub
profile page). When you create the application you will need to specify settings page). When you create the application you will need to specify
a callback URL. Specify this as callback: a callback URL. Specify this as callback:
http://<my_grafana_server_name_or_ip>:<grafana_server_port>/login/github http://<my_grafana_server_name_or_ip>:<grafana_server_port>/login/github
This callback URL must match the full HTTP address that you use in your This callback URL must match the full HTTP address that you use in your
browser to access Grafana, but with the prefix path of `/login/github`. browser to access Grafana, but with the prefix path of `/login/github`.
When the GitHub application is created you will get a Client ID and a When the GitHub OAuth application is created you will get a Client ID and a
Client Secret. Specify these in the Grafana configuration file. For Client Secret. Specify these in the Grafana configuration file. For
example: example:

View File

@ -10,6 +10,8 @@
"url": "http://github.com/grafana/grafana.git" "url": "http://github.com/grafana/grafana.git"
}, },
"devDependencies": { "devDependencies": {
"@types/react": "^16.0.5",
"@types/react-dom": "^15.5.4",
"autoprefixer": "^6.4.0", "autoprefixer": "^6.4.0",
"es6-promise": "^3.0.2", "es6-promise": "^3.0.2",
"es6-shim": "^0.35.1", "es6-shim": "^0.35.1",
@ -48,7 +50,7 @@
"mocha": "3.2.0", "mocha": "3.2.0",
"phantomjs-prebuilt": "^2.1.14", "phantomjs-prebuilt": "^2.1.14",
"reflect-metadata": "0.1.8", "reflect-metadata": "0.1.8",
"rxjs": "^5.0.0-rc.5", "rxjs": "^5.4.3",
"sass-lint": "^1.10.2", "sass-lint": "^1.10.2",
"systemjs": "0.19.41", "systemjs": "0.19.41",
"zone.js": "^0.7.2" "zone.js": "^0.7.2"
@ -60,6 +62,7 @@
}, },
"license": "Apache-2.0", "license": "Apache-2.0",
"dependencies": { "dependencies": {
"@types/enzyme": "^2.8.8",
"ace-builds": "^1.2.8", "ace-builds": "^1.2.8",
"eventemitter3": "^2.0.2", "eventemitter3": "^2.0.2",
"gaze": "^1.1.2", "gaze": "^1.1.2",
@ -73,13 +76,17 @@
"karma-sinon": "^1.0.5", "karma-sinon": "^1.0.5",
"lodash": "^4.17.4", "lodash": "^4.17.4",
"mousetrap": "^1.6.0", "mousetrap": "^1.6.0",
"ngreact": "^0.4.1",
"react": "^15.6.1",
"react-dom": "^15.6.1",
"react-test-renderer": "^15.6.1",
"remarkable": "^1.7.1", "remarkable": "^1.7.1",
"sinon": "1.17.6", "sinon": "1.17.6",
"systemjs-builder": "^0.15.34", "systemjs-builder": "^0.15.34",
"tether": "^1.4.0", "tether": "^1.4.0",
"tether-drop": "https://github.com/torkelo/drop", "tether-drop": "https://github.com/torkelo/drop",
"tslint": "^5.1.0", "tslint": "^5.7.0",
"typescript": "^2.2.2", "typescript": "^2.5.2",
"virtual-scroll": "^1.1.1" "virtual-scroll": "^1.1.1"
} }
} }

View File

@ -21,7 +21,7 @@ func (hs *HttpServer) registerRoutes() {
// automatically set HEAD for every GET // automatically set HEAD for every GET
macaronR.SetAutoHead(true) macaronR.SetAutoHead(true)
r := newRouteRegister(middleware.RequestMetrics) r := newRouteRegister(middleware.RequestMetrics, middleware.RequestTracing)
// not logged in views // not logged in views
r.Get("/", reqSignedIn, Index) r.Get("/", reqSignedIn, Index)

View File

@ -126,7 +126,7 @@ func init() {
"AWS/NATGateway": {"NatGatewayId"}, "AWS/NATGateway": {"NatGatewayId"},
"AWS/OpsWorks": {"StackId", "LayerId", "InstanceId"}, "AWS/OpsWorks": {"StackId", "LayerId", "InstanceId"},
"AWS/Redshift": {"NodeID", "ClusterIdentifier"}, "AWS/Redshift": {"NodeID", "ClusterIdentifier"},
"AWS/RDS": {"DBInstanceIdentifier", "DBClusterIdentifier", "DatabaseClass", "EngineName", "Role"}, "AWS/RDS": {"DBInstanceIdentifier", "DBClusterIdentifier", "DbClusterIdentifier", "DatabaseClass", "EngineName", "Role"},
"AWS/Route53": {"HealthCheckId", "Region"}, "AWS/Route53": {"HealthCheckId", "Region"},
"AWS/S3": {"BucketName", "StorageType", "FilterId"}, "AWS/S3": {"BucketName", "StorageType", "FilterId"},
"AWS/SES": {}, "AWS/SES": {},

View File

@ -31,7 +31,7 @@ func QueryMetrics(c *middleware.Context, reqDto dtos.MetricRequest) Response {
return ApiError(500, "failed to fetch data source", err) return ApiError(500, "failed to fetch data source", err)
} }
request := &tsdb.Request{TimeRange: timeRange} request := &tsdb.TsdbQuery{TimeRange: timeRange}
for _, query := range reqDto.Queries { for _, query := range reqDto.Queries {
request.Queries = append(request.Queries, &tsdb.Query{ request.Queries = append(request.Queries, &tsdb.Query{
@ -98,7 +98,7 @@ func GetTestDataRandomWalk(c *middleware.Context) Response {
intervalMs := c.QueryInt64("intervalMs") intervalMs := c.QueryInt64("intervalMs")
timeRange := tsdb.NewTimeRange(from, to) timeRange := tsdb.NewTimeRange(from, to)
request := &tsdb.Request{TimeRange: timeRange} request := &tsdb.TsdbQuery{TimeRange: timeRange}
request.Queries = append(request.Queries, &tsdb.Query{ request.Queries = append(request.Queries, &tsdb.Query{
RefId: "A", RefId: "A",

View File

@ -15,6 +15,8 @@ import (
"text/template" "text/template"
"time" "time"
"github.com/opentracing/opentracing-go"
"github.com/grafana/grafana/pkg/api/cloudwatch" "github.com/grafana/grafana/pkg/api/cloudwatch"
"github.com/grafana/grafana/pkg/log" "github.com/grafana/grafana/pkg/log"
"github.com/grafana/grafana/pkg/middleware" "github.com/grafana/grafana/pkg/middleware"
@ -85,6 +87,20 @@ func (proxy *DataSourceProxy) HandleRequest() {
proxy.logRequest() proxy.logRequest()
span, ctx := opentracing.StartSpanFromContext(proxy.ctx.Req.Context(), "datasource reverse proxy")
proxy.ctx.Req.Request = proxy.ctx.Req.WithContext(ctx)
defer span.Finish()
span.SetTag("datasource_id", proxy.ds.Id)
span.SetTag("datasource_type", proxy.ds.Type)
span.SetTag("user_id", proxy.ctx.SignedInUser.UserId)
span.SetTag("org_id", proxy.ctx.SignedInUser.OrgId)
opentracing.GlobalTracer().Inject(
span.Context(),
opentracing.HTTPHeaders,
opentracing.HTTPHeadersCarrier(proxy.ctx.Req.Request.Header))
reverseProxy.ServeHTTP(proxy.ctx.Resp, proxy.ctx.Req.Request) reverseProxy.ServeHTTP(proxy.ctx.Resp, proxy.ctx.Req.Request)
proxy.ctx.Resp.Header().Del("Set-Cookie") proxy.ctx.Resp.Header().Del("Set-Cookie")
} }

View File

@ -8,6 +8,7 @@ import (
type Router interface { type Router interface {
Handle(method, pattern string, handlers []macaron.Handler) *macaron.Route Handle(method, pattern string, handlers []macaron.Handler) *macaron.Route
Get(pattern string, handlers ...macaron.Handler) *macaron.Route
} }
type RouteRegister interface { type RouteRegister interface {
@ -62,7 +63,14 @@ func (rr *routeRegister) Group(pattern string, fn func(rr RouteRegister), handle
func (rr *routeRegister) Register(router Router) *macaron.Router { func (rr *routeRegister) Register(router Router) *macaron.Router {
for _, r := range rr.routes { for _, r := range rr.routes {
router.Handle(r.method, r.pattern, r.handlers) // GET requests have to be added to macaron routing using Get()
// Otherwise HEAD requests will not be allowed.
// https://github.com/go-macaron/macaron/blob/a325110f8b392bce3e5cdeb8c44bf98078ada3be/router.go#L198
if r.method == http.MethodGet {
router.Get(r.pattern, r.handlers...)
} else {
router.Handle(r.method, r.pattern, r.handlers)
}
} }
for _, g := range rr.groups { for _, g := range rr.groups {

View File

@ -1,6 +1,7 @@
package api package api
import ( import (
"net/http"
"strconv" "strconv"
"testing" "testing"
@ -21,6 +22,16 @@ func (fr *fakeRouter) Handle(method, pattern string, handlers []macaron.Handler)
return &macaron.Route{} return &macaron.Route{}
} }
func (fr *fakeRouter) Get(pattern string, handlers ...macaron.Handler) *macaron.Route {
fr.route = append(fr.route, route{
pattern: pattern,
method: http.MethodGet,
handlers: handlers,
})
return &macaron.Route{}
}
func emptyHandlers(n int) []macaron.Handler { func emptyHandlers(n int) []macaron.Handler {
res := []macaron.Handler{} res := []macaron.Handler{}
for i := 1; n >= i; i++ { for i := 1; n >= i; i++ {

View File

@ -22,9 +22,9 @@ import (
_ "github.com/grafana/grafana/pkg/services/alerting/notifiers" _ "github.com/grafana/grafana/pkg/services/alerting/notifiers"
_ "github.com/grafana/grafana/pkg/tsdb/graphite" _ "github.com/grafana/grafana/pkg/tsdb/graphite"
_ "github.com/grafana/grafana/pkg/tsdb/influxdb" _ "github.com/grafana/grafana/pkg/tsdb/influxdb"
_ "github.com/grafana/grafana/pkg/tsdb/mqe"
_ "github.com/grafana/grafana/pkg/tsdb/mysql" _ "github.com/grafana/grafana/pkg/tsdb/mysql"
_ "github.com/grafana/grafana/pkg/tsdb/opentsdb" _ "github.com/grafana/grafana/pkg/tsdb/opentsdb"
_ "github.com/grafana/grafana/pkg/tsdb/prometheus" _ "github.com/grafana/grafana/pkg/tsdb/prometheus"
_ "github.com/grafana/grafana/pkg/tsdb/testdata" _ "github.com/grafana/grafana/pkg/tsdb/testdata"
) )

View File

@ -24,6 +24,7 @@ import (
"github.com/grafana/grafana/pkg/services/search" "github.com/grafana/grafana/pkg/services/search"
"github.com/grafana/grafana/pkg/setting" "github.com/grafana/grafana/pkg/setting"
"github.com/grafana/grafana/pkg/social" "github.com/grafana/grafana/pkg/social"
"github.com/grafana/grafana/pkg/tracing"
) )
func NewGrafanaServer() models.GrafanaServer { func NewGrafanaServer() models.GrafanaServer {
@ -61,6 +62,14 @@ func (g *GrafanaServerImpl) Start() {
eventpublisher.Init() eventpublisher.Init()
plugins.Init() plugins.Init()
closer, err := tracing.Init(setting.Cfg)
if err != nil {
g.log.Error("Tracing settings is not valid", "error", err)
g.Shutdown(1, "Startup failed")
return
}
defer closer.Close()
// init alerting // init alerting
if setting.AlertingEnabled && setting.ExecuteAlerts { if setting.AlertingEnabled && setting.ExecuteAlerts {
engine := alerting.NewEngine() engine := alerting.NewEngine()
@ -71,8 +80,8 @@ func (g *GrafanaServerImpl) Start() {
cleanUpService := cleanup.NewCleanUpService() cleanUpService := cleanup.NewCleanUpService()
g.childRoutines.Go(func() error { return cleanUpService.Run(g.context) }) g.childRoutines.Go(func() error { return cleanUpService.Run(g.context) })
if err := notifications.Init(); err != nil { if err = notifications.Init(); err != nil {
g.log.Error("Notification service failed to initialize", "erro", err) g.log.Error("Notification service failed to initialize", "error", err)
g.Shutdown(1, "Startup failed") g.Shutdown(1, "Startup failed")
return return
} }

View File

@ -20,6 +20,7 @@ import (
"errors" "errors"
"fmt" "fmt"
"io" "io"
"math"
"net" "net"
"sort" "sort"
"strings" "strings"
@ -53,7 +54,17 @@ const (
AbortOnError AbortOnError
) )
var metricCategoryPrefix []string = []string{"proxy_", "api_", "page_", "alerting_", "aws_", "db_", "stat_", "go_", "process_"} var metricCategoryPrefix []string = []string{
"proxy_",
"api_",
"page_",
"alerting_",
"aws_",
"db_",
"stat_",
"go_",
"process_"}
var trimMetricPrefix []string = []string{"grafana_"} var trimMetricPrefix []string = []string{"grafana_"}
// Config defines the Graphite bridge config. // Config defines the Graphite bridge config.
@ -208,6 +219,10 @@ func (b *Bridge) writeMetrics(w io.Writer, mfs []*dto.MetricFamily, prefix strin
buf := bufio.NewWriter(w) buf := bufio.NewWriter(w)
for _, s := range vec { for _, s := range vec {
if math.IsNaN(float64(s.Value)) {
continue
}
if err := writePrefix(buf, prefix); err != nil { if err := writePrefix(buf, prefix); err != nil {
return err return err
} }
@ -235,12 +250,6 @@ func writeMetric(buf *bufio.Writer, m model.Metric, mf *dto.MetricFamily) error
if !hasName { if !hasName {
numLabels = len(m) numLabels = len(m)
} }
for _, v := range metricCategoryPrefix {
if strings.HasPrefix(string(metricName), v) {
group := strings.Replace(v, "_", " ", 1)
metricName = model.LabelValue(strings.Replace(string(metricName), v, group, 1))
}
}
for _, v := range trimMetricPrefix { for _, v := range trimMetricPrefix {
if strings.HasPrefix(string(metricName), v) { if strings.HasPrefix(string(metricName), v) {
@ -248,6 +257,13 @@ func writeMetric(buf *bufio.Writer, m model.Metric, mf *dto.MetricFamily) error
} }
} }
for _, v := range metricCategoryPrefix {
if strings.HasPrefix(string(metricName), v) {
group := strings.Replace(v, "_", " ", 1)
metricName = model.LabelValue(strings.Replace(string(metricName), v, group, 1))
}
}
labelStrings := make([]string, 0, numLabels) labelStrings := make([]string, 0, numLabels)
for label, value := range m { for label, value := range m {
if label != model.MetricNameLabel { if label != model.MetricNameLabel {
@ -357,7 +373,7 @@ func replaceInvalidRune(c rune) rune {
if c == ' ' { if c == ' ' {
return '.' return '.'
} }
if !((c >= 'a' && c <= 'z') || (c >= 'A' && c <= 'Z') || c == '_' || c == ':' || (c >= '0' && c <= '9')) { if !((c >= 'a' && c <= 'z') || (c >= 'A' && c <= 'Z') || c == '-' || c == '_' || c == ':' || (c >= '0' && c <= '9')) {
return '_' return '_'
} }
return c return c

View File

@ -128,6 +128,7 @@ func TestWriteSummary(t *testing.T) {
prometheus.SummaryOpts{ prometheus.SummaryOpts{
Name: "name", Name: "name",
Help: "docstring", Help: "docstring",
Namespace: "grafana",
ConstLabels: prometheus.Labels{"constname": "constvalue"}, ConstLabels: prometheus.Labels{"constname": "constvalue"},
Objectives: map[float64]float64{0.5: 0.05, 0.9: 0.01, 0.99: 0.001}, Objectives: map[float64]float64{0.5: 0.05, 0.9: 0.01, 0.99: 0.001},
}, },
@ -187,6 +188,7 @@ func TestWriteHistogram(t *testing.T) {
prometheus.HistogramOpts{ prometheus.HistogramOpts{
Name: "name", Name: "name",
Help: "docstring", Help: "docstring",
Namespace: "grafana",
ConstLabels: prometheus.Labels{"constname": "constvalue"}, ConstLabels: prometheus.Labels{"constname": "constvalue"},
Buckets: []float64{0.01, 0.02, 0.05, 0.1}, Buckets: []float64{0.01, 0.02, 0.05, 0.1},
}, },
@ -248,6 +250,17 @@ func TestCounterVec(t *testing.T) {
cntVec := prometheus.NewCounterVec( cntVec := prometheus.NewCounterVec(
prometheus.CounterOpts{ prometheus.CounterOpts{
Name: "page_response", Name: "page_response",
Namespace: "grafana",
Help: "docstring",
ConstLabels: prometheus.Labels{"constname": "constvalue"},
},
[]string{"labelname"},
)
apicntVec := prometheus.NewCounterVec(
prometheus.CounterOpts{
Name: "api_response",
Namespace: "grafana",
Help: "docstring", Help: "docstring",
ConstLabels: prometheus.Labels{"constname": "constvalue"}, ConstLabels: prometheus.Labels{"constname": "constvalue"},
}, },
@ -256,9 +269,12 @@ func TestCounterVec(t *testing.T) {
reg := prometheus.NewRegistry() reg := prometheus.NewRegistry()
reg.MustRegister(cntVec) reg.MustRegister(cntVec)
reg.MustRegister(apicntVec)
cntVec.WithLabelValues("val1").Inc() cntVec.WithLabelValues("val1").Inc()
cntVec.WithLabelValues("val2").Inc() cntVec.WithLabelValues("val2").Inc()
apicntVec.WithLabelValues("val1").Inc()
apicntVec.WithLabelValues("val2").Inc()
b, err := NewBridge(&Config{ b, err := NewBridge(&Config{
URL: "localhost:8080", URL: "localhost:8080",
@ -281,7 +297,9 @@ func TestCounterVec(t *testing.T) {
t.Fatalf("error: %v", err) t.Fatalf("error: %v", err)
} }
want := `prefix.page.response.constname.constvalue.labelname.val1.count 1 1477043 want := `prefix.api.response.constname.constvalue.labelname.val1.count 1 1477043
prefix.api.response.constname.constvalue.labelname.val2.count 1 1477043
prefix.page.response.constname.constvalue.labelname.val1.count 1 1477043
prefix.page.response.constname.constvalue.labelname.val2.count 1 1477043 prefix.page.response.constname.constvalue.labelname.val2.count 1 1477043
` `
if got := buf.String(); want != got { if got := buf.String(); want != got {
@ -291,6 +309,8 @@ prefix.page.response.constname.constvalue.labelname.val2.count 1 1477043
//next collect //next collect
cntVec.WithLabelValues("val1").Inc() cntVec.WithLabelValues("val1").Inc()
cntVec.WithLabelValues("val2").Inc() cntVec.WithLabelValues("val2").Inc()
apicntVec.WithLabelValues("val1").Inc()
apicntVec.WithLabelValues("val2").Inc()
mfs, err = reg.Gather() mfs, err = reg.Gather()
if err != nil { if err != nil {
@ -303,7 +323,9 @@ prefix.page.response.constname.constvalue.labelname.val2.count 1 1477043
t.Fatalf("error: %v", err) t.Fatalf("error: %v", err)
} }
want2 := `prefix.page.response.constname.constvalue.labelname.val1.count 1 1477053 want2 := `prefix.api.response.constname.constvalue.labelname.val1.count 1 1477053
prefix.api.response.constname.constvalue.labelname.val2.count 1 1477053
prefix.page.response.constname.constvalue.labelname.val1.count 1 1477053
prefix.page.response.constname.constvalue.labelname.val2.count 1 1477053 prefix.page.response.constname.constvalue.labelname.val2.count 1 1477053
` `
if got := buf.String(); want2 != got { if got := buf.String(); want2 != got {
@ -316,6 +338,7 @@ func TestCounter(t *testing.T) {
prometheus.CounterOpts{ prometheus.CounterOpts{
Name: "page_response", Name: "page_response",
Help: "docstring", Help: "docstring",
Namespace: "grafana",
ConstLabels: prometheus.Labels{"constname": "constvalue"}, ConstLabels: prometheus.Labels{"constname": "constvalue"},
}) })
@ -373,7 +396,7 @@ func TestCounter(t *testing.T) {
func TestTrimGrafanaNamespace(t *testing.T) { func TestTrimGrafanaNamespace(t *testing.T) {
cntVec := prometheus.NewCounter( cntVec := prometheus.NewCounter(
prometheus.CounterOpts{ prometheus.CounterOpts{
Name: "grafana_http_request_total", Name: "http_request_total",
Help: "docstring", Help: "docstring",
ConstLabels: prometheus.Labels{"constname": "constvalue"}, ConstLabels: prometheus.Labels{"constname": "constvalue"},
}) })
@ -410,12 +433,54 @@ func TestTrimGrafanaNamespace(t *testing.T) {
} }
} }
func TestSkipNanValues(t *testing.T) {
cntVec := prometheus.NewSummary(
prometheus.SummaryOpts{
Name: "http_request_total",
Help: "docstring",
ConstLabels: prometheus.Labels{"constname": "constvalue"},
})
reg := prometheus.NewRegistry()
reg.MustRegister(cntVec)
b, err := NewBridge(&Config{
URL: "localhost:8080",
Gatherer: reg,
CountersAsDelta: true,
})
if err != nil {
t.Fatalf("error creating bridge: %v", err)
}
// first collect
mfs, err := reg.Gather()
if err != nil {
t.Fatalf("error: %v", err)
}
var buf bytes.Buffer
err = b.writeMetrics(&buf, mfs, "prefix.", model.Time(1477043083))
if err != nil {
t.Fatalf("error: %v", err)
}
want := `prefix.http_request_total_sum.constname.constvalue 0 1477043
prefix.http_request_total_count.constname.constvalue.count 0 1477043
`
if got := buf.String(); want != got {
t.Fatalf("wanted \n%s\n, got \n%s\n", want, got)
}
}
func TestPush(t *testing.T) { func TestPush(t *testing.T) {
reg := prometheus.NewRegistry() reg := prometheus.NewRegistry()
cntVec := prometheus.NewCounterVec( cntVec := prometheus.NewCounterVec(
prometheus.CounterOpts{ prometheus.CounterOpts{
Name: "name", Name: "name",
Help: "docstring", Help: "docstring",
Namespace: "grafana",
ConstLabels: prometheus.Labels{"constname": "constvalue"}, ConstLabels: prometheus.Labels{"constname": "constvalue"},
}, },
[]string{"labelname"}, []string{"labelname"},

View File

@ -102,7 +102,7 @@ func init() {
M_Http_Request_Summary = prometheus.NewSummaryVec( M_Http_Request_Summary = prometheus.NewSummaryVec(
prometheus.SummaryOpts{ prometheus.SummaryOpts{
Name: "http_request_duration_milleseconds", Name: "http_request_duration_milliseconds",
Help: "http request summary", Help: "http request summary",
}, },
[]string{"handler", "statuscode", "method"}, []string{"handler", "statuscode", "method"},
@ -127,19 +127,19 @@ func init() {
}) })
M_Api_Dashboard_Save = prometheus.NewSummary(prometheus.SummaryOpts{ M_Api_Dashboard_Save = prometheus.NewSummary(prometheus.SummaryOpts{
Name: "api_dashboard_save_milleseconds", Name: "api_dashboard_save_milliseconds",
Help: "summary for dashboard save duration", Help: "summary for dashboard save duration",
Namespace: exporterName, Namespace: exporterName,
}) })
M_Api_Dashboard_Get = prometheus.NewSummary(prometheus.SummaryOpts{ M_Api_Dashboard_Get = prometheus.NewSummary(prometheus.SummaryOpts{
Name: "api_dashboard_get_milleseconds", Name: "api_dashboard_get_milliseconds",
Help: "summary for dashboard get duration", Help: "summary for dashboard get duration",
Namespace: exporterName, Namespace: exporterName,
}) })
M_Api_Dashboard_Search = prometheus.NewSummary(prometheus.SummaryOpts{ M_Api_Dashboard_Search = prometheus.NewSummary(prometheus.SummaryOpts{
Name: "api_dashboard_search_milleseconds", Name: "api_dashboard_search_milliseconds",
Help: "summary for dashboard search duration", Help: "summary for dashboard search duration",
Namespace: exporterName, Namespace: exporterName,
}) })
@ -223,7 +223,7 @@ func init() {
}) })
M_DataSource_ProxyReq_Timer = prometheus.NewSummary(prometheus.SummaryOpts{ M_DataSource_ProxyReq_Timer = prometheus.NewSummary(prometheus.SummaryOpts{
Name: "api_dataproxy_request_all_milleseconds", Name: "api_dataproxy_request_all_milliseconds",
Help: "summary for dashboard search duration", Help: "summary for dashboard search duration",
Namespace: exporterName, Namespace: exporterName,
}) })

View File

@ -0,0 +1,36 @@
package middleware
import (
"fmt"
"net/http"
opentracing "github.com/opentracing/opentracing-go"
"github.com/opentracing/opentracing-go/ext"
"gopkg.in/macaron.v1"
)
func RequestTracing(handler string) macaron.Handler {
return func(res http.ResponseWriter, req *http.Request, c *macaron.Context) {
rw := res.(macaron.ResponseWriter)
tracer := opentracing.GlobalTracer()
wireContext, _ := tracer.Extract(opentracing.HTTPHeaders, opentracing.HTTPHeadersCarrier(req.Header))
span := tracer.StartSpan(fmt.Sprintf("HTTP %s", handler), ext.RPCServerOption(wireContext))
defer span.Finish()
ctx := opentracing.ContextWithSpan(req.Context(), span)
c.Req.Request = req.WithContext(ctx)
c.Next()
status := rw.Status()
ext.HTTPStatusCode.Set(span, uint16(status))
ext.HTTPUrl.Set(span, req.RequestURI)
ext.HTTPMethod.Set(span, req.Method)
if status >= 400 {
ext.Error.Set(span, true)
}
}
}

View File

@ -139,8 +139,8 @@ func (c *QueryCondition) executeQuery(context *alerting.EvalContext, timeRange *
return result, nil return result, nil
} }
func (c *QueryCondition) getRequestForAlertRule(datasource *m.DataSource, timeRange *tsdb.TimeRange) *tsdb.Request { func (c *QueryCondition) getRequestForAlertRule(datasource *m.DataSource, timeRange *tsdb.TimeRange) *tsdb.TsdbQuery {
req := &tsdb.Request{ req := &tsdb.TsdbQuery{
TimeRange: timeRange, TimeRange: timeRange,
Queries: []*tsdb.Query{ Queries: []*tsdb.Query{
{ {

View File

@ -168,7 +168,7 @@ func (ctx *queryConditionTestContext) exec() (*alerting.ConditionResult, error)
ctx.condition = condition ctx.condition = condition
condition.HandleRequest = func(context context.Context, req *tsdb.Request) (*tsdb.Response, error) { condition.HandleRequest = func(context context.Context, req *tsdb.TsdbQuery) (*tsdb.Response, error) {
return &tsdb.Response{ return &tsdb.Response{
Results: map[string]*tsdb.QueryResult{ Results: map[string]*tsdb.QueryResult{
"A": {Series: ctx.series}, "A": {Series: ctx.series},

View File

@ -2,8 +2,13 @@ package alerting
import ( import (
"context" "context"
"fmt"
"time" "time"
"github.com/opentracing/opentracing-go"
"github.com/opentracing/opentracing-go/ext"
tlog "github.com/opentracing/opentracing-go/log"
"github.com/benbjohnson/clock" "github.com/benbjohnson/clock"
"github.com/grafana/grafana/pkg/log" "github.com/grafana/grafana/pkg/log"
"golang.org/x/sync/errgroup" "golang.org/x/sync/errgroup"
@ -99,22 +104,44 @@ func (e *Engine) processJob(grafanaCtx context.Context, job *Job) error {
}() }()
alertCtx, cancelFn := context.WithTimeout(context.Background(), alertTimeout) alertCtx, cancelFn := context.WithTimeout(context.Background(), alertTimeout)
span := opentracing.StartSpan("alert execution")
alertCtx = opentracing.ContextWithSpan(alertCtx, span)
job.Running = true job.Running = true
evalContext := NewEvalContext(alertCtx, job.Rule) evalContext := NewEvalContext(alertCtx, job.Rule)
evalContext.Ctx = alertCtx
done := make(chan struct{}) done := make(chan struct{})
go func() { go func() {
defer func() { defer func() {
if err := recover(); err != nil { if err := recover(); err != nil {
e.log.Error("Alert Panic", "error", err, "stack", log.Stack(1)) e.log.Error("Alert Panic", "error", err, "stack", log.Stack(1))
ext.Error.Set(span, true)
span.LogFields(
tlog.Error(fmt.Errorf("%v", err)),
tlog.String("message", "failed to execute alert rule. panic was recovered."),
)
span.Finish()
close(done) close(done)
} }
}() }()
e.evalHandler.Eval(evalContext) e.evalHandler.Eval(evalContext)
e.resultHandler.Handle(evalContext) e.resultHandler.Handle(evalContext)
span.SetTag("alertId", evalContext.Rule.Id)
span.SetTag("dashboardId", evalContext.Rule.DashboardId)
span.SetTag("firing", evalContext.Firing)
span.SetTag("nodatapoints", evalContext.NoDataFound)
if evalContext.Error != nil {
ext.Error.Set(span, true)
span.LogFields(
tlog.Error(evalContext.Error),
tlog.String("message", "alerting execution failed"),
)
}
span.Finish()
close(done) close(done)
}() }()

View File

@ -89,6 +89,11 @@ func (e *DashAlertExtractor) GetAlerts() ([]*m.Alert, error) {
continue continue
} }
panelId, err := panel.Get("id").Int64()
if err != nil {
return nil, fmt.Errorf("panel id is required. err %v", err)
}
// backward compatibility check, can be removed later // backward compatibility check, can be removed later
enabled, hasEnabled := jsonAlert.CheckGet("enabled") enabled, hasEnabled := jsonAlert.CheckGet("enabled")
if hasEnabled && enabled.MustBool() == false { if hasEnabled && enabled.MustBool() == false {
@ -103,7 +108,7 @@ func (e *DashAlertExtractor) GetAlerts() ([]*m.Alert, error) {
alert := &m.Alert{ alert := &m.Alert{
DashboardId: e.Dash.Id, DashboardId: e.Dash.Id,
OrgId: e.OrgId, OrgId: e.OrgId,
PanelId: panel.Get("id").MustInt64(), PanelId: panelId,
Id: jsonAlert.Get("id").MustInt64(), Id: jsonAlert.Get("id").MustInt64(),
Name: jsonAlert.Get("name").MustString(), Name: jsonAlert.Get("name").MustString(),
Handler: jsonAlert.Get("handler").MustInt64(), Handler: jsonAlert.Get("handler").MustInt64(),

View File

@ -200,6 +200,83 @@ func TestAlertRuleExtraction(t *testing.T) {
}) })
}) })
Convey("Panels missing id should return error", func() {
panelWithoutId := `
{
"id": 57,
"title": "Graphite 4",
"originalTitle": "Graphite 4",
"tags": ["graphite"],
"rows": [
{
"panels": [
{
"title": "Active desktop users",
"editable": true,
"type": "graph",
"targets": [
{
"refId": "A",
"target": "aliasByNode(statsd.fakesite.counters.session_start.desktop.count, 4)"
}
],
"datasource": null,
"alert": {
"name": "name1",
"message": "desc1",
"handler": 1,
"frequency": "60s",
"conditions": [
{
"type": "query",
"query": {"params": ["A", "5m", "now"]},
"reducer": {"type": "avg", "params": []},
"evaluator": {"type": ">", "params": [100]}
}
]
}
},
{
"title": "Active mobile users",
"id": 4,
"targets": [
{"refId": "A", "target": ""},
{"refId": "B", "target": "aliasByNode(statsd.fakesite.counters.session_start.mobile.count, 4)"}
],
"datasource": "graphite2",
"alert": {
"name": "name2",
"message": "desc2",
"handler": 0,
"frequency": "60s",
"severity": "warning",
"conditions": [
{
"type": "query",
"query": {"params": ["B", "5m", "now"]},
"reducer": {"type": "avg", "params": []},
"evaluator": {"type": ">", "params": [100]}
}
]
}
}
]
}
]
}`
dashJson, err := simplejson.NewJson([]byte(panelWithoutId))
So(err, ShouldBeNil)
dash := m.NewDashboardFromJson(dashJson)
extractor := NewDashAlertExtractor(dash, 1)
_, err = extractor.GetAlerts()
Convey("panels without Id should return error", func() {
So(err, ShouldNotBeNil)
})
})
Convey("Parse and validate dashboard containing influxdb alert", func() { Convey("Parse and validate dashboard containing influxdb alert", func() {
json2 := `{ json2 := `{

View File

@ -28,7 +28,6 @@ func init() {
bus.AddHandler("sql", SearchUsers) bus.AddHandler("sql", SearchUsers)
bus.AddHandler("sql", GetUserOrgList) bus.AddHandler("sql", GetUserOrgList)
bus.AddHandler("sql", DeleteUser) bus.AddHandler("sql", DeleteUser)
bus.AddHandler("sql", SetUsingOrg)
bus.AddHandler("sql", UpdateUserPermissions) bus.AddHandler("sql", UpdateUserPermissions)
bus.AddHandler("sql", SetUserHelpFlag) bus.AddHandler("sql", SetUserHelpFlag)
} }

116
pkg/tracing/tracing.go Normal file
View File

@ -0,0 +1,116 @@
package tracing
import (
	"fmt"
	"io"
	"strings"

	"github.com/grafana/grafana/pkg/log"
	"github.com/grafana/grafana/pkg/setting"
	opentracing "github.com/opentracing/opentracing-go"
	jaegercfg "github.com/uber/jaeger-client-go/config"
	ini "gopkg.in/ini.v1"
)
// logger is the package-level logger used for tracing setup messages.
var (
	logger log.Logger = log.New("tracing")
)

// TracingSettings holds the parsed [tracing.jaeger] configuration section.
type TracingSettings struct {
	Enabled bool // set implicitly when Address is non-empty
	Address string // host:port of the local jaeger agent
	CustomTags map[string]string // tags added to every span
	SamplerType string // jaeger sampler type (e.g. "const", "probabilistic")
	SamplerParam float64 // parameter for the chosen sampler, defaults to 1
}
// Init reads the tracing configuration from the given ini file and, when
// enabled, installs a global jaeger tracer. The returned io.Closer flushes
// and shuts the tracer down; it is a no-op closer when tracing is disabled.
func Init(file *ini.File) (io.Closer, error) {
	return internalInit(parseSettings(file))
}
// parseSettings extracts TracingSettings from the [tracing.jaeger] section
// of the given ini file. A missing section yields disabled (zero-value)
// settings.
func parseSettings(file *ini.File) *TracingSettings {
	settings := &TracingSettings{}

	// Fix: the file argument was previously ignored in favour of the global
	// setting.Cfg. Honour it, falling back to the global config when nil so
	// existing callers keep working.
	if file == nil {
		file = setting.Cfg
	}

	section, err := file.GetSection("tracing.jaeger")
	if err != nil {
		return settings
	}

	settings.Address = section.Key("address").MustString("")
	if settings.Address != "" {
		// Tracing is enabled implicitly by configuring an agent address.
		settings.Enabled = true
	}

	settings.CustomTags = splitTagSettings(section.Key("always_included_tag").MustString(""))
	settings.SamplerType = section.Key("sampler_type").MustString("")
	settings.SamplerParam = section.Key("sampler_param").MustFloat64(1)

	return settings
}
// internalInit builds and installs a global jaeger tracer from the given
// settings. When tracing is disabled a no-op closer is returned instead.
func internalInit(settings *TracingSettings) (io.Closer, error) {
	if !settings.Enabled {
		return &nullCloser{}, nil
	}

	conf := jaegercfg.Configuration{
		Disabled: !settings.Enabled,
		Sampler: &jaegercfg.SamplerConfig{
			Type:  settings.SamplerType,
			Param: settings.SamplerParam,
		},
		Reporter: &jaegercfg.ReporterConfig{
			LogSpans:           false,
			LocalAgentHostPort: settings.Address,
		},
	}

	// Route the jaeger client's own log output through grafana's logger,
	// then attach every configured custom tag to the tracer.
	opts := []jaegercfg.Option{
		jaegercfg.Logger(&jaegerLogWrapper{logger: log.New("jaeger")}),
	}
	for tag, value := range settings.CustomTags {
		opts = append(opts, jaegercfg.Tag(tag, value))
	}

	tracer, closer, err := conf.New("grafana", opts...)
	if err != nil {
		return nil, err
	}

	logger.Info("Initialized jaeger tracer", "address", settings.Address)
	opentracing.InitGlobalTracer(tracer)
	return closer, nil
}
// splitTagSettings parses a comma separated list of "key:value" pairs into a
// map. Entries without a ":" are ignored. Only the first ":" separates key
// from value, so values may themselves contain colons (e.g. "url:http://x").
func splitTagSettings(input string) map[string]string {
	res := map[string]string{}

	for _, pair := range strings.Split(input, ",") {
		// SplitN keeps any ":" inside the value intact; plain Split would
		// truncate such values at the second colon.
		kv := strings.SplitN(pair, ":", 2)
		if len(kv) > 1 {
			res[kv[0]] = kv[1]
		}
	}

	return res
}
// jaegerLogWrapper adapts grafana's log.Logger to the logger interface
// expected by the jaeger client (Error / Infof methods).
type jaegerLogWrapper struct {
	logger log.Logger
}

// Error logs an error message coming from the jaeger client.
func (jlw *jaegerLogWrapper) Error(msg string) {
	jlw.logger.Error(msg)
}
// Infof logs an informational message from the jaeger client. The jaeger
// logger interface is printf-style, so the format verbs in msg are expanded
// before handing the message to grafana's structured logger; previously args
// were passed as a single opaque context value and never interpolated.
func (jlw *jaegerLogWrapper) Infof(msg string, args ...interface{}) {
	jlw.logger.Info(fmt.Sprintf(msg, args...))
}
// nullCloser is the no-op io.Closer returned when tracing is disabled.
type nullCloser struct{}

// Close implements io.Closer and does nothing.
func (*nullCloser) Close() error { return nil }

View File

@ -0,0 +1,36 @@
package tracing
import "testing"
// TestGroupSplit verifies splitTagSettings for well formed, empty and
// malformed inputs.
func TestGroupSplit(t *testing.T) {
	tests := []struct {
		input    string
		expected map[string]string
	}{
		{
			input: "tag1:value1,tag2:value2",
			expected: map[string]string{
				"tag1": "value1",
				"tag2": "value2",
			},
		},
		{
			input:    "",
			expected: map[string]string{},
		},
		{
			input:    "tag1",
			expected: map[string]string{},
		},
	}

	for _, test := range tests {
		tags := splitTagSettings(test.input)

		// Fix: also compare sizes. For cases whose expected map is empty the
		// key-by-key loop below never runs, so unexpected extra tags were
		// previously not detected at all.
		if len(tags) != len(test.expected) {
			t.Errorf("wrong number of tags for %v: got %v", test, tags)
		}

		for k, v := range test.expected {
			value, exists := tags[k]
			if !exists || value != v {
				t.Errorf("tags does not match %v ", test)
			}
		}
	}
}

View File

@ -1,90 +0,0 @@
package tsdb
import "context"
// Batch groups all queries that target the same data source so they can be
// executed together by a single executor.
type Batch struct {
	DataSourceId int64 // id of the data source shared by all Queries
	Queries QuerySlice // queries executed together in this batch
	Depends map[string]bool // refIds of queries this batch depends on
	Done bool // set once the batch has delivered its result
	Started bool // set when the batch has been dispatched
}

// BatchSlice is a list of batches built from one request.
type BatchSlice []*Batch
// newBatch creates a batch for the given data source holding the supplied
// queries and an empty dependency set.
func newBatch(dsId int64, queries QuerySlice) *Batch {
	b := &Batch{
		DataSourceId: dsId,
		Queries:      queries,
	}
	b.Depends = make(map[string]bool)
	return b
}
// process resolves an executor for this batch's data source and runs all of
// its queries, delivering the outcome on queryContext.ResultsChan. When no
// executor can be found, every query in the batch is reported as failed with
// the lookup error.
func (bg *Batch) process(ctx context.Context, queryContext *QueryContext) {
	executor, err := getExecutorFor(bg.Queries[0].DataSource)

	if err == nil {
		res := executor.Execute(ctx, bg.Queries, queryContext)
		bg.Done = true
		queryContext.ResultsChan <- res
		return
	}

	// Executor lookup failed: mark the batch done and fan the error out as a
	// per-query result so callers see a failure for every refId.
	bg.Done = true
	failed := &BatchResult{
		Error:        err,
		QueryResults: make(map[string]*QueryResult),
	}
	for _, query := range bg.Queries {
		failed.QueryResults[query.RefId] = &QueryResult{Error: failed.Error}
	}
	queryContext.ResultsChan <- failed
}
// addQuery appends a query to this batch.
func (bg *Batch) addQuery(query *Query) {
	bg.Queries = append(bg.Queries, query)
}
// allDependenciesAreIn reports whether every refId this batch depends on
// already has a result recorded in the given query context.
func (bg *Batch) allDependenciesAreIn(context *QueryContext) bool {
	for key := range bg.Depends {
		_, exists := context.Results[key]
		if !exists {
			return false
		}
	}
	return true
}
// getBatches partitions the request's queries into batches, one per data
// source, and records inter-batch dependencies based on each query's
// Depends refIds.
func getBatches(req *Request) (BatchSlice, error) {
	batches := make(BatchSlice, 0)

	for _, query := range req.Queries {
		existing := findMatchingBatchGroup(query, batches)
		if existing != nil {
			existing.addQuery(query)
			continue
		}

		created := newBatch(query.DataSource.Id, QuerySlice{query})
		batches = append(batches, created)

		// A dependency is recorded only when some already-batched query
		// actually carries the referenced refId.
		for _, refId := range query.Depends {
			for _, batch := range batches {
				for _, batchQuery := range batch.Queries {
					if batchQuery.RefId == refId {
						created.Depends[refId] = true
					}
				}
			}
		}
	}

	return batches, nil
}
// findMatchingBatchGroup returns the batch already targeting the query's
// data source, or nil when no such batch exists yet.
func findMatchingBatchGroup(query *Query, batches BatchSlice) *Batch {
	for _, candidate := range batches {
		if candidate.DataSourceId == query.DataSource.Id {
			return candidate
		}
	}
	return nil
}

View File

@ -1,36 +0,0 @@
package tsdb
import (
"context"
"fmt"
"github.com/grafana/grafana/pkg/models"
)
// Executor runs a slice of queries against a single data source and returns
// their combined result.
type Executor interface {
	Execute(ctx context.Context, queries QuerySlice, query *QueryContext) *BatchResult
}

// registry maps data source type names to their executor factories.
var registry map[string]GetExecutorFn

// GetExecutorFn builds an Executor for a concrete data source instance.
type GetExecutorFn func(dsInfo *models.DataSource) (Executor, error)
// init allocates the factory registry so that RegisterExecutor can be
// called from other packages' init functions.
func init() {
	registry = make(map[string]GetExecutorFn)
}
// getExecutorFor looks up the factory registered for the data source's type
// and uses it to build an executor. An error is returned when the type is
// unknown or the factory itself fails.
func getExecutorFor(dsInfo *models.DataSource) (Executor, error) {
	fn, exists := registry[dsInfo.Type]
	if !exists {
		return nil, fmt.Errorf("Could not find executor for data source type: %s", dsInfo.Type)
	}

	executor, err := fn(dsInfo)
	if err != nil {
		return nil, err
	}
	return executor, nil
}
// RegisterExecutor registers a factory for the given plugin id. The registry
// is an unsynchronized map, so registration is expected to happen during
// package init, before any lookups.
func RegisterExecutor(pluginId string, fn GetExecutorFn) {
	registry[pluginId] = fn
}

View File

@ -11,7 +11,7 @@ type FakeExecutor struct {
resultsFn map[string]ResultsFn resultsFn map[string]ResultsFn
} }
type ResultsFn func(context *QueryContext) *QueryResult type ResultsFn func(context *TsdbQuery) *QueryResult
func NewFakeExecutor(dsInfo *models.DataSource) (*FakeExecutor, error) { func NewFakeExecutor(dsInfo *models.DataSource) (*FakeExecutor, error) {
return &FakeExecutor{ return &FakeExecutor{
@ -20,9 +20,9 @@ func NewFakeExecutor(dsInfo *models.DataSource) (*FakeExecutor, error) {
}, nil }, nil
} }
func (e *FakeExecutor) Execute(ctx context.Context, queries QuerySlice, context *QueryContext) *BatchResult { func (e *FakeExecutor) Query(ctx context.Context, dsInfo *models.DataSource, context *TsdbQuery) *BatchResult {
result := &BatchResult{QueryResults: make(map[string]*QueryResult)} result := &BatchResult{QueryResults: make(map[string]*QueryResult)}
for _, query := range queries { for _, query := range context.Queries {
if results, has := e.results[query.RefId]; has { if results, has := e.results[query.RefId]; has {
result.QueryResults[query.RefId] = results result.QueryResults[query.RefId] = results
} }

View File

@ -17,24 +17,15 @@ import (
"github.com/grafana/grafana/pkg/models" "github.com/grafana/grafana/pkg/models"
"github.com/grafana/grafana/pkg/setting" "github.com/grafana/grafana/pkg/setting"
"github.com/grafana/grafana/pkg/tsdb" "github.com/grafana/grafana/pkg/tsdb"
opentracing "github.com/opentracing/opentracing-go"
) )
type GraphiteExecutor struct { type GraphiteExecutor struct {
*models.DataSource
HttpClient *http.Client HttpClient *http.Client
} }
func NewGraphiteExecutor(datasource *models.DataSource) (tsdb.Executor, error) { func NewGraphiteExecutor(datasource *models.DataSource) (tsdb.TsdbQueryEndpoint, error) {
httpClient, err := datasource.GetHttpClient() return &GraphiteExecutor{}, nil
if err != nil {
return nil, err
}
return &GraphiteExecutor{
DataSource: datasource,
HttpClient: httpClient,
}, nil
} }
var ( var (
@ -43,38 +34,61 @@ var (
func init() { func init() {
glog = log.New("tsdb.graphite") glog = log.New("tsdb.graphite")
tsdb.RegisterExecutor("graphite", NewGraphiteExecutor) tsdb.RegisterTsdbQueryEndpoint("graphite", NewGraphiteExecutor)
} }
func (e *GraphiteExecutor) Execute(ctx context.Context, queries tsdb.QuerySlice, context *tsdb.QueryContext) *tsdb.BatchResult { func (e *GraphiteExecutor) Query(ctx context.Context, dsInfo *models.DataSource, tsdbQuery *tsdb.TsdbQuery) *tsdb.BatchResult {
result := &tsdb.BatchResult{} result := &tsdb.BatchResult{}
from := "-" + formatTimeRange(tsdbQuery.TimeRange.From)
until := formatTimeRange(tsdbQuery.TimeRange.To)
var target string
formData := url.Values{ formData := url.Values{
"from": []string{"-" + formatTimeRange(context.TimeRange.From)}, "from": []string{from},
"until": []string{formatTimeRange(context.TimeRange.To)}, "until": []string{until},
"format": []string{"json"}, "format": []string{"json"},
"maxDataPoints": []string{"500"}, "maxDataPoints": []string{"500"},
} }
for _, query := range queries { for _, query := range tsdbQuery.Queries {
if fullTarget, err := query.Model.Get("targetFull").String(); err == nil { if fullTarget, err := query.Model.Get("targetFull").String(); err == nil {
formData["target"] = []string{fixIntervalFormat(fullTarget)} target = fixIntervalFormat(fullTarget)
} else { } else {
formData["target"] = []string{fixIntervalFormat(query.Model.Get("target").MustString())} target = fixIntervalFormat(query.Model.Get("target").MustString())
} }
} }
formData["target"] = []string{target}
if setting.Env == setting.DEV { if setting.Env == setting.DEV {
glog.Debug("Graphite request", "params", formData) glog.Debug("Graphite request", "params", formData)
} }
req, err := e.createRequest(formData) req, err := e.createRequest(dsInfo, formData)
if err != nil { if err != nil {
result.Error = err result.Error = err
return result return result
} }
res, err := ctxhttp.Do(ctx, e.HttpClient, req) httpClient, err := dsInfo.GetHttpClient()
if err != nil {
result.Error = err
return result
}
span, ctx := opentracing.StartSpanFromContext(ctx, "graphite query")
span.SetTag("target", target)
span.SetTag("from", from)
span.SetTag("until", until)
defer span.Finish()
opentracing.GlobalTracer().Inject(
span.Context(),
opentracing.HTTPHeaders,
opentracing.HTTPHeadersCarrier(req.Header))
res, err := ctxhttp.Do(ctx, httpClient, req)
if err != nil { if err != nil {
result.Error = err result.Error = err
return result return result
@ -126,8 +140,8 @@ func (e *GraphiteExecutor) parseResponse(res *http.Response) ([]TargetResponseDT
return data, nil return data, nil
} }
func (e *GraphiteExecutor) createRequest(data url.Values) (*http.Request, error) { func (e *GraphiteExecutor) createRequest(dsInfo *models.DataSource, data url.Values) (*http.Request, error) {
u, _ := url.Parse(e.Url) u, _ := url.Parse(dsInfo.Url)
u.Path = path.Join(u.Path, "render") u.Path = path.Join(u.Path, "render")
req, err := http.NewRequest(http.MethodPost, u.String(), strings.NewReader(data.Encode())) req, err := http.NewRequest(http.MethodPost, u.String(), strings.NewReader(data.Encode()))
@ -137,8 +151,8 @@ func (e *GraphiteExecutor) createRequest(data url.Values) (*http.Request, error)
} }
req.Header.Set("Content-Type", "application/x-www-form-urlencoded") req.Header.Set("Content-Type", "application/x-www-form-urlencoded")
if e.BasicAuth { if dsInfo.BasicAuth {
req.SetBasicAuth(e.BasicAuthUser, e.BasicAuthPassword) req.SetBasicAuth(dsInfo.BasicAuthUser, dsInfo.BasicAuthPassword)
} }
return req, err return req, err

View File

@ -17,24 +17,16 @@ import (
) )
type InfluxDBExecutor struct { type InfluxDBExecutor struct {
*models.DataSource //*models.DataSource
QueryParser *InfluxdbQueryParser QueryParser *InfluxdbQueryParser
ResponseParser *ResponseParser ResponseParser *ResponseParser
HttpClient *http.Client //HttpClient *http.Client
} }
func NewInfluxDBExecutor(datasource *models.DataSource) (tsdb.Executor, error) { func NewInfluxDBExecutor(datasource *models.DataSource) (tsdb.TsdbQueryEndpoint, error) {
httpClient, err := datasource.GetHttpClient()
if err != nil {
return nil, err
}
return &InfluxDBExecutor{ return &InfluxDBExecutor{
DataSource: datasource,
QueryParser: &InfluxdbQueryParser{}, QueryParser: &InfluxdbQueryParser{},
ResponseParser: &ResponseParser{}, ResponseParser: &ResponseParser{},
HttpClient: httpClient,
}, nil }, nil
} }
@ -44,18 +36,18 @@ var (
func init() { func init() {
glog = log.New("tsdb.influxdb") glog = log.New("tsdb.influxdb")
tsdb.RegisterExecutor("influxdb", NewInfluxDBExecutor) tsdb.RegisterTsdbQueryEndpoint("influxdb", NewInfluxDBExecutor)
} }
func (e *InfluxDBExecutor) Execute(ctx context.Context, queries tsdb.QuerySlice, context *tsdb.QueryContext) *tsdb.BatchResult { func (e *InfluxDBExecutor) Query(ctx context.Context, dsInfo *models.DataSource, tsdbQuery *tsdb.TsdbQuery) *tsdb.BatchResult {
result := &tsdb.BatchResult{} result := &tsdb.BatchResult{}
query, err := e.getQuery(queries, context) query, err := e.getQuery(dsInfo, tsdbQuery.Queries, tsdbQuery)
if err != nil { if err != nil {
return result.WithError(err) return result.WithError(err)
} }
rawQuery, err := query.Build(context) rawQuery, err := query.Build(tsdbQuery)
if err != nil { if err != nil {
return result.WithError(err) return result.WithError(err)
} }
@ -64,12 +56,17 @@ func (e *InfluxDBExecutor) Execute(ctx context.Context, queries tsdb.QuerySlice,
glog.Debug("Influxdb query", "raw query", rawQuery) glog.Debug("Influxdb query", "raw query", rawQuery)
} }
req, err := e.createRequest(rawQuery) req, err := e.createRequest(dsInfo, rawQuery)
if err != nil { if err != nil {
return result.WithError(err) return result.WithError(err)
} }
resp, err := ctxhttp.Do(ctx, e.HttpClient, req) httpClient, err := dsInfo.GetHttpClient()
if err != nil {
return result.WithError(err)
}
resp, err := ctxhttp.Do(ctx, httpClient, req)
if err != nil { if err != nil {
return result.WithError(err) return result.WithError(err)
} }
@ -98,10 +95,10 @@ func (e *InfluxDBExecutor) Execute(ctx context.Context, queries tsdb.QuerySlice,
return result return result
} }
func (e *InfluxDBExecutor) getQuery(queries tsdb.QuerySlice, context *tsdb.QueryContext) (*Query, error) { func (e *InfluxDBExecutor) getQuery(dsInfo *models.DataSource, queries []*tsdb.Query, context *tsdb.TsdbQuery) (*Query, error) {
for _, v := range queries { for _, v := range queries {
query, err := e.QueryParser.Parse(v.Model, e.DataSource) query, err := e.QueryParser.Parse(v.Model, dsInfo)
if err != nil { if err != nil {
return nil, err return nil, err
} }
@ -112,8 +109,8 @@ func (e *InfluxDBExecutor) getQuery(queries tsdb.QuerySlice, context *tsdb.Query
return nil, fmt.Errorf("query request contains no queries") return nil, fmt.Errorf("query request contains no queries")
} }
func (e *InfluxDBExecutor) createRequest(query string) (*http.Request, error) { func (e *InfluxDBExecutor) createRequest(dsInfo *models.DataSource, query string) (*http.Request, error) {
u, _ := url.Parse(e.Url) u, _ := url.Parse(dsInfo.Url)
u.Path = path.Join(u.Path, "query") u.Path = path.Join(u.Path, "query")
req, err := http.NewRequest(http.MethodGet, u.String(), nil) req, err := http.NewRequest(http.MethodGet, u.String(), nil)
@ -123,18 +120,18 @@ func (e *InfluxDBExecutor) createRequest(query string) (*http.Request, error) {
params := req.URL.Query() params := req.URL.Query()
params.Set("q", query) params.Set("q", query)
params.Set("db", e.Database) params.Set("db", dsInfo.Database)
params.Set("epoch", "s") params.Set("epoch", "s")
req.URL.RawQuery = params.Encode() req.URL.RawQuery = params.Encode()
req.Header.Set("User-Agent", "Grafana") req.Header.Set("User-Agent", "Grafana")
if e.BasicAuth { if dsInfo.BasicAuth {
req.SetBasicAuth(e.BasicAuthUser, e.BasicAuthPassword) req.SetBasicAuth(dsInfo.BasicAuthUser, dsInfo.BasicAuthPassword)
} }
if !e.BasicAuth && e.User != "" { if !dsInfo.BasicAuth && dsInfo.User != "" {
req.SetBasicAuth(e.User, e.Password) req.SetBasicAuth(dsInfo.User, dsInfo.Password)
} }
glog.Debug("Influxdb request", "url", req.URL.String()) glog.Debug("Influxdb request", "url", req.URL.String())

View File

@ -16,7 +16,7 @@ var (
regexpMeasurementPattern *regexp.Regexp = regexp.MustCompile(`^\/.*\/$`) regexpMeasurementPattern *regexp.Regexp = regexp.MustCompile(`^\/.*\/$`)
) )
func (query *Query) Build(queryContext *tsdb.QueryContext) (string, error) { func (query *Query) Build(queryContext *tsdb.TsdbQuery) (string, error) {
var res string var res string
if query.UseRawQuery && query.RawQuery != "" { if query.UseRawQuery && query.RawQuery != "" {
@ -41,7 +41,7 @@ func (query *Query) Build(queryContext *tsdb.QueryContext) (string, error) {
return res, nil return res, nil
} }
func getDefinedInterval(query *Query, queryContext *tsdb.QueryContext) (*tsdb.Interval, error) { func getDefinedInterval(query *Query, queryContext *tsdb.TsdbQuery) (*tsdb.Interval, error) {
defaultInterval := tsdb.CalculateInterval(queryContext.TimeRange) defaultInterval := tsdb.CalculateInterval(queryContext.TimeRange)
if query.Interval == "" { if query.Interval == "" {
@ -104,7 +104,7 @@ func (query *Query) renderTags() []string {
return res return res
} }
func (query *Query) renderTimeFilter(queryContext *tsdb.QueryContext) string { func (query *Query) renderTimeFilter(queryContext *tsdb.TsdbQuery) string {
from := "now() - " + queryContext.TimeRange.From from := "now() - " + queryContext.TimeRange.From
to := "" to := ""
@ -115,7 +115,7 @@ func (query *Query) renderTimeFilter(queryContext *tsdb.QueryContext) string {
return fmt.Sprintf("time > %s%s", from, to) return fmt.Sprintf("time > %s%s", from, to)
} }
func (query *Query) renderSelectors(queryContext *tsdb.QueryContext) string { func (query *Query) renderSelectors(queryContext *tsdb.TsdbQuery) string {
res := "SELECT " res := "SELECT "
var selectors []string var selectors []string
@ -163,7 +163,7 @@ func (query *Query) renderWhereClause() string {
return res return res
} }
func (query *Query) renderGroupBy(queryContext *tsdb.QueryContext) string { func (query *Query) renderGroupBy(queryContext *tsdb.TsdbQuery) string {
groupBy := "" groupBy := ""
for i, group := range query.GroupBy { for i, group := range query.GroupBy {
if i == 0 { if i == 0 {

View File

@ -15,7 +15,7 @@ type DefinitionParameters struct {
} }
type QueryDefinition struct { type QueryDefinition struct {
Renderer func(query *Query, queryContext *tsdb.QueryContext, part *QueryPart, innerExpr string) string Renderer func(query *Query, queryContext *tsdb.TsdbQuery, part *QueryPart, innerExpr string) string
Params []DefinitionParameters Params []DefinitionParameters
} }
@ -94,14 +94,14 @@ func init() {
renders["alias"] = QueryDefinition{Renderer: aliasRenderer} renders["alias"] = QueryDefinition{Renderer: aliasRenderer}
} }
func fieldRenderer(query *Query, queryContext *tsdb.QueryContext, part *QueryPart, innerExpr string) string { func fieldRenderer(query *Query, queryContext *tsdb.TsdbQuery, part *QueryPart, innerExpr string) string {
if part.Params[0] == "*" { if part.Params[0] == "*" {
return "*" return "*"
} }
return fmt.Sprintf(`"%s"`, part.Params[0]) return fmt.Sprintf(`"%s"`, part.Params[0])
} }
func functionRenderer(query *Query, queryContext *tsdb.QueryContext, part *QueryPart, innerExpr string) string { func functionRenderer(query *Query, queryContext *tsdb.TsdbQuery, part *QueryPart, innerExpr string) string {
for i, param := range part.Params { for i, param := range part.Params {
if part.Type == "time" && param == "auto" { if part.Type == "time" && param == "auto" {
part.Params[i] = "$__interval" part.Params[i] = "$__interval"
@ -117,15 +117,15 @@ func functionRenderer(query *Query, queryContext *tsdb.QueryContext, part *Query
return fmt.Sprintf("%s(%s)", part.Type, params) return fmt.Sprintf("%s(%s)", part.Type, params)
} }
func suffixRenderer(query *Query, queryContext *tsdb.QueryContext, part *QueryPart, innerExpr string) string { func suffixRenderer(query *Query, queryContext *tsdb.TsdbQuery, part *QueryPart, innerExpr string) string {
return fmt.Sprintf("%s %s", innerExpr, part.Params[0]) return fmt.Sprintf("%s %s", innerExpr, part.Params[0])
} }
func aliasRenderer(query *Query, queryContext *tsdb.QueryContext, part *QueryPart, innerExpr string) string { func aliasRenderer(query *Query, queryContext *tsdb.TsdbQuery, part *QueryPart, innerExpr string) string {
return fmt.Sprintf(`%s AS "%s"`, innerExpr, part.Params[0]) return fmt.Sprintf(`%s AS "%s"`, innerExpr, part.Params[0])
} }
func (r QueryDefinition) Render(query *Query, queryContext *tsdb.QueryContext, part *QueryPart, innerExpr string) string { func (r QueryDefinition) Render(query *Query, queryContext *tsdb.TsdbQuery, part *QueryPart, innerExpr string) string {
return r.Renderer(query, queryContext, part, innerExpr) return r.Renderer(query, queryContext, part, innerExpr)
} }
@ -149,6 +149,6 @@ type QueryPart struct {
Params []string Params []string
} }
func (qp *QueryPart) Render(query *Query, queryContext *tsdb.QueryContext, expr string) string { func (qp *QueryPart) Render(query *Query, queryContext *tsdb.TsdbQuery, expr string) string {
return qp.Def.Renderer(query, queryContext, qp, expr) return qp.Def.Renderer(query, queryContext, qp, expr)
} }

View File

@ -10,7 +10,7 @@ import (
func TestInfluxdbQueryPart(t *testing.T) { func TestInfluxdbQueryPart(t *testing.T) {
Convey("Influxdb query parts", t, func() { Convey("Influxdb query parts", t, func() {
queryContext := &tsdb.QueryContext{TimeRange: tsdb.NewTimeRange("5m", "now")} queryContext := &tsdb.TsdbQuery{TimeRange: tsdb.NewTimeRange("5m", "now")}
query := &Query{} query := &Query{}
Convey("render field ", func() { Convey("render field ", func() {

View File

@ -28,7 +28,7 @@ func TestInfluxdbQueryBuilder(t *testing.T) {
tag1 := &Tag{Key: "hostname", Value: "server1", Operator: "="} tag1 := &Tag{Key: "hostname", Value: "server1", Operator: "="}
tag2 := &Tag{Key: "hostname", Value: "server2", Operator: "=", Condition: "OR"} tag2 := &Tag{Key: "hostname", Value: "server2", Operator: "=", Condition: "OR"}
queryContext := &tsdb.QueryContext{ queryContext := &tsdb.TsdbQuery{
TimeRange: tsdb.NewTimeRange("5m", "now"), TimeRange: tsdb.NewTimeRange("5m", "now"),
} }
@ -101,12 +101,12 @@ func TestInfluxdbQueryBuilder(t *testing.T) {
query := Query{} query := Query{}
Convey("render from: 2h to now-1h", func() { Convey("render from: 2h to now-1h", func() {
query := Query{} query := Query{}
queryContext := &tsdb.QueryContext{TimeRange: tsdb.NewTimeRange("2h", "now-1h")} queryContext := &tsdb.TsdbQuery{TimeRange: tsdb.NewTimeRange("2h", "now-1h")}
So(query.renderTimeFilter(queryContext), ShouldEqual, "time > now() - 2h and time < now() - 1h") So(query.renderTimeFilter(queryContext), ShouldEqual, "time > now() - 2h and time < now() - 1h")
}) })
Convey("render from: 10m", func() { Convey("render from: 10m", func() {
queryContext := &tsdb.QueryContext{TimeRange: tsdb.NewTimeRange("10m", "now")} queryContext := &tsdb.TsdbQuery{TimeRange: tsdb.NewTimeRange("10m", "now")}
So(query.renderTimeFilter(queryContext), ShouldEqual, "time > now() - 10m") So(query.renderTimeFilter(queryContext), ShouldEqual, "time > now() - 10m")
}) })
}) })

View File

@ -6,24 +6,21 @@ import (
"github.com/grafana/grafana/pkg/models" "github.com/grafana/grafana/pkg/models"
) )
type TsdbQuery struct {
TimeRange *TimeRange
Queries []*Query
}
type Query struct { type Query struct {
RefId string RefId string
Model *simplejson.Json Model *simplejson.Json
Depends []string Depends []string
DataSource *models.DataSource DataSource *models.DataSource
Results []*TimeSeries Results []*TimeSeries
Exclude bool
MaxDataPoints int64 MaxDataPoints int64
IntervalMs int64 IntervalMs int64
} }
type QuerySlice []*Query
type Request struct {
TimeRange *TimeRange
Queries QuerySlice
}
type Response struct { type Response struct {
BatchTimings []*BatchTiming `json:"timings"` BatchTimings []*BatchTiming `json:"timings"`
Results map[string]*QueryResult `json:"results"` Results map[string]*QueryResult `json:"results"`

View File

@ -1,129 +0,0 @@
package mqe
import (
"context"
"net/http"
"net/url"
"path"
"strings"
"github.com/grafana/grafana/pkg/components/simplejson"
"github.com/grafana/grafana/pkg/log"
"github.com/grafana/grafana/pkg/models"
"github.com/grafana/grafana/pkg/setting"
"github.com/grafana/grafana/pkg/tsdb"
"golang.org/x/net/context/ctxhttp"
)
// MaxWorker is the number of concurrent workers used to send MQE queries.
var (
	MaxWorker int = 4
)

// apiClient sends raw MQE queries to the data source over HTTP and parses
// the responses into tsdb time series.
type apiClient struct {
	*models.DataSource
	log log.Logger
	httpClient *http.Client
	responseParser *ResponseParser
}
// NewApiClient creates an apiClient bound to the given datasource, using the
// supplied http client for all requests.
func NewApiClient(httpClient *http.Client, datasource *models.DataSource) *apiClient {
	client := &apiClient{
		DataSource:     datasource,
		httpClient:     httpClient,
		responseParser: NewResponseParser(),
	}
	client.log = log.New("tsdb.mqe")
	return client
}
// PerformRequests executes the given raw MQE queries concurrently (up to
// MaxWorker workers) and merges the resulting time series into one
// QueryResult. The first worker error, or context cancellation, aborts the
// wait and is returned to the caller.
func (e *apiClient) PerformRequests(ctx context.Context, queries []QueryToSend) (*tsdb.QueryResult, error) {
	queryResult := &tsdb.QueryResult{}

	queryCount := len(queries)
	jobsChan := make(chan QueryToSend, queryCount)
	resultChan := make(chan []*tsdb.TimeSeries, queryCount)
	// Fix: buffer one error slot per query. With the previous capacity of 1,
	// a second failing worker would block forever on its send after this
	// function had already returned, leaking that goroutine.
	errorsChan := make(chan error, queryCount)
	for w := 1; w <= MaxWorker; w++ {
		go e.spawnWorker(ctx, w, jobsChan, resultChan, errorsChan)
	}

	// Queue all work up front; the buffered channel guarantees these sends
	// never block.
	for _, v := range queries {
		jobsChan <- v
	}
	close(jobsChan)

	resultCounter := 0
	for {
		select {
		case timeseries := <-resultChan:
			queryResult.Series = append(queryResult.Series, timeseries...)
			resultCounter++

			if resultCounter == queryCount {
				close(resultChan)
				return queryResult, nil
			}
		case err := <-errorsChan:
			return nil, err
		case <-ctx.Done():
			return nil, ctx.Err()
		}
	}
}
// spawnWorker consumes queries from jobs, performs the HTTP request for each
// and parses the response, sending parsed series to results. The first error
// is sent to errors and terminates this worker.
func (e *apiClient) spawnWorker(ctx context.Context, id int, jobs chan QueryToSend, results chan []*tsdb.TimeSeries, errors chan error) {
	e.log.Debug("Spawning worker", "id", id)

	for query := range jobs {
		if setting.Env == setting.DEV {
			e.log.Debug("Sending request", "query", query.RawQuery)
		}

		req, err := e.createRequest(query.RawQuery)
		// Fix: this error was previously ignored, so a failed request build
		// handed a nil *http.Request straight to ctxhttp.Do.
		if err != nil {
			errors <- err
			return
		}

		resp, err := ctxhttp.Do(ctx, e.httpClient, req)
		if err != nil {
			errors <- err
			return
		}

		series, err := e.responseParser.Parse(resp, query)
		if err != nil {
			errors <- err
			return
		}

		results <- series
	}
	e.log.Debug("Worker is complete", "id", id)
}
// createRequest builds a POST request against the datasource's /query
// endpoint with a JSON body of the form {"query": <raw query>}, applying
// basic auth when the datasource is configured for it.
func (e *apiClient) createRequest(query string) (*http.Request, error) {
	endpoint, err := url.Parse(e.Url)
	if err != nil {
		return nil, err
	}
	endpoint.Path = path.Join(endpoint.Path, "query")

	body := simplejson.New()
	body.Set("query", query)
	jsonPayload, err := body.MarshalJSON()
	if err != nil {
		return nil, err
	}

	req, err := http.NewRequest(http.MethodPost, endpoint.String(), strings.NewReader(string(jsonPayload)))
	if err != nil {
		return nil, err
	}
	req.Header.Set("User-Agent", "Grafana")
	req.Header.Set("Content-Type", "application/json")

	if e.BasicAuth {
		req.SetBasicAuth(e.BasicAuthUser, e.BasicAuthPassword)
	}
	return req, nil
}

View File

@ -1,60 +0,0 @@
package mqe
import (
"github.com/grafana/grafana/pkg/components/simplejson"
"github.com/grafana/grafana/pkg/models"
"github.com/grafana/grafana/pkg/tsdb"
)
// NewQueryParser returns a stateless parser for MQE panel query models.
func NewQueryParser() *QueryParser {
	return &QueryParser{}
}

// QueryParser converts a panel's JSON query model into an MQE Query.
type QueryParser struct{}
// Parse converts a panel's JSON query model into an MQE Query, copying over
// alias options, raw-query settings, cluster/host filters, the metrics list
// and the function list. It fails only when a metric entry is missing its
// required "metric" field.
func (qp *QueryParser) Parse(model *simplejson.Json, dsInfo *models.DataSource, queryContext *tsdb.QueryContext) (*Query, error) {
	query := &Query{TimeRange: queryContext.TimeRange}
	query.AddClusterToAlias = model.Get("addClusterToAlias").MustBool(false)
	query.AddHostToAlias = model.Get("addHostToAlias").MustBool(false)
	query.UseRawQuery = model.Get("rawQuery").MustBool(false)
	query.RawQuery = model.Get("query").MustString("")

	query.Cluster = model.Get("cluster").MustStringArray([]string{})
	query.Hosts = model.Get("hosts").MustStringArray([]string{})

	var metrics []Metric
	for _, metricsObj := range model.Get("metrics").MustArray() {
		metricJson := simplejson.NewFromAny(metricsObj)
		var m Metric

		m.Alias = metricJson.Get("alias").MustString("")

		// "metric" is the only required field of a metric entry.
		metric, err := metricJson.Get("metric").String()
		if err != nil {
			return nil, err
		}
		m.Metric = metric

		metrics = append(metrics, m)
	}
	query.Metrics = metrics

	var functions []Function
	for _, functionListObj := range model.Get("functionList").MustArray() {
		functionListJson := simplejson.NewFromAny(functionListObj)
		var f Function

		// Fix: MustString never sets an error; the previous check of a stale
		// err value here was dead code and has been removed.
		f.Func = functionListJson.Get("func").MustString("")
		if f.Func != "" {
			functions = append(functions, f)
		}
	}
	query.FunctionList = functions

	return query, nil
}

View File

@ -1,127 +0,0 @@
package mqe
import (
"testing"
"github.com/grafana/grafana/pkg/components/simplejson"
"github.com/grafana/grafana/pkg/models"
"github.com/grafana/grafana/pkg/tsdb"
. "github.com/smartystreets/goconvey/convey"
)
// TestMQEQueryParser checks that QueryParser.Parse maps the panel JSON
// model onto an mqe Query for simple, multi-serie and raw-query panels.
func TestMQEQueryParser(t *testing.T) {
	Convey("MQE query parser", t, func() {
		parser := &QueryParser{}
		dsInfo := &models.DataSource{JsonData: simplejson.New()}
		queryContext := &tsdb.QueryContext{}

		// Minimal model: one host, one wildcard metric, raw mode off.
		Convey("can parse simple mqe model", func() {
			json := `
			{
				"cluster": [],
				"hosts": [
					"staples-lab-1"
				],
				"metrics": [
					{
						"metric": "os.cpu.all*"
					}
				],
				"rawQuery": "",
				"refId": "A"
			}
			`
			modelJson, err := simplejson.NewJson([]byte(json))
			So(err, ShouldBeNil)

			query, err := parser.Parse(modelJson, dsInfo, queryContext)
			So(err, ShouldBeNil)
			// "rawQuery": "" is not boolean true, so raw mode stays off.
			So(query.UseRawQuery, ShouldBeFalse)

			So(len(query.Cluster), ShouldEqual, 0)
			So(query.Hosts[0], ShouldEqual, "staples-lab-1")
			So(query.Metrics[0].Metric, ShouldEqual, "os.cpu.all*")
		})

		// Two metrics, two functions, cluster filter and both alias flags.
		Convey("can parse multi serie mqe model", func() {
			json := `
			{
				"cluster": [
					"demoapp"
				],
				"hosts": [
					"staples-lab-1"
				],
				"metrics": [
					{
						"metric": "os.cpu.all.active_percentage"
					},
					{
						"metric": "os.disk.sda.io_time"
					}
				],
				"functionList": [
					{
						"func": "aggregate.min"
					},
					{
						"func": "aggregate.max"
					}
				],
				"rawQuery": "",
				"refId": "A",
				"addClusterToAlias": true,
				"addHostToAlias": true
			}
			`
			modelJson, err := simplejson.NewJson([]byte(json))
			So(err, ShouldBeNil)

			query, err := parser.Parse(modelJson, dsInfo, queryContext)
			So(err, ShouldBeNil)
			So(query.UseRawQuery, ShouldBeFalse)

			So(query.Cluster[0], ShouldEqual, "demoapp")
			So(query.Metrics[0].Metric, ShouldEqual, "os.cpu.all.active_percentage")
			So(query.Metrics[1].Metric, ShouldEqual, "os.disk.sda.io_time")
			So(query.FunctionList[0].Func, ShouldEqual, "aggregate.min")
			So(query.FunctionList[1].Func, ShouldEqual, "aggregate.max")
		})

		// rawQuery=true: the "query" string is carried through verbatim.
		Convey("can parse raw query", func() {
			json := `
			{
				"addClusterToAlias": true,
				"addHostToAlias": true,
				"cluster": [],
				"hosts": [
					"staples-lab-1"
				],
				"metrics": [
					{
						"alias": "cpu active",
						"metric": "os.cpu.all.active_percentage"
					},
					{
						"alias": "disk sda time",
						"metric": "os.disk.sda.io_time"
					}
				],
				"rawQuery": true,
				"query": "raw-query",
				"refId": "A"
			}
			`
			modelJson, err := simplejson.NewJson([]byte(json))
			So(err, ShouldBeNil)

			query, err := parser.Parse(modelJson, dsInfo, queryContext)
			So(err, ShouldBeNil)
			So(query.UseRawQuery, ShouldBeTrue)
			So(query.RawQuery, ShouldEqual, "raw-query")
			So(query.AddClusterToAlias, ShouldBeTrue)
			So(query.AddHostToAlias, ShouldBeTrue)
		})
	})
}

View File

@ -1,85 +0,0 @@
package mqe
import (
"context"
"net/http"
"github.com/grafana/grafana/pkg/log"
"github.com/grafana/grafana/pkg/models"
"github.com/grafana/grafana/pkg/tsdb"
)
// MQEExecutor executes queries against an MQE datasource: it parses the
// panel model, expands wildcards against the server's token metadata and
// dispatches the resulting raw queries via the api client.
type MQEExecutor struct {
	*models.DataSource
	queryParser *QueryParser
	apiClient   *apiClient
	httpClient  *http.Client
	log         log.Logger
	tokenClient *TokenClient
}

// NewMQEExecutor builds an executor bound to the given datasource; it fails
// only if an HTTP client cannot be created for the datasource settings.
func NewMQEExecutor(dsInfo *models.DataSource) (tsdb.Executor, error) {
	httpclient, err := dsInfo.GetHttpClient()
	if err != nil {
		return nil, err
	}

	return &MQEExecutor{
		DataSource:  dsInfo,
		httpClient:  httpclient,
		log:         log.New("tsdb.mqe"),
		queryParser: NewQueryParser(),
		apiClient:   NewApiClient(httpclient, dsInfo),
		tokenClient: NewTokenClient(dsInfo),
	}, nil
}

func init() {
	// Register this executor for the "mqe-datasource" plugin type.
	tsdb.RegisterExecutor("mqe-datasource", NewMQEExecutor)
}

// QueryToSend is one concrete raw MQE query string, together with the
// metric it was expanded from and the parsed query it belongs to.
type QueryToSend struct {
	RawQuery string
	Metric   Metric
	QueryRef *Query
}
// Execute parses every panel query, expands wildcards against the series
// known to the server, sends the resulting raw queries and collects the
// combined result under the single ref id "A".
func (e *MQEExecutor) Execute(ctx context.Context, queries tsdb.QuerySlice, queryContext *tsdb.QueryContext) *tsdb.BatchResult {
	result := &tsdb.BatchResult{}

	availableSeries, err := e.tokenClient.GetTokenData(ctx)
	if err != nil {
		return result.WithError(err)
	}

	// Step 1: parse each panel query into an mqe Query.
	var parsedQueries []*Query
	for _, query := range queries {
		parsed, err := e.queryParser.Parse(query.Model, e.DataSource, queryContext)
		if err != nil {
			return result.WithError(err)
		}
		parsedQueries = append(parsedQueries, parsed)
	}

	// Step 2: expand each parsed query into raw query strings.
	var rawQueries []QueryToSend
	for _, parsed := range parsedQueries {
		expanded, err := parsed.Build(availableSeries.Metrics)
		if err != nil {
			return result.WithError(err)
		}
		rawQueries = append(rawQueries, expanded...)
	}

	e.log.Debug("Sending request", "url", e.DataSource.Url)

	queryResult, err := e.apiClient.PerformRequests(ctx, rawQueries)
	if err != nil {
		return result.WithError(err)
	}

	result.QueryResults = map[string]*tsdb.QueryResult{"A": queryResult}
	return result
}

View File

@ -1,177 +0,0 @@
package mqe
import (
"encoding/json"
"io/ioutil"
"net/http"
"strconv"
"strings"
"fmt"
"regexp"
"github.com/grafana/grafana/pkg/components/null"
"github.com/grafana/grafana/pkg/log"
"github.com/grafana/grafana/pkg/tsdb"
)
// NewResponseParser returns a ResponseParser with its own logger.
func NewResponseParser() *ResponseParser {
	return &ResponseParser{
		log: log.New("tsdb.mqe"),
	}
}

// Patterns used when formatting series names. Compiling them in the var
// initializers (instead of an init func) is the idiomatic form and runs at
// the same time — package initialization.
var (
	// indexAliasPattern matches "$1".."$9" style index references in an alias.
	indexAliasPattern = regexp.MustCompile(`\$(\d)`)
	// wildcardAliasPattern matches the wildcard/exclusion markers "*" and "!".
	wildcardAliasPattern = regexp.MustCompile(`[*!]`)
)
// MQEResponse is the top-level payload returned by the MQE query endpoint.
type MQEResponse struct {
	Success bool               `json:"success"`
	Name    string             `json:"name"`
	Body    []MQEResponseSerie `json:"body"`
}

// ResponseTimeRange describes the span and step of a result series, in
// milliseconds since the epoch.
type ResponseTimeRange struct {
	Start int64 `json:"start"`
	End   int64 `json:"end"`
	// Tag fixed from "Resolution" to match the server's lowercase key
	// (decoding was unaffected: encoding/json matches case-insensitively).
	Resolution int64 `json:"resolution"`
}

// MQEResponseSerie is one named result with its per-tagset series.
type MQEResponseSerie struct {
	Query     string            `json:"query"`
	Name      string            `json:"name"`
	Type      string            `json:"type"`
	Series    []MQESerie        `json:"series"`
	TimeRange ResponseTimeRange `json:"timerange"`
}

// MQESerie holds the values for one tag combination; null entries mark
// missing points.
type MQESerie struct {
	Values []null.Float      `json:"values"`
	Tagset map[string]string `json:"tagset"`
}

// ResponseParser turns MQE HTTP responses into Grafana time series.
type ResponseParser struct {
	log log.Logger
}
// Parse reads and decodes an MQE query HTTP response into Grafana time
// series — one serie per tagset — naming each via formatLegend.
// Non-2xx responses and envelopes with success=false are errors.
func (parser *ResponseParser) Parse(res *http.Response, queryRef QueryToSend) ([]*tsdb.TimeSeries, error) {
	body, err := ioutil.ReadAll(res.Body)
	defer res.Body.Close()
	if err != nil {
		return nil, err
	}

	// Anything outside 2xx is treated as a failure.
	if res.StatusCode/100 != 2 {
		parser.log.Error("Request failed", "status code", res.StatusCode, "body", string(body))
		return nil, fmt.Errorf("Returned invalid statuscode")
	}

	var data *MQEResponse = &MQEResponse{}
	err = json.Unmarshal(body, data)
	if err != nil {
		parser.log.Info("Failed to unmarshal response", "error", err, "status", res.Status, "body", string(body))
		return nil, err
	}

	if !data.Success {
		return nil, fmt.Errorf("Request failed.")
	}

	var series []*tsdb.TimeSeries
	for _, body := range data.Body {
		for _, mqeSerie := range body.Series {
			serie := &tsdb.TimeSeries{
				Tags: map[string]string{},
				Name: parser.formatLegend(body, mqeSerie, queryRef),
			}

			// Copy the serie's tagset onto the Grafana series.
			for key, value := range mqeSerie.Tagset {
				serie.Tags[key] = value
			}

			// Timestamps are reconstructed from the start time and step:
			// point i is at Start + i*Resolution (milliseconds).
			for i, value := range mqeSerie.Values {
				timestamp := body.TimeRange.Start + int64(i)*body.TimeRange.Resolution
				serie.Points = append(serie.Points, tsdb.NewTimePoint(value, float64(timestamp)))
			}

			series = append(series, serie)
		}
	}

	return series, nil
}
// formatLegend builds the display name for one serie: an optional
// "<cluster> " and/or "<host> " prefix (when the query asked for them and
// the tag is present), followed by the formatted metric name.
//
// Fix: the old code scanned the whole tagset twice looking for fixed keys;
// direct map lookups are equivalent and O(1).
func (parser *ResponseParser) formatLegend(body MQEResponseSerie, mqeSerie MQESerie, queryToSend QueryToSend) string {
	namePrefix := ""

	if cluster, ok := mqeSerie.Tagset["cluster"]; ok && queryToSend.QueryRef.AddClusterToAlias {
		namePrefix += cluster + " "
	}
	if host, ok := mqeSerie.Tagset["host"]; ok && queryToSend.QueryRef.AddHostToAlias {
		namePrefix += host + " "
	}

	return namePrefix + parser.formatName(body, queryToSend)
}
// formatName picks how a result series is named: a "$n" index alias wins,
// then a wildcard alias (when both metric and alias contain wildcard
// markers), otherwise the server-provided series name is kept.
func (parser *ResponseParser) formatName(body MQEResponseSerie, queryToSend QueryToSend) string {
	alias := queryToSend.Metric.Alias

	switch {
	case indexAliasPattern.MatchString(alias):
		return parser.indexAlias(body, queryToSend)
	case wildcardAliasPattern.MatchString(queryToSend.Metric.Metric) && wildcardAliasPattern.MatchString(alias):
		return parser.wildcardAlias(body, queryToSend)
	default:
		return body.Name
	}
}
// wildcardAlias resolves a "*" alias by capturing the part of the raw query
// that matched the first wildcard of the metric pattern. When the pattern
// does not compile or nothing is captured, the alias text itself is used.
func (parser *ResponseParser) wildcardAlias(body MQEResponseSerie, queryToSend QueryToSend) string {
	pattern := strings.Replace(queryToSend.Metric.Metric, `*`, `(.*)`, 1)

	matcher, err := regexp.Compile(pattern)
	if err != nil {
		return queryToSend.Metric.Alias
	}

	groups := matcher.FindAllStringSubmatch(queryToSend.RawQuery, -1)
	if len(groups) == 0 || len(groups[0]) < 2 {
		return queryToSend.Metric.Alias
	}

	return groups[0][1]
}
// indexAlias expands "$n" references in the alias with the n:th (1-based)
// dot-separated segment of the metric name; e.g. alias "$2 $3" on
// "os.disk.sda3.weighted_io_time" yields "disk.sda3".
//
// Fix: the old code scanned the segments slice with a loop just to pick an
// index; a bounds-checked direct index is equivalent. Out-of-range
// references still expand to the empty string, as before.
func (parser *ResponseParser) indexAlias(body MQEResponseSerie, queryToSend QueryToSend) string {
	queryNameParts := strings.Split(queryToSend.Metric.Metric, `.`)

	name := indexAliasPattern.ReplaceAllStringFunc(queryToSend.Metric.Alias, func(in string) string {
		positionName := strings.TrimSpace(strings.Replace(in, "$", "", 1))

		pos, err := strconv.Atoi(positionName)
		if err != nil {
			return ""
		}
		if pos < 1 || pos > len(queryNameParts) {
			return ""
		}
		return strings.TrimSpace(queryNameParts[pos-1])
	})

	// Spaces between expanded segments become dots ("$2 $3" -> "disk.sda3").
	return strings.Replace(name, " ", ".", -1)
}

View File

@ -1,187 +0,0 @@
package mqe
import (
"testing"
"net/http"
"strings"
"io/ioutil"
"github.com/grafana/grafana/pkg/components/null"
. "github.com/smartystreets/goconvey/convey"
)
var (
testJson string
)
// TestMQEResponseParser covers response decoding against the testJson
// fixture and the three legend-formatting modes (empty alias, "$n" index
// alias, wildcard alias).
func TestMQEResponseParser(t *testing.T) {
	Convey("MQE response parser", t, func() {
		parser := NewResponseParser()

		Convey("Can parse response", func() {
			queryRef := QueryToSend{
				QueryRef: &Query{
					AddClusterToAlias: true,
					AddHostToAlias:    true,
				},
				Metric: Metric{Alias: ""},
			}

			response := &http.Response{
				StatusCode: 200,
				Body:       ioutil.NopCloser(strings.NewReader(testJson)),
			}
			res, err := parser.Parse(response, queryRef)
			So(err, ShouldBeNil)
			So(len(res), ShouldEqual, 2)
			// Fixture: 11 values plus 3 trailing nulls = 14 points.
			So(len(res[0].Points), ShouldEqual, 14)
			So(res[0].Name, ShouldEqual, "demoapp staples-lab-1 os.disk.sda3.weighted_io_time")
			startTime := 1479287280000
			// Timestamps step by the fixture's 30000 ms resolution.
			for i := 0; i < 11; i++ {
				So(res[0].Points[i][0].Float64, ShouldEqual, i+1)
				So(res[0].Points[i][1].Float64, ShouldEqual, startTime+(i*30000))
			}
		})

		Convey("Can format legend", func() {
			mqeSerie := MQESerie{
				Tagset: map[string]string{
					"cluster": "demoapp",
					"host":    "staples-lab-1",
				},
				Values: []null.Float{null.NewFloat(3, true)},
			}

			Convey("with empty alias", func() {
				serie := MQEResponseSerie{Name: "os.disk.sda3.weighted_io_time"}
				queryRef := QueryToSend{
					QueryRef: &Query{
						AddClusterToAlias: true,
						AddHostToAlias:    true,
					},
					Metric: Metric{Alias: ""},
				}
				legend := parser.formatLegend(serie, mqeSerie, queryRef)
				So(legend, ShouldEqual, "demoapp staples-lab-1 os.disk.sda3.weighted_io_time")
			})

			Convey("with index alias (ex $2 $3)", func() {
				serie := MQEResponseSerie{Name: "os.disk.sda3.weighted_io_time"}
				queryRef := QueryToSend{
					QueryRef: &Query{
						AddClusterToAlias: true,
						AddHostToAlias:    true,
					},
					Metric: Metric{Alias: "$2 $3", Metric: "os.disk.sda3.weighted_io_time"},
				}
				legend := parser.formatLegend(serie, mqeSerie, queryRef)
				So(legend, ShouldEqual, "demoapp staples-lab-1 disk.sda3")
			})

			Convey("with wildcard alias", func() {
				serie := MQEResponseSerie{Name: "os.disk.sda3.weighted_io_time", Query: "os.disk.*"}
				queryRef := QueryToSend{
					QueryRef: &Query{
						AddClusterToAlias: true,
						AddHostToAlias:    true,
					},
					RawQuery: "os.disk.sda3.weighted_io_time",
					Metric:   Metric{Alias: "*", Metric: "os.disk.*.weighted_io_time"},
				}
				legend := parser.formatLegend(serie, mqeSerie, queryRef)
				So(legend, ShouldEqual, "demoapp staples-lab-1 sda3")
			})
		})
	})
}
func init() {
	// testJson is a captured MQE query response used as the fixture above:
	// one metric with two tagsets (-> two series); the first series has 11
	// values followed by three nulls; timerange start 1479287280000 with a
	// 30000 ms resolution. The "metadata" section is ignored by the parser.
	testJson = `{
    "success": true,
    "name": "select",
    "body": [
      {
        "query": "os.disk.sda3.weighted_io_time",
        "name": "os.disk.sda3.weighted_io_time",
        "type": "series",
        "series": [
          {
            "tagset": {
              "cluster": "demoapp",
              "host": "staples-lab-1"
            },
            "values": [1,2,3,4,5,6,7,8,9,10,11, null, null, null]
          },
          {
            "tagset": {
              "cluster": "demoapp",
              "host": "staples-lab-2"
            },
            "values": [11,10,9,8,7,6,5,4,3,2,1]
          }
        ],
        "timerange": {
          "start": 1479287280000,
          "end": 1479287580000,
          "resolution": 30000
        }
      }
    ],
    "metadata": {
      "description": {
        "cluster": [
          "demoapp"
        ],
        "host": [
          "staples-lab-1",
          "staples-lab-2"
        ]
      },
      "notes": null,
      "profile": [
        {
          "name": "Parsing Query",
          "start": "2016-11-16T04:16:21.874354721-05:00",
          "finish": "2016-11-16T04:16:21.874762291-05:00"
        },
        {
          "name": "Cassandra GetAllTags",
          "start": "2016-11-16T04:16:21.874907171-05:00",
          "finish": "2016-11-16T04:16:21.876401922-05:00"
        },
        {
          "name": "CachedMetricMetadataAPI_GetAllTags_Expired",
          "start": "2016-11-16T04:16:21.874904751-05:00",
          "finish": "2016-11-16T04:16:21.876407852-05:00"
        },
        {
          "name": "CachedMetricMetadataAPI_GetAllTags",
          "start": "2016-11-16T04:16:21.874899491-05:00",
          "finish": "2016-11-16T04:16:21.876410382-05:00"
        },
        {
          "name": "Blueflood FetchSingleTimeseries Resolution",
          "description": "os.disk.sda3.weighted_io_time [app=demoapp,host=staples-lab-1]\n at 30s",
          "start": "2016-11-16T04:16:21.876623312-05:00",
          "finish": "2016-11-16T04:16:21.881763444-05:00"
        },
        {
          "name": "Blueflood FetchSingleTimeseries Resolution",
          "description": "os.disk.sda3.weighted_io_time [app=demoapp,host=staples-lab-2]\n at 30s",
          "start": "2016-11-16T04:16:21.876642682-05:00",
          "finish": "2016-11-16T04:16:21.881895914-05:00"
        },
        {
          "name": "Blueflood FetchMultipleTimeseries",
          "start": "2016-11-16T04:16:21.876418022-05:00",
          "finish": "2016-11-16T04:16:21.881921474-05:00"
        }
      ]
    }
  }
  `
}

View File

@ -1,101 +0,0 @@
package mqe
import (
"context"
"encoding/json"
"fmt"
"io/ioutil"
"net/http"
"net/url"
"path"
"time"
"golang.org/x/net/context/ctxhttp"
"strconv"
"github.com/grafana/grafana/pkg/log"
"github.com/grafana/grafana/pkg/models"
"github.com/patrickmn/go-cache"
)
// tokenCache caches /token responses per datasource id for 5 minutes (with
// a 30 second purge interval) so every query does not hit the MQE endpoint.
// Initializing in the var declaration replaces the old init() func — both
// run at package initialization, so behavior is unchanged.
var tokenCache = cache.New(5*time.Minute, 30*time.Second)
// TokenClient fetches MQE token metadata (the list of series known to the
// server) for a datasource, with per-datasource caching in tokenCache.
type TokenClient struct {
	log        log.Logger
	Datasource *models.DataSource
	HttpClient *http.Client
}

// NewTokenClient builds a TokenClient for the given datasource.
func NewTokenClient(datasource *models.DataSource) *TokenClient {
	// NOTE(review): the error from GetHttpClient is silently discarded,
	// which would leave HttpClient nil on failure — consider surfacing it
	// (requires changing this constructor's signature).
	httpClient, _ := datasource.GetHttpClient()

	return &TokenClient{
		log:        log.New("tsdb.mqe.tokenclient"),
		Datasource: datasource,
		HttpClient: httpClient,
	}
}
// GetTokenData returns the token metadata for this datasource, serving it
// from tokenCache when present and fetching (then caching) it otherwise.
func (client *TokenClient) GetTokenData(ctx context.Context) (*TokenBody, error) {
	cacheKey := strconv.FormatInt(client.Datasource.Id, 10)

	if cached, found := tokenCache.Get(cacheKey); found {
		if body, ok := cached.(*TokenBody); ok {
			return body, nil
		}
	}

	body, err := client.RequestTokenData(ctx)
	if err != nil {
		return nil, err
	}

	tokenCache.Set(cacheKey, body, cache.DefaultExpiration)
	return body, nil
}
// RequestTokenData GETs the datasource's /token endpoint and returns the
// decoded TokenBody. Non-2xx responses and success=false envelopes are
// errors.
//
// Fixes: a failed http.NewRequest was only logged, after which the nil
// request was passed on to ctxhttp.Do; it is now returned. The previously
// ignored url.Parse error (which would leave u nil and panic) is returned
// as well.
func (client *TokenClient) RequestTokenData(ctx context.Context) (*TokenBody, error) {
	u, err := url.Parse(client.Datasource.Url)
	if err != nil {
		return nil, err
	}
	u.Path = path.Join(u.Path, "token")

	req, err := http.NewRequest(http.MethodGet, u.String(), nil)
	if err != nil {
		client.log.Info("Failed to create request", "error", err)
		return nil, err
	}

	res, err := ctxhttp.Do(ctx, client.HttpClient, req)
	if err != nil {
		return nil, err
	}

	body, err := ioutil.ReadAll(res.Body)
	defer res.Body.Close()
	if err != nil {
		return nil, err
	}

	if res.StatusCode/100 != 2 {
		client.log.Info("Request failed", "status", res.Status, "body", string(body))
		return nil, fmt.Errorf("Request failed status: %v", res.Status)
	}

	var result *TokenResponse
	err = json.Unmarshal(body, &result)
	if err != nil {
		client.log.Info("Failed to unmarshal response", "error", err, "status", res.Status, "body", string(body))
		return nil, err
	}

	if !result.Success {
		return nil, fmt.Errorf("Request failed for unknown reason.")
	}

	return &result.Body, nil
}

View File

@ -1,27 +0,0 @@
package mqe
import (
"context"
"testing"
"github.com/grafana/grafana/pkg/components/simplejson"
"github.com/grafana/grafana/pkg/models"
. "github.com/smartystreets/goconvey/convey"
)
// TestTokenClient is an integration test against a live MQE endpoint; it is
// disabled by default (SkipConvey) and needs Url set to a real server.
func TestTokenClient(t *testing.T) {
	SkipConvey("Token client", t, func() {
		dsInfo := &models.DataSource{
			JsonData: simplejson.New(),
			Url:      "",
		}

		client := NewTokenClient(dsInfo)

		body, err := client.RequestTokenData(context.TODO())

		So(err, ShouldBeNil)
		//So(len(body.Functions), ShouldBeGreaterThan, 1)
		So(len(body.Metrics), ShouldBeGreaterThan, 1)
	})
}

View File

@ -1,137 +0,0 @@
package mqe
import (
"fmt"
"strings"
"regexp"
"github.com/grafana/grafana/pkg/log"
"github.com/grafana/grafana/pkg/tsdb"
)
// Metric is one metric entry from the panel model; Alias may contain "$n"
// index references or a wildcard used when naming result series.
type Metric struct {
	Metric string
	Alias  string
}

// Function is one entry of the panel's function list, e.g. "aggregate.min".
type Function struct {
	Func string
}

// Query is the parsed representation of a single MQE panel query.
type Query struct {
	Metrics           []Metric
	Hosts             []string
	Cluster           []string
	FunctionList      []Function
	AddClusterToAlias bool
	AddHostToAlias    bool
	TimeRange         *tsdb.TimeRange

	UseRawQuery bool
	RawQuery    string
}

// containsWildcardPattern detects glob-style wildcards in a metric name.
// (The explicit *regexp.Regexp type on the var was redundant and dropped.)
var containsWildcardPattern = regexp.MustCompile(`\*`)
// Build expands this query into one raw MQE query string per concrete
// metric. Metrics without a wildcard render directly; wildcard metrics are
// matched against the series the server reported (availableSeries) and one
// query is produced per match. Metrics whose wildcard pattern does not
// compile are logged and skipped (best effort, as before).
//
// Fix: regexp matching now uses MatchString instead of Match([]byte(...)),
// avoiding a needless allocation per comparison.
func (q *Query) Build(availableSeries []string) ([]QueryToSend, error) {
	var queriesToSend []QueryToSend

	where := q.buildWhereClause()
	functions := q.buildFunctionList()

	for _, metric := range q.Metrics {
		alias := ""
		if metric.Alias != "" {
			alias = fmt.Sprintf(" {%s}", metric.Alias)
		}

		if !containsWildcardPattern.MatchString(metric.Metric) {
			rawQuery := q.renderQuerystring(metric.Metric, functions, alias, where, q.TimeRange)
			queriesToSend = append(queriesToSend, QueryToSend{
				RawQuery: rawQuery,
				QueryRef: q,
				Metric:   metric,
			})
		} else {
			// Translate the glob-style "*" into a regex and expand it
			// against every series name the server knows about.
			m := strings.Replace(metric.Metric, "*", ".*", -1)
			mp, err := regexp.Compile(m)
			if err != nil {
				log.Error2("failed to compile regex for ", "metric", m)
				continue
			}

			//TODO: this lookup should be cached
			for _, wildcardMatch := range availableSeries {
				if mp.MatchString(wildcardMatch) {
					rawQuery := q.renderQuerystring(wildcardMatch, functions, alias, where, q.TimeRange)
					queriesToSend = append(queriesToSend, QueryToSend{
						RawQuery: rawQuery,
						QueryRef: q,
						Metric:   metric,
					})
				}
			}
		}
	}

	return queriesToSend, nil
}
// renderQuerystring assembles the final raw MQE query string in the form:
// `metric.path`|functions {alias} where ... from <ms> to <ms>.
//
// Fix: the timerange parameter was accepted but silently ignored in favor
// of q.TimeRange; it is now used. The only caller passes q.TimeRange, so
// output is unchanged, but the signature no longer lies.
func (q *Query) renderQuerystring(path, functions, alias, where string, timerange *tsdb.TimeRange) string {
	return fmt.Sprintf(
		"`%s`%s%s %s from %v to %v",
		path,
		functions,
		alias,
		where,
		timerange.GetFromAsMsEpoch(),
		timerange.GetToAsMsEpoch())
}
// buildFunctionList renders the pipe-chain of functions appended to a
// metric, e.g. "|aggregate.min|aggregate.max"; empty when no functions.
func (q *Query) buildFunctionList() string {
	chain := ""
	for _, function := range q.FunctionList {
		chain = chain + "|" + function.Func
	}
	return chain
}
// buildWhereClause renders the optional "where ..." clause restricting the
// query to the configured clusters and/or hosts; empty when neither is set.
func (q *Query) buildWhereClause() string {
	hasClusters := len(q.Cluster) > 0
	hasHosts := len(q.Hosts) > 0

	if !hasClusters && !hasHosts {
		return ""
	}

	clause := "where "
	if hasClusters {
		clause += fmt.Sprintf("cluster in ('%s')", strings.Join(q.Cluster, "', '"))
	}
	if hasClusters && hasHosts {
		clause += " and "
	}
	if hasHosts {
		clause += fmt.Sprintf("host in ('%s')", strings.Join(q.Hosts, "', '"))
	}

	return clause
}
// TokenBody is the payload of the /token endpoint: the series names known
// to the MQE server, used by Query.Build for wildcard expansion.
type TokenBody struct {
	Metrics []string
}

// TokenResponse is the envelope the /token endpoint wraps TokenBody in.
type TokenResponse struct {
	Success bool
	Body    TokenBody
}

View File

@ -1,95 +0,0 @@
package mqe
import (
"testing"
"time"
"fmt"
"github.com/grafana/grafana/pkg/tsdb"
. "github.com/smartystreets/goconvey/convey"
)
// TestWildcardExpansion checks Query.Build: plain metrics render one raw
// query each (with where clause, function chain and optional alias), and a
// wildcard metric expands to one query per matching available series.
func TestWildcardExpansion(t *testing.T) {
	availableMetrics := []string{
		"os.cpu.all.idle",
		"os.cpu.1.idle",
		"os.cpu.2.idle",
		"os.cpu.3.idle",
	}

	now := time.Now()
	from := now.Add((time.Minute*5)*-1).UnixNano() / int64(time.Millisecond)
	to := now.UnixNano() / int64(time.Millisecond)

	Convey("Can expanding query", t, func() {
		Convey("Without wildcard series", func() {
			query := &Query{
				Metrics: []Metric{
					{Metric: "os.cpu.3.idle", Alias: ""},
					{Metric: "os.cpu.2.idle", Alias: ""},
					{Metric: "os.cpu.1.idle", Alias: "cpu"},
				},
				Hosts:             []string{"staples-lab-1", "staples-lab-2"},
				Cluster:           []string{"demoapp-1", "demoapp-2"},
				AddClusterToAlias: false,
				AddHostToAlias:    false,
				FunctionList: []Function{
					{Func: "aggregate.min"},
				},
				TimeRange: &tsdb.TimeRange{Now: now, From: "5m", To: "now"},
			}

			expandeQueries, err := query.Build(availableMetrics)
			So(err, ShouldBeNil)
			So(len(expandeQueries), ShouldEqual, 3)
			So(expandeQueries[0].RawQuery, ShouldEqual, fmt.Sprintf("`os.cpu.3.idle`|aggregate.min where cluster in ('demoapp-1', 'demoapp-2') and host in ('staples-lab-1', 'staples-lab-2') from %v to %v", from, to))
			So(expandeQueries[1].RawQuery, ShouldEqual, fmt.Sprintf("`os.cpu.2.idle`|aggregate.min where cluster in ('demoapp-1', 'demoapp-2') and host in ('staples-lab-1', 'staples-lab-2') from %v to %v", from, to))
			// The third metric has an alias, rendered as " {cpu}".
			So(expandeQueries[2].RawQuery, ShouldEqual, fmt.Sprintf("`os.cpu.1.idle`|aggregate.min {cpu} where cluster in ('demoapp-1', 'demoapp-2') and host in ('staples-lab-1', 'staples-lab-2') from %v to %v", from, to))
		})

		Convey("With two aggregate functions", func() {
			query := &Query{
				Metrics: []Metric{
					{Metric: "os.cpu.3.idle", Alias: ""},
				},
				Hosts:             []string{"staples-lab-1", "staples-lab-2"},
				Cluster:           []string{"demoapp-1", "demoapp-2"},
				AddClusterToAlias: false,
				AddHostToAlias:    false,
				FunctionList: []Function{
					{Func: "aggregate.min"},
					{Func: "aggregate.max"},
				},
				TimeRange: &tsdb.TimeRange{Now: now, From: "5m", To: "now"},
			}

			expandeQueries, err := query.Build(availableMetrics)
			So(err, ShouldBeNil)
			So(len(expandeQueries), ShouldEqual, 1)
			// Functions are pipe-chained in declaration order.
			So(expandeQueries[0].RawQuery, ShouldEqual, fmt.Sprintf("`os.cpu.3.idle`|aggregate.min|aggregate.max where cluster in ('demoapp-1', 'demoapp-2') and host in ('staples-lab-1', 'staples-lab-2') from %v to %v", from, to))
		})

		Convey("Containing wildcard series", func() {
			query := &Query{
				Metrics: []Metric{
					{Metric: "os.cpu*", Alias: ""},
				},
				Hosts:             []string{"staples-lab-1"},
				AddClusterToAlias: false,
				AddHostToAlias:    false,
				TimeRange:         &tsdb.TimeRange{Now: now, From: "5m", To: "now"},
			}

			expandeQueries, err := query.Build(availableMetrics)
			So(err, ShouldBeNil)
			// "os.cpu*" matches all four available series.
			So(len(expandeQueries), ShouldEqual, 4)

			So(expandeQueries[0].RawQuery, ShouldEqual, fmt.Sprintf("`os.cpu.all.idle` where host in ('staples-lab-1') from %v to %v", from, to))
			So(expandeQueries[1].RawQuery, ShouldEqual, fmt.Sprintf("`os.cpu.1.idle` where host in ('staples-lab-1') from %v to %v", from, to))
			So(expandeQueries[2].RawQuery, ShouldEqual, fmt.Sprintf("`os.cpu.2.idle` where host in ('staples-lab-1') from %v to %v", from, to))
			So(expandeQueries[3].RawQuery, ShouldEqual, fmt.Sprintf("`os.cpu.3.idle` where host in ('staples-lab-1') from %v to %v", from, to))
		})
	})
}

View File

@ -21,9 +21,8 @@ import (
) )
type MysqlExecutor struct { type MysqlExecutor struct {
datasource *models.DataSource engine *xorm.Engine
engine *xorm.Engine log log.Logger
log log.Logger
} }
type engineCacheType struct { type engineCacheType struct {
@ -38,16 +37,15 @@ var engineCache = engineCacheType{
} }
func init() { func init() {
tsdb.RegisterExecutor("mysql", NewMysqlExecutor) tsdb.RegisterTsdbQueryEndpoint("mysql", NewMysqlExecutor)
} }
func NewMysqlExecutor(datasource *models.DataSource) (tsdb.Executor, error) { func NewMysqlExecutor(datasource *models.DataSource) (tsdb.TsdbQueryEndpoint, error) {
executor := &MysqlExecutor{ executor := &MysqlExecutor{
datasource: datasource, log: log.New("tsdb.mysql"),
log: log.New("tsdb.mysql"),
} }
err := executor.initEngine() err := executor.initEngine(datasource)
if err != nil { if err != nil {
return nil, err return nil, err
} }
@ -55,18 +53,24 @@ func NewMysqlExecutor(datasource *models.DataSource) (tsdb.Executor, error) {
return executor, nil return executor, nil
} }
func (e *MysqlExecutor) initEngine() error { func (e *MysqlExecutor) initEngine(dsInfo *models.DataSource) error {
engineCache.Lock() engineCache.Lock()
defer engineCache.Unlock() defer engineCache.Unlock()
if engine, present := engineCache.cache[e.datasource.Id]; present { if engine, present := engineCache.cache[dsInfo.Id]; present {
if version, _ := engineCache.versions[e.datasource.Id]; version == e.datasource.Version { if version, _ := engineCache.versions[dsInfo.Id]; version == dsInfo.Version {
e.engine = engine e.engine = engine
return nil return nil
} }
} }
cnnstr := fmt.Sprintf("%s:%s@%s(%s)/%s?collation=utf8mb4_unicode_ci&parseTime=true&loc=UTC", e.datasource.User, e.datasource.Password, "tcp", e.datasource.Url, e.datasource.Database) cnnstr := fmt.Sprintf("%s:%s@%s(%s)/%s?collation=utf8mb4_unicode_ci&parseTime=true&loc=UTC",
dsInfo.User,
dsInfo.Password,
"tcp",
dsInfo.Url,
dsInfo.Database)
e.log.Debug("getEngine", "connection", cnnstr) e.log.Debug("getEngine", "connection", cnnstr)
engine, err := xorm.NewEngine("mysql", cnnstr) engine, err := xorm.NewEngine("mysql", cnnstr)
@ -76,22 +80,22 @@ func (e *MysqlExecutor) initEngine() error {
return err return err
} }
engineCache.cache[e.datasource.Id] = engine engineCache.cache[dsInfo.Id] = engine
e.engine = engine e.engine = engine
return nil return nil
} }
func (e *MysqlExecutor) Execute(ctx context.Context, queries tsdb.QuerySlice, context *tsdb.QueryContext) *tsdb.BatchResult { func (e *MysqlExecutor) Query(ctx context.Context, dsInfo *models.DataSource, tsdbQuery *tsdb.TsdbQuery) *tsdb.BatchResult {
result := &tsdb.BatchResult{ result := &tsdb.BatchResult{
QueryResults: make(map[string]*tsdb.QueryResult), QueryResults: make(map[string]*tsdb.QueryResult),
} }
macroEngine := NewMysqlMacroEngine(context.TimeRange) macroEngine := NewMysqlMacroEngine(tsdbQuery.TimeRange)
session := e.engine.NewSession() session := e.engine.NewSession()
defer session.Close() defer session.Close()
db := session.DB() db := session.DB()
for _, query := range queries { for _, query := range tsdbQuery.Queries {
rawSql := query.Model.Get("rawSql").MustString() rawSql := query.Model.Get("rawSql").MustString()
if rawSql == "" { if rawSql == "" {
continue continue
@ -272,8 +276,6 @@ func (e MysqlExecutor) TransformToTimeSeries(query *tsdb.Query, rows *core.Rows,
rowData.metric = "Unknown" rowData.metric = "Unknown"
} }
//e.log.Debug("Rows", "metric", rowData.metric, "time", rowData.time, "value", rowData.value)
if !rowData.time.Valid { if !rowData.time.Valid {
return fmt.Errorf("Found row with no time value") return fmt.Errorf("Found row with no time value")
} }

View File

@ -22,20 +22,22 @@ import (
) )
type OpenTsdbExecutor struct { type OpenTsdbExecutor struct {
*models.DataSource //*models.DataSource
httpClient *http.Client //httpClient *http.Client
} }
func NewOpenTsdbExecutor(datasource *models.DataSource) (tsdb.Executor, error) { func NewOpenTsdbExecutor(datasource *models.DataSource) (tsdb.TsdbQueryEndpoint, error) {
httpClient, err := datasource.GetHttpClient() /*
httpClient, err := datasource.GetHttpClient()
if err != nil { if err != nil {
return nil, err return nil, err
} }
*/
return &OpenTsdbExecutor{ return &OpenTsdbExecutor{
DataSource: datasource, //DataSource: datasource,
httpClient: httpClient, //httpClient: httpClient,
}, nil }, nil
} }
@ -45,10 +47,10 @@ var (
func init() { func init() {
plog = log.New("tsdb.opentsdb") plog = log.New("tsdb.opentsdb")
tsdb.RegisterExecutor("opentsdb", NewOpenTsdbExecutor) tsdb.RegisterTsdbQueryEndpoint("opentsdb", NewOpenTsdbExecutor)
} }
func (e *OpenTsdbExecutor) Execute(ctx context.Context, queries tsdb.QuerySlice, queryContext *tsdb.QueryContext) *tsdb.BatchResult { func (e *OpenTsdbExecutor) Query(ctx context.Context, dsInfo *models.DataSource, queryContext *tsdb.TsdbQuery) *tsdb.BatchResult {
result := &tsdb.BatchResult{} result := &tsdb.BatchResult{}
var tsdbQuery OpenTsdbQuery var tsdbQuery OpenTsdbQuery
@ -56,7 +58,7 @@ func (e *OpenTsdbExecutor) Execute(ctx context.Context, queries tsdb.QuerySlice,
tsdbQuery.Start = queryContext.TimeRange.GetFromAsMsEpoch() tsdbQuery.Start = queryContext.TimeRange.GetFromAsMsEpoch()
tsdbQuery.End = queryContext.TimeRange.GetToAsMsEpoch() tsdbQuery.End = queryContext.TimeRange.GetToAsMsEpoch()
for _, query := range queries { for _, query := range queryContext.Queries {
metric := e.buildMetric(query) metric := e.buildMetric(query)
tsdbQuery.Queries = append(tsdbQuery.Queries, metric) tsdbQuery.Queries = append(tsdbQuery.Queries, metric)
} }
@ -65,13 +67,19 @@ func (e *OpenTsdbExecutor) Execute(ctx context.Context, queries tsdb.QuerySlice,
plog.Debug("OpenTsdb request", "params", tsdbQuery) plog.Debug("OpenTsdb request", "params", tsdbQuery)
} }
req, err := e.createRequest(tsdbQuery) req, err := e.createRequest(dsInfo, tsdbQuery)
if err != nil { if err != nil {
result.Error = err result.Error = err
return result return result
} }
res, err := ctxhttp.Do(ctx, e.httpClient, req) httpClient, err := dsInfo.GetHttpClient()
if err != nil {
result.Error = err
return result
}
res, err := ctxhttp.Do(ctx, httpClient, req)
if err != nil { if err != nil {
result.Error = err result.Error = err
return result return result
@ -86,8 +94,8 @@ func (e *OpenTsdbExecutor) Execute(ctx context.Context, queries tsdb.QuerySlice,
return result return result
} }
func (e *OpenTsdbExecutor) createRequest(data OpenTsdbQuery) (*http.Request, error) { func (e *OpenTsdbExecutor) createRequest(dsInfo *models.DataSource, data OpenTsdbQuery) (*http.Request, error) {
u, _ := url.Parse(e.Url) u, _ := url.Parse(dsInfo.Url)
u.Path = path.Join(u.Path, "api/query") u.Path = path.Join(u.Path, "api/query")
postData, err := json.Marshal(data) postData, err := json.Marshal(data)
@ -99,8 +107,8 @@ func (e *OpenTsdbExecutor) createRequest(data OpenTsdbQuery) (*http.Request, err
} }
req.Header.Set("Content-Type", "application/json") req.Header.Set("Content-Type", "application/json")
if e.BasicAuth { if dsInfo.BasicAuth {
req.SetBasicAuth(e.BasicAuthUser, e.BasicAuthPassword) req.SetBasicAuth(dsInfo.BasicAuthUser, dsInfo.BasicAuthPassword)
} }
return req, err return req, err

View File

@ -7,6 +7,8 @@ import (
"strings" "strings"
"time" "time"
"github.com/opentracing/opentracing-go"
"net/http" "net/http"
"github.com/grafana/grafana/pkg/components/null" "github.com/grafana/grafana/pkg/components/null"
@ -16,12 +18,9 @@ import (
api "github.com/prometheus/client_golang/api" api "github.com/prometheus/client_golang/api"
apiv1 "github.com/prometheus/client_golang/api/prometheus/v1" apiv1 "github.com/prometheus/client_golang/api/prometheus/v1"
"github.com/prometheus/common/model" "github.com/prometheus/common/model"
//api "github.com/prometheus/client_golang/api"
//apiv1 "github.com/prometheus/client_golang/api/prometheus/v1"
) )
type PrometheusExecutor struct { type PrometheusExecutor struct {
*models.DataSource
Transport *http.Transport Transport *http.Transport
} }
@ -37,15 +36,14 @@ func (bat basicAuthTransport) RoundTrip(req *http.Request) (*http.Response, erro
return bat.Transport.RoundTrip(req) return bat.Transport.RoundTrip(req)
} }
func NewPrometheusExecutor(dsInfo *models.DataSource) (tsdb.Executor, error) { func NewPrometheusExecutor(dsInfo *models.DataSource) (tsdb.TsdbQueryEndpoint, error) {
transport, err := dsInfo.GetHttpTransport() transport, err := dsInfo.GetHttpTransport()
if err != nil { if err != nil {
return nil, err return nil, err
} }
return &PrometheusExecutor{ return &PrometheusExecutor{
DataSource: dsInfo, Transport: transport,
Transport: transport,
}, nil }, nil
} }
@ -56,21 +54,21 @@ var (
func init() { func init() {
plog = log.New("tsdb.prometheus") plog = log.New("tsdb.prometheus")
tsdb.RegisterExecutor("prometheus", NewPrometheusExecutor) tsdb.RegisterTsdbQueryEndpoint("prometheus", NewPrometheusExecutor)
legendFormat = regexp.MustCompile(`\{\{\s*(.+?)\s*\}\}`) legendFormat = regexp.MustCompile(`\{\{\s*(.+?)\s*\}\}`)
} }
func (e *PrometheusExecutor) getClient() (apiv1.API, error) { func (e *PrometheusExecutor) getClient(dsInfo *models.DataSource) (apiv1.API, error) {
cfg := api.Config{ cfg := api.Config{
Address: e.DataSource.Url, Address: dsInfo.Url,
RoundTripper: e.Transport, RoundTripper: e.Transport,
} }
if e.BasicAuth { if dsInfo.BasicAuth {
cfg.RoundTripper = basicAuthTransport{ cfg.RoundTripper = basicAuthTransport{
Transport: e.Transport, Transport: e.Transport,
username: e.BasicAuthUser, username: dsInfo.BasicAuthUser,
password: e.BasicAuthPassword, password: dsInfo.BasicAuthPassword,
} }
} }
@ -82,15 +80,15 @@ func (e *PrometheusExecutor) getClient() (apiv1.API, error) {
return apiv1.NewAPI(client), nil return apiv1.NewAPI(client), nil
} }
func (e *PrometheusExecutor) Execute(ctx context.Context, queries tsdb.QuerySlice, queryContext *tsdb.QueryContext) *tsdb.BatchResult { func (e *PrometheusExecutor) Query(ctx context.Context, dsInfo *models.DataSource, tsdbQuery *tsdb.TsdbQuery) *tsdb.BatchResult {
result := &tsdb.BatchResult{} result := &tsdb.BatchResult{}
client, err := e.getClient() client, err := e.getClient(dsInfo)
if err != nil { if err != nil {
return result.WithError(err) return result.WithError(err)
} }
query, err := parseQuery(queries, queryContext) query, err := parseQuery(tsdbQuery.Queries, tsdbQuery)
if err != nil { if err != nil {
return result.WithError(err) return result.WithError(err)
} }
@ -101,6 +99,12 @@ func (e *PrometheusExecutor) Execute(ctx context.Context, queries tsdb.QuerySlic
Step: query.Step, Step: query.Step,
} }
span, ctx := opentracing.StartSpanFromContext(ctx, "alerting.prometheus")
span.SetTag("expr", query.Expr)
span.SetTag("start_unixnano", int64(query.Start.UnixNano()))
span.SetTag("stop_unixnano", int64(query.End.UnixNano()))
defer span.Finish()
value, err := client.QueryRange(ctx, query.Expr, timeRange) value, err := client.QueryRange(ctx, query.Expr, timeRange)
if err != nil { if err != nil {
@ -134,7 +138,7 @@ func formatLegend(metric model.Metric, query *PrometheusQuery) string {
return string(result) return string(result)
} }
func parseQuery(queries tsdb.QuerySlice, queryContext *tsdb.QueryContext) (*PrometheusQuery, error) { func parseQuery(queries []*tsdb.Query, queryContext *tsdb.TsdbQuery) (*PrometheusQuery, error) {
queryModel := queries[0] queryModel := queries[0]
expr, err := queryModel.Model.Get("expr").String() expr, err := queryModel.Model.Get("expr").String()

View File

@ -1,21 +0,0 @@
package tsdb
import "sync"
type QueryContext struct {
TimeRange *TimeRange
Queries QuerySlice
Results map[string]*QueryResult
ResultsChan chan *BatchResult
Lock sync.RWMutex
BatchWaits sync.WaitGroup
}
func NewQueryContext(queries QuerySlice, timeRange *TimeRange) *QueryContext {
return &QueryContext{
TimeRange: timeRange,
Queries: queries,
ResultsChan: make(chan *BatchResult),
Results: make(map[string]*QueryResult),
}
}

View File

@ -0,0 +1,36 @@
package tsdb
import (
"context"
"fmt"
"github.com/grafana/grafana/pkg/models"
)
type TsdbQueryEndpoint interface {
Query(ctx context.Context, ds *models.DataSource, query *TsdbQuery) *BatchResult
}
var registry map[string]GetTsdbQueryEndpointFn
type GetTsdbQueryEndpointFn func(dsInfo *models.DataSource) (TsdbQueryEndpoint, error)
func init() {
registry = make(map[string]GetTsdbQueryEndpointFn)
}
func getTsdbQueryEndpointFor(dsInfo *models.DataSource) (TsdbQueryEndpoint, error) {
if fn, exists := registry[dsInfo.Type]; exists {
executor, err := fn(dsInfo)
if err != nil {
return nil, err
}
return executor, nil
}
return nil, fmt.Errorf("Could not find executor for data source type: %s", dsInfo.Type)
}
func RegisterTsdbQueryEndpoint(pluginId string, fn GetTsdbQueryEndpointFn) {
registry[pluginId] = fn
}

View File

@ -4,60 +4,23 @@ import (
"context" "context"
) )
type HandleRequestFunc func(ctx context.Context, req *Request) (*Response, error) type HandleRequestFunc func(ctx context.Context, req *TsdbQuery) (*Response, error)
func HandleRequest(ctx context.Context, req *Request) (*Response, error) { func HandleRequest(ctx context.Context, req *TsdbQuery) (*Response, error) {
context := NewQueryContext(req.Queries, req.TimeRange) //TODO niceify
ds := req.Queries[0].DataSource
batches, err := getBatches(req) endpoint, err := getTsdbQueryEndpointFor(ds)
if err != nil { if err != nil {
return nil, err return nil, err
} }
currentlyExecuting := 0 res := endpoint.Query(ctx, ds, req)
if res.Error != nil {
for _, batch := range batches { return nil, res.Error
if len(batch.Depends) == 0 {
currentlyExecuting += 1
batch.Started = true
go batch.process(ctx, context)
}
} }
response := &Response{} return &Response{
Results: res.QueryResults,
for currentlyExecuting != 0 { BatchTimings: []*BatchTiming{res.Timings},
select { }, nil
case batchResult := <-context.ResultsChan:
currentlyExecuting -= 1
response.BatchTimings = append(response.BatchTimings, batchResult.Timings)
if batchResult.Error != nil {
return nil, batchResult.Error
}
for refId, result := range batchResult.QueryResults {
context.Results[refId] = result
}
for _, batch := range batches {
// not interested in started batches
if batch.Started {
continue
}
if batch.allDependenciesAreIn(context) {
currentlyExecuting += 1
batch.Started = true
go batch.process(ctx, context)
}
}
case <-ctx.Done():
return nil, ctx.Err()
}
}
response.Results = context.Results
return response, nil
} }

View File

@ -11,7 +11,7 @@ import (
"github.com/grafana/grafana/pkg/tsdb" "github.com/grafana/grafana/pkg/tsdb"
) )
type ScenarioHandler func(query *tsdb.Query, context *tsdb.QueryContext) *tsdb.QueryResult type ScenarioHandler func(query *tsdb.Query, context *tsdb.TsdbQuery) *tsdb.QueryResult
type Scenario struct { type Scenario struct {
Id string `json:"id"` Id string `json:"id"`
@ -33,9 +33,9 @@ func init() {
Id: "random_walk", Id: "random_walk",
Name: "Random Walk", Name: "Random Walk",
Handler: func(query *tsdb.Query, context *tsdb.QueryContext) *tsdb.QueryResult { Handler: func(query *tsdb.Query, tsdbQuery *tsdb.TsdbQuery) *tsdb.QueryResult {
timeWalkerMs := context.TimeRange.GetFromAsMsEpoch() timeWalkerMs := tsdbQuery.TimeRange.GetFromAsMsEpoch()
to := context.TimeRange.GetToAsMsEpoch() to := tsdbQuery.TimeRange.GetToAsMsEpoch()
series := newSeriesForQuery(query) series := newSeriesForQuery(query)
@ -60,7 +60,7 @@ func init() {
registerScenario(&Scenario{ registerScenario(&Scenario{
Id: "no_data_points", Id: "no_data_points",
Name: "No Data Points", Name: "No Data Points",
Handler: func(query *tsdb.Query, context *tsdb.QueryContext) *tsdb.QueryResult { Handler: func(query *tsdb.Query, context *tsdb.TsdbQuery) *tsdb.QueryResult {
return tsdb.NewQueryResult() return tsdb.NewQueryResult()
}, },
}) })
@ -68,7 +68,7 @@ func init() {
registerScenario(&Scenario{ registerScenario(&Scenario{
Id: "datapoints_outside_range", Id: "datapoints_outside_range",
Name: "Datapoints Outside Range", Name: "Datapoints Outside Range",
Handler: func(query *tsdb.Query, context *tsdb.QueryContext) *tsdb.QueryResult { Handler: func(query *tsdb.Query, context *tsdb.TsdbQuery) *tsdb.QueryResult {
queryRes := tsdb.NewQueryResult() queryRes := tsdb.NewQueryResult()
series := newSeriesForQuery(query) series := newSeriesForQuery(query)
@ -85,7 +85,7 @@ func init() {
Id: "csv_metric_values", Id: "csv_metric_values",
Name: "CSV Metric Values", Name: "CSV Metric Values",
StringInput: "1,20,90,30,5,0", StringInput: "1,20,90,30,5,0",
Handler: func(query *tsdb.Query, context *tsdb.QueryContext) *tsdb.QueryResult { Handler: func(query *tsdb.Query, context *tsdb.TsdbQuery) *tsdb.QueryResult {
queryRes := tsdb.NewQueryResult() queryRes := tsdb.NewQueryResult()
stringInput := query.Model.Get("stringInput").MustString() stringInput := query.Model.Get("stringInput").MustString()

View File

@ -13,7 +13,7 @@ type TestDataExecutor struct {
log log.Logger log log.Logger
} }
func NewTestDataExecutor(dsInfo *models.DataSource) (tsdb.Executor, error) { func NewTestDataExecutor(dsInfo *models.DataSource) (tsdb.TsdbQueryEndpoint, error) {
return &TestDataExecutor{ return &TestDataExecutor{
DataSource: dsInfo, DataSource: dsInfo,
log: log.New("tsdb.testdata"), log: log.New("tsdb.testdata"),
@ -21,17 +21,17 @@ func NewTestDataExecutor(dsInfo *models.DataSource) (tsdb.Executor, error) {
} }
func init() { func init() {
tsdb.RegisterExecutor("grafana-testdata-datasource", NewTestDataExecutor) tsdb.RegisterTsdbQueryEndpoint("grafana-testdata-datasource", NewTestDataExecutor)
} }
func (e *TestDataExecutor) Execute(ctx context.Context, queries tsdb.QuerySlice, context *tsdb.QueryContext) *tsdb.BatchResult { func (e *TestDataExecutor) Query(ctx context.Context, dsInfo *models.DataSource, tsdbQuery *tsdb.TsdbQuery) *tsdb.BatchResult {
result := &tsdb.BatchResult{} result := &tsdb.BatchResult{}
result.QueryResults = make(map[string]*tsdb.QueryResult) result.QueryResults = make(map[string]*tsdb.QueryResult)
for _, query := range queries { for _, query := range tsdbQuery.Queries {
scenarioId := query.Model.Get("scenarioId").MustString("random_walk") scenarioId := query.Model.Get("scenarioId").MustString("random_walk")
if scenario, exist := ScenarioRegistry[scenarioId]; exist { if scenario, exist := ScenarioRegistry[scenarioId]; exist {
result.QueryResults[query.RefId] = scenario.Handler(query, context) result.QueryResults[query.RefId] = scenario.Handler(query, tsdbQuery)
result.QueryResults[query.RefId].RefId = query.RefId result.QueryResults[query.RefId].RefId = query.RefId
} else { } else {
e.log.Error("Scenario not found", "scenarioId", scenarioId) e.log.Error("Scenario not found", "scenarioId", scenarioId)

View File

@ -3,60 +3,15 @@ package tsdb
import ( import (
"context" "context"
"testing" "testing"
"time"
"github.com/grafana/grafana/pkg/models" "github.com/grafana/grafana/pkg/models"
. "github.com/smartystreets/goconvey/convey" . "github.com/smartystreets/goconvey/convey"
) )
func TestMetricQuery(t *testing.T) { func TestMetricQuery(t *testing.T) {
Convey("When batches groups for query", t, func() {
Convey("Given 3 queries for 2 data sources", func() {
request := &Request{
Queries: QuerySlice{
{RefId: "A", DataSource: &models.DataSource{Id: 1}},
{RefId: "B", DataSource: &models.DataSource{Id: 1}},
{RefId: "C", DataSource: &models.DataSource{Id: 2}},
},
}
batches, err := getBatches(request)
So(err, ShouldBeNil)
Convey("Should group into two batches", func() {
So(len(batches), ShouldEqual, 2)
})
})
Convey("Given query 2 depends on query 1", func() {
request := &Request{
Queries: QuerySlice{
{RefId: "A", DataSource: &models.DataSource{Id: 1}},
{RefId: "B", DataSource: &models.DataSource{Id: 2}},
{RefId: "C", DataSource: &models.DataSource{Id: 3}, Depends: []string{"A", "B"}},
},
}
batches, err := getBatches(request)
So(err, ShouldBeNil)
Convey("Should return three batch groups", func() {
So(len(batches), ShouldEqual, 3)
})
Convey("Group 3 should have group 1 and 2 as dependencies", func() {
So(batches[2].Depends["A"], ShouldEqual, true)
So(batches[2].Depends["B"], ShouldEqual, true)
})
})
})
Convey("When executing request with one query", t, func() { Convey("When executing request with one query", t, func() {
req := &Request{ req := &TsdbQuery{
Queries: QuerySlice{ Queries: []*Query{
{RefId: "A", DataSource: &models.DataSource{Id: 1, Type: "test"}}, {RefId: "A", DataSource: &models.DataSource{Id: 1, Type: "test"}},
}, },
} }
@ -74,8 +29,8 @@ func TestMetricQuery(t *testing.T) {
}) })
Convey("When executing one request with two queries from same data source", t, func() { Convey("When executing one request with two queries from same data source", t, func() {
req := &Request{ req := &TsdbQuery{
Queries: QuerySlice{ Queries: []*Query{
{RefId: "A", DataSource: &models.DataSource{Id: 1, Type: "test"}}, {RefId: "A", DataSource: &models.DataSource{Id: 1, Type: "test"}},
{RefId: "B", DataSource: &models.DataSource{Id: 1, Type: "test"}}, {RefId: "B", DataSource: &models.DataSource{Id: 1, Type: "test"}},
}, },
@ -99,26 +54,9 @@ func TestMetricQuery(t *testing.T) {
}) })
Convey("When executing one request with three queries from different datasources", t, func() {
req := &Request{
Queries: QuerySlice{
{RefId: "A", DataSource: &models.DataSource{Id: 1, Type: "test"}},
{RefId: "B", DataSource: &models.DataSource{Id: 1, Type: "test"}},
{RefId: "C", DataSource: &models.DataSource{Id: 2, Type: "test"}},
},
}
res, err := HandleRequest(context.TODO(), req)
So(err, ShouldBeNil)
Convey("Should have been batched in two requests", func() {
So(len(res.BatchTimings), ShouldEqual, 2)
})
})
Convey("When query uses data source of unknown type", t, func() { Convey("When query uses data source of unknown type", t, func() {
req := &Request{ req := &TsdbQuery{
Queries: QuerySlice{ Queries: []*Query{
{RefId: "A", DataSource: &models.DataSource{Id: 1, Type: "asdasdas"}}, {RefId: "A", DataSource: &models.DataSource{Id: 1, Type: "asdasdas"}},
}, },
} }
@ -126,50 +64,11 @@ func TestMetricQuery(t *testing.T) {
_, err := HandleRequest(context.TODO(), req) _, err := HandleRequest(context.TODO(), req)
So(err, ShouldNotBeNil) So(err, ShouldNotBeNil)
}) })
Convey("When executing request that depend on other query", t, func() {
req := &Request{
Queries: QuerySlice{
{
RefId: "A", DataSource: &models.DataSource{Id: 1, Type: "test"},
},
{
RefId: "B", DataSource: &models.DataSource{Id: 2, Type: "test"}, Depends: []string{"A"},
},
},
}
fakeExecutor := registerFakeExecutor()
fakeExecutor.HandleQuery("A", func(c *QueryContext) *QueryResult {
time.Sleep(10 * time.Millisecond)
return &QueryResult{
Series: TimeSeriesSlice{
&TimeSeries{Name: "Ares"},
}}
})
fakeExecutor.HandleQuery("B", func(c *QueryContext) *QueryResult {
return &QueryResult{
Series: TimeSeriesSlice{
&TimeSeries{Name: "Bres+" + c.Results["A"].Series[0].Name},
}}
})
res, err := HandleRequest(context.TODO(), req)
So(err, ShouldBeNil)
Convey("Should have been batched in two requests", func() {
So(len(res.BatchTimings), ShouldEqual, 2)
})
Convey("Query B should have access to Query A results", func() {
So(res.Results["B"].Series[0].Name, ShouldEqual, "Bres+Ares")
})
})
} }
func registerFakeExecutor() *FakeExecutor { func registerFakeExecutor() *FakeExecutor {
executor, _ := NewFakeExecutor(nil) executor, _ := NewFakeExecutor(nil)
RegisterExecutor("test", func(dsInfo *models.DataSource) (Executor, error) { RegisterTsdbQueryEndpoint("test", func(dsInfo *models.DataSource) (TsdbQueryEndpoint, error) {
return executor, nil return executor, nil
}) })

View File

@ -9,6 +9,7 @@ import 'angular-sanitize';
import 'angular-dragdrop'; import 'angular-dragdrop';
import 'angular-bindonce'; import 'angular-bindonce';
import 'angular-ui'; import 'angular-ui';
import 'ngreact';
import $ from 'jquery'; import $ from 'jquery';
import angular from 'angular'; import angular from 'angular';
@ -84,6 +85,7 @@ export class GrafanaApp {
'pasvaz.bindonce', 'pasvaz.bindonce',
'ui.bootstrap', 'ui.bootstrap',
'ui.bootstrap.tpls', 'ui.bootstrap.tpls',
'react'
]; ];
var module_types = ['controllers', 'directives', 'factories', 'services', 'filters', 'routes']; var module_types = ['controllers', 'directives', 'factories', 'services', 'filters', 'routes'];

View File

@ -0,0 +1,39 @@
import * as React from 'react';
import 'react-dom';
import coreModule from '../core_module';
export interface IProps {
password: string;
}
export class PasswordStrength extends React.Component<IProps, any> {
constructor(props) {
super(props);
}
render() {
let strengthText = "strength: strong like a bull.";
let strengthClass = "password-strength-good";
if (this.props.password.length < 4) {
strengthText = "strength: weak sauce.";
strengthClass = "password-strength-bad";
}
if (this.props.password.length <= 8) {
strengthText = "strength: you can do better.";
strengthClass = "password-strength-ok";
}
return (
<div className={`password-strength small ${strengthClass}`}>
<em>{strengthText}</em>
</div>
);
}
}
coreModule.directive('passwordStrength', function(reactDirective) {
return reactDirective(PasswordStrength, ['password']);
});

View File

@ -1,58 +0,0 @@
///<reference path="../../headers/common.d.ts" />
import coreModule from 'app/core/core_module';
const template = `
<div class="collapse-box">
<div class="collapse-box__header">
<a class="collapse-box__header-title pointer" ng-click="ctrl.toggle()">
<span class="fa fa-fw fa-caret-right" ng-hide="ctrl.isOpen"></span>
<span class="fa fa-fw fa-caret-down" ng-hide="!ctrl.isOpen"></span>
{{ctrl.title}}
</a>
<div class="collapse-box__header-actions" ng-transclude="actions" ng-if="ctrl.isOpen"></div>
</div>
<div class="collapse-box__body" ng-transclude="body" ng-if="ctrl.isOpen">
</div>
</div>
`;
export class CollapseBoxCtrl {
isOpen: boolean;
stateChanged: () => void;
/** @ngInject **/
constructor(private $timeout) {
this.isOpen = false;
}
toggle() {
this.isOpen = !this.isOpen;
this.$timeout(() => {
this.stateChanged();
});
}
}
export function collapseBox() {
return {
restrict: 'E',
template: template,
controller: CollapseBoxCtrl,
bindToController: true,
controllerAs: 'ctrl',
scope: {
"title": "@",
"isOpen": "=?",
"stateChanged": "&"
},
transclude: {
'actions': '?collapseBoxActions',
'body': 'collapseBoxBody',
},
link: function(scope, elem, attrs) {
}
};
}
coreModule.directive('collapseBox', collapseBox);

View File

@ -1,8 +1,5 @@
///<reference path="../../headers/common.d.ts" /> ///<reference path="../../headers/common.d.ts" />
import config from 'app/core/config';
import _ from 'lodash';
import $ from 'jquery';
import coreModule from 'app/core/core_module'; import coreModule from 'app/core/core_module';
var template = ` var template = `
@ -37,7 +34,7 @@ export class ColorPickerCtrl {
showAxisControls: boolean; showAxisControls: boolean;
/** @ngInject */ /** @ngInject */
constructor(private $scope, private $rootScope) { constructor(private $scope, $rootScope) {
this.colors = $rootScope.colors; this.colors = $rootScope.colors;
this.autoClose = $scope.autoClose; this.autoClose = $scope.autoClose;
this.series = $scope.series; this.series = $scope.series;

View File

@ -1,8 +1,5 @@
///<reference path="../../headers/common.d.ts" /> ///<reference path="../../headers/common.d.ts" />
import config from 'app/core/config';
import _ from 'lodash';
import $ from 'jquery';
import coreModule from 'app/core/core_module'; import coreModule from 'app/core/core_module';
var template = ` var template = `

View File

@ -1,6 +1,5 @@
///<reference path="../../../headers/common.d.ts" /> ///<reference path="../../../headers/common.d.ts" />
import config from 'app/core/config';
import _ from 'lodash'; import _ from 'lodash';
import $ from 'jquery'; import $ from 'jquery';
import coreModule from '../../core_module'; import coreModule from '../../core_module';

View File

@ -1,9 +1,7 @@
///<reference path="../../headers/common.d.ts" /> ///<reference path="../../headers/common.d.ts" />
import config from 'app/core/config'; import config from 'app/core/config';
import store from 'app/core/store';
import _ from 'lodash'; import _ from 'lodash';
import angular from 'angular';
import $ from 'jquery'; import $ from 'jquery';
import coreModule from 'app/core/core_module'; import coreModule from 'app/core/core_module';

View File

@ -8,7 +8,7 @@ export class HelpCtrl {
shortcuts: any; shortcuts: any;
/** @ngInject */ /** @ngInject */
constructor(private $scope, $sce) { constructor() {
this.tabIndex = 0; this.tabIndex = 0;
this.shortcuts = { this.shortcuts = {
'Global': [ 'Global': [

View File

@ -1,7 +1,6 @@
///<reference path="../../headers/common.d.ts" /> ///<reference path="../../headers/common.d.ts" />
import _ from 'lodash'; import _ from 'lodash';
import $ from 'jquery';
import coreModule from 'app/core/core_module'; import coreModule from 'app/core/core_module';
import Drop from 'tether-drop'; import Drop from 'tether-drop';

View File

@ -60,7 +60,7 @@ export function getValuePreview (object: Object, value: string): string {
if (type === 'string') { if (type === 'string') {
value = '"' + escapeString(value) + '"'; value = '"' + escapeString(value) + '"';
} }
if (type === 'function'){ if (type === 'function') {
// Remove content of the function // Remove content of the function
return object.toString() return object.toString()

View File

@ -6,7 +6,6 @@ import {
getObjectName, getObjectName,
getType, getType,
getValuePreview, getValuePreview,
getPreview,
cssClass, cssClass,
createElement createElement
} from './helpers'; } from './helpers';
@ -191,7 +190,7 @@ export class JsonExplorer {
if (this.element) { if (this.element) {
if (this.isOpen) { if (this.isOpen) {
this.appendChildren(this.config.animateOpen); this.appendChildren(this.config.animateOpen);
} else{ } else {
this.removeChildren(this.config.animateClose); this.removeChildren(this.config.animateClose);
} }
this.element.classList.toggle(cssClass('open')); this.element.classList.toggle(cssClass('open'));

View File

@ -1,9 +1,6 @@
///<reference path="../../../headers/common.d.ts" /> ///<reference path="../../../headers/common.d.ts" />
import config from 'app/core/config';
import store from 'app/core/store'; import store from 'app/core/store';
import _ from 'lodash';
import $ from 'jquery';
import coreModule from 'app/core/core_module'; import coreModule from 'app/core/core_module';
var template = ` var template = `

View File

@ -1,16 +1,13 @@
///<reference path="../../../headers/common.d.ts" /> ///<reference path="../../../headers/common.d.ts" />
import config from 'app/core/config';
import _ from 'lodash';
import $ from 'jquery';
import coreModule from '../../core_module'; import coreModule from '../../core_module';
import {NavModel, NavModelItem} from '../../nav_model_srv'; import {NavModel} from '../../nav_model_srv';
export class NavbarCtrl { export class NavbarCtrl {
model: NavModel; model: NavModel;
/** @ngInject */ /** @ngInject */
constructor(private $scope, private $rootScope, private contextSrv) { constructor(private $rootScope) {
} }
showSearch() { showSearch() {

View File

@ -1,7 +1,6 @@
///<reference path="../../headers/common.d.ts" /> ///<reference path="../../headers/common.d.ts" />
import coreModule from 'app/core/core_module'; import coreModule from 'app/core/core_module';
import config from 'app/core/config';
import {contextSrv} from 'app/core/services/context_srv'; import {contextSrv} from 'app/core/services/context_srv';
const template = ` const template = `

View File

@ -30,7 +30,7 @@ export class DashboardRowCtrl {
dashboard: any; dashboard: any;
panel: any; panel: any;
constructor(private $rootScope) { constructor() {
this.panel.hiddenPanels = this.panel.hiddenPanels || []; this.panel.hiddenPanels = this.panel.hiddenPanels || [];
} }

View File

@ -2,7 +2,6 @@
import GeminiScrollbar from 'gemini-scrollbar'; import GeminiScrollbar from 'gemini-scrollbar';
import coreModule from 'app/core/core_module'; import coreModule from 'app/core/core_module';
import _ from 'lodash';
export function geminiScrollbar() { export function geminiScrollbar() {
return { return {

View File

@ -1,11 +1,7 @@
///<reference path="../../../headers/common.d.ts" /> ///<reference path="../../../headers/common.d.ts" />
import angular from 'angular';
import config from 'app/core/config';
import _ from 'lodash'; import _ from 'lodash';
import $ from 'jquery';
import coreModule from '../../core_module'; import coreModule from '../../core_module';
import appEvents from 'app/core/app_events';
export class SearchCtrl { export class SearchCtrl {
isOpen: boolean; isOpen: boolean;
@ -22,7 +18,7 @@ export class SearchCtrl {
openCompleted: boolean; openCompleted: boolean;
/** @ngInject */ /** @ngInject */
constructor(private $scope, private $location, private $timeout, private backendSrv, private contextSrv, private $rootScope) { constructor($scope, private $location, private $timeout, private backendSrv, public contextSrv, $rootScope) {
$rootScope.onAppEvent('show-dash-search', this.openSearch.bind(this), $scope); $rootScope.onAppEvent('show-dash-search', this.openSearch.bind(this), $scope);
$rootScope.onAppEvent('hide-dash-search', this.closeSearch.bind(this), $scope); $rootScope.onAppEvent('hide-dash-search', this.closeSearch.bind(this), $scope);
} }

View File

@ -1,7 +1,7 @@
///<reference path="../../../headers/common.d.ts" /> ///<reference path="../../../headers/common.d.ts" />
import config from 'app/core/config';
import _ from 'lodash'; import _ from 'lodash';
import config from 'app/core/config';
import $ from 'jquery'; import $ from 'jquery';
import coreModule from '../../core_module'; import coreModule from '../../core_module';

View File

@ -1,10 +1,6 @@
///<reference path="../../headers/common.d.ts" /> ///<reference path="../../headers/common.d.ts" />
import config from 'app/core/config';
import _ from 'lodash';
import $ from 'jquery';
import coreModule from 'app/core/core_module'; import coreModule from 'app/core/core_module';
import Drop from 'tether-drop';
var template = ` var template = `
<label for="check-{{ctrl.id}}" class="gf-form-label {{ctrl.labelClass}} pointer" ng-show="ctrl.label"> <label for="check-{{ctrl.id}}" class="gf-form-label {{ctrl.labelClass}} pointer" ng-show="ctrl.label">

View File

@ -1,5 +1,4 @@
import coreModule from 'app/core/core_module'; import coreModule from 'app/core/core_module';
import appEvents from 'app/core/app_events';
import _ from 'lodash'; import _ from 'lodash';
const template = ` const template = `
@ -17,7 +16,7 @@ export class UserGroupPickerCtrl {
debouncedSearchGroups: any; debouncedSearchGroups: any;
/** @ngInject */ /** @ngInject */
constructor(private backendSrv, private $scope, $sce, private uiSegmentSrv) { constructor(private backendSrv) {
this.debouncedSearchGroups = _.debounce(this.searchGroups, 500, {'leading': true, 'trailing': false}); this.debouncedSearchGroups = _.debounce(this.searchGroups, 500, {'leading': true, 'trailing': false});
this.reset(); this.reset();
} }

View File

@ -1,5 +1,4 @@
import coreModule from 'app/core/core_module'; import coreModule from 'app/core/core_module';
import appEvents from 'app/core/app_events';
import _ from 'lodash'; import _ from 'lodash';
const template = ` const template = `
@ -17,7 +16,7 @@ export class UserPickerCtrl {
userPicked: any; userPicked: any;
/** @ngInject */ /** @ngInject */
constructor(private backendSrv, private $scope, $sce) { constructor(private backendSrv) {
this.reset(); this.reset();
this.debouncedSearchUsers = _.debounce(this.searchUsers, 500, {'leading': true, 'trailing': false}); this.debouncedSearchUsers = _.debounce(this.searchUsers, 500, {'leading': true, 'trailing': false});
} }

View File

@ -1,32 +0,0 @@
<div class="modal-body">
<div class="modal-header">
<h2 class="modal-header-title">
<i class="fa fa-cog fa-spin"></i>
<span class="p-l-1">{{model.name}}</span>
</h2>
<a class="modal-header-close" ng-click="dismiss();">
<i class="fa fa-remove"></i>
</a>
</div>
<div class="modal-content">
<div ng-if="activeStep">
</div>
<!-- <table class="filter&#45;table"> -->
<!-- <tbody> -->
<!-- <tr ng&#45;repeat="step in model.steps"> -->
<!-- <td>{{step.name}}</td> -->
<!-- <td>{{step.status}}</td> -->
<!-- <td width="1%"> -->
<!-- <i class="fa fa&#45;check" style="color: #39A039"></i> -->
<!-- </td> -->
<!-- </tr> -->
<!-- </tbody> -->
<!-- </table> -->
</div>
</div>

View File

@ -1,73 +0,0 @@
///<reference path="../../../headers/common.d.ts" />
import config from 'app/core/config';
import _ from 'lodash';
import $ from 'jquery';
import coreModule from 'app/core/core_module';
import appEvents from 'app/core/app_events';
export class WizardSrv {
/** @ngInject */
constructor() {
}
}
export interface WizardStep {
name: string;
type: string;
process: any;
}
export class SelectOptionStep {
type: string;
name: string;
fulfill: any;
constructor() {
this.type = 'select';
}
process() {
return new Promise((fulfill, reject) => {
});
}
}
export class WizardFlow {
name: string;
steps: WizardStep[];
constructor(name) {
this.name = name;
this.steps = [];
}
addStep(step) {
this.steps.push(step);
}
next(index) {
var step = this.steps[0];
return step.process().then(() => {
if (this.steps.length === index+1) {
return;
}
return this.next(index+1);
});
}
start() {
appEvents.emit('show-modal', {
src: 'public/app/core/components/wizard/wizard.html',
model: this
});
return this.next(0);
}
}
coreModule.service('wizardSrv', WizardSrv);

View File

@ -8,9 +8,9 @@ export class SignUpCtrl {
/** @ngInject */ /** @ngInject */
constructor( constructor(
private $scope: any, private $scope: any,
private $location: any, private backendSrv: any,
private contextSrv: any, $location: any,
private backendSrv: any) { contextSrv: any) {
contextSrv.sidemenu = false; contextSrv.sidemenu = false;
$scope.ctrl = this; $scope.ctrl = this;

View File

@ -35,7 +35,6 @@ import {layoutSelector} from './components/layout_selector/layout_selector';
import {switchDirective} from './components/switch'; import {switchDirective} from './components/switch';
import {dashboardSelector} from './components/dashboard_selector'; import {dashboardSelector} from './components/dashboard_selector';
import {queryPartEditorDirective} from './components/query_part/query_part_editor'; import {queryPartEditorDirective} from './components/query_part/query_part_editor';
import {WizardFlow} from './components/wizard/wizard';
import {formDropdownDirective} from './components/form_dropdown/form_dropdown'; import {formDropdownDirective} from './components/form_dropdown/form_dropdown';
import 'app/core/controllers/all'; import 'app/core/controllers/all';
import 'app/core/services/all'; import 'app/core/services/all';
@ -48,7 +47,7 @@ import {assignModelProperties} from './utils/model_utils';
import {contextSrv} from './services/context_srv'; import {contextSrv} from './services/context_srv';
import {KeybindingSrv} from './services/keybindingSrv'; import {KeybindingSrv} from './services/keybindingSrv';
import {helpModal} from './components/help/help'; import {helpModal} from './components/help/help';
import {collapseBox} from './components/collapse_box'; import {PasswordStrength} from './components/PasswordStrength';
import {JsonExplorer} from './components/json_explorer/json_explorer'; import {JsonExplorer} from './components/json_explorer/json_explorer';
import {NavModelSrv, NavModel} from './nav_model_srv'; import {NavModelSrv, NavModel} from './nav_model_srv';
import {userPicker} from './components/user_picker'; import {userPicker} from './components/user_picker';
@ -73,14 +72,12 @@ export {
appEvents, appEvents,
dashboardSelector, dashboardSelector,
queryPartEditorDirective, queryPartEditorDirective,
WizardFlow,
colors, colors,
formDropdownDirective, formDropdownDirective,
assignModelProperties, assignModelProperties,
contextSrv, contextSrv,
KeybindingSrv, KeybindingSrv,
helpModal, helpModal,
collapseBox,
JsonExplorer, JsonExplorer,
NavModelSrv, NavModelSrv,
NavModel, NavModel,
@ -89,4 +86,5 @@ export {
geminiScrollbar, geminiScrollbar,
gfPageDirective, gfPageDirective,
orgSwitcher, orgSwitcher,
PasswordStrength,
}; };

View File

@ -7,7 +7,7 @@ export class DeltaCtrl {
observer: any; observer: any;
/** @ngInject */ /** @ngInject */
constructor(private $rootScope) { constructor($rootScope) {
const waitForCompile = function(mutations) { const waitForCompile = function(mutations) {
if (mutations.length === 1) { if (mutations.length === 1) {
this.$rootScope.appEvent('json-diff-ready'); this.$rootScope.appEvent('json-diff-ready');

View File

@ -4,7 +4,7 @@ define([
function (coreModule) { function (coreModule) {
'use strict'; 'use strict';
coreModule.default.directive('passwordStrength', function() { coreModule.default.directive('passwordStrength2', function() {
var template = '<div class="password-strength small" ng-if="!loginMode" ng-class="strengthClass">' + var template = '<div class="password-strength small" ng-if="!loginMode" ng-class="strengthClass">' +
'<em>{{strengthText}}</em>' + '<em>{{strengthText}}</em>' +
'</div>'; '</div>';

View File

@ -1,7 +1,5 @@
///<reference path="../../headers/common.d.ts" /> ///<reference path="../../headers/common.d.ts" />
import angular from 'angular';
import _ from 'lodash';
import $ from 'jquery'; import $ from 'jquery';
import coreModule from '../core_module'; import coreModule from '../core_module';

View File

@ -2,7 +2,6 @@
import _ from 'lodash'; import _ from 'lodash';
import config from 'app/core/config'; import config from 'app/core/config';
import coreModule from 'app/core/core_module';
import {Observable} from 'vendor/npm/rxjs/Observable'; import {Observable} from 'vendor/npm/rxjs/Observable';

View File

@ -1,7 +1,6 @@
///<reference path="../headers/common.d.ts" /> ///<reference path="../headers/common.d.ts" />
import $ from 'jquery'; import $ from 'jquery';
import _ from 'lodash';
import angular from 'angular'; import angular from 'angular';
export class Profiler { export class Profiler {

View File

@ -2,7 +2,6 @@
import './dashboard_loaders'; import './dashboard_loaders';
import angular from 'angular';
import coreModule from 'app/core/core_module'; import coreModule from 'app/core/core_module';
import {BundleLoader} from './bundle_loader'; import {BundleLoader} from './bundle_loader';

View File

@ -2,7 +2,6 @@
import angular from 'angular'; import angular from 'angular';
import _ from 'lodash'; import _ from 'lodash';
import $ from 'jquery';
import coreModule from 'app/core/core_module'; import coreModule from 'app/core/core_module';
import appEvents from 'app/core/app_events'; import appEvents from 'app/core/app_events';
@ -10,7 +9,7 @@ export class AlertSrv {
list: any[]; list: any[];
/** @ngInject */ /** @ngInject */
constructor(private $timeout, private $sce, private $rootScope, private $modal) { constructor(private $timeout, private $rootScope, private $modal) {
this.list = []; this.list = [];
} }

View File

@ -1,8 +1,6 @@
///<reference path="../../headers/common.d.ts" /> ///<reference path="../../headers/common.d.ts" />
import angular from 'angular';
import _ from 'lodash'; import _ from 'lodash';
import config from 'app/core/config';
import coreModule from 'app/core/core_module'; import coreModule from 'app/core/core_module';
import appEvents from 'app/core/app_events'; import appEvents from 'app/core/app_events';
@ -12,7 +10,7 @@ export class BackendSrv {
private noBackendCache: boolean; private noBackendCache: boolean;
/** @ngInject */ /** @ngInject */
constructor(private $http, private alertSrv, private $rootScope, private $q, private $timeout, private contextSrv) { constructor(private $http, private alertSrv, private $q, private $timeout, private contextSrv) {
} }
get(url, params?) { get(url, params?) {

View File

@ -6,7 +6,7 @@ import coreModule from '../core_module';
class DynamicDirectiveSrv { class DynamicDirectiveSrv {
/** @ngInject */ /** @ngInject */
constructor(private $compile, private $parse, private $rootScope) {} constructor(private $compile, private $rootScope) {}
addDirective(element, name, scope) { addDirective(element, name, scope) {
var child = angular.element(document.createElement(name)); var child = angular.element(document.createElement(name));

View File

@ -14,10 +14,7 @@ export class KeybindingSrv {
/** @ngInject */ /** @ngInject */
constructor( constructor(
private $rootScope, private $rootScope,
private $modal, private $location) {
private $location,
private contextSrv,
private $timeout) {
// clear out all shortcuts on route change // clear out all shortcuts on route change
$rootScope.$on('$routeChangeSuccess', () => { $rootScope.$on('$routeChangeSuccess', () => {

View File

@ -1,8 +1,6 @@
///<reference path="../../headers/common.d.ts" /> ///<reference path="../../headers/common.d.ts" />
import config from 'app/core/config';
import _ from 'lodash'; import _ from 'lodash';
import $ from 'jquery';
import coreModule from 'app/core/core_module'; import coreModule from 'app/core/core_module';
import Drop from 'tether-drop'; import Drop from 'tether-drop';

View File

@ -1,9 +1,5 @@
///<reference path="../../headers/common.d.ts" /> ///<reference path="../../headers/common.d.ts" />
import config from 'app/core/config';
import _ from 'lodash';
import $ from 'jquery';
import coreModule from 'app/core/core_module'; import coreModule from 'app/core/core_module';
import appEvents from 'app/core/app_events'; import appEvents from 'app/core/app_events';

View File

@ -0,0 +1,14 @@
// import React from 'react';
// import {describe, beforeEach, it, sinon, expect} from 'test/lib/common';
// import {shallow} from 'enzyme';
//
// import {PasswordStrength} from '../components/PasswordStrength';
//
// describe('PasswordStrength', () => {
//
// it.skip('should have class bad if length below 4', () => {
// const wrapper = shallow(<PasswordStrength password="asd" />);
// expect(wrapper.find(".password-strength-bad")).to.have.length(3);
// });
// });
//

View File

@ -168,15 +168,16 @@ export default class TimeSeries {
if (currentValue < this.stats.min) { if (currentValue < this.stats.min) {
this.stats.min = currentValue; this.stats.min = currentValue;
} }
if (this.stats.first === null){
if (this.stats.first === null) {
this.stats.first = currentValue; this.stats.first = currentValue;
}else{ } else {
if (previousValue > currentValue) { // counter reset if (previousValue > currentValue) { // counter reset
previousDeltaUp = false; previousDeltaUp = false;
if (i === this.datapoints.length-1) { // reset on last if (i === this.datapoints.length-1) { // reset on last
this.stats.delta += currentValue; this.stats.delta += currentValue;
} }
}else{ } else {
if (previousDeltaUp) { if (previousDeltaUp) {
this.stats.delta += currentValue - previousValue; // normal increment this.stats.delta += currentValue - previousValue; // normal increment
} else { } else {

View File

@ -2,12 +2,6 @@
import EventEmitter from 'eventemitter3'; import EventEmitter from 'eventemitter3';
var hasOwnProp = {}.hasOwnProperty;
function createName(name) {
return '$' + name;
}
export class Emitter { export class Emitter {
emitter: any; emitter: any;

Some files were not shown because too many files have changed in this diff Show More