merge with master

This commit is contained in:
Torkel Ödegaard 2016-09-28 13:02:15 +02:00
commit 5ccdbf01fd
164 changed files with 7241 additions and 1917 deletions

View File

@ -13,6 +13,9 @@
* **Influxdb**: Add support for elapsed(), closes [#5827](https://github.com/grafana/grafana/pull/5827)
* **OAuth**: Add support for generic oauth, closes [#4718](https://github.com/grafana/grafana/pull/4718)
* **Cloudwatch**: Add support to expand multi select template variable, closes [#5003](https://github.com/grafana/grafana/pull/5003)
* **Graph Panel**: Now supports flexible lower/upper bounds on Y-Max and Y-Min, PR [#5720](https://github.com/grafana/grafana/pull/5720)
* **Background Tasks**: Now support automatic purging of old snapshots, closes [#4087](https://github.com/grafana/grafana/issues/4087)
* **Background Tasks**: Now support automatic purging of old rendered images, closes [#2172](https://github.com/grafana/grafana/issues/2172)
### Breaking changes
* **SystemD**: Change systemd description, closes [#5971](https://github.com/grafana/grafana/pull/5971)
@ -20,6 +23,11 @@
### Bugfixes
* **Table Panel**: Fixed problem when switching to Mixed datasource in metrics tab, fixes [#5999](https://github.com/grafana/grafana/pull/5999)
* **Playlist**: Fixed problem with play order not matching order defined in playlist, fixes [#5467](https://github.com/grafana/grafana/pull/5467)
* **Graph panel**: Fixed problem with auto decimals on y axis when datamin=datamax, fixes [#6070](https://github.com/grafana/grafana/pull/6070)
* **Snapshot**: Can view embedded panels/png rendered panels in snapshots without login, fixes [#3769](https://github.com/grafana/grafana/pull/3769)
* **Elasticsearch**: Fix for query template variable when looking up terms without query, no longer relies on elasticsearch default field, fixes [#3887](https://github.com/grafana/grafana/pull/3887)
* **PNG Rendering**: Fix for server side rendering when using auth proxy, fixes [#5906](https://github.com/grafana/grafana/pull/5906)
# 3.1.2 (unreleased)
* **Templating**: Fixed issue when combining row & panel repeats, fixes [#5790](https://github.com/grafana/grafana/issues/5790)

View File

@ -334,9 +334,7 @@ func gruntBuildArg(task string) []string {
func setup() {
runPrint("go", "get", "-v", "github.com/kardianos/govendor")
runPrint("go", "get", "-v", "github.com/blang/semver")
runPrint("go", "get", "-v", "github.com/mattn/go-sqlite3")
runPrint("go", "install", "-v", "github.com/mattn/go-sqlite3")
runPrint("go", "install", "-v", "./pkg/cmd/grafana-server")
}
func test(pkg string) {

View File

@ -161,7 +161,13 @@ external_enabled = true
external_snapshot_url = https://snapshots-origin.raintank.io
external_snapshot_name = Publish to snapshot.raintank.io
#################################### Users ###############################
# remove expired snapshot
snapshot_remove_expired = true
# remove snapshots after 90 days
snapshot_TTL_days = 90
#################################### Users ####################################
[users]
# disable user signup / registration
allow_sign_up = true
@ -276,7 +282,7 @@ from_address = admin@grafana.localhost
welcome_email_on_sign_up = false
templates_pattern = emails/*.html
#################################### Logging #############################
#################################### Logging ##########################
[log]
# Either "console", "file", "syslog". Default is console and file
# Use space to separate multiple modes, e.g. "console file"

View File

@ -116,7 +116,7 @@
# in some UI views to notify that grafana or plugin update exists
# This option does not cause any auto updates, nor send any information
# only a GET request to http://grafana.net to get latest versions
check_for_updates = true
;check_for_updates = true
# Google Analytics universal tracking code, only enabled if you specify an id here
;google_analytics_ua_id =
@ -149,6 +149,12 @@ check_for_updates = true
;external_snapshot_url = https://snapshots-origin.raintank.io
;external_snapshot_name = Publish to snapshot.raintank.io
# remove expired snapshot
;snapshot_remove_expired = true
# remove snapshots after 90 days
;snapshot_TTL_days = 90
#################################### Users ####################################
[users]
# disable user signup / registration
@ -218,6 +224,15 @@ check_for_updates = true
;team_ids =
;allowed_organizations =
#################################### Grafana.net Auth ####################
[auth.grafananet]
;enabled = false
;allow_sign_up = false
;client_id = some_id
;client_secret = some_secret
;scopes = user:email
;allowed_organizations =
#################################### Auth Proxy ##########################
[auth.proxy]
;enabled = false

View File

@ -6,6 +6,10 @@ page_keywords: grafana, admin, http, api, documentation
# Admin API
The admin HTTP API does not currently work with an API token. API tokens are currently only linked to an organization and an organization role. They cannot be given
the permission of server admin; only users can be given that permission. So in order to use these API calls you will have to use basic auth, and the Grafana user
must have the Grafana admin permission.
## Settings
`GET /api/admin/settings`
@ -15,7 +19,6 @@ page_keywords: grafana, admin, http, api, documentation
GET /api/admin/settings
Accept: application/json
Content-Type: application/json
Authorization: Bearer eyJrIjoiT0tTcG1pUlY2RnVKZTFVaDFsNFZXdE9ZWmNrMkZYbk
**Example Response**:
@ -171,7 +174,6 @@ page_keywords: grafana, admin, http, api, documentation
GET /api/admin/stats
Accept: application/json
Content-Type: application/json
Authorization: Bearer eyJrIjoiT0tTcG1pUlY2RnVKZTFVaDFsNFZXdE9ZWmNrMkZYbk
**Example Response**:
@ -201,7 +203,6 @@ Create new user
POST /api/admin/users HTTP/1.1
Accept: application/json
Content-Type: application/json
Authorization: Bearer eyJrIjoiT0tTcG1pUlY2RnVKZTFVaDFsNFZXdE9ZWmNrMkZYbk
{
"name":"User",
@ -228,7 +229,6 @@ Change password for specific user
PUT /api/admin/users/2/password HTTP/1.1
Accept: application/json
Content-Type: application/json
Authorization: Bearer eyJrIjoiT0tTcG1pUlY2RnVKZTFVaDFsNFZXdE9ZWmNrMkZYbk
**Example Response**:
@ -246,7 +246,6 @@ Change password for specific user
PUT /api/admin/users/2/permissions HTTP/1.1
Accept: application/json
Content-Type: application/json
Authorization: Bearer eyJrIjoiT0tTcG1pUlY2RnVKZTFVaDFsNFZXdE9ZWmNrMkZYbk
**Example Response**:
@ -264,7 +263,6 @@ Change password for specific user
DELETE /api/admin/users/2 HTTP/1.1
Accept: application/json
Content-Type: application/json
Authorization: Bearer eyJrIjoiT0tTcG1pUlY2RnVKZTFVaDFsNFZXdE9ZWmNrMkZYbk
**Example Response**:

View File

@ -525,3 +525,9 @@ Set root url to a Grafana instance where you want to publish external snapshots
### external_snapshot_name
Set name for external snapshot button. Defaults to `Publish to snapshot.raintank.io`
### snapshot_remove_expired
Enable this to automatically remove expired snapshots.

### snapshot_TTL_days
Time to live for snapshots, in days. Defaults to 90.

View File

@ -58,6 +58,7 @@ func Register(r *macaron.Macaron) {
r.Get("/plugins/:id/page/:page", reqSignedIn, Index)
r.Get("/dashboard/*", reqSignedIn, Index)
r.Get("/dashboard-solo/snapshot/*", Index)
r.Get("/dashboard-solo/*", reqSignedIn, Index)
r.Get("/import/dashboard", reqSignedIn, Index)
r.Get("/dashboards/*", reqSignedIn, Index)
@ -202,9 +203,9 @@ func Register(r *macaron.Macaron) {
r.Get("/plugins", wrap(GetPluginList))
r.Get("/plugins/:pluginId/settings", wrap(GetPluginSettingById))
r.Get("/plugins/:pluginId/readme", wrap(GetPluginReadme))
r.Group("/plugins", func() {
r.Get("/:pluginId/readme", wrap(GetPluginReadme))
r.Get("/:pluginId/dashboards/", wrap(GetPluginDashboards))
r.Post("/:pluginId/settings", bind(m.UpdatePluginSettingCmd{}), wrap(UpdatePluginSetting))
}, reqOrgAdmin)
@ -243,7 +244,8 @@ func Register(r *macaron.Macaron) {
r.Get("/search/", Search)
// metrics
r.Get("/metrics/test", wrap(GetTestMetrics))
r.Post("/tsdb/query", bind(dtos.MetricRequest{}), wrap(QueryMetrics))
r.Get("/tsdb/testdata/scenarios", wrap(GetTestDataScenarios))
// metrics
r.Get("/metrics", wrap(GetInternalMetrics))

View File

@ -96,13 +96,10 @@ func (slice DataSourceList) Swap(i, j int) {
slice[i], slice[j] = slice[j], slice[i]
}
type MetricQueryResultDto struct {
Data []MetricQueryResultDataDto `json:"data"`
}
type MetricQueryResultDataDto struct {
Target string `json:"target"`
DataPoints [][2]float64 `json:"datapoints"`
type MetricRequest struct {
From string `json:"from"`
To string `json:"to"`
Queries []*simplejson.Json `json:"queries"`
}
type UserStars struct {

23
pkg/api/dtos/playlist.go Normal file
View File

@ -0,0 +1,23 @@
package dtos
type PlaylistDashboard struct {
Id int64 `json:"id"`
Slug string `json:"slug"`
Title string `json:"title"`
Uri string `json:"uri"`
Order int `json:"order"`
}
type PlaylistDashboardsSlice []PlaylistDashboard
func (slice PlaylistDashboardsSlice) Len() int {
return len(slice)
}
func (slice PlaylistDashboardsSlice) Less(i, j int) bool {
return slice[i].Order < slice[j].Order
}
func (slice PlaylistDashboardsSlice) Swap(i, j int) {
slice[i], slice[j] = slice[j], slice[i]
}

View File

@ -38,7 +38,7 @@ func getFrontendSettingsMap(c *middleware.Context) (map[string]interface{}, erro
url := ds.Url
if ds.Access == m.DS_ACCESS_PROXY {
url = setting.AppSubUrl + "/api/datasources/proxy/" + strconv.FormatInt(ds.Id, 10)
url = "/api/datasources/proxy/" + strconv.FormatInt(ds.Id, 10)
}
var dsMap = map[string]interface{}{

View File

@ -1,6 +1,7 @@
package api
import (
"fmt"
"strings"
"github.com/grafana/grafana/pkg/api/dtos"
@ -32,6 +33,16 @@ func setIndexViewData(c *middleware.Context) (*dtos.IndexViewData, error) {
locale = parts[0]
}
appUrl := setting.AppUrl
appSubUrl := setting.AppSubUrl
// special case when doing localhost call from phantomjs
if c.IsRenderCall {
appUrl = fmt.Sprintf("%s://localhost:%s", setting.Protocol, setting.HttpPort)
appSubUrl = ""
settings["appSubUrl"] = ""
}
var data = dtos.IndexViewData{
User: &dtos.CurrentUser{
Id: c.UserId,
@ -49,8 +60,8 @@ func setIndexViewData(c *middleware.Context) (*dtos.IndexViewData, error) {
Locale: locale,
},
Settings: settings,
AppUrl: setting.AppUrl,
AppSubUrl: setting.AppSubUrl,
AppUrl: appUrl,
AppSubUrl: appSubUrl,
GoogleAnalyticsId: setting.GoogleAnalyticsId,
GoogleTagManagerId: setting.GoogleTagManagerId,
BuildVersion: setting.BuildVersion,
@ -154,7 +165,7 @@ func setIndexViewData(c *middleware.Context) (*dtos.IndexViewData, error) {
}
}
if c.OrgRole == m.ROLE_ADMIN {
if len(appLink.Children) > 0 && c.OrgRole == m.ROLE_ADMIN {
appLink.Children = append(appLink.Children, &dtos.NavLink{Divider: true})
appLink.Children = append(appLink.Children, &dtos.NavLink{Text: "Plugin Config", Icon: "fa fa-cog", Url: setting.AppSubUrl + "/plugins/" + plugin.Id + "/edit"})
}

View File

@ -2,39 +2,54 @@ package api
import (
"encoding/json"
"math/rand"
"net/http"
"strconv"
"github.com/grafana/grafana/pkg/api/dtos"
"github.com/grafana/grafana/pkg/metrics"
"github.com/grafana/grafana/pkg/middleware"
"github.com/grafana/grafana/pkg/tsdb"
"github.com/grafana/grafana/pkg/tsdb/testdata"
"github.com/grafana/grafana/pkg/util"
)
func GetTestMetrics(c *middleware.Context) Response {
from := c.QueryInt64("from")
to := c.QueryInt64("to")
maxDataPoints := c.QueryInt64("maxDataPoints")
stepInSeconds := (to - from) / maxDataPoints
// POST /api/tsdb/query
func QueryMetrics(c *middleware.Context, reqDto dtos.MetricRequest) Response {
timeRange := tsdb.NewTimeRange(reqDto.From, reqDto.To)
result := dtos.MetricQueryResultDto{}
result.Data = make([]dtos.MetricQueryResultDataDto, 1)
request := &tsdb.Request{TimeRange: timeRange}
for seriesIndex := range result.Data {
points := make([][2]float64, maxDataPoints)
walker := rand.Float64() * 100
time := from
for _, query := range reqDto.Queries {
request.Queries = append(request.Queries, &tsdb.Query{
RefId: query.Get("refId").MustString("A"),
MaxDataPoints: query.Get("maxDataPoints").MustInt64(100),
IntervalMs: query.Get("intervalMs").MustInt64(1000),
Model: query,
DataSource: &tsdb.DataSourceInfo{
Name: "Grafana TestDataDB",
PluginId: "grafana-testdata-datasource",
},
})
}
for i := range points {
points[i][0] = walker
points[i][1] = float64(time)
walker += rand.Float64() - 0.5
time += stepInSeconds
}
resp, err := tsdb.HandleRequest(request)
if err != nil {
return ApiError(500, "Metric request error", err)
}
result.Data[seriesIndex].Target = "test-series-" + strconv.Itoa(seriesIndex)
result.Data[seriesIndex].DataPoints = points
return Json(200, &resp)
}
// GET /api/tsdb/testdata/scenarios
func GetTestDataScenarios(c *middleware.Context) Response {
result := make([]interface{}, 0)
for _, scenario := range testdata.ScenarioRegistry {
result = append(result, map[string]interface{}{
"id": scenario.Id,
"name": scenario.Name,
"description": scenario.Description,
"stringInput": scenario.StringInput,
})
}
return Json(200, &result)

View File

@ -1,16 +1,18 @@
package api
import (
"sort"
"strconv"
"github.com/grafana/grafana/pkg/api/dtos"
"github.com/grafana/grafana/pkg/bus"
_ "github.com/grafana/grafana/pkg/log"
m "github.com/grafana/grafana/pkg/models"
"github.com/grafana/grafana/pkg/services/search"
)
func populateDashboardsById(dashboardByIds []int64) ([]m.PlaylistDashboardDto, error) {
result := make([]m.PlaylistDashboardDto, 0)
func populateDashboardsById(dashboardByIds []int64, dashboardIdOrder map[int64]int) (dtos.PlaylistDashboardsSlice, error) {
result := make(dtos.PlaylistDashboardsSlice, 0)
if len(dashboardByIds) > 0 {
dashboardQuery := m.GetDashboardsQuery{DashboardIds: dashboardByIds}
@ -19,11 +21,12 @@ func populateDashboardsById(dashboardByIds []int64) ([]m.PlaylistDashboardDto, e
}
for _, item := range dashboardQuery.Result {
result = append(result, m.PlaylistDashboardDto{
result = append(result, dtos.PlaylistDashboard{
Id: item.Id,
Slug: item.Slug,
Title: item.Title,
Uri: "db/" + item.Slug,
Order: dashboardIdOrder[item.Id],
})
}
}
@ -31,8 +34,8 @@ func populateDashboardsById(dashboardByIds []int64) ([]m.PlaylistDashboardDto, e
return result, nil
}
func populateDashboardsByTag(orgId, userId int64, dashboardByTag []string) []m.PlaylistDashboardDto {
result := make([]m.PlaylistDashboardDto, 0)
func populateDashboardsByTag(orgId, userId int64, dashboardByTag []string, dashboardTagOrder map[string]int) dtos.PlaylistDashboardsSlice {
result := make(dtos.PlaylistDashboardsSlice, 0)
if len(dashboardByTag) > 0 {
for _, tag := range dashboardByTag {
@ -47,10 +50,11 @@ func populateDashboardsByTag(orgId, userId int64, dashboardByTag []string) []m.P
if err := bus.Dispatch(&searchQuery); err == nil {
for _, item := range searchQuery.Result {
result = append(result, m.PlaylistDashboardDto{
result = append(result, dtos.PlaylistDashboard{
Id: item.Id,
Title: item.Title,
Uri: item.Uri,
Order: dashboardTagOrder[tag],
})
}
}
@ -60,28 +64,33 @@ func populateDashboardsByTag(orgId, userId int64, dashboardByTag []string) []m.P
return result
}
func LoadPlaylistDashboards(orgId, userId, playlistId int64) ([]m.PlaylistDashboardDto, error) {
func LoadPlaylistDashboards(orgId, userId, playlistId int64) (dtos.PlaylistDashboardsSlice, error) {
playlistItems, _ := LoadPlaylistItems(playlistId)
dashboardByIds := make([]int64, 0)
dashboardByTag := make([]string, 0)
dashboardIdOrder := make(map[int64]int)
dashboardTagOrder := make(map[string]int)
for _, i := range playlistItems {
if i.Type == "dashboard_by_id" {
dashboardId, _ := strconv.ParseInt(i.Value, 10, 64)
dashboardByIds = append(dashboardByIds, dashboardId)
dashboardIdOrder[dashboardId] = i.Order
}
if i.Type == "dashboard_by_tag" {
dashboardByTag = append(dashboardByTag, i.Value)
dashboardTagOrder[i.Value] = i.Order
}
}
result := make([]m.PlaylistDashboardDto, 0)
result := make(dtos.PlaylistDashboardsSlice, 0)
var k, _ = populateDashboardsById(dashboardByIds)
var k, _ = populateDashboardsById(dashboardByIds, dashboardIdOrder)
result = append(result, k...)
result = append(result, populateDashboardsByTag(orgId, userId, dashboardByTag)...)
result = append(result, populateDashboardsByTag(orgId, userId, dashboardByTag, dashboardTagOrder)...)
sort.Sort(sort.Reverse(result))
return result, nil
}

View File

@ -6,35 +6,21 @@ import (
"github.com/grafana/grafana/pkg/components/renderer"
"github.com/grafana/grafana/pkg/middleware"
"github.com/grafana/grafana/pkg/setting"
"github.com/grafana/grafana/pkg/util"
)
func RenderToPng(c *middleware.Context) {
queryReader := util.NewUrlQueryReader(c.Req.URL)
queryParams := fmt.Sprintf("?%s", c.Req.URL.RawQuery)
sessionId := c.Session.ID()
// Handle api calls authenticated without session
if sessionId == "" && c.ApiKeyId != 0 {
c.Session.Start(c)
c.Session.Set(middleware.SESS_KEY_APIKEY, c.ApiKeyId)
// release will make sure the new session is persisted before
// we spin up phantomjs
c.Session.Release()
// cleanup session after render is complete
defer func() { c.Session.Destory(c) }()
}
renderOpts := &renderer.RenderOpts{
Url: c.Params("*") + queryParams,
Width: queryReader.Get("width", "800"),
Height: queryReader.Get("height", "400"),
SessionId: c.Session.ID(),
Timeout: queryReader.Get("timeout", "30"),
Path: c.Params("*") + queryParams,
Width: queryReader.Get("width", "800"),
Height: queryReader.Get("height", "400"),
OrgId: c.OrgId,
Timeout: queryReader.Get("timeout", "30"),
}
renderOpts.Url = setting.ToAbsUrl(renderOpts.Url)
pngPath, err := renderer.RenderToPng(renderOpts)
if err != nil {

View File

@ -17,6 +17,7 @@ import (
"github.com/grafana/grafana/pkg/metrics"
"github.com/grafana/grafana/pkg/plugins"
alertingInit "github.com/grafana/grafana/pkg/services/alerting/init"
"github.com/grafana/grafana/pkg/services/backgroundtasks"
"github.com/grafana/grafana/pkg/services/eventpublisher"
"github.com/grafana/grafana/pkg/services/notifications"
"github.com/grafana/grafana/pkg/services/search"
@ -56,19 +57,19 @@ func main() {
setting.BuildCommit = commit
setting.BuildStamp = buildstampInt64
go listenToSystemSignels()
go listenToSystemSignals()
flag.Parse()
writePIDFile()
initRuntime()
metrics.Init()
search.Init()
login.Init()
social.NewOAuthService()
eventpublisher.Init()
plugins.Init()
alertingInit.Init()
backgroundtasks.Init()
if err := notifications.Init(); err != nil {
log.Fatal(3, "Notification service failed to initialize", err)
@ -116,7 +117,7 @@ func writePIDFile() {
}
}
func listenToSystemSignels() {
func listenToSystemSignals() {
signalChan := make(chan os.Signal, 1)
code := 0

View File

@ -12,36 +12,51 @@ import (
"strconv"
"github.com/grafana/grafana/pkg/log"
"github.com/grafana/grafana/pkg/middleware"
"github.com/grafana/grafana/pkg/setting"
"github.com/grafana/grafana/pkg/util"
)
type RenderOpts struct {
Url string
Width string
Height string
SessionId string
Timeout string
Path string
Width string
Height string
Timeout string
OrgId int64
}
var rendererLog log.Logger = log.New("png-renderer")
func RenderToPng(params *RenderOpts) (string, error) {
rendererLog.Info("Rendering", "url", params.Url)
rendererLog.Info("Rendering", "path", params.Path)
var executable = "phantomjs"
if runtime.GOOS == "windows" {
executable = executable + ".exe"
}
url := fmt.Sprintf("%s://localhost:%s/%s", setting.Protocol, setting.HttpPort, params.Path)
binPath, _ := filepath.Abs(filepath.Join(setting.PhantomDir, executable))
scriptPath, _ := filepath.Abs(filepath.Join(setting.PhantomDir, "render.js"))
pngPath, _ := filepath.Abs(filepath.Join(setting.ImagesDir, util.GetRandomString(20)))
pngPath = pngPath + ".png"
cmd := exec.Command(binPath, "--ignore-ssl-errors=true", scriptPath, "url="+params.Url, "width="+params.Width,
"height="+params.Height, "png="+pngPath, "cookiename="+setting.SessionOptions.CookieName,
"domain="+setting.Domain, "sessionid="+params.SessionId)
renderKey := middleware.AddRenderAuthKey(params.OrgId)
defer middleware.RemoveRenderAuthKey(renderKey)
cmdArgs := []string{
"--ignore-ssl-errors=true",
scriptPath,
"url=" + url,
"width=" + params.Width,
"height=" + params.Height,
"png=" + pngPath,
"domain=" + setting.Domain,
"renderKey=" + renderKey,
}
cmd := exec.Command(binPath, cmdArgs...)
stdout, err := cmd.StdoutPipe()
if err != nil {

View File

@ -32,11 +32,25 @@ func New(logger string, ctx ...interface{}) Logger {
}
func Trace(format string, v ...interface{}) {
Root.Debug(fmt.Sprintf(format, v))
var message string
if len(v) > 0 {
message = fmt.Sprintf(format, v)
} else {
message = format
}
Root.Debug(message)
}
func Debug(format string, v ...interface{}) {
Root.Debug(fmt.Sprintf(format, v))
var message string
if len(v) > 0 {
message = fmt.Sprintf(format, v)
} else {
message = format
}
Root.Debug(message)
}
func Debug2(message string, v ...interface{}) {

View File

@ -24,10 +24,10 @@ func NewGauge(meta *MetricMeta) Gauge {
}
}
func RegGauge(meta *MetricMeta) Gauge {
g := NewGauge(meta)
MetricStats.Register(g)
return g
func RegGauge(name string, tagStrings ...string) Gauge {
tr := NewGauge(NewMetricMeta(name, tagStrings))
MetricStats.Register(tr)
return tr
}
// GaugeSnapshot is a read-only copy of another Gauge.

View File

@ -63,6 +63,8 @@ func (this *GraphitePublisher) Publish(metrics []Metric) {
switch metric := m.(type) {
case Counter:
this.addCount(buf, metricName+".count", metric.Count(), now)
case Gauge:
this.addCount(buf, metricName, metric.Value(), now)
case Timer:
percentiles := metric.Percentiles([]float64{0.25, 0.75, 0.90, 0.99})
this.addCount(buf, metricName+".count", metric.Count(), now)

View File

@ -49,6 +49,12 @@ var (
// Timers
M_DataSource_ProxyReq_Timer Timer
M_Alerting_Exeuction_Time Timer
// StatTotals
M_StatTotal_Dashboards Gauge
M_StatTotal_Users Gauge
M_StatTotal_Orgs Gauge
M_StatTotal_Playlists Gauge
)
func initMetricVars(settings *MetricSettings) {
@ -105,4 +111,10 @@ func initMetricVars(settings *MetricSettings) {
// Timers
M_DataSource_ProxyReq_Timer = RegTimer("api.dataproxy.request.all")
M_Alerting_Exeuction_Time = RegTimer("alerting.execution_time")
// StatTotals
M_StatTotal_Dashboards = RegGauge("stat_totals", "stat", "dashboards")
M_StatTotal_Users = RegGauge("stat_totals", "stat", "users")
M_StatTotal_Orgs = RegGauge("stat_totals", "stat", "orgs")
M_StatTotal_Playlists = RegGauge("stat_totals", "stat", "playlists")
}

View File

@ -15,6 +15,7 @@ import (
)
var metricsLogger log.Logger = log.New("metrics")
var metricPublishCounter int64 = 0
func Init() {
settings := readSettings()
@ -45,12 +46,33 @@ func sendMetrics(settings *MetricSettings) {
return
}
updateTotalStats()
metrics := MetricStats.GetSnapshots()
for _, publisher := range settings.Publishers {
publisher.Publish(metrics)
}
}
func updateTotalStats() {
// every interval also publish totals
metricPublishCounter++
if metricPublishCounter%10 == 0 {
// get stats
statsQuery := m.GetSystemStatsQuery{}
if err := bus.Dispatch(&statsQuery); err != nil {
metricsLogger.Error("Failed to get system stats", "error", err)
return
}
M_StatTotal_Dashboards.Update(statsQuery.Result.DashboardCount)
M_StatTotal_Users.Update(statsQuery.Result.UserCount)
M_StatTotal_Playlists.Update(statsQuery.Result.PlaylistCount)
M_StatTotal_Orgs.Update(statsQuery.Result.OrgCount)
}
}
func sendUsageStats() {
if !setting.ReportingEnabled {
return

View File

@ -22,6 +22,7 @@ type Context struct {
Session SessionStore
IsSignedIn bool
IsRenderCall bool
AllowAnonymous bool
Logger log.Logger
}
@ -42,11 +43,11 @@ func GetContextHandler() macaron.Handler {
// then init session and look for userId in session
// then look for api key in session (special case for render calls via api)
// then test if anonymous access is enabled
if initContextWithApiKey(ctx) ||
if initContextWithRenderAuth(ctx) ||
initContextWithApiKey(ctx) ||
initContextWithBasicAuth(ctx) ||
initContextWithAuthProxy(ctx) ||
initContextWithUserSessionCookie(ctx) ||
initContextWithApiKeyFromSession(ctx) ||
initContextWithAnonymousUser(ctx) {
}
@ -176,29 +177,6 @@ func initContextWithBasicAuth(ctx *Context) bool {
}
}
// special case for panel render calls with api key
func initContextWithApiKeyFromSession(ctx *Context) bool {
keyId := ctx.Session.Get(SESS_KEY_APIKEY)
if keyId == nil {
return false
}
keyQuery := m.GetApiKeyByIdQuery{ApiKeyId: keyId.(int64)}
if err := bus.Dispatch(&keyQuery); err != nil {
ctx.Logger.Error("Failed to get api key by id", "id", keyId, "error", err)
return false
} else {
apikey := keyQuery.Result
ctx.IsSignedIn = true
ctx.SignedInUser = &m.SignedInUser{}
ctx.OrgRole = apikey.Role
ctx.ApiKeyId = apikey.Id
ctx.OrgId = apikey.OrgId
return true
}
}
// Handle handles and logs error by given status.
func (ctx *Context) Handle(status int, title string, err error) {
if err != nil {

View File

@ -0,0 +1,55 @@
package middleware
import (
"sync"
m "github.com/grafana/grafana/pkg/models"
"github.com/grafana/grafana/pkg/util"
)
var renderKeysLock sync.Mutex
var renderKeys map[string]*m.SignedInUser = make(map[string]*m.SignedInUser)
func initContextWithRenderAuth(ctx *Context) bool {
key := ctx.GetCookie("renderKey")
if key == "" {
return false
}
renderKeysLock.Lock()
defer renderKeysLock.Unlock()
if renderUser, exists := renderKeys[key]; !exists {
ctx.JsonApiErr(401, "Invalid Render Key", nil)
return true
} else {
ctx.IsSignedIn = true
ctx.SignedInUser = renderUser
ctx.IsRenderCall = true
return true
}
}
type renderContextFunc func(key string) (string, error)
func AddRenderAuthKey(orgId int64) string {
renderKeysLock.Lock()
key := util.GetRandomString(32)
renderKeys[key] = &m.SignedInUser{
OrgId: orgId,
OrgRole: m.ROLE_VIEWER,
}
renderKeysLock.Unlock()
return key
}
func RemoveRenderAuthKey(key string) {
renderKeysLock.Lock()
delete(renderKeys, key)
renderKeysLock.Unlock()
}

View File

@ -13,7 +13,6 @@ import (
const (
SESS_KEY_USERID = "uid"
SESS_KEY_APIKEY = "apikey_id" // used fror render requests with api keys
)
var sessionManager *session.Manager

View File

@ -57,17 +57,6 @@ func (this PlaylistDashboard) TableName() string {
type Playlists []*Playlist
type PlaylistDashboards []*PlaylistDashboard
//
// DTOS
//
type PlaylistDashboardDto struct {
Id int64 `json:"id"`
Slug string `json:"slug"`
Title string `json:"title"`
Uri string `json:"uri"`
}
//
// COMMANDS
//

View File

@ -1,10 +1,10 @@
package models
type SystemStats struct {
DashboardCount int
UserCount int
OrgCount int
PlaylistCount int
DashboardCount int64
UserCount int64
OrgCount int64
PlaylistCount int64
}
type DataSourceStats struct {

7
pkg/models/timer.go Normal file
View File

@ -0,0 +1,7 @@
package models
import "time"
type HourCommand struct {
Time time.Time
}

View File

@ -6,6 +6,7 @@ type DataSourcePlugin struct {
FrontendPluginBase
Annotations bool `json:"annotations"`
Metrics bool `json:"metrics"`
Alerting bool `json:"alerting"`
BuiltIn bool `json:"builtIn"`
Mixed bool `json:"mixed"`
App string `json:"app"`

View File

@ -43,7 +43,12 @@ func (fp *FrontendPluginBase) setPathsBasedOnApp(app *AppPlugin) {
appSubPath := strings.Replace(fp.PluginDir, app.PluginDir, "", 1)
fp.IncludedInAppId = app.Id
fp.BaseUrl = app.BaseUrl
fp.Module = util.JoinUrlFragments("plugins/"+app.Id, appSubPath) + "/module"
if isExternalPlugin(app.PluginDir) {
fp.Module = util.JoinUrlFragments("plugins/"+app.Id, appSubPath) + "/module"
} else {
fp.Module = util.JoinUrlFragments("app/plugins/app/"+app.Id, appSubPath) + "/module"
}
}
func (fp *FrontendPluginBase) handleModuleDefaults() {

View File

@ -9,6 +9,11 @@ import (
"github.com/grafana/grafana/pkg/log"
"github.com/grafana/grafana/pkg/setting"
"github.com/hashicorp/go-version"
)
var (
httpClient http.Client = http.Client{Timeout: time.Duration(10 * time.Second)}
)
type GrafanaNetPlugin struct {
@ -39,26 +44,23 @@ func StartPluginUpdateChecker() {
}
func getAllExternalPluginSlugs() string {
str := ""
var result []string
for _, plug := range Plugins {
if plug.IsCorePlugin {
continue
}
str += plug.Id + ","
result = append(result, plug.Id)
}
return str
return strings.Join(result, ",")
}
func checkForUpdates() {
log.Trace("Checking for updates")
client := http.Client{Timeout: time.Duration(5 * time.Second)}
pluginSlugs := getAllExternalPluginSlugs()
resp, err := client.Get("https://grafana.net/api/plugins/versioncheck?slugIn=" + pluginSlugs + "&grafanaVersion=" + setting.BuildVersion)
resp, err := httpClient.Get("https://grafana.net/api/plugins/versioncheck?slugIn=" + pluginSlugs + "&grafanaVersion=" + setting.BuildVersion)
if err != nil {
log.Trace("Failed to get plugins repo from grafana.net, %v", err.Error())
@ -84,12 +86,20 @@ func checkForUpdates() {
for _, gplug := range gNetPlugins {
if gplug.Slug == plug.Id {
plug.GrafanaNetVersion = gplug.Version
plug.GrafanaNetHasUpdate = plug.Info.Version != plug.GrafanaNetVersion
plugVersion, err1 := version.NewVersion(plug.Info.Version)
gplugVersion, err2 := version.NewVersion(gplug.Version)
if err1 != nil || err2 != nil {
plug.GrafanaNetHasUpdate = plug.Info.Version != plug.GrafanaNetVersion
} else {
plug.GrafanaNetHasUpdate = plugVersion.LessThan(gplugVersion)
}
}
}
}
resp2, err := client.Get("https://raw.githubusercontent.com/grafana/grafana/master/latest.json")
resp2, err := httpClient.Get("https://raw.githubusercontent.com/grafana/grafana/master/latest.json")
if err != nil {
log.Trace("Failed to get latest.json repo from github: %v", err.Error())
return
@ -116,4 +126,11 @@ func checkForUpdates() {
GrafanaLatestVersion = githubLatest.Stable
GrafanaHasUpdate = githubLatest.Stable != setting.BuildVersion
}
currVersion, err1 := version.NewVersion(setting.BuildVersion)
latestVersion, err2 := version.NewVersion(GrafanaLatestVersion)
if err1 == nil && err2 == nil {
GrafanaHasUpdate = currVersion.LessThan(latestVersion)
}
}

View File

@ -5,6 +5,7 @@ import (
"github.com/grafana/grafana/pkg/components/simplejson"
"github.com/grafana/grafana/pkg/services/alerting"
"gopkg.in/guregu/null.v3"
)
var (
@ -13,13 +14,13 @@ var (
)
type AlertEvaluator interface {
Eval(reducedValue *float64) bool
Eval(reducedValue null.Float) bool
}
type NoDataEvaluator struct{}
func (e *NoDataEvaluator) Eval(reducedValue *float64) bool {
return reducedValue == nil
func (e *NoDataEvaluator) Eval(reducedValue null.Float) bool {
return reducedValue.Valid == false
}
type ThresholdEvaluator struct {
@ -43,16 +44,16 @@ func newThresholdEvaludator(typ string, model *simplejson.Json) (*ThresholdEvalu
return defaultEval, nil
}
func (e *ThresholdEvaluator) Eval(reducedValue *float64) bool {
if reducedValue == nil {
func (e *ThresholdEvaluator) Eval(reducedValue null.Float) bool {
if reducedValue.Valid == false {
return false
}
switch e.Type {
case "gt":
return *reducedValue > e.Threshold
return reducedValue.Float64 > e.Threshold
case "lt":
return *reducedValue < e.Threshold
return reducedValue.Float64 < e.Threshold
}
return false
@ -86,16 +87,18 @@ func newRangedEvaluator(typ string, model *simplejson.Json) (*RangedEvaluator, e
return rangedEval, nil
}
func (e *RangedEvaluator) Eval(reducedValue *float64) bool {
if reducedValue == nil {
func (e *RangedEvaluator) Eval(reducedValue null.Float) bool {
if reducedValue.Valid == false {
return false
}
floatValue := reducedValue.Float64
switch e.Type {
case "within_range":
return (e.Lower < *reducedValue && e.Upper > *reducedValue) || (e.Upper < *reducedValue && e.Lower > *reducedValue)
return (e.Lower < floatValue && e.Upper > floatValue) || (e.Upper < floatValue && e.Lower > floatValue)
case "outside_range":
return (e.Upper < *reducedValue && e.Lower < *reducedValue) || (e.Upper > *reducedValue && e.Lower > *reducedValue)
return (e.Upper < floatValue && e.Lower < floatValue) || (e.Upper > floatValue && e.Lower > floatValue)
}
return false

View File

@ -3,6 +3,8 @@ package conditions
import (
"testing"
"gopkg.in/guregu/null.v3"
"github.com/grafana/grafana/pkg/components/simplejson"
. "github.com/smartystreets/goconvey/convey"
)
@ -14,7 +16,7 @@ func evalutorScenario(json string, reducedValue float64, datapoints ...float64)
evaluator, err := NewAlertEvaluator(jsonModel)
So(err, ShouldBeNil)
return evaluator.Eval(&reducedValue)
return evaluator.Eval(null.FloatFrom(reducedValue))
}
func TestEvalutors(t *testing.T) {
@ -51,6 +53,6 @@ func TestEvalutors(t *testing.T) {
evaluator, err := NewAlertEvaluator(jsonModel)
So(err, ShouldBeNil)
So(evaluator.Eval(nil), ShouldBeTrue)
So(evaluator.Eval(null.FloatFromPtr(nil)), ShouldBeTrue)
})
}

View File

@ -2,6 +2,8 @@ package conditions
import (
"fmt"
"strings"
"time"
"github.com/grafana/grafana/pkg/bus"
"github.com/grafana/grafana/pkg/components/simplejson"
@ -32,7 +34,8 @@ type AlertQuery struct {
}
func (c *QueryCondition) Eval(context *alerting.EvalContext) {
seriesList, err := c.executeQuery(context)
timeRange := tsdb.NewTimeRange(c.Query.From, c.Query.To)
seriesList, err := c.executeQuery(context, timeRange)
if err != nil {
context.Error = err
return
@ -43,21 +46,21 @@ func (c *QueryCondition) Eval(context *alerting.EvalContext) {
reducedValue := c.Reducer.Reduce(series)
evalMatch := c.Evaluator.Eval(reducedValue)
if reducedValue == nil {
if reducedValue.Valid == false {
emptySerieCount++
continue
}
if context.IsTestRun {
context.Logs = append(context.Logs, &alerting.ResultLogEntry{
Message: fmt.Sprintf("Condition[%d]: Eval: %v, Metric: %s, Value: %1.3f", c.Index, evalMatch, series.Name, *reducedValue),
Message: fmt.Sprintf("Condition[%d]: Eval: %v, Metric: %s, Value: %1.3f", c.Index, evalMatch, series.Name, reducedValue.Float64),
})
}
if evalMatch {
context.EvalMatches = append(context.EvalMatches, &alerting.EvalMatch{
Metric: series.Name,
Value: *reducedValue,
Value: reducedValue.Float64,
})
}
}
@ -66,7 +69,7 @@ func (c *QueryCondition) Eval(context *alerting.EvalContext) {
context.Firing = len(context.EvalMatches) > 0
}
func (c *QueryCondition) executeQuery(context *alerting.EvalContext) (tsdb.TimeSeriesSlice, error) {
func (c *QueryCondition) executeQuery(context *alerting.EvalContext, timeRange *tsdb.TimeRange) (tsdb.TimeSeriesSlice, error) {
getDsInfo := &m.GetDataSourceByIdQuery{
Id: c.Query.DatasourceId,
OrgId: context.Rule.OrgId,
@ -76,7 +79,7 @@ func (c *QueryCondition) executeQuery(context *alerting.EvalContext) (tsdb.TimeS
return nil, fmt.Errorf("Could not find datasource")
}
req := c.getRequestForAlertRule(getDsInfo.Result)
req := c.getRequestForAlertRule(getDsInfo.Result, timeRange)
result := make(tsdb.TimeSeriesSlice, 0)
resp, err := c.HandleRequest(req)
@ -102,16 +105,13 @@ func (c *QueryCondition) executeQuery(context *alerting.EvalContext) (tsdb.TimeS
return result, nil
}
func (c *QueryCondition) getRequestForAlertRule(datasource *m.DataSource) *tsdb.Request {
func (c *QueryCondition) getRequestForAlertRule(datasource *m.DataSource, timeRange *tsdb.TimeRange) *tsdb.Request {
req := &tsdb.Request{
TimeRange: tsdb.TimeRange{
From: c.Query.From,
To: c.Query.To,
},
TimeRange: timeRange,
Queries: []*tsdb.Query{
{
RefId: "A",
Query: c.Query.Model.Get("target").MustString(),
Model: c.Query.Model,
DataSource: &tsdb.DataSourceInfo{
Id: datasource.Id,
Name: datasource.Name,
@ -141,6 +141,15 @@ func NewQueryCondition(model *simplejson.Json, index int) (*QueryCondition, erro
condition.Query.Model = queryJson.Get("model")
condition.Query.From = queryJson.Get("params").MustArray()[1].(string)
condition.Query.To = queryJson.Get("params").MustArray()[2].(string)
if err := validateFromValue(condition.Query.From); err != nil {
return nil, err
}
if err := validateToValue(condition.Query.To); err != nil {
return nil, err
}
condition.Query.DatasourceId = queryJson.Get("datasourceId").MustInt64()
reducerJson := model.Get("reducer")
@ -155,3 +164,26 @@ func NewQueryCondition(model *simplejson.Json, index int) (*QueryCondition, erro
condition.Evaluator = evaluator
return &condition, nil
}
// validateFromValue checks that the "from" part of an alert query time range
// is parseable. Accepted forms are "now-<duration>" (e.g. "now-5m") or a bare
// duration such as "5m"; the "now-" prefix is stripped before parsing.
//
// Fix: use strings.TrimPrefix instead of strings.Replace(..., 1), which would
// remove the first "now-" occurrence anywhere in the string, not just a prefix.
func validateFromValue(from string) error {
	fromRaw := strings.TrimPrefix(from, "now-")
	_, err := time.ParseDuration("-" + fromRaw)
	return err
}
// validateToValue checks the "to" part of an alert query time range.
// Accepted forms: the literal "now", "now-<duration>", or a bare duration.
//
// Fix: use strings.TrimPrefix (prefix-only) instead of strings.Replace.
func validateToValue(to string) error {
	if to == "now" {
		return nil
	}

	if strings.HasPrefix(to, "now-") {
		if _, err := time.ParseDuration("-" + strings.TrimPrefix(to, "now-")); err == nil {
			return nil
		}
		// Fall through on parse failure: the original logic gives an
		// unparseable "now-<x>" one last chance as a plain duration.
	}

	_, err := time.ParseDuration(to)
	return err
}

View File

@ -3,6 +3,8 @@ package conditions
import (
"testing"
null "gopkg.in/guregu/null.v3"
"github.com/grafana/grafana/pkg/bus"
"github.com/grafana/grafana/pkg/components/simplejson"
m "github.com/grafana/grafana/pkg/models"
@ -41,9 +43,8 @@ func TestQueryCondition(t *testing.T) {
})
Convey("should fire when avg is above 100", func() {
one := float64(120)
two := float64(0)
ctx.series = tsdb.TimeSeriesSlice{tsdb.NewTimeSeries("test1", [][2]*float64{{&one, &two}})}
points := tsdb.NewTimeSeriesPointsFromArgs(120, 0)
ctx.series = tsdb.TimeSeriesSlice{tsdb.NewTimeSeries("test1", points)}
ctx.exec()
So(ctx.result.Error, ShouldBeNil)
@ -51,9 +52,8 @@ func TestQueryCondition(t *testing.T) {
})
Convey("Should not fire when avg is below 100", func() {
one := float64(90)
two := float64(0)
ctx.series = tsdb.TimeSeriesSlice{tsdb.NewTimeSeries("test1", [][2]*float64{{&one, &two}})}
points := tsdb.NewTimeSeriesPointsFromArgs(90, 0)
ctx.series = tsdb.TimeSeriesSlice{tsdb.NewTimeSeries("test1", points)}
ctx.exec()
So(ctx.result.Error, ShouldBeNil)
@ -61,11 +61,9 @@ func TestQueryCondition(t *testing.T) {
})
Convey("Should fire if only first serie matches", func() {
one := float64(120)
two := float64(0)
ctx.series = tsdb.TimeSeriesSlice{
tsdb.NewTimeSeries("test1", [][2]*float64{{&one, &two}}),
tsdb.NewTimeSeries("test2", [][2]*float64{{&two, &two}}),
tsdb.NewTimeSeries("test1", tsdb.NewTimeSeriesPointsFromArgs(120, 0)),
tsdb.NewTimeSeries("test2", tsdb.NewTimeSeriesPointsFromArgs(0, 0)),
}
ctx.exec()
@ -76,8 +74,8 @@ func TestQueryCondition(t *testing.T) {
Convey("Empty series", func() {
Convey("Should set NoDataFound both series are empty", func() {
ctx.series = tsdb.TimeSeriesSlice{
tsdb.NewTimeSeries("test1", [][2]*float64{}),
tsdb.NewTimeSeries("test2", [][2]*float64{}),
tsdb.NewTimeSeries("test1", tsdb.NewTimeSeriesPointsFromArgs()),
tsdb.NewTimeSeries("test2", tsdb.NewTimeSeriesPointsFromArgs()),
}
ctx.exec()
@ -86,10 +84,9 @@ func TestQueryCondition(t *testing.T) {
})
Convey("Should set NoDataFound both series contains null", func() {
one := float64(120)
ctx.series = tsdb.TimeSeriesSlice{
tsdb.NewTimeSeries("test1", [][2]*float64{{nil, &one}}),
tsdb.NewTimeSeries("test2", [][2]*float64{{nil, &one}}),
tsdb.NewTimeSeries("test1", tsdb.TimeSeriesPoints{tsdb.TimePoint{null.FloatFromPtr(nil), null.FloatFrom(0)}}),
tsdb.NewTimeSeries("test2", tsdb.TimeSeriesPoints{tsdb.TimePoint{null.FloatFromPtr(nil), null.FloatFrom(0)}}),
}
ctx.exec()
@ -98,11 +95,9 @@ func TestQueryCondition(t *testing.T) {
})
Convey("Should not set NoDataFound if one serie is empty", func() {
one := float64(120)
two := float64(0)
ctx.series = tsdb.TimeSeriesSlice{
tsdb.NewTimeSeries("test1", [][2]*float64{}),
tsdb.NewTimeSeries("test2", [][2]*float64{{&one, &two}}),
tsdb.NewTimeSeries("test1", tsdb.NewTimeSeriesPointsFromArgs()),
tsdb.NewTimeSeries("test2", tsdb.NewTimeSeriesPointsFromArgs(120, 0)),
}
ctx.exec()

View File

@ -4,19 +4,20 @@ import (
"math"
"github.com/grafana/grafana/pkg/tsdb"
"gopkg.in/guregu/null.v3"
)
type QueryReducer interface {
Reduce(timeSeries *tsdb.TimeSeries) *float64
Reduce(timeSeries *tsdb.TimeSeries) null.Float
}
type SimpleReducer struct {
Type string
}
func (s *SimpleReducer) Reduce(series *tsdb.TimeSeries) *float64 {
func (s *SimpleReducer) Reduce(series *tsdb.TimeSeries) null.Float {
if len(series.Points) == 0 {
return nil
return null.FloatFromPtr(nil)
}
value := float64(0)
@ -25,36 +26,36 @@ func (s *SimpleReducer) Reduce(series *tsdb.TimeSeries) *float64 {
switch s.Type {
case "avg":
for _, point := range series.Points {
if point[0] != nil {
value += *point[0]
if point[0].Valid {
value += point[0].Float64
allNull = false
}
}
value = value / float64(len(series.Points))
case "sum":
for _, point := range series.Points {
if point[0] != nil {
value += *point[0]
if point[0].Valid {
value += point[0].Float64
allNull = false
}
}
case "min":
value = math.MaxFloat64
for _, point := range series.Points {
if point[0] != nil {
if point[0].Valid {
allNull = false
if value > *point[0] {
value = *point[0]
if value > point[0].Float64 {
value = point[0].Float64
}
}
}
case "max":
value = -math.MaxFloat64
for _, point := range series.Points {
if point[0] != nil {
if point[0].Valid {
allNull = false
if value < *point[0] {
value = *point[0]
if value < point[0].Float64 {
value = point[0].Float64
}
}
}
@ -64,10 +65,10 @@ func (s *SimpleReducer) Reduce(series *tsdb.TimeSeries) *float64 {
}
if allNull {
return nil
return null.FloatFromPtr(nil)
}
return &value
return null.FloatFrom(value)
}
func NewSimpleReducer(typ string) *SimpleReducer {

View File

@ -10,44 +10,41 @@ import (
func TestSimpleReducer(t *testing.T) {
Convey("Test simple reducer by calculating", t, func() {
Convey("avg", func() {
result := *testReducer("avg", 1, 2, 3)
result := testReducer("avg", 1, 2, 3)
So(result, ShouldEqual, float64(2))
})
Convey("sum", func() {
result := *testReducer("sum", 1, 2, 3)
result := testReducer("sum", 1, 2, 3)
So(result, ShouldEqual, float64(6))
})
Convey("min", func() {
result := *testReducer("min", 3, 2, 1)
result := testReducer("min", 3, 2, 1)
So(result, ShouldEqual, float64(1))
})
Convey("max", func() {
result := *testReducer("max", 1, 2, 3)
result := testReducer("max", 1, 2, 3)
So(result, ShouldEqual, float64(3))
})
Convey("count", func() {
result := *testReducer("count", 1, 2, 3000)
result := testReducer("count", 1, 2, 3000)
So(result, ShouldEqual, float64(3))
})
})
}
func testReducer(typ string, datapoints ...float64) *float64 {
func testReducer(typ string, datapoints ...float64) float64 {
reducer := NewSimpleReducer(typ)
var timeserie [][2]*float64
dummieTimestamp := float64(521452145)
series := &tsdb.TimeSeries{
Name: "test time serie",
}
for idx := range datapoints {
timeserie = append(timeserie, [2]*float64{&datapoints[idx], &dummieTimestamp})
series.Points = append(series.Points, tsdb.NewTimePoint(datapoints[idx], 1234134))
}
tsdb := &tsdb.TimeSeries{
Name: "test time serie",
Points: timeserie,
}
return reducer.Reduce(tsdb)
return reducer.Reduce(series).Float64
}

View File

@ -93,14 +93,18 @@ func (e *Engine) executeJob(job *Job) {
}
func (e *Engine) resultDispatcher() {
for result := range e.resultQueue {
go e.handleResponse(result)
}
}
func (e *Engine) handleResponse(result *EvalContext) {
defer func() {
if err := recover(); err != nil {
e.log.Error("Panic in resultDispatcher", "error", err, "stack", log.Stack(1))
}
}()
for result := range e.resultQueue {
e.log.Debug("Alert Rule Result", "ruleId", result.Rule.Id, "firing", result.Firing)
e.resultHandler.Handle(result)
}
e.log.Debug("Alert Rule Result", "ruleId", result.Rule.Id, "firing", result.Firing)
e.resultHandler.Handle(result)
}

View File

@ -71,7 +71,7 @@ func (c *EvalContext) GetNotificationTitle() string {
return "[" + c.GetStateModel().Text + "] " + c.Rule.Name
}
func (c *EvalContext) getDashboardSlug() (string, error) {
func (c *EvalContext) GetDashboardSlug() (string, error) {
if c.dashboardSlug != "" {
return c.dashboardSlug, nil
}
@ -86,7 +86,7 @@ func (c *EvalContext) getDashboardSlug() (string, error) {
}
func (c *EvalContext) GetRuleUrl() (string, error) {
if slug, err := c.getDashboardSlug(); err != nil {
if slug, err := c.GetDashboardSlug(); err != nil {
return "", err
} else {
ruleUrl := fmt.Sprintf("%sdashboard/db/%s?fullscreen&edit&tab=alert&panelId=%d", setting.AppUrl, slug, c.Rule.PanelId)
@ -94,15 +94,6 @@ func (c *EvalContext) GetRuleUrl() (string, error) {
}
}
func (c *EvalContext) GetImageUrl() (string, error) {
if slug, err := c.getDashboardSlug(); err != nil {
return "", err
} else {
ruleUrl := fmt.Sprintf("%sdashboard-solo/db/%s?&panelId=%d", setting.AppUrl, slug, c.Rule.PanelId)
return ruleUrl, nil
}
}
func NewEvalContext(rule *Rule) *EvalContext {
return &EvalContext{
StartTime: time.Now(),

View File

@ -20,7 +20,7 @@ type DefaultEvalHandler struct {
func NewEvalHandler() *DefaultEvalHandler {
return &DefaultEvalHandler{
log: log.New("alerting.evalHandler"),
alertJobTimeout: time.Second * 10,
alertJobTimeout: time.Second * 15,
}
}

View File

@ -6,6 +6,8 @@ import (
_ "github.com/grafana/grafana/pkg/services/alerting/notifiers"
"github.com/grafana/grafana/pkg/setting"
_ "github.com/grafana/grafana/pkg/tsdb/graphite"
_ "github.com/grafana/grafana/pkg/tsdb/prometheus"
_ "github.com/grafana/grafana/pkg/tsdb/testdata"
)
var engine *alerting.Engine

View File

@ -2,6 +2,7 @@ package alerting
import (
"errors"
"fmt"
"github.com/grafana/grafana/pkg/bus"
"github.com/grafana/grafana/pkg/components/imguploader"
@ -60,20 +61,20 @@ func (n *RootNotifier) sendNotifications(notifiers []Notifier, context *EvalCont
}
}
func (n *RootNotifier) uploadImage(context *EvalContext) error {
func (n *RootNotifier) uploadImage(context *EvalContext) (err error) {
uploader, _ := imguploader.NewImageUploader()
imageUrl, err := context.GetImageUrl()
if err != nil {
return err
renderOpts := &renderer.RenderOpts{
Width: "800",
Height: "400",
Timeout: "30",
OrgId: context.Rule.OrgId,
}
renderOpts := &renderer.RenderOpts{
Url: imageUrl,
Width: "800",
Height: "400",
SessionId: "123",
Timeout: "10",
if slug, err := context.GetDashboardSlug(); err != nil {
return err
} else {
renderOpts.Path = fmt.Sprintf("dashboard-solo/db/%s?&panelId=%d", slug, context.Rule.PanelId)
}
if imagePath, err := renderer.RenderToPng(renderOpts); err != nil {

View File

@ -52,9 +52,8 @@ func (this *WebhookNotifier) Notify(context *alerting.EvalContext) {
bodyJSON.Set("rule_url", ruleUrl)
}
imageUrl, err := context.GetImageUrl()
if err == nil {
bodyJSON.Set("image_url", imageUrl)
if context.ImagePublicUrl != "" {
bodyJSON.Set("image_url", context.ImagePublicUrl)
}
body, _ := bodyJSON.MarshalJSON()

View File

@ -0,0 +1,39 @@
//"I want to be a cleaner, just like you," said Mathilda
//"Okay," replied Leon
package backgroundtasks
import (
"time"
"github.com/grafana/grafana/pkg/bus"
"github.com/grafana/grafana/pkg/log"
"github.com/grafana/grafana/pkg/models"
)
var (
tlog log.Logger = log.New("ticker")
)
// Init starts the background-task scheduler loop in its own goroutine.
func Init() {
	go start()
}
// start fires an immediate cleanup pass, then triggers one every hour.
// It never returns; it is intended to run in its own goroutine (see Init).
func start() {
	go cleanup(time.Now())

	// A single-case select around a ticker channel is redundant (gosimple
	// S1000); ranging over the channel behaves identically.
	ticker := time.NewTicker(time.Hour * 1)
	for tick := range ticker.C {
		go cleanup(tick)
	}
}
// cleanup publishes an HourCommand event on the bus so that all registered
// cleanup listeners (snapshot purging, rendered-image removal, ...) run.
// Failures are logged but not propagated, since this is fire-and-forget
// from the ticker loop.
func cleanup(now time.Time) {
	err := bus.Publish(&models.HourCommand{Time: now})
	if err != nil {
		tlog.Error("Cleanup job failed", "error", err)
	}
}

View File

@ -0,0 +1,38 @@
package backgroundtasks
import (
"io/ioutil"
"os"
"path"
"github.com/grafana/grafana/pkg/bus"
"github.com/grafana/grafana/pkg/models"
"github.com/grafana/grafana/pkg/setting"
)
func init() {
bus.AddEventListener(CleanTmpFiles)
}
// CleanTmpFiles removes rendered PNG files in setting.ImagesDir whose
// modification time is older than setting.RenderedImageTTLDays. It is
// triggered by the hourly HourCommand event (see the backgroundtasks ticker).
func CleanTmpFiles(cmd *models.HourCommand) error {
	files, err := ioutil.ReadDir(setting.ImagesDir)
	if err != nil {
		// Fix: the ReadDir error was previously ignored until the end of
		// the function; fail fast before iterating.
		return err
	}

	var toDelete []os.FileInfo
	for _, file := range files {
		// A file is stale once its mod time plus the TTL lies before "now".
		if file.ModTime().AddDate(0, 0, setting.RenderedImageTTLDays).Before(cmd.Time) {
			toDelete = append(toDelete, file)
		}
	}

	for _, file := range toDelete {
		fullPath := path.Join(setting.ImagesDir, file.Name())
		if err := os.Remove(fullPath); err != nil {
			return err
		}
	}

	// Fix: "keept" typo; also report the number actually kept rather than
	// the total file count.
	tlog.Debug("Deleted old rendered images", "deleted", len(toDelete), "kept", len(files)-len(toDelete))
	return nil
}

View File

@ -12,6 +12,7 @@ import (
"net/smtp"
"os"
"strings"
"time"
"github.com/grafana/grafana/pkg/log"
"github.com/grafana/grafana/pkg/setting"
@ -66,7 +67,7 @@ func sendToSmtpServer(recipients []string, msgContent []byte) error {
tlsconfig.Certificates = []tls.Certificate{cert}
}
conn, err := net.Dial("tcp", net.JoinHostPort(host, port))
conn, err := net.DialTimeout("tcp", net.JoinHostPort(host, port), time.Second*10)
if err != nil {
return err
}

View File

@ -44,7 +44,7 @@ func sendWebRequest(webhook *Webhook) error {
webhookLog.Debug("Sending webhook", "url", webhook.Url)
client := http.Client{
Timeout: time.Duration(3 * time.Second),
Timeout: time.Duration(10 * time.Second),
}
request, err := http.NewRequest("POST", webhook.Url, bytes.NewReader([]byte(webhook.Body)))

View File

@ -92,7 +92,7 @@ func HandleAlertsQuery(query *m.GetAlertsQuery) error {
params = append(params, query.Limit)
}
sql.WriteString("ORDER BY name ASC")
sql.WriteString(" ORDER BY name ASC")
alerts := make([]*m.Alert, 0)
if err := x.Sql(sql.String(), params...).Find(&alerts); err != nil {

View File

@ -66,7 +66,8 @@ func GetAlertNotificationsToSend(query *m.GetAlertNotificationsToSendQuery) erro
sql.WriteString(` WHERE alert_notification.org_id = ?`)
params = append(params, query.OrgId)
sql.WriteString(` AND ((alert_notification.is_default = 1)`)
sql.WriteString(` AND ((alert_notification.is_default = ?)`)
params = append(params, dialect.BooleanStr(true))
if len(query.Ids) > 0 {
sql.WriteString(` OR alert_notification.id IN (?` + strings.Repeat(",?", len(query.Ids)-1) + ")")
for _, v := range query.Ids {

View File

@ -75,7 +75,7 @@ func (r *SqlAnnotationRepo) Find(query *annotations.ItemQuery) ([]*annotations.I
query.Limit = 10
}
sql.WriteString(fmt.Sprintf("ORDER BY epoch DESC LIMIT %v", query.Limit))
sql.WriteString(fmt.Sprintf(" ORDER BY epoch DESC LIMIT %v", query.Limit))
items := make([]*annotations.Item, 0)
if err := x.Sql(sql.String(), params...).Find(&items); err != nil {

View File

@ -5,7 +5,9 @@ import (
"github.com/go-xorm/xorm"
"github.com/grafana/grafana/pkg/bus"
"github.com/grafana/grafana/pkg/log"
m "github.com/grafana/grafana/pkg/models"
"github.com/grafana/grafana/pkg/setting"
)
func init() {
@ -13,6 +15,31 @@ func init() {
bus.AddHandler("sql", GetDashboardSnapshot)
bus.AddHandler("sql", DeleteDashboardSnapshot)
bus.AddHandler("sql", SearchDashboardSnapshots)
bus.AddEventListener(DeleteExpiredSnapshots)
}
// DeleteExpiredSnapshots is the hourly cleanup listener for the
// dashboard_snapshot table. It deletes snapshots past their explicit
// expiry (when SnapShotRemoveExpired is enabled) and snapshots older
// than SnapShotTTLDays.
func DeleteExpiredSnapshots(cmd *m.HourCommand) error {
	return inTransaction(func(sess *xorm.Session) error {
		var expiredCount int64 = 0
		var oldCount int64 = 0

		if setting.SnapShotRemoveExpired {
			deleteExpiredSql := "DELETE FROM dashboard_snapshot WHERE expires < ?"
			expiredResponse, err := x.Exec(deleteExpiredSql, cmd.Time)
			if err != nil {
				return err
			}
			expiredCount, _ = expiredResponse.RowsAffected()
		}

		oldSnapshotsSql := "DELETE FROM dashboard_snapshot WHERE created < ?"
		oldResponse, err := x.Exec(oldSnapshotsSql, cmd.Time.AddDate(0, 0, setting.SnapShotTTLDays*-1))
		if err != nil {
			// Fix: previously RowsAffected() was called on a possibly-nil
			// result, panicking if the delete failed.
			return err
		}
		oldCount, _ = oldResponse.RowsAffected()

		// Fix: "snaphots" typo in the log message.
		log.Debug2("Deleted old/expired snapshots", "old", oldCount, "expired", expiredCount)
		return nil
	})
}
func CreateDashboardSnapshot(cmd *m.CreateDashboardSnapshotCommand) error {

View File

@ -120,4 +120,9 @@ func addDashboardMigration(mg *Migrator) {
mg.AddMigration("Add index for plugin_id in dashboard", NewAddIndexMigration(dashboardV2, &Index{
Cols: []string{"org_id", "plugin_id"}, Type: IndexType,
}))
// dashboard_id index for dashboard_tag table
mg.AddMigration("Add index for dashboard_id in dashboard_tag", NewAddIndexMigration(dashboardTagV1, &Index{
Cols: []string{"dashboard_id"}, Type: IndexType,
}))
}

View File

@ -18,6 +18,7 @@ type Dialect interface {
SupportEngine() bool
LikeStr() string
Default(col *Column) string
BooleanStr(bool) string
CreateIndexSql(tableName string, index *Index) string
CreateTableSql(table *Table) string

View File

@ -29,6 +29,10 @@ func (db *Mysql) AutoIncrStr() string {
return "AUTO_INCREMENT"
}
// BooleanStr renders a Go bool as the MySQL boolean literal
// ("true"/"false") for use in generated SQL.
func (db *Mysql) BooleanStr(value bool) string {
	return strconv.FormatBool(value)
}
func (db *Mysql) SqlType(c *Column) string {
var res string
switch c.Type {

View File

@ -36,6 +36,10 @@ func (db *Postgres) AutoIncrStr() string {
return ""
}
// BooleanStr renders a Go bool as the Postgres boolean literal
// ("true"/"false") for use in generated SQL.
func (db *Postgres) BooleanStr(value bool) string {
	return strconv.FormatBool(value)
}
func (b *Postgres) Default(col *Column) string {
if col.Type == DB_Bool {
if col.Default == "0" {

View File

@ -29,6 +29,13 @@ func (db *Sqlite3) AutoIncrStr() string {
return "AUTOINCREMENT"
}
// BooleanStr renders a Go bool as "1"/"0", since SQLite stores booleans
// as integers.
func (db *Sqlite3) BooleanStr(value bool) string {
	if value {
		return "1"
	}
	return "0"
}
func (db *Sqlite3) SqlType(c *Column) string {
switch c.Type {
case DB_Date, DB_DateTime, DB_TimeStamp, DB_Time:

View File

@ -78,9 +78,11 @@ var (
DataProxyWhiteList map[string]bool
// Snapshots
ExternalSnapshotUrl string
ExternalSnapshotName string
ExternalEnabled bool
ExternalSnapshotUrl string
ExternalSnapshotName string
ExternalEnabled bool
SnapShotTTLDays int
SnapShotRemoveExpired bool
// User settings
AllowUserSignUp bool
@ -118,8 +120,9 @@ var (
IsWindows bool
// PhantomJs Rendering
ImagesDir string
PhantomDir string
ImagesDir string
PhantomDir string
RenderedImageTTLDays int
// for logging purposes
configFiles []string
@ -495,6 +498,8 @@ func NewConfigContext(args *CommandLineArgs) error {
ExternalSnapshotUrl = snapshots.Key("external_snapshot_url").String()
ExternalSnapshotName = snapshots.Key("external_snapshot_name").String()
ExternalEnabled = snapshots.Key("external_enabled").MustBool(true)
SnapShotRemoveExpired = snapshots.Key("snapshot_remove_expired").MustBool(true)
SnapShotTTLDays = snapshots.Key("snapshot_TTL_days").MustInt(90)
// read data source proxy white list
DataProxyWhiteList = make(map[string]bool)
@ -535,6 +540,9 @@ func NewConfigContext(args *CommandLineArgs) error {
ImagesDir = filepath.Join(DataPath, "png")
PhantomDir = filepath.Join(HomePath, "vendor/phantomjs")
tmpFilesSection := Cfg.Section("tmp.files")
RenderedImageTTLDays = tmpFilesSection.Key("rendered_image_ttl_days").MustInt(14)
analytics := Cfg.Section("analytics")
ReportingEnabled = analytics.Key("reporting_enabled").MustBool(true)
CheckForUpdates = analytics.Key("check_for_updates").MustBool(true)

View File

@ -12,8 +12,8 @@ type OAuthInfo struct {
type OAuther struct {
GitHub, Google, Twitter, Generic, GrafanaNet bool
OAuthInfos map[string]*OAuthInfo
OAuthProviderName string
OAuthInfos map[string]*OAuthInfo
OAuthProviderName string
}
var OAuthService *OAuther

View File

@ -26,7 +26,7 @@ func (bg *Batch) process(context *QueryContext) {
if executor == nil {
bg.Done = true
result := &BatchResult{
Error: errors.New("Could not find executor for data source type " + bg.Queries[0].DataSource.PluginId),
Error: errors.New("Could not find executor for data source type: " + bg.Queries[0].DataSource.PluginId),
QueryResults: make(map[string]*QueryResult),
}
for _, query := range bg.Queries {

View File

@ -38,7 +38,7 @@ func init() {
}
HttpClient = http.Client{
Timeout: time.Duration(10 * time.Second),
Timeout: time.Duration(15 * time.Second),
Transport: tr,
}
}
@ -54,7 +54,7 @@ func (e *GraphiteExecutor) Execute(queries tsdb.QuerySlice, context *tsdb.QueryC
}
for _, query := range queries {
formData["target"] = []string{query.Query}
formData["target"] = []string{query.Model.Get("target").MustString()}
}
if setting.Env == setting.DEV {
@ -79,7 +79,8 @@ func (e *GraphiteExecutor) Execute(queries tsdb.QuerySlice, context *tsdb.QueryC
}
result.QueryResults = make(map[string]*tsdb.QueryResult)
queryRes := &tsdb.QueryResult{}
queryRes := tsdb.NewQueryResult()
for _, series := range data {
queryRes.Series = append(queryRes.Series, &tsdb.TimeSeries{
Name: series.Target,
@ -102,9 +103,9 @@ func (e *GraphiteExecutor) parseResponse(res *http.Response) ([]TargetResponseDT
return nil, err
}
if res.StatusCode == http.StatusUnauthorized {
glog.Info("Request is Unauthorized", "status", res.Status, "body", string(body))
return nil, fmt.Errorf("Request is Unauthorized status: %v body: %s", res.Status, string(body))
if res.StatusCode/100 != 2 {
glog.Info("Request failed", "status", res.Status, "body", string(body))
return nil, fmt.Errorf("Request failed status: %v", res.Status)
}
var data []TargetResponseDTO

View File

@ -1,23 +1 @@
package graphite
// func TestGraphite(t *testing.T) {
//
// Convey("When executing graphite query", t, func() {
// executor := NewGraphiteExecutor(&tsdb.DataSourceInfo{
// Url: "http://localhost:8080",
// })
//
// queries := tsdb.QuerySlice{
// &tsdb.Query{Query: "{\"target\": \"apps.backend.*.counters.requests.count\"}"},
// }
//
// context := tsdb.NewQueryContext(queries, tsdb.TimeRange{})
// result := executor.Execute(queries, context)
// So(result.Error, ShouldBeNil)
//
// Convey("Should return series", func() {
// So(result.QueryResults, ShouldNotBeEmpty)
// })
// })
//
// }

View File

@ -1,6 +1,8 @@
package graphite
import "github.com/grafana/grafana/pkg/tsdb"
type TargetResponseDTO struct {
Target string `json:"target"`
DataPoints [][2]*float64 `json:"datapoints"`
Target string `json:"target"`
DataPoints tsdb.TimeSeriesPoints `json:"datapoints"`
}

View File

@ -1,19 +1,31 @@
package tsdb
type TimeRange struct {
From string
To string
import (
"github.com/grafana/grafana/pkg/components/simplejson"
"gopkg.in/guregu/null.v3"
)
type Query struct {
RefId string
Model *simplejson.Json
Depends []string
DataSource *DataSourceInfo
Results []*TimeSeries
Exclude bool
MaxDataPoints int64
IntervalMs int64
}
type QuerySlice []*Query
type Request struct {
TimeRange TimeRange
MaxDataPoints int
Queries QuerySlice
TimeRange *TimeRange
Queries QuerySlice
}
type Response struct {
BatchTimings []*BatchTiming
Results map[string]*QueryResult
BatchTimings []*BatchTiming `json:"timings"`
Results map[string]*QueryResult `json:"results"`
}
type DataSourceInfo struct {
@ -40,19 +52,41 @@ type BatchResult struct {
}
type QueryResult struct {
Error error
RefId string
Series TimeSeriesSlice
Error error `json:"error"`
RefId string `json:"refId"`
Series TimeSeriesSlice `json:"series"`
}
type TimeSeries struct {
Name string `json:"name"`
Points [][2]*float64 `json:"points"`
Name string `json:"name"`
Points TimeSeriesPoints `json:"points"`
}
type TimePoint [2]null.Float
type TimeSeriesPoints []TimePoint
type TimeSeriesSlice []*TimeSeries
func NewTimeSeries(name string, points [][2]*float64) *TimeSeries {
// NewQueryResult returns an empty QueryResult with a non-nil Series slice,
// so it serializes to JSON as [] rather than null.
func NewQueryResult() *QueryResult {
	return &QueryResult{
		Series: make(TimeSeriesSlice, 0),
	}
}
// NewTimePoint builds a (value, timestamp) point with both fields marked
// valid (non-null). Timestamp unit is presumably epoch milliseconds, as
// used by callers — TODO confirm.
func NewTimePoint(value float64, timestamp float64) TimePoint {
	return TimePoint{null.FloatFrom(value), null.FloatFrom(timestamp)}
}
// NewTimeSeriesPointsFromArgs builds a point slice from a flat list of
// (value, timestamp) pairs, e.g. NewTimeSeriesPointsFromArgs(120, 0, 90, 10).
// A trailing unpaired value is ignored rather than causing an
// index-out-of-range panic (the original read values[i+1] unguarded).
func NewTimeSeriesPointsFromArgs(values ...float64) TimeSeriesPoints {
	points := make(TimeSeriesPoints, 0)

	for i := 0; i+1 < len(values); i += 2 {
		points = append(points, NewTimePoint(values[i], values[i+1]))
	}

	return points
}
func NewTimeSeries(name string, points TimeSeriesPoints) *TimeSeries {
return &TimeSeries{
Name: name,
Points: points,

View File

@ -0,0 +1,161 @@
package prometheus
import (
"fmt"
"net/http"
"regexp"
"strings"
"time"
"github.com/grafana/grafana/pkg/log"
"github.com/grafana/grafana/pkg/tsdb"
"github.com/prometheus/client_golang/api/prometheus"
pmodel "github.com/prometheus/common/model"
"golang.org/x/net/context"
)
// PrometheusExecutor executes Grafana tsdb queries against a Prometheus
// datasource. It embeds the datasource info (URL, etc.) it queries.
type PrometheusExecutor struct {
	*tsdb.DataSourceInfo
}

// NewPrometheusExecutor is the executor factory registered with the tsdb
// package (see init) for datasources of type "prometheus".
func NewPrometheusExecutor(dsInfo *tsdb.DataSourceInfo) tsdb.Executor {
	return &PrometheusExecutor{dsInfo}
}
var (
	// plog is the package logger for the Prometheus executor.
	plog log.Logger
	// HttpClient is not referenced in this file; presumably reserved for
	// custom transport configuration — TODO confirm before removing.
	HttpClient http.Client
)

func init() {
	plog = log.New("tsdb.prometheus")
	// Register this executor for datasources of type "prometheus".
	tsdb.RegisterExecutor("prometheus", NewPrometheusExecutor)
}
// getClient builds a Prometheus query API client pointed at this
// datasource's URL. A fresh client is created on every call.
func (e *PrometheusExecutor) getClient() (prometheus.QueryAPI, error) {
	cfg := prometheus.Config{
		Address: e.DataSourceInfo.Url,
	}

	client, err := prometheus.New(cfg)
	if err != nil {
		return nil, err
	}

	return prometheus.NewQueryAPI(client), nil
}
// Execute runs the batch against Prometheus: it builds a client, parses the
// first query's model into a PrometheusQuery, issues a range query over the
// context's time range, and converts the response into tsdb query results.
// Any failure along the way is returned on the BatchResult's Error field.
func (e *PrometheusExecutor) Execute(queries tsdb.QuerySlice, queryContext *tsdb.QueryContext) *tsdb.BatchResult {
	batchResult := &tsdb.BatchResult{}

	queryAPI, err := e.getClient()
	if err != nil {
		return resultWithError(batchResult, err)
	}

	parsedQuery, err := parseQuery(queries, queryContext)
	if err != nil {
		return resultWithError(batchResult, err)
	}

	rangeOpts := prometheus.Range{
		Start: parsedQuery.Start,
		End:   parsedQuery.End,
		Step:  parsedQuery.Step,
	}

	value, err := queryAPI.QueryRange(context.Background(), parsedQuery.Expr, rangeOpts)
	if err != nil {
		return resultWithError(batchResult, err)
	}

	results, err := parseResponse(value, parsedQuery)
	if err != nil {
		return resultWithError(batchResult, err)
	}

	batchResult.QueryResults = results
	return batchResult
}
// formatLegend expands "{{label}}" placeholders in the query's legend format
// with the corresponding label values from the metric. Placeholders whose
// label is absent from the metric are left untouched.
func formatLegend(metric pmodel.Metric, query *PrometheusQuery) string {
	// MustCompile is safe on a constant pattern; the Compile error was
	// silently discarded before.
	reg := regexp.MustCompile(`\{\{\s*(.+?)\s*\}\}`)

	result := reg.ReplaceAllFunc([]byte(query.LegendFormat), func(in []byte) []byte {
		// Fix: use the captured group so whitespace inside the braces
		// ("{{ app }}") is excluded from the label lookup. Previously only
		// the braces were stripped, so padded placeholders never matched.
		labelName := reg.FindSubmatch(in)[1]
		if val, exists := metric[pmodel.LabelName(labelName)]; exists {
			return []byte(val)
		}
		return in
	})

	return string(result)
}
// parseQuery extracts a PrometheusQuery from the first query in the slice.
// It returns an error when the slice is empty, when a required model field
// (expr, step, legendFormat) is missing, or when the context's time range
// cannot be parsed.
func parseQuery(queries tsdb.QuerySlice, queryContext *tsdb.QueryContext) (*PrometheusQuery, error) {
	if len(queries) == 0 {
		// Robustness fix: guard against an index-out-of-range panic on an
		// empty batch.
		return nil, fmt.Errorf("query request contains no queries")
	}

	queryModel := queries[0]

	expr, err := queryModel.Model.Get("expr").String()
	if err != nil {
		return nil, err
	}

	step, err := queryModel.Model.Get("step").Int64()
	if err != nil {
		return nil, err
	}

	format, err := queryModel.Model.Get("legendFormat").String()
	if err != nil {
		return nil, err
	}

	start, err := queryContext.TimeRange.ParseFrom()
	if err != nil {
		return nil, err
	}

	end, err := queryContext.TimeRange.ParseTo()
	if err != nil {
		return nil, err
	}

	return &PrometheusQuery{
		Expr:         expr,
		Step:         time.Second * time.Duration(step),
		LegendFormat: format,
		Start:        start,
		End:          end,
	}, nil
}
// parseResponse converts a Prometheus range-query result (which must be a
// matrix) into Grafana's tsdb result map, keyed under ref id "A". Sample
// timestamps (seconds) are converted to epoch milliseconds.
func parseResponse(value pmodel.Value, query *PrometheusQuery) (map[string]*tsdb.QueryResult, error) {
	results := make(map[string]*tsdb.QueryResult)

	matrix, ok := value.(pmodel.Matrix)
	if !ok {
		// Only matrix results (range queries) are supported.
		return results, fmt.Errorf("Unsupported result format: %s", value.Type().String())
	}

	queryRes := tsdb.NewQueryResult()
	for _, sampleStream := range matrix {
		series := &tsdb.TimeSeries{
			Name: formatLegend(sampleStream.Metric, query),
		}

		for _, sample := range sampleStream.Values {
			point := tsdb.NewTimePoint(float64(sample.Value), float64(sample.Timestamp.Unix()*1000))
			series.Points = append(series.Points, point)
		}

		queryRes.Series = append(queryRes.Series, series)
	}

	results["A"] = queryRes
	return results, nil
}
// resultWithError attaches err to the batch result and returns it, keeping
// the error-return call sites in Execute as one-liners.
func resultWithError(result *tsdb.BatchResult, err error) *tsdb.BatchResult {
	result.Error = err
	return result
}

View File

@ -0,0 +1,26 @@
package prometheus
import (
"testing"
p "github.com/prometheus/common/model"
. "github.com/smartystreets/goconvey/convey"
)
// TestPrometheus covers legend formatting: known labels are substituted,
// unknown placeholders are left intact.
func TestPrometheus(t *testing.T) {
	Convey("Prometheus", t, func() {
		Convey("converting metric name", func() {
			// "broken" is intentionally absent from the metric, so its
			// placeholder must pass through unchanged.
			metric := map[p.LabelName]p.LabelValue{
				p.LabelName("app"):    p.LabelValue("backend"),
				p.LabelName("device"): p.LabelValue("mobile"),
			}

			query := &PrometheusQuery{
				LegendFormat: "legend {{app}} {{device}} {{broken}}",
			}

			So(formatLegend(metric, query), ShouldEqual, "legend backend mobile {{broken}}")
		})
	})
}

View File

@ -0,0 +1,11 @@
package prometheus
import "time"
// PrometheusQuery is the parsed form of a single Grafana query model,
// ready to be issued as a Prometheus range query (see parseQuery).
type PrometheusQuery struct {
	Expr         string        // PromQL expression
	Step         time.Duration // resolution step between data points
	LegendFormat string        // legend template with {{label}} placeholders
	Start        time.Time     // range start
	End          time.Time     // range end
}

View File

@ -1,12 +0,0 @@
package tsdb
type Query struct {
RefId string
Query string
Depends []string
DataSource *DataSourceInfo
Results []*TimeSeries
Exclude bool
}
type QuerySlice []*Query

View File

@ -3,7 +3,7 @@ package tsdb
import "sync"
type QueryContext struct {
TimeRange TimeRange
TimeRange *TimeRange
Queries QuerySlice
Results map[string]*QueryResult
ResultsChan chan *BatchResult
@ -11,7 +11,7 @@ type QueryContext struct {
BatchWaits sync.WaitGroup
}
func NewQueryContext(queries QuerySlice, timeRange TimeRange) *QueryContext {
func NewQueryContext(queries QuerySlice, timeRange *TimeRange) *QueryContext {
return &QueryContext{
TimeRange: timeRange,
Queries: queries,

130
pkg/tsdb/testdata/scenarios.go vendored Normal file
View File

@ -0,0 +1,130 @@
package testdata
import (
"math/rand"
"strconv"
"strings"
"time"
"github.com/grafana/grafana/pkg/log"
"github.com/grafana/grafana/pkg/tsdb"
)
// ScenarioHandler produces the fake query result for one test-data scenario.
type ScenarioHandler func(query *tsdb.Query, context *tsdb.QueryContext) *tsdb.QueryResult

// Scenario describes one selectable test-data scenario. The JSON tags expose
// it to the frontend; Handler is dispatch-only and excluded from JSON.
type Scenario struct {
	Id          string          `json:"id"`
	Name        string          `json:"name"`
	StringInput string          `json:"stringOption"`
	Description string          `json:"description"`
	Handler     ScenarioHandler `json:"-"`
}
var ScenarioRegistry map[string]*Scenario
// init builds the scenario registry with the built-in test-data
// generators. Each scenario's Handler synthesizes a QueryResult from the
// query model and the request's time range.
func init() {
	ScenarioRegistry = make(map[string]*Scenario)
	logger := log.New("tsdb.testdata")
	logger.Debug("Initializing TestData Scenario")

	// Random walk: one point per IntervalMs step across the range, value
	// drifting by a uniform step in [-0.5, 0.5), capped at 10000 points.
	registerScenario(&Scenario{
		Id:   "random_walk",
		Name: "Random Walk",

		Handler: func(query *tsdb.Query, context *tsdb.QueryContext) *tsdb.QueryResult {
			timeWalkerMs := context.TimeRange.GetFromAsMsEpoch()
			to := context.TimeRange.GetToAsMsEpoch()

			series := newSeriesForQuery(query)

			points := make(tsdb.TimeSeriesPoints, 0)
			walker := rand.Float64() * 100

			for i := int64(0); i < 10000 && timeWalkerMs < to; i++ {
				points = append(points, tsdb.NewTimePoint(walker, float64(timeWalkerMs)))
				walker += rand.Float64() - 0.5
				timeWalkerMs += query.IntervalMs
			}

			series.Points = points

			queryRes := tsdb.NewQueryResult()
			queryRes.Series = append(queryRes.Series, series)
			return queryRes
		},
	})

	// Empty result: exercises "no data" handling in panels.
	registerScenario(&Scenario{
		Id:   "no_data_points",
		Name: "No Data Points",
		Handler: func(query *tsdb.Query, context *tsdb.QueryContext) *tsdb.QueryResult {
			return tsdb.NewQueryResult()
		},
	})

	// Single point one hour before the range start: exercises range clipping.
	registerScenario(&Scenario{
		Id:   "datapoints_outside_range",
		Name: "Datapoints Outside Range",
		Handler: func(query *tsdb.Query, context *tsdb.QueryContext) *tsdb.QueryResult {
			queryRes := tsdb.NewQueryResult()
			series := newSeriesForQuery(query)
			outsideTime := context.TimeRange.MustGetFrom().Add(-1*time.Hour).Unix() * 1000
			series.Points = append(series.Points, tsdb.NewTimePoint(10, float64(outsideTime)))
			queryRes.Series = append(queryRes.Series, series)
			return queryRes
		},
	})

	// User-supplied comma-separated values, spread evenly across the range.
	// Unparseable entries are skipped.
	registerScenario(&Scenario{
		Id:          "csv_metric_values",
		Name:        "CSV Metric Values",
		StringInput: "1,20,90,30,5,0",
		Handler: func(query *tsdb.Query, context *tsdb.QueryContext) *tsdb.QueryResult {
			queryRes := tsdb.NewQueryResult()

			stringInput := query.Model.Get("stringInput").MustString()
			values := []float64{}
			for _, strVal := range strings.Split(stringInput, ",") {
				if val, err := strconv.ParseFloat(strVal, 64); err == nil {
					values = append(values, val)
				}
			}

			if len(values) == 0 {
				return queryRes
			}

			series := newSeriesForQuery(query)
			startTime := context.TimeRange.GetFromAsMsEpoch()
			endTime := context.TimeRange.GetToAsMsEpoch()

			// Bug fix: with exactly one parsed value the original computed
			// (endTime-startTime)/int64(len(values)-1), an integer division
			// by zero that panics at runtime. Guard so a single value
			// yields one point at the range start.
			step := int64(0)
			if len(values) > 1 {
				step = (endTime - startTime) / int64(len(values)-1)
			}

			for _, val := range values {
				series.Points = append(series.Points, tsdb.NewTimePoint(val, float64(startTime)))
				startTime += step
			}

			queryRes.Series = append(queryRes.Series, series)
			return queryRes
		},
	})
}
// registerScenario adds a scenario to the global registry, keyed by its id.
// Later registrations with the same id silently overwrite earlier ones.
func registerScenario(scenario *Scenario) {
	ScenarioRegistry[scenario.Id] = scenario
}
func newSeriesForQuery(query *tsdb.Query) *tsdb.TimeSeries {
alias := query.Model.Get("alias").MustString("")
if alias == "" {
alias = query.RefId + "-series"
}
return &tsdb.TimeSeries{Name: alias}
}

39
pkg/tsdb/testdata/testdata.go vendored Normal file
View File

@ -0,0 +1,39 @@
package testdata
import (
"github.com/grafana/grafana/pkg/log"
"github.com/grafana/grafana/pkg/tsdb"
)
// TestDataExecutor serves the built-in "grafana-testdata-datasource",
// generating synthetic series from the registered scenarios.
type TestDataExecutor struct {
	*tsdb.DataSourceInfo            // embedded datasource settings
	log log.Logger // scoped logger ("tsdb.testdata")
}
// NewTestDataExecutor returns an Executor for the test-data datasource,
// wrapping the given datasource settings with a scoped logger.
func NewTestDataExecutor(dsInfo *tsdb.DataSourceInfo) tsdb.Executor {
	executor := &TestDataExecutor{log: log.New("tsdb.testdata")}
	executor.DataSourceInfo = dsInfo
	return executor
}
// init registers this executor under the test-data plugin id so the
// tsdb engine can route matching queries to it.
func init() {
	tsdb.RegisterExecutor("grafana-testdata-datasource", NewTestDataExecutor)
}
// Execute runs every query through its requested scenario (model key
// "scenarioId", defaulting to "random_walk") and collects the results
// keyed by RefId. Unknown scenario ids are logged and skipped.
func (e *TestDataExecutor) Execute(queries tsdb.QuerySlice, context *tsdb.QueryContext) *tsdb.BatchResult {
	result := &tsdb.BatchResult{}
	result.QueryResults = make(map[string]*tsdb.QueryResult)

	for _, query := range queries {
		scenarioId := query.Model.Get("scenarioId").MustString("random_walk")

		scenario, exist := ScenarioRegistry[scenarioId]
		if !exist {
			e.log.Error("Scenario not found", "scenarioId", scenarioId)
			continue
		}

		queryRes := scenario.Handler(query, context)
		queryRes.RefId = query.RefId
		result.QueryResults[query.RefId] = queryRes
	}

	return result
}

90
pkg/tsdb/time_range.go Normal file
View File

@ -0,0 +1,90 @@
package tsdb
import (
"fmt"
"strconv"
"strings"
"time"
)
// NewTimeRange builds a TimeRange from raw "from"/"to" expressions,
// capturing the current wall-clock time as the reference instant for
// relative expressions such as "now-5m".
func NewTimeRange(from, to string) *TimeRange {
	tr := TimeRange{
		Now:  time.Now(),
		From: from,
		To:   to,
	}
	return &tr
}
// TimeRange holds the raw from/to expressions of a query time range,
// plus the reference instant (Now) used to resolve relative expressions.
type TimeRange struct {
	From string // unix-ms epoch string or relative expression ("5m", "now-5m")
	To   string // "now", "now-<duration>", or unix-ms epoch string
	Now  time.Time // reference time for relative expressions
}
// GetFromAsMsEpoch returns the resolved "from" instant as milliseconds
// since the Unix epoch (epoch zero on parse failure, per MustGetFrom).
func (tr *TimeRange) GetFromAsMsEpoch() int64 {
	from := tr.MustGetFrom()
	return from.UnixNano() / int64(time.Millisecond)
}
// GetToAsMsEpoch returns the resolved "to" instant as milliseconds
// since the Unix epoch (epoch zero on parse failure, per MustGetTo).
func (tr *TimeRange) GetToAsMsEpoch() int64 {
	to := tr.MustGetTo()
	return to.UnixNano() / int64(time.Millisecond)
}
// MustGetFrom resolves the "from" expression, falling back to the Unix
// epoch (time.Unix(0, 0)) instead of propagating a parse error.
func (tr *TimeRange) MustGetFrom() time.Time {
	res, err := tr.ParseFrom()
	if err != nil {
		return time.Unix(0, 0)
	}
	return res
}
// MustGetTo resolves the "to" expression, falling back to the Unix
// epoch (time.Unix(0, 0)) instead of propagating a parse error.
func (tr *TimeRange) MustGetTo() time.Time {
	res, err := tr.ParseTo()
	if err != nil {
		return time.Unix(0, 0)
	}
	return res
}
func tryParseUnixMsEpoch(val string) (time.Time, bool) {
if val, err := strconv.ParseInt(val, 10, 64); err == nil {
seconds := val / 1000
nano := (val - seconds*1000) * 1000000
return time.Unix(seconds, nano), true
}
return time.Time{}, false
}
// ParseFrom resolves the "from" expression: either a unix-ms epoch
// string, or a relative duration ("5m" or "now-5m") subtracted from
// tr.Now. Returns the duration-parse error for malformed input.
func (tr *TimeRange) ParseFrom() (time.Time, error) {
	if t, ok := tryParseUnixMsEpoch(tr.From); ok {
		return t, nil
	}

	// Strip an optional "now-" prefix; what remains must be a duration.
	relative := strings.Replace(tr.From, "now-", "", 1)
	offset, err := time.ParseDuration("-" + relative)
	if err != nil {
		return time.Time{}, err
	}
	return tr.Now.Add(offset), nil
}
// ParseTo resolves the "to" expression: the literal "now", a relative
// "now-<duration>" offset from tr.Now, or a unix-ms epoch string.
// Returns an error for anything else.
func (tr *TimeRange) ParseTo() (time.Time, error) {
	if tr.To == "now" {
		return tr.Now, nil
	} else if strings.HasPrefix(tr.To, "now-") {
		withoutNow := strings.Replace(tr.To, "now-", "", 1)

		diff, err := time.ParseDuration("-" + withoutNow)
		if err != nil {
			// Bug fix: previously returned a nil error here, silently
			// handing callers the zero time for malformed durations
			// (e.g. "now-xyz"). Propagate the error like ParseFrom does.
			return time.Time{}, err
		}
		return tr.Now.Add(diff), nil
	}

	if res, ok := tryParseUnixMsEpoch(tr.To); ok {
		return res, nil
	}

	return time.Time{}, fmt.Errorf("cannot parse to value %s", tr.To)
}

View File

@ -0,0 +1,95 @@
package tsdb
import (
"testing"
"time"
. "github.com/smartystreets/goconvey/convey"
)
// TestTimeRange covers TimeRange parsing: relative expressions ("5m",
// "now", "now-10m"), unix millisecond epochs, and invalid input.
func TestTimeRange(t *testing.T) {
	Convey("Time range", t, func() {
		now := time.Now()

		Convey("Can parse 5m, now", func() {
			tr := TimeRange{
				From: "5m",
				To:   "now",
				Now:  now,
			}

			Convey("5m ago ", func() {
				fiveMinAgo, _ := time.ParseDuration("-5m")
				expected := now.Add(fiveMinAgo)

				res, err := tr.ParseFrom()
				So(err, ShouldBeNil)
				So(res.Unix(), ShouldEqual, expected.Unix())
			})

			Convey("now ", func() {
				res, err := tr.ParseTo()
				So(err, ShouldBeNil)
				So(res.Unix(), ShouldEqual, now.Unix())
			})
		})

		Convey("Can parse 5h, now-10m", func() {
			tr := TimeRange{
				From: "5h",
				To:   "now-10m",
				Now:  now,
			}

			Convey("5h ago ", func() {
				fiveHourAgo, _ := time.ParseDuration("-5h")
				expected := now.Add(fiveHourAgo)

				res, err := tr.ParseFrom()
				So(err, ShouldBeNil)
				So(res.Unix(), ShouldEqual, expected.Unix())
			})

			Convey("now-10m ", func() {
				// NOTE(review): variable is named fiveMinAgo but holds -10m.
				fiveMinAgo, _ := time.ParseDuration("-10m")
				expected := now.Add(fiveMinAgo)
				res, err := tr.ParseTo()
				So(err, ShouldBeNil)
				So(res.Unix(), ShouldEqual, expected.Unix())
			})
		})

		Convey("can parse unix epocs", func() {
			var err error
			tr := TimeRange{
				From: "1474973725473",
				To:   "1474975757930",
				Now:  now,
			}

			// Millisecond precision must survive the round trip.
			res, err := tr.ParseFrom()
			So(err, ShouldBeNil)
			So(res.UnixNano()/int64(time.Millisecond), ShouldEqual, 1474973725473)

			res, err = tr.ParseTo()
			So(err, ShouldBeNil)
			So(res.UnixNano()/int64(time.Millisecond), ShouldEqual, 1474975757930)
		})

		Convey("Cannot parse asdf", func() {
			// Neither epoch, "now", nor duration: both parsers must error.
			var err error
			tr := TimeRange{
				From: "asdf",
				To:   "asdf",
				Now:  now,
			}

			_, err = tr.ParseFrom()
			So(err, ShouldNotBeNil)

			_, err = tr.ParseTo()
			So(err, ShouldNotBeNil)
		})
	})
}

View File

@ -14,9 +14,9 @@ func TestMetricQuery(t *testing.T) {
Convey("Given 3 queries for 2 data sources", func() {
request := &Request{
Queries: QuerySlice{
{RefId: "A", Query: "asd", DataSource: &DataSourceInfo{Id: 1}},
{RefId: "B", Query: "asd", DataSource: &DataSourceInfo{Id: 1}},
{RefId: "C", Query: "asd", DataSource: &DataSourceInfo{Id: 2}},
{RefId: "A", DataSource: &DataSourceInfo{Id: 1}},
{RefId: "B", DataSource: &DataSourceInfo{Id: 1}},
{RefId: "C", DataSource: &DataSourceInfo{Id: 2}},
},
}
@ -31,9 +31,9 @@ func TestMetricQuery(t *testing.T) {
Convey("Given query 2 depends on query 1", func() {
request := &Request{
Queries: QuerySlice{
{RefId: "A", Query: "asd", DataSource: &DataSourceInfo{Id: 1}},
{RefId: "B", Query: "asd", DataSource: &DataSourceInfo{Id: 2}},
{RefId: "C", Query: "#A / #B", DataSource: &DataSourceInfo{Id: 3}, Depends: []string{"A", "B"}},
{RefId: "A", DataSource: &DataSourceInfo{Id: 1}},
{RefId: "B", DataSource: &DataSourceInfo{Id: 2}},
{RefId: "C", DataSource: &DataSourceInfo{Id: 3}, Depends: []string{"A", "B"}},
},
}
@ -55,7 +55,7 @@ func TestMetricQuery(t *testing.T) {
Convey("When executing request with one query", t, func() {
req := &Request{
Queries: QuerySlice{
{RefId: "A", Query: "asd", DataSource: &DataSourceInfo{Id: 1, PluginId: "test"}},
{RefId: "A", DataSource: &DataSourceInfo{Id: 1, PluginId: "test"}},
},
}
@ -74,8 +74,8 @@ func TestMetricQuery(t *testing.T) {
Convey("When executing one request with two queries from same data source", t, func() {
req := &Request{
Queries: QuerySlice{
{RefId: "A", Query: "asd", DataSource: &DataSourceInfo{Id: 1, PluginId: "test"}},
{RefId: "B", Query: "asd", DataSource: &DataSourceInfo{Id: 1, PluginId: "test"}},
{RefId: "A", DataSource: &DataSourceInfo{Id: 1, PluginId: "test"}},
{RefId: "B", DataSource: &DataSourceInfo{Id: 1, PluginId: "test"}},
},
}
@ -100,9 +100,9 @@ func TestMetricQuery(t *testing.T) {
Convey("When executing one request with three queries from different datasources", t, func() {
req := &Request{
Queries: QuerySlice{
{RefId: "A", Query: "asd", DataSource: &DataSourceInfo{Id: 1, PluginId: "test"}},
{RefId: "B", Query: "asd", DataSource: &DataSourceInfo{Id: 1, PluginId: "test"}},
{RefId: "C", Query: "asd", DataSource: &DataSourceInfo{Id: 2, PluginId: "test"}},
{RefId: "A", DataSource: &DataSourceInfo{Id: 1, PluginId: "test"}},
{RefId: "B", DataSource: &DataSourceInfo{Id: 1, PluginId: "test"}},
{RefId: "C", DataSource: &DataSourceInfo{Id: 2, PluginId: "test"}},
},
}
@ -117,7 +117,7 @@ func TestMetricQuery(t *testing.T) {
Convey("When query uses data source of unknown type", t, func() {
req := &Request{
Queries: QuerySlice{
{RefId: "A", Query: "asd", DataSource: &DataSourceInfo{Id: 1, PluginId: "asdasdas"}},
{RefId: "A", DataSource: &DataSourceInfo{Id: 1, PluginId: "asdasdas"}},
},
}
@ -129,10 +129,10 @@ func TestMetricQuery(t *testing.T) {
req := &Request{
Queries: QuerySlice{
{
RefId: "A", Query: "asd", DataSource: &DataSourceInfo{Id: 1, PluginId: "test"},
RefId: "A", DataSource: &DataSourceInfo{Id: 1, PluginId: "test"},
},
{
RefId: "B", Query: "#A / 2", DataSource: &DataSourceInfo{Id: 2, PluginId: "test"}, Depends: []string{"A"},
RefId: "B", DataSource: &DataSourceInfo{Id: 2, PluginId: "test"}, Depends: []string{"A"},
},
},
}

View File

@ -41,6 +41,7 @@ import 'app/core/routes/routes';
import './filters/filters';
import coreModule from './core_module';
import appEvents from './app_events';
import colors from './utils/colors';
export {
@ -60,4 +61,5 @@ export {
dashboardSelector,
queryPartEditorDirective,
WizardFlow,
colors,
};

View File

@ -23,10 +23,10 @@ function (_, $, coreModule) {
getOptions: "&",
onChange: "&",
},
link: function($scope, elem, attrs) {
link: function($scope, elem) {
var $input = $(inputTemplate);
var $button = $(attrs.styleMode === 'select' ? selectTemplate : linkTemplate);
var segment = $scope.segment;
var $button = $(segment.selectMode ? selectTemplate : linkTemplate);
var options = null;
var cancelBlur = null;
var linkMode = true;
@ -170,6 +170,7 @@ function (_, $, coreModule) {
},
link: {
pre: function postLink($scope, elem, attrs) {
var cachedOptions;
$scope.valueToSegment = function(value) {
var option = _.find($scope.options, {value: value});
@ -177,7 +178,9 @@ function (_, $, coreModule) {
cssClass: attrs.cssClass,
custom: attrs.custom,
value: option ? option.text : value,
selectMode: attrs.selectMode,
};
return uiSegmentSrv.newSegment(segment);
};
@ -188,13 +191,20 @@ function (_, $, coreModule) {
});
return $q.when(optionSegments);
} else {
return $scope.getOptions();
return $scope.getOptions().then(function(options) {
cachedOptions = options;
return _.map(options, function(option) {
return uiSegmentSrv.newSegment({value: option.text});
});
});
}
};
$scope.onSegmentChange = function() {
if ($scope.options) {
var option = _.find($scope.options, {text: $scope.segment.value});
var options = $scope.options || cachedOptions;
if (options) {
var option = _.find(options, {text: $scope.segment.value});
if (option && option.value !== $scope.property) {
$scope.property = option.value;
} else if (attrs.custom !== 'false') {

View File

@ -114,6 +114,10 @@ export class BackendSrv {
var requestIsLocal = options.url.indexOf('/') === 0;
var firstAttempt = options.retry === 0;
if (requestIsLocal && !options.hasSubUrl && options.retry === 0) {
options.url = config.appSubUrl + options.url;
}
if (requestIsLocal && options.headers && options.headers.Authorization) {
options.headers['X-DS-Authorization'] = options.headers.Authorization;
delete options.headers.Authorization;

View File

@ -28,6 +28,7 @@ function (angular, _, coreModule) {
this.type = options.type;
this.fake = options.fake;
this.value = options.value;
this.selectMode = options.selectMode;
this.type = options.type;
this.expandable = options.expandable;
this.html = options.html || $sce.trustAsHtml(templateSrv.highlightVariablesAsHtml(this.value));

View File

@ -31,6 +31,8 @@ export default class TimeSeries {
allIsZero: boolean;
decimals: number;
scaledDecimals: number;
hasMsResolution: boolean;
isOutsideRange: boolean;
lines: any;
bars: any;
@ -54,6 +56,7 @@ export default class TimeSeries {
this.stats = {};
this.legend = true;
this.unit = opts.unit;
this.hasMsResolution = this.isMsResolutionNeeded();
}
applySeriesOverrides(overrides) {

View File

@ -0,0 +1,12 @@
export default [
"#7EB26D","#EAB839","#6ED0E0","#EF843C","#E24D42","#1F78C1","#BA43A9","#705DA0",
"#508642","#CCA300","#447EBC","#C15C17","#890F02","#0A437C","#6D1F62","#584477",
"#B7DBAB","#F4D598","#70DBED","#F9BA8F","#F29191","#82B5D8","#E5A8E2","#AEA2E0",
"#629E51","#E5AC0E","#64B0C8","#E0752D","#BF1B00","#0A50A1","#962D82","#614D93",
"#9AC48A","#F2C96D","#65C5DB","#F9934E","#EA6460","#5195CE","#D683CE","#806EB7",
"#3F6833","#967302","#2F575E","#99440A","#58140C","#052B51","#511749","#3F2B5B",
"#E0F9D7","#FCEACA","#CFFAFF","#F9E2D2","#FCE2DE","#BADFF4","#F9D9F9","#DEDAF7"
];

View File

@ -174,7 +174,10 @@ function($, _, moment) {
lowLimitMs = kbn.interval_to_ms(lowLimitInterval);
}
else {
return userInterval;
return {
intervalMs: kbn.interval_to_ms(userInterval),
interval: userInterval,
};
}
}
@ -183,7 +186,10 @@ function($, _, moment) {
intervalMs = lowLimitMs;
}
return kbn.secondsToHms(intervalMs / 1000);
return {
intervalMs: intervalMs,
interval: kbn.secondsToHms(intervalMs / 1000),
};
};
kbn.describe_interval = function (string) {

View File

@ -227,8 +227,8 @@ export class AlertTabCtrl {
var datasourceName = foundTarget.datasource || this.panel.datasource;
this.datasourceSrv.get(datasourceName).then(ds => {
if (ds.meta.id !== 'graphite') {
this.error = 'Currently the alerting backend only supports Graphite queries';
if (!ds.meta.alerting) {
this.error = 'The datasource does not support alerting queries';
} else if (this.templateSrv.variableExists(foundTarget.target)) {
this.error = 'Template variables are not supported in alert queries';
} else {

View File

@ -30,6 +30,7 @@ export class DashboardModel {
snapshot: any;
schemaVersion: number;
version: number;
revision: number;
links: any;
gnetId: any;
meta: any;
@ -42,6 +43,7 @@ export class DashboardModel {
this.events = new Emitter();
this.id = data.id || null;
this.revision = data.revision;
this.title = data.title || 'No Title';
this.autoUpdate = data.autoUpdate;
this.description = data.description;

View File

@ -8,7 +8,7 @@ function (angular, _, require, config) {
var module = angular.module('grafana.controllers');
module.controller('ShareModalCtrl', function($scope, $rootScope, $location, $timeout, timeSrv, $element, templateSrv, linkSrv) {
module.controller('ShareModalCtrl', function($scope, $rootScope, $location, $timeout, timeSrv, templateSrv, linkSrv) {
$scope.options = { forCurrent: true, includeTemplateVars: true, theme: 'current' };
$scope.editor = { index: $scope.tabIndex || 0};

View File

@ -2,7 +2,7 @@
<div ng-repeat="variable in ctrl.variables" ng-hide="variable.hide === 2" class="submenu-item gf-form-inline">
<div class="gf-form">
<label class="gf-form-label template-variable" ng-hide="variable.hide === 1">
{{variable.label || variable.name}}:
{{variable.label || variable.name}}
</label>
<value-select-dropdown ng-if="variable.type !== 'adhoc'" variable="variable" on-updated="ctrl.variableUpdated(variable)" get-values-for-tag="ctrl.getValuesForTag(variable, tagKey)"></value-select-dropdown>
</div>

View File

@ -16,7 +16,7 @@ var template = `
Panel data source
</label>
<metric-segment segment="ctrl.dsSegment" style-mode="select"
<metric-segment segment="ctrl.dsSegment"
get-options="ctrl.getOptions()"
on-change="ctrl.datasourceChanged()"></metric-segment>
</div>
@ -67,7 +67,7 @@ export class MetricsDsSelectorCtrl {
this.current = {name: dsValue + ' not found', value: null};
}
this.dsSegment = uiSegmentSrv.newSegment(this.current.name);
this.dsSegment = uiSegmentSrv.newSegment({value: this.current.name, selectMode: true});
}
getOptions() {

View File

@ -25,6 +25,7 @@ class MetricsPanelCtrl extends PanelCtrl {
range: any;
rangeRaw: any;
interval: any;
intervalMs: any;
resolution: any;
timeInfo: any;
skipDataOnInit: boolean;
@ -123,11 +124,22 @@ class MetricsPanelCtrl extends PanelCtrl {
this.resolution = Math.ceil($(window).width() * (this.panel.span / 12));
}
var panelInterval = this.panel.interval;
var datasourceInterval = (this.datasource || {}).interval;
this.interval = kbn.calculateInterval(this.range, this.resolution, panelInterval || datasourceInterval);
this.calculateInterval();
};
calculateInterval() {
var intervalOverride = this.panel.interval;
// if no panel interval check datasource
if (!intervalOverride && this.datasource && this.datasource.interval) {
intervalOverride = this.datasource.interval;
}
var res = kbn.calculateInterval(this.range, this.resolution, intervalOverride);
this.interval = res.interval;
this.intervalMs = res.intervalMs;
}
applyPanelTimeOverrides() {
this.timeInfo = '';
@ -183,6 +195,7 @@ class MetricsPanelCtrl extends PanelCtrl {
range: this.range,
rangeRaw: this.rangeRaw,
interval: this.interval,
intervalMs: this.intervalMs,
targets: this.panel.targets,
format: this.panel.renderer === 'png' ? 'png' : 'json',
maxDataPoints: this.resolution,

View File

@ -25,7 +25,7 @@
</div>
<div class="row">
<div class="col-md-6">
<div class="col-lg-6">
<div class="playlist-search-containerwrapper">
<div class="max-width-32">
<h5 class="page-headering playlist-column-header">Available</h5>
@ -72,7 +72,7 @@
</div>
</div>
<div class="col-md-6">
<div class="col-lg-6">
<h5 class="page headering playlist-column-header">Selected</h5>
<table class="grafana-options-table playlist-available-list">
<tr ng-repeat="playlistItem in ctrl.playlistItems">

View File

@ -14,7 +14,7 @@ export class PlaylistSearchCtrl {
/** @ngInject */
constructor(private $scope, private $location, private $timeout, private backendSrv, private contextSrv) {
this.query = { query: '', tag: [], starred: false };
this.query = {query: '', tag: [], starred: false, limit: 30};
$timeout(() => {
this.query.query = '';

View File

@ -3,7 +3,9 @@
<div class="page-container">
<div class="page-header">
<h1>Plugins</h1>
<h1>
Plugins <span class="muted small">(currently installed)</span>
</h1>
<div class="page-header-tabs">
<ul class="gf-tabs">
@ -25,7 +27,7 @@
</ul>
<a class="get-more-plugins-link" href="https://grafana.net/plugins?utm_source=grafana_plugin_list" target="_blank">
Find plugins on
Find more plugins on
</a>
</div>
</div>

View File

@ -18,7 +18,7 @@ export class ConstantVariable implements Variable {
current: {},
};
/** @ngInject */
/** @ngInject **/
constructor(private model, private variableSrv) {
assignModelProperties(this, model, this.defaults);
}

View File

@ -10,6 +10,7 @@ export class DatasourceVariable implements Variable {
query: string;
options: any;
current: any;
refresh: any;
defaults = {
type: 'datasource',
@ -20,11 +21,13 @@ export class DatasourceVariable implements Variable {
regex: '',
options: [],
query: '',
refresh: 1,
};
/** @ngInject */
/** @ngInject **/
constructor(private model, private datasourceSrv, private variableSrv) {
assignModelProperties(this, model, this.defaults);
this.refresh = 1;
}
getModel() {

View File

@ -6,7 +6,7 @@ import {variableTypes} from './variable';
export class VariableEditorCtrl {
/** @ngInject */
/** @ngInject **/
constructor(private $scope, private datasourceSrv, private variableSrv, templateSrv) {
$scope.variableTypes = variableTypes;
$scope.ctrl = {};

View File

@ -28,7 +28,7 @@ export class IntervalVariable implements Variable {
auto_count: 30,
};
/** @ngInject */
/** @ngInject **/
constructor(private model, private timeSrv, private templateSrv, private variableSrv) {
assignModelProperties(this, model, this.defaults);
this.refresh = 2;
@ -54,8 +54,8 @@ export class IntervalVariable implements Variable {
this.options.unshift({ text: 'auto', value: '$__auto_interval' });
}
var interval = kbn.calculateInterval(this.timeSrv.timeRange(), this.auto_count, (this.auto_min ? ">"+this.auto_min : null));
this.templateSrv.setGrafanaVariable('$__auto_interval', interval);
var res = kbn.calculateInterval(this.timeSrv.timeRange(), this.auto_count, (this.auto_min ? ">"+this.auto_min : null));
this.templateSrv.setGrafanaVariable('$__auto_interval', res.interval);
}
updateOptions() {

View File

@ -40,6 +40,7 @@ export class QueryVariable implements Variable {
tagValuesQuery: null,
};
/** @ngInject **/
constructor(private model, private datasourceSrv, private templateSrv, private variableSrv, private $q) {
// copy model properties to this instance
assignModelProperties(this, model, this.defaults);

View File

@ -62,6 +62,7 @@ describe('VariableSrv init', function() {
options: [{text: "test", value: "test"}]
}];
scenario.urlParams["var-apps"] = "new";
scenario.metricSources = [];
});
it('should update current value', () => {
@ -110,6 +111,30 @@ describe('VariableSrv init', function() {
});
});
describeInitScenario('when datasource variable is initialized', scenario => {
scenario.setup(() => {
scenario.variables = [{
type: 'datasource',
query: 'graphite',
name: 'test',
current: {value: 'backend4_pee', text: 'backend4_pee'},
regex: '/pee$/'
}
];
scenario.metricSources = [
{name: 'backend1', meta: {id: 'influx'}},
{name: 'backend2_pee', meta: {id: 'graphite'}},
{name: 'backend3', meta: {id: 'graphite'}},
{name: 'backend4_pee', meta: {id: 'graphite'}},
];
});
it('should update current value', function() {
var variable = ctx.variableSrv.variables[0];
expect(variable.options.length).to.be(2);
});
});
describeInitScenario('when template variable is present in url multiple times', scenario => {
scenario.setup(() => {
scenario.variables = [{

View File

@ -43,6 +43,10 @@ function (angular, _, kbn) {
}
};
this.variableInitialized = function(variable) {
this._index[variable.name] = variable;
};
this.getAdhocFilters = function(datasourceName) {
var variable = this._adhocVariables[datasourceName];
if (variable) {

View File

@ -1,417 +0,0 @@
define([
'angular',
'lodash',
'jquery',
'app/core/utils/kbn',
],
function (angular, _, $, kbn) {
'use strict';
var module = angular.module('grafana.services');
module.service('templateValuesSrv', function($q, $rootScope, datasourceSrv, $location, templateSrv, timeSrv) {
var self = this;
this.variableLock = {};
function getNoneOption() { return { text: 'None', value: '', isNone: true }; }
// update time variant variables
$rootScope.onAppEvent('refresh', function() {
// look for interval variables
var intervalVariable = _.find(self.variables, { type: 'interval' });
if (intervalVariable) {
self.updateAutoInterval(intervalVariable);
}
// update variables with refresh === 2
var promises = self.variables
.filter(function(variable) {
return variable.refresh === 2;
}).map(function(variable) {
var previousOptions = variable.options.slice();
return self.updateOptions(variable).then(function () {
return self.variableUpdated(variable).then(function () {
// check if current options changed due to refresh
if (angular.toJson(previousOptions) !== angular.toJson(variable.options)) {
$rootScope.appEvent('template-variable-value-updated');
}
});
});
});
return $q.all(promises);
}, $rootScope);
this.init = function(dashboard) {
this.dashboard = dashboard;
this.variables = dashboard.templating.list;
templateSrv.init(this.variables);
var queryParams = $location.search();
var promises = [];
// use promises to delay processing variables that
// depend on other variables.
this.variableLock = {};
_.forEach(this.variables, function(variable) {
self.variableLock[variable.name] = $q.defer();
});
for (var i = 0; i < this.variables.length; i++) {
var variable = this.variables[i];
promises.push(this.processVariable(variable, queryParams));
}
return $q.all(promises);
};
this.processVariable = function(variable, queryParams) {
var dependencies = [];
var lock = self.variableLock[variable.name];
// determine our dependencies.
if (variable.type === "query") {
_.forEach(this.variables, function(v) {
// both query and datasource can contain variable
if (templateSrv.containsVariable(variable.query, v.name) ||
templateSrv.containsVariable(variable.datasource, v.name)) {
dependencies.push(self.variableLock[v.name].promise);
}
});
}
return $q.all(dependencies).then(function() {
var urlValue = queryParams['var-' + variable.name];
if (urlValue !== void 0) {
return self.setVariableFromUrl(variable, urlValue).then(lock.resolve);
}
else if (variable.refresh === 1 || variable.refresh === 2) {
return self.updateOptions(variable).then(function() {
if (_.isEmpty(variable.current) && variable.options.length) {
self.setVariableValue(variable, variable.options[0]);
}
lock.resolve();
});
}
else if (variable.type === 'interval') {
self.updateAutoInterval(variable);
lock.resolve();
} else {
lock.resolve();
}
}).finally(function() {
delete self.variableLock[variable.name];
});
};
this.setVariableFromUrl = function(variable, urlValue) {
var promise = $q.when(true);
if (variable.refresh) {
promise = this.updateOptions(variable);
}
return promise.then(function() {
var option = _.find(variable.options, function(op) {
return op.text === urlValue || op.value === urlValue;
});
option = option || { text: urlValue, value: urlValue };
self.updateAutoInterval(variable);
return self.setVariableValue(variable, option, true);
});
};
this.updateAutoInterval = function(variable) {
if (!variable.auto) { return; }
// add auto option if missing
if (variable.options.length && variable.options[0].text !== 'auto') {
variable.options.unshift({ text: 'auto', value: '$__auto_interval' });
}
var interval = kbn.calculateInterval(timeSrv.timeRange(), variable.auto_count, (variable.auto_min ? ">"+variable.auto_min : null));
templateSrv.setGrafanaVariable('$__auto_interval', interval);
};
this.setVariableValue = function(variable, option) {
variable.current = angular.copy(option);
if (_.isArray(variable.current.text)) {
variable.current.text = variable.current.text.join(' + ');
}
self.selectOptionsForCurrentValue(variable);
templateSrv.updateTemplateData();
return this.updateOptionsInChildVariables(variable);
};
this.variableUpdated = function(variable) {
templateSrv.updateTemplateData();
return self.updateOptionsInChildVariables(variable);
};
this.updateOptionsInChildVariables = function(updatedVariable) {
// if there is a variable lock ignore cascading update because we are in a boot up scenario
if (self.variableLock[updatedVariable.name]) {
return $q.when();
}
var promises = _.map(self.variables, function(otherVariable) {
if (otherVariable === updatedVariable) {
return;
}
if (templateSrv.containsVariable(otherVariable.regex, updatedVariable.name) ||
templateSrv.containsVariable(otherVariable.query, updatedVariable.name) ||
templateSrv.containsVariable(otherVariable.datasource, updatedVariable.name)) {
return self.updateOptions(otherVariable);
}
});
return $q.all(promises);
};
this._updateNonQueryVariable = function(variable) {
if (variable.type === 'datasource') {
self.updateDataSourceVariable(variable);
return;
}
if (variable.type === 'constant') {
variable.options = [{text: variable.query, value: variable.query}];
return;
}
if (variable.type === 'adhoc') {
variable.current = {};
variable.options = [];
return;
}
// extract options in comma separated string
variable.options = _.map(variable.query.split(/[,]+/), function(text) {
return { text: text.trim(), value: text.trim() };
});
if (variable.type === 'interval') {
self.updateAutoInterval(variable);
return;
}
if (variable.type === 'custom' && variable.includeAll) {
self.addAllOption(variable);
}
};
this.updateDataSourceVariable = function(variable) {
var options = [];
var sources = datasourceSrv.getMetricSources({skipVariables: true});
var regex;
if (variable.regex) {
regex = kbn.stringToJsRegex(templateSrv.replace(variable.regex));
}
for (var i = 0; i < sources.length; i++) {
var source = sources[i];
// must match on type
if (source.meta.id !== variable.query) {
continue;
}
if (regex && !regex.exec(source.name)) {
continue;
}
options.push({text: source.name, value: source.name});
}
if (options.length === 0) {
options.push({text: 'No data sources found', value: ''});
}
variable.options = options;
};
this.updateOptions = function(variable) {
if (variable.type !== 'query') {
self._updateNonQueryVariable(variable);
return self.validateVariableSelectionState(variable);
}
return datasourceSrv.get(variable.datasource)
.then(_.partial(this.updateOptionsFromMetricFindQuery, variable))
.then(_.partial(this.updateTags, variable))
.then(_.partial(this.validateVariableSelectionState, variable));
};
this.selectOptionsForCurrentValue = function(variable) {
var i, y, value, option;
var selected = [];
for (i = 0; i < variable.options.length; i++) {
option = variable.options[i];
option.selected = false;
if (_.isArray(variable.current.value)) {
for (y = 0; y < variable.current.value.length; y++) {
value = variable.current.value[y];
if (option.value === value) {
option.selected = true;
selected.push(option);
}
}
} else if (option.value === variable.current.value) {
option.selected = true;
selected.push(option);
}
}
return selected;
};
this.validateVariableSelectionState = function(variable) {
if (!variable.current) {
if (!variable.options.length) { return $q.when(); }
return self.setVariableValue(variable, variable.options[0], false);
}
if (_.isArray(variable.current.value)) {
var selected = self.selectOptionsForCurrentValue(variable);
// if none pick first
if (selected.length === 0) {
selected = variable.options[0];
} else {
selected = {
value: _.map(selected, function(val) {return val.value;}),
text: _.map(selected, function(val) {return val.text;}).join(' + '),
};
}
return self.setVariableValue(variable, selected, false);
} else {
var currentOption = _.find(variable.options, {text: variable.current.text});
if (currentOption) {
return self.setVariableValue(variable, currentOption, false);
} else {
if (!variable.options.length) { return $q.when(null); }
return self.setVariableValue(variable, variable.options[0]);
}
}
};
this.updateTags = function(variable, datasource) {
if (variable.useTags) {
return datasource.metricFindQuery(variable.tagsQuery).then(function (results) {
variable.tags = [];
for (var i = 0; i < results.length; i++) {
variable.tags.push(results[i].text);
}
return datasource;
});
} else {
delete variable.tags;
}
return datasource;
};
this.updateOptionsFromMetricFindQuery = function(variable, datasource) {
return datasource.metricFindQuery(variable.query).then(function (results) {
variable.options = self.metricNamesToVariableValues(variable, results);
if (variable.includeAll) {
self.addAllOption(variable);
}
if (!variable.options.length) {
variable.options.push(getNoneOption());
}
return datasource;
});
};
this.getValuesForTag = function(variable, tagKey) {
  // Interpolate the tag key into the tag-values query, then return result texts.
  return datasourceSrv.get(variable.datasource).then(function(datasource) {
    var interpolated = variable.tagValuesQuery.replace('$tag', tagKey);
    return datasource.metricFindQuery(interpolated).then(function(results) {
      return _.map(results, function(res) { return res.text; });
    });
  });
};
this.metricNamesToVariableValues = function(variable, metricNames) {
  // Convert metric lookup results into {text, value} options, optionally
  // filtering/extracting via the variable's regex, deduping and sorting.
  var options = [];
  var regex;

  if (variable.regex) {
    regex = kbn.stringToJsRegex(templateSrv.replace(variable.regex));
  }

  for (var i = 0; i < metricNames.length; i++) {
    var item = metricNames[i];
    // fall back so both fields are always populated
    var value = item.value || item.text;
    var text = item.text || item.value;

    // normalize numeric results to strings
    if (_.isNumber(value)) { value = value.toString(); }
    if (_.isNumber(text)) { text = text.toString(); }

    if (regex) {
      var matches = regex.exec(value);
      // regex acts as a filter: non-matching values are dropped
      if (!matches) { continue; }
      if (matches.length > 1) {
        // first capture group replaces both value and text
        value = matches[1];
        text = value;
      }
    }

    options.push({text: text, value: value});
  }

  options = _.uniq(options, 'value');
  return this.sortVariableValues(options, variable.sort);
};
this.addAllOption = function(variable) {
  // Prepend the special "All" entry; the "$__all" value is expanded at interpolation time.
  var allOption = {text: 'All', value: "$__all"};
  variable.options.unshift(allOption);
};
this.sortVariableValues = function(options, sortOrder) {
  // sortOrder encoding: 0 = disabled; odd values sort ascending, even values
  // descending; 1/2 = alphabetical by text, 3/4 = numerical (first number in text).
  if (sortOrder === 0) {
    return options;
  }

  var descending = sortOrder % 2 === 0;
  var mode = Math.ceil(sortOrder / 2);

  switch (mode) {
    case 1:
      options = _.sortBy(options, 'text');
      break;
    case 2:
      options = _.sortBy(options, function(opt) {
        var numMatch = opt.text.match(/.*?(\d+).*/);
        // options without any digits sort as 0
        return numMatch ? parseInt(numMatch[1], 10) : 0;
      });
      break;
  }

  if (descending) {
    options = options.reverse();
  }

  return options;
};
});
});

View File

@ -8,7 +8,6 @@ import {Variable, variableTypes} from './variable';
export class VariableSrv {
dashboard: any;
variables: any;
variableLock: any;
/** @ngInject */
constructor(private $rootScope, private $q, private $location, private $injector, private templateSrv) {
@ -18,7 +17,6 @@ export class VariableSrv {
}
init(dashboard) {
this.variableLock = {};
this.dashboard = dashboard;
// create working class models representing variables
@ -30,13 +28,15 @@ export class VariableSrv {
// init variables
for (let variable of this.variables) {
this.variableLock[variable.name] = this.$q.defer();
variable.initLock = this.$q.defer();
}
var queryParams = this.$location.search();
return this.$q.all(this.variables.map(variable => {
return this.processVariable(variable, queryParams);
}));
})).then(() => {
this.templateSrv.updateTemplateData();
});
}
onDashboardRefresh() {
@ -59,27 +59,27 @@ export class VariableSrv {
processVariable(variable, queryParams) {
var dependencies = [];
var lock = this.variableLock[variable.name];
for (let otherVariable of this.variables) {
if (variable.dependsOn(otherVariable)) {
dependencies.push(this.variableLock[otherVariable.name].promise);
dependencies.push(otherVariable.initLock.promise);
}
}
return this.$q.all(dependencies).then(() => {
var urlValue = queryParams['var-' + variable.name];
if (urlValue !== void 0) {
return variable.setValueFromUrl(urlValue).then(lock.resolve);
return variable.setValueFromUrl(urlValue).then(variable.initLock.resolve);
}
if (variable.refresh === 1 || variable.refresh === 2) {
return variable.updateOptions().then(lock.resolve);
return variable.updateOptions().then(variable.initLock.resolve);
}
lock.resolve();
variable.initLock.resolve();
}).finally(() => {
delete this.variableLock[variable.name];
this.templateSrv.variableInitialized(variable);
delete variable.initLock;
});
}
@ -111,7 +111,7 @@ export class VariableSrv {
variableUpdated(variable) {
// if there is a variable lock ignore cascading update because we are in a boot up scenario
if (this.variableLock[variable.name]) {
if (variable.initLock) {
return this.$q.when();
}
@ -155,8 +155,7 @@ export class VariableSrv {
validateVariableSelectionState(variable) {
if (!variable.current) {
if (!variable.options.length) { return this.$q.when(); }
return variable.setValue(variable.options[0]);
variable.current = {};
}
if (_.isArray(variable.current.value)) {

View File

@ -0,0 +1,287 @@
{
"revision": 2,
"title": "TestData - Alerts",
"tags": [
"grafana-test"
],
"style": "dark",
"timezone": "browser",
"editable": true,
"hideControls": false,
"sharedCrosshair": false,
"rows": [
{
"collapse": false,
"editable": true,
"height": 255.625,
"panels": [
{
"alert": {
"conditions": [
{
"evaluator": {
"params": [
60
],
"type": "gt"
},
"query": {
"params": [
"A",
"5m",
"now"
]
},
"reducer": {
"params": [],
"type": "avg"
},
"type": "query"
}
],
"enabled": true,
"frequency": "60s",
"handler": 1,
"name": "TestData - Always OK",
"noDataState": "no_data",
"notifications": []
},
"aliasColors": {},
"bars": false,
"datasource": "Grafana TestData",
"editable": true,
"error": false,
"fill": 1,
"id": 3,
"isNew": true,
"legend": {
"avg": false,
"current": false,
"max": false,
"min": false,
"show": true,
"total": false,
"values": false
},
"lines": true,
"linewidth": 2,
"links": [],
"nullPointMode": "connected",
"percentage": false,
"pointradius": 5,
"points": false,
"renderer": "flot",
"seriesOverrides": [],
"span": 6,
"stack": false,
"steppedLine": false,
"targets": [
{
"refId": "A",
"scenario": "random_walk",
"scenarioId": "csv_metric_values",
"stringInput": "1,20,90,30,5,0",
"target": ""
}
],
"thresholds": [
{
"value": 60,
"op": "gt",
"fill": true,
"line": true,
"colorMode": "critical"
}
],
"timeFrom": null,
"timeShift": null,
"title": "Always OK",
"tooltip": {
"msResolution": false,
"shared": true,
"sort": 0,
"value_type": "cumulative"
},
"type": "graph",
"xaxis": {
"mode": "time",
"name": null,
"show": true,
"values": []
},
"yaxes": [
{
"format": "short",
"label": "",
"logBase": 1,
"max": "125",
"min": "0",
"show": true
},
{
"format": "short",
"label": null,
"logBase": 1,
"max": null,
"min": null,
"show": true
}
]
},
{
"alert": {
"conditions": [
{
"evaluator": {
"params": [
177
],
"type": "gt"
},
"query": {
"params": [
"A",
"5m",
"now"
]
},
"reducer": {
"params": [],
"type": "avg"
},
"type": "query"
}
],
"enabled": true,
"frequency": "60s",
"handler": 1,
"name": "TestData - Always Alerting",
"noDataState": "no_data",
"notifications": []
},
"aliasColors": {},
"bars": false,
"datasource": "Grafana TestData",
"editable": true,
"error": false,
"fill": 1,
"id": 4,
"isNew": true,
"legend": {
"avg": false,
"current": false,
"max": false,
"min": false,
"show": true,
"total": false,
"values": false
},
"lines": true,
"linewidth": 2,
"links": [],
"nullPointMode": "connected",
"percentage": false,
"pointradius": 5,
"points": false,
"renderer": "flot",
"seriesOverrides": [],
"span": 6,
"stack": false,
"steppedLine": false,
"targets": [
{
"refId": "A",
"scenario": "random_walk",
"scenarioId": "csv_metric_values",
"stringInput": "200,445,100,150,200,220,190",
"target": ""
}
],
"thresholds": [
{
"colorMode": "critical",
"fill": true,
"line": true,
"op": "gt",
"value": 177
}
],
"timeFrom": null,
"timeShift": null,
"title": "Always Alerting",
"tooltip": {
"msResolution": false,
"shared": true,
"sort": 0,
"value_type": "cumulative"
},
"type": "graph",
"xaxis": {
"mode": "time",
"name": null,
"show": true,
"values": []
},
"yaxes": [
{
"format": "short",
"label": "",
"logBase": 1,
"max": null,
"min": "0",
"show": true
},
{
"format": "short",
"label": "",
"logBase": 1,
"max": null,
"min": null,
"show": true
}
]
}
],
"title": "New row"
}
],
"time": {
"from": "now-6h",
"to": "now"
},
"timepicker": {
"refresh_intervals": [
"5s",
"10s",
"30s",
"1m",
"5m",
"15m",
"30m",
"1h",
"2h",
"1d"
],
"time_options": [
"5m",
"15m",
"1h",
"6h",
"12h",
"24h",
"2d",
"7d",
"30d"
]
},
"templating": {
"list": []
},
"annotations": {
"list": []
},
"schemaVersion": 13,
"version": 4,
"links": [],
"gnetId": null
}

View File

@ -0,0 +1,483 @@
{
"revision": 4,
"title": "TestData - Graph Panel Last 1h",
"tags": [
"grafana-test"
],
"style": "dark",
"timezone": "browser",
"editable": true,
"hideControls": false,
"sharedCrosshair": false,
"rows": [
{
"collapse": false,
"editable": true,
"height": "250px",
"panels": [
{
"aliasColors": {},
"bars": false,
"datasource": "Grafana TestData",
"editable": true,
"error": false,
"fill": 1,
"id": 1,
"isNew": true,
"legend": {
"avg": false,
"current": false,
"max": false,
"min": false,
"show": true,
"total": false,
"values": false
},
"lines": true,
"linewidth": 2,
"links": [],
"nullPointMode": "connected",
"percentage": false,
"pointradius": 5,
"points": false,
"renderer": "flot",
"seriesOverrides": [],
"span": 4,
"stack": false,
"steppedLine": false,
"targets": [
{
"refId": "A",
"scenario": "random_walk",
"scenarioId": "no_data_points",
"target": ""
}
],
"thresholds": [],
"timeFrom": null,
"timeShift": null,
"title": "No Data Points Warning",
"tooltip": {
"msResolution": false,
"shared": true,
"sort": 0,
"value_type": "cumulative"
},
"type": "graph",
"xaxis": {
"mode": "time",
"name": null,
"show": true,
"values": []
},
"yaxes": [
{
"format": "short",
"label": null,
"logBase": 1,
"max": null,
"min": null,
"show": true
},
{
"format": "short",
"label": null,
"logBase": 1,
"max": null,
"min": null,
"show": true
}
]
},
{
"aliasColors": {},
"bars": false,
"datasource": "Grafana TestData",
"editable": true,
"error": false,
"fill": 1,
"id": 2,
"isNew": true,
"legend": {
"avg": false,
"current": false,
"max": false,
"min": false,
"show": true,
"total": false,
"values": false
},
"lines": true,
"linewidth": 2,
"links": [],
"nullPointMode": "connected",
"percentage": false,
"pointradius": 5,
"points": false,
"renderer": "flot",
"seriesOverrides": [],
"span": 4,
"stack": false,
"steppedLine": false,
"targets": [
{
"refId": "A",
"scenario": "random_walk",
"scenarioId": "datapoints_outside_range",
"target": ""
}
],
"thresholds": [],
"timeFrom": null,
"timeShift": null,
"title": "Datapoints Outside Range Warning",
"tooltip": {
"msResolution": false,
"shared": true,
"sort": 0,
"value_type": "cumulative"
},
"type": "graph",
"xaxis": {
"mode": "time",
"name": null,
"show": true,
"values": []
},
"yaxes": [
{
"format": "short",
"label": null,
"logBase": 1,
"max": null,
"min": null,
"show": true
},
{
"format": "short",
"label": null,
"logBase": 1,
"max": null,
"min": null,
"show": true
}
]
},
{
"aliasColors": {},
"bars": false,
"datasource": "Grafana TestData",
"editable": true,
"error": false,
"fill": 1,
"id": 3,
"isNew": true,
"legend": {
"avg": false,
"current": false,
"max": false,
"min": false,
"show": true,
"total": false,
"values": false
},
"lines": true,
"linewidth": 2,
"links": [],
"nullPointMode": "connected",
"percentage": false,
"pointradius": 5,
"points": false,
"renderer": "flot",
"seriesOverrides": [],
"span": 4,
"stack": false,
"steppedLine": false,
"targets": [
{
"refId": "A",
"scenario": "random_walk",
"scenarioId": "random_walk",
"target": ""
}
],
"thresholds": [],
"timeFrom": null,
"timeShift": null,
"title": "Random walk series",
"tooltip": {
"msResolution": false,
"shared": true,
"sort": 0,
"value_type": "cumulative"
},
"type": "graph",
"xaxis": {
"mode": "time",
"name": null,
"show": true,
"values": []
},
"yaxes": [
{
"format": "short",
"label": null,
"logBase": 1,
"max": null,
"min": null,
"show": true
},
{
"format": "short",
"label": null,
"logBase": 1,
"max": null,
"min": null,
"show": true
}
]
}
],
"title": "New row"
},
{
"collapse": false,
"editable": true,
"height": "250px",
"panels": [
{
"aliasColors": {},
"bars": false,
"datasource": "Grafana TestData",
"editable": true,
"error": false,
"fill": 1,
"id": 4,
"isNew": true,
"legend": {
"avg": false,
"current": false,
"max": false,
"min": false,
"show": true,
"total": false,
"values": false
},
"lines": true,
"linewidth": 2,
"links": [],
"nullPointMode": "connected",
"percentage": false,
"pointradius": 5,
"points": false,
"renderer": "flot",
"seriesOverrides": [],
"span": 8,
"stack": false,
"steppedLine": false,
"targets": [
{
"refId": "A",
"scenario": "random_walk",
"scenarioId": "random_walk",
"target": ""
}
],
"thresholds": [],
"timeFrom": "2s",
"timeShift": null,
"title": "Millisecond res x-axis and tooltip",
"tooltip": {
"msResolution": false,
"shared": true,
"sort": 0,
"value_type": "cumulative"
},
"type": "graph",
"xaxis": {
"mode": "time",
"name": null,
"show": true,
"values": []
},
"yaxes": [
{
"format": "short",
"label": null,
"logBase": 1,
"max": null,
"min": null,
"show": true
},
{
"format": "short",
"label": null,
"logBase": 1,
"max": null,
"min": null,
"show": true
}
]
},
{
"title": "",
"error": false,
"span": 4,
"editable": true,
"type": "text",
"isNew": true,
"id": 6,
"mode": "markdown",
"content": "Just verify that the tooltip time has millisecond resolution ",
"links": []
}
],
"title": "New row"
},
{
"title": "New row",
"height": 336,
"editable": true,
"collapse": false,
"panels": [
{
"title": "2 yaxis and axis lables",
"error": false,
"span": 7.99561403508772,
"editable": true,
"type": "graph",
"isNew": true,
"id": 5,
"targets": [
{
"target": "",
"refId": "A",
"scenarioId": "csv_metric_values",
"stringInput": "1,20,90,30,5,0"
},
{
"target": "",
"refId": "B",
"scenarioId": "csv_metric_values",
"stringInput": "2000,3000,4000,1000,3000,10000"
}
],
"datasource": "Grafana TestData",
"renderer": "flot",
"yaxes": [
{
"label": "Percent",
"show": true,
"logBase": 1,
"min": null,
"max": null,
"format": "percent"
},
{
"label": "Pressure",
"show": true,
"logBase": 1,
"min": null,
"max": null,
"format": "short"
}
],
"xaxis": {
"show": true,
"mode": "time",
"name": null,
"values": []
},
"lines": true,
"fill": 1,
"linewidth": 2,
"points": false,
"pointradius": 5,
"bars": false,
"stack": false,
"percentage": false,
"legend": {
"show": true,
"values": false,
"min": false,
"max": false,
"current": false,
"total": false,
"avg": false
},
"nullPointMode": "connected",
"steppedLine": false,
"tooltip": {
"value_type": "cumulative",
"shared": true,
"sort": 0,
"msResolution": false
},
"timeFrom": null,
"timeShift": null,
"aliasColors": {},
"seriesOverrides": [
{
"alias": "B-series",
"yaxis": 2
}
],
"thresholds": [],
"links": []
},
{
"title": "",
"error": false,
"span": 4.00438596491228,
"editable": true,
"type": "text",
"isNew": true,
"id": 7,
"mode": "markdown",
"content": "Verify that axis labels look ok",
"links": []
}
]
}
],
"time": {
"from": "now-1h",
"to": "now"
},
"timepicker": {
"refresh_intervals": [
"5s",
"10s",
"30s",
"1m",
"5m",
"15m",
"30m",
"1h",
"2h",
"1d"
],
"time_options": [
"5m",
"15m",
"1h",
"6h",
"12h",
"24h",
"2d",
"7d",
"30d"
]
},
"templating": {
"list": []
},
"annotations": {
"list": []
},
"refresh": false,
"schemaVersion": 13,
"version": 3,
"links": [],
"gnetId": null
}

View File

@ -0,0 +1,62 @@
///<reference path="../../../../headers/common.d.ts" />
import _ from 'lodash';
import angular from 'angular';
class TestDataDatasource {
  /** @ngInject */
  constructor(private backendSrv, private $q) {}

  // Issue all visible (non-hidden) targets against the tsdb query endpoint
  // and flatten the per-query series into flot-style {target, datapoints}.
  query(options) {
    var queries = [];
    for (let target of options.targets) {
      if (target.hide === true) { continue; }
      queries.push({
        refId: target.refId,
        scenarioId: target.scenarioId,
        intervalMs: options.intervalMs,
        maxDataPoints: options.maxDataPoints,
        stringInput: target.stringInput,
        jsonInput: angular.fromJson(target.jsonInput),
      });
    }

    // nothing to ask the backend for
    if (queries.length === 0) {
      return this.$q.when({data: []});
    }

    var payload = {
      from: options.range.from.valueOf().toString(),
      to: options.range.to.valueOf().toString(),
      queries: queries,
    };

    return this.backendSrv.post('/api/tsdb/query', payload).then(res => {
      var data = [];
      if (res.results) {
        _.forEach(res.results, queryRes => {
          for (let series of queryRes.series) {
            data.push({target: series.name, datapoints: series.points});
          }
        });
      }
      return {data: data};
    });
  }

  // Fetch dashboard annotations within the current time range.
  annotationQuery(options) {
    return this.backendSrv.get('/api/annotations', {
      from: options.range.from.valueOf(),
      to: options.range.to.valueOf(),
      limit: options.limit,
      type: options.type,
    });
  }
}

export {TestDataDatasource};

Some files were not shown because too many files have changed in this diff Show More