This commit is contained in:
Leandro Piccilli 2016-10-02 16:59:25 +02:00
commit 0000065053
243 changed files with 11663 additions and 4298 deletions

View File

@ -12,7 +12,7 @@ grunt karma:dev
### Run tests for backend assets before commit ### Run tests for backend assets before commit
``` ```
test -z "$(gofmt -s -l . | grep -v vendor/src/ | tee /dev/stderr)" test -z "$(gofmt -s -l . | grep -v -E 'vendor/(github.com|golang.org|gopkg.in)' | tee /dev/stderr)"
``` ```
### Run tests for frontend assets before commit ### Run tests for frontend assets before commit

View File

@ -12,6 +12,10 @@
* **Graphite**: Add support for groupByNode, closes [#5613](https://github.com/grafana/grafana/pull/5613) * **Graphite**: Add support for groupByNode, closes [#5613](https://github.com/grafana/grafana/pull/5613)
* **Influxdb**: Add support for elapsed(), closes [#5827](https://github.com/grafana/grafana/pull/5827) * **Influxdb**: Add support for elapsed(), closes [#5827](https://github.com/grafana/grafana/pull/5827)
* **OAuth**: Add support for generic oauth, closes [#4718](https://github.com/grafana/grafana/pull/4718) * **OAuth**: Add support for generic oauth, closes [#4718](https://github.com/grafana/grafana/pull/4718)
* **Cloudwatch**: Add support to expand multi select template variable, closes [#5003](https://github.com/grafana/grafana/pull/5003)
* **Graph Panel**: Now supports flexible lower/upper bounds on Y-Max and Y-Min, PR [#5720](https://github.com/grafana/grafana/pull/5720)
* **Background Tasks**: Now support automatic purging of old snapshots, closes [#4087](https://github.com/grafana/grafana/issues/4087)
* **Background Tasks**: Now support automatic purging of old rendered images, closes [#2172](https://github.com/grafana/grafana/issues/2172)
### Breaking changes ### Breaking changes
* **SystemD**: Change systemd description, closes [#5971](https://github.com/grafana/grafana/pull/5971) * **SystemD**: Change systemd description, closes [#5971](https://github.com/grafana/grafana/pull/5971)
@ -19,6 +23,11 @@
### Bugfixes ### Bugfixes
* **Table Panel**: Fixed problem when switching to Mixed datasource in metrics tab, fixes [#5999](https://github.com/grafana/grafana/pull/5999) * **Table Panel**: Fixed problem when switching to Mixed datasource in metrics tab, fixes [#5999](https://github.com/grafana/grafana/pull/5999)
* **Playlist**: Fixed problem with play order not matching order defined in playlist, fixes [#5467](https://github.com/grafana/grafana/pull/5467)
* **Graph panel**: Fixed problem with auto decimals on y axis when datamin=datamax, fixes [#6070](https://github.com/grafana/grafana/pull/6070)
* **Snapshot**: Can view embedded panels/png rendered panels in snapshots without login, fixes [#3769](https://github.com/grafana/grafana/pull/3769)
* **Elasticsearch**: Fix for query template variable when looking up terms without query, no longer relies on elasticsearch default field, fixes [#3887](https://github.com/grafana/grafana/pull/3887)
* **PNG Rendering**: Fix for server side rendering when using auth proxy, fixes [#5906](https://github.com/grafana/grafana/pull/5906)
# 3.1.2 (unreleased) # 3.1.2 (unreleased)
* **Templating**: Fixed issue when combining row & panel repeats, fixes [#5790](https://github.com/grafana/grafana/issues/5790) * **Templating**: Fixed issue when combining row & panel repeats, fixes [#5790](https://github.com/grafana/grafana/issues/5790)

View File

@ -9,7 +9,6 @@ module.exports = function (grunt) {
genDir: 'public_gen', genDir: 'public_gen',
destDir: 'dist', destDir: 'dist',
tempDir: 'tmp', tempDir: 'tmp',
arch: os.arch(),
platform: process.platform.replace('win32', 'windows'), platform: process.platform.replace('win32', 'windows'),
}; };
@ -17,6 +16,10 @@ module.exports = function (grunt) {
config.arch = process.env.hasOwnProperty('ProgramFiles(x86)') ? 'x64' : 'x86'; config.arch = process.env.hasOwnProperty('ProgramFiles(x86)') ? 'x64' : 'x86';
} }
config.arch = grunt.option('arch') || os.arch();
config.phjs = grunt.option('phjsToRelease');
config.pkg.version = grunt.option('pkgVer') || config.pkg.version; config.pkg.version = grunt.option('pkgVer') || config.pkg.version;
console.log('Version', config.pkg.version); console.log('Version', config.pkg.version);

View File

@ -96,7 +96,7 @@ easily the grafana repository you want to build.
```bash ```bash
go get github.com/*your_account*/grafana go get github.com/*your_account*/grafana
mkdir $GOPATH/src/github.com/grafana mkdir $GOPATH/src/github.com/grafana
ln -s github.com/*your_account*/grafana $GOPATH/src/github.com/grafana/grafana ln -s $GOPATH/src/github.com/*your_account*/grafana $GOPATH/src/github.com/grafana/grafana
``` ```
### Building the backend ### Building the backend

View File

@ -25,11 +25,16 @@ var (
versionRe = regexp.MustCompile(`-[0-9]{1,3}-g[0-9a-f]{5,10}`) versionRe = regexp.MustCompile(`-[0-9]{1,3}-g[0-9a-f]{5,10}`)
goarch string goarch string
goos string goos string
gocc string
gocxx string
cgo string
pkgArch string
version string = "v1" version string = "v1"
// deb & rpm does not support semver so have to handle their version a little differently // deb & rpm does not support semver so have to handle their version a little differently
linuxPackageVersion string = "v1" linuxPackageVersion string = "v1"
linuxPackageIteration string = "" linuxPackageIteration string = ""
race bool race bool
phjsToRelease string
workingDir string workingDir string
binaries []string = []string{"grafana-server", "grafana-cli"} binaries []string = []string{"grafana-server", "grafana-cli"}
) )
@ -47,6 +52,11 @@ func main() {
flag.StringVar(&goarch, "goarch", runtime.GOARCH, "GOARCH") flag.StringVar(&goarch, "goarch", runtime.GOARCH, "GOARCH")
flag.StringVar(&goos, "goos", runtime.GOOS, "GOOS") flag.StringVar(&goos, "goos", runtime.GOOS, "GOOS")
flag.StringVar(&gocc, "cc", "", "CC")
flag.StringVar(&gocxx, "cxx", "", "CXX")
flag.StringVar(&cgo, "cgo-enabled", "", "CGO_ENABLED")
flag.StringVar(&pkgArch, "pkg-arch", "", "PKG ARCH")
flag.StringVar(&phjsToRelease, "phjs", "", "PhantomJS binary")
flag.BoolVar(&race, "race", race, "Use race detector") flag.BoolVar(&race, "race", race, "Use race detector")
flag.Parse() flag.Parse()
@ -73,15 +83,15 @@ func main() {
grunt("test") grunt("test")
case "package": case "package":
grunt("release", fmt.Sprintf("--pkgVer=%v-%v", linuxPackageVersion, linuxPackageIteration)) grunt(gruntBuildArg("release")...)
createLinuxPackages() createLinuxPackages()
case "pkg-rpm": case "pkg-rpm":
grunt("release") grunt(gruntBuildArg("release")...)
createRpmPackages() createRpmPackages()
case "pkg-deb": case "pkg-deb":
grunt("release") grunt(gruntBuildArg("release")...)
createDebPackages() createDebPackages()
case "latest": case "latest":
@ -258,6 +268,10 @@ func createPackage(options linuxPackageOptions) {
"-p", "./dist", "-p", "./dist",
} }
if pkgArch != "" {
args = append(args, "-a", pkgArch)
}
if linuxPackageIteration != "" { if linuxPackageIteration != "" {
args = append(args, "--iteration", linuxPackageIteration) args = append(args, "--iteration", linuxPackageIteration)
} }
@ -307,11 +321,20 @@ func grunt(params ...string) {
runPrint("./node_modules/.bin/grunt", params...) runPrint("./node_modules/.bin/grunt", params...)
} }
func gruntBuildArg(task string) []string {
args := []string{task, fmt.Sprintf("--pkgVer=%v-%v", linuxPackageVersion, linuxPackageIteration)}
if pkgArch != "" {
args = append(args, fmt.Sprintf("--arch=%v", pkgArch))
}
if phjsToRelease != "" {
args = append(args, fmt.Sprintf("--phjsToRelease=%v", phjsToRelease))
}
return args
}
func setup() { func setup() {
runPrint("go", "get", "-v", "github.com/kardianos/govendor") runPrint("go", "get", "-v", "github.com/kardianos/govendor")
runPrint("go", "get", "-v", "github.com/blang/semver") runPrint("go", "install", "-v", "./pkg/cmd/grafana-server")
runPrint("go", "get", "-v", "github.com/mattn/go-sqlite3")
runPrint("go", "install", "-v", "github.com/mattn/go-sqlite3")
} }
func test(pkg string) { func test(pkg string) {
@ -382,6 +405,15 @@ func setBuildEnv() {
if goarch == "386" { if goarch == "386" {
os.Setenv("GO386", "387") os.Setenv("GO386", "387")
} }
if cgo != "" {
os.Setenv("CGO_ENABLED", cgo)
}
if gocc != "" {
os.Setenv("CC", gocc)
}
if gocxx != "" {
os.Setenv("CXX", gocxx)
}
} }
func getGitSha() string { func getGitSha() string {

View File

@ -9,7 +9,7 @@ app_mode = production
# instance name, defaults to HOSTNAME environment variable value or hostname if HOSTNAME var is empty # instance name, defaults to HOSTNAME environment variable value or hostname if HOSTNAME var is empty
instance_name = ${HOSTNAME} instance_name = ${HOSTNAME}
#################################### Paths #################################### #################################### Paths ###############################
[paths] [paths]
# Path to where grafana can store temp files, sessions, and the sqlite3 db (if that is used) # Path to where grafana can store temp files, sessions, and the sqlite3 db (if that is used)
# #
@ -23,7 +23,7 @@ logs = data/log
# #
plugins = data/plugins plugins = data/plugins
#################################### Server #################################### #################################### Server ##############################
[server] [server]
# Protocol (http or https) # Protocol (http or https)
protocol = http protocol = http
@ -57,7 +57,7 @@ enable_gzip = false
cert_file = cert_file =
cert_key = cert_key =
#################################### Database #################################### #################################### Database ############################
[database] [database]
# You can configure the database connection by specifying type, host, name, user and password # You can configure the database connection by specifying type, host, name, user and password
# as seperate properties or as on string using the url propertie. # as seperate properties or as on string using the url propertie.
@ -84,7 +84,7 @@ server_cert_name =
# For "sqlite3" only, path relative to data_path setting # For "sqlite3" only, path relative to data_path setting
path = grafana.db path = grafana.db
#################################### Session #################################### #################################### Session #############################
[session] [session]
# Either "memory", "file", "redis", "mysql", "postgres", "memcache", default is "file" # Either "memory", "file", "redis", "mysql", "postgres", "memcache", default is "file"
provider = file provider = file
@ -112,7 +112,7 @@ cookie_secure = false
session_life_time = 86400 session_life_time = 86400
gc_interval_time = 86400 gc_interval_time = 86400
#################################### Analytics #################################### #################################### Analytics ###########################
[analytics] [analytics]
# Server reporting, sends usage counters to stats.grafana.org every 24 hours. # Server reporting, sends usage counters to stats.grafana.org every 24 hours.
# No ip addresses are being tracked, only simple counters to track # No ip addresses are being tracked, only simple counters to track
@ -133,7 +133,7 @@ google_analytics_ua_id =
# Google Tag Manager ID, only enabled if you specify an id here # Google Tag Manager ID, only enabled if you specify an id here
google_tag_manager_id = google_tag_manager_id =
#################################### Security #################################### #################################### Security ############################
[security] [security]
# default admin user, created on startup # default admin user, created on startup
admin_user = admin admin_user = admin
@ -161,6 +161,12 @@ external_enabled = true
external_snapshot_url = https://snapshots-origin.raintank.io external_snapshot_url = https://snapshots-origin.raintank.io
external_snapshot_name = Publish to snapshot.raintank.io external_snapshot_name = Publish to snapshot.raintank.io
# remove expired snapshot
snapshot_remove_expired = true
# remove snapshots after 90 days
snapshot_TTL_days = 90
#################################### Users #################################### #################################### Users ####################################
[users] [users]
# disable user signup / registration # disable user signup / registration
@ -184,10 +190,11 @@ login_hint = email or username
# Default UI theme ("dark" or "light") # Default UI theme ("dark" or "light")
default_theme = dark default_theme = dark
# Allow users to sign in using username and password [auth]
allow_user_pass_login = true # Set to true to disable (hide) the login form, useful if you use OAuth
disable_login_form = false
#################################### Anonymous Auth ########################## #################################### Anonymous Auth ######################
[auth.anonymous] [auth.anonymous]
# enable anonymous access # enable anonymous access
enabled = false enabled = false
@ -198,7 +205,7 @@ org_name = Main Org.
# specify role for unauthenticated users # specify role for unauthenticated users
org_role = Viewer org_role = Viewer
#################################### Github Auth ########################## #################################### Github Auth #########################
[auth.github] [auth.github]
enabled = false enabled = false
allow_sign_up = false allow_sign_up = false
@ -211,7 +218,7 @@ api_url = https://api.github.com/user
team_ids = team_ids =
allowed_organizations = allowed_organizations =
#################################### Google Auth ########################## #################################### Google Auth #########################
[auth.google] [auth.google]
enabled = false enabled = false
allow_sign_up = false allow_sign_up = false
@ -223,7 +230,16 @@ token_url = https://accounts.google.com/o/oauth2/token
api_url = https://www.googleapis.com/oauth2/v1/userinfo api_url = https://www.googleapis.com/oauth2/v1/userinfo
allowed_domains = allowed_domains =
#################################### Generic OAuth ########################## #################################### Grafana.net Auth ####################
[auth.grafananet]
enabled = false
allow_sign_up = false
client_id = some_id
client_secret = some_secret
scopes = user:email
allowed_organizations =
#################################### Generic OAuth #######################
[auth.generic_oauth] [auth.generic_oauth]
enabled = false enabled = false
allow_sign_up = false allow_sign_up = false
@ -247,12 +263,12 @@ header_name = X-WEBAUTH-USER
header_property = username header_property = username
auto_sign_up = true auto_sign_up = true
#################################### Auth LDAP ########################## #################################### Auth LDAP ###########################
[auth.ldap] [auth.ldap]
enabled = false enabled = false
config_file = /etc/grafana/ldap.toml config_file = /etc/grafana/ldap.toml
#################################### SMTP / Emailing ########################## #################################### SMTP / Emailing #####################
[smtp] [smtp]
enabled = false enabled = false
host = localhost:25 host = localhost:25
@ -322,18 +338,18 @@ facility =
tag = tag =
#################################### AMQP Event Publisher ########################## #################################### AMQP Event Publisher ################
[event_publisher] [event_publisher]
enabled = false enabled = false
rabbitmq_url = amqp://localhost/ rabbitmq_url = amqp://localhost/
exchange = grafana_events exchange = grafana_events
#################################### Dashboard JSON files ########################## #################################### Dashboard JSON files ################
[dashboards.json] [dashboards.json]
enabled = false enabled = false
path = /var/lib/grafana/dashboards path = /var/lib/grafana/dashboards
#################################### Usage Quotas ########################## #################################### Usage Quotas ########################
[quota] [quota]
enabled = false enabled = false
@ -368,7 +384,7 @@ global_api_key = -1
# global limit on number of logged in users. # global limit on number of logged in users.
global_session = -1 global_session = -1
#################################### Alerting ###################################### #################################### Alerting ############################
# docs about alerting can be found in /docs/sources/alerting/ # docs about alerting can be found in /docs/sources/alerting/
# __.-/| # __.-/|
# \`o_O' # \`o_O'
@ -387,7 +403,7 @@ global_session = -1
[alerting] [alerting]
enabled = true enabled = true
#################################### Internal Grafana Metrics ########################## #################################### Internal Grafana Metrics ############
# Metrics available at HTTP API Url /api/metrics # Metrics available at HTTP API Url /api/metrics
[metrics] [metrics]
enabled = true enabled = true
@ -402,9 +418,9 @@ prefix = prod.grafana.%(instance_name)s.
[grafana_net] [grafana_net]
url = https://grafana.net url = https://grafana.net
#################################### External image storage ########################## #################################### External Image Storage ##############
[external_image_storage] [external_image_storage]
# You can choose between (s3, webdav or internal) # You can choose between (s3, webdav)
provider = s3 provider = s3
[external_image_storage.s3] [external_image_storage.s3]

View File

@ -116,7 +116,7 @@
# in some UI views to notify that grafana or plugin update exists # in some UI views to notify that grafana or plugin update exists
# This option does not cause any auto updates, nor send any information # This option does not cause any auto updates, nor send any information
# only a GET request to http://grafana.net to get latest versions # only a GET request to http://grafana.net to get latest versions
check_for_updates = true ;check_for_updates = true
# Google Analytics universal tracking code, only enabled if you specify an id here # Google Analytics universal tracking code, only enabled if you specify an id here
;google_analytics_ua_id = ;google_analytics_ua_id =
@ -149,6 +149,12 @@ check_for_updates = true
;external_snapshot_url = https://snapshots-origin.raintank.io ;external_snapshot_url = https://snapshots-origin.raintank.io
;external_snapshot_name = Publish to snapshot.raintank.io ;external_snapshot_name = Publish to snapshot.raintank.io
# remove expired snapshot
;snapshot_remove_expired = true
# remove snapshots after 90 days
;snapshot_TTL_days = 90
#################################### Users #################################### #################################### Users ####################################
[users] [users]
# disable user signup / registration # disable user signup / registration
@ -169,6 +175,10 @@ check_for_updates = true
# Default UI theme ("dark" or "light") # Default UI theme ("dark" or "light")
;default_theme = dark ;default_theme = dark
[auth]
# Set to true to disable (hide) the login form, useful if you use OAuth, defaults to false
;disable_login_form = false
#################################### Anonymous Auth ########################## #################################### Anonymous Auth ##########################
[auth.anonymous] [auth.anonymous]
# enable anonymous access # enable anonymous access
@ -218,6 +228,15 @@ check_for_updates = true
;team_ids = ;team_ids =
;allowed_organizations = ;allowed_organizations =
#################################### Grafana.net Auth ####################
[auth.grafananet]
;enabled = false
;allow_sign_up = false
;client_id = some_id
;client_secret = some_secret
;scopes = user:email
;allowed_organizations =
#################################### Auth Proxy ########################## #################################### Auth Proxy ##########################
[auth.proxy] [auth.proxy]
;enabled = false ;enabled = false

View File

@ -6,11 +6,7 @@ page_keywords: grafana, influxdb, metrics, query, documentation
# InfluxDB # InfluxDB
There are currently two separate datasources for InfluxDB in Grafana: InfluxDB 0.8.x and InfluxDB 0.9.x. Grafana ships with very a feature data source plugin for InfluxDB. Supporting a feature rich query editor, annotation and templating queries.
The API and capabilities of InfluxDB 0.9.x are completely different from InfluxDB 0.8.x which is why Grafana handles
them as different data sources.
InfluxDB 0.9 is rapidly evolving and we continue to track its API. InfluxDB 0.8 is no longer maintained by InfluxDB Inc, but we provide support as a convenience to existing users.
## Adding the data source ## Adding the data source
![](/img/v2/add_Influx.jpg) ![](/img/v2/add_Influx.jpg)

View File

@ -6,6 +6,10 @@ page_keywords: grafana, admin, http, api, documentation
# Admin API # Admin API
The admin http API does not currently work with an api token. Api Token's are currently only linked to an organization and organization role. They cannot given
the permission of server admin, only user's can be given that permission. So in order to use these API calls you will have to use basic auth and Grafana user
with Grafana admin permission.
## Settings ## Settings
`GET /api/admin/settings` `GET /api/admin/settings`
@ -15,7 +19,6 @@ page_keywords: grafana, admin, http, api, documentation
GET /api/admin/settings GET /api/admin/settings
Accept: application/json Accept: application/json
Content-Type: application/json Content-Type: application/json
Authorization: Bearer eyJrIjoiT0tTcG1pUlY2RnVKZTFVaDFsNFZXdE9ZWmNrMkZYbk
**Example Response**: **Example Response**:
@ -171,7 +174,6 @@ page_keywords: grafana, admin, http, api, documentation
GET /api/admin/stats GET /api/admin/stats
Accept: application/json Accept: application/json
Content-Type: application/json Content-Type: application/json
Authorization: Bearer eyJrIjoiT0tTcG1pUlY2RnVKZTFVaDFsNFZXdE9ZWmNrMkZYbk
**Example Response**: **Example Response**:
@ -201,7 +203,6 @@ Create new user
POST /api/admin/users HTTP/1.1 POST /api/admin/users HTTP/1.1
Accept: application/json Accept: application/json
Content-Type: application/json Content-Type: application/json
Authorization: Bearer eyJrIjoiT0tTcG1pUlY2RnVKZTFVaDFsNFZXdE9ZWmNrMkZYbk
{ {
"name":"User", "name":"User",
@ -228,7 +229,6 @@ Change password for specific user
PUT /api/admin/users/2/password HTTP/1.1 PUT /api/admin/users/2/password HTTP/1.1
Accept: application/json Accept: application/json
Content-Type: application/json Content-Type: application/json
Authorization: Bearer eyJrIjoiT0tTcG1pUlY2RnVKZTFVaDFsNFZXdE9ZWmNrMkZYbk
**Example Response**: **Example Response**:
@ -246,7 +246,6 @@ Change password for specific user
PUT /api/admin/users/2/permissions HTTP/1.1 PUT /api/admin/users/2/permissions HTTP/1.1
Accept: application/json Accept: application/json
Content-Type: application/json Content-Type: application/json
Authorization: Bearer eyJrIjoiT0tTcG1pUlY2RnVKZTFVaDFsNFZXdE9ZWmNrMkZYbk
**Example Response**: **Example Response**:
@ -264,7 +263,6 @@ Change password for specific user
DELETE /api/admin/users/2 HTTP/1.1 DELETE /api/admin/users/2 HTTP/1.1
Accept: application/json Accept: application/json
Content-Type: application/json Content-Type: application/json
Authorization: Bearer eyJrIjoiT0tTcG1pUlY2RnVKZTFVaDFsNFZXdE9ZWmNrMkZYbk
**Example Response**: **Example Response**:

View File

@ -238,6 +238,14 @@ options are `Admin` and `Editor` and `Read-Only Editor`.
<hr> <hr>
## [auth]
### disable_login_form
Set to true to disable (hide) the login form, useful if you use OAuth, defaults to false.
<hr>
## [auth.anonymous] ## [auth.anonymous]
### enabled ### enabled
@ -484,6 +492,33 @@ Grafana backend index those json dashboards which will make them appear in regul
### path ### path
The full path to a directory containing your json dashboards. The full path to a directory containing your json dashboards.
## [smtp]
Email server settings.
### enabled
defaults to false
### host
defaults to localhost:25
### user
In case of SMTP auth, defaults to `empty`
### password
In case of SMTP auth, defaults to `empty`
### cert_file
File path to a cert file, defaults to `empty`
### key_file
File path to a key file, defaults to `empty`
### skip_verify
Verify SSL for smtp server? defaults to `false`
### from_address
Address used when sending out emails, defaults to `admin@grafana.localhost`
## [log] ## [log]
### mode ### mode
@ -525,3 +560,9 @@ Set root url to a Grafana instance where you want to publish external snapshots
### external_snapshot_name ### external_snapshot_name
Set name for external snapshot button. Defaults to `Publish to snapshot.raintank.io` Set name for external snapshot button. Defaults to `Publish to snapshot.raintank.io`
### remove expired snapshot
Enabled to automatically remove expired snapshots
### remove snapshots after 90 days
Time to live for snapshots.

View File

@ -25,6 +25,25 @@ func ValidateOrgAlert(c *middleware.Context) {
} }
} }
func GetAlertStatesForDashboard(c *middleware.Context) Response {
dashboardId := c.QueryInt64("dashboardId")
if dashboardId == 0 {
return ApiError(400, "Missing query parameter dashboardId", nil)
}
query := models.GetAlertStatesForDashboardQuery{
OrgId: c.OrgId,
DashboardId: c.QueryInt64("dashboardId"),
}
if err := bus.Dispatch(&query); err != nil {
return ApiError(500, "Failed to fetch alert states", err)
}
return Json(200, query.Result)
}
// GET /api/alerts // GET /api/alerts
func GetAlerts(c *middleware.Context) Response { func GetAlerts(c *middleware.Context) Response {
query := models.GetAlertsQuery{ query := models.GetAlertsQuery{

View File

@ -58,6 +58,7 @@ func Register(r *macaron.Macaron) {
r.Get("/plugins/:id/page/:page", reqSignedIn, Index) r.Get("/plugins/:id/page/:page", reqSignedIn, Index)
r.Get("/dashboard/*", reqSignedIn, Index) r.Get("/dashboard/*", reqSignedIn, Index)
r.Get("/dashboard-solo/snapshot/*", Index)
r.Get("/dashboard-solo/*", reqSignedIn, Index) r.Get("/dashboard-solo/*", reqSignedIn, Index)
r.Get("/import/dashboard", reqSignedIn, Index) r.Get("/import/dashboard", reqSignedIn, Index)
r.Get("/dashboards/*", reqSignedIn, Index) r.Get("/dashboards/*", reqSignedIn, Index)
@ -202,9 +203,9 @@ func Register(r *macaron.Macaron) {
r.Get("/plugins", wrap(GetPluginList)) r.Get("/plugins", wrap(GetPluginList))
r.Get("/plugins/:pluginId/settings", wrap(GetPluginSettingById)) r.Get("/plugins/:pluginId/settings", wrap(GetPluginSettingById))
r.Get("/plugins/:pluginId/readme", wrap(GetPluginReadme))
r.Group("/plugins", func() { r.Group("/plugins", func() {
r.Get("/:pluginId/readme", wrap(GetPluginReadme))
r.Get("/:pluginId/dashboards/", wrap(GetPluginDashboards)) r.Get("/:pluginId/dashboards/", wrap(GetPluginDashboards))
r.Post("/:pluginId/settings", bind(m.UpdatePluginSettingCmd{}), wrap(UpdatePluginSetting)) r.Post("/:pluginId/settings", bind(m.UpdatePluginSettingCmd{}), wrap(UpdatePluginSetting))
}, reqOrgAdmin) }, reqOrgAdmin)
@ -243,7 +244,8 @@ func Register(r *macaron.Macaron) {
r.Get("/search/", Search) r.Get("/search/", Search)
// metrics // metrics
r.Get("/metrics/test", wrap(GetTestMetrics)) r.Post("/tsdb/query", bind(dtos.MetricRequest{}), wrap(QueryMetrics))
r.Get("/tsdb/testdata/scenarios", wrap(GetTestDataScenarios))
// metrics // metrics
r.Get("/metrics", wrap(GetInternalMetrics)) r.Get("/metrics", wrap(GetInternalMetrics))
@ -252,6 +254,7 @@ func Register(r *macaron.Macaron) {
r.Post("/test", bind(dtos.AlertTestCommand{}), wrap(AlertTest)) r.Post("/test", bind(dtos.AlertTestCommand{}), wrap(AlertTest))
r.Get("/:alertId", ValidateOrgAlert, wrap(GetAlert)) r.Get("/:alertId", ValidateOrgAlert, wrap(GetAlert))
r.Get("/", wrap(GetAlerts)) r.Get("/", wrap(GetAlerts))
r.Get("/states-for-dashboard", wrap(GetAlertStatesForDashboard))
}) })
r.Get("/alert-notifications", wrap(GetAlertNotifications)) r.Get("/alert-notifications", wrap(GetAlertNotifications))

View File

@ -96,13 +96,10 @@ func (slice DataSourceList) Swap(i, j int) {
slice[i], slice[j] = slice[j], slice[i] slice[i], slice[j] = slice[j], slice[i]
} }
type MetricQueryResultDto struct { type MetricRequest struct {
Data []MetricQueryResultDataDto `json:"data"` From string `json:"from"`
} To string `json:"to"`
Queries []*simplejson.Json `json:"queries"`
type MetricQueryResultDataDto struct {
Target string `json:"target"`
DataPoints [][2]float64 `json:"datapoints"`
} }
type UserStars struct { type UserStars struct {

23
pkg/api/dtos/playlist.go Normal file
View File

@ -0,0 +1,23 @@
package dtos
type PlaylistDashboard struct {
Id int64 `json:"id"`
Slug string `json:"slug"`
Title string `json:"title"`
Uri string `json:"uri"`
Order int `json:"order"`
}
type PlaylistDashboardsSlice []PlaylistDashboard
func (slice PlaylistDashboardsSlice) Len() int {
return len(slice)
}
func (slice PlaylistDashboardsSlice) Less(i, j int) bool {
return slice[i].Order < slice[j].Order
}
func (slice PlaylistDashboardsSlice) Swap(i, j int) {
slice[i], slice[j] = slice[j], slice[i]
}

View File

@ -38,7 +38,7 @@ func getFrontendSettingsMap(c *middleware.Context) (map[string]interface{}, erro
url := ds.Url url := ds.Url
if ds.Access == m.DS_ACCESS_PROXY { if ds.Access == m.DS_ACCESS_PROXY {
url = setting.AppSubUrl + "/api/datasources/proxy/" + strconv.FormatInt(ds.Id, 10) url = "/api/datasources/proxy/" + strconv.FormatInt(ds.Id, 10)
} }
var dsMap = map[string]interface{}{ var dsMap = map[string]interface{}{

View File

@ -1,6 +1,7 @@
package api package api
import ( import (
"fmt"
"strings" "strings"
"github.com/grafana/grafana/pkg/api/dtos" "github.com/grafana/grafana/pkg/api/dtos"
@ -32,6 +33,16 @@ func setIndexViewData(c *middleware.Context) (*dtos.IndexViewData, error) {
locale = parts[0] locale = parts[0]
} }
appUrl := setting.AppUrl
appSubUrl := setting.AppSubUrl
// special case when doing localhost call from phantomjs
if c.IsRenderCall {
appUrl = fmt.Sprintf("%s://localhost:%s", setting.Protocol, setting.HttpPort)
appSubUrl = ""
settings["appSubUrl"] = ""
}
var data = dtos.IndexViewData{ var data = dtos.IndexViewData{
User: &dtos.CurrentUser{ User: &dtos.CurrentUser{
Id: c.UserId, Id: c.UserId,
@ -49,8 +60,8 @@ func setIndexViewData(c *middleware.Context) (*dtos.IndexViewData, error) {
Locale: locale, Locale: locale,
}, },
Settings: settings, Settings: settings,
AppUrl: setting.AppUrl, AppUrl: appUrl,
AppSubUrl: setting.AppSubUrl, AppSubUrl: appSubUrl,
GoogleAnalyticsId: setting.GoogleAnalyticsId, GoogleAnalyticsId: setting.GoogleAnalyticsId,
GoogleTagManagerId: setting.GoogleTagManagerId, GoogleTagManagerId: setting.GoogleTagManagerId,
BuildVersion: setting.BuildVersion, BuildVersion: setting.BuildVersion,
@ -154,7 +165,7 @@ func setIndexViewData(c *middleware.Context) (*dtos.IndexViewData, error) {
} }
} }
if c.OrgRole == m.ROLE_ADMIN { if len(appLink.Children) > 0 && c.OrgRole == m.ROLE_ADMIN {
appLink.Children = append(appLink.Children, &dtos.NavLink{Divider: true}) appLink.Children = append(appLink.Children, &dtos.NavLink{Divider: true})
appLink.Children = append(appLink.Children, &dtos.NavLink{Text: "Plugin Config", Icon: "fa fa-cog", Url: setting.AppSubUrl + "/plugins/" + plugin.Id + "/edit"}) appLink.Children = append(appLink.Children, &dtos.NavLink{Text: "Plugin Config", Icon: "fa fa-cog", Url: setting.AppSubUrl + "/plugins/" + plugin.Id + "/edit"})
} }

View File

@ -25,13 +25,15 @@ func LoginView(c *middleware.Context) {
return return
} }
viewData.Settings["googleAuthEnabled"] = setting.OAuthService.Google enabledOAuths := make(map[string]interface{})
viewData.Settings["githubAuthEnabled"] = setting.OAuthService.GitHub for key, oauth := range setting.OAuthService.OAuthInfos {
viewData.Settings["genericOAuthEnabled"] = setting.OAuthService.Generic enabledOAuths[key] = map[string]string{"name": oauth.Name}
viewData.Settings["oauthProviderName"] = setting.OAuthService.OAuthProviderName }
viewData.Settings["oauth"] = enabledOAuths
viewData.Settings["disableUserSignUp"] = !setting.AllowUserSignUp viewData.Settings["disableUserSignUp"] = !setting.AllowUserSignUp
viewData.Settings["loginHint"] = setting.LoginHint viewData.Settings["loginHint"] = setting.LoginHint
viewData.Settings["allowUserPassLogin"] = setting.AllowUserPassLogin viewData.Settings["disableLoginForm"] = setting.DisableLoginForm
if !tryLoginUsingRememberCookie(c) { if !tryLoginUsingRememberCookie(c) {
c.HTML(200, VIEW_INDEX, viewData) c.HTML(200, VIEW_INDEX, viewData)

View File

@ -3,7 +3,6 @@ package api
import ( import (
"errors" "errors"
"fmt" "fmt"
"net/url"
"golang.org/x/oauth2" "golang.org/x/oauth2"
@ -46,9 +45,9 @@ func OAuthLogin(ctx *middleware.Context) {
userInfo, err := connect.UserInfo(token) userInfo, err := connect.UserInfo(token)
if err != nil { if err != nil {
if err == social.ErrMissingTeamMembership { if err == social.ErrMissingTeamMembership {
ctx.Redirect(setting.AppSubUrl + "/login?failedMsg=" + url.QueryEscape("Required Github team membership not fulfilled")) ctx.Redirect(setting.AppSubUrl + "/login?failCode=1000")
} else if err == social.ErrMissingOrganizationMembership { } else if err == social.ErrMissingOrganizationMembership {
ctx.Redirect(setting.AppSubUrl + "/login?failedMsg=" + url.QueryEscape("Required Github organization membership not fulfilled")) ctx.Redirect(setting.AppSubUrl + "/login?failCode=1001")
} else { } else {
ctx.Handle(500, fmt.Sprintf("login.OAuthLogin(get info from %s)", name), err) ctx.Handle(500, fmt.Sprintf("login.OAuthLogin(get info from %s)", name), err)
} }
@ -60,7 +59,7 @@ func OAuthLogin(ctx *middleware.Context) {
// validate that the email is allowed to login to grafana // validate that the email is allowed to login to grafana
if !connect.IsEmailAllowed(userInfo.Email) { if !connect.IsEmailAllowed(userInfo.Email) {
ctx.Logger.Info("OAuth login attempt with unallowed email", "email", userInfo.Email) ctx.Logger.Info("OAuth login attempt with unallowed email", "email", userInfo.Email)
ctx.Redirect(setting.AppSubUrl + "/login?failedMsg=" + url.QueryEscape("Required email domain not fulfilled")) ctx.Redirect(setting.AppSubUrl + "/login?failCode=1002")
return return
} }
@ -87,6 +86,7 @@ func OAuthLogin(ctx *middleware.Context) {
Email: userInfo.Email, Email: userInfo.Email,
Name: userInfo.Name, Name: userInfo.Name,
Company: userInfo.Company, Company: userInfo.Company,
DefaultOrgRole: userInfo.Role,
} }
if err = bus.Dispatch(&cmd); err != nil { if err = bus.Dispatch(&cmd); err != nil {

View File

@ -2,39 +2,54 @@ package api
import ( import (
"encoding/json" "encoding/json"
"math/rand"
"net/http" "net/http"
"strconv"
"github.com/grafana/grafana/pkg/api/dtos" "github.com/grafana/grafana/pkg/api/dtos"
"github.com/grafana/grafana/pkg/metrics" "github.com/grafana/grafana/pkg/metrics"
"github.com/grafana/grafana/pkg/middleware" "github.com/grafana/grafana/pkg/middleware"
"github.com/grafana/grafana/pkg/tsdb"
"github.com/grafana/grafana/pkg/tsdb/testdata"
"github.com/grafana/grafana/pkg/util" "github.com/grafana/grafana/pkg/util"
) )
func GetTestMetrics(c *middleware.Context) Response { // POST /api/tsdb/query
from := c.QueryInt64("from") func QueryMetrics(c *middleware.Context, reqDto dtos.MetricRequest) Response {
to := c.QueryInt64("to") timeRange := tsdb.NewTimeRange(reqDto.From, reqDto.To)
maxDataPoints := c.QueryInt64("maxDataPoints")
stepInSeconds := (to - from) / maxDataPoints
result := dtos.MetricQueryResultDto{} request := &tsdb.Request{TimeRange: timeRange}
result.Data = make([]dtos.MetricQueryResultDataDto, 1)
for seriesIndex := range result.Data { for _, query := range reqDto.Queries {
points := make([][2]float64, maxDataPoints) request.Queries = append(request.Queries, &tsdb.Query{
walker := rand.Float64() * 100 RefId: query.Get("refId").MustString("A"),
time := from MaxDataPoints: query.Get("maxDataPoints").MustInt64(100),
IntervalMs: query.Get("intervalMs").MustInt64(1000),
for i := range points { Model: query,
points[i][0] = walker DataSource: &tsdb.DataSourceInfo{
points[i][1] = float64(time) Name: "Grafana TestDataDB",
walker += rand.Float64() - 0.5 PluginId: "grafana-testdata-datasource",
time += stepInSeconds },
})
} }
result.Data[seriesIndex].Target = "test-series-" + strconv.Itoa(seriesIndex) resp, err := tsdb.HandleRequest(request)
result.Data[seriesIndex].DataPoints = points if err != nil {
return ApiError(500, "Metric request error", err)
}
return Json(200, &resp)
}
// GET /api/tsdb/testdata/scenarios
func GetTestDataScenarios(c *middleware.Context) Response {
result := make([]interface{}, 0)
for _, scenario := range testdata.ScenarioRegistry {
result = append(result, map[string]interface{}{
"id": scenario.Id,
"name": scenario.Name,
"description": scenario.Description,
"stringInput": scenario.StringInput,
})
} }
return Json(200, &result) return Json(200, &result)

View File

@ -1,16 +1,18 @@
package api package api
import ( import (
"sort"
"strconv" "strconv"
"github.com/grafana/grafana/pkg/api/dtos"
"github.com/grafana/grafana/pkg/bus" "github.com/grafana/grafana/pkg/bus"
_ "github.com/grafana/grafana/pkg/log" _ "github.com/grafana/grafana/pkg/log"
m "github.com/grafana/grafana/pkg/models" m "github.com/grafana/grafana/pkg/models"
"github.com/grafana/grafana/pkg/services/search" "github.com/grafana/grafana/pkg/services/search"
) )
func populateDashboardsById(dashboardByIds []int64) ([]m.PlaylistDashboardDto, error) { func populateDashboardsById(dashboardByIds []int64, dashboardIdOrder map[int64]int) (dtos.PlaylistDashboardsSlice, error) {
result := make([]m.PlaylistDashboardDto, 0) result := make(dtos.PlaylistDashboardsSlice, 0)
if len(dashboardByIds) > 0 { if len(dashboardByIds) > 0 {
dashboardQuery := m.GetDashboardsQuery{DashboardIds: dashboardByIds} dashboardQuery := m.GetDashboardsQuery{DashboardIds: dashboardByIds}
@ -19,11 +21,12 @@ func populateDashboardsById(dashboardByIds []int64) ([]m.PlaylistDashboardDto, e
} }
for _, item := range dashboardQuery.Result { for _, item := range dashboardQuery.Result {
result = append(result, m.PlaylistDashboardDto{ result = append(result, dtos.PlaylistDashboard{
Id: item.Id, Id: item.Id,
Slug: item.Slug, Slug: item.Slug,
Title: item.Title, Title: item.Title,
Uri: "db/" + item.Slug, Uri: "db/" + item.Slug,
Order: dashboardIdOrder[item.Id],
}) })
} }
} }
@ -31,8 +34,8 @@ func populateDashboardsById(dashboardByIds []int64) ([]m.PlaylistDashboardDto, e
return result, nil return result, nil
} }
func populateDashboardsByTag(orgId, userId int64, dashboardByTag []string) []m.PlaylistDashboardDto { func populateDashboardsByTag(orgId, userId int64, dashboardByTag []string, dashboardTagOrder map[string]int) dtos.PlaylistDashboardsSlice {
result := make([]m.PlaylistDashboardDto, 0) result := make(dtos.PlaylistDashboardsSlice, 0)
if len(dashboardByTag) > 0 { if len(dashboardByTag) > 0 {
for _, tag := range dashboardByTag { for _, tag := range dashboardByTag {
@ -47,10 +50,11 @@ func populateDashboardsByTag(orgId, userId int64, dashboardByTag []string) []m.P
if err := bus.Dispatch(&searchQuery); err == nil { if err := bus.Dispatch(&searchQuery); err == nil {
for _, item := range searchQuery.Result { for _, item := range searchQuery.Result {
result = append(result, m.PlaylistDashboardDto{ result = append(result, dtos.PlaylistDashboard{
Id: item.Id, Id: item.Id,
Title: item.Title, Title: item.Title,
Uri: item.Uri, Uri: item.Uri,
Order: dashboardTagOrder[tag],
}) })
} }
} }
@ -60,28 +64,33 @@ func populateDashboardsByTag(orgId, userId int64, dashboardByTag []string) []m.P
return result return result
} }
func LoadPlaylistDashboards(orgId, userId, playlistId int64) ([]m.PlaylistDashboardDto, error) { func LoadPlaylistDashboards(orgId, userId, playlistId int64) (dtos.PlaylistDashboardsSlice, error) {
playlistItems, _ := LoadPlaylistItems(playlistId) playlistItems, _ := LoadPlaylistItems(playlistId)
dashboardByIds := make([]int64, 0) dashboardByIds := make([]int64, 0)
dashboardByTag := make([]string, 0) dashboardByTag := make([]string, 0)
dashboardIdOrder := make(map[int64]int)
dashboardTagOrder := make(map[string]int)
for _, i := range playlistItems { for _, i := range playlistItems {
if i.Type == "dashboard_by_id" { if i.Type == "dashboard_by_id" {
dashboardId, _ := strconv.ParseInt(i.Value, 10, 64) dashboardId, _ := strconv.ParseInt(i.Value, 10, 64)
dashboardByIds = append(dashboardByIds, dashboardId) dashboardByIds = append(dashboardByIds, dashboardId)
dashboardIdOrder[dashboardId] = i.Order
} }
if i.Type == "dashboard_by_tag" { if i.Type == "dashboard_by_tag" {
dashboardByTag = append(dashboardByTag, i.Value) dashboardByTag = append(dashboardByTag, i.Value)
dashboardTagOrder[i.Value] = i.Order
} }
} }
result := make([]m.PlaylistDashboardDto, 0) result := make(dtos.PlaylistDashboardsSlice, 0)
var k, _ = populateDashboardsById(dashboardByIds) var k, _ = populateDashboardsById(dashboardByIds, dashboardIdOrder)
result = append(result, k...) result = append(result, k...)
result = append(result, populateDashboardsByTag(orgId, userId, dashboardByTag)...) result = append(result, populateDashboardsByTag(orgId, userId, dashboardByTag, dashboardTagOrder)...)
sort.Sort(sort.Reverse(result))
return result, nil return result, nil
} }

View File

@ -6,35 +6,21 @@ import (
"github.com/grafana/grafana/pkg/components/renderer" "github.com/grafana/grafana/pkg/components/renderer"
"github.com/grafana/grafana/pkg/middleware" "github.com/grafana/grafana/pkg/middleware"
"github.com/grafana/grafana/pkg/setting"
"github.com/grafana/grafana/pkg/util" "github.com/grafana/grafana/pkg/util"
) )
func RenderToPng(c *middleware.Context) { func RenderToPng(c *middleware.Context) {
queryReader := util.NewUrlQueryReader(c.Req.URL) queryReader := util.NewUrlQueryReader(c.Req.URL)
queryParams := fmt.Sprintf("?%s", c.Req.URL.RawQuery) queryParams := fmt.Sprintf("?%s", c.Req.URL.RawQuery)
sessionId := c.Session.ID()
// Handle api calls authenticated without session
if sessionId == "" && c.ApiKeyId != 0 {
c.Session.Start(c)
c.Session.Set(middleware.SESS_KEY_APIKEY, c.ApiKeyId)
// release will make sure the new session is persisted before
// we spin up phantomjs
c.Session.Release()
// cleanup session after render is complete
defer func() { c.Session.Destory(c) }()
}
renderOpts := &renderer.RenderOpts{ renderOpts := &renderer.RenderOpts{
Url: c.Params("*") + queryParams, Path: c.Params("*") + queryParams,
Width: queryReader.Get("width", "800"), Width: queryReader.Get("width", "800"),
Height: queryReader.Get("height", "400"), Height: queryReader.Get("height", "400"),
SessionId: c.Session.ID(), OrgId: c.OrgId,
Timeout: queryReader.Get("timeout", "30"), Timeout: queryReader.Get("timeout", "30"),
} }
renderOpts.Url = setting.ToAbsUrl(renderOpts.Url)
pngPath, err := renderer.RenderToPng(renderOpts) pngPath, err := renderer.RenderToPng(renderOpts)
if err != nil { if err != nil {

View File

@ -141,8 +141,6 @@ func createRequest(repoUrl string, subPaths ...string) ([]byte, error) {
req, err := http.NewRequest(http.MethodGet, u.String(), nil) req, err := http.NewRequest(http.MethodGet, u.String(), nil)
logger.Info("grafanaVersion ", grafanaVersion)
req.Header.Set("grafana-version", grafanaVersion) req.Header.Set("grafana-version", grafanaVersion)
req.Header.Set("User-Agent", "grafana "+grafanaVersion) req.Header.Set("User-Agent", "grafana "+grafanaVersion)

View File

@ -13,16 +13,15 @@ import (
"time" "time"
"github.com/grafana/grafana/pkg/log" "github.com/grafana/grafana/pkg/log"
"github.com/grafana/grafana/pkg/login" "github.com/grafana/grafana/pkg/models"
"github.com/grafana/grafana/pkg/metrics"
"github.com/grafana/grafana/pkg/plugins"
alertingInit "github.com/grafana/grafana/pkg/services/alerting/init"
"github.com/grafana/grafana/pkg/services/eventpublisher"
"github.com/grafana/grafana/pkg/services/notifications"
"github.com/grafana/grafana/pkg/services/search"
"github.com/grafana/grafana/pkg/services/sqlstore" "github.com/grafana/grafana/pkg/services/sqlstore"
"github.com/grafana/grafana/pkg/setting" "github.com/grafana/grafana/pkg/setting"
"github.com/grafana/grafana/pkg/social"
_ "github.com/grafana/grafana/pkg/services/alerting/conditions"
_ "github.com/grafana/grafana/pkg/services/alerting/notifiers"
_ "github.com/grafana/grafana/pkg/tsdb/graphite"
_ "github.com/grafana/grafana/pkg/tsdb/prometheus"
_ "github.com/grafana/grafana/pkg/tsdb/testdata"
) )
var version = "3.1.0" var version = "3.1.0"
@ -56,26 +55,8 @@ func main() {
setting.BuildCommit = commit setting.BuildCommit = commit
setting.BuildStamp = buildstampInt64 setting.BuildStamp = buildstampInt64
go listenToSystemSignels() server := NewGrafanaServer()
server.Start()
flag.Parse()
writePIDFile()
initRuntime()
metrics.Init()
search.Init()
login.Init()
social.NewOAuthService()
eventpublisher.Init()
plugins.Init()
alertingInit.Init()
if err := notifications.Init(); err != nil {
log.Fatal(3, "Notification service failed to initialize", err)
}
StartServer()
exitChan <- 0
} }
func initRuntime() { func initRuntime() {
@ -93,7 +74,9 @@ func initRuntime() {
logger.Info("Starting Grafana", "version", version, "commit", commit, "compiled", time.Unix(setting.BuildStamp, 0)) logger.Info("Starting Grafana", "version", version, "commit", commit, "compiled", time.Unix(setting.BuildStamp, 0))
setting.LogConfigurationInfo() setting.LogConfigurationInfo()
}
func initSql() {
sqlstore.NewEngine() sqlstore.NewEngine()
sqlstore.EnsureAdminUser() sqlstore.EnsureAdminUser()
} }
@ -116,7 +99,7 @@ func writePIDFile() {
} }
} }
func listenToSystemSignels() { func listenToSystemSignals(server models.GrafanaServer) {
signalChan := make(chan os.Signal, 1) signalChan := make(chan os.Signal, 1)
code := 0 code := 0
@ -124,16 +107,8 @@ func listenToSystemSignels() {
select { select {
case sig := <-signalChan: case sig := <-signalChan:
log.Info("Received signal %s. shutting down", sig) server.Shutdown(0, fmt.Sprintf("system signal: %s", sig))
case code = <-exitChan: case code = <-exitChan:
switch code { server.Shutdown(code, "startup error")
case 0:
log.Info("Shutting down")
default:
log.Warn("Shutting down")
} }
} }
log.Close()
os.Exit(code)
}

View File

@ -0,0 +1,128 @@
package main
import (
"context"
"fmt"
"net/http"
"os"
"time"
"golang.org/x/sync/errgroup"
"github.com/grafana/grafana/pkg/api"
"github.com/grafana/grafana/pkg/log"
"github.com/grafana/grafana/pkg/login"
"github.com/grafana/grafana/pkg/metrics"
"github.com/grafana/grafana/pkg/models"
"github.com/grafana/grafana/pkg/plugins"
"github.com/grafana/grafana/pkg/services/alerting"
"github.com/grafana/grafana/pkg/services/cleanup"
"github.com/grafana/grafana/pkg/services/eventpublisher"
"github.com/grafana/grafana/pkg/services/notifications"
"github.com/grafana/grafana/pkg/services/search"
"github.com/grafana/grafana/pkg/setting"
"github.com/grafana/grafana/pkg/social"
)
func NewGrafanaServer() models.GrafanaServer {
rootCtx, shutdownFn := context.WithCancel(context.Background())
childRoutines, childCtx := errgroup.WithContext(rootCtx)
return &GrafanaServerImpl{
context: childCtx,
shutdownFn: shutdownFn,
childRoutines: childRoutines,
log: log.New("server"),
}
}
type GrafanaServerImpl struct {
context context.Context
shutdownFn context.CancelFunc
childRoutines *errgroup.Group
log log.Logger
}
func (g *GrafanaServerImpl) Start() {
go listenToSystemSignals(g)
writePIDFile()
initRuntime()
initSql()
metrics.Init()
search.Init()
login.Init()
social.NewOAuthService()
eventpublisher.Init()
plugins.Init()
// init alerting
if setting.AlertingEnabled {
engine := alerting.NewEngine()
g.childRoutines.Go(func() error { return engine.Run(g.context) })
}
// cleanup service
cleanUpService := cleanup.NewCleanUpService()
g.childRoutines.Go(func() error { return cleanUpService.Run(g.context) })
if err := notifications.Init(); err != nil {
g.log.Error("Notification service failed to initialize", "erro", err)
g.Shutdown(1, "Startup failed")
return
}
g.startHttpServer()
}
func (g *GrafanaServerImpl) startHttpServer() {
logger = log.New("http.server")
var err error
m := newMacaron()
api.Register(m)
listenAddr := fmt.Sprintf("%s:%s", setting.HttpAddr, setting.HttpPort)
g.log.Info("Initializing HTTP Server", "address", listenAddr, "protocol", setting.Protocol, "subUrl", setting.AppSubUrl)
switch setting.Protocol {
case setting.HTTP:
err = http.ListenAndServe(listenAddr, m)
case setting.HTTPS:
err = http.ListenAndServeTLS(listenAddr, setting.CertFile, setting.KeyFile, m)
default:
g.log.Error("Invalid protocol", "protocol", setting.Protocol)
g.Shutdown(1, "Startup failed")
}
if err != nil {
g.log.Error("Fail to start server", "error", err)
g.Shutdown(1, "Startup failed")
return
}
}
func (g *GrafanaServerImpl) Shutdown(code int, reason string) {
g.log.Info("Shutdown started", "code", code, "reason", reason)
g.shutdownFn()
err := g.childRoutines.Wait()
g.log.Info("Shutdown completed", "reason", err)
log.Close()
os.Exit(code)
}
// implement context.Context
func (g *GrafanaServerImpl) Deadline() (deadline time.Time, ok bool) {
return g.context.Deadline()
}
func (g *GrafanaServerImpl) Done() <-chan struct{} {
return g.context.Done()
}
func (g *GrafanaServerImpl) Err() error {
return g.context.Err()
}
func (g *GrafanaServerImpl) Value(key interface{}) interface{} {
return g.context.Value(key)
}

View File

@ -6,7 +6,6 @@ package main
import ( import (
"fmt" "fmt"
"net/http" "net/http"
"os"
"path" "path"
"gopkg.in/macaron.v1" "gopkg.in/macaron.v1"
@ -79,7 +78,7 @@ func mapStatic(m *macaron.Macaron, rootDir string, dir string, prefix string) {
)) ))
} }
func StartServer() { func StartServer() int {
logger = log.New("server") logger = log.New("server")
var err error var err error
@ -95,11 +94,13 @@ func StartServer() {
err = http.ListenAndServeTLS(listenAddr, setting.CertFile, setting.KeyFile, m) err = http.ListenAndServeTLS(listenAddr, setting.CertFile, setting.KeyFile, m)
default: default:
logger.Error("Invalid protocol", "protocol", setting.Protocol) logger.Error("Invalid protocol", "protocol", setting.Protocol)
os.Exit(1) return 1
} }
if err != nil { if err != nil {
logger.Error("Fail to start server", "error", err) logger.Error("Fail to start server", "error", err)
os.Exit(1) return 1
} }
return 0
} }

View File

@ -12,36 +12,51 @@ import (
"strconv" "strconv"
"github.com/grafana/grafana/pkg/log" "github.com/grafana/grafana/pkg/log"
"github.com/grafana/grafana/pkg/middleware"
"github.com/grafana/grafana/pkg/setting" "github.com/grafana/grafana/pkg/setting"
"github.com/grafana/grafana/pkg/util" "github.com/grafana/grafana/pkg/util"
) )
type RenderOpts struct { type RenderOpts struct {
Url string Path string
Width string Width string
Height string Height string
SessionId string
Timeout string Timeout string
OrgId int64
} }
var rendererLog log.Logger = log.New("png-renderer") var rendererLog log.Logger = log.New("png-renderer")
func RenderToPng(params *RenderOpts) (string, error) { func RenderToPng(params *RenderOpts) (string, error) {
rendererLog.Info("Rendering", "url", params.Url) rendererLog.Info("Rendering", "path", params.Path)
var executable = "phantomjs" var executable = "phantomjs"
if runtime.GOOS == "windows" { if runtime.GOOS == "windows" {
executable = executable + ".exe" executable = executable + ".exe"
} }
url := fmt.Sprintf("%s://localhost:%s/%s", setting.Protocol, setting.HttpPort, params.Path)
binPath, _ := filepath.Abs(filepath.Join(setting.PhantomDir, executable)) binPath, _ := filepath.Abs(filepath.Join(setting.PhantomDir, executable))
scriptPath, _ := filepath.Abs(filepath.Join(setting.PhantomDir, "render.js")) scriptPath, _ := filepath.Abs(filepath.Join(setting.PhantomDir, "render.js"))
pngPath, _ := filepath.Abs(filepath.Join(setting.ImagesDir, util.GetRandomString(20))) pngPath, _ := filepath.Abs(filepath.Join(setting.ImagesDir, util.GetRandomString(20)))
pngPath = pngPath + ".png" pngPath = pngPath + ".png"
cmd := exec.Command(binPath, "--ignore-ssl-errors=true", scriptPath, "url="+params.Url, "width="+params.Width, renderKey := middleware.AddRenderAuthKey(params.OrgId)
"height="+params.Height, "png="+pngPath, "cookiename="+setting.SessionOptions.CookieName, defer middleware.RemoveRenderAuthKey(renderKey)
"domain="+setting.Domain, "sessionid="+params.SessionId)
cmdArgs := []string{
"--ignore-ssl-errors=true",
scriptPath,
"url=" + url,
"width=" + params.Width,
"height=" + params.Height,
"png=" + pngPath,
"domain=" + setting.Domain,
"renderKey=" + renderKey,
}
cmd := exec.Command(binPath, cmdArgs...)
stdout, err := cmd.StdoutPipe() stdout, err := cmd.StdoutPipe()
if err != nil { if err != nil {

View File

@ -32,11 +32,25 @@ func New(logger string, ctx ...interface{}) Logger {
} }
func Trace(format string, v ...interface{}) { func Trace(format string, v ...interface{}) {
Root.Debug(fmt.Sprintf(format, v)) var message string
if len(v) > 0 {
message = fmt.Sprintf(format, v)
} else {
message = format
}
Root.Debug(message)
} }
func Debug(format string, v ...interface{}) { func Debug(format string, v ...interface{}) {
Root.Debug(fmt.Sprintf(format, v)) var message string
if len(v) > 0 {
message = fmt.Sprintf(format, v)
} else {
message = format
}
Root.Debug(message)
} }
func Debug2(message string, v ...interface{}) { func Debug2(message string, v ...interface{}) {
@ -44,7 +58,14 @@ func Debug2(message string, v ...interface{}) {
} }
func Info(format string, v ...interface{}) { func Info(format string, v ...interface{}) {
Root.Info(fmt.Sprintf(format, v)) var message string
if len(v) > 0 {
message = fmt.Sprintf(format, v)
} else {
message = format
}
Root.Info(message)
} }
func Info2(message string, v ...interface{}) { func Info2(message string, v ...interface{}) {
@ -52,7 +73,14 @@ func Info2(message string, v ...interface{}) {
} }
func Warn(format string, v ...interface{}) { func Warn(format string, v ...interface{}) {
Root.Warn(fmt.Sprintf(format, v)) var message string
if len(v) > 0 {
message = fmt.Sprintf(format, v)
} else {
message = format
}
Root.Warn(message)
} }
func Warn2(message string, v ...interface{}) { func Warn2(message string, v ...interface{}) {

View File

@ -24,10 +24,10 @@ func NewGauge(meta *MetricMeta) Gauge {
} }
} }
func RegGauge(meta *MetricMeta) Gauge { func RegGauge(name string, tagStrings ...string) Gauge {
g := NewGauge(meta) tr := NewGauge(NewMetricMeta(name, tagStrings))
MetricStats.Register(g) MetricStats.Register(tr)
return g return tr
} }
// GaugeSnapshot is a read-only copy of another Gauge. // GaugeSnapshot is a read-only copy of another Gauge.

View File

@ -63,6 +63,8 @@ func (this *GraphitePublisher) Publish(metrics []Metric) {
switch metric := m.(type) { switch metric := m.(type) {
case Counter: case Counter:
this.addCount(buf, metricName+".count", metric.Count(), now) this.addCount(buf, metricName+".count", metric.Count(), now)
case Gauge:
this.addCount(buf, metricName, metric.Value(), now)
case Timer: case Timer:
percentiles := metric.Percentiles([]float64{0.25, 0.75, 0.90, 0.99}) percentiles := metric.Percentiles([]float64{0.25, 0.75, 0.90, 0.99})
this.addCount(buf, metricName+".count", metric.Count(), now) this.addCount(buf, metricName+".count", metric.Count(), now)

View File

@ -49,6 +49,12 @@ var (
// Timers // Timers
M_DataSource_ProxyReq_Timer Timer M_DataSource_ProxyReq_Timer Timer
M_Alerting_Exeuction_Time Timer M_Alerting_Exeuction_Time Timer
// StatTotals
M_StatTotal_Dashboards Gauge
M_StatTotal_Users Gauge
M_StatTotal_Orgs Gauge
M_StatTotal_Playlists Gauge
) )
func initMetricVars(settings *MetricSettings) { func initMetricVars(settings *MetricSettings) {
@ -105,4 +111,10 @@ func initMetricVars(settings *MetricSettings) {
// Timers // Timers
M_DataSource_ProxyReq_Timer = RegTimer("api.dataproxy.request.all") M_DataSource_ProxyReq_Timer = RegTimer("api.dataproxy.request.all")
M_Alerting_Exeuction_Time = RegTimer("alerting.execution_time") M_Alerting_Exeuction_Time = RegTimer("alerting.execution_time")
// StatTotals
M_StatTotal_Dashboards = RegGauge("stat_totals", "stat", "dashboards")
M_StatTotal_Users = RegGauge("stat_totals", "stat", "users")
M_StatTotal_Orgs = RegGauge("stat_totals", "stat", "orgs")
M_StatTotal_Playlists = RegGauge("stat_totals", "stat", "playlists")
} }

View File

@ -15,6 +15,7 @@ import (
) )
var metricsLogger log.Logger = log.New("metrics") var metricsLogger log.Logger = log.New("metrics")
var metricPublishCounter int64 = 0
func Init() { func Init() {
settings := readSettings() settings := readSettings()
@ -45,12 +46,33 @@ func sendMetrics(settings *MetricSettings) {
return return
} }
updateTotalStats()
metrics := MetricStats.GetSnapshots() metrics := MetricStats.GetSnapshots()
for _, publisher := range settings.Publishers { for _, publisher := range settings.Publishers {
publisher.Publish(metrics) publisher.Publish(metrics)
} }
} }
func updateTotalStats() {
// every interval also publish totals
metricPublishCounter++
if metricPublishCounter%10 == 0 {
// get stats
statsQuery := m.GetSystemStatsQuery{}
if err := bus.Dispatch(&statsQuery); err != nil {
metricsLogger.Error("Failed to get system stats", "error", err)
return
}
M_StatTotal_Dashboards.Update(statsQuery.Result.DashboardCount)
M_StatTotal_Users.Update(statsQuery.Result.UserCount)
M_StatTotal_Playlists.Update(statsQuery.Result.PlaylistCount)
M_StatTotal_Orgs.Update(statsQuery.Result.OrgCount)
}
}
func sendUsageStats() { func sendUsageStats() {
if !setting.ReportingEnabled { if !setting.ReportingEnabled {
return return

View File

@ -22,6 +22,7 @@ type Context struct {
Session SessionStore Session SessionStore
IsSignedIn bool IsSignedIn bool
IsRenderCall bool
AllowAnonymous bool AllowAnonymous bool
Logger log.Logger Logger log.Logger
} }
@ -42,11 +43,11 @@ func GetContextHandler() macaron.Handler {
// then init session and look for userId in session // then init session and look for userId in session
// then look for api key in session (special case for render calls via api) // then look for api key in session (special case for render calls via api)
// then test if anonymous access is enabled // then test if anonymous access is enabled
if initContextWithApiKey(ctx) || if initContextWithRenderAuth(ctx) ||
initContextWithApiKey(ctx) ||
initContextWithBasicAuth(ctx) || initContextWithBasicAuth(ctx) ||
initContextWithAuthProxy(ctx) || initContextWithAuthProxy(ctx) ||
initContextWithUserSessionCookie(ctx) || initContextWithUserSessionCookie(ctx) ||
initContextWithApiKeyFromSession(ctx) ||
initContextWithAnonymousUser(ctx) { initContextWithAnonymousUser(ctx) {
} }
@ -176,29 +177,6 @@ func initContextWithBasicAuth(ctx *Context) bool {
} }
} }
// special case for panel render calls with api key
func initContextWithApiKeyFromSession(ctx *Context) bool {
keyId := ctx.Session.Get(SESS_KEY_APIKEY)
if keyId == nil {
return false
}
keyQuery := m.GetApiKeyByIdQuery{ApiKeyId: keyId.(int64)}
if err := bus.Dispatch(&keyQuery); err != nil {
ctx.Logger.Error("Failed to get api key by id", "id", keyId, "error", err)
return false
} else {
apikey := keyQuery.Result
ctx.IsSignedIn = true
ctx.SignedInUser = &m.SignedInUser{}
ctx.OrgRole = apikey.Role
ctx.ApiKeyId = apikey.Id
ctx.OrgId = apikey.OrgId
return true
}
}
// Handle handles and logs error by given status. // Handle handles and logs error by given status.
func (ctx *Context) Handle(status int, title string, err error) { func (ctx *Context) Handle(status int, title string, err error) {
if err != nil { if err != nil {

View File

@ -0,0 +1,55 @@
package middleware
import (
"sync"
m "github.com/grafana/grafana/pkg/models"
"github.com/grafana/grafana/pkg/util"
)
var renderKeysLock sync.Mutex
var renderKeys map[string]*m.SignedInUser = make(map[string]*m.SignedInUser)
func initContextWithRenderAuth(ctx *Context) bool {
key := ctx.GetCookie("renderKey")
if key == "" {
return false
}
renderKeysLock.Lock()
defer renderKeysLock.Unlock()
if renderUser, exists := renderKeys[key]; !exists {
ctx.JsonApiErr(401, "Invalid Render Key", nil)
return true
} else {
ctx.IsSignedIn = true
ctx.SignedInUser = renderUser
ctx.IsRenderCall = true
return true
}
}
type renderContextFunc func(key string) (string, error)
func AddRenderAuthKey(orgId int64) string {
renderKeysLock.Lock()
key := util.GetRandomString(32)
renderKeys[key] = &m.SignedInUser{
OrgId: orgId,
OrgRole: m.ROLE_VIEWER,
}
renderKeysLock.Unlock()
return key
}
func RemoveRenderAuthKey(key string) {
renderKeysLock.Lock()
delete(renderKeys, key)
renderKeysLock.Unlock()
}

View File

@ -13,7 +13,6 @@ import (
const ( const (
SESS_KEY_USERID = "uid" SESS_KEY_USERID = "uid"
SESS_KEY_APIKEY = "apikey_id" // used fror render requests with api keys
) )
var sessionManager *session.Manager var sessionManager *session.Manager

View File

@ -135,3 +135,18 @@ type GetAlertByIdQuery struct {
Result *Alert Result *Alert
} }
type GetAlertStatesForDashboardQuery struct {
OrgId int64
DashboardId int64
Result []*AlertStateInfoDTO
}
type AlertStateInfoDTO struct {
Id int64 `json:"id"`
DashboardId int64 `json:"dashboardId"`
PanelId int64 `json:"panelId"`
State AlertStateType `json:"state"`
NewStateDate time.Time `json:"newStateDate"`
}

View File

@ -63,6 +63,9 @@ type DeleteDashboardSnapshotCommand struct {
DeleteKey string `json:"-"` DeleteKey string `json:"-"`
} }
type DeleteExpiredSnapshotsCommand struct {
}
type GetDashboardSnapshotQuery struct { type GetDashboardSnapshotQuery struct {
Key string Key string

View File

@ -7,4 +7,5 @@ const (
GOOGLE GOOGLE
TWITTER TWITTER
GENERIC GENERIC
GRAFANANET
) )

View File

@ -57,17 +57,6 @@ func (this PlaylistDashboard) TableName() string {
type Playlists []*Playlist type Playlists []*Playlist
type PlaylistDashboards []*PlaylistDashboard type PlaylistDashboards []*PlaylistDashboard
//
// DTOS
//
type PlaylistDashboardDto struct {
Id int64 `json:"id"`
Slug string `json:"slug"`
Title string `json:"title"`
Uri string `json:"uri"`
}
// //
// COMMANDS // COMMANDS
// //

10
pkg/models/server.go Normal file
View File

@ -0,0 +1,10 @@
package models
import "context"
type GrafanaServer interface {
context.Context
Start()
Shutdown(code int, reason string)
}

View File

@ -1,10 +1,10 @@
package models package models
type SystemStats struct { type SystemStats struct {
DashboardCount int DashboardCount int64
UserCount int UserCount int64
OrgCount int OrgCount int64
PlaylistCount int PlaylistCount int64
} }
type DataSourceStats struct { type DataSourceStats struct {

View File

@ -53,6 +53,7 @@ type CreateUserCommand struct {
EmailVerified bool EmailVerified bool
IsAdmin bool IsAdmin bool
SkipOrgSetup bool SkipOrgSetup bool
DefaultOrgRole string
Result User Result User
} }

View File

@ -6,6 +6,7 @@ type DataSourcePlugin struct {
FrontendPluginBase FrontendPluginBase
Annotations bool `json:"annotations"` Annotations bool `json:"annotations"`
Metrics bool `json:"metrics"` Metrics bool `json:"metrics"`
Alerting bool `json:"alerting"`
BuiltIn bool `json:"builtIn"` BuiltIn bool `json:"builtIn"`
Mixed bool `json:"mixed"` Mixed bool `json:"mixed"`
App string `json:"app"` App string `json:"app"`

View File

@ -43,7 +43,12 @@ func (fp *FrontendPluginBase) setPathsBasedOnApp(app *AppPlugin) {
appSubPath := strings.Replace(fp.PluginDir, app.PluginDir, "", 1) appSubPath := strings.Replace(fp.PluginDir, app.PluginDir, "", 1)
fp.IncludedInAppId = app.Id fp.IncludedInAppId = app.Id
fp.BaseUrl = app.BaseUrl fp.BaseUrl = app.BaseUrl
if isExternalPlugin(app.PluginDir) {
fp.Module = util.JoinUrlFragments("plugins/"+app.Id, appSubPath) + "/module" fp.Module = util.JoinUrlFragments("plugins/"+app.Id, appSubPath) + "/module"
} else {
fp.Module = util.JoinUrlFragments("app/plugins/app/"+app.Id, appSubPath) + "/module"
}
} }
func (fp *FrontendPluginBase) handleModuleDefaults() { func (fp *FrontendPluginBase) handleModuleDefaults() {

View File

@ -9,6 +9,11 @@ import (
"github.com/grafana/grafana/pkg/log" "github.com/grafana/grafana/pkg/log"
"github.com/grafana/grafana/pkg/setting" "github.com/grafana/grafana/pkg/setting"
"github.com/hashicorp/go-version"
)
var (
httpClient http.Client = http.Client{Timeout: time.Duration(10 * time.Second)}
) )
type GrafanaNetPlugin struct { type GrafanaNetPlugin struct {
@ -39,26 +44,23 @@ func StartPluginUpdateChecker() {
} }
func getAllExternalPluginSlugs() string { func getAllExternalPluginSlugs() string {
str := "" var result []string
for _, plug := range Plugins { for _, plug := range Plugins {
if plug.IsCorePlugin { if plug.IsCorePlugin {
continue continue
} }
str += plug.Id + "," result = append(result, plug.Id)
} }
return str return strings.Join(result, ",")
} }
func checkForUpdates() { func checkForUpdates() {
log.Trace("Checking for updates") log.Trace("Checking for updates")
client := http.Client{Timeout: time.Duration(5 * time.Second)}
pluginSlugs := getAllExternalPluginSlugs() pluginSlugs := getAllExternalPluginSlugs()
resp, err := client.Get("https://grafana.net/api/plugins/versioncheck?slugIn=" + pluginSlugs + "&grafanaVersion=" + setting.BuildVersion) resp, err := httpClient.Get("https://grafana.net/api/plugins/versioncheck?slugIn=" + pluginSlugs + "&grafanaVersion=" + setting.BuildVersion)
if err != nil { if err != nil {
log.Trace("Failed to get plugins repo from grafana.net, %v", err.Error()) log.Trace("Failed to get plugins repo from grafana.net, %v", err.Error())
@ -84,12 +86,20 @@ func checkForUpdates() {
for _, gplug := range gNetPlugins { for _, gplug := range gNetPlugins {
if gplug.Slug == plug.Id { if gplug.Slug == plug.Id {
plug.GrafanaNetVersion = gplug.Version plug.GrafanaNetVersion = gplug.Version
plugVersion, err1 := version.NewVersion(plug.Info.Version)
gplugVersion, err2 := version.NewVersion(gplug.Version)
if err1 != nil || err2 != nil {
plug.GrafanaNetHasUpdate = plug.Info.Version != plug.GrafanaNetVersion plug.GrafanaNetHasUpdate = plug.Info.Version != plug.GrafanaNetVersion
} else {
plug.GrafanaNetHasUpdate = plugVersion.LessThan(gplugVersion)
}
} }
} }
} }
resp2, err := client.Get("https://raw.githubusercontent.com/grafana/grafana/master/latest.json") resp2, err := httpClient.Get("https://raw.githubusercontent.com/grafana/grafana/master/latest.json")
if err != nil { if err != nil {
log.Trace("Failed to get latest.json repo from github: %v", err.Error()) log.Trace("Failed to get latest.json repo from github: %v", err.Error())
return return
@ -116,4 +126,11 @@ func checkForUpdates() {
GrafanaLatestVersion = githubLatest.Stable GrafanaLatestVersion = githubLatest.Stable
GrafanaHasUpdate = githubLatest.Stable != setting.BuildVersion GrafanaHasUpdate = githubLatest.Stable != setting.BuildVersion
} }
currVersion, err1 := version.NewVersion(setting.BuildVersion)
latestVersion, err2 := version.NewVersion(GrafanaLatestVersion)
if err1 == nil && err2 == nil {
GrafanaHasUpdate = currVersion.LessThan(latestVersion)
}
} }

View File

@ -5,6 +5,7 @@ import (
"github.com/grafana/grafana/pkg/components/simplejson" "github.com/grafana/grafana/pkg/components/simplejson"
"github.com/grafana/grafana/pkg/services/alerting" "github.com/grafana/grafana/pkg/services/alerting"
"gopkg.in/guregu/null.v3"
) )
var ( var (
@ -13,13 +14,13 @@ var (
) )
type AlertEvaluator interface { type AlertEvaluator interface {
Eval(reducedValue *float64) bool Eval(reducedValue null.Float) bool
} }
type NoDataEvaluator struct{} type NoDataEvaluator struct{}
func (e *NoDataEvaluator) Eval(reducedValue *float64) bool { func (e *NoDataEvaluator) Eval(reducedValue null.Float) bool {
return reducedValue == nil return reducedValue.Valid == false
} }
type ThresholdEvaluator struct { type ThresholdEvaluator struct {
@ -43,16 +44,16 @@ func newThresholdEvaludator(typ string, model *simplejson.Json) (*ThresholdEvalu
return defaultEval, nil return defaultEval, nil
} }
func (e *ThresholdEvaluator) Eval(reducedValue *float64) bool { func (e *ThresholdEvaluator) Eval(reducedValue null.Float) bool {
if reducedValue == nil { if reducedValue.Valid == false {
return false return false
} }
switch e.Type { switch e.Type {
case "gt": case "gt":
return *reducedValue > e.Threshold return reducedValue.Float64 > e.Threshold
case "lt": case "lt":
return *reducedValue < e.Threshold return reducedValue.Float64 < e.Threshold
} }
return false return false
@ -86,16 +87,18 @@ func newRangedEvaluator(typ string, model *simplejson.Json) (*RangedEvaluator, e
return rangedEval, nil return rangedEval, nil
} }
func (e *RangedEvaluator) Eval(reducedValue *float64) bool { func (e *RangedEvaluator) Eval(reducedValue null.Float) bool {
if reducedValue == nil { if reducedValue.Valid == false {
return false return false
} }
floatValue := reducedValue.Float64
switch e.Type { switch e.Type {
case "within_range": case "within_range":
return (e.Lower < *reducedValue && e.Upper > *reducedValue) || (e.Upper < *reducedValue && e.Lower > *reducedValue) return (e.Lower < floatValue && e.Upper > floatValue) || (e.Upper < floatValue && e.Lower > floatValue)
case "outside_range": case "outside_range":
return (e.Upper < *reducedValue && e.Lower < *reducedValue) || (e.Upper > *reducedValue && e.Lower > *reducedValue) return (e.Upper < floatValue && e.Lower < floatValue) || (e.Upper > floatValue && e.Lower > floatValue)
} }
return false return false

View File

@ -3,6 +3,8 @@ package conditions
import ( import (
"testing" "testing"
"gopkg.in/guregu/null.v3"
"github.com/grafana/grafana/pkg/components/simplejson" "github.com/grafana/grafana/pkg/components/simplejson"
. "github.com/smartystreets/goconvey/convey" . "github.com/smartystreets/goconvey/convey"
) )
@ -14,7 +16,7 @@ func evalutorScenario(json string, reducedValue float64, datapoints ...float64)
evaluator, err := NewAlertEvaluator(jsonModel) evaluator, err := NewAlertEvaluator(jsonModel)
So(err, ShouldBeNil) So(err, ShouldBeNil)
return evaluator.Eval(&reducedValue) return evaluator.Eval(null.FloatFrom(reducedValue))
} }
func TestEvalutors(t *testing.T) { func TestEvalutors(t *testing.T) {
@ -51,6 +53,6 @@ func TestEvalutors(t *testing.T) {
evaluator, err := NewAlertEvaluator(jsonModel) evaluator, err := NewAlertEvaluator(jsonModel)
So(err, ShouldBeNil) So(err, ShouldBeNil)
So(evaluator.Eval(nil), ShouldBeTrue) So(evaluator.Eval(null.FloatFromPtr(nil)), ShouldBeTrue)
}) })
} }

View File

@ -2,6 +2,8 @@ package conditions
import ( import (
"fmt" "fmt"
"strings"
"time"
"github.com/grafana/grafana/pkg/bus" "github.com/grafana/grafana/pkg/bus"
"github.com/grafana/grafana/pkg/components/simplejson" "github.com/grafana/grafana/pkg/components/simplejson"
@ -32,7 +34,8 @@ type AlertQuery struct {
} }
func (c *QueryCondition) Eval(context *alerting.EvalContext) { func (c *QueryCondition) Eval(context *alerting.EvalContext) {
seriesList, err := c.executeQuery(context) timeRange := tsdb.NewTimeRange(c.Query.From, c.Query.To)
seriesList, err := c.executeQuery(context, timeRange)
if err != nil { if err != nil {
context.Error = err context.Error = err
return return
@ -43,21 +46,21 @@ func (c *QueryCondition) Eval(context *alerting.EvalContext) {
reducedValue := c.Reducer.Reduce(series) reducedValue := c.Reducer.Reduce(series)
evalMatch := c.Evaluator.Eval(reducedValue) evalMatch := c.Evaluator.Eval(reducedValue)
if reducedValue == nil { if reducedValue.Valid == false {
emptySerieCount++ emptySerieCount++
continue continue
} }
if context.IsTestRun { if context.IsTestRun {
context.Logs = append(context.Logs, &alerting.ResultLogEntry{ context.Logs = append(context.Logs, &alerting.ResultLogEntry{
Message: fmt.Sprintf("Condition[%d]: Eval: %v, Metric: %s, Value: %1.3f", c.Index, evalMatch, series.Name, *reducedValue), Message: fmt.Sprintf("Condition[%d]: Eval: %v, Metric: %s, Value: %1.3f", c.Index, evalMatch, series.Name, reducedValue.Float64),
}) })
} }
if evalMatch { if evalMatch {
context.EvalMatches = append(context.EvalMatches, &alerting.EvalMatch{ context.EvalMatches = append(context.EvalMatches, &alerting.EvalMatch{
Metric: series.Name, Metric: series.Name,
Value: *reducedValue, Value: reducedValue.Float64,
}) })
} }
} }
@ -66,7 +69,7 @@ func (c *QueryCondition) Eval(context *alerting.EvalContext) {
context.Firing = len(context.EvalMatches) > 0 context.Firing = len(context.EvalMatches) > 0
} }
func (c *QueryCondition) executeQuery(context *alerting.EvalContext) (tsdb.TimeSeriesSlice, error) { func (c *QueryCondition) executeQuery(context *alerting.EvalContext, timeRange *tsdb.TimeRange) (tsdb.TimeSeriesSlice, error) {
getDsInfo := &m.GetDataSourceByIdQuery{ getDsInfo := &m.GetDataSourceByIdQuery{
Id: c.Query.DatasourceId, Id: c.Query.DatasourceId,
OrgId: context.Rule.OrgId, OrgId: context.Rule.OrgId,
@ -76,7 +79,7 @@ func (c *QueryCondition) executeQuery(context *alerting.EvalContext) (tsdb.TimeS
return nil, fmt.Errorf("Could not find datasource") return nil, fmt.Errorf("Could not find datasource")
} }
req := c.getRequestForAlertRule(getDsInfo.Result) req := c.getRequestForAlertRule(getDsInfo.Result, timeRange)
result := make(tsdb.TimeSeriesSlice, 0) result := make(tsdb.TimeSeriesSlice, 0)
resp, err := c.HandleRequest(req) resp, err := c.HandleRequest(req)
@ -102,16 +105,13 @@ func (c *QueryCondition) executeQuery(context *alerting.EvalContext) (tsdb.TimeS
return result, nil return result, nil
} }
func (c *QueryCondition) getRequestForAlertRule(datasource *m.DataSource) *tsdb.Request { func (c *QueryCondition) getRequestForAlertRule(datasource *m.DataSource, timeRange *tsdb.TimeRange) *tsdb.Request {
req := &tsdb.Request{ req := &tsdb.Request{
TimeRange: tsdb.TimeRange{ TimeRange: timeRange,
From: c.Query.From,
To: c.Query.To,
},
Queries: []*tsdb.Query{ Queries: []*tsdb.Query{
{ {
RefId: "A", RefId: "A",
Query: c.Query.Model.Get("target").MustString(), Model: c.Query.Model,
DataSource: &tsdb.DataSourceInfo{ DataSource: &tsdb.DataSourceInfo{
Id: datasource.Id, Id: datasource.Id,
Name: datasource.Name, Name: datasource.Name,
@ -141,6 +141,15 @@ func NewQueryCondition(model *simplejson.Json, index int) (*QueryCondition, erro
condition.Query.Model = queryJson.Get("model") condition.Query.Model = queryJson.Get("model")
condition.Query.From = queryJson.Get("params").MustArray()[1].(string) condition.Query.From = queryJson.Get("params").MustArray()[1].(string)
condition.Query.To = queryJson.Get("params").MustArray()[2].(string) condition.Query.To = queryJson.Get("params").MustArray()[2].(string)
if err := validateFromValue(condition.Query.From); err != nil {
return nil, err
}
if err := validateToValue(condition.Query.To); err != nil {
return nil, err
}
condition.Query.DatasourceId = queryJson.Get("datasourceId").MustInt64() condition.Query.DatasourceId = queryJson.Get("datasourceId").MustInt64()
reducerJson := model.Get("reducer") reducerJson := model.Get("reducer")
@ -155,3 +164,26 @@ func NewQueryCondition(model *simplejson.Json, index int) (*QueryCondition, erro
condition.Evaluator = evaluator condition.Evaluator = evaluator
return &condition, nil return &condition, nil
} }
func validateFromValue(from string) error {
fromRaw := strings.Replace(from, "now-", "", 1)
_, err := time.ParseDuration("-" + fromRaw)
return err
}
func validateToValue(to string) error {
if to == "now" {
return nil
} else if strings.HasPrefix(to, "now-") {
withoutNow := strings.Replace(to, "now-", "", 1)
_, err := time.ParseDuration("-" + withoutNow)
if err == nil {
return nil
}
}
_, err := time.ParseDuration(to)
return err
}

View File

@ -3,6 +3,8 @@ package conditions
import ( import (
"testing" "testing"
null "gopkg.in/guregu/null.v3"
"github.com/grafana/grafana/pkg/bus" "github.com/grafana/grafana/pkg/bus"
"github.com/grafana/grafana/pkg/components/simplejson" "github.com/grafana/grafana/pkg/components/simplejson"
m "github.com/grafana/grafana/pkg/models" m "github.com/grafana/grafana/pkg/models"
@ -41,9 +43,8 @@ func TestQueryCondition(t *testing.T) {
}) })
Convey("should fire when avg is above 100", func() { Convey("should fire when avg is above 100", func() {
one := float64(120) points := tsdb.NewTimeSeriesPointsFromArgs(120, 0)
two := float64(0) ctx.series = tsdb.TimeSeriesSlice{tsdb.NewTimeSeries("test1", points)}
ctx.series = tsdb.TimeSeriesSlice{tsdb.NewTimeSeries("test1", [][2]*float64{{&one, &two}})}
ctx.exec() ctx.exec()
So(ctx.result.Error, ShouldBeNil) So(ctx.result.Error, ShouldBeNil)
@ -51,9 +52,8 @@ func TestQueryCondition(t *testing.T) {
}) })
Convey("Should not fire when avg is below 100", func() { Convey("Should not fire when avg is below 100", func() {
one := float64(90) points := tsdb.NewTimeSeriesPointsFromArgs(90, 0)
two := float64(0) ctx.series = tsdb.TimeSeriesSlice{tsdb.NewTimeSeries("test1", points)}
ctx.series = tsdb.TimeSeriesSlice{tsdb.NewTimeSeries("test1", [][2]*float64{{&one, &two}})}
ctx.exec() ctx.exec()
So(ctx.result.Error, ShouldBeNil) So(ctx.result.Error, ShouldBeNil)
@ -61,11 +61,9 @@ func TestQueryCondition(t *testing.T) {
}) })
Convey("Should fire if only first serie matches", func() { Convey("Should fire if only first serie matches", func() {
one := float64(120)
two := float64(0)
ctx.series = tsdb.TimeSeriesSlice{ ctx.series = tsdb.TimeSeriesSlice{
tsdb.NewTimeSeries("test1", [][2]*float64{{&one, &two}}), tsdb.NewTimeSeries("test1", tsdb.NewTimeSeriesPointsFromArgs(120, 0)),
tsdb.NewTimeSeries("test2", [][2]*float64{{&two, &two}}), tsdb.NewTimeSeries("test2", tsdb.NewTimeSeriesPointsFromArgs(0, 0)),
} }
ctx.exec() ctx.exec()
@ -76,8 +74,8 @@ func TestQueryCondition(t *testing.T) {
Convey("Empty series", func() { Convey("Empty series", func() {
Convey("Should set NoDataFound both series are empty", func() { Convey("Should set NoDataFound both series are empty", func() {
ctx.series = tsdb.TimeSeriesSlice{ ctx.series = tsdb.TimeSeriesSlice{
tsdb.NewTimeSeries("test1", [][2]*float64{}), tsdb.NewTimeSeries("test1", tsdb.NewTimeSeriesPointsFromArgs()),
tsdb.NewTimeSeries("test2", [][2]*float64{}), tsdb.NewTimeSeries("test2", tsdb.NewTimeSeriesPointsFromArgs()),
} }
ctx.exec() ctx.exec()
@ -86,10 +84,9 @@ func TestQueryCondition(t *testing.T) {
}) })
Convey("Should set NoDataFound both series contains null", func() { Convey("Should set NoDataFound both series contains null", func() {
one := float64(120)
ctx.series = tsdb.TimeSeriesSlice{ ctx.series = tsdb.TimeSeriesSlice{
tsdb.NewTimeSeries("test1", [][2]*float64{{nil, &one}}), tsdb.NewTimeSeries("test1", tsdb.TimeSeriesPoints{tsdb.TimePoint{null.FloatFromPtr(nil), null.FloatFrom(0)}}),
tsdb.NewTimeSeries("test2", [][2]*float64{{nil, &one}}), tsdb.NewTimeSeries("test2", tsdb.TimeSeriesPoints{tsdb.TimePoint{null.FloatFromPtr(nil), null.FloatFrom(0)}}),
} }
ctx.exec() ctx.exec()
@ -98,11 +95,9 @@ func TestQueryCondition(t *testing.T) {
}) })
Convey("Should not set NoDataFound if one serie is empty", func() { Convey("Should not set NoDataFound if one serie is empty", func() {
one := float64(120)
two := float64(0)
ctx.series = tsdb.TimeSeriesSlice{ ctx.series = tsdb.TimeSeriesSlice{
tsdb.NewTimeSeries("test1", [][2]*float64{}), tsdb.NewTimeSeries("test1", tsdb.NewTimeSeriesPointsFromArgs()),
tsdb.NewTimeSeries("test2", [][2]*float64{{&one, &two}}), tsdb.NewTimeSeries("test2", tsdb.NewTimeSeriesPointsFromArgs(120, 0)),
} }
ctx.exec() ctx.exec()

View File

@ -4,19 +4,20 @@ import (
"math" "math"
"github.com/grafana/grafana/pkg/tsdb" "github.com/grafana/grafana/pkg/tsdb"
"gopkg.in/guregu/null.v3"
) )
type QueryReducer interface { type QueryReducer interface {
Reduce(timeSeries *tsdb.TimeSeries) *float64 Reduce(timeSeries *tsdb.TimeSeries) null.Float
} }
type SimpleReducer struct { type SimpleReducer struct {
Type string Type string
} }
func (s *SimpleReducer) Reduce(series *tsdb.TimeSeries) *float64 { func (s *SimpleReducer) Reduce(series *tsdb.TimeSeries) null.Float {
if len(series.Points) == 0 { if len(series.Points) == 0 {
return nil return null.FloatFromPtr(nil)
} }
value := float64(0) value := float64(0)
@ -25,36 +26,36 @@ func (s *SimpleReducer) Reduce(series *tsdb.TimeSeries) *float64 {
switch s.Type { switch s.Type {
case "avg": case "avg":
for _, point := range series.Points { for _, point := range series.Points {
if point[0] != nil { if point[0].Valid {
value += *point[0] value += point[0].Float64
allNull = false allNull = false
} }
} }
value = value / float64(len(series.Points)) value = value / float64(len(series.Points))
case "sum": case "sum":
for _, point := range series.Points { for _, point := range series.Points {
if point[0] != nil { if point[0].Valid {
value += *point[0] value += point[0].Float64
allNull = false allNull = false
} }
} }
case "min": case "min":
value = math.MaxFloat64 value = math.MaxFloat64
for _, point := range series.Points { for _, point := range series.Points {
if point[0] != nil { if point[0].Valid {
allNull = false allNull = false
if value > *point[0] { if value > point[0].Float64 {
value = *point[0] value = point[0].Float64
} }
} }
} }
case "max": case "max":
value = -math.MaxFloat64 value = -math.MaxFloat64
for _, point := range series.Points { for _, point := range series.Points {
if point[0] != nil { if point[0].Valid {
allNull = false allNull = false
if value < *point[0] { if value < point[0].Float64 {
value = *point[0] value = point[0].Float64
} }
} }
} }
@ -64,10 +65,10 @@ func (s *SimpleReducer) Reduce(series *tsdb.TimeSeries) *float64 {
} }
if allNull { if allNull {
return nil return null.FloatFromPtr(nil)
} }
return &value return null.FloatFrom(value)
} }
func NewSimpleReducer(typ string) *SimpleReducer { func NewSimpleReducer(typ string) *SimpleReducer {

View File

@ -10,44 +10,41 @@ import (
func TestSimpleReducer(t *testing.T) { func TestSimpleReducer(t *testing.T) {
Convey("Test simple reducer by calculating", t, func() { Convey("Test simple reducer by calculating", t, func() {
Convey("avg", func() { Convey("avg", func() {
result := *testReducer("avg", 1, 2, 3) result := testReducer("avg", 1, 2, 3)
So(result, ShouldEqual, float64(2)) So(result, ShouldEqual, float64(2))
}) })
Convey("sum", func() { Convey("sum", func() {
result := *testReducer("sum", 1, 2, 3) result := testReducer("sum", 1, 2, 3)
So(result, ShouldEqual, float64(6)) So(result, ShouldEqual, float64(6))
}) })
Convey("min", func() { Convey("min", func() {
result := *testReducer("min", 3, 2, 1) result := testReducer("min", 3, 2, 1)
So(result, ShouldEqual, float64(1)) So(result, ShouldEqual, float64(1))
}) })
Convey("max", func() { Convey("max", func() {
result := *testReducer("max", 1, 2, 3) result := testReducer("max", 1, 2, 3)
So(result, ShouldEqual, float64(3)) So(result, ShouldEqual, float64(3))
}) })
Convey("count", func() { Convey("count", func() {
result := *testReducer("count", 1, 2, 3000) result := testReducer("count", 1, 2, 3000)
So(result, ShouldEqual, float64(3)) So(result, ShouldEqual, float64(3))
}) })
}) })
} }
func testReducer(typ string, datapoints ...float64) *float64 { func testReducer(typ string, datapoints ...float64) float64 {
reducer := NewSimpleReducer(typ) reducer := NewSimpleReducer(typ)
var timeserie [][2]*float64 series := &tsdb.TimeSeries{
dummieTimestamp := float64(521452145) Name: "test time serie",
}
for idx := range datapoints { for idx := range datapoints {
timeserie = append(timeserie, [2]*float64{&datapoints[idx], &dummieTimestamp}) series.Points = append(series.Points, tsdb.NewTimePoint(datapoints[idx], 1234134))
} }
tsdb := &tsdb.TimeSeries{ return reducer.Reduce(series).Float64
Name: "test time serie",
Points: timeserie,
}
return reducer.Reduce(tsdb)
} }

View File

@ -1,10 +1,12 @@
package alerting package alerting
import ( import (
"context"
"time" "time"
"github.com/benbjohnson/clock" "github.com/benbjohnson/clock"
"github.com/grafana/grafana/pkg/log" "github.com/grafana/grafana/pkg/log"
"golang.org/x/sync/errgroup"
) )
type Engine struct { type Engine struct {
@ -34,12 +36,19 @@ func NewEngine() *Engine {
return e return e
} }
func (e *Engine) Start() { func (e *Engine) Run(ctx context.Context) error {
e.log.Info("Starting Alerting Engine") e.log.Info("Initializing Alerting")
go e.alertingTicker() g, ctx := errgroup.WithContext(ctx)
go e.execDispatcher()
go e.resultDispatcher() g.Go(func() error { return e.alertingTicker(ctx) })
g.Go(func() error { return e.execDispatcher(ctx) })
g.Go(func() error { return e.resultDispatcher(ctx) })
err := g.Wait()
e.log.Info("Stopped Alerting", "reason", err)
return err
} }
func (e *Engine) Stop() { func (e *Engine) Stop() {
@ -47,7 +56,7 @@ func (e *Engine) Stop() {
close(e.resultQueue) close(e.resultQueue)
} }
func (e *Engine) alertingTicker() { func (e *Engine) alertingTicker(grafanaCtx context.Context) error {
defer func() { defer func() {
if err := recover(); err != nil { if err := recover(); err != nil {
e.log.Error("Scheduler Panic: stopping alertingTicker", "error", err, "stack", log.Stack(1)) e.log.Error("Scheduler Panic: stopping alertingTicker", "error", err, "stack", log.Stack(1))
@ -58,6 +67,8 @@ func (e *Engine) alertingTicker() {
for { for {
select { select {
case <-grafanaCtx.Done():
return grafanaCtx.Err()
case tick := <-e.ticker.C: case tick := <-e.ticker.C:
// TEMP SOLUTION update rules ever tenth tick // TEMP SOLUTION update rules ever tenth tick
if tickIndex%10 == 0 { if tickIndex%10 == 0 {
@ -70,37 +81,69 @@ func (e *Engine) alertingTicker() {
} }
} }
func (e *Engine) execDispatcher() { func (e *Engine) execDispatcher(grafanaCtx context.Context) error {
for job := range e.execQueue { for {
e.log.Debug("Starting executing alert rule", "alert id", job.Rule.Id) select {
go e.executeJob(job) case <-grafanaCtx.Done():
close(e.resultQueue)
return grafanaCtx.Err()
case job := <-e.execQueue:
go e.executeJob(grafanaCtx, job)
}
} }
} }
func (e *Engine) executeJob(job *Job) { func (e *Engine) executeJob(grafanaCtx context.Context, job *Job) error {
defer func() { defer func() {
if err := recover(); err != nil { if err := recover(); err != nil {
e.log.Error("Execute Alert Panic", "error", err, "stack", log.Stack(1)) e.log.Error("Execute Alert Panic", "error", err, "stack", log.Stack(1))
} }
}() }()
done := make(chan *EvalContext, 1)
go func() {
job.Running = true job.Running = true
context := NewEvalContext(job.Rule) context := NewEvalContext(job.Rule)
e.evalHandler.Eval(context) e.evalHandler.Eval(context)
job.Running = false job.Running = false
done <- context
close(done)
}()
e.resultQueue <- context select {
case <-grafanaCtx.Done():
return grafanaCtx.Err()
case evalContext := <-done:
e.resultQueue <- evalContext
} }
func (e *Engine) resultDispatcher() { return nil
}
func (e *Engine) resultDispatcher(grafanaCtx context.Context) error {
for {
select {
case <-grafanaCtx.Done():
//handle all responses before shutting down.
for result := range e.resultQueue {
e.handleResponse(result)
}
return grafanaCtx.Err()
case result := <-e.resultQueue:
e.handleResponse(result)
}
}
}
func (e *Engine) handleResponse(result *EvalContext) {
defer func() { defer func() {
if err := recover(); err != nil { if err := recover(); err != nil {
e.log.Error("Panic in resultDispatcher", "error", err, "stack", log.Stack(1)) e.log.Error("Panic in resultDispatcher", "error", err, "stack", log.Stack(1))
} }
}() }()
for result := range e.resultQueue {
e.log.Debug("Alert Rule Result", "ruleId", result.Rule.Id, "firing", result.Firing) e.log.Debug("Alert Rule Result", "ruleId", result.Rule.Id, "firing", result.Firing)
e.resultHandler.Handle(result) e.resultHandler.Handle(result)
} }
}

View File

@ -71,7 +71,7 @@ func (c *EvalContext) GetNotificationTitle() string {
return "[" + c.GetStateModel().Text + "] " + c.Rule.Name return "[" + c.GetStateModel().Text + "] " + c.Rule.Name
} }
func (c *EvalContext) getDashboardSlug() (string, error) { func (c *EvalContext) GetDashboardSlug() (string, error) {
if c.dashboardSlug != "" { if c.dashboardSlug != "" {
return c.dashboardSlug, nil return c.dashboardSlug, nil
} }
@ -86,7 +86,7 @@ func (c *EvalContext) getDashboardSlug() (string, error) {
} }
func (c *EvalContext) GetRuleUrl() (string, error) { func (c *EvalContext) GetRuleUrl() (string, error) {
if slug, err := c.getDashboardSlug(); err != nil { if slug, err := c.GetDashboardSlug(); err != nil {
return "", err return "", err
} else { } else {
ruleUrl := fmt.Sprintf("%sdashboard/db/%s?fullscreen&edit&tab=alert&panelId=%d", setting.AppUrl, slug, c.Rule.PanelId) ruleUrl := fmt.Sprintf("%sdashboard/db/%s?fullscreen&edit&tab=alert&panelId=%d", setting.AppUrl, slug, c.Rule.PanelId)
@ -94,15 +94,6 @@ func (c *EvalContext) GetRuleUrl() (string, error) {
} }
} }
func (c *EvalContext) GetImageUrl() (string, error) {
if slug, err := c.getDashboardSlug(); err != nil {
return "", err
} else {
ruleUrl := fmt.Sprintf("%sdashboard-solo/db/%s?&panelId=%d", setting.AppUrl, slug, c.Rule.PanelId)
return ruleUrl, nil
}
}
func NewEvalContext(rule *Rule) *EvalContext { func NewEvalContext(rule *Rule) *EvalContext {
return &EvalContext{ return &EvalContext{
StartTime: time.Now(), StartTime: time.Now(),

View File

@ -20,7 +20,7 @@ type DefaultEvalHandler struct {
func NewEvalHandler() *DefaultEvalHandler { func NewEvalHandler() *DefaultEvalHandler {
return &DefaultEvalHandler{ return &DefaultEvalHandler{
log: log.New("alerting.evalHandler"), log: log.New("alerting.evalHandler"),
alertJobTimeout: time.Second * 10, alertJobTimeout: time.Second * 15,
} }
} }

View File

@ -74,9 +74,9 @@ func (e *DashAlertExtractor) GetAlerts() ([]*m.Alert, error) {
continue continue
} }
// backward compatability check, can be removed later
enabled, hasEnabled := jsonAlert.CheckGet("enabled") enabled, hasEnabled := jsonAlert.CheckGet("enabled")
if hasEnabled && enabled.MustBool() == false {
if !hasEnabled || !enabled.MustBool() {
continue continue
} }

View File

@ -42,7 +42,6 @@ func TestAlertRuleExtraction(t *testing.T) {
"name": "name1", "name": "name1",
"message": "desc1", "message": "desc1",
"handler": 1, "handler": 1,
"enabled": true,
"frequency": "60s", "frequency": "60s",
"conditions": [ "conditions": [
{ {
@ -66,7 +65,6 @@ func TestAlertRuleExtraction(t *testing.T) {
"name": "name2", "name": "name2",
"message": "desc2", "message": "desc2",
"handler": 0, "handler": 0,
"enabled": true,
"frequency": "60s", "frequency": "60s",
"severity": "warning", "severity": "warning",
"conditions": [ "conditions": [

View File

@ -1,20 +0,0 @@
package init
import (
"github.com/grafana/grafana/pkg/services/alerting"
_ "github.com/grafana/grafana/pkg/services/alerting/conditions"
_ "github.com/grafana/grafana/pkg/services/alerting/notifiers"
"github.com/grafana/grafana/pkg/setting"
_ "github.com/grafana/grafana/pkg/tsdb/graphite"
)
var engine *alerting.Engine
func Init() {
if !setting.AlertingEnabled {
return
}
engine = alerting.NewEngine()
engine.Start()
}

View File

@ -2,6 +2,7 @@ package alerting
import ( import (
"errors" "errors"
"fmt"
"github.com/grafana/grafana/pkg/bus" "github.com/grafana/grafana/pkg/bus"
"github.com/grafana/grafana/pkg/components/imguploader" "github.com/grafana/grafana/pkg/components/imguploader"
@ -60,20 +61,23 @@ func (n *RootNotifier) sendNotifications(notifiers []Notifier, context *EvalCont
} }
} }
func (n *RootNotifier) uploadImage(context *EvalContext) error { func (n *RootNotifier) uploadImage(context *EvalContext) (err error) {
uploader, _ := imguploader.NewImageUploader() uploader, err := imguploader.NewImageUploader()
imageUrl, err := context.GetImageUrl()
if err != nil { if err != nil {
return err return err
} }
renderOpts := &renderer.RenderOpts{ renderOpts := &renderer.RenderOpts{
Url: imageUrl,
Width: "800", Width: "800",
Height: "400", Height: "400",
SessionId: "123", Timeout: "30",
Timeout: "10", OrgId: context.Rule.OrgId,
}
if slug, err := context.GetDashboardSlug(); err != nil {
return err
} else {
renderOpts.Path = fmt.Sprintf("dashboard-solo/db/%s?&panelId=%d", slug, context.Rule.PanelId)
} }
if imagePath, err := renderer.RenderToPng(renderOpts); err != nil { if imagePath, err := renderer.RenderToPng(renderOpts); err != nil {

View File

@ -52,9 +52,8 @@ func (this *WebhookNotifier) Notify(context *alerting.EvalContext) {
bodyJSON.Set("rule_url", ruleUrl) bodyJSON.Set("rule_url", ruleUrl)
} }
imageUrl, err := context.GetImageUrl() if context.ImagePublicUrl != "" {
if err == nil { bodyJSON.Set("image_url", context.ImagePublicUrl)
bodyJSON.Set("image_url", imageUrl)
} }
body, _ := bodyJSON.MarshalJSON() body, _ := bodyJSON.MarshalJSON()

View File

@ -0,0 +1,85 @@
package cleanup
import (
"context"
"io/ioutil"
"os"
"path"
"time"
"golang.org/x/sync/errgroup"
"github.com/grafana/grafana/pkg/bus"
"github.com/grafana/grafana/pkg/log"
m "github.com/grafana/grafana/pkg/models"
"github.com/grafana/grafana/pkg/setting"
)
type CleanUpService struct {
log log.Logger
}
func NewCleanUpService() *CleanUpService {
return &CleanUpService{
log: log.New("cleanup"),
}
}
func (service *CleanUpService) Run(ctx context.Context) error {
service.log.Info("Initializing CleanUpService")
g, _ := errgroup.WithContext(ctx)
g.Go(func() error { return service.start(ctx) })
err := g.Wait()
service.log.Info("Stopped CleanUpService", "reason", err)
return err
}
func (service *CleanUpService) start(ctx context.Context) error {
service.cleanUpTmpFiles()
ticker := time.NewTicker(time.Hour * 1)
for {
select {
case <-ticker.C:
service.cleanUpTmpFiles()
service.deleteExpiredSnapshots()
case <-ctx.Done():
return ctx.Err()
}
}
}
func (service *CleanUpService) cleanUpTmpFiles() {
if _, err := os.Stat(setting.ImagesDir); os.IsNotExist(err) {
return
}
files, err := ioutil.ReadDir(setting.ImagesDir)
if err != nil {
service.log.Error("Problem reading image dir", "error", err)
return
}
var toDelete []os.FileInfo
for _, file := range files {
if file.ModTime().AddDate(0, 0, 1).Before(time.Now()) {
toDelete = append(toDelete, file)
}
}
for _, file := range toDelete {
fullPath := path.Join(setting.ImagesDir, file.Name())
err := os.Remove(fullPath)
if err != nil {
service.log.Error("Failed to delete temp file", "file", file.Name(), "error", err)
}
}
service.log.Debug("Found old rendered image to delete", "deleted", len(toDelete), "keept", len(files))
}
func (service *CleanUpService) deleteExpiredSnapshots() {
bus.Dispatch(&m.DeleteExpiredSnapshotsCommand{})
}

View File

@ -12,6 +12,7 @@ import (
"net/smtp" "net/smtp"
"os" "os"
"strings" "strings"
"time"
"github.com/grafana/grafana/pkg/log" "github.com/grafana/grafana/pkg/log"
"github.com/grafana/grafana/pkg/setting" "github.com/grafana/grafana/pkg/setting"
@ -66,7 +67,7 @@ func sendToSmtpServer(recipients []string, msgContent []byte) error {
tlsconfig.Certificates = []tls.Certificate{cert} tlsconfig.Certificates = []tls.Certificate{cert}
} }
conn, err := net.Dial("tcp", net.JoinHostPort(host, port)) conn, err := net.DialTimeout("tcp", net.JoinHostPort(host, port), time.Second*10)
if err != nil { if err != nil {
return err return err
} }

View File

@ -44,7 +44,7 @@ func sendWebRequest(webhook *Webhook) error {
webhookLog.Debug("Sending webhook", "url", webhook.Url) webhookLog.Debug("Sending webhook", "url", webhook.Url)
client := http.Client{ client := http.Client{
Timeout: time.Duration(3 * time.Second), Timeout: time.Duration(10 * time.Second),
} }
request, err := http.NewRequest("POST", webhook.Url, bytes.NewReader([]byte(webhook.Body))) request, err := http.NewRequest("POST", webhook.Url, bytes.NewReader([]byte(webhook.Body)))

View File

@ -17,6 +17,7 @@ func init() {
bus.AddHandler("sql", DeleteAlertById) bus.AddHandler("sql", DeleteAlertById)
bus.AddHandler("sql", GetAllAlertQueryHandler) bus.AddHandler("sql", GetAllAlertQueryHandler)
bus.AddHandler("sql", SetAlertState) bus.AddHandler("sql", SetAlertState)
bus.AddHandler("sql", GetAlertStatesForDashboard)
} }
func GetAlertById(query *m.GetAlertByIdQuery) error { func GetAlertById(query *m.GetAlertByIdQuery) error {
@ -241,3 +242,19 @@ func SetAlertState(cmd *m.SetAlertStateCommand) error {
return nil return nil
}) })
} }
func GetAlertStatesForDashboard(query *m.GetAlertStatesForDashboardQuery) error {
var rawSql = `SELECT
id,
dashboard_id,
panel_id,
state,
new_state_date
FROM alert
WHERE org_id = ? AND dashboard_id = ?`
query.Result = make([]*m.AlertStateInfoDTO, 0)
err := x.Sql(rawSql, query.OrgId, query.DashboardId).Find(&query.Result)
return err
}

View File

@ -66,7 +66,8 @@ func GetAlertNotificationsToSend(query *m.GetAlertNotificationsToSendQuery) erro
sql.WriteString(` WHERE alert_notification.org_id = ?`) sql.WriteString(` WHERE alert_notification.org_id = ?`)
params = append(params, query.OrgId) params = append(params, query.OrgId)
sql.WriteString(` AND ((alert_notification.is_default = 1)`) sql.WriteString(` AND ((alert_notification.is_default = ?)`)
params = append(params, dialect.BooleanStr(true))
if len(query.Ids) > 0 { if len(query.Ids) > 0 {
sql.WriteString(` OR alert_notification.id IN (?` + strings.Repeat(",?", len(query.Ids)-1) + ")") sql.WriteString(` OR alert_notification.id IN (?` + strings.Repeat(",?", len(query.Ids)-1) + ")")
for _, v := range query.Ids { for _, v := range query.Ids {

View File

@ -6,6 +6,7 @@ import (
"github.com/go-xorm/xorm" "github.com/go-xorm/xorm"
"github.com/grafana/grafana/pkg/bus" "github.com/grafana/grafana/pkg/bus"
m "github.com/grafana/grafana/pkg/models" m "github.com/grafana/grafana/pkg/models"
"github.com/grafana/grafana/pkg/setting"
) )
func init() { func init() {
@ -13,6 +14,25 @@ func init() {
bus.AddHandler("sql", GetDashboardSnapshot) bus.AddHandler("sql", GetDashboardSnapshot)
bus.AddHandler("sql", DeleteDashboardSnapshot) bus.AddHandler("sql", DeleteDashboardSnapshot)
bus.AddHandler("sql", SearchDashboardSnapshots) bus.AddHandler("sql", SearchDashboardSnapshots)
bus.AddHandler("sql", DeleteExpiredSnapshots)
}
func DeleteExpiredSnapshots(cmd *m.DeleteExpiredSnapshotsCommand) error {
return inTransaction(func(sess *xorm.Session) error {
var expiredCount int64 = 0
if setting.SnapShotRemoveExpired {
deleteExpiredSql := "DELETE FROM dashboard_snapshot WHERE expires < ?"
expiredResponse, err := x.Exec(deleteExpiredSql, time.Now)
if err != nil {
return err
}
expiredCount, _ = expiredResponse.RowsAffected()
}
sqlog.Debug("Deleted old/expired snaphots", "expired", expiredCount)
return nil
})
} }
func CreateDashboardSnapshot(cmd *m.CreateDashboardSnapshotCommand) error { func CreateDashboardSnapshot(cmd *m.CreateDashboardSnapshotCommand) error {

View File

@ -120,4 +120,9 @@ func addDashboardMigration(mg *Migrator) {
mg.AddMigration("Add index for plugin_id in dashboard", NewAddIndexMigration(dashboardV2, &Index{ mg.AddMigration("Add index for plugin_id in dashboard", NewAddIndexMigration(dashboardV2, &Index{
Cols: []string{"org_id", "plugin_id"}, Type: IndexType, Cols: []string{"org_id", "plugin_id"}, Type: IndexType,
})) }))
// dashboard_id index for dashboard_tag table
mg.AddMigration("Add index for dashboard_id in dashboard_tag", NewAddIndexMigration(dashboardTagV1, &Index{
Cols: []string{"dashboard_id"}, Type: IndexType,
}))
} }

View File

@ -18,6 +18,7 @@ type Dialect interface {
SupportEngine() bool SupportEngine() bool
LikeStr() string LikeStr() string
Default(col *Column) string Default(col *Column) string
BooleanStr(bool) string
CreateIndexSql(tableName string, index *Index) string CreateIndexSql(tableName string, index *Index) string
CreateTableSql(table *Table) string CreateTableSql(table *Table) string

View File

@ -92,25 +92,32 @@ func (mg *Migrator) Start() error {
mg.Logger.Debug("Executing", "sql", sql) mg.Logger.Debug("Executing", "sql", sql)
if err := mg.exec(m); err != nil { err := mg.inTransaction(func(sess *xorm.Session) error {
if err := mg.exec(m, sess); err != nil {
mg.Logger.Error("Exec failed", "error", err, "sql", sql) mg.Logger.Error("Exec failed", "error", err, "sql", sql)
record.Error = err.Error() record.Error = err.Error()
mg.x.Insert(&record) sess.Insert(&record)
return err return err
} else { } else {
record.Success = true record.Success = true
mg.x.Insert(&record) sess.Insert(&record)
}
return nil
})
if err != nil {
return err
} }
} }
return nil return nil
} }
func (mg *Migrator) exec(m Migration) error { func (mg *Migrator) exec(m Migration, sess *xorm.Session) error {
mg.Logger.Info("Executing migration", "id", m.Id()) mg.Logger.Info("Executing migration", "id", m.Id())
err := mg.inTransaction(func(sess *xorm.Session) error {
condition := m.GetCondition() condition := m.GetCondition()
if condition != nil { if condition != nil {
sql, args := condition.Sql(mg.dialect) sql, args := condition.Sql(mg.dialect)
@ -126,12 +133,6 @@ func (mg *Migrator) exec(m Migration) error {
mg.Logger.Error("Executing migration failed", "id", m.Id(), "error", err) mg.Logger.Error("Executing migration failed", "id", m.Id(), "error", err)
return err return err
} }
return nil
})
if err != nil {
return err
}
return nil return nil
} }

View File

@ -29,6 +29,10 @@ func (db *Mysql) AutoIncrStr() string {
return "AUTO_INCREMENT" return "AUTO_INCREMENT"
} }
// BooleanStr renders a Go bool as a MySQL boolean literal ("true"/"false"),
// used when embedding boolean constants into generated SQL.
func (db *Mysql) BooleanStr(value bool) string {
	return strconv.FormatBool(value)
}
func (db *Mysql) SqlType(c *Column) string { func (db *Mysql) SqlType(c *Column) string {
var res string var res string
switch c.Type { switch c.Type {

View File

@ -36,6 +36,10 @@ func (db *Postgres) AutoIncrStr() string {
return "" return ""
} }
// BooleanStr renders a Go bool as a Postgres boolean literal ("true"/"false"),
// used when embedding boolean constants into generated SQL.
func (db *Postgres) BooleanStr(value bool) string {
	return strconv.FormatBool(value)
}
func (b *Postgres) Default(col *Column) string { func (b *Postgres) Default(col *Column) string {
if col.Type == DB_Bool { if col.Type == DB_Bool {
if col.Default == "0" { if col.Default == "0" {

View File

@ -29,6 +29,13 @@ func (db *Sqlite3) AutoIncrStr() string {
return "AUTOINCREMENT" return "AUTOINCREMENT"
} }
// BooleanStr renders a Go bool as a SQLite boolean literal. SQLite has no
// native boolean type, so booleans are stored as the integers 1 and 0.
func (db *Sqlite3) BooleanStr(value bool) string {
	if !value {
		return "0"
	}
	return "1"
}
func (db *Sqlite3) SqlType(c *Column) string { func (db *Sqlite3) SqlType(c *Column) string {
switch c.Type { switch c.Type {
case DB_Date, DB_DateTime, DB_TimeStamp, DB_Time: case DB_Date, DB_DateTime, DB_TimeStamp, DB_Time:

View File

@ -128,8 +128,12 @@ func CreateUser(cmd *m.CreateUserCommand) error {
} }
if setting.AutoAssignOrg && !user.IsAdmin { if setting.AutoAssignOrg && !user.IsAdmin {
if len(cmd.DefaultOrgRole) > 0 {
orgUser.Role = m.RoleType(cmd.DefaultOrgRole)
} else {
orgUser.Role = m.RoleType(setting.AutoAssignOrgRole) orgUser.Role = m.RoleType(setting.AutoAssignOrgRole)
} }
}
if _, err = sess.Insert(&orgUser); err != nil { if _, err = sess.Insert(&orgUser); err != nil {
return err return err

View File

@ -81,6 +81,8 @@ var (
ExternalSnapshotUrl string ExternalSnapshotUrl string
ExternalSnapshotName string ExternalSnapshotName string
ExternalEnabled bool ExternalEnabled bool
SnapShotTTLDays int
SnapShotRemoveExpired bool
// User settings // User settings
AllowUserSignUp bool AllowUserSignUp bool
@ -90,7 +92,7 @@ var (
VerifyEmailEnabled bool VerifyEmailEnabled bool
LoginHint string LoginHint string
DefaultTheme string DefaultTheme string
AllowUserPassLogin bool DisableLoginForm bool
// Http auth // Http auth
AdminUser string AdminUser string
@ -495,6 +497,8 @@ func NewConfigContext(args *CommandLineArgs) error {
ExternalSnapshotUrl = snapshots.Key("external_snapshot_url").String() ExternalSnapshotUrl = snapshots.Key("external_snapshot_url").String()
ExternalSnapshotName = snapshots.Key("external_snapshot_name").String() ExternalSnapshotName = snapshots.Key("external_snapshot_name").String()
ExternalEnabled = snapshots.Key("external_enabled").MustBool(true) ExternalEnabled = snapshots.Key("external_enabled").MustBool(true)
SnapShotRemoveExpired = snapshots.Key("snapshot_remove_expired").MustBool(true)
SnapShotTTLDays = snapshots.Key("snapshot_TTL_days").MustInt(90)
// read data source proxy white list // read data source proxy white list
DataProxyWhiteList = make(map[string]bool) DataProxyWhiteList = make(map[string]bool)
@ -514,7 +518,10 @@ func NewConfigContext(args *CommandLineArgs) error {
VerifyEmailEnabled = users.Key("verify_email_enabled").MustBool(false) VerifyEmailEnabled = users.Key("verify_email_enabled").MustBool(false)
LoginHint = users.Key("login_hint").String() LoginHint = users.Key("login_hint").String()
DefaultTheme = users.Key("default_theme").String() DefaultTheme = users.Key("default_theme").String()
AllowUserPassLogin = users.Key("allow_user_pass_login").MustBool(true)
// auth
auth := Cfg.Section("auth")
DisableLoginForm = auth.Key("disable_login_form").MustBool(false)
// anonymous access // anonymous access
AnonymousEnabled = Cfg.Section("auth.anonymous").Key("enabled").MustBool(false) AnonymousEnabled = Cfg.Section("auth.anonymous").Key("enabled").MustBool(false)
@ -556,7 +563,7 @@ func NewConfigContext(args *CommandLineArgs) error {
log.Warn("require_email_validation is enabled but smpt is disabled") log.Warn("require_email_validation is enabled but smpt is disabled")
} }
GrafanaNetUrl = Cfg.Section("grafana.net").Key("url").MustString("https://grafana.net") GrafanaNetUrl = Cfg.Section("grafana_net").Key("url").MustString("https://grafana.net")
imageUploadingSection := Cfg.Section("external_image_storage") imageUploadingSection := Cfg.Section("external_image_storage")
ImageUploadProvider = imageUploadingSection.Key("provider").MustString("internal") ImageUploadProvider = imageUploadingSection.Key("provider").MustString("internal")

View File

@ -8,12 +8,11 @@ type OAuthInfo struct {
AllowedDomains []string AllowedDomains []string
ApiUrl string ApiUrl string
AllowSignup bool AllowSignup bool
Name string
} }
type OAuther struct { type OAuther struct {
GitHub, Google, Twitter, Generic bool
OAuthInfos map[string]*OAuthInfo OAuthInfos map[string]*OAuthInfo
OAuthProviderName string
} }
var OAuthService *OAuther var OAuthService *OAuther

View File

@ -0,0 +1,114 @@
package social
import (
"encoding/json"
"fmt"
"net/http"
"strconv"
"github.com/grafana/grafana/pkg/models"
"golang.org/x/oauth2"
)
// SocialGrafanaNet implements OAuth2 login against the Grafana.net API.
type SocialGrafanaNet struct {
	*oauth2.Config
	url                  string   // base URL of the Grafana.net instance
	allowedOrganizations []string // empty means no membership restriction
	allowSignup          bool
}

// Type identifies this connector as the GRAFANANET auth module.
func (s *SocialGrafanaNet) Type() int {
	return int(models.GRAFANANET)
}

// IsEmailAllowed always returns true: this connector applies no email-domain
// filtering (membership is enforced via organizations instead).
func (s *SocialGrafanaNet) IsEmailAllowed(email string) bool {
	return true
}

// IsSignupAllowed reports whether new Grafana accounts may be created on
// first login via this provider.
func (s *SocialGrafanaNet) IsSignupAllowed() bool {
	return s.allowSignup
}
// IsOrganizationMember reports whether the authenticated user belongs to at
// least one of the configured allowed organizations. When no restriction is
// configured, every user is considered a member. A failure to fetch the
// user's organizations counts as "not a member".
func (s *SocialGrafanaNet) IsOrganizationMember(client *http.Client) bool {
	if len(s.allowedOrganizations) == 0 {
		return true
	}

	memberships, err := s.FetchOrganizations(client)
	if err != nil {
		return false
	}

	allowed := make(map[string]bool, len(s.allowedOrganizations))
	for _, org := range s.allowedOrganizations {
		allowed[org] = true
	}

	for _, org := range memberships {
		if allowed[org] {
			return true
		}
	}

	return false
}
// FetchOrganizations returns the login names of the organizations the
// authenticated user belongs to, as reported by the Grafana.net API.
func (s *SocialGrafanaNet) FetchOrganizations(client *http.Client) ([]string, error) {
	// Record mirrors the subset of the org JSON payload we care about.
	type Record struct {
		Login string `json:"login"`
	}

	// Use an explicit %s verb: the original passed the dynamic URL string as
	// the Sprintf format, which would mangle any '%' in the configured URL.
	url := fmt.Sprintf("%s/api/oauth2/user/orgs", s.url)
	r, err := client.Get(url)
	if err != nil {
		return nil, err
	}

	defer r.Body.Close()

	var records []Record

	if err = json.NewDecoder(r.Body).Decode(&records); err != nil {
		return nil, err
	}

	var logins = make([]string, len(records))
	for i, record := range records {
		logins[i] = record.Login
	}

	return logins, nil
}
// UserInfo fetches the authenticated user's profile from the Grafana.net API
// and enforces the allowed-organizations restriction before returning it.
func (s *SocialGrafanaNet) UserInfo(token *oauth2.Token) (*BasicUserInfo, error) {
	// Subset of the /api/oauth2/user payload; note Name maps to "login".
	var data struct {
		Id    int    `json:"id"`
		Name  string `json:"login"`
		Email string `json:"email"`
		Role  string `json:"role"`
	}

	var err error
	client := s.Client(oauth2.NoContext, token)
	r, err := client.Get(s.url + "/api/oauth2/user")
	if err != nil {
		return nil, err
	}

	defer r.Body.Close()

	// NOTE(review): no HTTP status check before decoding — a non-2xx response
	// surfaces only as a JSON decode error; confirm whether that is intended.
	if err = json.NewDecoder(r.Body).Decode(&data); err != nil {
		return nil, err
	}

	userInfo := &BasicUserInfo{
		Identity: strconv.Itoa(data.Id),
		Name:     data.Name,
		Email:    data.Email,
		Role:     data.Role,
	}

	// Membership check runs last so the token has already been validated by
	// the profile request above.
	if !s.IsOrganizationMember(client) {
		return nil, ErrMissingOrganizationMembership
	}

	return userInfo, nil
}

View File

@ -15,6 +15,7 @@ type BasicUserInfo struct {
Email string Email string
Login string Login string
Company string Company string
Role string
} }
type SocialConnector interface { type SocialConnector interface {
@ -36,7 +37,7 @@ func NewOAuthService() {
setting.OAuthService = &setting.OAuther{} setting.OAuthService = &setting.OAuther{}
setting.OAuthService.OAuthInfos = make(map[string]*setting.OAuthInfo) setting.OAuthService.OAuthInfos = make(map[string]*setting.OAuthInfo)
allOauthes := []string{"github", "google", "generic_oauth"} allOauthes := []string{"github", "google", "generic_oauth", "grafananet"}
for _, name := range allOauthes { for _, name := range allOauthes {
sec := setting.Cfg.Section("auth." + name) sec := setting.Cfg.Section("auth." + name)
@ -50,6 +51,7 @@ func NewOAuthService() {
Enabled: sec.Key("enabled").MustBool(), Enabled: sec.Key("enabled").MustBool(),
AllowedDomains: sec.Key("allowed_domains").Strings(" "), AllowedDomains: sec.Key("allowed_domains").Strings(" "),
AllowSignup: sec.Key("allow_sign_up").MustBool(), AllowSignup: sec.Key("allow_sign_up").MustBool(),
Name: sec.Key("name").MustString(name),
} }
if !info.Enabled { if !info.Enabled {
@ -70,22 +72,18 @@ func NewOAuthService() {
// GitHub. // GitHub.
if name == "github" { if name == "github" {
setting.OAuthService.GitHub = true
teamIds := sec.Key("team_ids").Ints(",")
allowedOrganizations := sec.Key("allowed_organizations").Strings(" ")
SocialMap["github"] = &SocialGithub{ SocialMap["github"] = &SocialGithub{
Config: &config, Config: &config,
allowedDomains: info.AllowedDomains, allowedDomains: info.AllowedDomains,
apiUrl: info.ApiUrl, apiUrl: info.ApiUrl,
allowSignup: info.AllowSignup, allowSignup: info.AllowSignup,
teamIds: teamIds, teamIds: sec.Key("team_ids").Ints(","),
allowedOrganizations: allowedOrganizations, allowedOrganizations: sec.Key("allowed_organizations").Strings(" "),
} }
} }
// Google. // Google.
if name == "google" { if name == "google" {
setting.OAuthService.Google = true
SocialMap["google"] = &SocialGoogle{ SocialMap["google"] = &SocialGoogle{
Config: &config, allowedDomains: info.AllowedDomains, Config: &config, allowedDomains: info.AllowedDomains,
apiUrl: info.ApiUrl, apiUrl: info.ApiUrl,
@ -95,17 +93,33 @@ func NewOAuthService() {
// Generic - Uses the same scheme as Github. // Generic - Uses the same scheme as Github.
if name == "generic_oauth" { if name == "generic_oauth" {
setting.OAuthService.Generic = true
setting.OAuthService.OAuthProviderName = sec.Key("oauth_provider_name").String()
teamIds := sec.Key("team_ids").Ints(",")
allowedOrganizations := sec.Key("allowed_organizations").Strings(" ")
SocialMap["generic_oauth"] = &GenericOAuth{ SocialMap["generic_oauth"] = &GenericOAuth{
Config: &config, Config: &config,
allowedDomains: info.AllowedDomains, allowedDomains: info.AllowedDomains,
apiUrl: info.ApiUrl, apiUrl: info.ApiUrl,
allowSignup: info.AllowSignup, allowSignup: info.AllowSignup,
teamIds: teamIds, teamIds: sec.Key("team_ids").Ints(","),
allowedOrganizations: allowedOrganizations, allowedOrganizations: sec.Key("allowed_organizations").Strings(" "),
}
}
if name == "grafananet" {
config := oauth2.Config{
ClientID: info.ClientId,
ClientSecret: info.ClientSecret,
Endpoint: oauth2.Endpoint{
AuthURL: setting.GrafanaNetUrl + "/oauth2/authorize",
TokenURL: setting.GrafanaNetUrl + "/api/oauth2/token",
},
RedirectURL: strings.TrimSuffix(setting.AppUrl, "/") + SocialBaseUrl + name,
Scopes: info.Scopes,
}
SocialMap["grafananet"] = &SocialGrafanaNet{
Config: &config,
url: setting.GrafanaNetUrl,
allowSignup: info.AllowSignup,
allowedOrganizations: sec.Key("allowed_organizations").Strings(" "),
} }
} }
} }

View File

@ -26,7 +26,7 @@ func (bg *Batch) process(context *QueryContext) {
if executor == nil { if executor == nil {
bg.Done = true bg.Done = true
result := &BatchResult{ result := &BatchResult{
Error: errors.New("Could not find executor for data source type " + bg.Queries[0].DataSource.PluginId), Error: errors.New("Could not find executor for data source type: " + bg.Queries[0].DataSource.PluginId),
QueryResults: make(map[string]*QueryResult), QueryResults: make(map[string]*QueryResult),
} }
for _, query := range bg.Queries { for _, query := range bg.Queries {

View File

@ -38,7 +38,7 @@ func init() {
} }
HttpClient = http.Client{ HttpClient = http.Client{
Timeout: time.Duration(10 * time.Second), Timeout: time.Duration(15 * time.Second),
Transport: tr, Transport: tr,
} }
} }
@ -54,7 +54,7 @@ func (e *GraphiteExecutor) Execute(queries tsdb.QuerySlice, context *tsdb.QueryC
} }
for _, query := range queries { for _, query := range queries {
formData["target"] = []string{query.Query} formData["target"] = []string{query.Model.Get("target").MustString()}
} }
if setting.Env == setting.DEV { if setting.Env == setting.DEV {
@ -79,7 +79,8 @@ func (e *GraphiteExecutor) Execute(queries tsdb.QuerySlice, context *tsdb.QueryC
} }
result.QueryResults = make(map[string]*tsdb.QueryResult) result.QueryResults = make(map[string]*tsdb.QueryResult)
queryRes := &tsdb.QueryResult{} queryRes := tsdb.NewQueryResult()
for _, series := range data { for _, series := range data {
queryRes.Series = append(queryRes.Series, &tsdb.TimeSeries{ queryRes.Series = append(queryRes.Series, &tsdb.TimeSeries{
Name: series.Target, Name: series.Target,
@ -102,9 +103,9 @@ func (e *GraphiteExecutor) parseResponse(res *http.Response) ([]TargetResponseDT
return nil, err return nil, err
} }
if res.StatusCode == http.StatusUnauthorized { if res.StatusCode/100 != 2 {
glog.Info("Request is Unauthorized", "status", res.Status, "body", string(body)) glog.Info("Request failed", "status", res.Status, "body", string(body))
return nil, fmt.Errorf("Request is Unauthorized status: %v body: %s", res.Status, string(body)) return nil, fmt.Errorf("Request failed status: %v", res.Status)
} }
var data []TargetResponseDTO var data []TargetResponseDTO

View File

@ -1,23 +1 @@
package graphite package graphite
// func TestGraphite(t *testing.T) {
//
// Convey("When executing graphite query", t, func() {
// executor := NewGraphiteExecutor(&tsdb.DataSourceInfo{
// Url: "http://localhost:8080",
// })
//
// queries := tsdb.QuerySlice{
// &tsdb.Query{Query: "{\"target\": \"apps.backend.*.counters.requests.count\"}"},
// }
//
// context := tsdb.NewQueryContext(queries, tsdb.TimeRange{})
// result := executor.Execute(queries, context)
// So(result.Error, ShouldBeNil)
//
// Convey("Should return series", func() {
// So(result.QueryResults, ShouldNotBeEmpty)
// })
// })
//
// }

View File

@ -1,6 +1,8 @@
package graphite package graphite
import "github.com/grafana/grafana/pkg/tsdb"
type TargetResponseDTO struct { type TargetResponseDTO struct {
Target string `json:"target"` Target string `json:"target"`
DataPoints [][2]*float64 `json:"datapoints"` DataPoints tsdb.TimeSeriesPoints `json:"datapoints"`
} }

View File

@ -1,19 +1,31 @@
package tsdb package tsdb
type TimeRange struct { import (
From string "github.com/grafana/grafana/pkg/components/simplejson"
To string "gopkg.in/guregu/null.v3"
)
type Query struct {
RefId string
Model *simplejson.Json
Depends []string
DataSource *DataSourceInfo
Results []*TimeSeries
Exclude bool
MaxDataPoints int64
IntervalMs int64
} }
type QuerySlice []*Query
type Request struct { type Request struct {
TimeRange TimeRange TimeRange *TimeRange
MaxDataPoints int
Queries QuerySlice Queries QuerySlice
} }
type Response struct { type Response struct {
BatchTimings []*BatchTiming BatchTimings []*BatchTiming `json:"timings"`
Results map[string]*QueryResult Results map[string]*QueryResult `json:"results"`
} }
type DataSourceInfo struct { type DataSourceInfo struct {
@ -40,19 +52,41 @@ type BatchResult struct {
} }
type QueryResult struct { type QueryResult struct {
Error error Error error `json:"error"`
RefId string RefId string `json:"refId"`
Series TimeSeriesSlice Series TimeSeriesSlice `json:"series"`
} }
type TimeSeries struct { type TimeSeries struct {
Name string `json:"name"` Name string `json:"name"`
Points [][2]*float64 `json:"points"` Points TimeSeriesPoints `json:"points"`
} }
type TimePoint [2]null.Float
type TimeSeriesPoints []TimePoint
type TimeSeriesSlice []*TimeSeries type TimeSeriesSlice []*TimeSeries
func NewTimeSeries(name string, points [][2]*float64) *TimeSeries { func NewQueryResult() *QueryResult {
return &QueryResult{
Series: make(TimeSeriesSlice, 0),
}
}
func NewTimePoint(value float64, timestamp float64) TimePoint {
return TimePoint{null.FloatFrom(value), null.FloatFrom(timestamp)}
}
func NewTimeSeriesPointsFromArgs(values ...float64) TimeSeriesPoints {
points := make(TimeSeriesPoints, 0)
for i := 0; i < len(values); i += 2 {
points = append(points, NewTimePoint(values[i], values[i+1]))
}
return points
}
func NewTimeSeries(name string, points TimeSeriesPoints) *TimeSeries {
return &TimeSeries{ return &TimeSeries{
Name: name, Name: name,
Points: points, Points: points,

View File

@ -0,0 +1,161 @@
package prometheus
import (
"fmt"
"net/http"
"regexp"
"strings"
"time"
"github.com/grafana/grafana/pkg/log"
"github.com/grafana/grafana/pkg/tsdb"
"github.com/prometheus/client_golang/api/prometheus"
pmodel "github.com/prometheus/common/model"
"golang.org/x/net/context"
)
// PrometheusExecutor executes Grafana data source queries against a
// Prometheus server via its HTTP query API.
type PrometheusExecutor struct {
	*tsdb.DataSourceInfo
}

// NewPrometheusExecutor is the tsdb.Executor factory registered for the
// "prometheus" data source type.
func NewPrometheusExecutor(dsInfo *tsdb.DataSourceInfo) tsdb.Executor {
	return &PrometheusExecutor{dsInfo}
}

var (
	plog log.Logger
	// NOTE(review): HttpClient appears unused in this file — confirm external
	// users before removing.
	HttpClient http.Client
)

// init wires the executor into the global tsdb executor registry.
func init() {
	plog = log.New("tsdb.prometheus")
	tsdb.RegisterExecutor("prometheus", NewPrometheusExecutor)
}
// getClient builds a Prometheus query API client pointed at the data
// source's configured URL.
func (e *PrometheusExecutor) getClient() (prometheus.QueryAPI, error) {
	client, err := prometheus.New(prometheus.Config{
		Address: e.DataSourceInfo.Url,
	})
	if err != nil {
		return nil, err
	}

	return prometheus.NewQueryAPI(client), nil
}
// Execute runs the first query in the slice as a Prometheus range query over
// the context's time range and returns the parsed series. Any failure is
// attached to the batch result rather than returned separately.
func (e *PrometheusExecutor) Execute(queries tsdb.QuerySlice, queryContext *tsdb.QueryContext) *tsdb.BatchResult {
	result := &tsdb.BatchResult{}

	client, err := e.getClient()
	if err != nil {
		return resultWithError(result, err)
	}

	query, err := parseQuery(queries, queryContext)
	if err != nil {
		return resultWithError(result, err)
	}

	timeRange := prometheus.Range{
		Start: query.Start,
		End:   query.End,
		Step:  query.Step,
	}

	value, err := client.QueryRange(context.Background(), query.Expr, timeRange)

	if err != nil {
		return resultWithError(result, err)
	}

	queryResult, err := parseResponse(value, query)
	if err != nil {
		return resultWithError(result, err)
	}
	result.QueryResults = queryResult
	return result
}
// legendFormatPattern matches {{label}} placeholders (optionally padded with
// whitespace) in a legend format string. Compiled once at package init; the
// original recompiled it on every call and discarded the compile error.
var legendFormatPattern = regexp.MustCompile(`\{\{\s*(.+?)\s*\}\}`)

// formatLegend expands {{label}} placeholders in the query's legend format
// with the corresponding label values from the metric. Placeholders naming
// labels the metric does not carry are left untouched.
func formatLegend(metric pmodel.Metric, query *PrometheusQuery) string {
	result := legendFormatPattern.ReplaceAllFunc([]byte(query.LegendFormat), func(in []byte) []byte {
		labelName := strings.Replace(strings.Replace(string(in), "{{", "", 1), "}}", "", 1)
		// Trim so "{{ app }}" (allowed by the pattern) resolves like "{{app}}".
		labelName = strings.TrimSpace(labelName)
		if val, exists := metric[pmodel.LabelName(labelName)]; exists {
			return []byte(val)
		}
		return in
	})

	return string(result)
}
// parseQuery extracts the Prometheus query parameters (expression, step,
// legend format and resolved time range) from the first query in the slice.
// Returns an error when any required model field is missing or the time
// range cannot be parsed.
func parseQuery(queries tsdb.QuerySlice, queryContext *tsdb.QueryContext) (*PrometheusQuery, error) {
	// Guard: the original indexed queries[0] unconditionally and would panic
	// on a request carrying no queries.
	if len(queries) == 0 {
		return nil, fmt.Errorf("query request contains no queries")
	}

	queryModel := queries[0]

	expr, err := queryModel.Model.Get("expr").String()
	if err != nil {
		return nil, err
	}

	step, err := queryModel.Model.Get("step").Int64()
	if err != nil {
		return nil, err
	}

	format, err := queryModel.Model.Get("legendFormat").String()
	if err != nil {
		return nil, err
	}

	start, err := queryContext.TimeRange.ParseFrom()
	if err != nil {
		return nil, err
	}

	end, err := queryContext.TimeRange.ParseTo()
	if err != nil {
		return nil, err
	}

	return &PrometheusQuery{
		Expr:         expr,
		Step:         time.Second * time.Duration(step),
		LegendFormat: format,
		Start:        start,
		End:          end,
	}, nil
}
// parseResponse converts a Prometheus matrix result into Grafana time
// series, with timestamps converted to millisecond epochs. Only matrix
// (range query) results are supported.
func parseResponse(value pmodel.Value, query *PrometheusQuery) (map[string]*tsdb.QueryResult, error) {
	queryResults := make(map[string]*tsdb.QueryResult)
	queryRes := tsdb.NewQueryResult()

	data, ok := value.(pmodel.Matrix)
	if !ok {
		return queryResults, fmt.Errorf("Unsupported result format: %s", value.Type().String())
	}

	for _, v := range data {
		series := tsdb.TimeSeries{
			Name: formatLegend(v.Metric, query),
		}

		for _, k := range v.Values {
			series.Points = append(series.Points, tsdb.NewTimePoint(float64(k.Value), float64(k.Timestamp.Unix()*1000)))
		}

		queryRes.Series = append(queryRes.Series, &series)
	}

	// All series are returned under RefId "A": this executor currently
	// supports only a single query per request (see parseQuery).
	queryResults["A"] = queryRes
	return queryResults, nil
}
// resultWithError attaches err to the batch result and returns it, enabling
// one-line error returns in Execute.
func resultWithError(result *tsdb.BatchResult, err error) *tsdb.BatchResult {
	result.Error = err
	return result
}

View File

@ -0,0 +1,26 @@
package prometheus
import (
"testing"
p "github.com/prometheus/common/model"
. "github.com/smartystreets/goconvey/convey"
)
// TestPrometheus verifies legend formatting: known labels are substituted
// with their values, unknown placeholders are left untouched.
func TestPrometheus(t *testing.T) {
	Convey("Prometheus", t, func() {

		Convey("converting metric name", func() {
			metric := map[p.LabelName]p.LabelValue{
				p.LabelName("app"):    p.LabelValue("backend"),
				p.LabelName("device"): p.LabelValue("mobile"),
			}

			query := &PrometheusQuery{
				LegendFormat: "legend {{app}} {{device}} {{broken}}",
			}

			So(formatLegend(metric, query), ShouldEqual, "legend backend mobile {{broken}}")
		})
	})
}

View File

@ -0,0 +1,11 @@
package prometheus
import "time"
// PrometheusQuery is the parsed form of a single Grafana Prometheus query,
// produced by parseQuery and consumed by the range-query executor.
type PrometheusQuery struct {
	Expr         string        // PromQL expression
	Step         time.Duration // resolution step between returned points
	LegendFormat string        // legend template with {{label}} placeholders
	Start        time.Time
	End          time.Time
}

View File

@ -1,12 +0,0 @@
package tsdb
type Query struct {
RefId string
Query string
Depends []string
DataSource *DataSourceInfo
Results []*TimeSeries
Exclude bool
}
type QuerySlice []*Query

View File

@ -3,7 +3,7 @@ package tsdb
import "sync" import "sync"
type QueryContext struct { type QueryContext struct {
TimeRange TimeRange TimeRange *TimeRange
Queries QuerySlice Queries QuerySlice
Results map[string]*QueryResult Results map[string]*QueryResult
ResultsChan chan *BatchResult ResultsChan chan *BatchResult
@ -11,7 +11,7 @@ type QueryContext struct {
BatchWaits sync.WaitGroup BatchWaits sync.WaitGroup
} }
func NewQueryContext(queries QuerySlice, timeRange TimeRange) *QueryContext { func NewQueryContext(queries QuerySlice, timeRange *TimeRange) *QueryContext {
return &QueryContext{ return &QueryContext{
TimeRange: timeRange, TimeRange: timeRange,
Queries: queries, Queries: queries,

130
pkg/tsdb/testdata/scenarios.go vendored Normal file
View File

@ -0,0 +1,130 @@
package testdata
import (
"math/rand"
"strconv"
"strings"
"time"
"github.com/grafana/grafana/pkg/log"
"github.com/grafana/grafana/pkg/tsdb"
)
// ScenarioHandler produces the fake result for one query of a test scenario.
type ScenarioHandler func(query *tsdb.Query, context *tsdb.QueryContext) *tsdb.QueryResult

// Scenario describes a selectable fake-data generator exposed by the test
// data source. Handler is excluded from JSON serialization.
type Scenario struct {
	Id          string          `json:"id"`
	Name        string          `json:"name"`
	StringInput string          `json:"stringOption"`
	Description string          `json:"description"`
	Handler     ScenarioHandler `json:"-"`
}

// ScenarioRegistry maps scenario id to its definition; populated in init.
var ScenarioRegistry map[string]*Scenario
// init populates ScenarioRegistry with the built-in fake-data generators.
func init() {
	ScenarioRegistry = make(map[string]*Scenario)
	logger := log.New("tsdb.testdata")
	logger.Debug("Initializing TestData Scenario")

	// Random walk: one point per query interval, each value drifting by up
	// to +/-0.5 from the previous one, capped at 10000 points.
	registerScenario(&Scenario{
		Id:   "random_walk",
		Name: "Random Walk",

		Handler: func(query *tsdb.Query, context *tsdb.QueryContext) *tsdb.QueryResult {
			timeWalkerMs := context.TimeRange.GetFromAsMsEpoch()
			to := context.TimeRange.GetToAsMsEpoch()

			series := newSeriesForQuery(query)

			points := make(tsdb.TimeSeriesPoints, 0)
			walker := rand.Float64() * 100

			for i := int64(0); i < 10000 && timeWalkerMs < to; i++ {
				points = append(points, tsdb.NewTimePoint(walker, float64(timeWalkerMs)))

				walker += rand.Float64() - 0.5
				timeWalkerMs += query.IntervalMs
			}

			series.Points = points

			queryRes := tsdb.NewQueryResult()
			queryRes.Series = append(queryRes.Series, series)
			return queryRes
		},
	})

	// Empty result: exercises "no data" handling in panels.
	registerScenario(&Scenario{
		Id:   "no_data_points",
		Name: "No Data Points",
		Handler: func(query *tsdb.Query, context *tsdb.QueryContext) *tsdb.QueryResult {
			return tsdb.NewQueryResult()
		},
	})

	// Single point placed one hour before the requested range starts.
	registerScenario(&Scenario{
		Id:   "datapoints_outside_range",
		Name: "Datapoints Outside Range",
		Handler: func(query *tsdb.Query, context *tsdb.QueryContext) *tsdb.QueryResult {
			queryRes := tsdb.NewQueryResult()

			series := newSeriesForQuery(query)
			outsideTime := context.TimeRange.MustGetFrom().Add(-1*time.Hour).Unix() * 1000

			series.Points = append(series.Points, tsdb.NewTimePoint(10, float64(outsideTime)))
			queryRes.Series = append(queryRes.Series, series)

			return queryRes
		},
	})

	// User-supplied comma-separated values, spread evenly across the range;
	// entries that fail to parse as floats are silently skipped.
	registerScenario(&Scenario{
		Id:          "csv_metric_values",
		Name:        "CSV Metric Values",
		StringInput: "1,20,90,30,5,0",
		Handler: func(query *tsdb.Query, context *tsdb.QueryContext) *tsdb.QueryResult {
			queryRes := tsdb.NewQueryResult()

			stringInput := query.Model.Get("stringInput").MustString()

			values := []float64{}
			for _, strVal := range strings.Split(stringInput, ",") {
				if val, err := strconv.ParseFloat(strVal, 64); err == nil {
					values = append(values, val)
				}
			}

			if len(values) == 0 {
				return queryRes
			}

			series := newSeriesForQuery(query)
			startTime := context.TimeRange.GetFromAsMsEpoch()
			endTime := context.TimeRange.GetToAsMsEpoch()
			// Even spacing from range start to range end inclusive.
			step := (endTime - startTime) / int64(len(values)-1)

			for _, val := range values {
				series.Points = append(series.Points, tsdb.NewTimePoint(val, float64(startTime)))
				startTime += step
			}

			queryRes.Series = append(queryRes.Series, series)

			return queryRes
		},
	})
}
// registerScenario adds a scenario to the registry, keyed by its Id.
func registerScenario(scenario *Scenario) {
	ScenarioRegistry[scenario.Id] = scenario
}
// newSeriesForQuery creates an empty series named after the query's "alias"
// model field, falling back to "<RefId>-series" when no alias is set.
func newSeriesForQuery(query *tsdb.Query) *tsdb.TimeSeries {
	name := query.Model.Get("alias").MustString("")

	if len(name) == 0 {
		name = query.RefId + "-series"
	}

	return &tsdb.TimeSeries{Name: name}
}

39
pkg/tsdb/testdata/testdata.go vendored Normal file
View File

@ -0,0 +1,39 @@
package testdata
import (
"github.com/grafana/grafana/pkg/log"
"github.com/grafana/grafana/pkg/tsdb"
)
// TestDataExecutor serves synthetic time series for the built-in
// grafana-testdata-datasource plugin.
type TestDataExecutor struct {
	*tsdb.DataSourceInfo
	log log.Logger
}

// NewTestDataExecutor is the tsdb.Executor factory for the test data source.
func NewTestDataExecutor(dsInfo *tsdb.DataSourceInfo) tsdb.Executor {
	return &TestDataExecutor{
		DataSourceInfo: dsInfo,
		log:            log.New("tsdb.testdata"),
	}
}

// init wires the executor into the global tsdb executor registry.
func init() {
	tsdb.RegisterExecutor("grafana-testdata-datasource", NewTestDataExecutor)
}
// Execute runs each query's registered test scenario (defaulting to
// "random_walk") and collects the results keyed by RefId. Queries naming an
// unknown scenario are logged and skipped.
func (e *TestDataExecutor) Execute(queries tsdb.QuerySlice, context *tsdb.QueryContext) *tsdb.BatchResult {
	result := &tsdb.BatchResult{}
	result.QueryResults = make(map[string]*tsdb.QueryResult)

	for _, query := range queries {
		scenarioId := query.Model.Get("scenarioId").MustString("random_walk")

		scenario, exist := ScenarioRegistry[scenarioId]
		if !exist {
			e.log.Error("Scenario not found", "scenarioId", scenarioId)
			continue
		}

		queryResult := scenario.Handler(query, context)
		queryResult.RefId = query.RefId
		result.QueryResults[query.RefId] = queryResult
	}

	return result
}

90
pkg/tsdb/time_range.go Normal file
View File

@ -0,0 +1,90 @@
package tsdb
import (
"fmt"
"strconv"
"strings"
"time"
)
// NewTimeRange builds a TimeRange from raw from/to expressions, pinning Now
// at construction time so relative expressions ("now", "now-5m") resolve
// consistently across repeated parses.
func NewTimeRange(from, to string) *TimeRange {
	return &TimeRange{
		From: from,
		To:   to,
		Now:  time.Now(),
	}
}

// TimeRange holds unparsed range bounds. From/To accept a millisecond Unix
// epoch, "now", or a relative duration such as "5m" / "now-10m".
type TimeRange struct {
	From string
	To   string
	Now  time.Time
}
// GetFromAsMsEpoch returns the resolved range start as milliseconds since
// the Unix epoch (0 when From cannot be parsed).
func (tr *TimeRange) GetFromAsMsEpoch() int64 {
	return tr.MustGetFrom().UnixNano() / int64(time.Millisecond)
}

// GetToAsMsEpoch returns the resolved range end as milliseconds since the
// Unix epoch (0 when To cannot be parsed).
func (tr *TimeRange) GetToAsMsEpoch() int64 {
	return tr.MustGetTo().UnixNano() / int64(time.Millisecond)
}
// MustGetFrom resolves the range start, falling back to the Unix epoch when
// From cannot be parsed.
func (tr *TimeRange) MustGetFrom() time.Time {
	res, err := tr.ParseFrom()
	if err != nil {
		return time.Unix(0, 0)
	}
	return res
}

// MustGetTo resolves the range end, falling back to the Unix epoch when To
// cannot be parsed.
func (tr *TimeRange) MustGetTo() time.Time {
	res, err := tr.ParseTo()
	if err != nil {
		return time.Unix(0, 0)
	}
	return res
}
func tryParseUnixMsEpoch(val string) (time.Time, bool) {
if val, err := strconv.ParseInt(val, 10, 64); err == nil {
seconds := val / 1000
nano := (val - seconds*1000) * 1000000
return time.Unix(seconds, nano), true
}
return time.Time{}, false
}
// ParseFrom resolves the range start: either an absolute millisecond epoch,
// or a duration relative to Now ("5m" and "now-5m" are equivalent).
func (tr *TimeRange) ParseFrom() (time.Time, error) {
	if epoch, ok := tryParseUnixMsEpoch(tr.From); ok {
		return epoch, nil
	}

	// Strip an optional "now-" prefix, then parse the remainder as a
	// negative offset from Now.
	relative := strings.Replace(tr.From, "now-", "", 1)
	offset, err := time.ParseDuration("-" + relative)
	if err != nil {
		return time.Time{}, err
	}

	return tr.Now.Add(offset), nil
}
// ParseTo resolves the range end: "now", "now-<duration>", or an absolute
// millisecond epoch. Returns an error for anything else.
func (tr *TimeRange) ParseTo() (time.Time, error) {
	if tr.To == "now" {
		return tr.Now, nil
	} else if strings.HasPrefix(tr.To, "now-") {
		withoutNow := strings.Replace(tr.To, "now-", "", 1)

		diff, err := time.ParseDuration("-" + withoutNow)
		if err != nil {
			// Bug fix: the original returned `nil` here, silently handing
			// callers the zero time with no error on a bad duration.
			return time.Time{}, err
		}
		return tr.Now.Add(diff), nil
	}

	if res, ok := tryParseUnixMsEpoch(tr.To); ok {
		return res, nil
	}

	return time.Time{}, fmt.Errorf("cannot parse to value %s", tr.To)
}

View File

@ -0,0 +1,95 @@
package tsdb
import (
"testing"
"time"
. "github.com/smartystreets/goconvey/convey"
)
// TestTimeRange covers the four TimeRange parsing shapes: bare relative
// ("5m"/"now"), prefixed relative ("now-10m"), absolute millisecond epochs,
// and unparsable input.
func TestTimeRange(t *testing.T) {
	Convey("Time range", t, func() {

		// Pin Now so relative expressions compare deterministically.
		now := time.Now()

		Convey("Can parse 5m, now", func() {
			tr := TimeRange{
				From: "5m",
				To:   "now",
				Now:  now,
			}

			Convey("5m ago ", func() {
				fiveMinAgo, _ := time.ParseDuration("-5m")
				expected := now.Add(fiveMinAgo)

				res, err := tr.ParseFrom()
				So(err, ShouldBeNil)
				So(res.Unix(), ShouldEqual, expected.Unix())
			})

			Convey("now ", func() {
				res, err := tr.ParseTo()
				So(err, ShouldBeNil)
				So(res.Unix(), ShouldEqual, now.Unix())
			})
		})

		Convey("Can parse 5h, now-10m", func() {
			tr := TimeRange{
				From: "5h",
				To:   "now-10m",
				Now:  now,
			}

			Convey("5h ago ", func() {
				fiveHourAgo, _ := time.ParseDuration("-5h")
				expected := now.Add(fiveHourAgo)

				res, err := tr.ParseFrom()
				So(err, ShouldBeNil)
				So(res.Unix(), ShouldEqual, expected.Unix())
			})

			Convey("now-10m ", func() {
				fiveMinAgo, _ := time.ParseDuration("-10m")
				expected := now.Add(fiveMinAgo)
				res, err := tr.ParseTo()
				So(err, ShouldBeNil)
				So(res.Unix(), ShouldEqual, expected.Unix())
			})
		})

		Convey("can parse unix epocs", func() {
			var err error
			tr := TimeRange{
				From: "1474973725473",
				To:   "1474975757930",
				Now:  now,
			}

			res, err := tr.ParseFrom()
			So(err, ShouldBeNil)
			So(res.UnixNano()/int64(time.Millisecond), ShouldEqual, 1474973725473)

			res, err = tr.ParseTo()
			So(err, ShouldBeNil)
			So(res.UnixNano()/int64(time.Millisecond), ShouldEqual, 1474975757930)
		})

		Convey("Cannot parse asdf", func() {
			var err error
			tr := TimeRange{
				From: "asdf",
				To:   "asdf",
				Now:  now,
			}

			_, err = tr.ParseFrom()
			So(err, ShouldNotBeNil)

			_, err = tr.ParseTo()
			So(err, ShouldNotBeNil)
		})
	})
}

View File

@ -14,9 +14,9 @@ func TestMetricQuery(t *testing.T) {
Convey("Given 3 queries for 2 data sources", func() { Convey("Given 3 queries for 2 data sources", func() {
request := &Request{ request := &Request{
Queries: QuerySlice{ Queries: QuerySlice{
{RefId: "A", Query: "asd", DataSource: &DataSourceInfo{Id: 1}}, {RefId: "A", DataSource: &DataSourceInfo{Id: 1}},
{RefId: "B", Query: "asd", DataSource: &DataSourceInfo{Id: 1}}, {RefId: "B", DataSource: &DataSourceInfo{Id: 1}},
{RefId: "C", Query: "asd", DataSource: &DataSourceInfo{Id: 2}}, {RefId: "C", DataSource: &DataSourceInfo{Id: 2}},
}, },
} }
@ -31,9 +31,9 @@ func TestMetricQuery(t *testing.T) {
Convey("Given query 2 depends on query 1", func() { Convey("Given query 2 depends on query 1", func() {
request := &Request{ request := &Request{
Queries: QuerySlice{ Queries: QuerySlice{
{RefId: "A", Query: "asd", DataSource: &DataSourceInfo{Id: 1}}, {RefId: "A", DataSource: &DataSourceInfo{Id: 1}},
{RefId: "B", Query: "asd", DataSource: &DataSourceInfo{Id: 2}}, {RefId: "B", DataSource: &DataSourceInfo{Id: 2}},
{RefId: "C", Query: "#A / #B", DataSource: &DataSourceInfo{Id: 3}, Depends: []string{"A", "B"}}, {RefId: "C", DataSource: &DataSourceInfo{Id: 3}, Depends: []string{"A", "B"}},
}, },
} }
@ -55,7 +55,7 @@ func TestMetricQuery(t *testing.T) {
Convey("When executing request with one query", t, func() { Convey("When executing request with one query", t, func() {
req := &Request{ req := &Request{
Queries: QuerySlice{ Queries: QuerySlice{
{RefId: "A", Query: "asd", DataSource: &DataSourceInfo{Id: 1, PluginId: "test"}}, {RefId: "A", DataSource: &DataSourceInfo{Id: 1, PluginId: "test"}},
}, },
} }
@ -74,8 +74,8 @@ func TestMetricQuery(t *testing.T) {
Convey("When executing one request with two queries from same data source", t, func() { Convey("When executing one request with two queries from same data source", t, func() {
req := &Request{ req := &Request{
Queries: QuerySlice{ Queries: QuerySlice{
{RefId: "A", Query: "asd", DataSource: &DataSourceInfo{Id: 1, PluginId: "test"}}, {RefId: "A", DataSource: &DataSourceInfo{Id: 1, PluginId: "test"}},
{RefId: "B", Query: "asd", DataSource: &DataSourceInfo{Id: 1, PluginId: "test"}}, {RefId: "B", DataSource: &DataSourceInfo{Id: 1, PluginId: "test"}},
}, },
} }
@ -100,9 +100,9 @@ func TestMetricQuery(t *testing.T) {
Convey("When executing one request with three queries from different datasources", t, func() { Convey("When executing one request with three queries from different datasources", t, func() {
req := &Request{ req := &Request{
Queries: QuerySlice{ Queries: QuerySlice{
{RefId: "A", Query: "asd", DataSource: &DataSourceInfo{Id: 1, PluginId: "test"}}, {RefId: "A", DataSource: &DataSourceInfo{Id: 1, PluginId: "test"}},
{RefId: "B", Query: "asd", DataSource: &DataSourceInfo{Id: 1, PluginId: "test"}}, {RefId: "B", DataSource: &DataSourceInfo{Id: 1, PluginId: "test"}},
{RefId: "C", Query: "asd", DataSource: &DataSourceInfo{Id: 2, PluginId: "test"}}, {RefId: "C", DataSource: &DataSourceInfo{Id: 2, PluginId: "test"}},
}, },
} }
@ -117,7 +117,7 @@ func TestMetricQuery(t *testing.T) {
Convey("When query uses data source of unknown type", t, func() { Convey("When query uses data source of unknown type", t, func() {
req := &Request{ req := &Request{
Queries: QuerySlice{ Queries: QuerySlice{
{RefId: "A", Query: "asd", DataSource: &DataSourceInfo{Id: 1, PluginId: "asdasdas"}}, {RefId: "A", DataSource: &DataSourceInfo{Id: 1, PluginId: "asdasdas"}},
}, },
} }
@ -129,10 +129,10 @@ func TestMetricQuery(t *testing.T) {
req := &Request{ req := &Request{
Queries: QuerySlice{ Queries: QuerySlice{
{ {
RefId: "A", Query: "asd", DataSource: &DataSourceInfo{Id: 1, PluginId: "test"}, RefId: "A", DataSource: &DataSourceInfo{Id: 1, PluginId: "test"},
}, },
{ {
RefId: "B", Query: "#A / 2", DataSource: &DataSourceInfo{Id: 2, PluginId: "test"}, Depends: []string{"A"}, RefId: "B", DataSource: &DataSourceInfo{Id: 2, PluginId: "test"}, Depends: []string{"A"},
}, },
}, },
} }

View File

@ -1,11 +1,18 @@
define([ define([
'angular', 'angular',
'lodash',
'../core_module', '../core_module',
'app/core/config', 'app/core/config',
], ],
function (angular, coreModule, config) { function (angular, _, coreModule, config) {
'use strict'; 'use strict';
var failCodes = {
"1000": "Required team membership not fulfilled",
"1001": "Required organization membership not fulfilled",
"1002": "Required email domain not fulfilled",
};
coreModule.default.controller('LoginCtrl', function($scope, backendSrv, contextSrv, $location) { coreModule.default.controller('LoginCtrl', function($scope, backendSrv, contextSrv, $location) {
$scope.formModel = { $scope.formModel = {
user: '', user: '',
@ -15,12 +22,10 @@ function (angular, coreModule, config) {
contextSrv.sidemenu = false; contextSrv.sidemenu = false;
$scope.googleAuthEnabled = config.googleAuthEnabled; $scope.oauth = config.oauth;
$scope.githubAuthEnabled = config.githubAuthEnabled; $scope.oauthEnabled = _.keys(config.oauth).length > 0;
$scope.oauthEnabled = config.githubAuthEnabled || config.googleAuthEnabled || config.genericOAuthEnabled;
$scope.allowUserPassLogin = config.allowUserPassLogin; $scope.disableLoginForm = config.disableLoginForm;
$scope.genericOAuthEnabled = config.genericOAuthEnabled;
$scope.oauthProviderName = config.oauthProviderName;
$scope.disableUserSignUp = config.disableUserSignUp; $scope.disableUserSignUp = config.disableUserSignUp;
$scope.loginHint = config.loginHint; $scope.loginHint = config.loginHint;
@ -31,8 +36,8 @@ function (angular, coreModule, config) {
$scope.$watch("loginMode", $scope.loginModeChanged); $scope.$watch("loginMode", $scope.loginModeChanged);
var params = $location.search(); var params = $location.search();
if (params.failedMsg) { if (params.failCode) {
$scope.appEvent('alert-warning', ['Login Failed', params.failedMsg]); $scope.appEvent('alert-warning', ['Login Failed', failCodes[params.failCode]]);
delete params.failedMsg; delete params.failedMsg;
$location.search(params); $location.search(params);
} }

View File

@ -41,6 +41,7 @@ import 'app/core/routes/routes';
import './filters/filters'; import './filters/filters';
import coreModule from './core_module'; import coreModule from './core_module';
import appEvents from './app_events'; import appEvents from './app_events';
import colors from './utils/colors';
export { export {
@ -60,4 +61,5 @@ export {
dashboardSelector, dashboardSelector,
queryPartEditorDirective, queryPartEditorDirective,
WizardFlow, WizardFlow,
colors,
}; };

View File

@ -23,10 +23,10 @@ function (_, $, coreModule) {
getOptions: "&", getOptions: "&",
onChange: "&", onChange: "&",
}, },
link: function($scope, elem, attrs) { link: function($scope, elem) {
var $input = $(inputTemplate); var $input = $(inputTemplate);
var $button = $(attrs.styleMode === 'select' ? selectTemplate : linkTemplate);
var segment = $scope.segment; var segment = $scope.segment;
var $button = $(segment.selectMode ? selectTemplate : linkTemplate);
var options = null; var options = null;
var cancelBlur = null; var cancelBlur = null;
var linkMode = true; var linkMode = true;
@ -136,7 +136,7 @@ function (_, $, coreModule) {
$button.click(function() { $button.click(function() {
options = null; options = null;
$input.css('width', ($button.width() + 16) + 'px'); $input.css('width', (Math.max($button.width(), 80) + 16) + 'px');
$button.hide(); $button.hide();
$input.show(); $input.show();
@ -170,6 +170,7 @@ function (_, $, coreModule) {
}, },
link: { link: {
pre: function postLink($scope, elem, attrs) { pre: function postLink($scope, elem, attrs) {
var cachedOptions;
$scope.valueToSegment = function(value) { $scope.valueToSegment = function(value) {
var option = _.find($scope.options, {value: value}); var option = _.find($scope.options, {value: value});
@ -177,7 +178,9 @@ function (_, $, coreModule) {
cssClass: attrs.cssClass, cssClass: attrs.cssClass,
custom: attrs.custom, custom: attrs.custom,
value: option ? option.text : value, value: option ? option.text : value,
selectMode: attrs.selectMode,
}; };
return uiSegmentSrv.newSegment(segment); return uiSegmentSrv.newSegment(segment);
}; };
@ -188,13 +191,20 @@ function (_, $, coreModule) {
}); });
return $q.when(optionSegments); return $q.when(optionSegments);
} else { } else {
return $scope.getOptions(); return $scope.getOptions().then(function(options) {
cachedOptions = options;
return _.map(options, function(option) {
return uiSegmentSrv.newSegment({value: option.text});
});
});
} }
}; };
$scope.onSegmentChange = function() { $scope.onSegmentChange = function() {
if ($scope.options) { var options = $scope.options || cachedOptions;
var option = _.find($scope.options, {text: $scope.segment.value});
if (options) {
var option = _.find(options, {text: $scope.segment.value});
if (option && option.value !== $scope.property) { if (option && option.value !== $scope.property) {
$scope.property = option.value; $scope.property = option.value;
} else if (attrs.custom !== 'false') { } else if (attrs.custom !== 'false') {

View File

@ -236,7 +236,7 @@ function (angular, _, coreModule) {
var inputEl = elem.find('input'); var inputEl = elem.find('input');
function openDropdown() { function openDropdown() {
inputEl.css('width', Math.max(linkEl.width(), 30) + 'px'); inputEl.css('width', Math.max(linkEl.width(), 80) + 'px');
inputEl.show(); inputEl.show();
linkEl.hide(); linkEl.hide();

View File

@ -114,6 +114,10 @@ export class BackendSrv {
var requestIsLocal = options.url.indexOf('/') === 0; var requestIsLocal = options.url.indexOf('/') === 0;
var firstAttempt = options.retry === 0; var firstAttempt = options.retry === 0;
if (requestIsLocal && !options.hasSubUrl && options.retry === 0) {
options.url = config.appSubUrl + options.url;
}
if (requestIsLocal && options.headers && options.headers.Authorization) { if (requestIsLocal && options.headers && options.headers.Authorization) {
options.headers['X-DS-Authorization'] = options.headers.Authorization; options.headers['X-DS-Authorization'] = options.headers.Authorization;
delete options.headers.Authorization; delete options.headers.Authorization;

View File

@ -9,6 +9,7 @@ export class User {
isGrafanaAdmin: any; isGrafanaAdmin: any;
isSignedIn: any; isSignedIn: any;
orgRole: any; orgRole: any;
timezone: string;
constructor() { constructor() {
if (config.bootData.user) { if (config.bootData.user) {

View File

@ -28,6 +28,7 @@ function (angular, _, coreModule) {
this.type = options.type; this.type = options.type;
this.fake = options.fake; this.fake = options.fake;
this.value = options.value; this.value = options.value;
this.selectMode = options.selectMode;
this.type = options.type; this.type = options.type;
this.expandable = options.expandable; this.expandable = options.expandable;
this.html = options.html || $sce.trustAsHtml(templateSrv.highlightVariablesAsHtml(this.value)); this.html = options.html || $sce.trustAsHtml(templateSrv.highlightVariablesAsHtml(this.value));

View File

@ -31,6 +31,8 @@ export default class TimeSeries {
allIsZero: boolean; allIsZero: boolean;
decimals: number; decimals: number;
scaledDecimals: number; scaledDecimals: number;
hasMsResolution: boolean;
isOutsideRange: boolean;
lines: any; lines: any;
bars: any; bars: any;
@ -54,6 +56,7 @@ export default class TimeSeries {
this.stats = {}; this.stats = {};
this.legend = true; this.legend = true;
this.unit = opts.unit; this.unit = opts.unit;
this.hasMsResolution = this.isMsResolutionNeeded();
} }
applySeriesOverrides(overrides) { applySeriesOverrides(overrides) {

View File

@ -0,0 +1,12 @@
/**
 * 56-entry hex color palette, exported as the module default.
 * Entries are kept in the original order; rows mirror the original
 * eight-per-row grouping of the palette.
 */
const palette = [
  '#7EB26D', '#EAB839', '#6ED0E0', '#EF843C', '#E24D42', '#1F78C1', '#BA43A9', '#705DA0',
  '#508642', '#CCA300', '#447EBC', '#C15C17', '#890F02', '#0A437C', '#6D1F62', '#584477',
  '#B7DBAB', '#F4D598', '#70DBED', '#F9BA8F', '#F29191', '#82B5D8', '#E5A8E2', '#AEA2E0',
  '#629E51', '#E5AC0E', '#64B0C8', '#E0752D', '#BF1B00', '#0A50A1', '#962D82', '#614D93',
  '#9AC48A', '#F2C96D', '#65C5DB', '#F9934E', '#EA6460', '#5195CE', '#D683CE', '#806EB7',
  '#3F6833', '#967302', '#2F575E', '#99440A', '#58140C', '#052B51', '#511749', '#3F2B5B',
  '#E0F9D7', '#FCEACA', '#CFFAFF', '#F9E2D2', '#FCE2DE', '#BADFF4', '#F9D9F9', '#DEDAF7',
];

export default palette;

View File

@ -9,6 +9,10 @@ function($, _, moment) {
var kbn = {}; var kbn = {};
kbn.valueFormats = {}; kbn.valueFormats = {};
kbn.regexEscape = function(value) {
return value.replace(/[\\^$*+?.()|[\]{}\/]/g, '\\$&');
};
///// HELPER FUNCTIONS ///// ///// HELPER FUNCTIONS /////
kbn.round_interval = function(interval) { kbn.round_interval = function(interval) {
@ -170,7 +174,10 @@ function($, _, moment) {
lowLimitMs = kbn.interval_to_ms(lowLimitInterval); lowLimitMs = kbn.interval_to_ms(lowLimitInterval);
} }
else { else {
return userInterval; return {
intervalMs: kbn.interval_to_ms(userInterval),
interval: userInterval,
};
} }
} }
@ -179,7 +186,10 @@ function($, _, moment) {
intervalMs = lowLimitMs; intervalMs = lowLimitMs;
} }
return kbn.secondsToHms(intervalMs / 1000); return {
intervalMs: intervalMs,
interval: kbn.secondsToHms(intervalMs / 1000),
};
}; };
kbn.describe_interval = function (string) { kbn.describe_interval = function (string) {

Some files were not shown because too many files have changed in this diff Show More