diff --git a/CHANGELOG.md b/CHANGELOG.md
index e01e105db6c..bbe75e84210 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -28,6 +28,7 @@ it allows you to add queries of differnet data source types & instances to the s
- [Issue #2708](https://github.com/grafana/grafana/issues/2708). InfluxDB: You can now set math expression for select clauses.
- [Issue #1575](https://github.com/grafana/grafana/issues/1575). Drilldown link: now you can click on the external link icon in the panel header to access drilldown links!
- [Issue #1646](https://github.com/grafana/grafana/issues/1646). OpenTSDB: Fetch list of aggregators from OpenTSDB
+- [Issue #2955](https://github.com/grafana/grafana/issues/2955). Graph: More axis units (Length, Volume, Temperature, Pressure, etc), thanks @greglook
**Fixes**
- [Issue #2413](https://github.com/grafana/grafana/issues/2413). InfluxDB 0.9: Fix for handling empty series object in response from influxdb
diff --git a/build.go b/build.go
index ca6f52ecd89..7804d00890e 100644
--- a/build.go
+++ b/build.go
@@ -89,8 +89,13 @@ func main() {
}
func makeLatestDistCopies() {
+ rpmIteration := "-1"
+ if linuxPackageIteration != "" {
+ rpmIteration = "-" + linuxPackageIteration
+ }
+
runError("cp", "dist/grafana_"+version+"_amd64.deb", "dist/grafana_latest_amd64.deb")
- runError("cp", "dist/grafana-"+strings.Replace(version, "-", "_", 5)+"-1.x86_64.rpm", "dist/grafana-latest-1.x86_64.rpm")
+ runError("cp", "dist/grafana-"+linuxPackageVersion+rpmIteration+".x86_64.rpm", "dist/grafana-latest-1.x86_64.rpm")
runError("cp", "dist/grafana-"+version+".linux-x64.tar.gz", "dist/grafana-latest.linux-x64.tar.gz")
}
diff --git a/docker/blocks/prometheus/prometheus.yml b/docker/blocks/prometheus/prometheus.yml
index 1d524b1b320..b0fc2a919cd 100644
--- a/docker/blocks/prometheus/prometheus.yml
+++ b/docker/blocks/prometheus/prometheus.yml
@@ -4,10 +4,6 @@ global:
evaluation_interval: 10s # By default, scrape targets every 15 seconds.
# scrape_timeout is set to the global default (10s).
- # Attach these extra labels to all timeseries collected by this Prometheus instance.
- labels:
- monitor: 'codelab-monitor'
-
# Load and evaluate rules in this file every 'evaluation_interval' seconds.
rule_files:
# - "first.rules"
diff --git a/docs/sources/datasources/graphite.md b/docs/sources/datasources/graphite.md
index dc545af5007..9640d21a81d 100644
--- a/docs/sources/datasources/graphite.md
+++ b/docs/sources/datasources/graphite.md
@@ -29,7 +29,7 @@ Url | The http protocol, ip and port of you graphite-web or graphite-api install
Access | Proxy = access via Grafana backend, Direct = access directory from browser.
-Proxy access means that the Grafana backend will proxy all requests from the browser, and send them on to the Data Source. This is useful because it can eliminate CORS (Cross Origin Site Resource) issues, as well as eliminate the need to disseminate authentication details to the Data Source to the brower.
+Proxy access means that the Grafana backend will proxy all requests from the browser, and send them on to the Data Source. This is useful because it can eliminate CORS (Cross-Origin Resource Sharing) issues, as well as eliminate the need to disseminate Data Source authentication details to the browser.
Direct access is still supported because in some cases it may be useful to access a Data Source directly depending on the use case and topology of Grafana, the user, and the Data Source.
@@ -78,4 +78,4 @@ You can also create nested variables that use other variables in their definitio
## Query Reference
-You can reference queries by the row “letter” that they’re on (similar to Microsoft Excel). If you add a second query to graph, you can reference the first query simply by typing in #A. This provides an easy and convenient way to build compounded queries.
\ No newline at end of file
+You can reference queries by the row “letter” that they’re on (similar to Microsoft Excel). If you add a second query to a graph, you can reference the first query simply by typing in #A. This provides an easy and convenient way to build compounded queries.
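
As a quick sketch of the reference syntax described above (the metric name and the wrapping function are illustrative, not taken from the doc), a second query can build on the first like this:

```
#A  apps.backend.web01.requests.count
#B  scaleToSeconds(#A, 1)
```

Query B here transforms whatever query A returns, and keeps working if query A is later edited.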
diff --git a/docs/sources/datasources/influxdb.md b/docs/sources/datasources/influxdb.md
index b48b2b0097a..8595548a1cb 100644
--- a/docs/sources/datasources/influxdb.md
+++ b/docs/sources/datasources/influxdb.md
@@ -28,12 +28,12 @@ Name | Description
Name | The data source name, important that this is the same as in Grafana v1.x if you plan to import old dashboards.
Default | Default data source means that it will be pre-selected for new panels.
Url | The http protocol, ip and port of you influxdb api (influxdb api port is by default 8086)
-Access | Proxy = access via Grafana backend, Direct = access directory from browser.
+Access | Proxy = access via Grafana backend, Direct = access directly from browser.
Database | Name of your influxdb database
User | Name of your database user
Password | Database user's password
- > Proxy access means that the Grafana backend will proxy all requests from the browser, and send them on to the Data Source. This is useful because it can eliminate CORS (Cross Origin Site Resource) issues, as well as eliminate the need to disseminate authentication details to the Data Source to the brower.
+ > Proxy access means that the Grafana backend will proxy all requests from the browser, and send them on to the Data Source. This is useful because it can eliminate CORS (Cross-Origin Resource Sharing) issues, as well as eliminate the need to disseminate Data Source authentication details to the browser.
> Direct access is still supported because in some cases it may be useful to access a Data Source directly depending on the use case and topology of Grafana, the user, and the Data Source.
diff --git a/docs/sources/datasources/kairosdb.md b/docs/sources/datasources/kairosdb.md
index 7619c8e8b1f..fc9b5682515 100644
--- a/docs/sources/datasources/kairosdb.md
+++ b/docs/sources/datasources/kairosdb.md
@@ -32,7 +32,7 @@ Open a graph in edit mode by click the title.

-For details on KairosDB metric queries checkout the offical.
+For details on KairosDB metric queries, check out the official documentation:
- [Query Metrics - KairosDB 0.9.4 documentation](http://kairosdb.github.io/kairosdocs/restapi/QueryMetrics.html).
## Templated queries
@@ -49,4 +49,4 @@ For details of `metric names`, `tag names`, and `tag values`, please refer to th
- [List Metric Names - KairosDB 0.9.4 documentation](http://kairosdb.github.io/kairosdocs/restapi/ListMetricNames.html)
- [List Tag Names - KairosDB 0.9.4 documentation](http://kairosdb.github.io/kairosdocs/restapi/ListTagNames.html)
- [List Tag Values - KairosDB 0.9.4 documentation](http://kairosdb.github.io/kairosdocs/restapi/ListTagValues.html)
-- [Query Metrics - KairosDB 0.9.4 documentation](http://kairosdb.github.io/kairosdocs/restapi/QueryMetrics.html).
\ No newline at end of file
+- [Query Metrics - KairosDB 0.9.4 documentation](http://kairosdb.github.io/kairosdocs/restapi/QueryMetrics.html).
diff --git a/docs/sources/datasources/prometheus.md b/docs/sources/datasources/prometheus.md
index e975a5cf8a4..61f6c0d66e8 100644
--- a/docs/sources/datasources/prometheus.md
+++ b/docs/sources/datasources/prometheus.md
@@ -28,7 +28,7 @@ Basic Auth | Enable basic authentication to the Prometheus datasource.
User | Name of your Prometheus user
Password | Database user's password
- > Proxy access means that the Grafana backend will proxy all requests from the browser, and send them on to the Data Source. This is useful because it can eliminate CORS (Cross Origin Site Resource) issues, as well as eliminate the need to disseminate authentication details to the Data Source to the brower.
+ > Proxy access means that the Grafana backend will proxy all requests from the browser, and send them on to the Data Source. This is useful because it can eliminate CORS (Cross-Origin Resource Sharing) issues, as well as eliminate the need to disseminate Data Source authentication details to the browser.
> Direct access is still supported because in some cases it may be useful to access a Data Source directly depending on the use case and topology of Grafana, the user, and the Data Source.
diff --git a/docs/sources/guides/gettingstarted.md b/docs/sources/guides/gettingstarted.md
index b8bd27609a3..c92f30098ab 100644
--- a/docs/sources/guides/gettingstarted.md
+++ b/docs/sources/guides/gettingstarted.md
@@ -29,7 +29,7 @@ The image above shows you the top header for a Dashboard.
6. Settings: Manage Dashboard settings and features such as Templating and Annotations.
## Dashboards, Panels, Rows, the building blocks of Grafana...
-Dashboards are at the core of what Grafana is all about. Dashboards are composed of individual Panels arranged on a number of Rows. Grafana ships with a variety of Panels. Gafana makes it easy to construct the right queries, and customize the display properities so that you can create the perfect Dashboard for your need. Each Panel can interact with data from any configured Grafana Data Source (currently InfluxDB, Graphite, OpenTSDB, and KairosDB). The [Core Concepts](/guides/basic_concepts) guide explores these key ideas in detail.
+Dashboards are at the core of what Grafana is all about. Dashboards are composed of individual Panels arranged on a number of Rows. Grafana ships with a variety of Panels. Grafana makes it easy to construct the right queries, and customize the display properties so that you can create the perfect Dashboard for your needs. Each Panel can interact with data from any configured Grafana Data Source (currently InfluxDB, Graphite, OpenTSDB, and KairosDB). The [Core Concepts](/guides/basic_concepts) guide explores these key ideas in detail.
## Adding & Editing Graphs and Panels
diff --git a/docs/sources/guides/screencasts.md b/docs/sources/guides/screencasts.md
index edf3d7b8122..500ca9f9b6a 100644
--- a/docs/sources/guides/screencasts.md
+++ b/docs/sources/guides/screencasts.md
@@ -32,7 +32,7 @@ no_toc: true
Episode 4 - Installation & Configuration on Ubuntu / Debian
- Learn how to easily install the dependencies and packages to get Grafana 2.0 up and running on Ubuntu or Debian in just a few mintues.
+ Learn how to easily install the dependencies and packages to get Grafana 2.0 up and running on Ubuntu or Debian in just a few minutes.
diff --git a/docs/sources/guides/whats-new-in-v2-1.md b/docs/sources/guides/whats-new-in-v2-1.md
index 6a6ebba98de..10519cb0422 100644
--- a/docs/sources/guides/whats-new-in-v2-1.md
+++ b/docs/sources/guides/whats-new-in-v2-1.md
@@ -112,7 +112,7 @@ for example make all series that contain the word **CPU** `red` and assigned to

-New series style override, negative-y transform and stack groups. Negative y tranform is
+New series style override, negative-y transform and stack groups. Negative y transform is
very useful if you want to plot a series on the negative y scale without affecting the legend values like min or max or
the values shown in the hover tooltip.
@@ -126,5 +126,5 @@ string values.
### Changelog
For a detailed list and link to github issues for everything included in the 2.1 release please
-view the [CHANGELOG.md]("https://github.com/grafana/grafana/blob/master/CHANGELOG.md") file.
+view the [CHANGELOG.md](https://github.com/grafana/grafana/blob/master/CHANGELOG.md) file.
diff --git a/docs/sources/installation/configuration.md b/docs/sources/installation/configuration.md
index 31d029b1101..e68edcbca0c 100644
--- a/docs/sources/installation/configuration.md
+++ b/docs/sources/installation/configuration.md
@@ -266,7 +266,7 @@ automatically signed up.
### team_ids
Require an active team membership for at least one of the given teams on
-GitHub. If the authenticated user isn't a member of at least one the
+GitHub. If the authenticated user isn't a member of at least one of the
teams they will not be able to register or authenticate with your
Grafana instance. For example:
@@ -274,7 +274,7 @@ Grafana instance. For example:
enabled = true
client_id = YOUR_GITHUB_APP_CLIENT_ID
client_secret = YOUR_GITHUB_APP_CLIENT_SECRET
- scopes = user:email
+ scopes = user:email,read:org
team_ids = 150,300
auth_url = https://github.com/login/oauth/authorize
token_url = https://github.com/login/oauth/access_token
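
The `read:org` scope added above is presumably what lets Grafana look up the user's team memberships for the `team_ids` check. A quick way to confirm a token carries that scope is GitHub's team-listing endpoint (the token value is a placeholder):

```
curl -H "Authorization: token YOUR_ACCESS_TOKEN" https://api.github.com/user/teams
```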
diff --git a/docs/sources/installation/docker.md b/docs/sources/installation/docker.md
index 53366bed44a..18dcf964450 100644
--- a/docs/sources/installation/docker.md
+++ b/docs/sources/installation/docker.md
@@ -9,7 +9,7 @@ page_keywords: grafana, installation, docker, container, guide
> **2.0.2 -> 2.1.0 Upgrade NOTICE!**
> The data and log paths were not correct in the previous image. The grafana database was placed by default in /usr/share/grafana/data instead of the correct path /var/lib/grafana. This means it was not in a dir that was marked as a volume. So if you remove the container it will remove the grafana database. So before updating make sure you copy the /usr/share/grafana/data path from inside the container to the host.
-## Install from offical docker image
+## Install from official docker image
Grafana has an official Docker container.
diff --git a/docs/sources/installation/ldap.md b/docs/sources/installation/ldap.md
index f0325a953fe..45552a8e2b0 100644
--- a/docs/sources/installation/ldap.md
+++ b/docs/sources/installation/ldap.md
@@ -1,6 +1,6 @@
---
page_title: LDAP Integration
-page_description: LDAP Integrtaion guide for Grafana.
+page_description: LDAP Integration guide for Grafana.
page_keywords: grafana, ldap, configuration, documentation, integration
---
@@ -85,12 +85,12 @@ bind_dn = "cn=%s,o=users,dc=grafana,dc=org"
```
In this case you skip providing a `bind_password` and instead provide a `bind_dn` value with a `%s` somewhere. This will be replaced with the username entered in on the Grafana login page.
-The search filter and search bases settings are still needed to perform the LDAP search to retreive the other LDAP information (like LDAP groups and email).
+The search filter and search bases settings are still needed to perform the LDAP search to retrieve the other LDAP information (like LDAP groups and email).
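
As a rough sketch of the settings that paragraph refers to (the DNs and filter are illustrative values, not from this doc), the bind template and the search settings sit side by side in the same `[[servers]]` block:

```
bind_dn = "cn=%s,o=users,dc=grafana,dc=org"
search_filter = "(cn=%s)"
search_base_dns = ["o=users,dc=grafana,dc=org"]
```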
## LDAP to Grafana Org Role Sync
## Group Mappings
-In `[[servers.group_mappings]]` you can map an LDAP group to a Grafana organization and role. These will be synced every time the user logs in, with LDAP being the authoratative source.
+In `[[servers.group_mappings]]` you can map an LDAP group to a Grafana organization and role. These will be synced every time the user logs in, with LDAP being the authoritative source.
So, if you change a user's role in the Grafana Org. Users page, this change will be reset the next time the user logs in. If you change the LDAP groups of a user, the change will take effect the next time the user logs in.
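
A minimal group-mapping sketch, with an illustrative group DN and role (not taken from this doc):

```
[[servers.group_mappings]]
group_dn = "cn=admins,dc=grafana,dc=org"
org_role = "Admin"
```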
### Priority between Multiple Mappings
diff --git a/docs/sources/installation/rpm.md b/docs/sources/installation/rpm.md
index c112c4c9fea..67057384af0 100644
--- a/docs/sources/installation/rpm.md
+++ b/docs/sources/installation/rpm.md
@@ -1,6 +1,6 @@
---
page_title: Installing on RPM-based Linux
-page_description: Grafana Installation guide for Centos, Fedora, Redhat.
+page_description: Grafana Installation guide for Centos, Fedora, OpenSuse, Redhat.
page_keywords: grafana, installation, centos, fedora, opensuse, redhat, guide
---
@@ -10,7 +10,7 @@ page_keywords: grafana, installation, centos, fedora, opensuse, redhat, guide
Description | Download
------------ | -------------
-.RPM for Fedora / RHEL / CentOS Linux | [grafana-2.1.3-1.x86_64.rpm](https://grafanarel.s3.amazonaws.com/builds/grafana-2.1.3-1.x86_64.rpm)
+.RPM for CentOS / Fedora / OpenSuse / Redhat Linux | [grafana-2.1.3-1.x86_64.rpm](https://grafanarel.s3.amazonaws.com/builds/grafana-2.1.3-1.x86_64.rpm)
## Install from package file
@@ -20,9 +20,15 @@ You can install Grafana using Yum directly.
Or install manually using `rpm`.
+#### On CentOS / Fedora / Redhat:
+
$ sudo yum install initscripts fontconfig
$ sudo rpm -Uvh grafana-2.1.3-1.x86_64.rpm
+#### On OpenSuse:
+
+ $ sudo rpm -i --nodeps grafana-2.1.3-1.x86_64.rpm
+
## Install via YUM Repository
Add the following to a new file at `/etc/yum.repos.d/grafana.repo`
diff --git a/docs/sources/project/building_from_source.md b/docs/sources/project/building_from_source.md
index 45ab26249c8..71e94b088fb 100644
--- a/docs/sources/project/building_from_source.md
+++ b/docs/sources/project/building_from_source.md
@@ -25,7 +25,7 @@ go get github.com/grafana/grafana
```
cd $GOPATH/src/github.com/grafana/grafana
go run build.go setup # (only needed once to install godep)
-$GOPATH/bin/godep restore # (will pull down all golang lib dependecies in your current GOPATH)
+$GOPATH/bin/godep restore # (will pull down all golang lib dependencies in your current GOPATH)
go run build.go build # (or 'go build .')
```
diff --git a/docs/sources/reference/admin.md b/docs/sources/reference/admin.md
index 854147b4240..c5d5073020d 100644
--- a/docs/sources/reference/admin.md
+++ b/docs/sources/reference/admin.md
@@ -24,7 +24,7 @@ modify Organization details and options.
As a Grafana Administrator, you have complete access to any Organization or User in that instance of Grafana.
-When performing actions as a Grafana admin, the sidebar will change it's apperance as below to indicate you are performing global server administration.
+When performing actions as a Grafana admin, the sidebar will change its appearance as below to indicate you are performing global server administration.
From the Grafana Server Admin page, you can access the System Info page which summarizes all of the backend configuration settings of the Grafana server.
diff --git a/docs/sources/reference/dashlist.md b/docs/sources/reference/dashlist.md
index 1f0794dabfc..ea098541da2 100644
--- a/docs/sources/reference/dashlist.md
+++ b/docs/sources/reference/dashlist.md
@@ -8,7 +8,7 @@ page_keywords: grafana, dashlist, panel, documentation
## Overview
-The dashboard list panel allows you to display dynamic links to other dashboards. The list can be configured to use starred dashbaords, a search query and/or dashboard tags.
+The dashboard list panel allows you to display dynamic links to other dashboards. The list can be configured to use starred dashboards, a search query and/or dashboard tags.
diff --git a/docs/sources/reference/graph.md b/docs/sources/reference/graph.md
index 9de23332a99..9c2ab63fbaa 100644
--- a/docs/sources/reference/graph.md
+++ b/docs/sources/reference/graph.md
@@ -30,7 +30,7 @@ The drilldown section allows adding dynamic links to the panel that can link to
or URLs
Each link has a title, a type and params. A link can be either a ``dashboard`` or ``absolute`` links.
-If it is a dashboard links, the `dashboard` value must be the name of a dashbaord. If it's an
+If it is a dashboard link, the `dashboard` value must be the name of a dashboard. If it's an
`absolute` link, the URL is the URL to link.
``params`` allows adding additional URL params to the links. The format is the ``name=value`` with
@@ -127,7 +127,7 @@ If you have stack enabled you can select what the mouse hover feature should sho
### Rendering
- ``Flot`` - Render the graphs in the browser using Flot (default)
-- ``Graphite PNG`` - Render the graph on the server using graphites render API.
+- ``Graphite PNG`` - Render the graph on the server using graphite's render API.
### Tooltip
diff --git a/docs/sources/reference/keyboard_shortcuts.md b/docs/sources/reference/keyboard_shortcuts.md
index b6e199e4e70..627bfc5c3dd 100644
--- a/docs/sources/reference/keyboard_shortcuts.md
+++ b/docs/sources/reference/keyboard_shortcuts.md
@@ -1,5 +1,5 @@
-page_title: Kayboard Shortcuts
-page_description: Kayboard Shortcuts for Grafana
+page_title: Keyboard Shortcuts
+page_description: Keyboard Shortcuts for Grafana
page_keywords: grafana, export, import, documentation
---
diff --git a/docs/sources/reference/search.md b/docs/sources/reference/search.md
index 3c4555b843e..e2d6647229c 100644
--- a/docs/sources/reference/search.md
+++ b/docs/sources/reference/search.md
@@ -11,7 +11,7 @@ Dashboards can be searched by the dashboard name, filtered by one (or many) tags
1. `Dashboard Picker`: The Dashboard Picker is your primary navigation tool to move between dashboards. It is present on all dashboards, and open the Dashboard Search. The dashboard picker also doubles as the title of the current dashboard.
-2. `Search Bar`: The search bar allows you to enter any string and search both database and file based dashbaords in real-time.
+2. `Search Bar`: The search bar allows you to enter any string and search both database and file based dashboards in real-time.
3. `Starred`: The starred link allows you to filter the list to display only starred dashboards.
4. `Tags`: The tags filter allows you to filter the list by dashboard tags.
@@ -25,14 +25,14 @@ To search and load dashboards click the open folder icon in the header or use th
Dashboard search is:
- Real-time
-- *Not* case senstitive
+- *Not* case sensitive
- Functional across stored *and* file based dashboards.
## Filter by Tag(s)
-Tags are a great way to organize your dashboards, especially as the number of dashbaords grow. Tags can be added and managed in the dashboard `Settings`.
+Tags are a great way to organize your dashboards, especially as the number of dashboards grows. Tags can be added and managed in the dashboard `Settings`.
-To filter the dashboard list by tag, click on any tag appearing in the right column. The list may be further filtered by cliking on additional tags:
+To filter the dashboard list by tag, click on any tag appearing in the right column. The list may be further filtered by clicking on additional tags:
@@ -40,7 +40,7 @@ Alternately, to see a list of all available tags, click the tags link in the sea
-When using only a keybaord: `tab` to focus on the *tags* link, `▼` down arrow key to find a tag and select with the `Enter` key.
+When using only a keyboard: `tab` to focus on the *tags* link, `▼` down arrow key to find a tag and select with the `Enter` key.
**Note**: When multiple tags are selected, Grafana will show dashboards that include **all**.
@@ -51,4 +51,4 @@ Starring is a great way to organize and find commonly used dashboards. To show o
-When using only a keybaord: `tab` to focus on the *stars* link, `▼` down arrow key to find a tag and select with the `Enter` key.
\ No newline at end of file
+When using only a keyboard: `tab` to focus on the *stars* link, `▼` down arrow key to find a tag and select with the `Enter` key.
\ No newline at end of file
diff --git a/docs/sources/reference/sharing.md b/docs/sources/reference/sharing.md
index e3a6ce3f2f5..c20f4e5f67c 100644
--- a/docs/sources/reference/sharing.md
+++ b/docs/sources/reference/sharing.md
@@ -5,7 +5,7 @@ page_keywords: grafana, sharing, guide, documentation
---
# Sharing features
-Grafana provides a number of ways to share a dashboard or a specfic panel to other users within your
+Grafana provides a number of ways to share a dashboard or a specific panel to other users within your
organization. It also provides ways to publish interactive snapshots that can be accessed by external partners.
## Share dashboard
diff --git a/docs/sources/reference/singlestat.md b/docs/sources/reference/singlestat.md
index 7a7c38da289..6f7c9fbd965 100644
--- a/docs/sources/reference/singlestat.md
+++ b/docs/sources/reference/singlestat.md
@@ -17,11 +17,11 @@ The singlestat panel has a normal query editor to allow you define your exact me
1. `Big Value`: Big Value refers to how we display the main stat for the Singlestat Panel. This is always a single value that is displayed in the Panel in between two strings, `Prefix` and `Suffix`. The single number is calculated by choosing a function (min,max,average,current,total) of your metric query. This functions reduces your query into a single numeric value.
-2. `Font Size`: You can use this section
-3. `Values`: The Value fields let you set the function (min, max, average, current, total) that your entire query is reduced into a single value with. You can also set the font size of theand font-size (as a %) of the metric query that the Panel is configured with. This reduces the entire query into a single summary value that is displayed.
+2. `Font Size`: You can use this section to select the font size of the different texts in the Singlestat Panel, i.e. prefix, value and postfix.
+3. `Values`: The Value fields let you set the function (min, max, average, current, total) that reduces your entire query to the single summary value that is displayed, along with the font-size (as a %) used for that value.
4. `Postfixes`: The Postfix fields let you define a custom label and font-size (as a %) to appear *after* the value
5. `Units`: Units are appended to the the Singlestat within the panel, and will respect the color and threshold settings for the value.
-6. `Decimals`: The Decimal field allows you to override the automatic decimal precision, and set it explicitely.
+6. `Decimals`: The Decimal field allows you to override the automatic decimal precision, and set it explicitly.
### Coloring
@@ -29,9 +29,9 @@ The coloring options of the Singlestat Panel config allow you to dynamically cha
-1. `Background`: This checkbox applies the configured thresholds and colors to the entirity of the Singlestat Panel background.
+1. `Background`: This checkbox applies the configured thresholds and colors to the entirety of the Singlestat Panel background.
2. `Value`: This checkbox applies the configured thresholds and colors to the summary stat.
-3. `Thresholds`: Change the background and value colors dyanmically within the panel, depending on the Singlestat value. The threshold field accepts **3 comma-separated** values, corresponding to the three colors directly to the right.
+3. `Thresholds`: Change the background and value colors dynamically within the panel, depending on the Singlestat value. The threshold field accepts **3 comma-separated** values, corresponding to the three colors directly to the right.
4. `Colors`: Select a color and opacity
5. `Invert order`: This link toggles the threshold color order.For example: Green, Orange, Red () will become Red, Orange, Green ().
diff --git a/docs/sources/reference/templating.md b/docs/sources/reference/templating.md
index 239cb33c378..e605af845ea 100644
--- a/docs/sources/reference/templating.md
+++ b/docs/sources/reference/templating.md
@@ -63,7 +63,7 @@ Once configured, Multi-Select Tagging provides a convenient way to group and you
### Interval
-Use the `Interval` type to create Template variables aroundr time ranges (eg. `1m`,`1h`, `1d`). There is also a special `auto` option that will change depending on the current time range, you can specify how many times the current time range should be divided to calculate the current `auto` range.
+Use the `Interval` type to create Template variables around time ranges (eg. `1m`,`1h`, `1d`). There is also a special `auto` option that will change depending on the current time range; you can specify how many times the current time range should be divided to calculate the current `auto` range.
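
As a small sketch of how such a variable is typically consumed (the variable and metric names are hypothetical), an `Interval` variable named `summarize` can be dropped straight into a Graphite target:

```
summarize(apps.backend.requests.count, '$summarize', 'sum')
```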

@@ -75,7 +75,7 @@ Use the `Custom` type to manually create Template variables around explicit valu
Template Variables can be very useful to dynamically change what you're visualizing on a given panel. Sometimes, you might want to create entire new Panels (or Rows) based on what Template Variables have been selected. This is now possible in Grafana 2.1.
-Once you've got your Template variables (of any type) configured the way you'd like, check out the Repeating Panels and Repeating Row documentatione
+Once you've got your Template variables (of any type) configured the way you'd like, check out the Repeating Panels and Repeating Row documentation.
## Screencast - Templated Graphite Queries
diff --git a/docs/sources/tutorials/hubot_howto.md b/docs/sources/tutorials/hubot_howto.md
index b2f53d0bece..85ebb3e7c85 100644
--- a/docs/sources/tutorials/hubot_howto.md
+++ b/docs/sources/tutorials/hubot_howto.md
@@ -64,13 +64,13 @@ The `hubot-grafana` plugin requires a number of environment variables to be set
The hubot plugin will take advantage of the Grafana server side rendering feature that can
render any panel on the server using phantomjs. Grafana ships with a phantomjs binary (linux only).
-To verify that this freature works try the `Direct link to rendered image` link in the panel share dialog.
+To verify that this feature works try the `Direct link to rendered image` link in the panel share dialog.
If you do not get an image when opening this link verify that the required font packages are installed for phantomjs to work.
### Grafana API Key
You need to set the environment variable `HUBOT_GRAFANA_API_KEY` to a Grafana API Key.
-You can add these from the API Keys page wich you find in the Organization dropdown.
+You can add these from the API Keys page which you find in the Organization dropdown.
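
A hedged example of exporting that key before starting hubot (the value is a placeholder; generate a real key from the API Keys page mentioned above):

```
export HUBOT_GRAFANA_API_KEY=<your-api-key>
```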
### Amazon S3
The `S3` options are optional but for the images to work properly in services like Slack and Hipchat they need
@@ -118,7 +118,7 @@ Now you can add an alias like this:
## Summary
-Grafana is going to ship with integrated Slack and Hiptchat features some day but you do
+Grafana is going to ship with integrated Slack and Hipchat features some day but you do
not have to wait for that. Grafana 2 shipped with a very clever server side rendering feature
that can render any panel to a png using phantomjs. The hubot plugin for Grafana is something
you can install and use today!
diff --git a/packaging/deb/systemd/grafana-server.service b/packaging/deb/systemd/grafana-server.service
index 16b975f23ea..dd5d2097149 100644
--- a/packaging/deb/systemd/grafana-server.service
+++ b/packaging/deb/systemd/grafana-server.service
@@ -14,7 +14,7 @@ ExecStart=/usr/sbin/grafana-server \
--config=${CONF_FILE} \
--pidfile=${PID_FILE} \
cfg:default.paths.logs=${LOG_DIR} \
- cfg:default.paths.data=${DATA_DIR} \
+ cfg:default.paths.data=${DATA_DIR}
LimitNOFILE=10000
TimeoutStopSec=20
UMask=0027
diff --git a/packaging/rpm/systemd/grafana-server.service b/packaging/rpm/systemd/grafana-server.service
index 855dce53d08..fb2ec24d123 100644
--- a/packaging/rpm/systemd/grafana-server.service
+++ b/packaging/rpm/systemd/grafana-server.service
@@ -14,7 +14,7 @@ ExecStart=/usr/sbin/grafana-server \
--config=${CONF_FILE} \
--pidfile=${PID_FILE} \
cfg:default.paths.logs=${LOG_DIR} \
- cfg:default.paths.data=${DATA_DIR} \
+ cfg:default.paths.data=${DATA_DIR}
LimitNOFILE=10000
TimeoutStopSec=20
diff --git a/pkg/api/api.go b/pkg/api/api.go
index 27eb3c749db..013b2ebe076 100644
--- a/pkg/api/api.go
+++ b/pkg/api/api.go
@@ -40,7 +40,9 @@ func Register(r *macaron.Macaron) {
r.Get("/admin/users/edit/:id", reqGrafanaAdmin, Index)
r.Get("/admin/orgs", reqGrafanaAdmin, Index)
r.Get("/admin/orgs/edit/:id", reqGrafanaAdmin, Index)
+
r.Get("/dashboard/*", reqSignedIn, Index)
+ r.Get("/dashboard-solo/*", reqSignedIn, Index)
// sign up
r.Get("/signup", Index)
diff --git a/pkg/middleware/logger.go b/pkg/middleware/logger.go
index 5b1132e111e..eb5c7b8dde4 100644
--- a/pkg/middleware/logger.go
+++ b/pkg/middleware/logger.go
@@ -22,6 +22,7 @@ import (
"github.com/Unknwon/macaron"
"github.com/grafana/grafana/pkg/log"
+ "github.com/grafana/grafana/pkg/setting"
)
func Logger() macaron.Handler {
@@ -36,7 +37,9 @@ func Logger() macaron.Handler {
switch rw.Status() {
case 200, 304:
content = fmt.Sprintf("%s", content)
- return
+ if !setting.RouterLogging {
+ return
+ }
case 404:
content = fmt.Sprintf("%s", content)
case 500:
diff --git a/pkg/plugins/plugins.go b/pkg/plugins/plugins.go
index 2f7e5264e53..665cf6a36ca 100644
--- a/pkg/plugins/plugins.go
+++ b/pkg/plugins/plugins.go
@@ -91,7 +91,6 @@ func (scanner *PluginScanner) loadPluginJson(path string) error {
if !exists {
return errors.New("Did not find type property in plugin.json")
}
-
DataSources[datasourceType.(string)] = pluginJson
}
diff --git a/public/app/app.js b/public/app/app.js
index 4f30df34d89..1e7acf2e56f 100644
--- a/public/app/app.js
+++ b/public/app/app.js
@@ -100,12 +100,7 @@ function (angular, $, _, appLevelRequire) {
var $scope = this;
$scope.requireContext(deps, function () {
var deps = _.toArray(arguments);
- // Check that this is a valid scope.
- if($scope.$id) {
- $scope.$apply(function () {
- fn.apply($scope, deps);
- });
- }
+ fn.apply($scope, deps);
});
};
}]);
diff --git a/public/app/components/kbn.js b/public/app/components/kbn.js
index 5b9516bc82f..ad2d4c3c422 100644
--- a/public/app/components/kbn.js
+++ b/public/app/components/kbn.js
@@ -8,6 +8,8 @@ function($, _) {
var kbn = {};
kbn.valueFormats = {};
+ ///// HELPER FUNCTIONS /////
+
kbn.round_interval = function(interval) {
switch (true) {
// 0.5s
@@ -170,32 +172,31 @@ function($, _) {
].join(';') + '">
diff --git a/public/app/plugins/datasource/elasticsearch/query_builder.js b/public/app/plugins/datasource/elasticsearch/query_builder.js
index ac61fe13a1e..d0fbb6a603a 100644
--- a/public/app/plugins/datasource/elasticsearch/query_builder.js
+++ b/public/app/plugins/datasource/elasticsearch/query_builder.js
@@ -15,15 +15,14 @@ function (angular) {
};
ElasticQueryBuilder.prototype.buildTermsAgg = function(aggDef, queryNode, target) {
- var metricRef, metric, size, y;
+ var metricRef, metric, y;
queryNode.terms = { "field": aggDef.field };
if (!aggDef.settings) {
return queryNode;
}
- size = parseInt(aggDef.settings.size, 10);
- if (size > 0) { queryNode.terms.size = size; }
+ queryNode.terms.size = parseInt(aggDef.settings.size, 10);
if (aggDef.settings.orderBy !== void 0) {
queryNode.terms.order = {};
queryNode.terms.order[aggDef.settings.orderBy] = aggDef.settings.order;
diff --git a/public/app/plugins/datasource/elasticsearch/specs/datasource_specs.ts b/public/app/plugins/datasource/elasticsearch/specs/datasource_specs.ts
index aecb16501b7..584f915a86d 100644
--- a/public/app/plugins/datasource/elasticsearch/specs/datasource_specs.ts
+++ b/public/app/plugins/datasource/elasticsearch/specs/datasource_specs.ts
@@ -36,22 +36,21 @@ describe('ElasticDatasource', function() {
ctx.ds.testDatasource();
ctx.$rootScope.$apply();
- var today = moment().format("YYYY.MM.DD");
+ var today = moment.utc().format("YYYY.MM.DD");
expect(requestOptions.url).to.be("http://es.com/asd-" + today + '/_stats');
});
});
describe('When issueing metric query with interval pattern', function() {
+ var requestOptions, parts, header;
+
beforeEach(function() {
ctx.ds = new ctx.service({
url: 'http://es.com',
index: '[asd-]YYYY.MM.DD',
jsonData: { interval: 'Daily' }
});
- });
- it('should translate index pattern to current day', function() {
- var requestOptions;
ctx.backendSrv.datasourceRequest = function(options) {
requestOptions = options;
return ctx.$q.when({data: {responses: []}});
@@ -62,13 +61,22 @@ describe('ElasticDatasource', function() {
from: moment([2015, 4, 30, 10]),
to: moment([2015, 5, 1, 10])
},
- targets: [{ bucketAggs: [], metrics: [] }]
+ targets: [{ bucketAggs: [], metrics: [], query: 'escape\\:test' }]
});
ctx.$rootScope.$apply();
- var parts = requestOptions.data.split('\n');
- var header = angular.fromJson(parts[0]);
+
+ parts = requestOptions.data.split('\n');
+ header = angular.fromJson(parts[0]);
+ });
+
+ it('should translate index pattern to current day', function() {
expect(header.index).to.eql(['asd-2015.05.30', 'asd-2015.05.31', 'asd-2015.06.01']);
});
+
+ it('should json escape lucene query', function() {
+ var body = angular.fromJson(parts[1]);
+ expect(body.query.filtered.query.query_string.query).to.be('escape\\:test');
+ });
});
});
diff --git a/public/app/plugins/datasource/elasticsearch/specs/index_pattern_specs.ts b/public/app/plugins/datasource/elasticsearch/specs/index_pattern_specs.ts
index 8f662bb075f..1a19b550f4e 100644
--- a/public/app/plugins/datasource/elasticsearch/specs/index_pattern_specs.ts
+++ b/public/app/plugins/datasource/elasticsearch/specs/index_pattern_specs.ts
@@ -11,7 +11,7 @@ describe('IndexPattern', function() {
describe('when getting index for today', function() {
it('should return correct index name', function() {
var pattern = new IndexPattern('[asd-]YYYY.MM.DD', 'Daily');
- var expected = 'asd-' + moment().format('YYYY.MM.DD');
+ var expected = 'asd-' + moment.utc().format('YYYY.MM.DD');
expect(pattern.getIndexForToday()).to.be(expected);
});
diff --git a/public/app/plugins/datasource/graphite/datasource.js b/public/app/plugins/datasource/graphite/datasource.js
index 553a11c2350..f9e747fb984 100644
--- a/public/app/plugins/datasource/graphite/datasource.js
+++ b/public/app/plugins/datasource/graphite/datasource.js
@@ -75,7 +75,7 @@ function (angular, _, $, config, dateMath) {
if (annotation.target) {
var target = templateSrv.replace(annotation.target);
var graphiteQuery = {
- range: rangeUnparsed,
+ rangeRaw: rangeUnparsed,
targets: [{ target: target }],
format: 'json',
maxDataPoints: 100
diff --git a/public/app/plugins/datasource/influxdb/datasource.js b/public/app/plugins/datasource/influxdb/datasource.js
index d6a5ccc37ad..b8ee9b15d49 100644
--- a/public/app/plugins/datasource/influxdb/datasource.js
+++ b/public/app/plugins/datasource/influxdb/datasource.js
@@ -12,7 +12,7 @@ function (angular, _, dateMath, InfluxSeries, InfluxQueryBuilder) {
var module = angular.module('grafana.services');
- module.factory('InfluxDatasource', function($q, $http, templateSrv) {
+ module.factory('InfluxDatasource', function($q, backendSrv, templateSrv) {
function InfluxDatasource(datasource) {
this.type = 'influxdb';
@@ -78,7 +78,7 @@ function (angular, _, dateMath, InfluxSeries, InfluxQueryBuilder) {
};
InfluxDatasource.prototype.annotationQuery = function(annotation, rangeUnparsed) {
- var timeFilter = getTimeFilter({ range: rangeUnparsed });
+ var timeFilter = getTimeFilter({ rangeRaw: rangeUnparsed });
var query = annotation.query.replace('$timeFilter', timeFilter);
query = templateSrv.replace(query);
@@ -161,7 +161,7 @@ function (angular, _, dateMath, InfluxSeries, InfluxQueryBuilder) {
options.headers.Authorization = self.basicAuth;
}
- return $http(options).then(function(result) {
+ return backendSrv.datasourceRequest(options).then(function(result) {
return result.data;
}, function(err) {
if (err.status !== 0 || err.status >= 300) {
diff --git a/public/app/plugins/datasource/influxdb/partials/annotations.editor.html b/public/app/plugins/datasource/influxdb/partials/annotations.editor.html
index fe867d68f36..cec47e7a7e4 100644
--- a/public/app/plugins/datasource/influxdb/partials/annotations.editor.html
+++ b/public/app/plugins/datasource/influxdb/partials/annotations.editor.html
@@ -9,7 +9,7 @@
- Column mappings If your influxdb query returns more than one column you need to specify the column names bellow. An annotation event is composed of a title, tags, and an additional text field.
+ Column mappings If your influxdb query returns more than one column you need to specify the column names below. An annotation event is composed of a title, tags, and an additional text field.
diff --git a/public/app/plugins/datasource/influxdb_08/datasource.js b/public/app/plugins/datasource/influxdb_08/datasource.js
index 5dabc93efa0..a5058db3eb6 100644
--- a/public/app/plugins/datasource/influxdb_08/datasource.js
+++ b/public/app/plugins/datasource/influxdb_08/datasource.js
@@ -2,11 +2,11 @@ define([
'angular',
'lodash',
'app/core/utils/datemath',
- './influxSeries',
- './queryBuilder',
+ './influx_series',
+ './query_builder',
'./directives',
- './queryCtrl',
- './funcEditor',
+ './query_ctrl',
+ './func_editor',
],
function (angular, _, dateMath, InfluxSeries, InfluxQueryBuilder) {
'use strict';
@@ -266,11 +266,11 @@ function (angular, _, dateMath, InfluxSeries, InfluxQueryBuilder) {
}
function getInfluxTime(date, roundUp) {
- if (_.isString(date) && date.indexOf('/') === -1) {
+ if (_.isString(date)) {
if (date === 'now') {
return 'now()';
}
- if (date.indexOf('now-') >= 0) {
+ if (date.indexOf('now-') >= 0 && date.indexOf('/') === -1) {
return date.replace('now', 'now()');
}
date = dateMath.parse(date, roundUp);
diff --git a/public/app/plugins/datasource/influxdb_08/funcEditor.js b/public/app/plugins/datasource/influxdb_08/func_editor.js
similarity index 100%
rename from public/app/plugins/datasource/influxdb_08/funcEditor.js
rename to public/app/plugins/datasource/influxdb_08/func_editor.js
diff --git a/public/app/plugins/datasource/influxdb_08/influxSeries.js b/public/app/plugins/datasource/influxdb_08/influx_series.js
similarity index 100%
rename from public/app/plugins/datasource/influxdb_08/influxSeries.js
rename to public/app/plugins/datasource/influxdb_08/influx_series.js
diff --git a/public/app/plugins/datasource/influxdb_08/partials/annotations.editor.html b/public/app/plugins/datasource/influxdb_08/partials/annotations.editor.html
index fe867d68f36..cec47e7a7e4 100644
--- a/public/app/plugins/datasource/influxdb_08/partials/annotations.editor.html
+++ b/public/app/plugins/datasource/influxdb_08/partials/annotations.editor.html
@@ -9,7 +9,7 @@
- Column mappings If your influxdb query returns more than one column you need to specify the column names bellow. An annotation event is composed of a title, tags, and an additional text field.
+ Column mappings If your influxdb query returns more than one column you need to specify the column names below. An annotation event is composed of a title, tags, and an additional text field.
diff --git a/public/app/plugins/datasource/influxdb_08/queryBuilder.js b/public/app/plugins/datasource/influxdb_08/query_builder.js
similarity index 100%
rename from public/app/plugins/datasource/influxdb_08/queryBuilder.js
rename to public/app/plugins/datasource/influxdb_08/query_builder.js
diff --git a/public/app/plugins/datasource/influxdb_08/queryCtrl.js b/public/app/plugins/datasource/influxdb_08/query_ctrl.js
similarity index 99%
rename from public/app/plugins/datasource/influxdb_08/queryCtrl.js
rename to public/app/plugins/datasource/influxdb_08/query_ctrl.js
index c6304bc50ae..093e0af84d1 100644
--- a/public/app/plugins/datasource/influxdb_08/queryCtrl.js
+++ b/public/app/plugins/datasource/influxdb_08/query_ctrl.js
@@ -89,6 +89,7 @@ function (angular) {
}
};
- });
+ $scope.init();
+ });
});
diff --git a/public/app/plugins/datasource/influxdb_08/specs/datasource-specs.ts b/public/app/plugins/datasource/influxdb_08/specs/datasource-specs.ts
new file mode 100644
index 00000000000..c4ea81759e8
--- /dev/null
+++ b/public/app/plugins/datasource/influxdb_08/specs/datasource-specs.ts
@@ -0,0 +1,97 @@
+///
+///
+///
+///
+
+import {describe, beforeEach, it, sinon, expect, angularMocks} from 'test/lib/common';
+
+declare var helpers: any;
+
+describe('InfluxDatasource', function() {
+ var ctx = new helpers.ServiceTestContext();
+
+ beforeEach(angularMocks.module('grafana.services'));
+ beforeEach(ctx.providePhase(['templateSrv']));
+ beforeEach(ctx.createService('InfluxDatasource_08'));
+ beforeEach(function() {
+ ctx.ds = new ctx.service({ url: '', user: 'test', password: 'mupp' });
+ });
+
+ describe('When querying influxdb with one target using query editor target spec', function() {
+ var results;
+ var urlExpected = "/series?p=mupp&q=select+mean(value)+from+%22test%22+where+time+%3E+now()-1h+group+by+time(1s)+order+asc";
+ var query = {
+ rangeRaw: { from: 'now-1h', to: 'now' },
+ targets: [{ series: 'test', column: 'value', function: 'mean' }],
+ interval: '1s'
+ };
+
+ var response = [{
+ columns: ["time", "sequence_nr", "value"],
+ name: 'test',
+ points: [[10, 1, 1]],
+ }];
+
+ beforeEach(function() {
+ ctx.$httpBackend.expect('GET', urlExpected).respond(response);
+ ctx.ds.query(query).then(function(data) { results = data; });
+ ctx.$httpBackend.flush();
+ });
+
+ it('should generate the correct query', function() {
+ ctx.$httpBackend.verifyNoOutstandingExpectation();
+ });
+
+ it('should return series list', function() {
+ expect(results.data.length).to.be(1);
+ expect(results.data[0].target).to.be('test.value');
+ });
+
+ });
+
+ describe('When querying influxdb with one raw query', function() {
+ var results;
+ var urlExpected = "/series?p=mupp&q=select+value+from+series+where+time+%3E+now()-1h";
+ var query = {
+ rangeRaw: { from: 'now-1h', to: 'now' },
+ targets: [{ query: "select value from series where $timeFilter", rawQuery: true }]
+ };
+
+ var response = [];
+
+ beforeEach(function() {
+ ctx.$httpBackend.expect('GET', urlExpected).respond(response);
+ ctx.ds.query(query).then(function(data) { results = data; });
+ ctx.$httpBackend.flush();
+ });
+
+ it('should generate the correct query', function() {
+ ctx.$httpBackend.verifyNoOutstandingExpectation();
+ });
+
+ });
+
+ describe('When issuing annotation query', function() {
+ var results;
+ var urlExpected = "/series?p=mupp&q=select+title+from+events.backend_01+where+time+%3E+now()-1h";
+
+ var range = { from: 'now-1h', to: 'now' };
+ var annotation = { query: 'select title from events.$server where $timeFilter' };
+ var response = [];
+
+ beforeEach(function() {
+ ctx.templateSrv.replace = function(str) {
+ return str.replace('$server', 'backend_01');
+ };
+ ctx.$httpBackend.expect('GET', urlExpected).respond(response);
+ ctx.ds.annotationQuery(annotation, range).then(function(data) { results = data; });
+ ctx.$httpBackend.flush();
+ });
+
+ it('should generate the correct query', function() {
+ ctx.$httpBackend.verifyNoOutstandingExpectation();
+ });
+
+ });
+
+});
diff --git a/public/app/plugins/datasource/influxdb_08/specs/influx_series_specs.ts b/public/app/plugins/datasource/influxdb_08/specs/influx_series_specs.ts
new file mode 100644
index 00000000000..f8f2e4ed62f
--- /dev/null
+++ b/public/app/plugins/datasource/influxdb_08/specs/influx_series_specs.ts
@@ -0,0 +1,220 @@
+///
+
+import {describe, beforeEach, it, sinon, expect} from 'test/lib/common';
+
+declare var InfluxSeries: any;
+
+describe('when generating timeseries from influxdb response', function() {
+
+ describe('given two series', function() {
+ var series = new InfluxSeries({
+ seriesList: [
+ {
+ columns: ['time', 'mean', 'sequence_number'],
+ name: 'prod.server1.cpu',
+ points: [[1402596000, 10, 1], [1402596001, 12, 2]]
+ },
+ {
+ columns: ['time', 'mean', 'sequence_number'],
+ name: 'prod.server2.cpu',
+ points: [[1402596000, 15, 1], [1402596001, 16, 2]]
+ }
+ ]
+ });
+
+ var result = series.getTimeSeries();
+
+ it('should generate two time series', function() {
+ expect(result.length).to.be(2);
+ expect(result[0].target).to.be('prod.server1.cpu.mean');
+ expect(result[0].datapoints[0][0]).to.be(10);
+ expect(result[0].datapoints[0][1]).to.be(1402596000);
+ expect(result[0].datapoints[1][0]).to.be(12);
+ expect(result[0].datapoints[1][1]).to.be(1402596001);
+
+ expect(result[1].target).to.be('prod.server2.cpu.mean');
+ expect(result[1].datapoints[0][0]).to.be(15);
+ expect(result[1].datapoints[0][1]).to.be(1402596000);
+ expect(result[1].datapoints[1][0]).to.be(16);
+ expect(result[1].datapoints[1][1]).to.be(1402596001);
+ });
+
+ });
+
+ describe('given an alias format', function() {
+ var series = new InfluxSeries({
+ seriesList: [
+ {
+ columns: ['time', 'mean', 'sequence_number'],
+ name: 'prod.server1.cpu',
+ points: [[1402596000, 10, 1], [1402596001, 12, 2]]
+ }
+ ],
+ alias: '$s.testing'
+ });
+
+ var result = series.getTimeSeries();
+
+ it('should generate correct series name', function() {
+ expect(result[0].target).to.be('prod.server1.cpu.testing');
+ });
+
+ });
+
+ describe('given an alias format with segment numbers', function() {
+ var series = new InfluxSeries({
+ seriesList: [
+ {
+ columns: ['time', 'mean', 'sequence_number'],
+ name: 'prod.server1.cpu',
+ points: [[1402596000, 10, 1], [1402596001, 12, 2]]
+ }
+ ],
+ alias: '$1.mean'
+ });
+
+ var result = series.getTimeSeries();
+
+ it('should generate correct series name', function() {
+ expect(result[0].target).to.be('server1.mean');
+ });
+
+ });
+
+ describe('given an alias format and many segments', function() {
+ var series = new InfluxSeries({
+ seriesList: [
+ {
+ columns: ['time', 'mean', 'sequence_number'],
+ name: 'a0.a1.a2.a3.a4.a5.a6.a7.a8.a9.a10.a11.a12',
+ points: [[1402596000, 10, 1], [1402596001, 12, 2]]
+ }
+ ],
+ alias: '$5.$11.mean'
+ });
+
+ var result = series.getTimeSeries();
+
+ it('should generate correct series name', function() {
+ expect(result[0].target).to.be('a5.a11.mean');
+ });
+
+ });
+
+
+ describe('given an alias format with group by field', function() {
+ var series = new InfluxSeries({
+ seriesList: [
+ {
+ columns: ['time', 'mean', 'host'],
+ name: 'prod.cpu',
+ points: [[1402596000, 10, 'A']]
+ }
+ ],
+ groupByField: 'host',
+ alias: '$g.$1'
+ });
+
+ var result = series.getTimeSeries();
+
+ it('should generate correct series name', function() {
+ expect(result[0].target).to.be('A.cpu');
+ });
+
+ });
+
+ describe('given group by column', function() {
+ var series = new InfluxSeries({
+ seriesList: [
+ {
+ columns: ['time', 'mean', 'host'],
+ name: 'prod.cpu',
+ points: [
+ [1402596000, 10, 'A'],
+ [1402596001, 11, 'A'],
+ [1402596000, 5, 'B'],
+ [1402596001, 6, 'B'],
+ ]
+ }
+ ],
+ groupByField: 'host'
+ });
+
+ var result = series.getTimeSeries();
+
+ it('should generate two time series', function() {
+ expect(result.length).to.be(2);
+ expect(result[0].target).to.be('prod.cpu.A');
+ expect(result[0].datapoints[0][0]).to.be(10);
+ expect(result[0].datapoints[0][1]).to.be(1402596000);
+ expect(result[0].datapoints[1][0]).to.be(11);
+ expect(result[0].datapoints[1][1]).to.be(1402596001);
+
+ expect(result[1].target).to.be('prod.cpu.B');
+ expect(result[1].datapoints[0][0]).to.be(5);
+ expect(result[1].datapoints[0][1]).to.be(1402596000);
+ expect(result[1].datapoints[1][0]).to.be(6);
+ expect(result[1].datapoints[1][1]).to.be(1402596001);
+ });
+
+ });
+
+});
+
+describe("when creating annotations from influxdb response", function() {
+ describe('given column mapping for all columns', function() {
+ var series = new InfluxSeries({
+ seriesList: [
+ {
+ columns: ['time', 'text', 'sequence_number', 'title', 'tags'],
+ name: 'events1',
+ points: [[1402596000000, 'some text', 1, 'Hello', 'B'], [1402596001000, 'asd', 2, 'Hello2', 'B']]
+ }
+ ],
+ annotation: {
+ query: 'select',
+ titleColumn: 'title',
+ tagsColumn: 'tags',
+ textColumn: 'text',
+ }
+ });
+
+ var result = series.getAnnotations();
+
+    it(' should generate 2 annotations ', function() {
+ expect(result.length).to.be(2);
+ expect(result[0].annotation.query).to.be('select');
+ expect(result[0].title).to.be('Hello');
+ expect(result[0].time).to.be(1402596000000);
+ expect(result[0].tags).to.be('B');
+ expect(result[0].text).to.be('some text');
+ });
+
+ });
+
+ describe('given no column mapping', function() {
+ var series = new InfluxSeries({
+ seriesList: [
+ {
+ columns: ['time', 'text', 'sequence_number'],
+ name: 'events1',
+ points: [[1402596000000, 'some text', 1]]
+ }
+ ],
+ annotation: { query: 'select' }
+ });
+
+ var result = series.getAnnotations();
+
+    it('should generate 1 annotation', function() {
+ expect(result.length).to.be(1);
+ expect(result[0].title).to.be('some text');
+ expect(result[0].time).to.be(1402596000000);
+ expect(result[0].tags).to.be(undefined);
+ expect(result[0].text).to.be(undefined);
+ });
+
+ });
+
+});
+
diff --git a/public/app/plugins/datasource/influxdb_08/specs/query_builder_specs.ts b/public/app/plugins/datasource/influxdb_08/specs/query_builder_specs.ts
new file mode 100644
index 00000000000..5876efb962e
--- /dev/null
+++ b/public/app/plugins/datasource/influxdb_08/specs/query_builder_specs.ts
@@ -0,0 +1,78 @@
+///
+
+import {describe, beforeEach, it, sinon, expect} from 'test/lib/common';
+
+declare var InfluxQueryBuilder: any;
+
+describe('InfluxQueryBuilder', function() {
+
+  describe('series with condition and group by', function() {
+ var builder = new InfluxQueryBuilder({
+ series: 'google.test',
+ column: 'value',
+ function: 'mean',
+ condition: "code=1",
+ groupby_field: 'code'
+ });
+
+ var query = builder.build();
+
+ it('should generate correct query', function() {
+ expect(query).to.be('select code, mean(value) from "google.test" where $timeFilter and code=1 ' +
+ 'group by time($interval), code order asc');
+ });
+
+    it('should expose groupByField', function() {
+ expect(builder.groupByField).to.be('code');
+ });
+
+ });
+
+ describe('series with fill and minimum group by time', function() {
+ var builder = new InfluxQueryBuilder({
+ series: 'google.test',
+ column: 'value',
+ function: 'mean',
+ fill: '0',
+ });
+
+ var query = builder.build();
+
+ it('should generate correct query', function() {
+ expect(query).to.be('select mean(value) from "google.test" where $timeFilter ' +
+ 'group by time($interval) fill(0) order asc');
+ });
+
+ });
+
+ describe('merge function detection', function() {
+ it('should not quote wrap regex merged series', function() {
+ var builder = new InfluxQueryBuilder({
+ series: 'merge(/^google.test/)',
+ column: 'value',
+ function: 'mean'
+ });
+
+ var query = builder.build();
+
+ expect(query).to.be('select mean(value) from merge(/^google.test/) where $timeFilter ' +
+ 'group by time($interval) order asc');
+ });
+
+ it('should quote wrap series names that start with "merge"', function() {
+ var builder = new InfluxQueryBuilder({
+ series: 'merge.google.test',
+ column: 'value',
+ function: 'mean'
+ });
+
+ var query = builder.build();
+
+ expect(query).to.be('select mean(value) from "merge.google.test" where $timeFilter ' +
+ 'group by time($interval) order asc');
+ });
+
+ });
+
+});
+
diff --git a/public/test/specs/dashboardSrv-specs.js b/public/test/specs/dashboardSrv-specs.js
index 112dc7b9aad..276e28b32c8 100644
--- a/public/test/specs/dashboardSrv-specs.js
+++ b/public/test/specs/dashboardSrv-specs.js
@@ -152,7 +152,10 @@ define([
rows: [
{
panels: [
- {type: 'graphite', legend: true, aliasYAxis: { test: 2 }, grid: { min: 1, max: 10 }}
+ {
+ type: 'graphite', legend: true, aliasYAxis: { test: 2 }, grid: { min: 1, max: 10 },
+ targets: [{refId: 'A'}, {}],
+ }
]
}
]
@@ -178,6 +181,10 @@ define([
expect(graph.type).to.be('graph');
});
+ it('queries without refId should get it', function() {
+ expect(graph.targets[1].refId).to.be('B');
+ });
+
it('update legend setting', function() {
expect(graph.legend.show).to.be(true);
});
diff --git a/public/test/specs/helpers.js b/public/test/specs/helpers.js
index 011c0cc70d6..9fa5aeee209 100644
--- a/public/test/specs/helpers.js
+++ b/public/test/specs/helpers.js
@@ -38,9 +38,10 @@ define([
};
this.createControllerPhase = function(controllerName) {
- return inject(function($controller, $rootScope, $q, $location) {
+ return inject(function($controller, $rootScope, $q, $location, $browser) {
self.scope = $rootScope.$new();
self.$location = $location;
+ self.$browser = $browser;
self.scope.contextSrv = {};
self.scope.panel = {};
self.scope.row = { panels:[] };
diff --git a/public/test/specs/influxQueryBuilder-specs.js b/public/test/specs/influxQueryBuilder-specs.js
deleted file mode 100644
index 5dcb7a9facd..00000000000
--- a/public/test/specs/influxQueryBuilder-specs.js
+++ /dev/null
@@ -1,78 +0,0 @@
-define([
- 'app/plugins/datasource/influxdb_08/queryBuilder'
-], function(InfluxQueryBuilder) {
- 'use strict';
-
- describe('InfluxQueryBuilder', function() {
-
- describe('series with conditon and group by', function() {
- var builder = new InfluxQueryBuilder({
- series: 'google.test',
- column: 'value',
- function: 'mean',
- condition: "code=1",
- groupby_field: 'code'
- });
-
- var query = builder.build();
-
- it('should generate correct query', function() {
- expect(query).to.be('select code, mean(value) from "google.test" where $timeFilter and code=1 ' +
- 'group by time($interval), code order asc');
- });
-
- it('should expose groupByFiled', function() {
- expect(builder.groupByField).to.be('code');
- });
-
- });
-
- describe('series with fill and minimum group by time', function() {
- var builder = new InfluxQueryBuilder({
- series: 'google.test',
- column: 'value',
- function: 'mean',
- fill: '0',
- });
-
- var query = builder.build();
-
- it('should generate correct query', function() {
- expect(query).to.be('select mean(value) from "google.test" where $timeFilter ' +
- 'group by time($interval) fill(0) order asc');
- });
-
- });
-
- describe('merge function detection', function() {
- it('should not quote wrap regex merged series', function() {
- var builder = new InfluxQueryBuilder({
- series: 'merge(/^google.test/)',
- column: 'value',
- function: 'mean'
- });
-
- var query = builder.build();
-
- expect(query).to.be('select mean(value) from merge(/^google.test/) where $timeFilter ' +
- 'group by time($interval) order asc');
- });
-
- it('should quote wrap series names that start with "merge"', function() {
- var builder = new InfluxQueryBuilder({
- series: 'merge.google.test',
- column: 'value',
- function: 'mean'
- });
-
- var query = builder.build();
-
- expect(query).to.be('select mean(value) from "merge.google.test" where $timeFilter ' +
- 'group by time($interval) order asc');
- });
-
- });
-
- });
-
-});
diff --git a/public/test/specs/influxSeries08-specs.js b/public/test/specs/influxSeries08-specs.js
deleted file mode 100644
index 7b99a3035a9..00000000000
--- a/public/test/specs/influxSeries08-specs.js
+++ /dev/null
@@ -1,220 +0,0 @@
-define([
- 'app/plugins/datasource/influxdb_08/influxSeries'
-], function(InfluxSeries) {
- 'use strict';
-
- describe('when generating timeseries from influxdb response', function() {
-
- describe('given two series', function() {
- var series = new InfluxSeries({
- seriesList: [
- {
- columns: ['time', 'mean', 'sequence_number'],
- name: 'prod.server1.cpu',
- points: [[1402596000, 10, 1], [1402596001, 12, 2]]
- },
- {
- columns: ['time', 'mean', 'sequence_number'],
- name: 'prod.server2.cpu',
- points: [[1402596000, 15, 1], [1402596001, 16, 2]]
- }
- ]
- });
-
- var result = series.getTimeSeries();
-
- it('should generate two time series', function() {
- expect(result.length).to.be(2);
- expect(result[0].target).to.be('prod.server1.cpu.mean');
- expect(result[0].datapoints[0][0]).to.be(10);
- expect(result[0].datapoints[0][1]).to.be(1402596000);
- expect(result[0].datapoints[1][0]).to.be(12);
- expect(result[0].datapoints[1][1]).to.be(1402596001);
-
- expect(result[1].target).to.be('prod.server2.cpu.mean');
- expect(result[1].datapoints[0][0]).to.be(15);
- expect(result[1].datapoints[0][1]).to.be(1402596000);
- expect(result[1].datapoints[1][0]).to.be(16);
- expect(result[1].datapoints[1][1]).to.be(1402596001);
- });
-
- });
-
- describe('given an alias format', function() {
- var series = new InfluxSeries({
- seriesList: [
- {
- columns: ['time', 'mean', 'sequence_number'],
- name: 'prod.server1.cpu',
- points: [[1402596000, 10, 1], [1402596001, 12, 2]]
- }
- ],
- alias: '$s.testing'
- });
-
- var result = series.getTimeSeries();
-
- it('should generate correct series name', function() {
- expect(result[0].target).to.be('prod.server1.cpu.testing');
- });
-
- });
-
- describe('given an alias format with segment numbers', function() {
- var series = new InfluxSeries({
- seriesList: [
- {
- columns: ['time', 'mean', 'sequence_number'],
- name: 'prod.server1.cpu',
- points: [[1402596000, 10, 1], [1402596001, 12, 2]]
- }
- ],
- alias: '$1.mean'
- });
-
- var result = series.getTimeSeries();
-
- it('should generate correct series name', function() {
- expect(result[0].target).to.be('server1.mean');
- });
-
- });
-
- describe('given an alias format and many segments', function() {
- var series = new InfluxSeries({
- seriesList: [
- {
- columns: ['time', 'mean', 'sequence_number'],
- name: 'a0.a1.a2.a3.a4.a5.a6.a7.a8.a9.a10.a11.a12',
- points: [[1402596000, 10, 1], [1402596001, 12, 2]]
- }
- ],
- alias: '$5.$11.mean'
- });
-
- var result = series.getTimeSeries();
-
- it('should generate correct series name', function() {
- expect(result[0].target).to.be('a5.a11.mean');
- });
-
- });
-
-
- describe('given an alias format with group by field', function() {
- var series = new InfluxSeries({
- seriesList: [
- {
- columns: ['time', 'mean', 'host'],
- name: 'prod.cpu',
- points: [[1402596000, 10, 'A']]
- }
- ],
- groupByField: 'host',
- alias: '$g.$1'
- });
-
- var result = series.getTimeSeries();
-
- it('should generate correct series name', function() {
- expect(result[0].target).to.be('A.cpu');
- });
-
- });
-
- describe('given group by column', function() {
- var series = new InfluxSeries({
- seriesList: [
- {
- columns: ['time', 'mean', 'host'],
- name: 'prod.cpu',
- points: [
- [1402596000, 10, 'A'],
- [1402596001, 11, 'A'],
- [1402596000, 5, 'B'],
- [1402596001, 6, 'B'],
- ]
- }
- ],
- groupByField: 'host'
- });
-
- var result = series.getTimeSeries();
-
- it('should generate two time series', function() {
- expect(result.length).to.be(2);
- expect(result[0].target).to.be('prod.cpu.A');
- expect(result[0].datapoints[0][0]).to.be(10);
- expect(result[0].datapoints[0][1]).to.be(1402596000);
- expect(result[0].datapoints[1][0]).to.be(11);
- expect(result[0].datapoints[1][1]).to.be(1402596001);
-
- expect(result[1].target).to.be('prod.cpu.B');
- expect(result[1].datapoints[0][0]).to.be(5);
- expect(result[1].datapoints[0][1]).to.be(1402596000);
- expect(result[1].datapoints[1][0]).to.be(6);
- expect(result[1].datapoints[1][1]).to.be(1402596001);
- });
-
- });
-
- });
-
- describe("when creating annotations from influxdb response", function() {
- describe('given column mapping for all columns', function() {
- var series = new InfluxSeries({
- seriesList: [
- {
- columns: ['time', 'text', 'sequence_number', 'title', 'tags'],
- name: 'events1',
- points: [[1402596000000, 'some text', 1, 'Hello', 'B'], [1402596001000, 'asd', 2, 'Hello2', 'B']]
- }
- ],
- annotation: {
- query: 'select',
- titleColumn: 'title',
- tagsColumn: 'tags',
- textColumn: 'text',
- }
- });
-
- var result = series.getAnnotations();
-
- it(' should generate 2 annnotations ', function() {
- expect(result.length).to.be(2);
- expect(result[0].annotation.query).to.be('select');
- expect(result[0].title).to.be('Hello');
- expect(result[0].time).to.be(1402596000000);
- expect(result[0].tags).to.be('B');
- expect(result[0].text).to.be('some text');
- });
-
- });
-
- describe('given no column mapping', function() {
- var series = new InfluxSeries({
- seriesList: [
- {
- columns: ['time', 'text', 'sequence_number'],
- name: 'events1',
- points: [[1402596000000, 'some text', 1]]
- }
- ],
- annotation: { query: 'select' }
- });
-
- var result = series.getAnnotations();
-
- it('should generate 1 annnotation', function() {
- expect(result.length).to.be(1);
- expect(result[0].title).to.be('some text');
- expect(result[0].time).to.be(1402596000000);
- expect(result[0].tags).to.be(undefined);
- expect(result[0].text).to.be(undefined);
- });
-
- });
-
- });
-
-});
diff --git a/public/test/specs/influxdb-datasource-specs.js b/public/test/specs/influxdb-datasource-specs.js
deleted file mode 100644
index 175215663e0..00000000000
--- a/public/test/specs/influxdb-datasource-specs.js
+++ /dev/null
@@ -1,101 +0,0 @@
-define([
- './helpers',
- 'app/plugins/datasource/influxdb_08/datasource',
- 'app/services/backendSrv',
- 'app/services/alertSrv'
-], function(helpers) {
- 'use strict';
-
- describe('InfluxDatasource', function() {
- var ctx = new helpers.ServiceTestContext();
-
- beforeEach(module('grafana.services'));
- beforeEach(ctx.providePhase(['templateSrv']));
- beforeEach(ctx.createService('InfluxDatasource_08'));
- beforeEach(function() {
- ctx.ds = new ctx.service({ url: '', user: 'test', password: 'mupp' });
- });
-
- describe('When querying influxdb with one target using query editor target spec', function() {
- var results;
- var urlExpected = "/series?p=mupp&q=select+mean(value)+from+%22test%22"+
- "+where+time+%3E+now()-1h+group+by+time(1s)+order+asc";
- var query = {
- rangeRaw: { from: 'now-1h', to: 'now' },
- targets: [{ series: 'test', column: 'value', function: 'mean' }],
- interval: '1s'
- };
-
- var response = [{
- columns: ["time", "sequence_nr", "value"],
- name: 'test',
- points: [[10, 1, 1]],
- }];
-
- beforeEach(function() {
- ctx.$httpBackend.expect('GET', urlExpected).respond(response);
- ctx.ds.query(query).then(function(data) { results = data; });
- ctx.$httpBackend.flush();
- });
-
- it('should generate the correct query', function() {
- ctx.$httpBackend.verifyNoOutstandingExpectation();
- });
-
- it('should return series list', function() {
- expect(results.data.length).to.be(1);
- expect(results.data[0].target).to.be('test.value');
- });
-
- });
-
- describe('When querying influxdb with one raw query', function() {
- var results;
- var urlExpected = "/series?p=mupp&q=select+value+from+series"+
- "+where+time+%3E+now()-1h";
- var query = {
- rangeRaw: { from: 'now-1h', to: 'now' },
- targets: [{ query: "select value from series where $timeFilter", rawQuery: true }]
- };
-
- var response = [];
-
- beforeEach(function() {
- ctx.$httpBackend.expect('GET', urlExpected).respond(response);
- ctx.ds.query(query).then(function(data) { results = data; });
- ctx.$httpBackend.flush();
- });
-
- it('should generate the correct query', function() {
- ctx.$httpBackend.verifyNoOutstandingExpectation();
- });
-
- });
-
- describe('When issuing annotation query', function() {
- var results;
- var urlExpected = "/series?p=mupp&q=select+title+from+events.backend_01"+
- "+where+time+%3E+now()-1h";
-
- var range = { from: 'now-1h', to: 'now' };
- var annotation = { query: 'select title from events.$server where $timeFilter' };
- var response = [];
-
- beforeEach(function() {
- ctx.templateSrv.replace = function(str) {
- return str.replace('$server', 'backend_01');
- };
- ctx.$httpBackend.expect('GET', urlExpected).respond(response);
- ctx.ds.annotationQuery(annotation, range).then(function(data) { results = data; });
- ctx.$httpBackend.flush();
- });
-
- it('should generate the correct query', function() {
- ctx.$httpBackend.verifyNoOutstandingExpectation();
- });
-
- });
-
- });
-});
-
diff --git a/public/test/specs/kbn-format-specs.js b/public/test/specs/kbn-format-specs.js
index 366ab575491..f84544fff4f 100644
--- a/public/test/specs/kbn-format-specs.js
+++ b/public/test/specs/kbn-format-specs.js
@@ -4,6 +4,25 @@ define([
], function(kbn, dateMath) {
'use strict';
+ describe('unit format menu', function() {
+ var menu = kbn.getUnitFormats();
+ menu.map(function(submenu) {
+ describe('submenu ' + submenu.text, function() {
+ it('should have a title', function() { expect(submenu.text).to.be.a('string'); });
+ it('should have a submenu', function() { expect(submenu.submenu).to.be.an('array'); });
+ submenu.submenu.map(function(entry) {
+ describe('entry ' + entry.text, function() {
+ it('should have a title', function() { expect(entry.text).to.be.a('string'); });
+ it('should have a format', function() { expect(entry.value).to.be.a('string'); });
+ it('should have a valid format', function() {
+ expect(kbn.valueFormats[entry.value]).to.be.a('function');
+ });
+ });
+ });
+ });
+ });
+ });
+
function describeValueFormat(desc, value, tickSize, tickDecimals, result) {
describe('value format: ' + desc, function() {
@@ -26,6 +45,18 @@ define([
describeValueFormat('none', 2.75e-10, 0, 10, '3e-10');
describeValueFormat('none', 0, 0, 2, '0');
+ describeValueFormat('dB', 10, 1000, 2, '10.00 dB');
+
+ describeValueFormat('percent', 0, 0, 0, '0%');
+ describeValueFormat('percent', 53, 0, 1, '53.0%');
+ describeValueFormat('percentunit', 0.0, 0, 0, '0%');
+ describeValueFormat('percentunit', 0.278, 0, 1, '27.8%');
+ describeValueFormat('percentunit', 1.0, 0, 0, '100%');
+
+ describeValueFormat('currencyUSD', 7.42, 10000, 2, '$7.42');
+ describeValueFormat('currencyUSD', 1532.82, 1000, 1, '$1.53K');
+ describeValueFormat('currencyUSD', 18520408.7, 10000000, 0, '$19M');
+
describeValueFormat('bytes', -1.57e+308, -1.57e+308, 2, 'NA');
describeValueFormat('ns', 25, 1, 0, '25 ns');
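The new kbn format cases above hinge on the difference between `percent` (values already on a 0–100 scale) and `percentunit` (ratios on a 0–1 scale that get multiplied up before formatting). A small standalone sketch of that distinction, assuming a plain `toFixed`-based formatter rather than kbn's actual implementation — `formatPercent` and `formatPercentUnit` are illustrative names:

```js
// Illustrative sketch only: the 'percent' vs 'percentunit' distinction that the
// specs above exercise.
function formatPercent(value, decimals) {
  // 'percent' expects values already expressed on a 0-100 scale.
  return value.toFixed(decimals) + '%';
}

function formatPercentUnit(value, decimals) {
  // 'percentunit' expects a ratio (0-1) and scales it to 0-100 first.
  return (value * 100).toFixed(decimals) + '%';
}

console.log(formatPercent(53, 1));        // 53.0%
console.log(formatPercentUnit(0.278, 1)); // 27.8%
console.log(formatPercentUnit(1.0, 0));   // 100%
```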
diff --git a/public/test/specs/linkSrv-specs.js b/public/test/specs/linkSrv-specs.js
new file mode 100644
index 00000000000..8a978b650ff
--- /dev/null
+++ b/public/test/specs/linkSrv-specs.js
@@ -0,0 +1,50 @@
+define([
+ 'lodash',
+ 'app/features/panellinks/linkSrv'
+], function(_) {
+ 'use strict';
+
+ describe('linkSrv', function() {
+ var _linkSrv;
+
+ beforeEach(module('grafana.services'));
+
+ beforeEach(inject(function(linkSrv) {
+ _linkSrv = linkSrv;
+ }));
+
+ describe('when appending query strings', function() {
+
+ it('add ? to URL if not present', function() {
+ var url = _linkSrv.appendToQueryString('http://example.com', 'foo=bar');
+ expect(url).to.be('http://example.com?foo=bar');
+ });
+
+ it('do not add & to URL if ? is present but query string is empty', function() {
+ var url = _linkSrv.appendToQueryString('http://example.com?', 'foo=bar');
+ expect(url).to.be('http://example.com?foo=bar');
+ });
+
+ it('add & to URL if query string is present', function() {
+ var url = _linkSrv.appendToQueryString('http://example.com?foo=bar', 'hello=world');
+ expect(url).to.be('http://example.com?foo=bar&hello=world');
+ });
+
+ it('do not change the URL if there is nothing to append', function() {
+ _.each(['', undefined, null], function(toAppend) {
+ var url1 = _linkSrv.appendToQueryString('http://example.com', toAppend);
+ expect(url1).to.be('http://example.com');
+
+ var url2 = _linkSrv.appendToQueryString('http://example.com?', toAppend);
+ expect(url2).to.be('http://example.com?');
+
+ var url3 = _linkSrv.appendToQueryString('http://example.com?foo=bar', toAppend);
+ expect(url3).to.be('http://example.com?foo=bar');
+ });
+ });
+
+ });
+
+ });
+
+});
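The new linkSrv specs fully describe `appendToQueryString`: add a `?` when the URL has none, append with `&` only when parameters already exist, tolerate a trailing bare `?`, and leave the URL alone for empty, `undefined`, or `null` input. A standalone sketch that satisfies those cases — the real service is an Angular factory, so this function body is an assumption; only the behaviour is taken from the specs:

```js
// Illustrative sketch only: appendToQueryString behaviour as pinned down by
// the linkSrv specs above.
function appendToQueryString(url, stringToAppend) {
  if (!stringToAppend) {
    return url;                         // '', undefined and null leave the URL untouched
  }
  var pos = url.indexOf('?');
  if (pos === -1) {
    return url + '?' + stringToAppend;  // no query string yet
  }
  if (url.length - pos > 1) {
    return url + '&' + stringToAppend;  // query string already has parameters
  }
  return url + stringToAppend;          // URL ends with a bare '?'
}

console.log(appendToQueryString('http://example.com', 'foo=bar'));             // http://example.com?foo=bar
console.log(appendToQueryString('http://example.com?', 'foo=bar'));            // http://example.com?foo=bar
console.log(appendToQueryString('http://example.com?foo=bar', 'hello=world')); // http://example.com?foo=bar&hello=world
```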
diff --git a/public/test/specs/shareModalCtrl-specs.js b/public/test/specs/shareModalCtrl-specs.js
index 6aea645d557..995d663779a 100644
--- a/public/test/specs/shareModalCtrl-specs.js
+++ b/public/test/specs/shareModalCtrl-specs.js
@@ -31,6 +31,17 @@ define([
expect(ctx.scope.shareUrl).to.be('http://server/#/test?from=1000&to=2000&panelId=22&fullscreen');
});
+ it('should generate render url', function() {
+ ctx.$location.$$absUrl = 'http://dashboards.grafana.com/dashboard/db/my-dash';
+
+ ctx.scope.panel = { id: 22 };
+
+ ctx.scope.init();
+ var base = 'http://dashboards.grafana.com/render/dashboard-solo/db/my-dash';
+ var params = '?from=1000&to=2000&panelId=22&fullscreen&width=1000&height=500';
+ expect(ctx.scope.imageUrl).to.be(base + params);
+ });
+
it('should remove panel id when no panel in scope', function() {
ctx.$location.path('/test');
ctx.scope.options.forCurrent = true;
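The added shareModalCtrl case expects the panel image URL to be the dashboard's absolute URL with the path rewritten from `/dashboard/` to `/render/dashboard-solo/`, followed by the time range, panel id, and fixed render dimensions. A rough standalone sketch of that construction — `buildImageUrl`, its parameters, and the string rewrite are illustrative; only the resulting shape comes from the assertion above:

```js
// Illustrative sketch only: build the render/image URL the spec above expects.
function buildImageUrl(absUrl, timeParams, panelId) {
  // Rewrite the dashboard path to the solo-panel render endpoint...
  var base = absUrl.replace('/dashboard/', '/render/dashboard-solo/');
  // ...and append time range, panel id and fixed render dimensions.
  return base + '?' + timeParams + '&panelId=' + panelId +
         '&fullscreen&width=1000&height=500';
}

console.log(buildImageUrl('http://dashboards.grafana.com/dashboard/db/my-dash', 'from=1000&to=2000', 22));
// http://dashboards.grafana.com/render/dashboard-solo/db/my-dash?from=1000&to=2000&panelId=22&fullscreen&width=1000&height=500
```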
diff --git a/tasks/build_task.js b/tasks/build_task.js
index 364c3dc797c..7773299a06d 100644
--- a/tasks/build_task.js
+++ b/tasks/build_task.js
@@ -23,7 +23,6 @@ module.exports = function(grunt) {
'filerev',
'remapFilerev',
'usemin',
- 'clean:temp',
'uglify:genDir'
]);
diff --git a/tasks/options/concat.js b/tasks/options/concat.js
index 96ad9ae1341..c15aa8a2d6e 100644
--- a/tasks/options/concat.js
+++ b/tasks/options/concat.js
@@ -24,7 +24,6 @@ module.exports = function(config) {
],
dest: '<%= genDir %>/css/grafana.light.min.css'
},
-
js: {
src: [
'<%= tempDir %>/vendor/requirejs/require.js',
diff --git a/tasks/options/ngtemplates.js b/tasks/options/ngtemplates.js
index 2ccdf4ce5ef..10c39a49c39 100644
--- a/tasks/options/ngtemplates.js
+++ b/tasks/options/ngtemplates.js
@@ -6,7 +6,7 @@ module.exports = function(config) {
dest: '<%= genDir %>/app/components/partials.js',
options: {
bootstrap: function(module, script) {
- return "define('components/partials', ['angular'], function(angular) { \n" +
+ return "define('app/components/partials', ['angular'], function(angular) { \n" +
"angular.module('grafana').run(['$templateCache', function($templateCache) { \n" +
script +
'\n}]);' +
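For context on the ngtemplates fix above: the bootstrap hook wraps the generated `$templateCache` entries in an AMD module, and the module id now matches the `app/components/partials` path the rest of the app uses to require it. A runnable sketch of the wrapper string the hook builds — the trailing close of the `define()` call falls outside the hunk shown, so it is an assumption here:

```js
// Illustrative sketch only: reproduce the wrapper string the ngtemplates
// bootstrap hook builds around the generated template-cache entries.
// The final '\n});' that closes the define() call is an assumption.
function bootstrap(module, script) {
  return "define('app/components/partials', ['angular'], function(angular) { \n" +
    "angular.module('grafana').run(['$templateCache', function($templateCache) { \n" +
    script +
    '\n}]);' +
    '\n});';
}

console.log(bootstrap('grafana', "  $templateCache.put('app/partials/example.html', '<div></div>');"));
```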