diff --git a/.bra.toml b/.bra.toml index 36421a3c5a6..dcf316466d6 100644 --- a/.bra.toml +++ b/.bra.toml @@ -1,6 +1,6 @@ [run] init_cmds = [ - ["go", "build", "-o", "./bin/grafana-server", "./pkg/cmd/grafana-server"], + ["go", "run", "build.go", "-dev", "build-server"], ["./bin/grafana-server", "cfg:app_mode=development"] ] watch_all = true @@ -12,6 +12,6 @@ watch_dirs = [ watch_exts = [".go", ".ini", ".toml"] build_delay = 1500 cmds = [ - ["go", "build", "-o", "./bin/grafana-server", "./pkg/cmd/grafana-server"], + ["go", "run", "build.go", "-dev", "build-server"], ["./bin/grafana-server", "cfg:app_mode=development"] ] diff --git a/.circleci/config.yml b/.circleci/config.yml index cfa8b762e49..bad5a7c1cd0 100644 --- a/.circleci/config.yml +++ b/.circleci/config.yml @@ -1,6 +1,41 @@ version: 2 jobs: + codespell: + docker: + - image: circleci/python + steps: + - checkout + - run: + name: install codespell + command: 'sudo pip install codespell' + - run: + # Important: all words have to be in lowercase, and separated by "\n". + name: exclude known exceptions + command: 'echo -e "unknwon" > words_to_ignore.txt' + - run: + name: check documentation spelling errors + command: 'codespell -I ./words_to_ignore.txt docs/' + + gometalinter: + docker: + - image: circleci/golang:1.10 + environment: + # we need CGO because of go-sqlite3 + CGO_ENABLED: 1 + working_directory: /go/src/github.com/grafana/grafana + steps: + - checkout + - run: 'go get -u gopkg.in/alecthomas/gometalinter.v2' + - run: 'go get -u github.com/tsenart/deadcode' + - run: 'go get -u github.com/gordonklaus/ineffassign' + - run: 'go get -u github.com/opennota/check/cmd/structcheck' + - run: 'go get -u github.com/mdempsky/unconvert' + - run: 'go get -u github.com/opennota/check/cmd/varcheck' + - run: + name: run linters + command: 'gometalinter.v2 --enable-gc --vendor --deadline 10m --disable-all --enable=deadcode --enable=ineffassign --enable=structcheck --enable=unconvert --enable=varcheck ./...' + test-frontend: docker: - image: circleci/node:6.11.4 @@ -58,6 +93,22 @@ jobs: - scripts/*.sh - scripts/publish + build-enterprise: + docker: + - image: grafana/build-container:v0.1 + working_directory: /go/src/github.com/grafana/grafana + steps: + - checkout + - run: + name: build and package grafana + command: './scripts/build/build_enterprise.sh' + - run: + name: sign packages + command: './scripts/build/sign_packages.sh' + - run: + name: sha-sum packages + command: 'go run build.go sha-dist' + deploy-master: docker: - image: circleci/python:2.7-stretch @@ -85,7 +136,7 @@ jobs: - image: circleci/python:2.7-stretch steps: - attach_workspace: - at: dist + at: . 
- run: name: install awscli command: 'sudo pip install awscli' @@ -103,6 +154,14 @@ workflows: version: 2 test-and-build: jobs: + - codespell: + filters: + tags: + only: /.*/ + - gometalinter: + filters: + tags: + only: /.*/ - build: filters: tags: @@ -133,3 +192,7 @@ workflows: ignore: /.*/ tags: only: /^v[0-9]+(\.[0-9]+){2}(-.+|[^-.]*)$/ + # - build-enterprise: + # filters: + # tags: + # only: /.*/ diff --git a/.dockerignore b/.dockerignore new file mode 100644 index 00000000000..c79fe777899 --- /dev/null +++ b/.dockerignore @@ -0,0 +1,16 @@ +.awcache +.dockerignore +.git +.gitignore +.github +data* +dist +docker +docs +dump.rdb +node_modules +/local +/tmp +/vendor +*.yml +*.md diff --git a/.gitignore b/.gitignore index 72f6684ef20..953c98d04aa 100644 --- a/.gitignore +++ b/.gitignore @@ -1,8 +1,10 @@ node_modules npm-debug.log +yarn-error.log coverage/ .aws-config.json awsconfig +/.awcache /dist /public/build /public/views/index.html @@ -42,10 +44,13 @@ docker-compose.yaml /conf/provisioning/**/custom.yaml profile.cov /grafana +/local .notouch +/Makefile.local /pkg/cmd/grafana-cli/grafana-cli /pkg/cmd/grafana-server/grafana-server /pkg/cmd/grafana-server/debug +/pkg/extensions debug.test /examples/*/dist /packaging/**/*.rpm diff --git a/CHANGELOG.md b/CHANGELOG.md index f880be9d110..55e23a88e82 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,4 +1,34 @@ -# 5.1.0 (unreleased) +# 5.2.0 (unreleased) + +### Minor + +* **Graph**: Show invisible highest value bucket in histogram [#11498](https://github.com/grafana/grafana/issues/11498) +* **Dashboard**: Enable "Save As..." if user has edit permission [#11625](https://github.com/grafana/grafana/issues/11625) +* **Prometheus**: Table columns order now changes when rearranging queries [#11690](https://github.com/grafana/grafana/issues/11690), thx [@mtanda](https://github.com/mtanda) +* **Variables**: Fix variable interpolation when using multiple formatting types [#11800](https://github.com/grafana/grafana/issues/11800), thx [@svenklemm](https://github.com/svenklemm) +* **Dashboard**: Fix date selector styling for dark/light theme in time picker control [#11616](https://github.com/grafana/grafana/issues/11616) +* **Discord**: Alert notification channel type for Discord [#7964](https://github.com/grafana/grafana/issues/7964), thx [@jereksel](https://github.com/jereksel) +* **InfluxDB**: Support SELECT queries in templating query, [#5013](https://github.com/grafana/grafana/issues/5013) +* **Dashboard**: JSON Model under dashboard settings can now be updated & changes saved, [#1429](https://github.com/grafana/grafana/issues/1429), thx [@jereksel](https://github.com/jereksel) +* **Security**: Fix XSS vulnerabilities in dashboard links [#11813](https://github.com/grafana/grafana/pull/11813) +* **Singlestat**: Fix "time of last point" shows local time when dashboard timezone set to UTC [#10338](https://github.com/grafana/grafana/issues/10338) + +# 5.1.1 (2018-05-07) + +* **LDAP**: LDAP login with MariaDB/MySQL database and dn>100 chars not possible [#11754](https://github.com/grafana/grafana/issues/11754) +* **Build**: AppVeyor Windows build missing version and commit info [#11758](https://github.com/grafana/grafana/issues/11758) +* **Scroll**: Scroll can't start in graphs on Chrome mobile [#11710](https://github.com/grafana/grafana/issues/11710) +* **Units**: Revert renaming of unit key ppm [#11743](https://github.com/grafana/grafana/issues/11743) + +# 5.1.0 (2018-04-26) + +* **Folders**: Default permissions on folder are not shown as
inherited in its dashboards [#11668](https://github.com/grafana/grafana/issues/11668) +* **Templating**: Allow more than 20 previews when creating a variable [#11508](https://github.com/grafana/grafana/issues/11508) +* **Dashboard**: Row edit icon not shown [#11466](https://github.com/grafana/grafana/issues/11466) +* **SQL**: Unsupported data types for value column using time series query [#11703](https://github.com/grafana/grafana/issues/11703) +* **Prometheus**: Prometheus query inspector expands to be very large on autocomplete queries [#11673](https://github.com/grafana/grafana/issues/11673) + +# 5.1.0-beta1 (2018-04-20) * **MSSQL**: New Microsoft SQL Server data source [#10093](https://github.com/grafana/grafana/pull/10093), [#11298](https://github.com/grafana/grafana/pull/11298), thx [@linuxchips](https://github.com/linuxchips) * **Prometheus**: The heatmap panel now support Prometheus histograms [#10009](https://github.com/grafana/grafana/issues/10009) @@ -13,8 +43,15 @@ * **Prometheus**: Show template variable candidate in query editor [#9210](https://github.com/grafana/grafana/issues/9210), thx [@mtanda](https://github.com/mtanda) * **Prometheus**: Support POST for query and query_range [#9859](https://github.com/grafana/grafana/pull/9859), thx [@mtanda](https://github.com/mtanda) * **Alerting**: Add support for retries on alert queries [#5855](https://github.com/grafana/grafana/issues/5855), thx [@Thib17](https://github.com/Thib17) +* **Table**: Table plugin value mappings [#7119](https://github.com/grafana/grafana/issues/7119), thx [infernix](https://github.com/infernix) +* **IE11**: IE 11 compatibility [#11165](https://github.com/grafana/grafana/issues/11165) +* **Scrolling**: Better scrolling experience [#11053](https://github.com/grafana/grafana/issues/11053), [#11252](https://github.com/grafana/grafana/issues/11252), [#10836](https://github.com/grafana/grafana/issues/10836), [#11185](https://github.com/grafana/grafana/issues/11185), [#11168](https://github.com/grafana/grafana/issues/11168) +* **Docker**: Improved docker image (breaking changes regarding file ownership) [grafana-docker #141](https://github.com/grafana/grafana-docker/issues/141), thx [@Spindel](https://github.com/Spindel), [@ChristianKniep](https://github.com/ChristianKniep), [@brancz](https://github.com/brancz) and [@jangaraj](https://github.com/jangaraj) +* **Folders**: A folder admin cannot add user/team permissions for folder/its dashboards [#11173](https://github.com/grafana/grafana/issues/11173) +* **Provisioning**: Improved workflow for provisioned dashboards [#10883](https://github.com/grafana/grafana/issues/10883) ### Minor + * **OpsGenie**: Add triggered alerts as description [#11046](https://github.com/grafana/grafana/pull/11046), thx [@llamashoes](https://github.com/llamashoes) * **Cloudwatch**: Support high resolution metrics [#10925](https://github.com/grafana/grafana/pull/10925), thx [@mtanda](https://github.com/mtanda) * **Cloudwatch**: Add dimension filtering to CloudWatch `dimension_values()` [#10029](https://github.com/grafana/grafana/issues/10029), thx [@willyhutw](https://github.com/willyhutw) @@ -23,9 +60,47 @@ * **Dashboards**: Version cleanup fails on old databases with many entries [#11278](https://github.com/grafana/grafana/issues/11278) * **Server**: Adjust permissions of unix socket [#11343](https://github.com/grafana/grafana/pull/11343), thx [@corny](https://github.com/corny) * **Shortcuts**: Add shortcut for duplicate panel [#11102](https://github.com/grafana/grafana/issues/11102) 
+* **AuthProxy**: Support IPv6 in Auth proxy white list [#11330](https://github.com/grafana/grafana/pull/11330), thx [@corny](https://github.com/corny) +* **SMTP**: Don't connect to SMTP server using TLS unless configured. [#7189](https://github.com/grafana/grafana/issues/7189) +* **Prometheus**: Escape backslash in labels correctly. [#10555](https://github.com/grafana/grafana/issues/10555), thx [@roidelapluie](https://github.com/roidelapluie) +* **Variables**: Case-insensitive sorting for template values [#11128](https://github.com/grafana/grafana/issues/11128) thx [@cross](https://github.com/cross) +* **Annotations (native)**: Change default limit from 10 to 100 when querying api [#11569](https://github.com/grafana/grafana/issues/11569), thx [@flopp999](https://github.com/flopp999) +* **MySQL/Postgres/MSSQL**: PostgreSQL datasource generates invalid query with dates before 1970 [#11530](https://github.com/grafana/grafana/issues/11530) thx [@ryantxu](https://github.com/ryantxu) +* **Kiosk**: Adds url parameter for starting a dashboard in inactive mode [#11228](https://github.com/grafana/grafana/issues/11228), thx [@towolf](https://github.com/towolf) +* **Dashboard**: Enable closing timepicker using escape key [#11332](https://github.com/grafana/grafana/issues/11332) +* **Datasources**: Rename direct access mode in the data source settings [#11391](https://github.com/grafana/grafana/issues/11391) +* **Search**: Display dashboards in folder indented [#11073](https://github.com/grafana/grafana/issues/11073) +* **Units**: Use B/s instead of Bps for Bytes per second [#9342](https://github.com/grafana/grafana/pull/9342), thx [@mayli](https://github.com/mayli) +* **Units**: Radiation units [#11001](https://github.com/grafana/grafana/issues/11001), thx [@victorclaessen](https://github.com/victorclaessen) +* **Units**: Timeticks unit [#11183](https://github.com/grafana/grafana/pull/11183), thx [@jtyr](https://github.com/jtyr) +* **Units**: Concentration units and "Normal cubic metre" [#11211](https://github.com/grafana/grafana/issues/11211), thx [@flopp999](https://github.com/flopp999) +* **Units**: New currency - Czech koruna [#11384](https://github.com/grafana/grafana/pull/11384), thx [@Rohlik](https://github.com/Rohlik) +* **Avatar**: Fix DISABLE_GRAVATAR option [#11095](https://github.com/grafana/grafana/issues/11095) +* **Heatmap**: Disable log scale when using time series buckets [#10792](https://github.com/grafana/grafana/issues/10792) +* **Provisioning**: Remove `id` from json when provisioning dashboards, [#11138](https://github.com/grafana/grafana/issues/11138) +* **Prometheus**: Tooltip for legend format not showing properly [#11516](https://github.com/grafana/grafana/issues/11516), thx [@svenklemm](https://github.com/svenklemm) +* **Playlist**: Empty playlists cannot be deleted [#11133](https://github.com/grafana/grafana/issues/11133), thx [@kichristensen](https://github.com/kichristensen) +* **Switch Orgs**: Alphabetic order in Switch Organization modal [#11556](https://github.com/grafana/grafana/issues/11556) +* **Postgres**: Improve `$__timeFilter` macro [#11578](https://github.com/grafana/grafana/issues/11578), thx [@svenklemm](https://github.com/svenklemm) +* **Permission list**: Improved UX [#10747](https://github.com/grafana/grafana/issues/10747) +* **Dashboard**: Sizing and positioning of settings menu icons [#11572](https://github.com/grafana/grafana/pull/11572) +* **Dashboard**: Add search filter/tabs to new panel control
[#10427](https://github.com/grafana/grafana/issues/10427) +* **Folders**: User with org viewer role should not be able to save/move dashboards in/to general folder [#11553](https://github.com/grafana/grafana/issues/11553) +* **Influxdb**: Don't assume the first column in table response is time. [#11476](https://github.com/grafana/grafana/issues/11476), thx [@hahnjo](https://github.com/hahnjo) -# 5.0.4 (unreleased) -* **Dashboard** Fixed bug where collapsed panels could not be directly linked to/renderer [#11114](https://github.com/grafana/grafana/issues/11114) & [#11086](https://github.com/grafana/grafana/issues/11086) +### Tech +* Backend code simplification [#11613](https://github.com/grafana/grafana/pull/11613), thx [@knweiss](https://github.com/knweiss) +* Add codespell to CI [#11602](https://github.com/grafana/grafana/pull/11602), thx [@mjtrangoni](https://github.com/mjtrangoni) +* Migrated JavaScript files to TypeScript + +# 5.0.4 (2018-03-28) + +* **Docker** Can't start Grafana on Kubernetes 1.7.14, 1.8.9, or 1.9.4 [#140 in grafana-docker repo](https://github.com/grafana/grafana-docker/issues/140) thx [@suquant](https://github.com/suquant) +* **Dashboard** Fixed bug where collapsed panels could not be directly linked to/rendered [#11114](https://github.com/grafana/grafana/issues/11114) & [#11086](https://github.com/grafana/grafana/issues/11086) & [#11296](https://github.com/grafana/grafana/issues/11296) +* **Dashboard** Provisioning dashboard with alert rules should create alerts [#11247](https://github.com/grafana/grafana/issues/11247) +* **Snapshots** For snapshots, the Graph panel renders the legend incorrectly on right hand side [#11318](https://github.com/grafana/grafana/issues/11318) +* **Alerting** Link back to Grafana returns wrong URL if root_path contains sub-path components [#11403](https://github.com/grafana/grafana/issues/11403) +* **Alerting** Incorrect default value for upload images setting for alert notifiers [#11413](https://github.com/grafana/grafana/pull/11413) # 5.0.3 (2018-03-16) * **Mysql**: Mysql panic occurring occasionally upon Grafana dashboard access (a bigger patch than the one in 5.0.2) [#11155](https://github.com/grafana/grafana/issues/11155) @@ -44,7 +119,7 @@ * **Dashboards**: Changing templated value from dropdown is causing unsaved changes [#11063](https://github.com/grafana/grafana/issues/11063) * **Prometheus**: Fixes bundled Prometheus 2.0 dashboard [#11016](https://github.com/grafana/grafana/issues/11016), thx [@roidelapluie](https://github.com/roidelapluie) * **Sidemenu**: Profile menu "invisible" when gravatar is disabled [#11097](https://github.com/grafana/grafana/issues/11097) -* **Dashboard**: Fixes a bug with resizeable handles for panels [#11103](https://github.com/grafana/grafana/issues/11103) +* **Dashboard**: Fixes a bug with resizable handles for panels [#11103](https://github.com/grafana/grafana/issues/11103) * **Alerting**: Telegram inline image mode fails when caption too long [#10975](https://github.com/grafana/grafana/issues/10975) * **Alerting**: Fixes silent failing validation [#11145](https://github.com/grafana/grafana/pull/11145) * **OAuth**: Only use jwt token if it contains an email address [#11127](https://github.com/grafana/grafana/pull/11127) @@ -108,7 +183,7 @@ Grafana v5.0 is going to be the biggest and most foundational release Grafana ha ### New Major Features - **Dashboards** Dashboard folders, [#1611](https://github.com/grafana/grafana/issues/1611) - **Teams** User groups (teams) implemented.
Can be used in folder & dashboard permission list. -- **Dashboard grid**: Panels are now layed out in a two dimensional grid (with x, y, w, h). [#9093](https://github.com/grafana/grafana/issues/9093). +- **Dashboard grid**: Panels are now laid out in a two dimensional grid (with x, y, w, h). [#9093](https://github.com/grafana/grafana/issues/9093). - **Templating**: Vertical repeat direction for panel repeats. - **UX**: Major update to page header and navigation - **Dashboard settings**: Combine dashboard settings views into one with side menu, [#9750](https://github.com/grafana/grafana/issues/9750) @@ -142,7 +217,7 @@ Dashboard panels and rows are positioned using a gridPos object `{x: 0, y: 0, w: * **Dashboard history**: New config file option versions_to_keep sets how many versions per dashboard to store, [#9671](https://github.com/grafana/grafana/issues/9671) * **Dashboard as cfg**: Load dashboards from file into Grafana on startup/change [#9654](https://github.com/grafana/grafana/issues/9654) [#5269](https://github.com/grafana/grafana/issues/5269) * **Prometheus**: Grafana can now send alerts to Prometheus Alertmanager while firing [#7481](https://github.com/grafana/grafana/issues/7481), thx [@Thib17](https://github.com/Thib17) and [@mtanda](https://github.com/mtanda) -* **Table**: Support multiple table formated queries in table panel [#9170](https://github.com/grafana/grafana/issues/9170), thx [@davkal](https://github.com/davkal) +* **Table**: Support multiple table formatted queries in table panel [#9170](https://github.com/grafana/grafana/issues/9170), thx [@davkal](https://github.com/davkal) * **Security**: Protect against brute force (frequent) login attempts [#7616](https://github.com/grafana/grafana/issues/7616) ## Minor @@ -164,7 +239,7 @@ Dashboard panels and rows are positioned using a gridPos object `{x: 0, y: 0, w: * **Sensu**: Send alert message to sensu output [#9551](https://github.com/grafana/grafana/issues/9551), thx [@cjchand](https://github.com/cjchand) * **Singlestat**: suppress error when result contains no datapoints [#9636](https://github.com/grafana/grafana/issues/9636), thx [@utkarshcmu](https://github.com/utkarshcmu) * **Postgres/MySQL**: Control quoting in SQL-queries when using template variables [#9030](https://github.com/grafana/grafana/issues/9030), thanks [@svenklemm](https://github.com/svenklemm) -* **Pagerduty**: Pagerduty dont auto resolve incidents by default anymore. [#10222](https://github.com/grafana/grafana/issues/10222) +* **Pagerduty**: Pagerduty don't auto resolve incidents by default anymore. [#10222](https://github.com/grafana/grafana/issues/10222) * **Cloudwatch**: Fix for multi-valued templated queries. [#9903](https://github.com/grafana/grafana/issues/9903) ## Tech @@ -242,7 +317,7 @@ The following properties have been deprecated and will be removed in a future re * **Annotations**: Add support for creating annotations from graph panel [#8197](https://github.com/grafana/grafana/pull/8197) * **GCS**: Adds support for Google Cloud Storage [#8370](https://github.com/grafana/grafana/issues/8370) thx [@chuhlomin](https://github.com/chuhlomin) * **Prometheus**: Adds /metrics endpoint for exposing Grafana metrics. [#9187](https://github.com/grafana/grafana/pull/9187) -* **Graph**: Add support for local formating in axis. [#1395](https://github.com/grafana/grafana/issues/1395), thx [@m0nhawk](https://github.com/m0nhawk) +* **Graph**: Add support for local formatting in axis. 
[#1395](https://github.com/grafana/grafana/issues/1395), thx [@m0nhawk](https://github.com/m0nhawk) * **Jaeger**: Add support for open tracing using jaeger in Grafana. [#9213](https://github.com/grafana/grafana/pull/9213) * **Unit types**: New date & time unit types added, useful in singlestat to show dates & times. [#3678](https://github.com/grafana/grafana/issues/3678), [#6710](https://github.com/grafana/grafana/issues/6710), [#2764](https://github.com/grafana/grafana/issues/2764) * **CLI**: Make it possible to install plugins from any url [#5873](https://github.com/grafana/grafana/issues/5873) @@ -279,7 +354,7 @@ The following properties have been deprecated and will be removed in a future re * **Graphite**: Fix for Grafana internal metrics to Graphite sending NaN values [#9279](https://github.com/grafana/grafana/issues/9279) * **HTTP API**: Fix for HEAD method requests [#9307](https://github.com/grafana/grafana/issues/9307) * **Templating**: Fix for duplicate template variable queries when refresh is set to time range change [#9185](https://github.com/grafana/grafana/issues/9185) -* **Metrics**: dont write NaN values to graphite [#9279](https://github.com/grafana/grafana/issues/9279) +* **Metrics**: don't write NaN values to graphite [#9279](https://github.com/grafana/grafana/issues/9279) # 4.5.1 (2017-09-15) @@ -316,12 +391,12 @@ The following properties have been deprecated and will be removed in a future re ### Breaking change * **InfluxDB/Elasticsearch**: The panel & data source option named "Group by time interval" is now named "Min time interval" and does now always define a lower limit for the auto group by time. Without having to use `>` prefix (that prefix still works). This should in theory have close to zero actual impact on existing dashboards. It does mean that if you used this setting to define a hard group by time interval of, say "1d", if you zoomed to a time range wide enough the time range could increase above the "1d" range as the setting is now always considered a lower limit. -* **Elasticsearch**: Elasticsearch metric queries without date histogram now return table formated data making table panel much easier to use for this use case. Should not break/change existing dashboards with stock panels but external panel plugins can be affected. +* **Elasticsearch**: Elasticsearch metric queries without date histogram now return table formatted data making table panel much easier to use for this use case. Should not break/change existing dashboards with stock panels but external panel plugins can be affected. 
## Changes * **InfluxDB**: Change time range filter for absolute time ranges to be inclusive instead of exclusive [#8319](https://github.com/grafana/grafana/issues/8319), thx [@Oxydros](https://github.com/Oxydros) -* **InfluxDB**: Added paranthesis around tag filters in queries [#9131](https://github.com/grafana/grafana/pull/9131) +* **InfluxDB**: Added parentheses around tag filters in queries [#9131](https://github.com/grafana/grafana/pull/9131) ## Bug Fixes -* **Search**: Fix for issue that casued search view to hide when you clicked starred or tags filters, fixes [#8981](https://github.com/grafana/grafana/issues/8981) +* **Search**: Fix for issue that caused search view to hide when you clicked starred or tags filters, fixes [#8981](https://github.com/grafana/grafana/issues/8981) * **Modals**: ESC key now closes modal again, fixes [#8981](https://github.com/grafana/grafana/issues/8988), thx [@j-white](https://github.com/j-white) # 4.4.2 (2017-08-01) @@ -672,12 +747,12 @@ due to too many connections/file handles on the data source backend. This proble ### Enhancements * **Login**: Adds option to disable username/password logins, closes [#4674](https://github.com/grafana/grafana/issues/4674) * **SingleStat**: Add seriename as option in singlestat panel, closes [#4740](https://github.com/grafana/grafana/issues/4740) -* **Localization**: Week start day now dependant on browser locale setting, closes [#3003](https://github.com/grafana/grafana/issues/3003) +* **Localization**: Week start day now dependent on browser locale setting, closes [#3003](https://github.com/grafana/grafana/issues/3003) * **Templating**: Update panel repeats for variables that change on time refresh, closes [#5021](https://github.com/grafana/grafana/issues/5021) * **Templating**: Add support for numeric and alphabetical sorting of variable values, closes [#2839](https://github.com/grafana/grafana/issues/2839) * **Elasticsearch**: Support to set Precision Threshold for Unique Count metric, closes [#4689](https://github.com/grafana/grafana/issues/4689) * **Navigation**: Add search to org swithcer, closes [#2609](https://github.com/grafana/grafana/issues/2609) -* **Database**: Allow database config using one propertie, closes [#5456](https://github.com/grafana/grafana/pull/5456) +* **Database**: Allow database config using one property, closes [#5456](https://github.com/grafana/grafana/pull/5456) * **Graphite**: Add support for groupByNodes, closes [#5613](https://github.com/grafana/grafana/pull/5613) * **Influxdb**: Add support for elapsed(), closes [#5827](https://github.com/grafana/grafana/pull/5827) * **OpenTSDB**: Add support for explicitTags for OpenTSDB>=2.3, closes [#6360](https://github.com/grafana/grafana/pull/6361) @@ -744,7 +819,7 @@ due to too many connections/file handles on the data source backend. This proble * **Datasource**: Pending data source requests are cancelled before new ones are issues (Graphite & Prometheus), closes [#5321](https://github.com/grafana/grafana/issues/5321) ### Breaking changes -* **Logging** : Changed default logging output format (now structured into message, and key value pairs, with logger key acting as component). You can also no change in config to json log ouput. +* **Logging** : Changed default logging output format (now structured into message, and key value pairs, with logger key acting as component). You can also now change in config to json log output.
* **Graphite** : The Graph panel no longer have a Graphite PNG option. closes [#5367](https://github.com/grafana/grafana/issues/5367) ### Bug fixes @@ -762,7 +837,7 @@ due to too many connections/file handles on the data source backend. This proble * **Annotations**: Annotations can now use a template variable as data source, closes [#5054](https://github.com/grafana/grafana/issues/5054) * **Time picker**: Fixed issue timepicker and UTC when reading time from URL, fixes [#5078](https://github.com/grafana/grafana/issues/5078) * **CloudWatch**: Support for Multiple Account by AssumeRole, closes [#3522](https://github.com/grafana/grafana/issues/3522) -* **Singlestat**: Fixed alignment and minium height issue, fixes [#5113](https://github.com/grafana/grafana/issues/5113), fixes [#4679](https://github.com/grafana/grafana/issues/4679) +* **Singlestat**: Fixed alignment and minimum height issue, fixes [#5113](https://github.com/grafana/grafana/issues/5113), fixes [#4679](https://github.com/grafana/grafana/issues/4679) * **Share modal**: Fixed link when using grafana under dashboard sub url, fixes [#5109](https://github.com/grafana/grafana/issues/5109) * **Prometheus**: Fixed bug in query editor that caused it not to load when reloading page, fixes [#5107](https://github.com/grafana/grafana/issues/5107) * **Elasticsearch**: Fixed bug when template variable query returns numeric values, fixes [#5097](https://github.com/grafana/grafana/issues/5097), fixes [#5088](https://github.com/grafana/grafana/issues/5088) @@ -779,7 +854,7 @@ due to too many connections/file handles on the data source backend. This proble * **Graph**: Fixed broken PNG rendering in graph panel, fixes [#5025](https://github.com/grafana/grafana/issues/5025) * **Graph**: Fixed broken xaxis on graph panel, fixes [#5024](https://github.com/grafana/grafana/issues/5024) -* **Influxdb**: Fixes crash when hiding middle serie, fixes [#5005](https://github.com/grafana/grafana/issues/5005) +* **Influxdb**: Fixes crash when hiding middle series, fixes [#5005](https://github.com/grafana/grafana/issues/5005) # 3.0.1 Stable (2016-05-11) @@ -791,7 +866,7 @@ due to too many connections/file handles on the data source backend. This proble ### Bug fixes * **Dashboard title**: Fixed max dashboard title width (media query) for large screens, fixes [#4859](https://github.com/grafana/grafana/issues/4859) * **Annotations**: Fixed issue with entering annotation edit view, fixes [#4857](https://github.com/grafana/grafana/issues/4857) -* **Remove query**: Fixed issue with removing query for data sources without collapsable query editors, fixes [#4856](https://github.com/grafana/grafana/issues/4856) +* **Remove query**: Fixed issue with removing query for data sources without collapsible query editors, fixes [#4856](https://github.com/grafana/grafana/issues/4856) * **Graphite PNG**: Fixed issue graphite png rendering option, fixes [#4864](https://github.com/grafana/grafana/issues/4864) * **InfluxDB**: Fixed issue missing plus group by iconn, fixes [#4862](https://github.com/grafana/grafana/issues/4862) * **Graph**: Fixes missing line mode for thresholds, fixes [#4902](https://github.com/grafana/grafana/pull/4902) @@ -807,11 +882,11 @@ due to too many connections/file handles on the data source backend. 
This proble ### Bug fixes * **InfluxDB 0.12**: Fixed issue templating and `show tag values` query only returning tags for first measurement, fixes [#4726](https://github.com/grafana/grafana/issues/4726) -* **Templating**: Fixed issue with regex formating when matching multiple values, fixes [#4755](https://github.com/grafana/grafana/issues/4755) +* **Templating**: Fixed issue with regex formatting when matching multiple values, fixes [#4755](https://github.com/grafana/grafana/issues/4755) * **Templating**: Fixed issue with custom all value and escaping, fixes [#4736](https://github.com/grafana/grafana/issues/4736) * **Dashlist**: Fixed issue dashboard list panel and caching tags, fixes [#4768](https://github.com/grafana/grafana/issues/4768) * **Graph**: Fixed issue with unneeded scrollbar in legend for Firefox, fixes [#4760](https://github.com/grafana/grafana/issues/4760) -* **Table panel**: Fixed issue table panel formating string array properties, fixes [#4791](https://github.com/grafana/grafana/issues/4791) +* **Table panel**: Fixed issue table panel formatting string array properties, fixes [#4791](https://github.com/grafana/grafana/issues/4791) * **grafana-cli**: Improve error message when failing to install plugins due to corrupt response, fixes [#4651](https://github.com/grafana/grafana/issues/4651) * **Singlestat**: Fixes prefix an postfix for gauges, fixes [#4812](https://github.com/grafana/grafana/issues/4812) * **Singlestat**: Fixes auto-refresh on change for some options, fixes [#4809](https://github.com/grafana/grafana/issues/4809) @@ -903,7 +978,7 @@ slack channel (link to slack channel in readme). ### Bug fixes * **Playlist**: Fix for memory leak when running a playlist, closes [#3794](https://github.com/grafana/grafana/pull/3794) * **InfluxDB**: Fix for InfluxDB and table panel when using Format As Table and having group by time, fixes [#3928](https://github.com/grafana/grafana/issues/3928) -* **Panel Time shift**: Fix for panel time range and using dashboard times liek `Today` and `This Week`, fixes [#3941](https://github.com/grafana/grafana/issues/3941) +* **Panel Time shift**: Fix for panel time range and using dashboard times like `Today` and `This Week`, fixes [#3941](https://github.com/grafana/grafana/issues/3941) * **Row repeat**: Repeated rows will now appear next to each other and not by the bottom of the dashboard, fixes [#3942](https://github.com/grafana/grafana/issues/3942) * **Png renderer**: Fix for phantomjs path on windows, fixes [#3657](https://github.com/grafana/grafana/issues/3657) @@ -927,7 +1002,7 @@ slack channel (link to slack channel in readme). ### Bug Fixes * **metric editors**: Fix for clicking typeahead auto dropdown option, fixes [#3428](https://github.com/grafana/grafana/issues/3428) * **influxdb**: Fixed issue showing Group By label only on first query, fixes [#3453](https://github.com/grafana/grafana/issues/3453) -* **logging**: Add more verbose info logging for http reqeusts, closes [#3405](https://github.com/grafana/grafana/pull/3405) +* **logging**: Add more verbose info logging for http requests, closes [#3405](https://github.com/grafana/grafana/pull/3405) # 2.6.0-Beta1 (2015-12-04) @@ -954,7 +1029,7 @@ slack channel (link to slack channel in readme). **New Feature: Mix data sources** - A built in data source is now available named `-- Mixed --`, When picked in the metrics tab, -it allows you to add queries of differnet data source types & instances to the same graph/panel! 
+it allows you to add queries of different data source types & instances to the same graph/panel! [Issue #436](https://github.com/grafana/grafana/issues/436) **New Feature: Elasticsearch Metrics Query Editor and Viz Support** @@ -993,7 +1068,7 @@ it allows you to add queries of differnet data source types & instances to the s - [Issue #2564](https://github.com/grafana/grafana/issues/2564). Templating: Another atempt at fixing #2534 (Init multi value template var used in repeat panel from url) - [Issue #2620](https://github.com/grafana/grafana/issues/2620). Graph: multi series tooltip did no highlight correct point when stacking was enabled and series were of different resolution - [Issue #2636](https://github.com/grafana/grafana/issues/2636). InfluxDB: Do no show template vars in dropdown for tag keys and group by keys -- [Issue #2604](https://github.com/grafana/grafana/issues/2604). InfluxDB: More alias options, can now use `$[0-9]` syntax to reference part of a measurement name (seperated by dots) +- [Issue #2604](https://github.com/grafana/grafana/issues/2604). InfluxDB: More alias options, can now use `$[0-9]` syntax to reference part of a measurement name (separated by dots) **Breaking Changes** - Notice to makers/users of custom data sources, there is a minor breaking change in 2.2 that @@ -1075,7 +1150,7 @@ Grunt & Watch tasks: - [Issue #1826](https://github.com/grafana/grafana/issues/1826). User role 'Viewer' are now prohibited from entering edit mode (and doing other transient dashboard edits). A new role `Read Only Editor` will replace the old Viewer behavior - [Issue #1928](https://github.com/grafana/grafana/issues/1928). HTTP API: GET /api/dashboards/db/:slug response changed property `model` to `dashboard` to match the POST request nameing - Backend render URL changed from `/render/dashboard/solo` `render/dashboard-solo/` (in order to have consistent dashboard url `/dashboard/:type/:slug`) -- Search HTTP API response has changed (simplified), tags list moved to seperate HTTP resource URI +- Search HTTP API response has changed (simplified), tags list moved to separate HTTP resource URI - Datasource HTTP api breaking change, ADD datasource is now POST /api/datasources/, update is now PUT /api/datasources/:id **Fixes** @@ -1092,7 +1167,7 @@ Grunt & Watch tasks: # 2.0.2 (2015-04-22) **Fixes** -- [Issue #1832](https://github.com/grafana/grafana/issues/1832). Graph Panel + Legend Table mode: Many series casued zero height graph, now legend will never reduce the height of the graph below 50% of row height. +- [Issue #1832](https://github.com/grafana/grafana/issues/1832). Graph Panel + Legend Table mode: Many series caused zero height graph, now legend will never reduce the height of the graph below 50% of row height. - [Issue #1846](https://github.com/grafana/grafana/issues/1846). Snapshots: Fixed issue with snapshoting dashboards with an interval template variable - [Issue #1848](https://github.com/grafana/grafana/issues/1848). Panel timeshift: You can now use panel timeshift without a relative time override @@ -1134,7 +1209,7 @@ Grunt & Watch tasks: **Fixes** - [Issue #1649](https://github.com/grafana/grafana/issues/1649). HTTP API: grafana /render calls nows with api keys -- [Issue #1667](https://github.com/grafana/grafana/issues/1667). Datasource proxy & session timeout fix (casued 401 Unauthorized error after a while) +- [Issue #1667](https://github.com/grafana/grafana/issues/1667). 
Datasource proxy & session timeout fix (caused 401 Unauthorized error after a while) - [Issue #1707](https://github.com/grafana/grafana/issues/1707). Unsaved changes: Do not show for snapshots, scripted and file based dashboards - [Issue #1703](https://github.com/grafana/grafana/issues/1703). Unsaved changes: Do not show for users with role `Viewer` - [Issue #1675](https://github.com/grafana/grafana/issues/1675). Data source proxy: Fixed issue with Gzip enabled and data source proxy @@ -1147,14 +1222,14 @@ Grunt & Watch tasks: **Important Note** -Grafana 2.x is fundamentally different from 1.x; it now ships with an integrated backend server. Please read the [Documentation](http://docs.grafana.org) for more detailed about this SIGNIFCANT change to Grafana +Grafana 2.x is fundamentally different from 1.x; it now ships with an integrated backend server. Please read the [Documentation](http://docs.grafana.org) for more details about this SIGNIFICANT change to Grafana **New features** - [Issue #1623](https://github.com/grafana/grafana/issues/1623). Share Dashboard: Dashboard snapshot sharing (dash and data snapshot), save to local or save to public snapshot dashboard snapshots.raintank.io site - [Issue #1622](https://github.com/grafana/grafana/issues/1622). Share Panel: The share modal now has an embed option, gives you an iframe that you can use to embedd a single graph on another web site -- [Issue #718](https://github.com/grafana/grafana/issues/718). Dashboard: When saving a dashboard and another user has made changes inbetween the user is promted with a warning if he really wants to overwrite the other's changes +- [Issue #718](https://github.com/grafana/grafana/issues/718). Dashboard: When saving a dashboard and another user has made changes in between the user is prompted with a warning if he really wants to overwrite the other's changes - [Issue #1331](https://github.com/grafana/grafana/issues/1331). Graph & Singlestat: New axis/unit format selector and more units (kbytes, Joule, Watt, eV), and new design for graph axis & grid tab and single stat options tab views -- [Issue #1241](https://github.com/grafana/grafana/issues/1242). Timepicker: New option in timepicker (under dashboard settings), to change ``now`` to be for example ``now-1m``, usefull when you want to ignore last minute because it contains incomplete data +- [Issue #1241](https://github.com/grafana/grafana/issues/1242). Timepicker: New option in timepicker (under dashboard settings), to change ``now`` to be for example ``now-1m``, useful when you want to ignore last minute because it contains incomplete data - [Issue #171](https://github.com/grafana/grafana/issues/171). Panel: Different time periods, panels can override dashboard relative time and/or add a time shift - [Issue #1488](https://github.com/grafana/grafana/issues/1488). Dashboard: Clone dashboard / Save as - [Issue #1458](https://github.com/grafana/grafana/issues/1458). User: persisted user option for dark or light theme (no longer an option on a dashboard) @@ -1185,7 +1260,7 @@ Grafana 2.x is fundamentally different from 1.x; it now ships with an integrated **OpenTSDB breaking change** - [Issue #1438](https://github.com/grafana/grafana/issues/1438).
OpenTSDB: Automatic downsample interval passed to OpenTSDB (depends on timespan and graph width) -- NOTICE, Downsampling is now enabled by default, so if you have not picked a downsample aggregator in your metric query do so or your graphs will be missleading +- NOTICE, Downsampling is now enabled by default, so if you have not picked a downsample aggregator in your metric query do so or your graphs will be misleading - This will make Grafana a lot quicker for OpenTSDB users when viewing large time spans without having to change the downsample interval manually. **Tech** @@ -1216,7 +1291,7 @@ Grafana 2.x is fundamentally different from 1.x; it now ships with an integrated - [Issue #1114](https://github.com/grafana/grafana/issues/1114). Graphite: Lexer fix, allow equal sign (=) in metric paths - [Issue #1136](https://github.com/grafana/grafana/issues/1136). Graph: Fix to legend value Max and negative values - [Issue #1150](https://github.com/grafana/grafana/issues/1150). SinglestatPanel: Fixed absolute drilldown link issue -- [Issue #1123](https://github.com/grafana/grafana/issues/1123). Firefox: Workaround for Firefox bug, casued input text fields to not be selectable and not have placeable cursor +- [Issue #1123](https://github.com/grafana/grafana/issues/1123). Firefox: Workaround for Firefox bug, caused input text fields to not be selectable and not have placeable cursor - [Issue #1108](https://github.com/grafana/grafana/issues/1108). Graph: Fix for tooltip series order when series draw order was changed with zindex property # 1.9.0-rc1 (2014-11-17) @@ -1293,7 +1368,7 @@ Read this [blog post](https://grafana.com/blog/2014/09/11/grafana-1.8.0-rc1-rele - [Issue #234](https://github.com/grafana/grafana/issues/234). Templating: Interval variable type for time intervals summarize/group by parameter, included "auto" option, and auto step counts option. - [Issue #262](https://github.com/grafana/grafana/issues/262). Templating: Ability to use template variables for function parameters via custom variable type, can be used as parameter for movingAverage or scaleToSeconds for example - [Issue #312](https://github.com/grafana/grafana/issues/312). Templating: Can now use template variables in panel titles -- [Issue #613](https://github.com/grafana/grafana/issues/613). Templating: Full support for InfluxDB, filter by part of series names, extract series substrings, nested queries, multipe where clauses! +- [Issue #613](https://github.com/grafana/grafana/issues/613). Templating: Full support for InfluxDB, filter by part of series names, extract series substrings, nested queries, multiple where clauses! - Template variables can be initialized from url, with var-my_varname=value, breaking change, before it was just my_varname. - Templating and url state sync has some issues that are not solved for this release, see [Issue #772](https://github.com/grafana/grafana/issues/772) for more details. @@ -1382,7 +1457,7 @@ Read this [blog post](https://grafana.com/blog/2014/09/11/grafana-1.8.0-rc1-rele - [Issue #136](https://github.com/grafana/grafana/issues/136). Graph: New legend display option "Align as table" - [Issue #556](https://github.com/grafana/grafana/issues/556). Graph: New legend display option "Right side", will show legend to the right of the graph - [Issue #604](https://github.com/grafana/grafana/issues/604). Graph: New axis format, 'bps' (SI unit in steps of 1000) useful for network gear metics -- [Issue #626](https://github.com/grafana/grafana/issues/626). 
Graph: Downscale y axis to more precise unit, value of 0.1 for seconds format will be formated as 100 ms. Thanks @kamaradclimber +- [Issue #626](https://github.com/grafana/grafana/issues/626). Graph: Downscale y axis to more precise unit, value of 0.1 for seconds format will be formatted as 100 ms. Thanks @kamaradclimber - [Issue #618](https://github.com/grafana/grafana/issues/618). OpenTSDB: Series alias option to override metric name returned from opentsdb. Thanks @heldr **Documentation** @@ -1412,13 +1487,13 @@ Read this [blog post](https://grafana.com/blog/2014/09/11/grafana-1.8.0-rc1-rele - [Issue #522](https://github.com/grafana/grafana/issues/522). Series names and column name typeahead cache fix - [Issue #504](https://github.com/grafana/grafana/issues/504). Fixed influxdb issue with raw query that caused wrong value column detection - [Issue #526](https://github.com/grafana/grafana/issues/526). Default property that marks which datasource is default in config.js is now optional -- [Issue #342](https://github.com/grafana/grafana/issues/342). Auto-refresh caused 2 refreshes (and hence mulitple queries) each time (at least in firefox) +- [Issue #342](https://github.com/grafana/grafana/issues/342). Auto-refresh caused 2 refreshes (and hence multiple queries) each time (at least in firefox) # 1.6.0 (2014-06-16) #### New features or improvements - [Issue #427](https://github.com/grafana/grafana/issues/427). New Y-axis formater for metric values that represent seconds, Thanks @jippi -- [Issue #390](https://github.com/grafana/grafana/issues/390). Allow special characters in serie names (influxdb datasource), Thanks @majst01 +- [Issue #390](https://github.com/grafana/grafana/issues/390). Allow special characters in series names (influxdb datasource), Thanks @majst01 - [Issue #428](https://github.com/grafana/grafana/issues/428). Refactoring of filterSrv, Thanks @Tetha - [Issue #445](https://github.com/grafana/grafana/issues/445). New config for playlist feature. Set playlist_timespan to set default playlist interval, Thanks @rmca - [Issue #461](https://github.com/grafana/grafana/issues/461). New graphite function definition added isNonNull, Thanks @tmonk42 @@ -1439,13 +1514,13 @@ Read this [blog post](https://grafana.com/blog/2014/09/11/grafana-1.8.0-rc1-rele - [Issue #475](https://github.com/grafana/grafana/issues/475). Add panel icon and Row edit button is replaced by the Row edit menu - New graphs now have a default empty query - Add Row button now creates a row with default height of 250px (no longer opens dashboard settings modal) -- Clean up of config.sample.js, graphiteUrl removed (still works, but depricated, removed in future) +- Clean up of config.sample.js, graphiteUrl removed (still works, but deprecated, removed in future) Use datasources config instead. panel_names removed from config.js. Use plugins.panels to add custom panels - Graphite panel is now renamed graph (Existing dashboards will still work) #### Fixes - [Issue #126](https://github.com/grafana/grafana/issues/126). Graphite query lexer change, can now handle regex parameters for aliasSub function - [Issue #447](https://github.com/grafana/grafana/issues/447). Filter option loading when having multiple nested filters now works better.
Options are now reloaded correctly and there are no multiple renders/refresh in between. - [Issue #412](https://github.com/grafana/grafana/issues/412). After a filter option is changed and a nested template param is reloaded, if the current value exists after the options are reloaded the current selected value is kept. - [Issue #460](https://github.com/grafana/grafana/issues/460). Legend Current value did not display when value was zero - [Issue #328](https://github.com/grafana/grafana/issues/328). Fix to series toggling bug that caused annotations to be hidden when toggling/hiding series. diff --git a/Gopkg.lock b/Gopkg.lock index d447223795e..41fc92313d1 100644 --- a/Gopkg.lock +++ b/Gopkg.lock @@ -111,6 +111,18 @@ ] revision = "270bc3860bb94dd3a3ffd047377d746c5e276726" +[[projects]] + branch = "master" + name = "github.com/facebookgo/inject" + packages = ["."] + revision = "cc1aa653e50f6a9893bcaef89e673e5b24e1e97b" + +[[projects]] + branch = "master" + name = "github.com/facebookgo/structtag" + packages = ["."] + revision = "217e25fb96916cc60332e399c9aa63f5c422ceed" + [[projects]] name = "github.com/fatih/color" packages = ["."] @@ -351,6 +363,12 @@ revision = "a3647f8e31d79543b2d0f0ae2fe5c379d72cedc0" version = "v2.1.0" +[[projects]] + name = "github.com/pkg/errors" + packages = ["."] + revision = "645ef00459ed84a119197bfb8d8205042c6df63d" + version = "v0.8.0" + [[projects]] name = "github.com/prometheus/client_golang" packages = [ @@ -610,12 +628,6 @@ revision = "567b2bfa514e796916c4747494d6ff5132a1dfce" version = "v1" -[[projects]] - branch = "v2" - name = "gopkg.in/gomail.v2" - packages = ["."] - revision = "81ebce5c23dfd25c6c67194b37d3dd3f338c98b1" - [[projects]] name = "gopkg.in/ini.v1" packages = ["."] @@ -628,6 +640,12 @@ revision = "75f2e9b42e99652f0d82b28ccb73648f44615faa" version = "v1.2.4" +[[projects]] + branch = "v2" + name = "gopkg.in/mail.v2" + packages = ["."] + revision = "5bc5c8bb07bd8d2803831fbaf8cbd630fcde2c68" + [[projects]] name = "gopkg.in/redis.v2" packages = ["."] @@ -643,6 +661,6 @@ [solve-meta] analyzer-name = "dep" analyzer-version = 1 - inputs-digest = "8a9e651fb8ea49dfd3c6ddc99bd3242b39e453ea9edd11321da79bd2c865e9d1" + inputs-digest = "bd54a1a836599d90b36d4ac1af56d716ef9ca5be4865e217bddd49e3d32a1997" solver-name = "gps-cdcl" solver-version = 1 diff --git a/Gopkg.toml b/Gopkg.toml index 350da50fc4b..a9f79c402df 100644 --- a/Gopkg.toml +++ b/Gopkg.toml @@ -172,7 +172,7 @@ ignored = [ name = "golang.org/x/sync" [[constraint]] - name = "gopkg.in/gomail.v2" + name = "gopkg.in/mail.v2" branch = "v2" [[constraint]] diff --git a/Makefile b/Makefile index 6f7beb837d8..c1d755d247d 100644 --- a/Makefile +++ b/Makefile @@ -1,3 +1,5 @@ +-include local/Makefile + all: deps build deps-go: diff --git a/PLUGIN_DEV.md b/PLUGIN_DEV.md index 9d831a95697..4e2e080ebe6 100644 --- a/PLUGIN_DEV.md +++ b/PLUGIN_DEV.md @@ -9,6 +9,7 @@ upgrading Grafana please check here before creating an issue. - [Datasource plugin written in typescript](https://github.com/grafana/typescript-template-datasource) - [Simple json dataource plugin](https://github.com/grafana/simple-json-datasource) - [Plugin development guide](http://docs.grafana.org/plugins/developing/development/) +- [Webpack Grafana plugin template project](https://github.com/CorpGlory/grafana-plugin-template-webpack) ## Changes in v4.6 diff --git a/README.md b/README.md index 9a05633c391..12b0c8cc74a 100644 --- a/README.md +++ b/README.md @@ -39,12 +39,21 @@ go run build.go build For this you need nodejs (v.6+). 
+To build the assets, rebuild on file change, and serve them via Grafana's webserver (http://localhost:3000): ```bash npm install -g yarn yarn install --pure-lockfile npm run watch ``` +Build the assets, rebuild on file change with Hot Module Replacement (HMR), and serve them via webpack-dev-server (http://localhost:3333): +```bash +yarn start +# OR set a theme +env GRAFANA_THEME=light yarn start +``` +Note: HMR for Angular is not supported. If you edit files in the Angular part of the app, the whole page will reload. + Run tests ```bash npm run jest ``` diff --git a/ROADMAP.md b/ROADMAP.md index e7bed99489e..7b9c043fef1 100644 --- a/ROADMAP.md +++ b/ROADMAP.md @@ -1,26 +1,20 @@ -# Roadmap (2018-02-22) +# Roadmap (2018-05-06) This roadmap is a tentative plan for the core development team. Things change constantly as PRs come in and priorities change. But it will give you an idea of our current vision and plan. - + ### Short term (1-2 months) -- v5.1 - - Build speed improvements & integration test execution - - Kubernetes friendly docker container - - Enterprise LDAP - - Provisioning workflow - - MSSQL datasource + - Elasticsearch alerting + - Crossplatform builds + - Backend service refactorings + - Explore UI + - First login registration view ### Mid term (2-4 months) - -- v5.2 - - Azure monitor backend rewrite - - Elasticsearch alerting - - First login registration view - - Backend plugins? (alert notifiers, auth) - - Crossplatform builds - - IFQL Initial support + - Multi-Stat panel + - React Panels + - Templating Query Editor UI Plugin hook ### Long term (4 - 8 months) diff --git a/appveyor.yml b/appveyor.yml index 5d67edca9d9..a71eb9f81b4 100644 --- a/appveyor.yml +++ b/appveyor.yml @@ -6,8 +6,8 @@ clone_folder: c:\gopath\src\github.com\grafana\grafana environment: nodejs_version: "6" - GOPATH: c:\gopath - GOVERSION: 1.9.2 + GOPATH: C:\gopath + GOVERSION: 1.10 install: - rmdir c:\go /s /q diff --git a/build.go b/build.go index c38c452f61f..7e7183b8b83 100644 --- a/build.go +++ b/build.go @@ -16,7 +16,6 @@ import ( "os/exec" "path" "path/filepath" - "regexp" "runtime" "strconv" "strings" @@ -24,14 +23,14 @@ import ( ) var ( - versionRe = regexp.MustCompile(`-[0-9]{1,3}-g[0-9a-f]{5,10}`) - goarch string - goos string - gocc string - gocxx string - cgo string - pkgArch string - version string = "v1" + //versionRe = regexp.MustCompile(`-[0-9]{1,3}-g[0-9a-f]{5,10}`) + goarch string + goos string + gocc string + gocxx string + cgo string + pkgArch string + version string = "v1" // deb & rpm does not support semver so have to handle their version a little differently linuxPackageVersion string = "v1" linuxPackageIteration string = "" @@ -41,10 +40,10 @@ var ( includeBuildNumber bool = true buildNumber int = 0 binaries []string = []string{"grafana-server", "grafana-cli"} + isDev bool = false + enterprise bool = false ) -const minGoVersion = 1.8 - func main() { log.SetOutput(os.Stdout) log.SetFlags(0) @@ -60,7 +59,9 @@ func main() { flag.StringVar(&phjsToRelease, "phjs", "", "PhantomJS binary") flag.BoolVar(&race, "race", race, "Use race detector") flag.BoolVar(&includeBuildNumber, "includeBuildNumber", includeBuildNumber, "IncludeBuildNumber in package name") + flag.BoolVar(&enterprise, "enterprise", enterprise, "Build enterprise version of Grafana") flag.IntVar(&buildNumber, "buildNumber", 0, "Build number from CI system") + flag.BoolVar(&isDev, "dev", isDev, "optimized for
development, skips certain steps") flag.Parse() readVersionFromPackageJson() @@ -284,19 +285,33 @@ func createPackage(options linuxPackageOptions) { "-s", "dir", "--description", "Grafana", "-C", packageRoot, - "--vendor", "Grafana", "--url", "https://grafana.com", - "--license", "\"Apache 2.0\"", "--maintainer", "contact@grafana.com", "--config-files", options.initdScriptFilePath, "--config-files", options.etcDefaultFilePath, "--config-files", options.systemdServiceFilePath, "--after-install", options.postinstSrc, - "--name", "grafana", + "--version", linuxPackageVersion, "-p", "./dist", } + name := "grafana" + if enterprise { + name += "-enterprise" + } + args = append(args, "--name", name) + + description := "Grafana" + if enterprise { + description += " Enterprise" + } + args = append(args, "--vendor", description) + + if !enterprise { + args = append(args, "--license", "\"Apache 2.0\"") + } + if options.packageType == "rpm" { args = append(args, "--rpm-posttrans", "packaging/rpm/control/posttrans") } @@ -324,20 +339,6 @@ func createPackage(options linuxPackageOptions) { runPrint("fpm", append([]string{"-t", options.packageType}, args...)...) } -func verifyGitRepoIsClean() { - rs, err := runError("git", "ls-files", "--modified") - if err != nil { - log.Fatalf("Failed to check if git tree was clean, %v, %v\n", string(rs), err) - return - } - count := len(string(rs)) - if count > 0 { - log.Fatalf("Git repository has modified files, aborting") - } - - log.Println("Git repository is clean") -} - func ensureGoPath() { if os.Getenv("GOPATH") == "" { cwd, err := os.Getwd() @@ -350,10 +351,6 @@ func ensureGoPath() { } } -func ChangeWorkingDir(dir string) { - os.Chdir(dir) -} - func grunt(params ...string) { if runtime.GOOS == "windows" { runPrint(`.\node_modules\.bin\grunt`, params...) @@ -394,7 +391,9 @@ func build(binaryName, pkg string, tags []string) { binary += ".exe" } - rmr(binary, binary+".md5") + if !isDev { + rmr(binary, binary+".md5") + } args := []string{"build", "-ldflags", ldflags()} if len(tags) > 0 { args = append(args, "-tags", strings.Join(tags, ",")) @@ -405,16 +404,21 @@ func build(binaryName, pkg string, tags []string) { args = append(args, "-o", binary) args = append(args, pkg) - setBuildEnv() - runPrint("go", "version") + if !isDev { + setBuildEnv() + runPrint("go", "version") + } + runPrint("go", args...) - // Create an md5 checksum of the binary, to be included in the archive for - // automatic upgrades. - err := md5File(binary) - if err != nil { - log.Fatal(err) + if !isDev { + // Create an md5 checksum of the binary, to be included in the archive for + // automatic upgrades. + err := md5File(binary) + if err != nil { + log.Fatal(err) + } } } @@ -424,6 +428,7 @@ func ldflags() string { b.WriteString(fmt.Sprintf(" -X main.version=%s", version)) b.WriteString(fmt.Sprintf(" -X main.commit=%s", getGitSha())) b.WriteString(fmt.Sprintf(" -X main.buildstamp=%d", buildStamp())) + b.WriteString(fmt.Sprintf(" -X main.enterprise=%t", enterprise)) return b.String() } @@ -435,6 +440,10 @@ func rmr(paths ...string) { } func clean() { + if isDev { + return + } + rmr("dist") rmr("tmp") rmr(filepath.Join(os.Getenv("GOPATH"), fmt.Sprintf("pkg/%s_%s/github.com/grafana", goos, goarch))) @@ -479,24 +488,6 @@ func buildStamp() int64 { return s } -func buildArch() string { - os := goos - if os == "darwin" { - os = "macosx" - } - return fmt.Sprintf("%s-%s", os, goarch) -} - -func run(cmd string, args ...string) []byte { - bs, err := runError(cmd, args...) 
- if err != nil { - log.Println(cmd, strings.Join(args, " ")) - log.Println(string(bs)) - log.Fatal(err) - } - return bytes.TrimSpace(bs) -} - func runError(cmd string, args ...string) ([]byte, error) { ecmd := exec.Command(cmd, args...) bs, err := ecmd.CombinedOutput() @@ -550,7 +541,7 @@ func shaFilesInDist() { return nil } - if strings.Contains(path, ".sha256") == false { + if !strings.Contains(path, ".sha256") { err := shaFile(path) if err != nil { log.Printf("Failed to create sha file. error: %v\n", err) diff --git a/codecov.yml b/codecov.yml index 82a86e0232b..b2a839365ac 100644 --- a/codecov.yml +++ b/codecov.yml @@ -8,6 +8,4 @@ coverage: patch: yes changes: no -comment: - layout: "diff" - behavior: "once" +comment: off diff --git a/conf/defaults.ini b/conf/defaults.ini index 11d173d955d..d45e270d65d 100644 --- a/conf/defaults.ini +++ b/conf/defaults.ini @@ -442,6 +442,11 @@ enabled = true # Makes it possible to turn off alert rule execution but alerting UI is visible execute_alerts = true +#################################### Explore ############################# +[explore] +# Enable the Explore section +enabled = false + #################################### Internal Grafana Metrics ############ # Metrics available at HTTP API Url /metrics [metrics] diff --git a/conf/sample.ini b/conf/sample.ini index 1af5bbdb62b..f12d917039d 100644 --- a/conf/sample.ini +++ b/conf/sample.ini @@ -64,7 +64,7 @@ #################################### Database #################################### [database] # You can configure the database connection by specifying type, host, name, user and password -# as seperate properties or as on string using the url propertie. +# as separate properties or as one string using the url property. # Either "mysql", "postgres" or "sqlite3", it's your choice ;type = sqlite3 @@ -377,6 +377,11 @@ log_queries = # Makes it possible to turn off alert rule execution but alerting UI is visible ;execute_alerts = true +#################################### Explore ############################# +[explore] +# Enable the Explore section +;enabled = false + #################################### Internal Grafana Metrics ########################## # Metrics available at HTTP API Url /metrics [metrics] diff --git a/docker/blocks/apache_proxy/docker-compose.yaml b/docker/blocks/apache_proxy/docker-compose.yaml index 2aec3d4bc4f..86d4befadd6 100644 --- a/docker/blocks/apache_proxy/docker-compose.yaml +++ b/docker/blocks/apache_proxy/docker-compose.yaml @@ -2,7 +2,7 @@ # http://localhost:3000 (Grafana running locally) # # Please note that you'll need to change the root_url in the Grafana configuration: -# root_url = %(protocol)s://%(domain)s:/grafana/ +# root_url = %(protocol)s://%(domain)s:10081/grafana/ apacheproxy: build: blocks/apache_proxy diff --git a/docker/blocks/graphite/files/carbon.conf b/docker/blocks/graphite/files/carbon.conf index 50762b3fff5..fc03aba6398 100644 --- a/docker/blocks/graphite/files/carbon.conf +++ b/docker/blocks/graphite/files/carbon.conf @@ -38,7 +38,7 @@ CACHE_QUERY_PORT = 7002 LOG_UPDATES = False -# Enable AMQP if you want to receve metrics using an amqp broker +# Enable AMQP if you want to receive metrics using an amqp broker # ENABLE_AMQP = False # Verbose means a line will be logged for every metric received diff --git a/docker/blocks/graphite1/conf/opt/graphite/conf/carbon.amqp.conf b/docker/blocks/graphite1/conf/opt/graphite/conf/carbon.amqp.conf index fc36328b25f..f8a53a61115 100644 --- a/docker/blocks/graphite1/conf/opt/graphite/conf/carbon.amqp.conf
+++ b/docker/blocks/graphite1/conf/opt/graphite/conf/carbon.amqp.conf @@ -41,7 +41,7 @@ PICKLE_RECEIVER_PORT = 2004 CACHE_QUERY_INTERFACE = 0.0.0.0 CACHE_QUERY_PORT = 7002 -# Enable AMQP if you want to receve metrics using you amqp broker +# Enable AMQP if you want to receive metrics using your amqp broker ENABLE_AMQP = True # Verbose means a line will be logged for every metric received diff --git a/docker/blocks/graphite1/conf/opt/graphite/conf/carbon.conf b/docker/blocks/graphite1/conf/opt/graphite/conf/carbon.conf index 3e10dcec9cf..6741932da37 100644 --- a/docker/blocks/graphite1/conf/opt/graphite/conf/carbon.conf +++ b/docker/blocks/graphite1/conf/opt/graphite/conf/carbon.conf @@ -265,7 +265,7 @@ WHISPER_FALLOCATE_CREATE = True # CARBON_METRIC_PREFIX = carbon # CARBON_METRIC_INTERVAL = 60 -# Enable AMQP if you want to receve metrics using an amqp broker +# Enable AMQP if you want to receive metrics using an amqp broker # ENABLE_AMQP = False # Verbose means a line will be logged for every metric received diff --git a/docker/blocks/graphite1/conf/opt/graphite/conf/dashboard.conf b/docker/blocks/graphite1/conf/opt/graphite/conf/dashboard.conf index 2e1b0bc4db3..f558b273f57 100644 --- a/docker/blocks/graphite1/conf/opt/graphite/conf/dashboard.conf +++ b/docker/blocks/graphite1/conf/opt/graphite/conf/dashboard.conf @@ -30,7 +30,7 @@ give_completer_focus = shift-space # pertain only to specific metric types. # # The dashboard presents only metrics that fall into specified naming schemes -# defined in this file. This creates a simpler, more targetted view of the +# defined in this file. This creates a simpler, more targeted view of the # data. The general form for defining a naming scheme is as follows: # #[Metric Type] diff --git a/docker/blocks/mssql_tests/dashboard.json b/docker/blocks/mssql_tests/dashboard.json index 20e3907b48b..80994254093 100644 --- a/docker/blocks/mssql_tests/dashboard.json +++ b/docker/blocks/mssql_tests/dashboard.json @@ -100,7 +100,7 @@ "gnetId": null, "graphTooltip": 0, "id": null, - "iteration": 1521715844826, + "iteration": 1523320861623, "links": [], "panels": [ { @@ -443,7 +443,11 @@ "min": null, "show": true } - ] + ], + "yaxis": { + "align": false, + "alignLevel": null + } }, { "aliasColors": {}, @@ -522,7 +526,11 @@ "min": null, "show": true } - ] + ], + "yaxis": { + "align": false, + "alignLevel": null + } }, { "aliasColors": {}, @@ -601,7 +609,11 @@ "min": null, "show": true } - ] + ], + "yaxis": { + "align": false, + "alignLevel": null + } }, { "aliasColors": {}, @@ -680,7 +692,11 @@ "min": null, "show": true } - ] + ], + "yaxis": { + "align": false, + "alignLevel": null + } }, { "aliasColors": {}, @@ -759,7 +775,11 @@ "min": null, "show": true } - ] + ], + "yaxis": { + "align": false, + "alignLevel": null + } }, { "aliasColors": {}, @@ -838,7 +858,11 @@ "min": null, "show": true } - ] + ], + "yaxis": { + "align": false, + "alignLevel": null + } }, { "aliasColors": {}, @@ -927,7 +951,11 @@ "min": null, "show": true } - ] + ], + "yaxis": { + "align": false, + "alignLevel": null + } }, { "aliasColors": {}, @@ -1026,7 +1054,11 @@ "min": null, "show": true } - ] + ], + "yaxis": { + "align": false, + "alignLevel": null + } }, { "aliasColors": {}, @@ -1115,7 +1147,11 @@ "min": null, "show": true } - ] + ], + "yaxis": { + "align": false, + "alignLevel": null + } }, { "aliasColors": {}, @@ -1196,7 +1232,11 @@ "min": null, "show": true } - ] + ], + "yaxis": { + "align": false, + "alignLevel": null + } }, { "aliasColors": {}, @@ -1285,7 +1325,11 @@ "min": null,
"show": true } - ] + ], + "yaxis": { + "align": false, + "alignLevel": null + } }, { "aliasColors": {}, @@ -1366,7 +1410,11 @@ "min": null, "show": true } - ] + ], + "yaxis": { + "align": false, + "alignLevel": null + } }, { "aliasColors": {}, @@ -1455,7 +1503,11 @@ "min": null, "show": true } - ] + ], + "yaxis": { + "align": false, + "alignLevel": null + } }, { "aliasColors": {}, @@ -1536,7 +1588,11 @@ "min": null, "show": true } - ] + ], + "yaxis": { + "align": false, + "alignLevel": null + } }, { "aliasColors": {}, @@ -1619,7 +1675,11 @@ "min": null, "show": true } - ] + ], + "yaxis": { + "align": false, + "alignLevel": null + } }, { "aliasColors": {}, @@ -1702,7 +1762,11 @@ "min": null, "show": true } - ] + ], + "yaxis": { + "align": false, + "alignLevel": null + } }, { "aliasColors": {}, @@ -1792,7 +1856,11 @@ "min": null, "show": true } - ] + ], + "yaxis": { + "align": false, + "alignLevel": null + } }, { "aliasColors": {}, @@ -1875,7 +1943,11 @@ "min": null, "show": true } - ] + ], + "yaxis": { + "align": false, + "alignLevel": null + } }, { "aliasColors": {}, @@ -1965,7 +2037,11 @@ "min": null, "show": true } - ] + ], + "yaxis": { + "align": false, + "alignLevel": null + } }, { "aliasColors": {}, @@ -2048,7 +2124,11 @@ "min": null, "show": true } - ] + ], + "yaxis": { + "align": false, + "alignLevel": null + } }, { "aliasColors": {}, @@ -2138,7 +2218,11 @@ "min": null, "show": true } - ] + ], + "yaxis": { + "align": false, + "alignLevel": null + } }, { "aliasColors": {}, @@ -2221,7 +2305,11 @@ "min": null, "show": true } - ] + ], + "yaxis": { + "align": false, + "alignLevel": null + } }, { "aliasColors": {}, @@ -2311,7 +2399,11 @@ "min": null, "show": true } - ] + ], + "yaxis": { + "align": false, + "alignLevel": null + } }, { "aliasColors": {}, @@ -2394,7 +2486,11 @@ "min": null, "show": true } - ] + ], + "yaxis": { + "align": false, + "alignLevel": null + } } ], "refresh": false, @@ -2504,5 +2600,5 @@ "timezone": "", "title": "Microsoft SQL Server Data Source Test", "uid": "GlAqcPgmz", - "version": 57 + "version": 58 } \ No newline at end of file diff --git a/docker/blocks/mysql/dashboard.json b/docker/blocks/mysql/dashboard.json index e2b791f82e6..dba7847cc72 100644 --- a/docker/blocks/mysql/dashboard.json +++ b/docker/blocks/mysql/dashboard.json @@ -2,7 +2,7 @@ "__inputs": [ { "name": "DS_MYSQL", - "label": "Mysql", + "label": "MySQL", "description": "", "type": "datasource", "pluginId": "mysql", @@ -20,19 +20,19 @@ "type": "panel", "id": "graph", "name": "Graph", - "version": "" + "version": "5.0.0" }, { "type": "datasource", "id": "mysql", "name": "MySQL", - "version": "1.0.0" + "version": "5.0.0" }, { "type": "panel", "id": "table", "name": "Table", - "version": "" + "version": "5.0.0" } ], "annotations": { @@ -53,7 +53,7 @@ "gnetId": null, "graphTooltip": 0, "id": null, - "iteration": 1518602729468, + "iteration": 1523372133566, "links": [], "panels": [ { @@ -118,7 +118,7 @@ ], "thresholds": [], "timeFrom": null, - "timeShift": "1h", + "timeShift": null, "title": "Average logins / $summarize", "tooltip": { "shared": true, @@ -150,7 +150,11 @@ "min": null, "show": true } - ] + ], + "yaxis": { + "align": false, + "alignLevel": null + } }, { "aliasColors": {}, @@ -204,7 +208,7 @@ ], "thresholds": [], "timeFrom": null, - "timeShift": "1h", + "timeShift": null, "title": "Average payments started/ended / $summarize", "tooltip": { "shared": true, @@ -236,7 +240,11 @@ "min": null, "show": true } - ] + ], + "yaxis": { + "align": false, + "alignLevel": null + } }, { "aliasColors": 
{}, @@ -284,7 +292,7 @@ ], "thresholds": [], "timeFrom": null, - "timeShift": "1h", + "timeShift": null, "title": "Max CPU / $summarize", "tooltip": { "shared": true, @@ -316,7 +324,11 @@ "min": null, "show": true } - ] + ], + "yaxis": { + "align": false, + "alignLevel": null + } }, { "columns": [], @@ -369,7 +381,7 @@ "target": "" } ], - "timeShift": "1h", + "timeShift": null, "title": "Values", "transform": "table", "type": "table" @@ -428,7 +440,6 @@ "auto_count": 5, "auto_min": "10s", "current": { - "selected": true, "text": "1m", "value": "1m" }, @@ -545,5 +556,5 @@ "timezone": "", "title": "Grafana Fake Data Gen - MySQL", "uid": "DGsCac3kz", - "version": 6 + "version": 8 } \ No newline at end of file diff --git a/docker/blocks/mysql/docker-compose.yaml b/docker/blocks/mysql/docker-compose.yaml index f7881e66539..53ff9da62a7 100644 --- a/docker/blocks/mysql/docker-compose.yaml +++ b/docker/blocks/mysql/docker-compose.yaml @@ -7,9 +7,6 @@ MYSQL_PASSWORD: password ports: - "3306:3306" - volumes: - - /etc/localtime:/etc/localtime:ro - - /etc/timezone:/etc/timezone:ro command: [mysqld, --character-set-server=utf8mb4, --collation-server=utf8mb4_unicode_ci, --innodb_monitor_enable=all] fake-mysql-data: diff --git a/docker/blocks/mysql_tests/Dockerfile b/docker/blocks/mysql_tests/Dockerfile new file mode 100644 index 00000000000..fa91fa3c023 --- /dev/null +++ b/docker/blocks/mysql_tests/Dockerfile @@ -0,0 +1,3 @@ +FROM mysql:latest +ADD setup.sql /docker-entrypoint-initdb.d +CMD ["mysqld"] \ No newline at end of file diff --git a/docker/blocks/mysql_tests/dashboard.json b/docker/blocks/mysql_tests/dashboard.json index 3ab08a7da35..53f313315bd 100644 --- a/docker/blocks/mysql_tests/dashboard.json +++ b/docker/blocks/mysql_tests/dashboard.json @@ -7,14 +7,6 @@ "type": "datasource", "pluginId": "mysql", "pluginName": "MySQL" - }, - { - "name": "DS_MSSQL_TEST", - "label": "MSSQL Test", - "description": "", - "type": "datasource", - "pluginId": "mssql", - "pluginName": "Microsoft SQL Server" } ], "__requires": [ @@ -30,12 +22,6 @@ "name": "Graph", "version": "5.0.0" }, - { - "type": "datasource", - "id": "mssql", - "name": "Microsoft SQL Server", - "version": "1.0.0" - }, { "type": "datasource", "id": "mysql", @@ -114,7 +100,7 @@ "gnetId": null, "graphTooltip": 0, "id": null, - "iteration": 1521715720483, + "iteration": 1523320712115, "links": [], "panels": [ { @@ -349,7 +335,7 @@ { "alias": "Time", "dateFormat": "YYYY-MM-DD HH:mm:ss", - "pattern": "time_sec", + "pattern": "time", "type": "date" }, { @@ -457,7 +443,11 @@ "min": null, "show": true } - ] + ], + "yaxis": { + "align": false, + "alignLevel": null + } }, { "aliasColors": {}, @@ -536,7 +526,11 @@ "min": null, "show": true } - ] + ], + "yaxis": { + "align": false, + "alignLevel": null + } }, { "aliasColors": {}, @@ -615,7 +609,11 @@ "min": null, "show": true } - ] + ], + "yaxis": { + "align": false, + "alignLevel": null + } }, { "aliasColors": {}, @@ -694,7 +692,11 @@ "min": null, "show": true } - ] + ], + "yaxis": { + "align": false, + "alignLevel": null + } }, { "aliasColors": {}, @@ -773,7 +775,11 @@ "min": null, "show": true } - ] + ], + "yaxis": { + "align": false, + "alignLevel": null + } }, { "aliasColors": {}, @@ -852,7 +858,11 @@ "min": null, "show": true } - ] + ], + "yaxis": { + "align": false, + "alignLevel": null + } }, { "aliasColors": {}, @@ -941,7 +951,11 @@ "min": null, "show": true } - ] + ], + "yaxis": { + "align": false, + "alignLevel": null + } }, { "aliasColors": {}, @@ -1034,7 +1048,11 @@ "min": null, "show": true 
} - ] + ], + "yaxis": { + "align": false, + "alignLevel": null + } }, { "aliasColors": {}, @@ -1123,7 +1141,11 @@ "min": null, "show": true } - ] + ], + "yaxis": { + "align": false, + "alignLevel": null + } }, { "aliasColors": {}, @@ -1204,7 +1226,11 @@ "min": null, "show": true } - ] + ], + "yaxis": { + "align": false, + "alignLevel": null + } }, { "aliasColors": {}, @@ -1293,7 +1319,11 @@ "min": null, "show": true } - ] + ], + "yaxis": { + "align": false, + "alignLevel": null + } }, { "aliasColors": {}, @@ -1374,7 +1404,11 @@ "min": null, "show": true } - ] + ], + "yaxis": { + "align": false, + "alignLevel": null + } }, { "aliasColors": {}, @@ -1463,7 +1497,11 @@ "min": null, "show": true } - ] + ], + "yaxis": { + "align": false, + "alignLevel": null + } }, { "aliasColors": {}, @@ -1544,7 +1582,11 @@ "min": null, "show": true } - ] + ], + "yaxis": { + "align": false, + "alignLevel": null + } }, { "aliasColors": {}, @@ -1634,14 +1676,18 @@ "min": null, "show": true } - ] + ], + "yaxis": { + "align": false, + "alignLevel": null + } }, { "aliasColors": {}, "bars": true, "dashLength": 10, "dashes": false, - "datasource": "${DS_MSSQL_TEST}", + "datasource": "${DS_MYSQL_TEST}", "fill": 1, "gridPos": { "h": 8, @@ -1717,7 +1763,11 @@ "min": null, "show": true } - ] + ], + "yaxis": { + "align": false, + "alignLevel": null + } }, { "aliasColors": {}, @@ -1807,7 +1857,11 @@ "min": null, "show": true } - ] + ], + "yaxis": { + "align": false, + "alignLevel": null + } }, { "aliasColors": {}, @@ -1890,7 +1944,11 @@ "min": null, "show": true } - ] + ], + "yaxis": { + "align": false, + "alignLevel": null + } }, { "aliasColors": {}, @@ -1980,7 +2038,11 @@ "min": null, "show": true } - ] + ], + "yaxis": { + "align": false, + "alignLevel": null + } }, { "aliasColors": {}, @@ -2063,7 +2125,11 @@ "min": null, "show": true } - ] + ], + "yaxis": { + "align": false, + "alignLevel": null + } }, { "aliasColors": {}, @@ -2153,7 +2219,11 @@ "min": null, "show": true } - ] + ], + "yaxis": { + "align": false, + "alignLevel": null + } }, { "aliasColors": {}, @@ -2236,7 +2306,11 @@ "min": null, "show": true } - ] + ], + "yaxis": { + "align": false, + "alignLevel": null + } } ], "refresh": false, @@ -2315,8 +2389,8 @@ ] }, "time": { - "from": "2018-03-15T11:30:00.000Z", - "to": "2018-03-15T12:55:01.000Z" + "from": "2018-03-15T12:30:00.000Z", + "to": "2018-03-15T13:55:01.000Z" }, "timepicker": { "refresh_intervals": [ @@ -2346,5 +2420,5 @@ "timezone": "", "title": "MySQL Data Source Test", "uid": "Hmf8FDkmz", - "version": 9 + "version": 12 } \ No newline at end of file diff --git a/docker/blocks/mysql_tests/docker-compose.yaml b/docker/blocks/mysql_tests/docker-compose.yaml index 3c59b66b5ac..035a6167017 100644 --- a/docker/blocks/mysql_tests/docker-compose.yaml +++ b/docker/blocks/mysql_tests/docker-compose.yaml @@ -1,5 +1,6 @@ mysqltests: - image: mysql:latest + build: + context: blocks/mysql_tests environment: MYSQL_ROOT_PASSWORD: rootpass MYSQL_DATABASE: grafana_tests @@ -7,7 +8,4 @@ MYSQL_PASSWORD: password ports: - "3306:3306" - volumes: - - /etc/localtime:/etc/localtime:ro - - /etc/timezone:/etc/timezone:ro tmpfs: /var/lib/mysql:rw diff --git a/docker/blocks/mysql_tests/setup.sql b/docker/blocks/mysql_tests/setup.sql new file mode 100644 index 00000000000..be917a1c542 --- /dev/null +++ b/docker/blocks/mysql_tests/setup.sql @@ -0,0 +1,2 @@ +CREATE DATABASE grafana_ds_tests; +GRANT ALL PRIVILEGES ON grafana_ds_tests.* TO 'grafana'; diff --git a/docker/blocks/nginx_proxy/docker-compose.yaml 
b/docker/blocks/nginx_proxy/docker-compose.yaml index 7c3447ade5c..a0ceceb83ac 100644 --- a/docker/blocks/nginx_proxy/docker-compose.yaml +++ b/docker/blocks/nginx_proxy/docker-compose.yaml @@ -2,7 +2,7 @@ # http://localhost:3000 (Grafana running locally) # # Please note that you'll need to change the root_url in the Grafana configuration: -# root_url = %(protocol)s://%(domain)s:/grafana/ +# root_url = %(protocol)s://%(domain)s:10080/grafana/ nginxproxy: build: blocks/nginx_proxy diff --git a/docker/blocks/openldap/Dockerfile b/docker/blocks/openldap/Dockerfile index d073e274356..54e383a6a97 100644 --- a/docker/blocks/openldap/Dockerfile +++ b/docker/blocks/openldap/Dockerfile @@ -17,6 +17,7 @@ EXPOSE 389 VOLUME ["/etc/ldap", "/var/lib/ldap"] COPY modules/ /etc/ldap.dist/modules +COPY prepopulate/ /etc/ldap.dist/prepopulate COPY entrypoint.sh /entrypoint.sh diff --git a/docker/blocks/openldap/entrypoint.sh b/docker/blocks/openldap/entrypoint.sh index 39a8b892de8..d560b78d388 100755 --- a/docker/blocks/openldap/entrypoint.sh +++ b/docker/blocks/openldap/entrypoint.sh @@ -65,7 +65,7 @@ EOF fi if [[ -n "$SLAPD_ADDITIONAL_SCHEMAS" ]]; then - IFS=","; declare -a schemas=($SLAPD_ADDITIONAL_SCHEMAS) + IFS=","; declare -a schemas=($SLAPD_ADDITIONAL_SCHEMAS); unset IFS for schema in "${schemas[@]}"; do slapadd -n0 -F /etc/ldap/slapd.d -l "/etc/ldap/schema/${schema}.ldif" >/dev/null 2>&1 @@ -73,14 +73,18 @@ EOF fi if [[ -n "$SLAPD_ADDITIONAL_MODULES" ]]; then - IFS=","; declare -a modules=($SLAPD_ADDITIONAL_MODULES) + IFS=","; declare -a modules=($SLAPD_ADDITIONAL_MODULES); unset IFS for module in "${modules[@]}"; do slapadd -n0 -F /etc/ldap/slapd.d -l "/etc/ldap/modules/${module}.ldif" >/dev/null 2>&1 done fi - chown -R openldap:openldap /etc/ldap/slapd.d/ + for file in /etc/ldap/prepopulate/*.ldif; do + slapadd -F /etc/ldap/slapd.d -l "$file" + done + + chown -R openldap:openldap /etc/ldap/slapd.d/ /var/lib/ldap/ /var/run/slapd/ else slapd_configs_in_env=`env | grep 'SLAPD_'` diff --git a/docker/blocks/openldap/notes.md b/docker/blocks/openldap/notes.md new file mode 100644 index 00000000000..71813c2899a --- /dev/null +++ b/docker/blocks/openldap/notes.md @@ -0,0 +1,13 @@ +# Notes on OpenLDAP Docker Block + +Any LDIF files added to the prepopulate subdirectory will be automatically imported into the OpenLDAP database. + +The LDIF files add three users, `ldapviewer`, `ldapeditor` and `ldapadmin`. Two groups, `admins` and `users`, are added that correspond to the group mappings in the default conf/ldap.toml. `ldapadmin` is a member of `admins` and `ldapeditor` is a member of `users`. + +Note that users that are added here need to specify a `memberOf` attribute manually as well as the `member` attribute for the group. The `memberOf` module usually does this automatically (if you add a group in Apache Directory Studio for example) but this does not work in the entrypoint script as it uses the `slapadd` command to add entries before the server has started and before the `memberOf` module is loaded. + +After adding LDIF files to `prepopulate`: + +1. Remove your current docker container: `docker rm docker_openldap_1` +2. Build: `docker-compose build` +3.
`docker-compose up` diff --git a/docker/blocks/openldap/prepopulate/admin.ldif b/docker/blocks/openldap/prepopulate/admin.ldif new file mode 100644 index 00000000000..3f4406d5810 --- /dev/null +++ b/docker/blocks/openldap/prepopulate/admin.ldif @@ -0,0 +1,10 @@ +dn: cn=ldapadmin,dc=grafana,dc=org +mail: ldapadmin@grafana.com +userPassword: grafana +objectClass: person +objectClass: top +objectClass: inetOrgPerson +objectClass: organizationalPerson +sn: ldapadmin +cn: ldapadmin +memberOf: cn=admins,dc=grafana,dc=org diff --git a/docker/blocks/openldap/prepopulate/adminsgroup.ldif b/docker/blocks/openldap/prepopulate/adminsgroup.ldif new file mode 100644 index 00000000000..d8dece4e458 --- /dev/null +++ b/docker/blocks/openldap/prepopulate/adminsgroup.ldif @@ -0,0 +1,5 @@ +dn: cn=admins,dc=grafana,dc=org +cn: admins +member: cn=ldapadmin,dc=grafana,dc=org +objectClass: groupOfNames +objectClass: top diff --git a/docker/blocks/openldap/prepopulate/editor.ldif b/docker/blocks/openldap/prepopulate/editor.ldif new file mode 100644 index 00000000000..eba3adc4352 --- /dev/null +++ b/docker/blocks/openldap/prepopulate/editor.ldif @@ -0,0 +1,10 @@ +dn: cn=ldapeditor,dc=grafana,dc=org +mail: ldapeditor@grafana.com +userPassword: grafana +objectClass: person +objectClass: top +objectClass: inetOrgPerson +objectClass: organizationalPerson +sn: ldapeditor +cn: ldapeditor +memberOf: cn=users,dc=grafana,dc=org diff --git a/docker/blocks/openldap/prepopulate/usersgroup.ldif b/docker/blocks/openldap/prepopulate/usersgroup.ldif new file mode 100644 index 00000000000..a1de3a50d38 --- /dev/null +++ b/docker/blocks/openldap/prepopulate/usersgroup.ldif @@ -0,0 +1,5 @@ +dn: cn=users,dc=grafana,dc=org +cn: users +member: cn=ldapeditor,dc=grafana,dc=org +objectClass: groupOfNames +objectClass: top diff --git a/docker/blocks/openldap/prepopulate/viewer.ldif b/docker/blocks/openldap/prepopulate/viewer.ldif new file mode 100644 index 00000000000..f699a7df57b --- /dev/null +++ b/docker/blocks/openldap/prepopulate/viewer.ldif @@ -0,0 +1,9 @@ +dn: cn=ldapviewer,dc=grafana,dc=org +mail: ldapviewer@grafana.com +userPassword: grafana +objectClass: person +objectClass: top +objectClass: inetOrgPerson +objectClass: organizationalPerson +sn: ldapviewer +cn: ldapviewer diff --git a/docker/blocks/postgres_tests/Dockerfile b/docker/blocks/postgres_tests/Dockerfile new file mode 100644 index 00000000000..afe4d199651 --- /dev/null +++ b/docker/blocks/postgres_tests/Dockerfile @@ -0,0 +1,3 @@ +FROM postgres:latest +ADD setup.sql /docker-entrypoint-initdb.d +CMD ["postgres"] \ No newline at end of file diff --git a/docker/blocks/postgres_tests/dashboard.json b/docker/blocks/postgres_tests/dashboard.json index eea95863716..9efbe90bdfe 100644 --- a/docker/blocks/postgres_tests/dashboard.json +++ b/docker/blocks/postgres_tests/dashboard.json @@ -100,7 +100,7 @@ "gnetId": null, "graphTooltip": 0, "id": null, - "iteration": 1521725946837, + "iteration": 1523320929325, "links": [], "panels": [ { @@ -443,7 +443,11 @@ "min": null, "show": true } - ] + ], + "yaxis": { + "align": false, + "alignLevel": null + } }, { "aliasColors": {}, @@ -522,7 +526,11 @@ "min": null, "show": true } - ] + ], + "yaxis": { + "align": false, + "alignLevel": null + } }, { "aliasColors": {}, @@ -601,7 +609,11 @@ "min": null, "show": true } - ] + ], + "yaxis": { + "align": false, + "alignLevel": null + } }, { "aliasColors": {}, @@ -680,7 +692,11 @@ "min": null, "show": true } - ] + ], + "yaxis": { + "align": false, + "alignLevel": null + } }, { "aliasColors": {}, @@ 
-759,7 +775,11 @@ "min": null, "show": true } - ] + ], + "yaxis": { + "align": false, + "alignLevel": null + } }, { "aliasColors": {}, @@ -838,7 +858,11 @@ "min": null, "show": true } - ] + ], + "yaxis": { + "align": false, + "alignLevel": null + } }, { "aliasColors": {}, @@ -927,7 +951,11 @@ "min": null, "show": true } - ] + ], + "yaxis": { + "align": false, + "alignLevel": null + } }, { "aliasColors": {}, @@ -1008,7 +1036,11 @@ "min": null, "show": true } - ] + ], + "yaxis": { + "align": false, + "alignLevel": null + } }, { "aliasColors": {}, @@ -1097,7 +1129,11 @@ "min": null, "show": true } - ] + ], + "yaxis": { + "align": false, + "alignLevel": null + } }, { "aliasColors": {}, @@ -1178,7 +1214,11 @@ "min": null, "show": true } - ] + ], + "yaxis": { + "align": false, + "alignLevel": null + } }, { "aliasColors": {}, @@ -1267,7 +1307,11 @@ "min": null, "show": true } - ] + ], + "yaxis": { + "align": false, + "alignLevel": null + } }, { "aliasColors": {}, @@ -1348,7 +1392,11 @@ "min": null, "show": true } - ] + ], + "yaxis": { + "align": false, + "alignLevel": null + } }, { "aliasColors": {}, @@ -1437,7 +1485,11 @@ "min": null, "show": true } - ] + ], + "yaxis": { + "align": false, + "alignLevel": null + } }, { "aliasColors": {}, @@ -1518,7 +1570,11 @@ "min": null, "show": true } - ] + ], + "yaxis": { + "align": false, + "alignLevel": null + } }, { "aliasColors": {}, @@ -1608,7 +1664,11 @@ "min": null, "show": true } - ] + ], + "yaxis": { + "align": false, + "alignLevel": null + } }, { "aliasColors": {}, @@ -1691,7 +1751,11 @@ "min": null, "show": true } - ] + ], + "yaxis": { + "align": false, + "alignLevel": null + } }, { "aliasColors": {}, @@ -1781,7 +1845,11 @@ "min": null, "show": true } - ] + ], + "yaxis": { + "align": false, + "alignLevel": null + } }, { "aliasColors": {}, @@ -1864,7 +1932,11 @@ "min": null, "show": true } - ] + ], + "yaxis": { + "align": false, + "alignLevel": null + } }, { "aliasColors": {}, @@ -1954,7 +2026,11 @@ "min": null, "show": true } - ] + ], + "yaxis": { + "align": false, + "alignLevel": null + } }, { "aliasColors": {}, @@ -2037,7 +2113,11 @@ "min": null, "show": true } - ] + ], + "yaxis": { + "align": false, + "alignLevel": null + } }, { "aliasColors": {}, @@ -2127,7 +2207,11 @@ "min": null, "show": true } - ] + ], + "yaxis": { + "align": false, + "alignLevel": null + } }, { "aliasColors": {}, @@ -2210,7 +2294,11 @@ "min": null, "show": true } - ] + ], + "yaxis": { + "align": false, + "alignLevel": null + } } ], "refresh": false, diff --git a/docker/blocks/postgres_tests/docker-compose.yaml b/docker/blocks/postgres_tests/docker-compose.yaml index 44b66e8e558..f5ce0a5a3d3 100644 --- a/docker/blocks/postgres_tests/docker-compose.yaml +++ b/docker/blocks/postgres_tests/docker-compose.yaml @@ -1,5 +1,6 @@ postgrestest: - image: postgres:latest + build: + context: blocks/postgres_tests environment: POSTGRES_USER: grafanatest POSTGRES_PASSWORD: grafanatest diff --git a/docker/blocks/postgres_tests/setup.sql b/docker/blocks/postgres_tests/setup.sql new file mode 100644 index 00000000000..b182b7c292d --- /dev/null +++ b/docker/blocks/postgres_tests/setup.sql @@ -0,0 +1,3 @@ +CREATE DATABASE grafanadstest; +REVOKE CONNECT ON DATABASE grafanadstest FROM PUBLIC; +GRANT CONNECT ON DATABASE grafanadstest TO grafanatest; \ No newline at end of file diff --git a/docker/blocks/prometheus_mac/Dockerfile b/docker/blocks/prometheus_mac/Dockerfile new file mode 100644 index 00000000000..2098e6527d3 --- /dev/null +++ b/docker/blocks/prometheus_mac/Dockerfile @@ -0,0 +1,3 @@ 
+FROM prom/prometheus:v1.8.2 +ADD prometheus.yml /etc/prometheus/ +ADD alert.rules /etc/prometheus/ diff --git a/docker/blocks/prometheus_mac/alert.rules b/docker/blocks/prometheus_mac/alert.rules new file mode 100644 index 00000000000..563d1e89994 --- /dev/null +++ b/docker/blocks/prometheus_mac/alert.rules @@ -0,0 +1,10 @@ +# Alert Rules + +ALERT AppCrash + IF process_open_fds > 0 + FOR 15s + LABELS { severity="critical" } + ANNOTATIONS { + summary = "Number of open fds > 0", + description = "Just testing" + } diff --git a/docker/blocks/prometheus_mac/docker-compose.yaml b/docker/blocks/prometheus_mac/docker-compose.yaml new file mode 100644 index 00000000000..ef53b07418a --- /dev/null +++ b/docker/blocks/prometheus_mac/docker-compose.yaml @@ -0,0 +1,26 @@ + prometheus: + build: blocks/prometheus_mac + ports: + - "9090:9090" + + node_exporter: + image: prom/node-exporter + ports: + - "9100:9100" + + fake-prometheus-data: + image: grafana/fake-data-gen + ports: + - "9091:9091" + environment: + FD_DATASOURCE: prom + + alertmanager: + image: quay.io/prometheus/alertmanager + ports: + - "9093:9093" + + prometheus-random-data: + build: blocks/prometheus_random_data + ports: + - "8081:8080" diff --git a/docker/blocks/prometheus_mac/prometheus.yml b/docker/blocks/prometheus_mac/prometheus.yml new file mode 100644 index 00000000000..299447ffb25 --- /dev/null +++ b/docker/blocks/prometheus_mac/prometheus.yml @@ -0,0 +1,39 @@ +# my global config +global: + scrape_interval: 10s # Scrape targets every 10 seconds. + evaluation_interval: 10s # Evaluate rules every 10 seconds. + # scrape_timeout is set to the global default (10s). + +# Load and evaluate rules in this file every 'evaluation_interval' seconds. +rule_files: + - "alert.rules" + # - "first.rules" + # - "second.rules" + +alerting: + alertmanagers: + - scheme: http + static_configs: + - targets: + - "alertmanager:9093" + +scrape_configs: + - job_name: 'prometheus' + static_configs: + - targets: ['localhost:9090'] + + - job_name: 'node_exporter' + static_configs: + - targets: ['node_exporter:9100'] + + - job_name: 'fake-data-gen' + static_configs: + - targets: ['fake-prometheus-data:9091'] + + - job_name: 'grafana' + static_configs: + - targets: ['host.docker.internal:3000'] + + - job_name: 'prometheus-random-data' + static_configs: + - targets: ['prometheus-random-data:8080'] diff --git a/docker/blocks/smtp/bootstrap.sh b/docker/blocks/smtp/bootstrap.sh index a78f9d6dc16..27f6a2c3ef8 100755 --- a/docker/blocks/smtp/bootstrap.sh +++ b/docker/blocks/smtp/bootstrap.sh @@ -22,6 +22,6 @@ log() { log $RUN_CMD $RUN_CMD -# Exit immidiately in case of any errors or when we have interactive terminal +# Exit immediately in case of any errors or when we have an interactive terminal if [[ $? != 0 ]] || test -t 0; then exit $?; fi log diff --git a/docs/sources/administration/provisioning.md b/docs/sources/administration/provisioning.md index b43ea68bcd8..42a0fffeda1 100644 --- a/docs/sources/administration/provisioning.md +++ b/docs/sources/administration/provisioning.md @@ -100,7 +100,7 @@ datasources: - name: Graphite # datasource type. Required type: graphite - # access mode. direct or proxy. Required + # access mode. proxy or direct (Server or Browser in the UI). Required access: proxy # org id. will default to orgId 1 if not specified orgId: 1 @@ -138,6 +138,7 @@ datasources: ``` #### Custom Settings per Datasource +Please refer to each datasource's documentation for specific provisioning examples.
| Datasource | Misc | | ---- | ---- | @@ -196,16 +197,25 @@ providers: folder: '' type: file disableDeletion: false - editable: false options: path: /var/lib/grafana/dashboards ``` When Grafana starts, it will update/insert all dashboards available in the configured path. It will then poll that path, looking for updated json files to update/insert into the database. +#### Making changes to a provisioned dashboard + +It's possible to make changes to a provisioned dashboard in the Grafana UI, but there's currently no way to automatically save the changes back to the provisioning source. +However, if you make changes to a provisioned dashboard you can `Save` the dashboard, which will bring up a *Cannot save provisioned dashboard* dialog, as seen in the screenshot below. +The available options let you `Copy JSON to Clipboard` and/or `Save JSON to file`, which can help you synchronize your dashboard changes back to the provisioning source. + +Note: The JSON shown in the input field and when using `Copy JSON to Clipboard` and/or `Save JSON to file` will have the `id` field automatically removed to aid the provisioning workflow. + +{{< docs-imagebox img="/img/docs/v51/provisioning_cannot_save_dashboard.png" max-width="500px" class="docs-image--no-shadow" >}} + ### Reusable Dashboard Urls -If the dashboard in the json file contains an [uid](/reference/dashboard/#json-fields), Grafana will force insert/update on that uid. This allows you to migrate dashboards betweens Grafana instances and provisioning Grafana from configuration without breaking the urls given since the new dashboard url uses the uid as identifer. +If the dashboard in the json file contains an [uid](/reference/dashboard/#json-fields), Grafana will force insert/update on that uid. This allows you to migrate dashboards between Grafana instances and provision Grafana from configuration without breaking the given urls, since the new dashboard url uses the uid as identifier. When Grafana starts, it will update/insert all dashboards available in the configured folders. If you modify the file, the dashboard will also be updated. By default Grafana will delete dashboards in the database if the file is removed. You can disable this behavior using the `disableDeletion` setting. diff --git a/docs/sources/alerting/notifications.md b/docs/sources/alerting/notifications.md index bb119687750..b3b4305a748 100644 --- a/docs/sources/alerting/notifications.md +++ b/docs/sources/alerting/notifications.md @@ -153,10 +153,10 @@ Prometheus Alertmanager | `prometheus-alertmanager` | no # Enable images in notifications {#external-image-store} -Grafana can render the panel associated with the alert rule and include that in the notification. Most Notification Channels require that this image be publicly accessable (Slack and PagerDuty for example). In order to include images in alert notifications, Grafana can upload the image to an image store. It currently supports +Grafana can render the panel associated with the alert rule and include that in the notification. Most Notification Channels require that this image be publicly accessible (Slack and PagerDuty for example). In order to include images in alert notifications, Grafana can upload the image to an image store. It currently supports Amazon S3, Webdav, Google Cloud Storage and Azure Blob Storage. So to set that up you need to configure the [external image uploader](/installation/configuration/#external-image-storage) in your grafana-server ini config file.
-Be aware that some notifiers requires public access to the image to be able to include it in the notification. So make sure to enable public access to the images. If your using local image uploader, your Grafana instance need to be accessible by the internet. +Be aware that some notifiers require public access to the image to be able to include it in the notification, so make sure to enable public access to the images. If you're using the local image uploader, your Grafana instance needs to be accessible from the internet. Currently only the Email Channel attaches images if no external image store is specified. To include images in alert notifications for other channels, you need to set up an external image store. diff --git a/docs/sources/alerting/rules.md b/docs/sources/alerting/rules.md index 9bbbd70641d..bcca3c6b2fb 100644 --- a/docs/sources/alerting/rules.md +++ b/docs/sources/alerting/rules.md @@ -110,7 +110,7 @@ to `Keep Last State` in order to basically ignore them. ## Notifications -In alert tab you can also specify alert rule notifications along with a detailed messsage about the alert rule. +In the alert tab you can also specify alert rule notifications along with a detailed message about the alert rule. The message can contain anything: information about how you might solve the issue, a link to a runbook, etc. The actual notifications are configured and shared between multiple alerts. Read the diff --git a/docs/sources/contribute/cla.md b/docs/sources/contribute/cla.md index b990187d809..ffb2aaef1b9 100644 --- a/docs/sources/contribute/cla.md +++ b/docs/sources/contribute/cla.md @@ -1,6 +1,6 @@ +++ title = "Contributor Licence Agreement (CLA)" -description = "Contributer Licence Agreement (CLA)" +description = "Contributor Licence Agreement (CLA)" type = "docs" aliases = ["/project/cla", "docs/contributing/cla.html"] [menu.docs] @@ -101,4 +101,4 @@ TO THE MAXIMUM EXTENT PERMITTED BY APPLICABLE LAW, IN NO EVENT WILL YOU [OR US]


-This CLA aggreement is based on the [Harmony Contributor Aggrement Template (combined)](http://www.harmonyagreements.org/agreements.html), [Creative Commons Attribution 3.0 Unported License](https://creativecommons.org/licenses/by/3.0/) +This CLA agreement is based on the [Harmony Contributor Agreement Template (combined)](http://www.harmonyagreements.org/agreements.html), [Creative Commons Attribution 3.0 Unported License](https://creativecommons.org/licenses/by/3.0/) diff --git a/docs/sources/features/datasources/cloudwatch.md b/docs/sources/features/datasources/cloudwatch.md index f7f8138b5e9..d178c176602 100644 --- a/docs/sources/features/datasources/cloudwatch.md +++ b/docs/sources/features/datasources/cloudwatch.md @@ -43,6 +43,40 @@ server is running on AWS you can use IAM Roles and authentication will be handle Check out AWS docs on [IAM Roles](http://docs.aws.amazon.com/AWSEC2/latest/UserGuide/iam-roles-for-amazon-ec2.html) +## IAM Policies + +Grafana needs permissions granted via IAM to be able to read CloudWatch metrics +and EC2 tags/instances. You can attach these permissions to IAM roles and +utilize Grafana's built-in support for assuming roles. + +Here is a minimal policy example: + +```json +{ + "Version": "2012-10-17", + "Statement": [ + { + "Sid": "AllowReadingMetricsFromCloudWatch", + "Effect": "Allow", + "Action": [ + "cloudwatch:ListMetrics", + "cloudwatch:GetMetricStatistics" + ], + "Resource": "*" + }, + { + "Sid": "AllowReadingTagsFromEC2", + "Effect": "Allow", + "Action": [ + "ec2:DescribeTags", + "ec2:DescribeInstances" + ], + "Resource": "*" + } + ] +} +``` + ### AWS credentials file Create a file at `~/.aws/credentials`. That is the `HOME` path for the user running grafana-server. @@ -173,3 +207,37 @@ Amazon provides 1 million CloudWatch API requests each month at no additional charge, it costs $0.01 per 1,000 GetMetricStatistics or ListMetrics requests. For each query Grafana will issue a GetMetricStatistics request and every time you pick a dimension in the query editor Grafana will issue a ListMetrics request. + +## Configure the Datasource with Provisioning + +It's now possible to configure datasources using config files with Grafana's provisioning system. You can read more about how it works and all the settings you can set for datasources on the [provisioning docs page](/administration/provisioning/#datasources). + +Here are some provisioning examples for this datasource. + +Using a credentials file +```yaml +apiVersion: 1 + +datasources: + - name: Cloudwatch + type: cloudwatch + jsonData: + authType: credentials + defaultRegion: eu-west-2 +``` + +Using `accessKey` and `secretKey` + +```yaml +apiVersion: 1 + +datasources: + - name: Cloudwatch + type: cloudwatch + jsonData: + authType: keys + defaultRegion: eu-west-2 + secureJsonData: + accessKey: "" + secretKey: "" +``` diff --git a/docs/sources/features/datasources/elasticsearch.md b/docs/sources/features/datasources/elasticsearch.md index 6ce17113a9b..31ce78f0bfe 100644 --- a/docs/sources/features/datasources/elasticsearch.md +++ b/docs/sources/features/datasources/elasticsearch.md @@ -29,13 +29,19 @@ Name | Description *Name* | The data source name. This is how you refer to the data source in panels & queries. *Default* | Default data source means that it will be pre-selected for new panels. *Url* | The HTTP protocol, IP, and port of your Elasticsearch server. -*Access* | Proxy = access via Grafana backend, Direct = access directly from browser.
+*Access* | Server (default) = URL needs to be accessible from the Grafana backend/server, Browser = URL needs to be accessible from the browser. -Proxy access means that the Grafana backend will proxy all requests from the browser, and send them on to the Data Source. This is useful because it can eliminate CORS (Cross Origin Site Resource) issues, as well as eliminate the need to disseminate authentication to the browser. +Access mode controls how requests to the data source will be handled. Server should be the preferred way if nothing else is stated. -### Direct access +### Server access mode (Default) -If you select direct access you must update your Elasticsearch configuration to allow other domains to access +All requests will be made from the browser to the Grafana backend/server, which in turn will forward the requests to the data source and thereby circumvent possible Cross-Origin Resource Sharing (CORS) requirements. The URL needs to be accessible from the Grafana backend/server if you select this access mode. + +### Browser (Direct) access + +All requests will be made from the browser directly to the data source and may be subject to Cross-Origin Resource Sharing (CORS) requirements. The URL needs to be accessible from the browser if you select this access mode. + +If you select Browser access you must update your Elasticsearch configuration to allow other domains to access Elasticsearch from the browser. You do this by specifying these two options in your **elasticsearch.yml** config file. ```bash @@ -45,7 +51,7 @@ http.cors.allow-origin: "*" ### Index settings -![](/img/docs/elasticsearch/elasticsearch_ds_details.png) +![Elasticsearch Datasource Details](/img/docs/elasticsearch/elasticsearch_ds_details.png) Here you can specify a default for the `time field` and specify the name of your Elasticsearch index. You can use a time pattern for the index name or a wildcard. @@ -55,9 +61,25 @@ a time pattern for the index name or a wildcard. Be sure to specify your Elasticsearch version in the version selection dropdown. This is very important as there are differences in how queries are composed. Currently only 2.x and 5.x are supported. +### Min time interval +A lower limit for the auto group by time interval. Recommended to be set to the write frequency, for example `1m` if your data is written every minute. +This option can also be overridden/configured in a dashboard panel under data source options. It's important to note that this value **needs** to be formatted as a +number followed by a valid time identifier, e.g. `1m` (1 minute) or `30s` (30 seconds). The following time identifiers are supported: + +Identifier | Description +------------ | ------------- +`y` | year +`M` | month +`w` | week +`d` | day +`h` | hour +`m` | minute +`s` | second +`ms` | millisecond + ## Metric Query editor -![](/img/docs/elasticsearch/query_editor.png) +![Elasticsearch Query Editor](/img/docs/elasticsearch/query_editor.png) The Elasticsearch query editor allows you to select multiple metrics and group by multiple terms or filters. Use the plus and minus icons to the right to add/remove metrics or group by clauses. Some metrics and group by clauses have options, click the option text to expand the row to view and edit metric or group by options. @@ -137,3 +159,23 @@ Query | You can leave the search query blank or specify a lucene query Time | The name of the time field, needs to be a date field. Text | Event description field. Tags | Optional field name to use for event tags (can be an array or a CSV string).
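+
+As an illustration, the *Query* field for an annotation could hold a Lucene query such as the one below. This is only a sketch: the `tags` and `app` field names are made up and depend entirely on your own index mapping.
+
+```
+tags:deployment AND app:frontend
+```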
+ +## Configure the Datasource with Provisioning + +It's now possible to configure datasources using config files with Grafana's provisioning system. You can read more about how it works and all the settings you can set for datasources on the [provisioning docs page](/administration/provisioning/#datasources). + +Here are some provisioning examples for this datasource. + +```yaml +apiVersion: 1 + +datasources: + - name: Elastic + type: elasticsearch + access: proxy + database: "[metrics-]YYYY.MM.DD" + url: http://localhost:9200 + jsonData: + interval: Daily + timeField: "@timestamp" +``` diff --git a/docs/sources/features/datasources/graphite.md b/docs/sources/features/datasources/graphite.md index 7c4187da9ae..da58a48225f 100644 --- a/docs/sources/features/datasources/graphite.md +++ b/docs/sources/features/datasources/graphite.md @@ -31,20 +31,28 @@ Name | Description *Name* | The data source name. This is how you refer to the data source in panels & queries. *Default* | Default data source means that it will be pre-selected for new panels. *Url* | The HTTP protocol, IP, and port of your graphite-web or graphite-api install. -*Access* | Proxy = access via Grafana backend, Direct = access directly from browser. +*Access* | Server (default) = URL needs to be accessible from the Grafana backend/server, Browser = URL needs to be accessible from the browser. -Proxy access means that the Grafana backend will proxy all requests from the browser, and send them on to the Data Source. This is useful because it can eliminate CORS (Cross Origin Site Resource) issues, as well as eliminate the need to disseminate authentication details to the browser. +Access mode controls how requests to the data source will be handled. Server should be the preferred way if nothing else is stated. + +### Server access mode (Default) + +All requests will be made from the browser to the Grafana backend/server, which in turn will forward the requests to the data source and thereby circumvent possible Cross-Origin Resource Sharing (CORS) requirements. The URL needs to be accessible from the Grafana backend/server if you select this access mode. + +### Browser access mode + +All requests will be made from the browser directly to the data source and may be subject to Cross-Origin Resource Sharing (CORS) requirements. The URL needs to be accessible from the browser if you select this access mode. ## Metric editor ### Navigate metric segments + Click the ``Select metric`` link to start navigating the metric space. Once you start you can continue using the mouse or keyboard arrow keys. You can select a wildcard and still continue. {{< docs-imagebox img="/img/docs/v45/graphite_query1_still.png" animated-gif="/img/docs/v45/graphite_query1.gif" >}} - ### Functions Click the plus icon to the right to add a function. You can search for the function or select it from the menu. Once @@ -55,7 +63,6 @@ by the x icon. {{< docs-imagebox img="/img/docs/v45/graphite_query2_still.png" animated-gif="/img/docs/v45/graphite_query2.gif" >}} - ### Optional parameters Some functions like aliasByNode support an optional second argument. To add this parameter, specify for example 3,-2 as the first parameter and the function editor will adapt and move the -2 to a second parameter. To remove the second optional parameter just click on it and leave it blank and the editor will remove it. @@ -63,7 +70,6 @@ Some functions like aliasByNode support an optional second argument.
To add this {{< docs-imagebox img="/img/docs/v45/graphite_query3_still.png" animated-gif="/img/docs/v45/graphite_query3.gif" >}} - ### Nested Queries You can reference queries by the row “letter” that they’re on (similar to Microsoft Excel). If you add a second query to a graph, you can reference the first query simply by typing in #A. This provides an easy and convenient way to build compounded queries. @@ -71,7 +77,6 @@ You can reference queries by the row “letter” that they’re on (similar to {{< docs-imagebox img="/img/docs/v45/graphite_nested_queries_still.png" animated-gif="/img/docs/v45/graphite_nested_queries.gif" >}} - ## Point consolidation All Graphite metrics are consolidated so that Graphite doesn't return more data points than there are pixels in the graph. By default, @@ -89,6 +94,18 @@ being displayed in your dashboard. Check out the [Templating]({{< relref "reference/templating.md" >}}) documentation for an introduction to the templating feature and the different types of template variables. +Graphite 1.1 introduced tags and Grafana added support for Graphite queries with tags in version 5.0. To create a variable using tag values, you need to use the Grafana functions `tags` and `tag_values`. + +Query | Description +------------ | ------------- +*tags()* | Returns all tags. +*tags(server=~backend\*)* | Returns only tags that occur in series matching the filter expression. +*tag_values(server)* | Returns tag values for the specified tag. +*tag_values(server, server=~backend\*)* | Returns filtered tag values that occur for the specified tag in series matching those expressions. +*tag_values(server, server=~backend\*, app=~${apps:regex})* | Multiple filter expressions and expressions can contain other variables. + +For more details, see the [Graphite docs on the autocomplete api for tags](http://graphite.readthedocs.io/en/latest/tags.html#auto-complete-support). + ### Query variable The query you specify in the query field should be a metric find type of query. For example, a query like `prod.servers.*` will fill the variable with all possible values that exist in the wildcard position. You can also create nested variables that use other variables in their definition. For example `apps.$app.servers.*` uses the variable `$app` in its query definition. -### Variable usage +### Variable Usage You can use a variable in a metric node path or as a parameter to a function. -![](/img/docs/v2/templated_variable_parameter.png) +![variable](/img/docs/v2/templated_variable_parameter.png) There are two syntaxes: @@ -113,6 +130,18 @@ the second syntax in expressions like `my.server[[serverNumber]].count`. Example: [Graphite Templated Dashboard](http://play.grafana.org/dashboard/db/graphite-templated-nested) +### Variable Usage in Tag Queries + +Multi-value variables in tag queries use the advanced formatting syntax introduced in Grafana 5.0 for variables: `{var:regex}`. Non-tag queries will use the default glob formatting for multi-value variables. + +Example of a tag expression with regex formatting and using the Equal Tilde operator, `=~`: + +```text +server=~${servers:regex} +``` + +Check out the [Advanced Formatting Options section in the Variables]({{< relref "reference/templating.md#advanced-formatting-options" >}}) documentation for examples and details. + ## Annotations [Annotations]({{< relref "reference/annotations.md" >}}) allow you to overlay rich event information on top of graphs.
You add annotation @@ -120,3 +149,21 @@ queries via the Dashboard menu / Annotations view. Graphite supports two ways to query annotations: a regular metric query, for which you use the `Graphite query` textbox, and a Graphite events query, for which you use the `Graphite event tags` textbox to specify a tag or wildcard (leaving it empty should also work) + +## Configure the Datasource with Provisioning + +It's now possible to configure datasources using config files with Grafana's provisioning system. You can read more about how it works and all the settings you can set for datasources on the [provisioning docs page](/administration/provisioning/#datasources). + +Here are some provisioning examples for this datasource. + +```yaml +apiVersion: 1 + +datasources: + - name: Graphite + type: graphite + access: proxy + url: http://localhost:8080 + jsonData: + graphiteVersion: "1.1" +``` diff --git a/docs/sources/features/datasources/index.md b/docs/sources/features/datasources/index.md index 54606d20988..a892f38a448 100644 --- a/docs/sources/features/datasources/index.md +++ b/docs/sources/features/datasources/index.md @@ -30,6 +30,7 @@ The following datasources are officially supported: * [Prometheus]({{< relref "prometheus.md" >}}) * [MySQL]({{< relref "mysql.md" >}}) * [Postgres]({{< relref "postgres.md" >}}) +* [Microsoft SQL Server (MSSQL)]({{< relref "mssql.md" >}}) ## Data source plugins diff --git a/docs/sources/features/datasources/influxdb.md b/docs/sources/features/datasources/influxdb.md index 6d0918a0d01..1426f55e40b 100644 --- a/docs/sources/features/datasources/influxdb.md +++ b/docs/sources/features/datasources/influxdb.md @@ -28,16 +28,36 @@ Name | Description *Name* | The data source name. This is how you refer to the data source in panels & queries. *Default* | Default data source means that it will be pre-selected for new panels. *Url* | The http protocol, ip and port of your influxdb api (influxdb api port is by default 8086) -*Access* | Proxy = access via Grafana backend, Direct = access directly from browser. +*Access* | Server (default) = URL needs to be accessible from the Grafana backend/server, Browser = URL needs to be accessible from the browser. *Database* | Name of your influxdb database *User* | Name of your database user *Password* | Database user's password -### Proxy vs Direct access +Access mode controls how requests to the data source will be handled. Server should be the preferred way if nothing else is stated. -Proxy access means that the Grafana backend will proxy all requests from the browser. So requests to InfluxDB will be channeled through -`grafana-server`. This means that the URL you specify needs to be accessible from the server you are running Grafana on. Proxy access -mode is also more secure as the username & password will never reach the browser. +### Server access mode (Default) + +All requests will be made from the browser to the Grafana backend/server, which in turn will forward the requests to the data source and thereby circumvent possible Cross-Origin Resource Sharing (CORS) requirements. The URL needs to be accessible from the Grafana backend/server if you select this access mode. + +### Browser access mode + +All requests will be made from the browser directly to the data source and may be subject to Cross-Origin Resource Sharing (CORS) requirements. The URL needs to be accessible from the browser if you select this access mode. + +### Min time interval +A lower limit for the auto group by time interval.
Recommended to be set to the write frequency, for example `1m` if your data is written every minute. +This option can also be overridden/configured in a dashboard panel under data source options. It's important to note that this value **needs** to be formatted as a +number followed by a valid time identifier, e.g. `1m` (1 minute) or `30s` (30 seconds). The following time identifiers are supported: + +Identifier | Description +------------ | ------------- +`y` | year +`M` | month +`w` | week +`d` | day +`h` | hour +`m` | minute +`s` | second +`ms` | millisecond ## Query Editor @@ -174,3 +194,22 @@ SELECT title, description from events WHERE $timeFilter order asc For InfluxDB you need to enter a query like the one in the above example. You need to have the ```where $timeFilter``` part. If you only select one column you will not need to enter anything in the column mapping fields. The Tags field can be a comma separated string. + +## Configure the Datasource with Provisioning + +It's now possible to configure datasources using config files with Grafana's provisioning system. You can read more about how it works and all the settings you can set for datasources on the [provisioning docs page](/administration/provisioning/#datasources). + +Here are some provisioning examples for this datasource. + +```yaml +apiVersion: 1 + +datasources: + - name: InfluxDB + type: influxdb + access: proxy + database: site + user: grafana + password: grafana + url: http://localhost:8086 +``` diff --git a/docs/sources/features/datasources/mssql.md b/docs/sources/features/datasources/mssql.md new file mode 100644 index 00000000000..1676cffa0a8 --- /dev/null +++ b/docs/sources/features/datasources/mssql.md @@ -0,0 +1,565 @@ ++++ +title = "Using Microsoft SQL Server in Grafana" +description = "Guide for using Microsoft SQL Server in Grafana" +keywords = ["grafana", "MSSQL", "Microsoft", "SQL", "guide", "Azure SQL Database"] +type = "docs" +[menu.docs] +name = "Microsoft SQL Server" +parent = "datasources" +weight = 7 ++++ + +# Using Microsoft SQL Server in Grafana + +> Only available in Grafana v5.1+. + +Grafana ships with a built-in Microsoft SQL Server (MSSQL) data source plugin that allows you to query and visualize data from any Microsoft SQL Server 2005 or newer, including Microsoft Azure SQL Database. + +## Adding the data source + +1. Open the side menu by clicking the Grafana icon in the top header. +2. In the side menu under the `Configuration` link you should find a link named `Data Sources`. +3. Click the `+ Add data source` button in the top header. +4. Select *Microsoft SQL Server* from the *Type* dropdown. + +### Data source options + +Name | Description +------------ | ------------- +*Name* | The data source name. This is how you refer to the data source in panels & queries. +*Default* | Default data source means that it will be pre-selected for new panels. +*Host* | The IP address/hostname and optional port of your MSSQL instance. If port is omitted, the default 1433 will be used. +*Database* | Name of your MSSQL database. +*User* | Database user's login/username +*Password* | Database user's password + +### Database User Permissions (Important!) + +The database user you specify when you add the data source should only be granted SELECT permissions on +the specified database & tables you want to query. Grafana does not validate that the query is safe. The query +could include any SQL statement. For example, statements like `DELETE FROM user;` and `DROP TABLE user;` would be
+To protect against this we **highly** recommend you create a specific MSSQL user with restricted permissions.
+
+Example:
+
+```sql
+ CREATE USER grafanareader WITH PASSWORD 'password'
+ GRANT SELECT ON dbo.YourTable3 TO grafanareader
+```
+
+Make sure the user does not get any unwanted privileges from the public role.
+
+### Known Issues
+
+The MSSQL 2008 and 2008 R2 engines cannot handle login records when SSL encryption is not disabled. Due to this you may receive a `Login error: EOF` error when trying to create your datasource.
+To fix the MSSQL 2008 R2 issue, install MSSQL 2008 R2 Service Pack 2. To fix the MSSQL 2008 issue, install Microsoft MSSQL 2008 Service Pack 3 and Cumulative update package 3 for MSSQL 2008 SP3.
+
+## Query Editor
+
+{{< docs-imagebox img="/img/docs/v51/mssql_query_editor.png" class="docs-image--no-shadow" >}}
+
+You find the MSSQL query editor in the metrics tab in a Graph, Singlestat or Table panel's edit mode. You enter edit mode by clicking the
+panel title, then edit. The editor allows you to define a SQL query to select data to be visualized.
+
+1. Select *Format as* `Time series` (for use in Graph or Singlestat panels, among others) or `Table` (for use in the Table panel, among others).
+2. This is the actual editor where you write your SQL queries.
+3. Show help section for MSSQL below the query editor.
+4. Show the actual executed SQL query. Will only be available after a successful query has been executed.
+5. Add an additional query where an additional query editor will be displayed.
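+
+Before diving into the macros below, here is a minimal sketch of a complete time series query as you might type it into the editor. The `dbo.sensor_values` table and its columns are hypothetical; the `$__timeEpoch` and `$__timeFilter` macros it relies on are described in the next section.
+
+```sql
+SELECT
+  $__timeEpoch(created_at),
+  temperature AS value,
+  'temperature' AS metric
+FROM
+  dbo.sensor_values
+WHERE
+  $__timeFilter(created_at)
+ORDER BY 1
+```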
+ +## Macros + +To simplify syntax and to allow for dynamic parts, like date range filters, the query can contain macros. + +Macro example | Description +------------ | ------------- +*$__time(dateColumn)* | Will be replaced by an expression to rename the column to *time*. For example, *dateColumn as time* +*$__timeEpoch(dateColumn)* | Will be replaced by an expression to convert a DATETIME column type to unix timestamp and rename it to *time*.
For example, *DATEDIFF(second, '1970-01-01', dateColumn) AS time* +*$__timeFilter(dateColumn)* | Will be replaced by a time range filter using the specified column name.
For example, *dateColumn >= DATEADD(s, 1494410783, '1970-01-01') AND dateColumn <= DATEADD(s, 1494497183, '1970-01-01')*
+*$__timeFrom()* | Will be replaced by the start of the currently active time selection. For example, *DATEADD(second, 1494410783, '1970-01-01')*
+*$__timeTo()* | Will be replaced by the end of the currently active time selection. For example, *DATEADD(second, 1494497183, '1970-01-01')*
+*$__timeGroup(dateColumn,'5m'[, fillvalue])* | Will be replaced by an expression usable in a GROUP BY clause. Providing a *fillValue* of *NULL* or a floating point value will automatically fill empty series in the time range with that value.<br/>
For example, *CAST(ROUND(DATEDIFF(second, '1970-01-01', time_column)/300.0, 0) as bigint)\*300*.
+*$__timeGroup(dateColumn,'5m', 0)* | Same as above but with a fill parameter so all null values will be converted to the fill value (all null values would be set to zero using this example).
+*$__unixEpochFilter(dateColumn)* | Will be replaced by a time range filter using the specified column name with times represented as unix timestamp. For example, *dateColumn > 1494410783 AND dateColumn < 1494497183*
+*$__unixEpochFrom()* | Will be replaced by the start of the currently active time selection as unix timestamp. For example, *1494410783*
+*$__unixEpochTo()* | Will be replaced by the end of the currently active time selection as unix timestamp. For example, *1494497183*
+
+We plan to add many more macros. If you have suggestions for what macros you would like to see, please [open an issue](https://github.com/grafana/grafana) in our GitHub repo.
+
+The query editor has a link named `Generated SQL` that shows up after a query has been executed, while in panel edit mode. Click on it and it will expand and show the raw interpolated SQL string that was executed.
+
+## Table queries
+
+If the `Format as` query option is set to `Table` then you can basically do any type of SQL query. The table panel will automatically show the results of whatever columns & rows your query returns.
+
+**Example database table:**
+
+```sql
+CREATE TABLE [event] (
+  time_sec bigint,
+  description nvarchar(100),
+  tags nvarchar(100),
+)
+```
+
+```sql
+CREATE TABLE [mssql_types] (
+  c_bit bit, c_tinyint tinyint, c_smallint smallint, c_int int, c_bigint bigint, c_money money, c_smallmoney smallmoney, c_numeric numeric(10,5),
+  c_real real, c_decimal decimal(10,2), c_float float,
+  c_char char(10), c_varchar varchar(10), c_text text,
+  c_nchar nchar(12), c_nvarchar nvarchar(12), c_ntext ntext,
+  c_datetime datetime, c_datetime2 datetime2, c_smalldatetime smalldatetime, c_date date, c_time time, c_datetimeoffset datetimeoffset
+)
+
+INSERT INTO [mssql_types]
+SELECT
+  1, 5, 20020, 980300, 1420070400, '$20000.15', '£2.15', 12345.12,
+  1.11, 2.22, 3.33,
+  'char10', 'varchar10', 'text',
+  N'☺nchar12☺', N'☺nvarchar12☺', N'☺text☺',
+  GETDATE(), CAST(GETDATE() AS DATETIME2), CAST(GETDATE() AS SMALLDATETIME), CAST(GETDATE() AS DATE), CAST(GETDATE() AS TIME), SWITCHOFFSET(CAST(GETDATE() AS DATETIMEOFFSET), '-07:00')
+```
+
+Query editor with example query:
+
+{{< docs-imagebox img="/img/docs/v51/mssql_table_query.png" max-width="500px" class="docs-image--no-shadow" >}}
+
+
+The query:
+
+```sql
+SELECT * FROM [mssql_types]
+```
+
+You can control the name of the Table panel columns by using regular `AS ` SQL column selection syntax. Example:
+
+```sql
+SELECT
+  c_bit as [column1], c_tinyint as [column2]
+FROM
+  [mssql_types]
+```
+
+The resulting table panel:
+
+{{< docs-imagebox img="/img/docs/v51/mssql_table_result.png" max-width="1489px" class="docs-image--no-shadow" >}}
+
+## Time series queries
+
+If you set `Format as` to `Time series`, for use in Graph panel for example, then the query must have a column named `time` that returns either a sql datetime or any numeric datatype representing unix epoch in seconds. You may return a column named `metric` that is used as metric name for the value column. Any column except `time` and `metric` is treated as a value column. If you omit the `metric` column, the name of the value column will be the metric name.
You may select multiple value columns, each of which will use its column name as the metric name.
+
+**Example database table:**
+
+```sql
+CREATE TABLE [event] (
+  time_sec bigint,
+  description nvarchar(100),
+  tags nvarchar(100),
+)
+```
+
+
+```sql
+CREATE TABLE metric_values (
+  time datetime,
+  measurement nvarchar(100),
+  valueOne int,
+  valueTwo int,
+)
+
+INSERT metric_values (time, measurement, valueOne, valueTwo) VALUES('2018-03-15 12:30:00', 'Metric A', 62, 6)
+INSERT metric_values (time, measurement, valueOne, valueTwo) VALUES('2018-03-15 12:30:00', 'Metric B', 49, 11)
+...
+INSERT metric_values (time, measurement, valueOne, valueTwo) VALUES('2018-03-15 13:55:00', 'Metric A', 14, 25)
+INSERT metric_values (time, measurement, valueOne, valueTwo) VALUES('2018-03-15 13:55:00', 'Metric B', 48, 10)
+
+```
+
+{{< docs-imagebox img="/img/docs/v51/mssql_time_series_one.png" class="docs-image--no-shadow docs-image--right" >}}
+
+**Example with one `value` and one `metric` column.**
+
+```sql
+SELECT
+  time,
+  valueOne,
+  measurement as metric
+FROM
+  metric_values
+WHERE
+  $__timeFilter(time)
+ORDER BY 1
+```
+
+When the above query is used in a graph panel, the result will be two series named `Metric A` and `Metric B` with the values of `valueOne` plotted over `time`.
+
+<br>
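+
+As a contrast, here is a sketch of the same query with the `metric` column omitted. Per the rules above, the single resulting series would then be named `valueOne`, after its value column:
+
+```sql
+SELECT
+  time,
+  valueOne
+FROM
+  metric_values
+WHERE
+  $__timeFilter(time)
+ORDER BY 1
+```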
+
+{{< docs-imagebox img="/img/docs/v51/mssql_time_series_two.png" class="docs-image--no-shadow docs-image--right" >}}
+
+**Example with multiple `value` columns:**
+
+```sql
+SELECT
+  time,
+  valueOne,
+  valueTwo
+FROM
+  metric_values
+WHERE
+  $__timeFilter(time)
+ORDER BY 1
+```
+
+When the above query is used in a graph panel, the result will be two series named `valueOne` and `valueTwo` plotted over `time`.
+
+<br>
+
+{{< docs-imagebox img="/img/docs/v51/mssql_time_series_three.png" class="docs-image--no-shadow docs-image--right" >}}
+
+**Example using the $__timeGroup macro:**
+
+```sql
+SELECT
+  $__timeGroup(time, '3m') as time,
+  measurement as metric,
+  avg(valueOne)
+FROM
+  metric_values
+WHERE
+  $__timeFilter(time)
+GROUP BY
+  $__timeGroup(time, '3m'),
+  measurement
+ORDER BY 1
+```
+
+When the above query is used in a graph panel, the result will be two series named `Metric A` and `Metric B` with an average of `valueOne` plotted over `time`.
+When a series is missing a value in a 3 minute window, a line is rendered between the two surrounding points, which is why the graph to the right never goes down to zero.
+
+<br>
+
+{{< docs-imagebox img="/img/docs/v51/mssql_time_series_four.png" class="docs-image--no-shadow docs-image--right" >}}
+
+**Example using the $__timeGroup macro with fill parameter set to zero:**
+
+```sql
+SELECT
+  $__timeGroup(time, '3m', 0) as time,
+  measurement as metric,
+  sum(valueTwo)
+FROM
+  metric_values
+WHERE
+  $__timeFilter(time)
+GROUP BY
+  $__timeGroup(time, '3m'),
+  measurement
+ORDER BY 1
+```
+
+When the above query is used in a graph panel, the result will be two series named `Metric A` and `Metric B` with a sum of `valueTwo` plotted over `time`.
+Any series lacking a value in a 3 minute window will have a value of zero, which you'll see rendered in the graph to the right.
+
+## Templating
+
+Instead of hard-coding things like server, application and sensor name in your metric queries you can use variables in their place. Variables are shown as dropdown select boxes at the top of the dashboard. These dropdowns make it easy to change the data being displayed in your dashboard.
+
+Check out the [Templating]({{< relref "reference/templating.md" >}}) documentation for an introduction to the templating feature and the different types of template variables.
+
+### Query Variable
+
+If you add a template variable of the type `Query`, you can write an MSSQL query that can
+return things like measurement names, key names or key values that are shown as a dropdown select box.
+
+For example, you can have a variable that contains all values for the `hostname` column in a table if you specify a query like this in the templating variable *Query* setting.
+
+```sql
+SELECT hostname FROM host
+```
+
+A query can return multiple columns and Grafana will automatically create a list from them. For example, the query below will return a list with values from `hostname` and `hostname2`.
+
+```sql
+SELECT [host].[hostname], [other_host].[hostname2] FROM host JOIN other_host ON [host].[city] = [other_host].[city]
+```
+
+Another option is a query that can create a key/value variable. The query should return two columns that are named `__text` and `__value`. The `__text` column value should be unique (if it is not unique then the first value is used). The options in the dropdown will have a text and value that allows you to have a friendly name as text and an id as the value. An example query with `hostname` as the text and `id` as the value:
+
+```sql
+SELECT hostname __text, id __value FROM host
+```
+
+You can also create nested variables. For example, if you had another variable named `region`, you could have
+the hosts variable only show hosts from the currently selected region with a query like this (if `region` is a multi-value variable then use the `IN` comparison operator rather than `=` to match against multiple values):
+
+```sql
+SELECT hostname FROM host WHERE region IN ($region)
+```
+
+### Using Variables in Queries
+
+> From Grafana 4.3.0 to 4.6.0, template variables are always quoted automatically, so if it is a string value do not wrap it in quotes in where clauses.
+>
+> From Grafana 5.0.0, template variable values are only quoted when the template variable is a `multi-value`.
+
+If the variable is a multi-value variable then use the `IN` comparison operator rather than `=` to match against multiple values.
+
+There are two syntaxes:
+
+`$<varname>` Example with a template variable named `hostname`:
+
+```sql
+SELECT
+  atimestamp time,
+  aint value
+FROM table
+WHERE $__timeFilter(atimestamp) and hostname in($hostname)
+ORDER BY atimestamp
+```
+
+`[[varname]]` Example with a template variable named `hostname`:
+
+```sql
+SELECT
+  atimestamp as time,
+  aint as value
+FROM table
+WHERE $__timeFilter(atimestamp) and hostname in([[hostname]])
+ORDER BY atimestamp
+```
+
+#### Disabling Quoting for Multi-value Variables
+
+Grafana automatically creates a quoted, comma-separated string for multi-value variables. For example: if `server01` and `server02` are selected then it will be formatted as: `'server01', 'server02'`. To disable quoting, use the csv formatting option for variables:
+
+`${servers:csv}`
+
+Read more about variable formatting options in the [Variables]({{< relref "reference/templating.md#advanced-formatting-options" >}}) documentation.
+
+## Annotations
+
+[Annotations]({{< relref "reference/annotations.md" >}}) allow you to overlay rich event information on top of graphs. You add annotation queries via the Dashboard menu / Annotations view.
+
+**Columns:**
+
+Name | Description
+------------ | -------------
+time | The name of the date/time field. Could be a column with a native sql date/time data type or epoch value.
+text | Event description field.
+tags | Optional field name to use for event tags as a comma separated string.
+
+**Example database tables:**
+
+```sql
+CREATE TABLE [events] (
+  time_sec bigint,
+  description nvarchar(100),
+  tags nvarchar(100),
+)
+```
+
+We also use the database table defined in [Time series queries](#time-series-queries).
+
+**Example query using time column with epoch values:**
+
+```sql
+SELECT
+  time_sec as time,
+  description as [text],
+  tags
+FROM
+  [events]
+WHERE
+  $__unixEpochFilter(time_sec)
+ORDER BY 1
+```
+
+**Example query using time column of native sql date/time data type:**
+
+```sql
+SELECT
+  time,
+  measurement as text,
+  convert(varchar, valueOne) + ',' + convert(varchar, valueTwo) as tags
+FROM
+  metric_values
+WHERE
+  $__timeFilter(time)
+ORDER BY 1
+```
+
+## Stored procedure support
+
+Stored procedures have been verified to work. However, please note that we haven't done anything special to support this, so there may be edge cases where it won't work as you would expect.
+Stored procedures should be supported in table, time series and annotation queries as long as you use the same naming of columns and return data in the same format as described above under the respective sections.
+
+Please note that any macro function will not work inside a stored procedure.
+
+### Examples
+
+{{< docs-imagebox img="/img/docs/v51/mssql_metrics_graph.png" class="docs-image--no-shadow docs-image--right" >}}
+For the following examples we use the database table defined in [Time series queries](#time-series-queries). Let's say that we want to visualize 4 series in a graph panel, i.e. all combinations of columns `valueOne`, `valueTwo` and `measurement`. The graph panel to the right visualizes what we want to achieve.
To solve this we actually need to use two queries:
+
+**First query:**
+
+```sql
+SELECT
+  $__timeGroup(time, '5m') as time,
+  measurement + ' - value one' as metric,
+  avg(valueOne) as valueOne
+FROM
+  metric_values
+WHERE
+  $__timeFilter(time)
+GROUP BY
+  $__timeGroup(time, '5m'),
+  measurement
+ORDER BY 1
+```
+
+**Second query:**
+```sql
+SELECT
+  $__timeGroup(time, '5m') as time,
+  measurement + ' - value two' as metric,
+  avg(valueTwo) as valueTwo
+FROM
+  metric_values
+WHERE
+  $__timeFilter(time)
+GROUP BY
+  $__timeGroup(time, '5m'),
+  measurement
+ORDER BY 1
+```
+
+#### Stored procedure using time in epoch format
+
+We can define a stored procedure that will return all the data we need to render 4 series in a graph panel like above.
+In this case the stored procedure accepts two parameters `@from` and `@to` of `int` data types which should be a time range (from-to) in epoch format
+that will be used to filter the data to return from the stored procedure.
+
+We're mimicking the `$__timeGroup(time, '5m')` in the select and group by expressions, which is why there are a lot of lengthy expressions needed -
+these could be extracted to MSSQL functions, if wanted.
+
+```sql
+CREATE PROCEDURE sp_test_epoch(
+  @from int,
+  @to 	int
+) AS
+BEGIN
+  SELECT
+    cast(cast(DATEDIFF(second, {d '1970-01-01'}, DATEADD(second, DATEDIFF(second,GETDATE(),GETUTCDATE()), time))/300 as int)*300 as int) as time,
+    measurement + ' - value one' as metric,
+    avg(valueOne) as value
+  FROM
+    metric_values
+  WHERE
+    time >= DATEADD(s, @from, '1970-01-01') AND time <= DATEADD(s, @to, '1970-01-01')
+  GROUP BY
+    cast(cast(DATEDIFF(second, {d '1970-01-01'}, DATEADD(second, DATEDIFF(second,GETDATE(),GETUTCDATE()), time))/300 as int)*300 as int),
+    measurement
+  UNION ALL
+  SELECT
+    cast(cast(DATEDIFF(second, {d '1970-01-01'}, DATEADD(second, DATEDIFF(second,GETDATE(),GETUTCDATE()), time))/300 as int)*300 as int) as time,
+    measurement + ' - value two' as metric,
+    avg(valueTwo) as value
+  FROM
+    metric_values
+  WHERE
+    time >= DATEADD(s, @from, '1970-01-01') AND time <= DATEADD(s, @to, '1970-01-01')
+  GROUP BY
+    cast(cast(DATEDIFF(second, {d '1970-01-01'}, DATEADD(second, DATEDIFF(second,GETDATE(),GETUTCDATE()), time))/300 as int)*300 as int),
+    measurement
+  ORDER BY 1
+END
+```
+
+Then we can use the following query for our graph panel.
+
+```sql
+DECLARE
+  @from int = $__unixEpochFrom(),
+  @to int = $__unixEpochTo()
+
+EXEC dbo.sp_test_epoch @from, @to
+```
+
+#### Stored procedure using time in datetime format
+
+We can define a stored procedure that will return all the data we need to render 4 series in a graph panel like above.
+In this case the stored procedure accepts two parameters `@from` and `@to` of `datetime` data types which should be a time range (from-to)
+that will be used to filter the data to return from the stored procedure.
+
+We're mimicking the `$__timeGroup(time, '5m')` in the select and group by expressions, which is why there are a lot of lengthy expressions needed -
+these could be extracted to MSSQL functions, if wanted.
+
+```sql
+CREATE PROCEDURE sp_test_datetime(
+  @from datetime,
+  @to datetime
+) AS
+BEGIN
+  SELECT
+    cast(cast(DATEDIFF(second, {d '1970-01-01'}, time)/300 as int)*300 as int) as time,
+    measurement + ' - value one' as metric,
+    avg(valueOne) as value
+  FROM
+    metric_values
+  WHERE
+    time >= @from AND time <= @to
+  GROUP BY
+    cast(cast(DATEDIFF(second, {d '1970-01-01'}, time)/300 as int)*300 as int),
+    measurement
+  UNION ALL
+  SELECT
+    cast(cast(DATEDIFF(second, {d '1970-01-01'}, time)/300 as int)*300 as int) as time,
+    measurement + ' - value two' as metric,
+    avg(valueTwo) as value
+  FROM
+    metric_values
+  WHERE
+    time >= @from AND time <= @to
+  GROUP BY
+    cast(cast(DATEDIFF(second, {d '1970-01-01'}, time)/300 as int)*300 as int),
+    measurement
+  ORDER BY 1
+END
+
+```
+
+Then we can use the following query for our graph panel.
+
+```sql
+DECLARE
+  @from datetime = $__timeFrom(),
+  @to datetime = $__timeTo()
+
+EXEC dbo.sp_test_datetime @from, @to
+```
+
+## Alerting
+
+Time series queries should work in alerting conditions. Table formatted queries are not yet supported in alert rule
+conditions.
+
+## Configure the Datasource with Provisioning
+
+It's now possible to configure datasources using config files with Grafana's provisioning system. You can read more about how it works and all the settings you can set for datasources on the [provisioning docs page](/administration/provisioning/#datasources).
+
+Here are some provisioning examples for this datasource.
+
+```yaml
+apiVersion: 1
+
+datasources:
+  - name: MSSQL
+    type: mssql
+    url: localhost:1433
+    database: grafana
+    user: grafana
+    secureJsonData:
+      password: "Password!"
+
+```
diff --git a/docs/sources/features/datasources/mysql.md b/docs/sources/features/datasources/mysql.md
index 7fae7441b6d..f91417a43b7 100644
--- a/docs/sources/features/datasources/mysql.md
+++ b/docs/sources/features/datasources/mysql.md
@@ -12,6 +12,8 @@ weight = 7
# Using MySQL in Grafana

> Only available in Grafana v4.3+.
+>
+> Starting from Grafana v5.1 you can name the time column *time* in addition to the previously supported *time_sec*. Usage of *time_sec* will eventually be deprecated.

Grafana ships with a built-in MySQL data source plugin that allow you to query any visualize
data from a MySQL compatible database.
@@ -23,6 +25,17 @@ data from a MySQL compatible database.

3. Click the `+ Add data source` button in the top header.
4. Select *MySQL* from the *Type* dropdown.

+### Data source options
+
+Name | Description
+------------ | -------------
+*Name* | The data source name. This is how you refer to the data source in panels & queries.
+*Default* | Default data source means that it will be pre-selected for new panels.
+*Host* | The IP address/hostname and optional port of your MySQL instance.
+*Database* | Name of your MySQL database.
+*User* | Database user's login/username
+*Password* | Database user's password
+
### Database User Permissions (Important!)

The database user you specify when you add the data source should only be granted SELECT permissions on
@@ -46,10 +59,12 @@ To simplify syntax and to allow for dynamic parts, like date range filters, the

Macro example | Description
------------ | -------------
*$__time(dateColumn)* | Will be replaced by an expression to convert to a UNIX timestamp and rename the column to `time_sec`. For example, *UNIX_TIMESTAMP(dateColumn) as time_sec*
+*$__timeEpoch(dateColumn)* | Will be replaced by an expression to convert to a UNIX timestamp and rename the column to `time_sec`.
For example, *UNIX_TIMESTAMP(dateColumn) as time_sec* *$__timeFilter(dateColumn)* | Will be replaced by a time range filter using the specified column name. For example, *dateColumn > FROM_UNIXTIME(1494410783) AND dateColumn < FROM_UNIXTIME(1494497183)* *$__timeFrom()* | Will be replaced by the start of the currently active time selection. For example, *FROM_UNIXTIME(1494410783)* *$__timeTo()* | Will be replaced by the end of the currently active time selection. For example, *FROM_UNIXTIME(1494497183)* -*$__timeGroup(dateColumn,'5m')* | Will be replaced by an expression usable in GROUP BY clause. For example, *cast(cast(UNIX_TIMESTAMP(dateColumn)/(300) as signed)*300 as signed) as time_sec,* +*$__timeGroup(dateColumn,'5m')* | Will be replaced by an expression usable in GROUP BY clause. For example, *cast(cast(UNIX_TIMESTAMP(dateColumn)/(300) as signed)*300 as signed),* +*$__timeGroup(dateColumn,'5m',0)* | Same as above but with a fill parameter so all null values will be converted to the fill value (all null values would be set to zero using this example). *$__unixEpochFilter(dateColumn)* | Will be replaced by a time range filter using the specified column name with times represented as unix timestamp. For example, *dateColumn > 1494410783 AND dateColumn < 1494497183* *$__unixEpochFrom()* | Will be replaced by the start of the currently active time selection as unix timestamp. For example, *1494410783* *$__unixEpochTo()* | Will be replaced by the end of the currently active time selection as unix timestamp. For example, *1494497183* @@ -84,39 +99,50 @@ The resulting table panel: ![](/img/docs/v43/mysql_table.png) -### Time series queries +## Time series queries -If you set `Format as` to `Time series`, for use in Graph panel for example, then there are some requirements for -what your query returns. +If you set `Format as` to `Time series`, for use in Graph panel for example, then the query must return a column named `time` that returns either a sql datetime or any numeric datatype representing unix epoch. +Any column except `time` and `metric` is treated as a value column. +You may return a column named `metric` that is used as metric name for the value column. -- Must be a column named `time_sec` representing a unix epoch in seconds. -- Must be a column named `value` representing the time series value. -- Must be a column named `metric` representing the time series name. 
-
-Example:
+**Example with `metric` column:**

```sql
SELECT
-  min(UNIX_TIMESTAMP(time_date_time)) as time_sec,
-  max(value_double) as value,
-  metric1 as metric
-FROM test_data
-WHERE $__timeFilter(time_date_time)
-GROUP BY metric1, UNIX_TIMESTAMP(time_date_time) DIV 300
-ORDER BY time_sec asc
-```
-
-Example with $__timeGroup macro:
-
-```sql
-SELECT
-  $__timeGroup(time_date_time,'5m') as time_sec,
-  min(value_double) as value,
-  metric_name as metric
+  $__timeGroup(time_date_time,'5m'),
+  min(value_double),
+  'min' as metric
FROM test_data
WHERE $__timeFilter(time_date_time)
-GROUP BY 1, metric_name
-ORDER BY 1
+GROUP BY time
+ORDER BY time
+```
+
+**Example using the fill parameter in the $__timeGroup macro to convert null values to be zero instead:**
+
+```sql
+SELECT
+  $__timeGroup(createdAt,'5m',0),
+  sum(value_double) as value,
+  measurement
+FROM test_data
+WHERE
+  $__timeFilter(createdAt)
+GROUP BY time, measurement
+ORDER BY time
+```
+
+**Example with multiple columns:**
+
+```sql
+SELECT
+  $__timeGroup(time_date_time,'5m'),
+  min(value_double) as min_value,
+  max(value_double) as max_value
+FROM test_data
+WHERE $__timeFilter(time_date_time)
+GROUP BY time
+ORDER BY time
```

Currently, there is no support for a dynamic group by time based on time range & panel width.
@@ -180,7 +206,7 @@ There are two syntaxes:

```sql
SELECT
-  UNIX_TIMESTAMP(atimestamp) as time_sec,
+  UNIX_TIMESTAMP(atimestamp) as time,
  aint as value,
  avarchar as metric
FROM my_table
@@ -192,7 +218,7 @@

```sql
SELECT
-  UNIX_TIMESTAMP(atimestamp) as time_sec,
+  UNIX_TIMESTAMP(atimestamp) as time,
  aint as value,
  avarchar as metric
FROM my_table
@@ -200,28 +226,68 @@ WHERE $__timeFilter(atimestamp) and hostname in([[hostname]])
ORDER BY atimestamp ASC
```

+#### Disabling Quoting for Multi-value Variables
+
+Grafana automatically creates a quoted, comma-separated string for multi-value variables. For example: if `server01` and `server02` are selected then it will be formatted as: `'server01', 'server02'`. To disable quoting, use the csv formatting option for variables:
+
+`${servers:csv}`
+
+Read more about variable formatting options in the [Variables]({{< relref "reference/templating.md#advanced-formatting-options" >}}) documentation.
+
## Annotations

-[Annotations]({{< relref "reference/annotations.md" >}}) allows you to overlay rich event information on top of graphs. You add annotation queries via the Dashboard menu / Annotations view.
+[Annotations]({{< relref "reference/annotations.md" >}}) allow you to overlay rich event information on top of graphs. You add annotation queries via the Dashboard menu / Annotations view.

-An example query:
+**Example query using time column with epoch values:**

```sql
SELECT
-  UNIX_TIMESTAMP(atimestamp) as time_sec,
-  value as text,
+  epoch_time as time,
+  metric1 as text,
  CONCAT(tag1, ',', tag2) as tags
-FROM my_table
-WHERE $__timeFilter(atimestamp)
-ORDER BY atimestamp ASC
+FROM
+  public.test_data
+WHERE
+  $__unixEpochFilter(epoch_time)
+```
+
+**Example query using time column of native sql date/time data type:**
+
+```sql
+SELECT
+  native_date_time as time,
+  metric1 as text,
+  CONCAT(tag1, ',', tag2) as tags
+FROM
+  public.test_data
+WHERE
+  $__timeFilter(native_date_time)
```

Name | Description
------------ | -------------
-time_sec | The name of the date/time field.
+time | The name of the date/time field. Could be a column with a native sql date/time data type or epoch value.
text | Event description field.
tags | Optional field name to use for event tags as a comma separated string.

## Alerting

Time series queries should work in alerting conditions. Table formatted queries is not yet supported in alert rule
conditions.
+
+## Configure the Datasource with Provisioning
+
+It's now possible to configure datasources using config files with Grafana's provisioning system. You can read more about how it works and all the settings you can set for datasources on the [provisioning docs page](/administration/provisioning/#datasources).
+
+Here are some provisioning examples for this datasource.
+
+```yaml
+apiVersion: 1
+
+datasources:
+  - name: MySQL
+    type: mysql
+    url: localhost:3306
+    database: grafana
+    user: grafana
+    password: password
+```
diff --git a/docs/sources/features/datasources/opentsdb.md b/docs/sources/features/datasources/opentsdb.md
index 03795473ff7..1f6f022a18c 100644
--- a/docs/sources/features/datasources/opentsdb.md
+++ b/docs/sources/features/datasources/opentsdb.md
@@ -28,11 +28,10 @@ Name | Description

*Name* | The data source name. This is how you refer to the data source in panels & queries.
*Default* | Default data source means that it will be pre-selected for new panels.
*Url* | The http protocol, ip and port of you opentsdb server (default port is usually 4242)
-*Access* | Proxy = access via Grafana backend, Direct = access directly from browser.
+*Access* | Server (default) = URL needs to be accessible from the Grafana backend/server, Browser = URL needs to be accessible from the browser.
*Version* | Version = opentsdb version, either <=2.1 or 2.2
*Resolution* | Metrics from opentsdb may have datapoints with either second or millisecond resolution.
-
## Query editor

Open a graph in edit mode by click the title. Query editor will differ if the datasource has version <=2.1 or = 2.2.
@@ -78,7 +77,7 @@ the existing time series data in OpenTSDB, you need to run `tsdb uid metasync` o

### Nested Templating

-One template variable can be used to filter tag values for another template varible. First parameter is the metric name,
+One template variable can be used to filter tag values for another template variable. First parameter is the metric name,
second parameter is the tag key for which you need to find tag values, and after that all other dependent template variables.
Some examples are mentioned below to make nested template queries work successfully.
@@ -88,3 +87,22 @@ Query | Description

*tag_values(cpu, hostanme, env=$env, region=$region)* | Return tag values for cpu metric, selected env tag value, selected region tag value and tag key hostname

For details on OpenTSDB metric queries checkout the official [OpenTSDB documentation](http://opentsdb.net/docs/build/html/index.html)
+
+## Configure the Datasource with Provisioning
+
+It's now possible to configure datasources using config files with Grafana's provisioning system. You can read more about how it works and all the settings you can set for datasources on the [provisioning docs page](/administration/provisioning/#datasources).
+
+Here are some provisioning examples for this datasource.
+ +```yaml +apiVersion: 1 + +datasources: + - name: OpenTsdb + type: opentsdb + access: proxy + url: http://localhost:4242 + jsonData: + tsdbResolution: 1 + tsdbVersion: 1 +``` diff --git a/docs/sources/features/datasources/postgres.md b/docs/sources/features/datasources/postgres.md index 7d52df2fd3e..f9af60a2efc 100644 --- a/docs/sources/features/datasources/postgres.md +++ b/docs/sources/features/datasources/postgres.md @@ -20,6 +20,18 @@ Grafana ships with a built-in PostgreSQL data source plugin that allows you to q 3. Click the `+ Add data source` button in the top header. 4. Select *PostgreSQL* from the *Type* dropdown. +### Data source options + +Name | Description +------------ | ------------- +*Name* | The data source name. This is how you refer to the data source in panels & queries. +*Default* | Default data source means that it will be pre-selected for new panels. +*Host* | The IP address/hostname and optional port of your PostgreSQL instance. +*Database* | Name of your PostgreSQL database. +*User* | Database user's login/username +*Password* | Database user's password +*SSL Mode* | This option determines whether or with what priority a secure SSL TCP/IP connection will be negotiated with the server. + ### Database User Permissions (Important!) The database user you specify when you add the data source should only be granted SELECT permissions on @@ -45,11 +57,12 @@ Macro example | Description ------------ | ------------- *$__time(dateColumn)* | Will be replaced by an expression to rename the column to `time`. For example, *dateColumn as time* *$__timeSec(dateColumn)* | Will be replaced by an expression to rename the column to `time` and converting the value to unix timestamp. For example, *extract(epoch from dateColumn) as time* -*$__timeFilter(dateColumn)* | Will be replaced by a time range filter using the specified column name. For example, *extract(epoch from dateColumn) BETWEEN 1494410783 AND 1494497183* -*$__timeFrom()* | Will be replaced by the start of the currently active time selection. For example, *to_timestamp(1494410783)* -*$__timeTo()* | Will be replaced by the end of the currently active time selection. For example, *to_timestamp(1494497183)* +*$__timeFilter(dateColumn)* | Will be replaced by a time range filter using the specified column name. For example, *dateColumn BETWEEN '2017-04-21T05:01:17Z' AND '2017-04-21T05:06:17Z'* +*$__timeFrom()* | Will be replaced by the start of the currently active time selection. For example, *'2017-04-21T05:01:17Z'* +*$__timeTo()* | Will be replaced by the end of the currently active time selection. For example, *'2017-04-21T05:06:17Z'* *$__timeGroup(dateColumn,'5m')* | Will be replaced by an expression usable in GROUP BY clause. For example, *(extract(epoch from dateColumn)/300)::bigint*300 AS time* -*$__unixEpochFilter(dateColumn)* | Will be replaced by a time range filter using the specified column name with times represented as unix timestamp. For example, *dateColumn > 1494410783 AND dateColumn < 1494497183* +*$__timeGroup(dateColumn,'5m', 0)* | Same as above but with a fill parameter so all null values will be converted to the fill value (all null values would be set to zero using this example). +*$__unixEpochFilter(dateColumn)* | Will be replaced by a time range filter using the specified column name with times represented as unix timestamp. For example, *dateColumn >= 1494410783 AND dateColumn <= 1494497183* *$__unixEpochFrom()* | Will be replaced by the start of the currently active time selection as unix timestamp. 
For example, *1494410783*
*$__unixEpochTo()* | Will be replaced by the end of the currently active time selection as unix timestamp. For example, *1494497183*
@@ -82,36 +95,50 @@ You can control the name of the Table panel columns by using regular `as ` SQL c

The resulting table panel:

-![](/img/docs/v46/postgres_table.png)
+![postgres table](/img/docs/v46/postgres_table.png)

-### Time series queries
+## Time series queries

-If you set `Format as` to `Time series`, for use in Graph panel for example, then the query must return a column named `time` that returns either a sql datetime or any numeric datatype representing unix epoch in seconds.
+If you set `Format as` to `Time series`, for use in Graph panel for example, then the query must return a column named `time` that returns either a sql datetime or any numeric datatype representing unix epoch.
Any column except `time` and `metric` is treated as a value column.
You may return a column named `metric` that is used as metric name for the value column.

-Example with `metric` column
+**Example with `metric` column:**

```sql
SELECT
-  $__timeGroup(time_date_time,'5m'),
-  min(value_double),
+  $__timeGroup("time_date_time",'5m'),
+  min("value_double"),
  'min' as metric
FROM test_data
-WHERE $__timeFilter(time_date_time)
+WHERE $__timeFilter("time_date_time")
GROUP BY time
ORDER BY time
```

-Example with multiple columns:
+**Example using the fill parameter in the $__timeGroup macro to convert null values to be zero instead:**

```sql
SELECT
-  $__timeGroup(time_date_time,'5m'),
-  min(value_double) as min_value,
-  max(value_double) as max_value
+  $__timeGroup("createdAt",'5m',0),
+  sum(value) as value,
+  measurement
FROM test_data
-WHERE $__timeFilter(time_date_time)
+WHERE
+  $__timeFilter("createdAt")
+GROUP BY time, measurement
+ORDER BY time
+```
+
+**Example with multiple columns:**
+
+```sql
+SELECT
+  $__timeGroup("time_date_time",'5m'),
+  min("value_double") as "min_value",
+  max("value_double") as "max_value"
+FROM test_data
+WHERE $__timeFilter("time_date_time")
GROUP BY time
ORDER BY time
```
@@ -190,26 +217,47 @@ WHERE $__timeFilter(atimestamp) and hostname in([[hostname]])
ORDER BY atimestamp ASC
```

+#### Disabling Quoting for Multi-value Variables
+
+Grafana automatically creates a quoted, comma-separated string for multi-value variables. For example: if `server01` and `server02` are selected then it will be formatted as: `'server01', 'server02'`. To disable quoting, use the csv formatting option for variables:
+
+`${servers:csv}`
+
+Read more about variable formatting options in the [Variables]({{< relref "reference/templating.md#advanced-formatting-options" >}}) documentation.
+
## Annotations

[Annotations]({{< relref "reference/annotations.md" >}}) allow you to overlay rich event information on top of graphs. You add annotation queries via the Dashboard menu / Annotations view.
-An example query:
+**Example query using time column with epoch values:**

```sql
SELECT
-  extract(epoch from time_date_time) AS time,
-  metric1 as text,
+  epoch_time as time,
+  metric1 as text,
  concat_ws(', ', metric1::text, metric2::text) as tags
FROM
  public.test_data
WHERE
-  $__timeFilter(time_date_time)
+  $__unixEpochFilter(epoch_time)
+```
+
+**Example query using time column of native sql date/time data type:**
+
+```sql
+SELECT
+  native_date_time as time,
+  metric1 as text,
+  concat_ws(', ', metric1::text, metric2::text) as tags
+FROM
+  public.test_data
+WHERE
+  $__timeFilter(native_date_time)
```

Name | Description
------------ | -------------
-time | The name of the date/time field.
+time | The name of the date/time field. Could be a column with a native sql date/time data type or epoch value.
text | Event description field.
tags | Optional field name to use for event tags as a comma separated string.
@@ -217,3 +265,24 @@ tags | Optional field name to use for event tags as a comma separated string.

Time series queries should work in alerting conditions. Table formatted queries is not yet supported in alert rule
conditions.
+
+## Configure the Datasource with Provisioning
+
+It's now possible to configure datasources using config files with Grafana's provisioning system. You can read more about how it works and all the settings you can set for datasources on the [provisioning docs page](/administration/provisioning/#datasources).
+
+Here are some provisioning examples for this datasource.
+
+```yaml
+apiVersion: 1
+
+datasources:
+  - name: Postgres
+    type: postgres
+    url: localhost:5432
+    database: grafana
+    user: grafana
+    secureJsonData:
+      password: "Password!"
+    jsonData:
+      sslmode: "disable" # disable/require/verify-ca/verify-full
+```
diff --git a/docs/sources/features/datasources/prometheus.md b/docs/sources/features/datasources/prometheus.md
index c9bb16441ca..4ff0baee108 100644
--- a/docs/sources/features/datasources/prometheus.md
+++ b/docs/sources/features/datasources/prometheus.md
@@ -30,11 +30,11 @@ Name | Description

*Name* | The data source name. This is how you refer to the data source in panels & queries.
*Default* | Default data source means that it will be pre-selected for new panels.
*Url* | The http protocol, ip and port of you Prometheus server (default port is usually 9090)
-*Access* | Proxy = access via Grafana backend, Direct = access directly from browser.
+*Access* | Server (default) = URL needs to be accessible from the Grafana backend/server, Browser = URL needs to be accessible from the browser.
*Basic Auth* | Enable basic authentication to the Prometheus data source.
*User* | Name of your Prometheus user
*Password* | Database user's password
-*Scrape interval* | This will be used as a lower limit for the Prometheus step query parameter. Default value is 15s.
+*Scrape interval* | This will be used as a lower limit for the Prometheus step query parameter. Default value is 15s.

## Query editor
@@ -50,7 +50,7 @@ Name | Description

*Min step* | Set a lower limit for the Prometheus step option. Step controls how big the jumps are when the Prometheus query engine performs range queries. Sadly there is no official prometheus documentation to link to for this very important option.
*Resolution* | Controls the step option. Small steps create high-resolution graphs but can be slow over larger time ranges, lowering the resolution can speed things up. `1/2` will try to set step option to generate 1 data point for every other pixel.
A value of `1/10` will try to set step option so there is a data point every 10 pixels.
*Metric lookup* | Search for metric names in this input field.
-*Format as* | **(New in v4.3)** Switch between Table & Time series. Table format will only work in the Table panel.
+*Format as* | Switch between Table, Time series or Heatmap. Table format will only work in the Table panel. Heatmap format is suitable for displaying metrics of histogram type on the Heatmap panel. Under the hood, it converts the cumulative histogram to a regular one and sorts the series by the bucket bound.

## Templating
@@ -100,3 +100,19 @@ The step option is useful to limit the number of events returned from your query

## Getting Grafana metrics into Prometheus

Since 4.6.0 Grafana exposes metrics for Prometheus on the `/metrics` endpoint. We also bundle a dashboard within Grafana so you can get started viewing your metrics faster. You can import the bundled dashboard by going to the data source edit page and click the dashboard tab. There you can find a dashboard for Grafana and one for Prometheus. Import and start viewing all the metrics!
+
+## Configure the Datasource with Provisioning
+
+It's now possible to configure datasources using config files with Grafana's provisioning system. You can read more about how it works and all the settings you can set for datasources on the [provisioning docs page](/administration/provisioning/#datasources).
+
+Here are some provisioning examples for this datasource.
+
+```yaml
+apiVersion: 1
+
+datasources:
+  - name: Prometheus
+    type: prometheus
+    access: proxy
+    url: http://localhost:9090
+```
diff --git a/docs/sources/features/panels/alertlist.md b/docs/sources/features/panels/alertlist.md
index 9307bb71391..58aa2c0966a 100644
--- a/docs/sources/features/panels/alertlist.md
+++ b/docs/sources/features/panels/alertlist.md
@@ -14,7 +14,7 @@ weight = 4

{{< docs-imagebox img="/img/docs/v45/alert-list-panel.png" max-width="850px" >}}

-The alert list panel allows you to display your dashbords alerts. The list can be configured to show current state or recent state changes. You can read more about alerts [here](http://docs.grafana.org/alerting/rules).
+The alert list panel allows you to display your dashboards' alerts. The list can be configured to show current state or recent state changes. You can read more about alerts [here](http://docs.grafana.org/alerting/rules).

## Alert List Options
diff --git a/docs/sources/features/panels/dashlist.md b/docs/sources/features/panels/dashlist.md
index 8a4ed60875d..2ee578c5b7e 100644
--- a/docs/sources/features/panels/dashlist.md
+++ b/docs/sources/features/panels/dashlist.md
@@ -25,7 +25,7 @@ The dashboard list panel allows you to display dynamic links to other dashboards

1. **Starred**: The starred dashboard selection displays starred dashboards in alphabetical order.
2. **Recently Viewed**: The recently viewed dashboard selection displays recently viewed dashboards in alphabetical order.
3. **Search**: The search dashboard selection displays dashboards by search query or tag(s).
-4. **Show Headings**: When show headings is ticked the choosen list selection(Starred, Recently Viewed, Search) is shown as a heading.
+4. **Show Headings**: When show headings is ticked the chosen list selection (Starred, Recently Viewed, Search) is shown as a heading.
5. **Max Items**: Max items set the maximum of items in a list.
6. **Query**: Here is where you enter your query you want to search by. Queries are case-insensitive, and partial values are accepted.
7.
**Tags**: Here is where you enter your tag(s) you want to search by. Note that existing tags will not appear as you type, and *are* case sensitive. To see a list of existing tags, you can always return to the dashboard, open the Dashboard Picker at the top and click `tags` link in the search bar.
diff --git a/docs/sources/features/panels/graph.md b/docs/sources/features/panels/graph.md
index c3b0260c98b..5a010ceca40 100644
--- a/docs/sources/features/panels/graph.md
+++ b/docs/sources/features/panels/graph.md
@@ -22,15 +22,18 @@ options for the panel.

## General

-{{< docs-imagebox img="/img/docs/v43/graph_general.png" max-width= "900px" >}}
+{{< docs-imagebox img="/img/docs/v51/graph_general.png" max-width= "800px" >}}

The general tab allows customization of a panel's appearance and menu options.

-### General Options
+### Info

-- **Title** - The panel title on the dashboard
-- **Span** - The panel width in columns
-- **Height** - The panel contents height in pixels
+- **Title** - The panel title of the dashboard, displayed at the top.
+- **Description** - The panel description, displayed on hover of the info icon in the upper left corner of the panel.
+- **Transparent** - If checked, removes the solid background of the panel (default not checked).
+
+### Repeat
+Repeat a panel for each value of a variable. Repeating panels are described in more detail [here]({{< relref "reference/templating.md#repeating-panels" >}}).

### Drilldown / detail link
@@ -54,47 +57,65 @@ options.

## Axes

-{{< docs-imagebox img="/img/docs/v43/graph_axes_grid_options.png" max-width= "900px" >}}
+{{< docs-imagebox img="/img/docs/v51/graph_axes_grid_options.png" max-width= "800px" >}}

-The Axes tab controls the display of axes, grids and legend. The **Left Y** and **Right Y** can be customized using:
+The Axes tab controls the display of axes.
+
+### Left Y/Right Y
+
+The **Left Y** and **Right Y** can be customized using:

- **Unit** - The display unit for the Y value
-- **Scale** -
+- **Scale** - The scale to use for the Y value, linear or logarithmic. (default linear)
- **Y-Min** - The minimum Y value. (default auto)
- **Y-Max** - The maximum Y value. (default auto)
+- **Decimals** - Controls how many decimals are displayed for Y value (default auto)
- **Label** - The Y axis label (default "")

Axes can also be hidden by unchecking the appropriate box from **Show**.

-### X-Axis Mode
+### X-Axis

-There are three options:
+The axis can be hidden by unchecking **Show**.
+
+For **Mode** there are three options:

- The default option is **Time** and means the x-axis represents time and that the data is grouped by time (for example, by hour or by minute).
- The **Series** option means that the data is grouped by series and not by time. The y-axis still represents the value.

-  {{< docs-imagebox img="/img/docs/v45/graph-x-axis-mode-series.png" max-width="700px">}}
+  {{< docs-imagebox img="/img/docs/v51/graph-x-axis-mode-series.png" max-width="800px">}}

- The **Histogram** option converts the graph into a histogram. A Histogram is a kind of bar chart that groups numbers into ranges, often called buckets or bins. Taller bars show that more data falls in that range. Histograms and buckets are described in more detail [here](http://docs.grafana.org/features/panels/heatmap/#histograms-and-buckets).

-### Legend
-The legend hand be hidden by checking the **Show** checkbox. If it's shown, it can be
-displayed as a table of values by checking the **Table** checkbox.
Series with no
-values can be hidden from the legend using the **Hide empty** checkbox.

-### Legend Values
+### Y-Axes
+
+- **Align** - Check to align left and right Y-axes by value (default unchecked/false)
+- **Level** - Available when *Align* is checked. Value to use for alignment of left and right Y-axes, starting from Y=0 (default 0)
+
+## Legend
+
+{{< docs-imagebox img="/img/docs/v51/graph-legend.png" max-width= "800px" >}}
+
+### Options
+
+- **Show** - Uncheck to hide the legend (default checked/true)
+- **Table** - Check to display legend in table (default unchecked/false)
+- **To the right** - Check to display legend to the right (default unchecked/false)
+- **Width** - Available when *To the right* is checked. Value to control the minimum width for the legend (default 0)
+
+### Values

Additional values can be shown along-side the legend names:

-- **Total** - Sum of all values returned from metric query
-- **Current** - Last value returned from the metric query
- **Min** - Minimum of all values returned from metric query
- **Max** - Maximum of all values returned from the metric query
- **Avg** - Average of all values returned from metric query
+- **Current** - Last value returned from the metric query
+- **Total** - Sum of all values returned from metric query
- **Decimals** - Controls how many decimals are displayed for legend values (and graph hover tooltips)

The legend values are calculated client side by Grafana and depend on what type of
@@ -103,63 +124,72 @@ be correct at the same time. For example if you plot a rate like requests/second using average as aggregator, then the Total in the legend will not represent the total number of requests. It is just the sum of all data points received by Grafana.

+### Hide series
+
+Hide series when all values of a series from a metric query are of a specific value:
+
+- **With only nulls** - Value=*null* (default unchecked)
+- **With only zeros** - Value=*zero* (default unchecked)
+
## Display styles

-{{< docs-imagebox img="/img/docs/v43/graph_display_styles.png" max-width= "900px" >}}
+{{< docs-imagebox img="/img/docs/v51/graph_display_styles.png" max-width= "800px" >}}

Display styles control visual properties of the graph.

-### Thresholds
+### Draw Options

-Thresholds allow you to add arbitrary lines or sections to the graph to make it easier to see when
-the graph crosses a particular threshold.
-
-
-### Chart Options
+#### Draw Modes

- **Bar** - Display values as a bar chart
- **Lines** - Display values as a line graph
- **Points** - Display points for values

-### Line Options
+#### Mode Options

-- **Line Fill** - Amount of color fill for a series. 0 is none.
-- **Line Width** - The width of the line for a series.
-- **Null point mode** - How null values are displayed
-- **Staircase line** - Draws adjacent points as staircase
+- **Fill** - Amount of color fill for a series (default 1). 0 is none.
+- **Line Width** - The width of the line for a series (default 1).
+- **Staircase** - Draws adjacent points as staircase
+- **Points Radius** - Adjust the size of points when *Points* are selected as *Draw Mode*.

-### Multiple Series
+#### Hover tooltip
+
+- **Mode** - Controls how many series to display in the tooltip when hovering over a point in time, All series or single (default All series).
+- **Sort order** - Controls how series displayed in the tooltip are sorted: None, Ascending or Descending (default None).
+
+**Stacked value** - Available when *Stack* is checked and controls how stacked values are displayed in the tooltip (default Individual).
+  - Individual - the value for the series you hover over
+  - Cumulative - sum of series below plus the series you hover over
+
+#### Stacking & Null value

If there are multiple series, they can be displayed as a group.

- **Stack** - Each series is stacked on top of another
-- **Percent** - Each series is drawn as a percentage of the total of all series
+- **Percent** - Available when *Stack* is checked. Each series is drawn as a percentage of the total of all series
+- **Null value** - How null values are displayed

-If you have stack enabled, you can select what the mouse hover feature should show.
+### Series overrides

-- Cumulative - Sum of series below plus the series you hover over
-- Individual - Just the value for the series you hover over
+{{< docs-imagebox img="/img/docs/v51/graph_display_overrides.png" max-width= "800px" >}}

-### Rendering
-
-- **Flot** - Render the graphs in the browser using Flot (default)
-- **Graphite PNG** - Render the graph on the server using graphite's render API.
-
-### Tooltip
-
-- **All series** - Show all series on the same tooltip and a x crosshairs to help follow all series
-
-### Series Specific Overrides

The section allows a series to be rendered differently from the others. For example, one series can be given
-a thicker line width to make it stand out.
+a thicker line width to make it stand out and/or be moved to the right Y-axis.

#### Dashes Drawing Style

There is an option under Series overrides to draw lines as dashes. Set Dashes to the value True to override the line draw setting for a specific series.

+### Thresholds
+
+{{< docs-imagebox img="/img/docs/v51/graph_display_thresholds.png" max-width= "800px" >}}
+
+Thresholds allow you to add arbitrary lines or sections to the graph to make it easier to see when
+the graph crosses a particular threshold.
+
## Time Range

-The time range tab allows you to override the dashboard time range and specify a panel specific time. Either through a relative from now time option or through a timeshift.
+{{< docs-imagebox img="/img/docs/v51/graph-time-range.png" max-width= "900px" >}}

-{{< docs-imagebox img="/img/docs/v45/graph-time-range.png" max-width= "900px" >}}
+The time range tab allows you to override the dashboard time range and specify a panel specific time,
+either through a relative from now time option or through a timeshift.
+Panel time overrides & timeshift are described in more detail [here]({{< relref "reference/timerange.md#panel-time-overrides-timeshift" >}}).
diff --git a/docs/sources/features/panels/heatmap.md b/docs/sources/features/panels/heatmap.md
index e44527f8695..56ffe29f20f 100644
--- a/docs/sources/features/panels/heatmap.md
+++ b/docs/sources/features/panels/heatmap.md
@@ -56,26 +56,39 @@ Data and bucket options can be found in the `Axes` tab.

Data format | Description
------------ | -------------
*Time series* | Grafana does the bucketing by going through all time series values. The bucket sizes & intervals will be determined using the Buckets options.
-*Time series buckets* | Each time series already represents a Y-Axis bucket. The time series name (alias) needs to be a numeric value representing the upper interval for the bucket. Grafana does no bucketing so the bucket size options are hidden.
+*Time series buckets* | Each time series already represents a Y-Axis bucket.
The time series name (alias) needs to be a numeric value representing the upper or lower interval for the bucket. Grafana does no bucketing so the bucket size options are hidden.
+
+### Bucket bound
+
+When the Data format is *Time series buckets*, the data source returns series with names representing a bucket bound. But depending
+on the data source, the bound may be *upper* or *lower*. This option allows you to adjust the bound type. If *Auto* is set, the bound
+option will be chosen based on the panel's data source type.

### Bucket Size

The Bucket count & size options are used by Grafana to calculate how big each cell in the heatmap is. You can
define the bucket size either by count (the first input box) or by specifying a size interval. For the Y-Axis the
size interval is just a value but for the X-bucket you can specify a time range in the *Size* input, for example,
-the time range `1h`.  This will make the cells 1h wide on the X-axis.
+the time range `1h`. This will make the cells 1h wide on the X-axis.

### Pre-bucketed data

-If you have a data that is already organized into buckets you can use the `Time series buckets` data format. This format requires that your metric query return regular time series and that each time series has a numeric name
-that represent the upper or lower bound of the interval.
+If you have data that is already organized into buckets you can use the `Time series buckets` data format. This format
+requires that your metric query return regular time series and that each time series has a numeric name that represents
+the upper or lower bound of the interval.

-The only data source that supports histograms over time is Elasticsearch. You do this by adding a *Histogram*
-bucket aggregation before the *Date Histogram*.
+There are a number of datasources supporting histograms over time, like Elasticsearch (by using a Histogram bucket
+aggregation) or Prometheus (with the [histogram](https://prometheus.io/docs/concepts/metric_types/#histogram) metric type
+and the *Format as* option set to Heatmap). But generally, any datasource could be used if it meets the requirements:
+it returns series with names representing a bucket bound, or returns series sorted by the bound in ascending order.

-![](/img/docs/v43/elastic_histogram.png)
+With Elasticsearch you control the size of the buckets using the Histogram interval (Y-Axis) and the Date Histogram interval (X-axis).

-You control the size of the buckets using the Histogram interval (Y-Axis) and the Date Histogram interval (X-axis).
+![Elastic histogram](/img/docs/v43/elastic_histogram.png)
+
+With Prometheus you can only control the X-axis by adjusting the *Min step* and *Resolution* options.
+
+![Prometheus histogram](/img/docs/v51/prometheus_histogram.png)

## Display Options
@@ -100,8 +113,8 @@ but include a group by time interval or maxDataPoints limit coupled with an aggr

This all depends on the time range of your query of course. But the important point is to know that the Histogram bucketing
that Grafana performs may be done on already aggregated and averaged data. To get more accurate heatmaps it is better
-to do the bucketing during metric collection or store the data in Elasticsearch, which currently is the only data source
-data supports doing Histogram bucketing on the raw data.
+to do the bucketing during metric collection or store the data in Elasticsearch, or in another data source that
+supports doing histogram bucketing on the raw data (see the sketch below for how a SQL data source could return pre-bucketed series).
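+
+Tying this back to the *Pre-bucketed data* format above, here is a sketch of how one of the SQL data sources described earlier could return pre-bucketed series, using a precomputed bucket bound as the metric name. The `response_times` table and its numeric `bucket` column (holding the upper bound of each observation's bucket) are hypothetical:
+
+```sql
+SELECT
+  $__timeGroup(created_at, '1m') as time,
+  cast(bucket as varchar) as metric,
+  count(*)
+FROM response_times
+WHERE $__timeFilter(created_at)
+GROUP BY $__timeGroup(created_at, '1m'), bucket
+ORDER BY 1
+```
+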
If you remove or lower the group by time (or raise maxDataPoints) in your query to return more data points your heatmap will be more accurate but this can also be very CPU & Memory taxing for your browser and could cause hangs and crashes if the number of diff --git a/docs/sources/features/panels/singlestat.md b/docs/sources/features/panels/singlestat.md index 510642337ff..e16f182f9cd 100644 --- a/docs/sources/features/panels/singlestat.md +++ b/docs/sources/features/panels/singlestat.md @@ -30,7 +30,7 @@ The singlestat panel has a normal query editor to allow you define your exact me * **total** - The sum of all the non-null values in the series * **first** - The first value in the series * **delta** - The total incremental increase (of a counter) in the series. An attempt is made to account for counter resets, but this will only be accurate for single instance metrics. Used to show total counter increase in time series. - * **diff** - The difference betwen 'current' (last value) and 'first'. + * **diff** - The difference between 'current' (last value) and 'first'. * **range** - The difference between 'min' and 'max'. Useful the show the range of change for a gauge. 2. **Prefix/Postfix**: The Prefix/Postfix fields let you define a custom label to appear *before/after* the value. The `$__name` variable can be used here to use the series name or alias from the metric query. 3. **Units**: Units are appended to the the Singlestat within the panel, and will respect the color and threshold settings for the value. @@ -70,18 +70,18 @@ Gauges gives a clear picture of how high a value is in it's context. It's a grea {{< docs-imagebox img="/img/docs/v45/singlestat-gauge-options.png" max-width="500px" class="docs-image--right docs-image--no-shadow">}} -1. **Show**: The show checkbox will toggle wether the gauge is shown in the panel. When unselected, only the Singlestat value will appear. +1. **Show**: The show checkbox will toggle whether the gauge is shown in the panel. When unselected, only the Singlestat value will appear. 2. **Min/Max**: This sets the start and end point for the gauge. 3. **Threshold Labels**: Check if you want to show the threshold labels. Thresholds are set in the color options. 4. **Threshold Markers**: Check if you want to have a second meter showing the thresholds.
-### Value to text mapping +### Value/Range to text mapping {{< docs-imagebox img="/img/docs/v45/singlestat-value-mapping.png" class="docs-image--right docs-image--no-shadow">}} -Value to text mapping allows you to translate the value of the summary stat into explicit text. The text will respect all styling, thresholds and customization defined for the value. This can be useful to translate the number of the main Singlestat value into a context-specific human-readable word or message. +Value/Range to text mapping allows you to translate the value of the summary stat into explicit text. The text will respect all styling, thresholds and customization defined for the value. This can be useful to translate the number of the main Singlestat value into a context-specific human-readable word or message.
diff --git a/docs/sources/features/panels/table_panel.md b/docs/sources/features/panels/table_panel.md index a3e56c72960..32f7764e415 100644 --- a/docs/sources/features/panels/table_panel.md +++ b/docs/sources/features/panels/table_panel.md @@ -97,3 +97,14 @@ The column styles allow you control how dates and numbers are formatted. 4. **Thresholds and Coloring**: Specify color mode and thresholds limits. 5. **Type**: The three supported types of types are **Number**, **String** and **Date**. **Unit** and **Decimals**: Specify unit and decimal precision for numbers. **Format**: Specify date format for dates. + +### String +#### Value/Range to text mapping + +> Only available in Grafana v5.1+. + +{{< docs-imagebox img="/img/docs/v51/table-value-mapping.png" class="docs-image--right docs-image--no-shadow">}} + +Value/range to text mapping allows you to translate numeric values into explicit text. The text will respect all styling, thresholds and customization defined for the value. This can be useful to translate the numeric values into a context-specific human-readable word or message. + +
diff --git a/docs/sources/guides/whats-new-in-v2-6.md b/docs/sources/guides/whats-new-in-v2-6.md index b8996680ce6..1e6f30c597b 100644 --- a/docs/sources/guides/whats-new-in-v2-6.md +++ b/docs/sources/guides/whats-new-in-v2-6.md @@ -15,7 +15,7 @@ support for multiple Cloudwatch credentials. The new table panel is very flexible, supporting both multiple modes for time series as well as for -table, annotation and raw JSON data. It also provides date formating and value formating and coloring options. +table, annotation and raw JSON data. It also provides date formatting and value formatting and coloring options. ### Time series to rows diff --git a/docs/sources/guides/whats-new-in-v4-1.md b/docs/sources/guides/whats-new-in-v4-1.md index bd2b0f1b75f..217b21b545e 100644 --- a/docs/sources/guides/whats-new-in-v4-1.md +++ b/docs/sources/guides/whats-new-in-v4-1.md @@ -33,7 +33,7 @@ You can enable/disable the shared tooltip from the dashboard settings menu or cy {{< imgbox max-width="60%" img="/img/docs/v41/helptext_for_panel_settings.png" caption="Hovering help text" >}} -You can set a help text in the general tab on any panel. The help text is using Markdown to enable better formating and linking to other sites that can provide more information. +You can set a help text in the general tab on any panel. The help text is using Markdown to enable better formatting and linking to other sites that can provide more information.
diff --git a/docs/sources/guides/whats-new-in-v4-5.md b/docs/sources/guides/whats-new-in-v4-5.md index b2de451308a..a5cd3ca982d 100644 --- a/docs/sources/guides/whats-new-in-v4-5.md +++ b/docs/sources/guides/whats-new-in-v4-5.md @@ -12,7 +12,7 @@ weight = -4 # What's New in Grafana v4.5 -## Hightlights +## Highlights ### New prometheus query editor @@ -62,7 +62,7 @@ Datas source selection & options & help are now above your metric queries. ### Minor Changes * **InfluxDB**: Change time range filter for absolute time ranges to be inclusive instead of exclusive [#8319](https://github.com/grafana/grafana/issues/8319), thx [@Oxydros](https://github.com/Oxydros) -* **InfluxDB**: Added paranthesis around tag filters in queries [#9131](https://github.com/grafana/grafana/pull/9131) +* **InfluxDB**: Added parenthesis around tag filters in queries [#9131](https://github.com/grafana/grafana/pull/9131) ## Bug Fixes diff --git a/docs/sources/guides/whats-new-in-v4-6.md b/docs/sources/guides/whats-new-in-v4-6.md index fd75384761f..09955fa58cc 100644 --- a/docs/sources/guides/whats-new-in-v4-6.md +++ b/docs/sources/guides/whats-new-in-v4-6.md @@ -45,7 +45,7 @@ This makes exploring and filtering Prometheus data much easier. * **GCS**: Adds support for Google Cloud Storage [#8370](https://github.com/grafana/grafana/issues/8370) thx [@chuhlomin](https://github.com/chuhlomin) * **Prometheus**: Adds /metrics endpoint for exposing Grafana metrics. [#9187](https://github.com/grafana/grafana/pull/9187) -* **Graph**: Add support for local formating in axis. [#1395](https://github.com/grafana/grafana/issues/1395), thx [@m0nhawk](https://github.com/m0nhawk) +* **Graph**: Add support for local formatting in axis. [#1395](https://github.com/grafana/grafana/issues/1395), thx [@m0nhawk](https://github.com/m0nhawk) * **Jaeger**: Add support for open tracing using jaeger in Grafana. [#9213](https://github.com/grafana/grafana/pull/9213) * **Unit types**: New date & time unit types added, useful in singlestat to show dates & times. [#3678](https://github.com/grafana/grafana/issues/3678), [#6710](https://github.com/grafana/grafana/issues/6710), [#2764](https://github.com/grafana/grafana/issues/2764) * **CLI**: Make it possible to install plugins from any url [#5873](https://github.com/grafana/grafana/issues/5873) diff --git a/docs/sources/guides/whats-new-in-v5-1.md b/docs/sources/guides/whats-new-in-v5-1.md new file mode 100644 index 00000000000..d992fd9062a --- /dev/null +++ b/docs/sources/guides/whats-new-in-v5-1.md @@ -0,0 +1,125 @@ ++++ +title = "What's New in Grafana v5.1" +description = "Feature & improvement highlights for Grafana v5.1" +keywords = ["grafana", "new", "documentation", "5.1"] +type = "docs" +[menu.docs] +name = "Version 5.1" +identifier = "v5.1" +parent = "whatsnew" +weight = -7 ++++ + +# What's New in Grafana v5.1 + +Grafana v5.1 brings new features, many enhancements and bug fixes. This article will detail the major new features and enhancements. + +* [Improved scrolling experience]({{< relref "#improved-scrolling-experience" >}}) +* [Improved docker image]({{< relref "#improved-docker-image-breaking-change" >}}) with a breaking change! +* [Heatmap support for Prometheus]({{< relref "#prometheus" >}}) +* [Microsoft SQL Server]({{< relref "#microsoft-sql-server" >}}) as metric & table datasource! +* [Dashboards & Panels]({{< relref "#dashboards-panels" >}}) Improved adding panels to dashboards and enhancements to Graph and Table panels. 
+* [New variable interpolation syntax]({{< relref "#new-variable-interpolation-syntax" >}})
+* [Improved workflow for provisioned dashboards]({{< relref "#improved-workflow-for-provisioned-dashboards" >}})
+
+## Improved scrolling experience
+
+In Grafana v5.0 we introduced a new scrollbar component. Unfortunately it introduced a lot of issues and in some scenarios removed
+the native scrolling functionality. Grafana v5.1 ships with a native scrollbar for all pages, together with a scrollbar component for
+the dashboard grid and panels that does not override the native scrolling functionality. We hope these changes and improvements
+make the Grafana user experience much better!
+
+## Improved docker image (breaking change)
+
+Grafana v5.1 brings an improved official docker image which should make it easier to run and use, and at the same time gives you more control over how to run it.
+
+We've switched the id of the grafana user running Grafana inside a docker container. Unfortunately this means that files created prior to 5.1 won't have the correct permissions for later versions, which makes this a breaking change.
+We made this change so that it would be easier for you to control what user Grafana is executed as (see examples below).
+
+Version | User | User ID
+--------|---------|---------
+< 5.1 | grafana | 104
+>= 5.1 | grafana | 472
+
+Please read the [updated documentation](/installation/docker/#migration-from-a-previous-version-of-the-docker-container-to-5-1-or-later) which includes migration instructions and more information.
+
+## Prometheus
+
+{{< docs-imagebox img="/img/docs/v51/prometheus_heatmap.png" max-width="800px" class="docs-image--right" >}}
+
+The Prometheus datasource now supports transforming Prometheus histograms to the heatmap panel. Prometheus histograms are a powerful feature, and we're
+really happy to finally allow our users to render them as heatmaps. Please read the [Heatmap panel documentation](/features/panels/heatmap/#pre-bucketed-data)
+for more information on how to use it.
+
+The Prometheus query editor also got support for autocomplete of template variables. More information in the [Prometheus data source documentation](/features/datasources/prometheus/).
+
+<div class="clearfix"></div>
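+
+As a quick sanity check before pointing a heatmap at your data, you can ask Prometheus directly for the per-bucket series a histogram exposes. This is only a sketch: the metric name below is an assumption, so substitute one of your own histogram metrics.
+
+```bash
+# Per-bucket rates grouped by the "le" label - the shape the heatmap panel expects.
+# Assumes Prometheus is reachable on localhost:9090 and that an
+# http_request_duration_seconds histogram exists in your setup.
+curl -s 'http://localhost:9090/api/v1/query' \
+  --data-urlencode 'query=sum(rate(http_request_duration_seconds_bucket[5m])) by (le)'
+```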
+
+## Microsoft SQL Server
+
+{{< docs-imagebox img="/img/docs/v51/mssql_query_editor_showcase.png" max-width= "800px" class="docs-image--right" >}}
+
+Grafana v5.1 now ships with a built-in Microsoft SQL Server (MSSQL) data source plugin that allows you to query and visualize data from any
+Microsoft SQL Server 2005 or newer, including Microsoft Azure SQL Database. Do you have metric or log data in MSSQL? You can now visualize
+that data and define alert rules on it like with any of Grafana's other core datasources.
+
+Please read the [Using Microsoft SQL Server in Grafana](/features/datasources/mssql/) documentation for more detailed information on how to get started and use it.
+
+<div class="clearfix"></div>
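+
+As a rough sketch, you could also add an MSSQL datasource programmatically through the datasource HTTP API (documented elsewhere in these docs). The field values below are placeholders, and the exact field layout for MSSQL is an assumption; adjust to your SQL Server instance.
+
+```bash
+# Assumes Grafana on localhost:3000 with the default admin:admin credentials.
+curl -s -X POST http://admin:admin@localhost:3000/api/datasources \
+  -H 'Content-Type: application/json' \
+  -d '{
+        "name": "mssql-example",
+        "type": "mssql",
+        "access": "proxy",
+        "url": "localhost:1433",
+        "database": "grafana",
+        "user": "grafana",
+        "secureJsonData": { "password": "Password!" }
+      }'
+```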
+
+## Dashboards & Panels
+
+### Adding new panels to dashboards
+
+{{< docs-imagebox img="/img/docs/v51/dashboard_add_panel.png" max-width= "800px" class="docs-image--right" >}}
+
+The control for adding new panels to dashboards has received some enhancements and now includes functionality to search for the type of panel
+you want to add. Further, the control has tabs separating functionality for adding new panels and pasting
+copied panels.
+
+When you copy a panel in a dashboard, it will be displayed in the `Paste` tab in *any* dashboard, allowing you to paste the
+copied panel into the current dashboard.
+
+{{< docs-imagebox img="/img/docs/v51/dashboard_panel_copy.png" max-width= "300px" >}}
+
+<div class="clearfix"></div>
+
+### Graph Panel
+
+New enhancements include support for multiple series stacking in histogram mode, thresholds for the right Y-axis, aligning left and right Y-axes to one level and additional units. More information in the [Graph panel documentation](/features/panels/graph/).
+
+### Table Panel
+
+New enhancements include support for mapping a numeric value/range to text and additional units. More information in the [Table panel documentation](/features/panels/table_panel/#string).
+
+## New variable interpolation syntax
+
+We now support a new option for rendering variables that gives the user full control of how the value(s) should be rendered.
+In the table below you can see some examples and you can find all the different options in the [Variables documentation](http://docs.grafana.org/reference/templating/#advanced-formatting-options).
+
+Filter Option | Example | Raw | Interpolated | Description
+------------ | ------------- | ------------- | ------------- | -------------
+`glob` | ${servers:glob} | `'test1', 'test2'` | `{test1,test2}` | Formats multi-value variable into a glob
+`regex` | ${servers:regex} | `'test.', 'test2'` | `(test\.\|test2)` | Formats multi-value variable into a regex string
+`pipe` | ${servers:pipe} | `'test.', 'test2'` | `test.\|test2` | Formats multi-value variable into a pipe-separated string
+`csv`| ${servers:csv} | `'test1', 'test2'` | `test1,test2` | Formats multi-value variable as a comma-separated string
+
+## Improved workflow for provisioned dashboards
+
+{{< docs-imagebox img="/img/docs/v51/provisioning_cannot_save_dashboard.png" max-width="800px" class="docs-image--right" >}}
+
+Grafana v5.1 brings an improved workflow for provisioned dashboards:
+
+* A populated `id` property in JSON is now automatically removed when provisioning dashboards.
+* When making changes to a provisioned dashboard you can `Save` the dashboard, which will now bring up a *Cannot save provisioned dashboard* dialog as seen in the screenshot to the right.
+
+Available options in the dialog let you `Copy JSON to Clipboard` and/or `Save JSON to file`, which can help you synchronize your dashboard changes back to the provisioning source.
+More information in the [Provisioning documentation](/administration/provisioning/).
+
+<div class="clearfix"></div>
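+
+For reference, here is a minimal sketch of a file-based dashboard provisioning config that such exported JSON could be synced back into. The paths are assumptions; adjust them to your setup.
+
+```bash
+# Write a dashboard provider config; Grafana loads all dashboard JSON files
+# it finds under the configured path.
+cat > /etc/grafana/provisioning/dashboards/local.yaml <<'EOF'
+apiVersion: 1
+providers:
+  - name: 'local-dashboards'
+    orgId: 1
+    folder: ''
+    type: file
+    options:
+      path: /var/lib/grafana/dashboards
+EOF
+```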
+
+## Changelog
+
+Check out the [CHANGELOG.md](https://github.com/grafana/grafana/blob/master/CHANGELOG.md) file for a complete list
+of new features, changes, and bug fixes.
diff --git a/docs/sources/http_api/annotations.md b/docs/sources/http_api/annotations.md
index 19c2a5c386c..6633714d77b 100644
--- a/docs/sources/http_api/annotations.md
+++ b/docs/sources/http_api/annotations.md
@@ -32,10 +32,12 @@ Query Parameters:

 - `from`: epoch datetime in milliseconds. Optional.
 - `to`: epoch datetime in milliseconds. Optional.
-- `limit`: number. Optional - default is 10. Max limit for results returned.
+- `limit`: number. Optional - default is 100. Max limit for results returned.
 - `alertId`: number. Optional. Find annotations for a specified alert.
 - `dashboardId`: number. Optional. Find annotations that are scoped to a specific dashboard
 - `panelId`: number. Optional. Find annotations that are scoped to a specific panel
+- `userId`: number. Optional. Find annotations created by a specific user
+- `type`: string. Optional. `alert`|`annotation` Return alerts or user-created annotations
 - `tags`: string. Optional. Use this to filter global annotations. Global annotations are annotations from an annotation data source that are not connected specifically to a dashboard or panel. To do an "AND" filtering with multiple tags, specify the tags parameter multiple times e.g. `tags=tag1&tags=tag2`.

 **Example Response**:
@@ -180,14 +182,14 @@ Content-Type: application/json

 ## Delete Annotation By Id

-`DELETE /api/annotation/:id`
+`DELETE /api/annotations/:id`

 Deletes the annotation that matches the specified id.

 **Example Request**:

 ```http
-DELETE /api/annotation/1 HTTP/1.1
+DELETE /api/annotations/1 HTTP/1.1
 Accept: application/json
 Content-Type: application/json
 Authorization: Bearer eyJrIjoiT0tTcG1pUlY2RnVKZTFVaDFsNFZXdE9ZWmNrMkZYbk
@@ -204,14 +206,14 @@ Content-Type: application/json

 ## Delete Annotation By RegionId

-`DELETE /api/annotation/region/:id`
+`DELETE /api/annotations/region/:id`

 Deletes the annotation that matches the specified region id. A region is an annotation that covers a timerange and has a start and end time. In the Grafana database, this is a stored as two annotations connected by a region id.

 **Example Request**:

 ```http
-DELETE /api/annotation/region/1 HTTP/1.1
+DELETE /api/annotations/region/1 HTTP/1.1
 Accept: application/json
 Content-Type: application/json
 Authorization: Bearer eyJrIjoiT0tTcG1pUlY2RnVKZTFVaDFsNFZXdE9ZWmNrMkZYbk
diff --git a/docs/sources/http_api/data_source.md b/docs/sources/http_api/data_source.md
index 364b55b0cfc..9aaf29ec5f4 100644
--- a/docs/sources/http_api/data_source.md
+++ b/docs/sources/http_api/data_source.md
@@ -188,8 +188,8 @@ Authorization: Bearer eyJrIjoiT0tTcG1pUlY2RnVKZTFVaDFsNFZXdE9ZWmNrMkZYbk
       "defaultRegion": "us-west-1"
     },
     "secureJsonData": {
-      "accessKey": "Ol4pIDpeKSA6XikgOl4p",
-      "secretKey": "dGVzdCBrZXkgYmxlYXNlIGRvbid0IHN0ZWFs"
+      "accessKey": "Ol4pIDpeKSA6XikgOl4p", // should not be encoded
+      "secretKey": "dGVzdCBrZXkgYmxlYXNlIGRvbid0IHN0ZWFs" // should be Base-64 encoded
     }
   }
 ```
diff --git a/docs/sources/http_api/org.md b/docs/sources/http_api/org.md
index 4c1dff904c8..b9a15450786 100644
--- a/docs/sources/http_api/org.md
+++ b/docs/sources/http_api/org.md
@@ -307,7 +307,7 @@ Content-Type: application/json

 `PUT /api/orgs/:orgId`

-Update Organisation, fields *Adress 1*, *Adress 2*, *City* are not implemented yet.
+Update Organisation, fields *Address 1*, *Address 2*, *City* are not implemented yet.
**Example Request**:
@@ -436,4 +436,4 @@ HTTP/1.1 200
 Content-Type: application/json

 {"message":"User removed from organization"}
-``` \ No newline at end of file
+```
diff --git a/docs/sources/installation/behind_proxy.md b/docs/sources/installation/behind_proxy.md
index f1a00a5b1cc..89711aecb46 100644
--- a/docs/sources/installation/behind_proxy.md
+++ b/docs/sources/installation/behind_proxy.md
@@ -53,7 +53,7 @@ server {
 ```bash
 [server]
 domain = foo.bar
-root_url = %(protocol)s://%(domain)s:/grafana
+root_url = %(protocol)s://%(domain)s/grafana/
 ```

 #### Nginx configuration with sub path
@@ -98,7 +98,7 @@ Given:
 ```bash
 [server]
 domain = localhost:8080
-root_url = %(protocol)s://%(domain)s:/grafana
+root_url = %(protocol)s://%(domain)s/grafana/
 ```

 Create an Inbound Rule for the parent website (localhost:8080 in this example) in IIS Manager with the following settings:
diff --git a/docs/sources/installation/configuration.md b/docs/sources/installation/configuration.md
index 6169280b798..baec76df5d9 100644
--- a/docs/sources/installation/configuration.md
+++ b/docs/sources/installation/configuration.md
@@ -482,7 +482,7 @@ Set api_url to the resource that returns [OpenID UserInfo](https://connect2id.co

 First set up Grafana as an OpenId client "webapplication" in Okta. Then set the Base URIs to `https:///` and set the Login redirect URIs to `https:///login/generic_oauth`.

-Finaly set up the generic oauth module like this:
+Finally set up the generic oauth module like this:
 ```bash
 [auth.generic_oauth]
 name = Okta
@@ -659,6 +659,10 @@ Set to `true` to enable auto sign up of users who do not exist in Grafana DB. De

 Limit where auth proxy requests come from by configuring a list of IP addresses. This can be used to prevent users spoofing the X-WEBAUTH-USER header.

+### headers
+
+Used to define additional headers for `Name`, `Email` and/or `Login`. For example, if the user's name is sent in the X-WEBAUTH-NAME header and their email address in the X-WEBAUTH-EMAIL header, set `headers = Name:X-WEBAUTH-NAME Email:X-WEBAUTH-EMAIL`.
+
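+As a rough sketch, a complete proxy-auth section using this setting could look like the following (the header names are examples and need to match whatever your reverse proxy actually sends):
+
+```bash
+[auth.proxy]
+enabled = true
+header_name = X-WEBAUTH-USER
+header_property = username
+auto_sign_up = true
+# pick up the user's display name and email from additional headers
+headers = Name:X-WEBAUTH-NAME Email:X-WEBAUTH-EMAIL
+```
+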
## [session] diff --git a/docs/sources/installation/debian.md b/docs/sources/installation/debian.md index d4d3b05343a..83d26351295 100644 --- a/docs/sources/installation/debian.md +++ b/docs/sources/installation/debian.md @@ -15,7 +15,10 @@ weight = 1 Description | Download ------------ | ------------- -Stable for Debian-based Linux | [grafana_5.0.3_amd64.deb](https://s3-us-west-2.amazonaws.com/grafana-releases/release/grafana_5.0.3_amd64.deb) +Stable for Debian-based Linux | [grafana_5.1.1_amd64.deb](https://s3-us-west-2.amazonaws.com/grafana-releases/release/grafana_5.1.1_amd64.deb) + Read [Upgrading Grafana]({{< relref "installation/upgrading.md" >}}) for tips and guidance on updating an existing installation. @@ -24,17 +27,24 @@ installation. ```bash -wget https://s3-us-west-2.amazonaws.com/grafana-releases/release/grafana_5.0.3_amd64.deb +wget https://s3-us-west-2.amazonaws.com/grafana-releases/release/grafana_5.1.1_amd64.deb sudo apt-get install -y adduser libfontconfig -sudo dpkg -i grafana_5.0.3_amd64.deb +sudo dpkg -i grafana_5.1.1_amd64.deb ``` + + ## APT Repository Add the following line to your `/etc/apt/sources.list` file. ```bash -deb https://packagecloud.io/grafana/stable/debian/ jessie main +deb https://packagecloud.io/grafana/stable/debian/ stretch main ``` Use the above line even if you are on Ubuntu or another Debian version. @@ -42,7 +52,7 @@ There is also a testing repository if you want beta or release candidates. ```bash -deb https://packagecloud.io/grafana/testing/debian/ jessie main +deb https://packagecloud.io/grafana/testing/debian/ stretch main ``` Then add the [Package Cloud](https://packagecloud.io/grafana) key. This diff --git a/docs/sources/installation/docker.md b/docs/sources/installation/docker.md index 3ca5ba06638..e78796845c4 100644 --- a/docs/sources/installation/docker.md +++ b/docs/sources/installation/docker.md @@ -12,34 +12,12 @@ weight = 4 # Installing using Docker -Grafana is very easy to install and run using the offical docker container. +Grafana is very easy to install and run using the official docker container. ```bash $ docker run -d -p 3000:3000 grafana/grafana ``` -All Grafana configuration settings can be defined using environment -variables, this is especially useful when using the above container. - -## Docker volumes & ENV config - -The Docker container exposes two volumes, the sqlite3 database in the -folder `/var/lib/grafana` and configuration files is in `/etc/grafana/` -folder. You can map these volumes to host folders when you start the -container: - -```bash -$ docker run -d -p 3000:3000 \ - -v /var/lib/grafana:/var/lib/grafana \ - -e "GF_SECURITY_ADMIN_PASSWORD=secret" \ - grafana/grafana -``` - -In the above example I map the data folder and sets a configuration option via -an `ENV` instruction. - -See the [docker volumes documentation](https://docs.docker.com/engine/admin/volumes/volumes/) if you want to create a volume to use with the Grafana docker image instead of a bind mount (binding to a directory in the host system). - ## Configuration All options defined in conf/grafana.ini can be overridden using environment @@ -56,15 +34,24 @@ $ docker run \ grafana/grafana ``` -You can use your own grafana.ini file by using environment variable `GF_PATHS_CONFIG`. - The back-end web server has a number of configuration options. Go to the [Configuration]({{< relref "configuration.md" >}}) page for details on all those options. +## Running a Specific Version of Grafana + +```bash +# specify right tag, e.g. 
5.1.0 - see Docker Hub for available tags
+$ docker run \
+  -d \
+  -p 3000:3000 \
+  --name grafana \
+  grafana/grafana:5.1.0
+```
+
 ## Installing Plugins for Grafana

-Pass the plugins you want installed to docker with the `GF_INSTALL_PLUGINS` environment variable as a comma separated list. This will pass each plugin name to `grafana-cli plugins install ${plugin}`.
+Pass the plugins you want installed to docker with the `GF_INSTALL_PLUGINS` environment variable as a comma-separated list. This will pass each plugin name to `grafana-cli plugins install ${plugin}` and install them when Grafana starts.

 ```bash
 docker run \
@@ -75,15 +62,22 @@ docker run \
   grafana/grafana
 ```

-## Running a Specific Version of Grafana
+## Building a custom Grafana image with pre-installed plugins

+In the [grafana-docker](https://github.com/grafana/grafana-docker/) repository there is a folder called `custom/` which includes a `Dockerfile` that can be used to build a custom Grafana image. It accepts `GRAFANA_VERSION` and `GF_INSTALL_PLUGINS` as build arguments.
+
+Example of how to build and run:
 ```bash
-# specify right tag, e.g. 4.5.2 - see Docker Hub for available tags
-$ docker run \
+cd custom
+docker build -t grafana:latest-with-plugins \
+  --build-arg "GRAFANA_VERSION=latest" \
+  --build-arg "GF_INSTALL_PLUGINS=grafana-clock-panel,grafana-simple-json-datasource" .
+
+docker run \
   -d \
   -p 3000:3000 \
-  --name grafana \
-  grafana/grafana:5.0.2
+  --name=grafana \
+  grafana:latest-with-plugins
 ```

 ## Configuring AWS Credentials for CloudWatch Support
@@ -108,3 +102,94 @@ Supported variables:
 - `GF_AWS_${profile}_ACCESS_KEY_ID`: AWS access key ID (required).
 - `GF_AWS_${profile}_SECRET_ACCESS_KEY`: AWS secret access key (required).
 - `GF_AWS_${profile}_REGION`: AWS region (optional).
+
+## Grafana container with persistent storage (recommended)
+
+```bash
+# create a persistent volume for your data in /var/lib/grafana (database and plugins)
+docker volume create grafana-storage
+
+# start grafana
+docker run \
+  -d \
+  -p 3000:3000 \
+  --name=grafana \
+  -v grafana-storage:/var/lib/grafana \
+  grafana/grafana
+```
+
+## Grafana container using bind mounts
+
+You may want to run Grafana in Docker but use folders on your host for the database or configuration. When doing so it becomes important to start the container with a user that is able to access and write to the folder you map into the container.
+
+```bash
+mkdir data # creates a folder for your data
+ID=$(id -u) # saves your user id in the ID variable
+
+# starts grafana with your user id and using the data folder
+docker run -d --user $ID --volume "$PWD/data:/var/lib/grafana" -p 3000:3000 grafana/grafana:5.1.0
+```
+
+## Migration from a previous version of the docker container to 5.1 or later
+
+The docker container for Grafana has seen a major rewrite for 5.1.
+
+**Important changes**
+
+* file ownership is no longer modified during startup with `chown`
+* default user id `472` instead of `104`
+* no more implicit volumes
+  - `/var/lib/grafana`
+  - `/etc/grafana`
+  - `/var/log/grafana`
+
+### Removal of implicit volumes
+
+Previously `/var/lib/grafana`, `/etc/grafana` and `/var/log/grafana` were defined as volumes in the `Dockerfile`. This led to the creation of three volumes each time a new instance of the Grafana container started, whether you wanted it or not.
+
+You should always be careful to define your own named volume for storage, but if you depended on these volumes you should be aware that an upgraded container will no longer have them.
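+
+A hedged sketch of how to check what an existing container currently mounts (the container name `grafana` is an assumption, use your own):
+
+```bash
+# Print each mount's volume name (empty for bind mounts) and its path inside
+# the container, so you can re-attach them to the upgraded image.
+docker inspect \
+  --format '{{ range .Mounts }}{{ .Name }} => {{ .Destination }}{{ printf "\n" }}{{ end }}' \
+  grafana
+```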
+
+**Warning**: when migrating from an earlier version to 5.1 or later using docker compose and implicit volumes you need to use `docker inspect` to find out which volumes your container is mapped to so that you can map them to the upgraded container as well. You will also have to change file ownership (or user) as documented below.
+
+### User ID changes
+
+In 5.1 we switched the id of the grafana user. Unfortunately this means that files created prior to 5.1 won't have the correct permissions for later versions. We made this change so that it would be more likely that the grafana user's id would be unique to Grafana. For example, on Ubuntu 16.04 `104` is already in use by the syslog user.
+
+Version | User | User ID
+--------|---------|---------
+< 5.1 | grafana | 104
+>= 5.1 | grafana | 472
+
+There are two possible solutions to this problem. Either you start the new container as the root user and change ownership from `104` to `472`, or you start the upgraded container as user `104`.
+
+#### Running docker as a different user
+
+```bash
+docker run --user 104 --volume "<your volume mapping>" grafana/grafana:5.1.0
+```
+
+##### Specifying a user in docker-compose.yml
+```yaml
+version: "2"
+
+services:
+  grafana:
+    image: grafana/grafana:5.1.0
+    ports:
+      - 3000:3000
+    user: "104"
+```
+
+#### Modifying permissions
+
+The commands below will run bash inside the Grafana container with your volume mapped in. This makes it possible to modify the file ownership to match the new container. Always be careful when modifying permissions.
+
+```bash
+$ docker run -ti --user root --volume "<your volume mapping>" --entrypoint bash grafana/grafana:5.1.0
+
+# in the container you just started:
+chown -R root:root /etc/grafana && \
+  chmod -R a+r /etc/grafana && \
+  chown -R grafana:grafana /var/lib/grafana && \
+  chown -R grafana:grafana /usr/share/grafana
+```
diff --git a/docs/sources/installation/rpm.md b/docs/sources/installation/rpm.md
index b0405eb6533..71ef7433be1 100644
--- a/docs/sources/installation/rpm.md
+++ b/docs/sources/installation/rpm.md
@@ -15,8 +15,10 @@ weight = 2

 Description | Download
 ------------ | -------------
-Stable for CentOS / Fedora / OpenSuse / Redhat Linux | [5.0.3 (x86-64 rpm)](https://s3-us-west-2.amazonaws.com/grafana-releases/release/grafana-5.0.3-1.x86_64.rpm)
-
+Stable for CentOS / Fedora / OpenSuse / Redhat Linux | [5.1.1 (x86-64 rpm)](https://s3-us-west-2.amazonaws.com/grafana-releases/release/grafana-5.1.1-1.x86_64.rpm)
+

 Read [Upgrading Grafana]({{< relref "installation/upgrading.md" >}}) for tips and guidance on updating an existing
 installation.

 You can install Grafana using Yum directly.

 ```bash
-$ sudo yum install https://s3-us-west-2.amazonaws.com/grafana-releases/release/grafana-5.0.3-1.x86_64.rpm
+$ sudo yum install https://s3-us-west-2.amazonaws.com/grafana-releases/release/grafana-5.1.1-1.x86_64.rpm
 ```
+
+
 Or install manually using `rpm`.
#### On CentOS / Fedora / Redhat:

 ```bash
-$ wget https://s3-us-west-2.amazonaws.com/grafana-releases/release/grafana-5.0.3-1.x86_64.rpm
+$ wget https://s3-us-west-2.amazonaws.com/grafana-releases/release/grafana-5.1.1-1.x86_64.rpm
 $ sudo yum install initscripts fontconfig
-$ sudo rpm -Uvh grafana-5.0.3-1.x86_64.rpm
+$ sudo rpm -Uvh grafana-5.1.1-1.x86_64.rpm
 ```

 #### On OpenSuse:

 ```bash
-$ sudo rpm -i --nodeps grafana-5.0.3-1.x86_64.rpm
+$ sudo rpm -i --nodeps grafana-5.1.1-1.x86_64.rpm
 ```

 ## Install via YUM Repository

 Add the following to a new file at `/etc/yum.repos.d/grafana.repo`

 ```bash
 [grafana]
 name=grafana
-baseurl=https://packagecloud.io/grafana/stable/el/6/$basearch
+baseurl=https://packagecloud.io/grafana/stable/el/7/$basearch
 repo_gpgcheck=1
 enabled=1
 gpgcheck=1
@@ -64,7 +72,7 @@ sslcacert=/etc/pki/tls/certs/ca-bundle.crt

 There is also a testing repository if you want beta or release candidates.

 ```bash
-baseurl=https://packagecloud.io/grafana/testing/el/6/$basearch
+baseurl=https://packagecloud.io/grafana/testing/el/7/$basearch
 ```

 Then install Grafana via the `yum` command.
diff --git a/docs/sources/installation/upgrading.md b/docs/sources/installation/upgrading.md
index 5b00fd92924..c72bb4c0921 100644
--- a/docs/sources/installation/upgrading.md
+++ b/docs/sources/installation/upgrading.md
@@ -23,9 +23,9 @@ Before upgrading it can be a good idea to backup your Grafana database. This wil

 #### sqlite

-If you use sqlite you only need to make a backup of you `grafana.db` file. This is usually located at `/var/lib/grafana/grafana.db` on unix system.
+If you use sqlite you only need to make a backup of your `grafana.db` file. This is usually located at `/var/lib/grafana/grafana.db` on Unix systems.

 If you are unsure what database you use and where it is stored check you grafana configuration file. If you
-installed grafana to custom location using a binary tar/zip it is usally in `/data`.
+installed grafana to a custom location using a binary tar/zip it is usually in `/data`.

 #### mysql
diff --git a/docs/sources/installation/windows.md b/docs/sources/installation/windows.md
index 2dac13a6322..cea86812491 100644
--- a/docs/sources/installation/windows.md
+++ b/docs/sources/installation/windows.md
@@ -8,12 +8,15 @@ parent = "installation"
 weight = 3
 +++

-
 # Installing on Windows

 Description | Download
 ------------ | -------------
-Latest stable package for Windows | [grafana-5.0.3.windows-x64.zip](https://s3-us-west-2.amazonaws.com/grafana-releases/release/grafana-5.0.3.windows-x64.zip)
+Latest stable package for Windows | [grafana-5.1.1.windows-x64.zip](https://s3-us-west-2.amazonaws.com/grafana-releases/release/grafana-5.1.1.windows-x64.zip)
+
+
 Read [Upgrading Grafana]({{< relref "installation/upgrading.md" >}}) for tips and guidance on updating an existing
 installation.
diff --git a/docs/sources/plugins/developing/apps.md b/docs/sources/plugins/developing/apps.md
index a3fc35066f6..155f97461c9 100644
--- a/docs/sources/plugins/developing/apps.md
+++ b/docs/sources/plugins/developing/apps.md
@@ -5,7 +5,7 @@ type = "docs"
 [menu.docs]
 name = "Developing App Plugins"
 parent = "developing"
-weight = 6
+weight = 4
 +++

 # Grafana Apps
diff --git a/docs/sources/plugins/developing/datasources.md b/docs/sources/plugins/developing/datasources.md
index 09a005ba714..064f3a850ae 100644
--- a/docs/sources/plugins/developing/datasources.md
+++ b/docs/sources/plugins/developing/datasources.md
@@ -5,7 +5,7 @@ type = "docs"
 [menu.docs]
 name = "Developing Datasource Plugins"
 parent = "developing"
-weight = 6
+weight = 5
 +++

 # Datasources
diff --git a/docs/sources/plugins/developing/panels.md b/docs/sources/plugins/developing/panels.md
index 26db69c7c94..d679288e2d2 100644
--- a/docs/sources/plugins/developing/panels.md
+++ b/docs/sources/plugins/developing/panels.md
@@ -1,16 +1,11 @@
----
-page_title: Plugin panel
-page_description: Panel plugins for Grafana
-page_keywords: grafana, plugins, documentation
----
-
 +++
-title = "Installing Plugins"
+title = "Developing Panel Plugins"
+keywords = ["grafana", "plugins", "panel", "documentation"]
 type = "docs"
 [menu.docs]
+name = "Developing Panel Plugins"
 parent = "developing"
-weight = 1
+weight = 4
 +++

@@ -20,7 +15,21 @@ Panels are the main building blocks of dashboards.

 ## Panel development

-Examples
+
+### Scrolling
+
+The Grafana dashboard framework controls the panel height. To enable a scrollbar within the panel, the PanelCtrl needs to set the scrollable static variable:
+
+```javascript
+export class MyPanelCtrl extends PanelCtrl {
+  static scrollable = true;
+  ...
+```
+
+In this case, make sure the template has a single `<div>...</div>` root. The plugin loader will modify that element, adding a scrollbar.
+
+
+
+### Examples

 - [clock-panel](https://github.com/grafana/clock-panel)
 - [singlestat-panel](https://github.com/grafana/grafana/blob/master/public/app/plugins/panel/singlestat/module.ts)
diff --git a/docs/sources/plugins/developing/plugin.json.md b/docs/sources/plugins/developing/plugin.json.md
index 7de5e91986f..2d21a665207 100644
--- a/docs/sources/plugins/developing/plugin.json.md
+++ b/docs/sources/plugins/developing/plugin.json.md
@@ -5,7 +5,7 @@ type = "docs"
 [menu.docs]
 name = "plugin.json Schema"
 parent = "developing"
-weight = 6
+weight = 8
 +++

 # Plugin.json
diff --git a/docs/sources/reference/dashboard.md b/docs/sources/reference/dashboard.md
index dbc3ed8635c..30581968743 100644
--- a/docs/sources/reference/dashboard.md
+++ b/docs/sources/reference/dashboard.md
@@ -71,13 +71,13 @@ Each field in the dashboard JSON is explained below with its usage:
 | **timepicker** | timepicker metadata, see [timepicker section](#timepicker) for details |
 | **templating** | templating metadata, see [templating section](#templating) for details |
 | **annotations** | annotations metadata, see [annotations section](#annotations) for details |
-| **schemaVersion** | version of the JSON schema (integer), incremented each time a Grafana update brings changes to the said schema |
+| **schemaVersion** | version of the JSON schema (integer), incremented each time a Grafana update brings changes to said schema |
 | **version** | version of the dashboard (integer), incremented each time the dashboard is updated |
 | **panels** | panels array, see below for detail. |

 ## Panels

-Panels are the building blocks a dashboard. It consists of datasource queries, type of graphs, aliases, etc. Panel JSON consists of an array of JSON objects, each representing a different panel. Most of the fields are common for all panels but some fields depends on the panel type. Following is an example of panel JSON of a text panel.
+Panels are the building blocks of a dashboard. A panel consists of datasource queries, types of graphs, aliases, etc. Panel JSON consists of an array of JSON objects, each representing a different panel. Most of the fields are common for all panels but some fields depend on the panel type. Following is an example of the panel JSON of a text panel.

 ```json
 "panels": [
@@ -105,7 +105,7 @@ The gridPos property describes the panel size and position in grid coordinates.
 - `x` The x position, in same unit as `w`.
 - `y` The y position, in same unit as `h`.

-The grid has a negative gravity that moves panels up if there i empty space above a panel.
+The grid has a negative gravity that moves panels up if there is empty space above a panel.
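+
+For example, a quick way to inspect panel placement in an exported dashboard JSON file. This is just a sketch and assumes `jq` is installed and the file is named `dashboard.json`:
+
+```bash
+# Print every panel's title and gridPos so you can reason about the layout;
+# w/x are in 24ths of the dashboard width, h/y in grid rows.
+jq '.panels[] | {title, gridPos}' dashboard.json
+```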
### timepicker @@ -161,7 +161,7 @@ Usage of the fields is explained below: ### templating -`templating` fields contains array of template variables with their saved values along with some other metadata, for example: +The `templating` field contains an array of template variables with their saved values along with some other metadata, for example: ```json "templating": { @@ -236,7 +236,7 @@ Usage of the above mentioned fields in the templating section is explained below | Name | Usage | | ---- | ----- | | **enable** | whether templating is enabled or not | -| **list** | an array of objects representing, each representing one template variable | +| **list** | an array of objects each representing one template variable | | **allFormat** | format to use while fetching all values from datasource, eg: `wildcard`, `glob`, `regex`, `pipe`, etc. | | **current** | shows current selected variable text/value on the dashboard | | **datasource** | shows datasource for the variables | diff --git a/docs/sources/reference/playlist.md b/docs/sources/reference/playlist.md index 5a6bf921334..182e69eebd0 100644 --- a/docs/sources/reference/playlist.md +++ b/docs/sources/reference/playlist.md @@ -49,7 +49,7 @@ Click the back button to rewind to the previous Dashboard in the Playlist. In TV mode the top navbar, row & panel controls will all fade to transparent. This happens automatically after one minute of user inactivity but can also be toggled manually -with the `d v` sequence shortcut. Any mouse movement or keyboard action will +with the `d v` sequence shortcut, or by appending the parameter `?inactive` to the dashboard URL. Any mouse movement or keyboard action will restore navbar & controls. Another feature is the kiosk mode - in kiosk mode the navbar is completely hidden/removed from view. This can be enabled with the `d k` diff --git a/docs/sources/reference/templating.md b/docs/sources/reference/templating.md index f9e16e26610..8341b9770bd 100644 --- a/docs/sources/reference/templating.md +++ b/docs/sources/reference/templating.md @@ -36,6 +36,29 @@ interpolation the variable value might be **escaped** in order to conform to the For example, a variable used in a regex expression in an InfluxDB or Prometheus query will be regex escaped. Read the data source specific documentation article for details on value escaping during interpolation. +### Advanced Formatting Options + +> Only available in Grafana v5.1+. + +The formatting of the variable interpolation depends on the data source but there are some situations where you might want to change the default formatting. For example, the default for the MySql datasource is to join multiple values as comma-separated with quotes: `'server01','server02'`. In some cases you might want to have a comma-separated string without quotes: `server01,server02`. This is now possible with the advanced formatting options. 
+
+Syntax: `${var_name:option}`
+
+Filter Option | Example | Raw | Interpolated | Description
+------------ | ------------- | ------------- | ------------- | -------------
+`glob` | ${servers:glob} | `'test1', 'test2'` | `{test1,test2}` | (Default) Formats multi-value variable into a glob (for Graphite queries)
+`regex` | ${servers:regex} | `'test.', 'test2'` | `(test\.\|test2)` | Formats multi-value variable into a regex string
+`pipe` | ${servers:pipe} | `'test.', 'test2'` | `test.\|test2` | Formats multi-value variable into a pipe-separated string
+`csv`| ${servers:csv} | `'test1', 'test2'` | `test1,test2` | Formats multi-value variable as a comma-separated string
+`distributed`| ${servers:distributed} | `'test1', 'test2'` | `test1,servers=test2` | Formats multi-value variable in custom format for OpenTSDB.
+`lucene`| ${servers:lucene} | `'test', 'test2'` | `("test" OR "test2")` | Formats multi-value variable as a lucene expression.
+
+Test the formatting options on the [Grafana Play site](http://play.grafana.org/d/cJtIfcWiz/template-variable-formatting-options?orgId=1).
+
+If any invalid formatting option is specified, then `glob` is the default/fallback option.
+
+An alternative syntax (that might be deprecated in the future) is `[[var_name:option]]`.
+
 ### Variable options

 A variable is presented as a dropdown select box at the top of the dashboard. It has a current value and a set of **options**. The **options**
@@ -166,14 +189,16 @@ Option | Description
 ------- | --------
 *Multi-value* | If enabled, the variable will support the selection of multiple options at the same time.
 *Include All option* | Add a special `All` option whose value includes all options.
-*Custom all value* | By default the `All` value will include all options in combined expression. This can become very long and can have performance problems. Many times it can be better to specify a custom all value, like a wildcard regex. To make it possible to have custom regex, globs or lucene syntax in the **Custom all value** option it is never escaped so you will have to think avbout what is a valid value for your data source.
+*Custom all value* | By default the `All` value will include all options in combined expression. This can become very long and can have performance problems. Many times it can be better to specify a custom all value, like a wildcard regex. To make it possible to have custom regex, globs or lucene syntax in the **Custom all value** option it is never escaped so you will have to think about what is a valid value for your data source.

-### Formating multiple values
+### Formatting multiple values

 Interpolating a variable with multiple values selected is tricky as it is not straight forward how to format the multiple values to into a string that is valid in the given context where the variable is used. Grafana tries to solve this by allowing each data source plugin to inform the templating interpolation engine what format to use for multiple values.

+Note that the *Custom all value* option on the variable will have to be left blank for Grafana to format all values into a single string.
+
 **Graphite**, for example, uses glob expressions. A variable with multiple values would, in this case, be interpolated as `{host1,host2,host3}` if the current variable value was *host1*, *host2* and *host3*.

@@ -184,7 +209,7 @@ break the regex expression.

 **Elasticsearch** uses lucene query syntax, so the same variable would, in this case, be formatted as `("host1" OR "host2" OR "host3")`.
In this case every value needs to be escaped so that the value can contain lucene control words and quotation marks. -#### Formating troubles +#### Formatting troubles Automatic escaping & formatting can cause problems and it can be tricky to grasp the logic is behind it. Especially for InfluxDB and Prometheus where the use of regex syntax requires that the variable is used in regex operator context. @@ -275,4 +300,3 @@ Variable values are always synced to the URL using the syntax `var-=val - [Graphite Templated Dashboard](http://play.grafana.org/dashboard/db/graphite-templated-nested) - [Elasticsearch Templated Dashboard](http://play.grafana.org/dashboard/db/elasticsearch-templated) - [InfluxDB Templated Dashboard](http://play.grafana.org/dashboard/db/influxdb-templated-queries) - diff --git a/docs/sources/tutorials/authproxy.md b/docs/sources/tutorials/authproxy.md index 8003be20644..6f13de85c18 100644 --- a/docs/sources/tutorials/authproxy.md +++ b/docs/sources/tutorials/authproxy.md @@ -108,7 +108,7 @@ In this example we use Apache as a reverseProxy in front of Grafana. Apache hand * The next part of the configuration is the tricky part. We use Apache’s rewrite engine to create our **X-WEBAUTH-USER header**, populated with the authenticated user. - * **RewriteRule .* - [E=PROXY_USER:%{LA-U:REMOTE_USER}, NS]**: This line is a little bit of magic. What it does, is for every request use the rewriteEngines look-ahead (LA-U) feature to determine what the REMOTE_USER variable would be set to after processing the request. Then assign the result to the variable PROXY_USER. This is neccessary as the REMOTE_USER variable is not available to the RequestHeader function. + * **RewriteRule .* - [E=PROXY_USER:%{LA-U:REMOTE_USER}, NS]**: This line is a little bit of magic. What it does, is for every request use the rewriteEngines look-ahead (LA-U) feature to determine what the REMOTE_USER variable would be set to after processing the request. Then assign the result to the variable PROXY_USER. This is necessary as the REMOTE_USER variable is not available to the RequestHeader function. * **RequestHeader set X-WEBAUTH-USER “%{PROXY_USER}e”**: With the authenticated username now stored in the PROXY_USER variable, we create a new HTTP request header that will be sent to our backend Grafana containing the username. @@ -149,7 +149,7 @@ auto_sign_up = true ##### Grafana Container -For this example, we use the offical Grafana docker image available at [Docker Hub](https://hub.docker.com/r/grafana/grafana/) +For this example, we use the official Grafana docker image available at [Docker Hub](https://hub.docker.com/r/grafana/grafana/) * Create a file `grafana.ini` with the following contents @@ -166,7 +166,7 @@ header_property = username auto_sign_up = true ``` -* Launch the Grafana container, using our custom grafana.ini to replace `/etc/grafana/grafana.ini`. We dont expose any ports for this container as it will only be connected to by our Apache container. +* Launch the Grafana container, using our custom grafana.ini to replace `/etc/grafana/grafana.ini`. We don't expose any ports for this container as it will only be connected to by our Apache container. 
```bash docker run -i -v $(pwd)/grafana.ini:/etc/grafana/grafana.ini --name grafana grafana/grafana @@ -174,7 +174,7 @@ docker run -i -v $(pwd)/grafana.ini:/etc/grafana/grafana.ini --name grafana graf ### Apache Container -For this example we use the offical Apache docker image available at [Docker Hub](https://hub.docker.com/_/httpd/) +For this example we use the official Apache docker image available at [Docker Hub](https://hub.docker.com/_/httpd/) * Create a file `httpd.conf` with the following contents @@ -244,4 +244,4 @@ ProxyPassReverse / http://grafana:3000/ ### Use grafana. -With our Grafana and Apache containers running, you can now connect to http://localhost/ and log in using the username/password we created in the htpasswd file. \ No newline at end of file +With our Grafana and Apache containers running, you can now connect to http://localhost/ and log in using the username/password we created in the htpasswd file. diff --git a/docs/sources/tutorials/iis.md b/docs/sources/tutorials/iis.md index 63a41d67c16..896181c4a9f 100644 --- a/docs/sources/tutorials/iis.md +++ b/docs/sources/tutorials/iis.md @@ -16,7 +16,7 @@ Example: - Parent site: http://localhost:8080 - Grafana: http://localhost:3000 -Grafana as a subpath: http://localhost:8080/grafana +Grafana as a subpath: http://localhost:8080/grafana ## Setup @@ -33,7 +33,7 @@ Given that the subpath should be `grafana` and the parent site is `localhost:808 ```bash [server] domain = localhost:8080 -root_url = %(protocol)s://%(domain)s:/grafana +root_url = %(protocol)s://%(domain)s/grafana/ ``` Restart the Grafana server after changing the config file. @@ -74,11 +74,11 @@ When navigating to the grafana url (`http://localhost:8080/grafana` in the examp 1. The `root_url` setting in the Grafana config file does not match the parent url with subpath. This could happen if the root_url is commented out by mistake (`;` is used for commenting out a line in .ini files): - `; root_url = %(protocol)s://%(domain)s:/grafana` + `; root_url = %(protocol)s://%(domain)s/grafana/` 2. 
or if the subpath in the `root_url` setting does not match the subpath used in the pattern in the Inbound Rule in IIS: - `root_url = %(protocol)s://%(domain)s:/grafana` + `root_url = %(protocol)s://%(domain)s/grafana/` pattern in Inbound Rule: `wrongsubpath(/)?(.*)` diff --git a/docs/versions.json b/docs/versions.json index 2dcc7ebe776..61e471938f2 100644 --- a/docs/versions.json +++ b/docs/versions.json @@ -1,6 +1,6 @@ [ - { "version": "v5.1", "path": "/v5.1", "archived": false }, - { "version": "v5.0", "path": "/", "archived": false, "current": true }, + { "version": "v5.1", "path": "/", "archived": false, "current": true }, + { "version": "v5.0", "path": "/v5.0", "archived": true }, { "version": "v4.6", "path": "/v4.6", "archived": true }, { "version": "v4.5", "path": "/v4.5", "archived": true }, { "version": "v4.4", "path": "/v4.4", "archived": true }, diff --git a/latest.json b/latest.json index b476f44a00a..5a68ca428b4 100644 --- a/latest.json +++ b/latest.json @@ -1,4 +1,4 @@ { - "stable": "5.0.0", - "testing": "5.0.0" + "stable": "5.0.4", + "testing": "5.0.4" } diff --git a/package.json b/package.json index 6dcfc16b82b..383e0e39ab5 100644 --- a/package.json +++ b/package.json @@ -4,7 +4,7 @@ "company": "Grafana Labs" }, "name": "grafana", - "version": "5.1.0-pre1", + "version": "5.2.0-pre1", "repository": { "type": "git", "url": "http://github.com/grafana/grafana.git" @@ -22,7 +22,9 @@ "axios": "^0.17.1", "babel-core": "^6.26.0", "babel-loader": "^7.1.2", + "babel-plugin-syntax-dynamic-import": "^6.18.0", "babel-preset-es2015": "^6.24.1", + "clean-webpack-plugin": "^0.1.19", "css-loader": "^0.28.7", "enzyme": "^3.1.0", "enzyme-adapter-react-16": "^1.0.1", @@ -54,6 +56,7 @@ "grunt-usemin": "3.1.1", "grunt-webpack": "^3.0.2", "html-loader": "^0.5.1", + "html-webpack-harddisk-plugin": "^0.2.0", "html-webpack-plugin": "^2.30.1", "husky": "^0.14.3", "jest": "^22.0.4", @@ -80,10 +83,12 @@ "postcss-loader": "^2.0.6", "postcss-reporter": "^5.0.0", "prettier": "1.9.2", + "react-hot-loader": "^4.0.1", "react-test-renderer": "^16.0.0", "sass-lint": "^1.10.2", "sass-loader": "^6.0.6", "sinon": "1.17.6", + "style-loader": "^0.20.3", "systemjs": "0.20.19", "systemjs-plugin-css": "^0.1.36", "ts-jest": "^22.0.0", @@ -94,20 +99,22 @@ "webpack": "^3.10.0", "webpack-bundle-analyzer": "^2.9.0", "webpack-cleanup-plugin": "^0.5.1", + "webpack-dev-server": "2.11.1", "webpack-merge": "^4.1.0", "zone.js": "^0.7.2" }, "scripts": { "dev": "webpack --progress --colors --config scripts/webpack/webpack.dev.js", + "start": "webpack-dev-server --progress --colors --config scripts/webpack/webpack.dev.js", "watch": "webpack --progress --colors --watch --config scripts/webpack/webpack.dev.js", "build": "grunt build", "test": "grunt test", "test:coverage": "grunt test --coverage=true", "lint": "tslint -c tslint.json --project tsconfig.json --type-check", - "karma": "node ./node_modules/grunt-cli/bin/grunt karma:dev", - "jest": "node ./node_modules/jest-cli/bin/jest.js --notify --watch", - "api-tests": "node ./node_modules/jest-cli/bin/jest.js --notify --watch --config=tests/api/jest.js", - "precommit": "lint-staged && node ./node_modules/grunt-cli/bin/grunt precommit" + "karma": "grunt karma:dev", + "jest": "jest --notify --watch", + "api-tests": "jest --notify --watch --config=tests/api/jest.js", + "precommit": "lint-staged && grunt precommit" }, "lint-staged": { "*.{ts,tsx}": [ @@ -136,6 +143,7 @@ "angular-route": "^1.6.6", "angular-sanitize": "^1.6.6", "babel-polyfill": "^6.26.0", + "baron": "^3.0.3", "brace": 
"^0.10.0", "classnames": "^2.2.5", "clipboard": "^1.7.1", @@ -143,6 +151,7 @@ "d3-scale-chromatic": "^1.1.1", "eventemitter3": "^2.0.3", "file-saver": "^1.3.3", + "immutable": "^3.8.2", "jquery": "^3.2.1", "lodash": "^4.17.4", "mobx": "^3.4.1", @@ -151,7 +160,7 @@ "moment": "^2.18.1", "mousetrap": "^1.6.0", "mousetrap-global-bind": "^1.1.0", - "perfect-scrollbar": "^1.2.0", + "prismjs": "^1.6.0", "prop-types": "^15.6.0", "react": "^16.2.0", "react-dom": "^16.2.0", @@ -164,6 +173,9 @@ "remarkable": "^1.7.1", "rst2html": "github:thoward/rst2html#990cb89", "rxjs": "^5.4.3", + "slate": "^0.33.4", + "slate-plain-serializer": "^0.5.10", + "slate-react": "^0.12.4", "tether": "^1.4.0", "tether-drop": "https://github.com/torkelo/drop/tarball/master", "tinycolor2": "^1.4.1" diff --git a/packaging/publish/publish_testing.sh b/packaging/publish/publish_testing.sh index 08ba2a89dd9..9fd4e1f93b9 100755 --- a/packaging/publish/publish_testing.sh +++ b/packaging/publish/publish_testing.sh @@ -1,6 +1,6 @@ #! /usr/bin/env bash -deb_ver=5.0.0-beta5 -rpm_ver=5.0.0-beta5 +deb_ver=5.1.0-beta1 +rpm_ver=5.1.0-beta1 wget https://s3-us-west-2.amazonaws.com/grafana-releases/release/grafana_${deb_ver}_amd64.deb diff --git a/pkg/api/admin.go b/pkg/api/admin.go index 52d271ce69b..54a86724f0c 100644 --- a/pkg/api/admin.go +++ b/pkg/api/admin.go @@ -12,7 +12,7 @@ import ( func AdminGetSettings(c *m.ReqContext) { settings := make(map[string]interface{}) - for _, section := range setting.Cfg.Sections() { + for _, section := range setting.Raw.Sections() { jsonSec := make(map[string]interface{}) settings[section.Name()] = jsonSec diff --git a/pkg/api/annotations.go b/pkg/api/annotations.go index 886913d6324..52eeb57dbb9 100644 --- a/pkg/api/annotations.go +++ b/pkg/api/annotations.go @@ -2,7 +2,6 @@ package api import ( "strings" - "time" "github.com/grafana/grafana/pkg/api/dtos" "github.com/grafana/grafana/pkg/components/simplejson" @@ -15,9 +14,10 @@ import ( func GetAnnotations(c *m.ReqContext) Response { query := &annotations.ItemQuery{ - From: c.QueryInt64("from") / 1000, - To: c.QueryInt64("to") / 1000, + From: c.QueryInt64("from"), + To: c.QueryInt64("to"), OrgId: c.OrgId, + UserId: c.QueryInt64("userId"), AlertId: c.QueryInt64("alertId"), DashboardId: c.QueryInt64("dashboardId"), PanelId: c.QueryInt64("panelId"), @@ -37,7 +37,7 @@ func GetAnnotations(c *m.ReqContext) Response { if item.Email != "" { item.AvatarUrl = dtos.GetGravatarUrl(item.Email) } - item.Time = item.Time * 1000 + item.Time = item.Time } return JSON(200, items) @@ -68,16 +68,12 @@ func PostAnnotation(c *m.ReqContext, cmd dtos.PostAnnotationsCmd) Response { UserId: c.UserId, DashboardId: cmd.DashboardId, PanelId: cmd.PanelId, - Epoch: cmd.Time / 1000, + Epoch: cmd.Time, Text: cmd.Text, Data: cmd.Data, Tags: cmd.Tags, } - if item.Epoch == 0 { - item.Epoch = time.Now().Unix() - } - if err := repo.Save(&item); err != nil { return Error(500, "Failed to save annotation", err) } @@ -97,7 +93,7 @@ func PostAnnotation(c *m.ReqContext, cmd dtos.PostAnnotationsCmd) Response { } item.Id = 0 - item.Epoch = cmd.TimeEnd / 1000 + item.Epoch = cmd.TimeEnd if err := repo.Save(&item); err != nil { return Error(500, "Failed save annotation for region end time", err) @@ -132,9 +128,6 @@ func PostGraphiteAnnotation(c *m.ReqContext, cmd dtos.PostGraphiteAnnotationsCmd return Error(500, "Failed to save Graphite annotation", err) } - if cmd.When == 0 { - cmd.When = time.Now().Unix() - } text := formatGraphiteAnnotation(cmd.What, cmd.Data) // Support tags in prior to 
Graphite 0.10.0 format (string of tags separated by space) @@ -163,7 +156,7 @@ func PostGraphiteAnnotation(c *m.ReqContext, cmd dtos.PostGraphiteAnnotationsCmd item := annotations.Item{ OrgId: c.OrgId, UserId: c.UserId, - Epoch: cmd.When, + Epoch: cmd.When * 1000, Text: text, Tags: tagsArray, } @@ -191,7 +184,7 @@ func UpdateAnnotation(c *m.ReqContext, cmd dtos.UpdateAnnotationsCmd) Response { OrgId: c.OrgId, UserId: c.UserId, Id: annotationID, - Epoch: cmd.Time / 1000, + Epoch: cmd.Time, Text: cmd.Text, Tags: cmd.Tags, } @@ -203,7 +196,7 @@ func UpdateAnnotation(c *m.ReqContext, cmd dtos.UpdateAnnotationsCmd) Response { if cmd.IsRegion { itemRight := item itemRight.RegionId = item.Id - itemRight.Epoch = cmd.TimeEnd / 1000 + itemRight.Epoch = cmd.TimeEnd // We don't know id of region right event, so set it to 0 and find then using query like // ... WHERE region_id = AND id != ... @@ -301,19 +294,3 @@ func canSave(c *m.ReqContext, repo annotations.Repository, annotationID int64) R return nil } - -func canSaveByRegionID(c *m.ReqContext, repo annotations.Repository, regionID int64) Response { - items, err := repo.Find(&annotations.ItemQuery{RegionId: regionID, OrgId: c.OrgId}) - - if err != nil || len(items) == 0 { - return Error(500, "Could not find annotation to update", err) - } - - dashboardID := items[0].DashboardId - - if canSave, err := canSaveByDashboardID(c, dashboardID); err != nil || !canSave { - return dashboardGuardianResponse(err) - } - - return nil -} diff --git a/pkg/api/api.go b/pkg/api/api.go index 3c7b81e472d..493f9eb9d01 100644 --- a/pkg/api/api.go +++ b/pkg/api/api.go @@ -23,7 +23,7 @@ func (hs *HTTPServer) registerRoutes() { // automatically set HEAD for every GET macaronR.SetAutoHead(true) - r := newRouteRegister(middleware.RequestMetrics, middleware.RequestTracing) + r := hs.RouteRegister // not logged in views r.Get("/", reqSignedIn, Index) @@ -149,8 +149,6 @@ func (hs *HTTPServer) registerRoutes() { // team (admin permission required) apiRoute.Group("/teams", func(teamsRoute RouteRegister) { - teamsRoute.Get("/:teamId", wrap(GetTeamByID)) - teamsRoute.Get("/search", wrap(SearchTeams)) teamsRoute.Post("/", bind(m.CreateTeamCommand{}), wrap(CreateTeam)) teamsRoute.Put("/:teamId", bind(m.UpdateTeamCommand{}), wrap(UpdateTeam)) teamsRoute.Delete("/:teamId", wrap(DeleteTeamByID)) @@ -159,6 +157,12 @@ func (hs *HTTPServer) registerRoutes() { teamsRoute.Delete("/:teamId/members/:userId", wrap(RemoveTeamMember)) }, reqOrgAdmin) + // team without requirement of user to be org admin + apiRoute.Group("/teams", func(teamsRoute RouteRegister) { + teamsRoute.Get("/:teamId", wrap(GetTeamByID)) + teamsRoute.Get("/search", wrap(SearchTeams)) + }) + // org information available to all users. 
apiRoute.Group("/org", func(orgRoute RouteRegister) { orgRoute.Get("/", wrap(GetOrgCurrent)) @@ -170,7 +174,6 @@ func (hs *HTTPServer) registerRoutes() { orgRoute.Put("/", bind(dtos.UpdateOrgForm{}), wrap(UpdateOrgCurrent)) orgRoute.Put("/address", bind(dtos.UpdateOrgAddressForm{}), wrap(UpdateOrgAddressCurrent)) orgRoute.Post("/users", quota("user"), bind(m.AddOrgUserCommand{}), wrap(AddOrgUserToCurrentOrg)) - orgRoute.Get("/users", wrap(GetOrgUsersForCurrentOrg)) orgRoute.Patch("/users/:userId", bind(m.UpdateOrgUserCommand{}), wrap(UpdateOrgUserForCurrentOrg)) orgRoute.Delete("/users/:userId", wrap(RemoveOrgUserForCurrentOrg)) @@ -184,6 +187,11 @@ func (hs *HTTPServer) registerRoutes() { orgRoute.Put("/preferences", bind(dtos.UpdatePrefsCmd{}), wrap(UpdateOrgPreferences)) }, reqOrgAdmin) + // current org without requirement of user to be org admin + apiRoute.Group("/org", func(orgRoute RouteRegister) { + orgRoute.Get("/users", wrap(GetOrgUsersForCurrentOrg)) + }) + // create new org apiRoute.Post("/orgs", quota("org"), bind(m.CreateOrgCommand{}), wrap(CreateOrg)) diff --git a/pkg/api/avatar/avatar.go b/pkg/api/avatar/avatar.go index ce9da1e8790..5becf90ca35 100644 --- a/pkg/api/avatar/avatar.go +++ b/pkg/api/avatar/avatar.go @@ -226,7 +226,7 @@ func (this *thunderTask) Fetch() { this.Done() } -var client *http.Client = &http.Client{ +var client = &http.Client{ Timeout: time.Second * 2, Transport: &http.Transport{Proxy: http.ProxyFromEnvironment}, } @@ -258,9 +258,6 @@ func (this *thunderTask) fetch() error { this.Avatar.data = &bytes.Buffer{} writer := bufio.NewWriter(this.Avatar.data) - if _, err = io.Copy(writer, resp.Body); err != nil { - return err - } - - return nil + _, err = io.Copy(writer, resp.Body) + return err } diff --git a/pkg/api/dashboard.go b/pkg/api/dashboard.go index 11a028cdd29..c2ab6dd9a1a 100644 --- a/pkg/api/dashboard.go +++ b/pkg/api/dashboard.go @@ -102,6 +102,16 @@ func GetDashboard(c *m.ReqContext) Response { meta.FolderUrl = query.Result.GetUrl() } + isDashboardProvisioned := &m.IsDashboardProvisionedQuery{DashboardId: dash.Id} + err = bus.Dispatch(isDashboardProvisioned) + if err != nil { + return Error(500, "Error while checking if dashboard is provisioned", err) + } + + if isDashboardProvisioned.Result { + meta.Provisioned = true + } + // make sure db version is in sync with json model version dash.Data.Set("version", dash.Version) @@ -228,7 +238,8 @@ func PostDashboard(c *m.ReqContext, cmd m.SaveDashboardCommand) Response { err == m.ErrDashboardWithSameUIDExists || err == m.ErrFolderNotFound || err == m.ErrDashboardFolderCannotHaveParent || - err == m.ErrDashboardFolderNameExists { + err == m.ErrDashboardFolderNameExists || + err == m.ErrDashboardCannotSaveProvisionedDashboard { return Error(400, err.Error(), nil) } diff --git a/pkg/api/dashboard_permission.go b/pkg/api/dashboard_permission.go index 653815aea5c..342eaf556c6 100644 --- a/pkg/api/dashboard_permission.go +++ b/pkg/api/dashboard_permission.go @@ -29,6 +29,11 @@ func GetDashboardPermissionList(c *m.ReqContext) Response { } for _, perm := range acl { + perm.UserAvatarUrl = dtos.GetGravatarUrl(perm.UserEmail) + + if perm.TeamId > 0 { + perm.TeamAvatarUrl = dtos.GetGravatarUrlWithDefault(perm.TeamEmail, perm.Team) + } if perm.Slug != "" { perm.Url = m.GetDashboardFolderUrl(perm.IsFolder, perm.Uid, perm.Slug) } diff --git a/pkg/api/dashboard_permission_test.go b/pkg/api/dashboard_permission_test.go index bdf80ef5241..24f0bdca365 100644 --- a/pkg/api/dashboard_permission_test.go +++ 
b/pkg/api/dashboard_permission_test.go @@ -143,7 +143,7 @@ func TestDashboardPermissionApiEndpoint(t *testing.T) { }) }) - Convey("When trying to override inherited permissions with lower presedence", func() { + Convey("When trying to override inherited permissions with lower precedence", func() { origNewGuardian := guardian.New guardian.MockDashboardGuardian(&guardian.FakeDashboardGuardian{ CanAdminValue: true, diff --git a/pkg/api/dashboard_test.go b/pkg/api/dashboard_test.go index 0d87023ce40..ccde2382787 100644 --- a/pkg/api/dashboard_test.go +++ b/pkg/api/dashboard_test.go @@ -42,6 +42,11 @@ func TestDashboardApiEndpoint(t *testing.T) { return nil }) + bus.AddHandler("test", func(query *m.IsDashboardProvisionedQuery) error { + query.Result = false + return nil + }) + viewerRole := m.ROLE_VIEWER editorRole := m.ROLE_EDITOR @@ -192,6 +197,11 @@ func TestDashboardApiEndpoint(t *testing.T) { fakeDash.HasAcl = true setting.ViewersCanEdit = false + bus.AddHandler("test", func(query *m.IsDashboardProvisionedQuery) error { + query.Result = false + return nil + }) + bus.AddHandler("test", func(query *m.GetDashboardsBySlugQuery) error { dashboards := []*m.Dashboard{fakeDash} query.Result = dashboards @@ -625,6 +635,11 @@ func TestDashboardApiEndpoint(t *testing.T) { dashTwo.FolderId = 3 dashTwo.HasAcl = false + bus.AddHandler("test", func(query *m.IsDashboardProvisionedQuery) error { + query.Result = false + return nil + }) + bus.AddHandler("test", func(query *m.GetDashboardsBySlugQuery) error { dashboards := []*m.Dashboard{dashOne, dashTwo} query.Result = dashboards @@ -720,6 +735,7 @@ func TestDashboardApiEndpoint(t *testing.T) { {SaveError: m.ErrDashboardUpdateAccessDenied, ExpectedStatusCode: 403}, {SaveError: m.ErrDashboardInvalidUid, ExpectedStatusCode: 400}, {SaveError: m.ErrDashboardUidToLong, ExpectedStatusCode: 400}, + {SaveError: m.ErrDashboardCannotSaveProvisionedDashboard, ExpectedStatusCode: 400}, {SaveError: m.UpdatePluginDashboardError{PluginId: "plug"}, ExpectedStatusCode: 412}, } @@ -750,6 +766,11 @@ func TestDashboardApiEndpoint(t *testing.T) { return nil }) + bus.AddHandler("test", func(query *m.IsDashboardProvisionedQuery) error { + query.Result = false + return nil + }) + bus.AddHandler("test", func(query *m.GetDashboardVersionQuery) error { query.Result = &m.DashboardVersion{ Data: simplejson.NewFromAny(map[string]interface{}{ diff --git a/pkg/api/dtos/dashboard.go b/pkg/api/dtos/dashboard.go index e4c66aebbda..39a6dca580d 100644 --- a/pkg/api/dtos/dashboard.go +++ b/pkg/api/dtos/dashboard.go @@ -28,6 +28,7 @@ type DashboardMeta struct { FolderId int64 `json:"folderId"` FolderTitle string `json:"folderTitle"` FolderUrl string `json:"folderUrl"` + Provisioned bool `json:"provisioned"` } type DashboardFullWithMeta struct { diff --git a/pkg/api/dtos/models.go b/pkg/api/dtos/models.go index 2348e217a41..aead67cd04c 100644 --- a/pkg/api/dtos/models.go +++ b/pkg/api/dtos/models.go @@ -22,21 +22,22 @@ type LoginCommand struct { } type CurrentUser struct { - IsSignedIn bool `json:"isSignedIn"` - Id int64 `json:"id"` - Login string `json:"login"` - Email string `json:"email"` - Name string `json:"name"` - LightTheme bool `json:"lightTheme"` - OrgCount int `json:"orgCount"` - OrgId int64 `json:"orgId"` - OrgName string `json:"orgName"` - OrgRole m.RoleType `json:"orgRole"` - IsGrafanaAdmin bool `json:"isGrafanaAdmin"` - GravatarUrl string `json:"gravatarUrl"` - Timezone string `json:"timezone"` - Locale string `json:"locale"` - HelpFlags1 m.HelpFlags1 `json:"helpFlags1"` + 
IsSignedIn bool `json:"isSignedIn"` + Id int64 `json:"id"` + Login string `json:"login"` + Email string `json:"email"` + Name string `json:"name"` + LightTheme bool `json:"lightTheme"` + OrgCount int `json:"orgCount"` + OrgId int64 `json:"orgId"` + OrgName string `json:"orgName"` + OrgRole m.RoleType `json:"orgRole"` + IsGrafanaAdmin bool `json:"isGrafanaAdmin"` + GravatarUrl string `json:"gravatarUrl"` + Timezone string `json:"timezone"` + Locale string `json:"locale"` + HelpFlags1 m.HelpFlags1 `json:"helpFlags1"` + HasEditPermissionInFolders bool `json:"hasEditPermissionInFolders"` } type MetricRequest struct { diff --git a/pkg/api/folder_permission.go b/pkg/api/folder_permission.go index 0d0904c99ea..d19ec848ab2 100644 --- a/pkg/api/folder_permission.go +++ b/pkg/api/folder_permission.go @@ -33,6 +33,12 @@ func GetFolderPermissionList(c *m.ReqContext) Response { perm.FolderId = folder.Id perm.DashboardId = 0 + perm.UserAvatarUrl = dtos.GetGravatarUrl(perm.UserEmail) + + if perm.TeamId > 0 { + perm.TeamAvatarUrl = dtos.GetGravatarUrlWithDefault(perm.TeamEmail, perm.Team) + } + if perm.Slug != "" { perm.Url = m.GetDashboardFolderUrl(perm.IsFolder, perm.Uid, perm.Slug) } diff --git a/pkg/api/http_server.go b/pkg/api/http_server.go index 387a543ca89..2afccb8f0d7 100644 --- a/pkg/api/http_server.go +++ b/pkg/api/http_server.go @@ -26,27 +26,34 @@ import ( "github.com/grafana/grafana/pkg/middleware" "github.com/grafana/grafana/pkg/models" "github.com/grafana/grafana/pkg/plugins" + "github.com/grafana/grafana/pkg/registry" "github.com/grafana/grafana/pkg/setting" ) +func init() { + registry.RegisterService(&HTTPServer{}) +} + type HTTPServer struct { log log.Logger macaron *macaron.Macaron context context.Context streamManager *live.StreamManager cache *gocache.Cache + httpSrv *http.Server - httpSrv *http.Server + RouteRegister RouteRegister `inject:""` + Bus bus.Bus `inject:""` } -func NewHTTPServer() *HTTPServer { - return &HTTPServer{ - log: log.New("http.server"), - cache: gocache.New(5*time.Minute, 10*time.Minute), - } +func (hs *HTTPServer) Init() error { + hs.log = log.New("http.server") + hs.cache = gocache.New(5*time.Minute, 10*time.Minute) + + return nil } -func (hs *HTTPServer) Start(ctx context.Context) error { +func (hs *HTTPServer) Run(ctx context.Context) error { var err error hs.context = ctx @@ -57,9 +64,20 @@ func (hs *HTTPServer) Start(ctx context.Context) error { hs.streamManager.Run(ctx) listenAddr := fmt.Sprintf("%s:%s", setting.HttpAddr, setting.HttpPort) - hs.log.Info("Initializing HTTP Server", "address", listenAddr, "protocol", setting.Protocol, "subUrl", setting.AppSubUrl, "socket", setting.SocketPath) + hs.log.Info("HTTP Server Listen", "address", listenAddr, "protocol", setting.Protocol, "subUrl", setting.AppSubUrl, "socket", setting.SocketPath) hs.httpSrv = &http.Server{Addr: listenAddr, Handler: hs.macaron} + + // handle http shutdown on server context done + go func() { + <-ctx.Done() + // Hacky fix for race condition between ListenAndServe and Shutdown + time.Sleep(time.Millisecond * 100) + if err := hs.httpSrv.Shutdown(context.Background()); err != nil { + hs.log.Error("Failed to shutdown server", "error", err) + } + }() + switch setting.Protocol { case setting.HTTP: err = hs.httpSrv.ListenAndServe() @@ -96,12 +114,6 @@ func (hs *HTTPServer) Start(ctx context.Context) error { return err } -func (hs *HTTPServer) Shutdown(ctx context.Context) error { - err := hs.httpSrv.Shutdown(ctx) - hs.log.Info("Stopped HTTP server") - return err -} - func (hs *HTTPServer) 
listenAndServeTLS(certfile, keyfile string) error { if certfile == "" { return fmt.Errorf("cert_file cannot be empty when using HTTPS") @@ -139,7 +151,7 @@ func (hs *HTTPServer) listenAndServeTLS(certfile, keyfile string) error { } hs.httpSrv.TLSConfig = tlsCfg - hs.httpSrv.TLSNextProto = make(map[string]func(*http.Server, *tls.Conn, http.Handler), 0) + hs.httpSrv.TLSNextProto = make(map[string]func(*http.Server, *tls.Conn, http.Handler)) return hs.httpSrv.ListenAndServeTLS(setting.CertFile, setting.KeyFile) } @@ -162,6 +174,7 @@ func (hs *HTTPServer) newMacaron() *macaron.Macaron { hs.mapStatic(m, route.Directory, "", pluginRoute) } + hs.mapStatic(m, setting.StaticRootPath, "build", "public/build") hs.mapStatic(m, setting.StaticRootPath, "", "public") hs.mapStatic(m, setting.StaticRootPath, "robots.txt", "robots.txt") @@ -229,6 +242,12 @@ func (hs *HTTPServer) mapStatic(m *macaron.Macaron, rootDir string, dir string, c.Resp.Header().Set("Cache-Control", "public, max-age=3600") } + if prefix == "public/build" { + headers = func(c *macaron.Context) { + c.Resp.Header().Set("Cache-Control", "public, max-age=31536000") + } + } + if setting.Env == setting.DEV { headers = func(c *macaron.Context) { c.Resp.Header().Set("Cache-Control", "max-age=0, must-revalidate, no-cache") diff --git a/pkg/api/index.go b/pkg/api/index.go index a1d21d1c686..2a905b474ce 100644 --- a/pkg/api/index.go +++ b/pkg/api/index.go @@ -42,23 +42,29 @@ func setIndexViewData(c *m.ReqContext) (*dtos.IndexViewData, error) { settings["appSubUrl"] = "" } + hasEditPermissionInFoldersQuery := m.HasEditPermissionInFoldersQuery{SignedInUser: c.SignedInUser} + if err := bus.Dispatch(&hasEditPermissionInFoldersQuery); err != nil { + return nil, err + } + var data = dtos.IndexViewData{ User: &dtos.CurrentUser{ - Id: c.UserId, - IsSignedIn: c.IsSignedIn, - Login: c.Login, - Email: c.Email, - Name: c.Name, - OrgCount: c.OrgCount, - OrgId: c.OrgId, - OrgName: c.OrgName, - OrgRole: c.OrgRole, - GravatarUrl: dtos.GetGravatarUrl(c.Email), - IsGrafanaAdmin: c.IsGrafanaAdmin, - LightTheme: prefs.Theme == "light", - Timezone: prefs.Timezone, - Locale: locale, - HelpFlags1: c.HelpFlags1, + Id: c.UserId, + IsSignedIn: c.IsSignedIn, + Login: c.Login, + Email: c.Email, + Name: c.Name, + OrgCount: c.OrgCount, + OrgId: c.OrgId, + OrgName: c.OrgName, + OrgRole: c.OrgRole, + GravatarUrl: dtos.GetGravatarUrl(c.Email), + IsGrafanaAdmin: c.IsGrafanaAdmin, + LightTheme: prefs.Theme == "light", + Timezone: prefs.Timezone, + Locale: locale, + HelpFlags1: c.HelpFlags1, + HasEditPermissionInFolders: hasEditPermissionInFoldersQuery.Result, }, Settings: settings, Theme: prefs.Theme, @@ -117,10 +123,28 @@ func setIndexViewData(c *m.ReqContext) (*dtos.IndexViewData, error) { Children: dashboardChildNavs, }) + if setting.ExploreEnabled { + data.NavTree = append(data.NavTree, &dtos.NavLink{ + Text: "Explore", + Id: "explore", + SubTitle: "Explore your data", + Icon: "fa fa-rocket", + Url: setting.AppSubUrl + "/explore", + Children: []*dtos.NavLink{ + {Text: "New tab", Icon: "gicon gicon-dashboard-new", Url: setting.AppSubUrl + "/explore"}, + }, + }) + } + if c.IsSignedIn { + // Only set login if it's different from the name + var login string + if c.SignedInUser.Login != c.SignedInUser.NameOrFallback() { + login = c.SignedInUser.Login + } profileNode := &dtos.NavLink{ Text: c.SignedInUser.NameOrFallback(), - SubTitle: c.SignedInUser.Login, + SubTitle: login, Id: "profile", Img: data.User.GravatarUrl, Url: setting.AppSubUrl + "/profile", @@ -284,6 +308,7 @@ func 
setIndexViewData(c *m.ReqContext) (*dtos.IndexViewData, error) { data.NavTree = append(data.NavTree, &dtos.NavLink{ Text: "Help", + SubTitle: fmt.Sprintf(`%s v%s (%s)`, setting.ApplicationName, setting.BuildVersion, setting.BuildCommit), Id: "help", Url: "#", Icon: "gicon gicon-question", diff --git a/pkg/api/login.go b/pkg/api/login.go index 671e5fb7ecd..9d0fa31946f 100644 --- a/pkg/api/login.go +++ b/pkg/api/login.go @@ -101,13 +101,14 @@ func LoginPost(c *m.ReqContext, cmd dtos.LoginCommand) Response { return Error(401, "Login is disabled", nil) } - authQuery := login.LoginUserQuery{ - Username: cmd.User, - Password: cmd.Password, - IpAddress: c.Req.RemoteAddr, + authQuery := &m.LoginUserQuery{ + ReqContext: c, + Username: cmd.User, + Password: cmd.Password, + IpAddress: c.Req.RemoteAddr, } - if err := bus.Dispatch(&authQuery); err != nil { + if err := bus.Dispatch(authQuery); err != nil { if err == login.ErrInvalidCredentials || err == login.ErrTooManyLoginAttempts { return Error(401, "Invalid username or password", err) } diff --git a/pkg/api/login_oauth.go b/pkg/api/login_oauth.go index 1dba38e9cbd..c4a5f8fdacf 100644 --- a/pkg/api/login_oauth.go +++ b/pkg/api/login_oauth.go @@ -6,7 +6,6 @@ import ( "crypto/tls" "crypto/x509" "encoding/base64" - "errors" "fmt" "io/ioutil" "net/http" @@ -16,22 +15,15 @@ import ( "github.com/grafana/grafana/pkg/bus" "github.com/grafana/grafana/pkg/log" + "github.com/grafana/grafana/pkg/login" "github.com/grafana/grafana/pkg/metrics" m "github.com/grafana/grafana/pkg/models" - "github.com/grafana/grafana/pkg/services/quota" "github.com/grafana/grafana/pkg/services/session" "github.com/grafana/grafana/pkg/setting" "github.com/grafana/grafana/pkg/social" ) -var ( - ErrProviderDeniedRequest = errors.New("Login provider denied login request") - ErrEmailNotAllowed = errors.New("Required email domain not fulfilled") - ErrSignUpNotAllowed = errors.New("Signup is not allowed for this adapter") - ErrUsersQuotaReached = errors.New("Users quota reached") - ErrNoEmail = errors.New("Login provider didn't return an email address") - oauthLogger = log.New("oauth") -) +var oauthLogger = log.New("oauth") func GenStateString() string { rnd := make([]byte, 32) @@ -56,7 +48,7 @@ func OAuthLogin(ctx *m.ReqContext) { if errorParam != "" { errorDesc := ctx.Query("error_description") oauthLogger.Error("failed to login ", "error", errorParam, "errorDesc", errorDesc) - redirectWithError(ctx, ErrProviderDeniedRequest, "error", errorParam, "errorDesc", errorDesc) + redirectWithError(ctx, login.ErrProviderDeniedRequest, "error", errorParam, "errorDesc", errorDesc) return } @@ -149,54 +141,43 @@ func OAuthLogin(ctx *m.ReqContext) { // validate that we got at least an email address if userInfo.Email == "" { - redirectWithError(ctx, ErrNoEmail) + redirectWithError(ctx, login.ErrNoEmail) return } // validate that the email is allowed to login to grafana if !connect.IsEmailAllowed(userInfo.Email) { - redirectWithError(ctx, ErrEmailNotAllowed) + redirectWithError(ctx, login.ErrEmailNotAllowed) return } - userQuery := m.GetUserByEmailQuery{Email: userInfo.Email} - err = bus.Dispatch(&userQuery) + extUser := &m.ExternalUserInfo{ + AuthModule: "oauth_" + name, + AuthId: userInfo.Id, + Name: userInfo.Name, + Login: userInfo.Login, + Email: userInfo.Email, + OrgRoles: map[int64]m.RoleType{}, + } - // create account if missing - if err == m.ErrUserNotFound { - if !connect.IsSignupAllowed() { - redirectWithError(ctx, ErrSignUpNotAllowed) - return - } - limitReached, err := 
quota.QuotaReached(ctx, "user") - if err != nil { - ctx.Handle(500, "Failed to get user quota", err) - return - } - if limitReached { - redirectWithError(ctx, ErrUsersQuotaReached) - return - } - cmd := m.CreateUserCommand{ - Login: userInfo.Login, - Email: userInfo.Email, - Name: userInfo.Name, - Company: userInfo.Company, - DefaultOrgRole: userInfo.Role, - } + if userInfo.Role != "" { + extUser.OrgRoles[1] = m.RoleType(userInfo.Role) + } - if err = bus.Dispatch(&cmd); err != nil { - ctx.Handle(500, "Failed to create account", err) - return - } - - userQuery.Result = &cmd.Result - } else if err != nil { - ctx.Handle(500, "Unexpected error", err) + // add/update user in grafana + cmd := &m.UpsertUserCommand{ + ReqContext: ctx, + ExternalUser: extUser, + SignupAllowed: connect.IsSignupAllowed(), + } + err = bus.Dispatch(cmd) + if err != nil { + redirectWithError(ctx, err) + return } // login - loginUserWithUser(userQuery.Result, ctx) + loginUserWithUser(cmd.Result, ctx) metrics.M_Api_Login_OAuth.Inc() diff --git a/pkg/api/metrics.go b/pkg/api/metrics.go index 5c06d652b70..c1b8ffe595e 100644 --- a/pkg/api/metrics.go +++ b/pkg/api/metrics.go @@ -75,7 +75,7 @@ func GetTestDataScenarios(c *m.ReqContext) Response { return JSON(200, &result) } -// Genereates a index out of range error +// Generates an index out of range error func GenerateError(c *m.ReqContext) Response { var array []string return JSON(200, array[20]) } diff --git a/pkg/api/playlist.go b/pkg/api/playlist.go index d2413dfbb4c..a90b6425cb6 100644 --- a/pkg/api/playlist.go +++ b/pkg/api/playlist.go @@ -33,7 +33,7 @@ func ValidateOrgPlaylist(c *m.ReqContext) { return } - if len(items) == 0 { + if len(items) == 0 && c.Context.Req.Method != "DELETE" { c.JsonApiErr(404, "Playlist is empty", itemsErr) return } diff --git a/pkg/api/pluginproxy/ds_proxy.go b/pkg/api/pluginproxy/ds_proxy.go index 3e1a93abb3b..f4eb1140aa0 100644 --- a/pkg/api/pluginproxy/ds_proxy.go +++ b/pkg/api/pluginproxy/ds_proxy.go @@ -189,12 +189,6 @@ func (proxy *DataSourceProxy) getDirector() func(req *http.Request) { } func (proxy *DataSourceProxy) validateRequest() error { - if proxy.ds.Type == m.DS_INFLUXDB { - if proxy.ctx.Query("db") != proxy.ds.Database { - return errors.New("Datasource is not configured to allow this database") - } - } - if !checkWhiteList(proxy.ctx, proxy.targetUrl.Host) { return errors.New("Target url is not a valid target") } diff --git a/pkg/api/route_register.go b/pkg/api/route_register.go index 76ebb633ca1..926de13c546 100644 --- a/pkg/api/route_register.go +++ b/pkg/api/route_register.go @@ -11,6 +11,8 @@ type Router interface { Get(pattern string, handlers ...macaron.Handler) *macaron.Route } +// RouteRegister allows you to add routes and macaron.Handlers +// that the web server should serve.
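The constructor exported below takes RegisterNamedMiddleware factories; each factory is invoked once per registered route and returns a handler bound to that route's name, which is how middleware.RequestMetrics and middleware.RequestTracing (wired up in server.go later in this diff) can label measurements per endpoint. A rough sketch with a made-up logging factory, assuming pkg/api scope and a standard log import; the exact contents of name are inferred from the tests below:

	func namedMiddlewareSketch() RouteRegister {
		return NewRouteRegister(func(name string) macaron.Handler {
			// one handler per route, closed over that route's name
			return func(c *macaron.Context) {
				log.Printf("serving %s", name) // hypothetical stand-in middleware
			}
		})
	}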
type RouteRegister interface { Get(string, ...macaron.Handler) Post(string, ...macaron.Handler) @@ -26,7 +28,8 @@ type RouteRegister interface { type RegisterNamedMiddleware func(name string) macaron.Handler -func newRouteRegister(namedMiddleware ...RegisterNamedMiddleware) RouteRegister { +// NewRouteRegister creates a new RouteRegister with all middlewares sent as params +func NewRouteRegister(namedMiddleware ...RegisterNamedMiddleware) RouteRegister { return &routeRegister{ prefix: "", routes: []route{}, diff --git a/pkg/api/route_register_test.go b/pkg/api/route_register_test.go index f8a043c48df..3b5d79599a8 100644 --- a/pkg/api/route_register_test.go +++ b/pkg/api/route_register_test.go @@ -51,7 +51,7 @@ func TestRouteSimpleRegister(t *testing.T) { } // Setup - rr := newRouteRegister(func(name string) macaron.Handler { + rr := NewRouteRegister(func(name string) macaron.Handler { return emptyHandler(name) }) @@ -96,7 +96,7 @@ func TestRouteGroupedRegister(t *testing.T) { } // Setup - rr := newRouteRegister() + rr := NewRouteRegister() rr.Delete("/admin", emptyHandler("1")) rr.Get("/down", emptyHandler("1"), emptyHandler("2")) @@ -150,7 +150,7 @@ func TestNamedMiddlewareRouteRegister(t *testing.T) { } // Setup - rr := newRouteRegister(func(name string) macaron.Handler { + rr := NewRouteRegister(func(name string) macaron.Handler { return emptyHandler(name) }) diff --git a/pkg/api/static/static.go b/pkg/api/static/static.go index 7a61c85b4f3..2a35dd11fa6 100644 --- a/pkg/api/static/static.go +++ b/pkg/api/static/static.go @@ -48,7 +48,7 @@ type StaticOptions struct { // Expires defines which user-defined function to use for producing a HTTP Expires Header // https://developers.google.com/speed/docs/insights/LeverageBrowserCaching AddHeaders func(ctx *macaron.Context) - // FileSystem is the interface for supporting any implmentation of file system. + // FileSystem is the interface for supporting any implementation of file system. 
FileSystem http.FileSystem } diff --git a/pkg/bus/bus.go b/pkg/bus/bus.go index 59d4592766e..32a591b6672 100644 --- a/pkg/bus/bus.go +++ b/pkg/bus/bus.go @@ -2,7 +2,7 @@ package bus import ( "context" - "fmt" + "errors" "reflect" ) @@ -10,6 +10,8 @@ type HandlerFunc interface{} type CtxHandlerFunc func() type Msg interface{} +var ErrHandlerNotFound = errors.New("handler not found") + type Bus interface { Dispatch(msg Msg) error DispatchCtx(ctx context.Context, msg Msg) error @@ -38,12 +40,17 @@ func New() Bus { return bus } +// Want to get rid of global bus +func GetBus() Bus { + return globalBus +} + func (b *InProcBus) DispatchCtx(ctx context.Context, msg Msg) error { var msgName = reflect.TypeOf(msg).Elem().Name() var handler = b.handlers[msgName] if handler == nil { - return fmt.Errorf("handler not found for %s", msgName) + return ErrHandlerNotFound } var params = make([]reflect.Value, 2) @@ -54,9 +61,8 @@ func (b *InProcBus) DispatchCtx(ctx context.Context, msg Msg) error { err := ret[0].Interface() if err == nil { return nil - } else { - return err.(error) } + return err.(error) } func (b *InProcBus) Dispatch(msg Msg) error { @@ -64,7 +70,7 @@ func (b *InProcBus) Dispatch(msg Msg) error { var handler = b.handlers[msgName] if handler == nil { - return fmt.Errorf("handler not found for %s", msgName) + return ErrHandlerNotFound } var params = make([]reflect.Value, 1) @@ -74,9 +80,8 @@ func (b *InProcBus) Dispatch(msg Msg) error { err := ret[0].Interface() if err == nil { return nil - } else { - return err.(error) } + return err.(error) } func (b *InProcBus) Publish(msg Msg) error { diff --git a/pkg/cmd/grafana-cli/commands/commands.go b/pkg/cmd/grafana-cli/commands/commands.go index d8f01bbdcab..43484749670 100644 --- a/pkg/cmd/grafana-cli/commands/commands.go +++ b/pkg/cmd/grafana-cli/commands/commands.go @@ -15,7 +15,8 @@ func runDbCommand(command func(commandLine CommandLine) error) func(context *cli return func(context *cli.Context) { cmd := &contextCommandLine{context} - setting.NewConfigContext(&setting.CommandLineArgs{ + cfg := setting.NewCfg() + cfg.Load(&setting.CommandLineArgs{ Config: cmd.String("config"), HomePath: cmd.String("homepath"), Args: flag.Args(), diff --git a/pkg/cmd/grafana-cli/commands/install_command.go b/pkg/cmd/grafana-cli/commands/install_command.go index f40bc9c081b..9bdb73a5858 100644 --- a/pkg/cmd/grafana-cli/commands/install_command.go +++ b/pkg/cmd/grafana-cli/commands/install_command.go @@ -33,7 +33,7 @@ func validateInput(c CommandLine, pluginFolder string) error { fileInfo, err := os.Stat(pluginsDir) if err != nil { if err = os.MkdirAll(pluginsDir, os.ModePerm); err != nil { - return errors.New(fmt.Sprintf("pluginsDir (%s) is not a directory", pluginsDir)) + return fmt.Errorf("pluginsDir (%s) is not a writable directory", pluginsDir) } return nil } diff --git a/pkg/cmd/grafana-cli/commands/ls_command.go b/pkg/cmd/grafana-cli/commands/ls_command.go index 7dcecb9d725..30745ce3172 100644 --- a/pkg/cmd/grafana-cli/commands/ls_command.go +++ b/pkg/cmd/grafana-cli/commands/ls_command.go @@ -24,7 +24,7 @@ var validateLsCommand = func(pluginDir string) error { return fmt.Errorf("error: %s", err) } - if pluginDirInfo.IsDir() == false { + if !pluginDirInfo.IsDir() { return errors.New("plugin path is not a directory") } diff --git a/pkg/cmd/grafana-cli/commands/remove_command.go b/pkg/cmd/grafana-cli/commands/remove_command.go index d5ed73def05..e51929dc95c 100644 --- a/pkg/cmd/grafana-cli/commands/remove_command.go +++ 
b/pkg/cmd/grafana-cli/commands/remove_command.go @@ -3,12 +3,11 @@ package commands import ( "errors" "fmt" - m "github.com/grafana/grafana/pkg/cmd/grafana-cli/models" - services "github.com/grafana/grafana/pkg/cmd/grafana-cli/services" "strings" + + services "github.com/grafana/grafana/pkg/cmd/grafana-cli/services" ) -var getPluginss func(path string) []m.InstalledPlugin = services.GetLocalPlugins var removePlugin func(pluginPath, id string) error = services.RemoveInstalledPlugin func removeCommand(c CommandLine) error { diff --git a/pkg/cmd/grafana-cli/commands/upgrade_all_command.go b/pkg/cmd/grafana-cli/commands/upgrade_all_command.go index 636292cce11..e01df2dab60 100644 --- a/pkg/cmd/grafana-cli/commands/upgrade_all_command.go +++ b/pkg/cmd/grafana-cli/commands/upgrade_all_command.go @@ -53,8 +53,7 @@ func upgradeAllCommand(c CommandLine) error { for _, p := range pluginsToUpgrade { logger.Infof("Updating %v \n", p.Id) - var err error - err = s.RemoveInstalledPlugin(pluginsDir, p.Id) + err := s.RemoveInstalledPlugin(pluginsDir, p.Id) if err != nil { return err } diff --git a/pkg/cmd/grafana-cli/services/services.go b/pkg/cmd/grafana-cli/services/services.go index d13e90d6a2f..e743d42022c 100644 --- a/pkg/cmd/grafana-cli/services/services.go +++ b/pkg/cmd/grafana-cli/services/services.go @@ -10,6 +10,7 @@ import ( "net/http" "net/url" "path" + "runtime" "time" "github.com/grafana/grafana/pkg/cmd/grafana-cli/logger" @@ -42,7 +43,7 @@ func Init(version string, skipTLSVerify bool) { } HttpClient = http.Client{ - Timeout: time.Duration(10 * time.Second), + Timeout: 10 * time.Second, Transport: tr, } } @@ -155,6 +156,8 @@ func sendRequest(repoUrl string, subPaths ...string) ([]byte, error) { req, err := http.NewRequest(http.MethodGet, u.String(), nil) req.Header.Set("grafana-version", grafanaVersion) + req.Header.Set("grafana-os", runtime.GOOS) + req.Header.Set("grafana-arch", runtime.GOARCH) req.Header.Set("User-Agent", "grafana "+grafanaVersion) if err != nil { diff --git a/pkg/cmd/grafana-server/main.go b/pkg/cmd/grafana-server/main.go index ab0e12f2d9f..c7ea6bb432b 100644 --- a/pkg/cmd/grafana-server/main.go +++ b/pkg/cmd/grafana-server/main.go @@ -18,6 +18,7 @@ import ( "github.com/grafana/grafana/pkg/metrics" "github.com/grafana/grafana/pkg/setting" + _ "github.com/grafana/grafana/pkg/extensions" _ "github.com/grafana/grafana/pkg/services/alerting/conditions" _ "github.com/grafana/grafana/pkg/services/alerting/notifiers" _ "github.com/grafana/grafana/pkg/tsdb/cloudwatch" @@ -33,12 +34,11 @@ import ( var version = "5.0.0" var commit = "NA" var buildstamp string -var build_date string +var enterprise string var configFile = flag.String("config", "", "path to config file") var homePath = flag.String("homepath", "", "path to grafana install/home path, defaults to working directory") var pidFile = flag.String("pidfile", "", "path to pid file") -var exitChan = make(chan int) func main() { v := flag.Bool("v", false, "prints current version and exits") @@ -77,45 +77,31 @@ func main() { setting.BuildVersion = version setting.BuildCommit = commit setting.BuildStamp = buildstampInt64 + setting.Enterprise, _ = strconv.ParseBool(enterprise) metrics.M_Grafana_Version.WithLabelValues(version).Set(1) - shutdownCompleted := make(chan int) + server := NewGrafanaServer() - go listenToSystemSignals(server, shutdownCompleted) + go listenToSystemSignals(server) - go func() { - code := 0 - if err := server.Start(); err != nil { - log.Error2("Startup failed", "error", err) - code = 1 - } + err := 
server.Run() - exitChan <- code - }() - - code := <-shutdownCompleted - log.Info2("Grafana shutdown completed.", "code", code) + trace.Stop() log.Close() - os.Exit(code) + + server.Exit(err) } -func listenToSystemSignals(server *GrafanaServerImpl, shutdownCompleted chan int) { +func listenToSystemSignals(server *GrafanaServerImpl) { signalChan := make(chan os.Signal, 1) ignoreChan := make(chan os.Signal, 1) - code := 0 signal.Notify(ignoreChan, syscall.SIGHUP) signal.Notify(signalChan, os.Interrupt, os.Kill, syscall.SIGTERM) select { case sig := <-signalChan: - trace.Stop() // Stops trace if profiling has been enabled - server.Shutdown(0, fmt.Sprintf("system signal: %s", sig)) - shutdownCompleted <- 0 - case code = <-exitChan: - trace.Stop() // Stops trace if profiling has been enabled - server.Shutdown(code, "startup error") - shutdownCompleted <- code + server.Shutdown(fmt.Sprintf("System signal: %s", sig)) } } diff --git a/pkg/cmd/grafana-server/server.go b/pkg/cmd/grafana-server/server.go index 5bbf43087ec..c715086a184 100644 --- a/pkg/cmd/grafana-server/server.go +++ b/pkg/cmd/grafana-server/server.go @@ -8,27 +8,36 @@ import ( "net" "os" "path/filepath" + "reflect" "strconv" "time" - "github.com/grafana/grafana/pkg/services/provisioning" + "github.com/facebookgo/inject" + "github.com/grafana/grafana/pkg/bus" + "github.com/grafana/grafana/pkg/middleware" + "github.com/grafana/grafana/pkg/registry" + "github.com/grafana/grafana/pkg/services/dashboards" "golang.org/x/sync/errgroup" "github.com/grafana/grafana/pkg/api" "github.com/grafana/grafana/pkg/log" "github.com/grafana/grafana/pkg/login" - "github.com/grafana/grafana/pkg/metrics" - "github.com/grafana/grafana/pkg/plugins" - "github.com/grafana/grafana/pkg/services/alerting" - "github.com/grafana/grafana/pkg/services/cleanup" - "github.com/grafana/grafana/pkg/services/notifications" - "github.com/grafana/grafana/pkg/services/search" "github.com/grafana/grafana/pkg/services/sqlstore" "github.com/grafana/grafana/pkg/setting" "github.com/grafana/grafana/pkg/social" "github.com/grafana/grafana/pkg/tracing" + + // self registering services + _ "github.com/grafana/grafana/pkg/extensions" + _ "github.com/grafana/grafana/pkg/metrics" + _ "github.com/grafana/grafana/pkg/plugins" + _ "github.com/grafana/grafana/pkg/services/alerting" + _ "github.com/grafana/grafana/pkg/services/cleanup" + _ "github.com/grafana/grafana/pkg/services/notifications" + _ "github.com/grafana/grafana/pkg/services/provisioning" + _ "github.com/grafana/grafana/pkg/services/search" ) func NewGrafanaServer() *GrafanaServerImpl { @@ -40,110 +49,152 @@ func NewGrafanaServer() *GrafanaServerImpl { shutdownFn: shutdownFn, childRoutines: childRoutines, log: log.New("server"), + cfg: setting.NewCfg(), } } type GrafanaServerImpl struct { - context context.Context - shutdownFn context.CancelFunc - childRoutines *errgroup.Group - log log.Logger + context context.Context + shutdownFn context.CancelFunc + childRoutines *errgroup.Group + log log.Logger + cfg *setting.Cfg + shutdownReason string + shutdownInProgress bool - httpServer *api.HTTPServer + RouteRegister api.RouteRegister `inject:""` + HttpServer *api.HTTPServer `inject:""` } -func (g *GrafanaServerImpl) Start() error { - g.initLogging() +func (g *GrafanaServerImpl) Run() error { + g.loadConfiguration() g.writePIDFile() - initSql() + // initSql + sqlstore.NewEngine() // TODO: this should return an error + sqlstore.EnsureAdminUser() - metrics.Init(setting.Cfg) - search.Init() login.Init() social.NewOAuthService() - 
pluginManager, err := plugins.NewPluginManager(g.context) - if err != nil { - return fmt.Errorf("Failed to start plugins. error: %v", err) - } - g.childRoutines.Go(func() error { return pluginManager.Run(g.context) }) - - if err := provisioning.Init(g.context, setting.HomePath, setting.Cfg); err != nil { - return fmt.Errorf("Failed to provision Grafana from config. error: %v", err) - } - - tracingCloser, err := tracing.Init(setting.Cfg) + tracingCloser, err := tracing.Init(g.cfg.Raw) if err != nil { return fmt.Errorf("Tracing settings is not valid. error: %v", err) } defer tracingCloser.Close() - // init alerting - if setting.AlertingEnabled && setting.ExecuteAlerts { - engine := alerting.NewEngine() - g.childRoutines.Go(func() error { return engine.Run(g.context) }) + serviceGraph := inject.Graph{} + serviceGraph.Provide(&inject.Object{Value: bus.GetBus()}) + serviceGraph.Provide(&inject.Object{Value: g.cfg}) + serviceGraph.Provide(&inject.Object{Value: dashboards.NewProvisioningService()}) + serviceGraph.Provide(&inject.Object{Value: api.NewRouteRegister(middleware.RequestMetrics, middleware.RequestTracing)}) + + // self registered services + services := registry.GetServices() + + // Add all services to dependency graph + for _, service := range services { + serviceGraph.Provide(&inject.Object{Value: service}) } - // cleanup service - cleanUpService := cleanup.NewCleanUpService() - g.childRoutines.Go(func() error { return cleanUpService.Run(g.context) }) + serviceGraph.Provide(&inject.Object{Value: g}) - if err = notifications.Init(); err != nil { - return fmt.Errorf("Notification service failed to initialize. error: %v", err) + // Inject dependencies to services + if err := serviceGraph.Populate(); err != nil { + return fmt.Errorf("Failed to populate service dependency: %v", err) + } + + // Init & start services + for _, service := range services { + if registry.IsDisabled(service) { + continue + } + + g.log.Info("Initializing " + reflect.TypeOf(service).Elem().Name()) + + if err := service.Init(); err != nil { + return fmt.Errorf("Service init failed: %v", err) + } + } + + // Start background services + for index := range services { + service, ok := services[index].(registry.BackgroundService) + if !ok { + continue + } + + if registry.IsDisabled(services[index]) { + continue + } + + g.childRoutines.Go(func() error { + // Skip starting new service when shutting down + // Can happen when service stop/return during startup + if g.shutdownInProgress { + return nil + } + + err := service.Run(g.context) + + // If error is not canceled then the service crashed + if err != context.Canceled && err != nil { + g.log.Error("Stopped "+reflect.TypeOf(service).Elem().Name(), "reason", err) + } else { + g.log.Info("Stopped "+reflect.TypeOf(service).Elem().Name(), "reason", err) + } + + // Mark that we are in shutdown mode + // So more services are not started + g.shutdownInProgress = true + return err + }) } sendSystemdNotification("READY=1") - return g.startHttpServer() + return g.childRoutines.Wait() } -func initSql() { - sqlstore.NewEngine() - sqlstore.EnsureAdminUser() -} - -func (g *GrafanaServerImpl) initLogging() { - err := setting.NewConfigContext(&setting.CommandLineArgs{ +func (g *GrafanaServerImpl) loadConfiguration() { + err := g.cfg.Load(&setting.CommandLineArgs{ Config: *configFile, HomePath: *homePath, Args: flag.Args(), }) if err != nil { - g.log.Error(err.Error()) + fmt.Fprintf(os.Stderr, "Failed to start grafana. 
error: %s\n", err.Error()) os.Exit(1) } - g.log.Info("Starting Grafana", "version", version, "commit", commit, "compiled", time.Unix(setting.BuildStamp, 0)) - setting.LogConfigurationInfo() + g.log.Info("Starting "+setting.ApplicationName, "version", version, "commit", commit, "compiled", time.Unix(setting.BuildStamp, 0)) + g.cfg.LogConfigSources() } -func (g *GrafanaServerImpl) startHttpServer() error { - g.httpServer = api.NewHTTPServer() - - err := g.httpServer.Start(g.context) - - if err != nil { - return fmt.Errorf("Fail to start server. error: %v", err) - } - - return nil -} - -func (g *GrafanaServerImpl) Shutdown(code int, reason string) { - g.log.Info("Shutdown started", "code", code, "reason", reason) - - err := g.httpServer.Shutdown(g.context) - if err != nil { - g.log.Error("Failed to shutdown server", "error", err) - } +func (g *GrafanaServerImpl) Shutdown(reason string) { + g.log.Info("Shutdown started", "reason", reason) + g.shutdownReason = reason + g.shutdownInProgress = true + // call cancel func on root context g.shutdownFn() - err = g.childRoutines.Wait() - if err != nil && err != context.Canceled { - g.log.Error("Server shutdown completed with an error", "error", err) + + // wait for child routines + g.childRoutines.Wait() +} + +func (g *GrafanaServerImpl) Exit(reason error) { + // default exit code is 1 + code := 1 + + if reason == context.Canceled && g.shutdownReason != "" { + reason = fmt.Errorf(g.shutdownReason) + code = 0 } + + g.log.Error("Server shutdown", "reason", reason) + os.Exit(code) } func (g *GrafanaServerImpl) writePIDFile() { diff --git a/pkg/components/apikeygen/apikeygen.go b/pkg/components/apikeygen/apikeygen.go index 310188a80ef..7824cf7667f 100644 --- a/pkg/components/apikeygen/apikeygen.go +++ b/pkg/components/apikeygen/apikeygen.go @@ -33,7 +33,7 @@ func New(orgId int64, name string) KeyGenResult { jsonString, _ := json.Marshal(jsonKey) - result.ClientSecret = base64.StdEncoding.EncodeToString([]byte(jsonString)) + result.ClientSecret = base64.StdEncoding.EncodeToString(jsonString) return result } @@ -44,7 +44,7 @@ func Decode(keyString string) (*ApiKeyJson, error) { } var keyObj ApiKeyJson - err = json.Unmarshal([]byte(jsonString), &keyObj) + err = json.Unmarshal(jsonString, &keyObj) if err != nil { return nil, ErrInvalidApiKey } diff --git a/pkg/components/dashdiffs/compare.go b/pkg/components/dashdiffs/compare.go index f5f2104cb92..ae940091ed1 100644 --- a/pkg/components/dashdiffs/compare.go +++ b/pkg/components/dashdiffs/compare.go @@ -6,7 +6,6 @@ import ( "github.com/grafana/grafana/pkg/bus" "github.com/grafana/grafana/pkg/components/simplejson" - "github.com/grafana/grafana/pkg/log" "github.com/grafana/grafana/pkg/models" diff "github.com/yudai/gojsondiff" deltaFormatter "github.com/yudai/gojsondiff/formatter" @@ -15,11 +14,8 @@ import ( var ( // ErrUnsupportedDiffType occurs when an invalid diff type is used. ErrUnsupportedDiffType = errors.New("dashdiff: unsupported diff type") - // ErrNilDiff occurs when two compared interfaces are identical. 
ErrNilDiff = errors.New("dashdiff: diff is nil") - - diffLogger = log.New("dashdiffs") ) type DiffType int @@ -145,5 +141,9 @@ func getDiff(baseData, newData *simplejson.Json) (interface{}, diff.Diff, error) left := make(map[string]interface{}) err = json.Unmarshal(leftBytes, &left) + if err != nil { + return nil, nil, err + } + return left, jsonDiff, nil } diff --git a/pkg/components/dashdiffs/formatter_json.go b/pkg/components/dashdiffs/formatter_json.go index 3a9ddcc4ee3..488a345d492 100644 --- a/pkg/components/dashdiffs/formatter_json.go +++ b/pkg/components/dashdiffs/formatter_json.go @@ -22,7 +22,7 @@ const ( ) var ( - // changeTypeToSymbol is used for populating the terminating characer in + // changeTypeToSymbol is used for populating the terminating character in // the diff changeTypeToSymbol = map[ChangeType]string{ ChangeNil: "", diff --git a/pkg/components/dynmap/dynmap.go b/pkg/components/dynmap/dynmap.go index 797694845cd..96effb24332 100644 --- a/pkg/components/dynmap/dynmap.go +++ b/pkg/components/dynmap/dynmap.go @@ -134,9 +134,8 @@ func (v *Value) get(key string) (*Value, error) { child, ok := obj.Map()[key] if ok { return child, nil - } else { - return nil, KeyNotFoundError{key} } + return nil, KeyNotFoundError{key} } return nil, err @@ -174,17 +173,13 @@ func (v *Object) GetObject(keys ...string) (*Object, error) { if err != nil { return nil, err - } else { - - obj, err := child.Object() - - if err != nil { - return nil, err - } else { - return obj, nil - } - } + obj, err := child.Object() + + if err != nil { + return nil, err + } + return obj, nil } // Gets the value at key path and attempts to typecast the value into a string. @@ -196,18 +191,17 @@ func (v *Object) GetString(keys ...string) (string, error) { if err != nil { return "", err - } else { - return child.String() } + return child.String() } func (v *Object) MustGetString(path string, def string) string { keys := strings.Split(path, ".") - if str, err := v.GetString(keys...); err != nil { + str, err := v.GetString(keys...) + if err != nil { return def - } else { - return str } + return str } // Gets the value at key path and attempts to typecast the value into null. @@ -233,16 +227,13 @@ func (v *Object) GetNumber(keys ...string) (json.Number, error) { if err != nil { return "", err - } else { - - n, err := child.Number() - - if err != nil { - return "", err - } else { - return n, nil - } } + n, err := child.Number() + + if err != nil { + return "", err + } + return n, nil } // Gets the value at key path and attempts to typecast the value into a float64. @@ -254,16 +245,13 @@ func (v *Object) GetFloat64(keys ...string) (float64, error) { if err != nil { return 0, err - } else { - - n, err := child.Float64() - - if err != nil { - return 0, err - } else { - return n, nil - } } + n, err := child.Float64() + + if err != nil { + return 0, err + } + return n, nil } // Gets the value at key path and attempts to typecast the value into a float64. @@ -275,16 +263,13 @@ func (v *Object) GetInt64(keys ...string) (int64, error) { if err != nil { return 0, err - } else { - - n, err := child.Int64() - - if err != nil { - return 0, err - } else { - return n, nil - } } + n, err := child.Int64() + + if err != nil { + return 0, err + } + return n, nil } // Gets the value at key path and attempts to typecast the value into a float64. 
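Almost every remaining hunk in this file is the same mechanical cleanup: an else branch that followed a return is dropped and the accessor flattened into early returns, keeping the happy path at the lowest indentation. The shape of the rewrite, reduced to a self-contained toy (strconv.Atoi stands in for dynmap's child lookup):

	package main

	import (
		"fmt"
		"strconv"
	)

	// before: the success path hides inside an else block
	func doubleOld(s string) (int, error) {
		n, err := strconv.Atoi(s)
		if err != nil {
			return 0, err
		} else {
			return n * 2, nil
		}
	}

	// after: return early on the error, then fall through
	func doubleNew(s string) (int, error) {
		n, err := strconv.Atoi(s)
		if err != nil {
			return 0, err
		}
		return n * 2, nil
	}

	func main() {
		fmt.Println(doubleOld("21")) // 42 <nil>
		fmt.Println(doubleNew("21")) // 42 <nil>
	}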
@@ -296,9 +281,8 @@ func (v *Object) GetInterface(keys ...string) (interface{}, error) { if err != nil { return nil, err - } else { - return child.Interface(), nil } + return child.Interface(), nil } // Gets the value at key path and attempts to typecast the value into a bool. @@ -311,7 +295,6 @@ func (v *Object) GetBoolean(keys ...string) (bool, error) { if err != nil { return false, err } - return child.Boolean() } @@ -328,11 +311,8 @@ func (v *Object) GetValueArray(keys ...string) ([]*Value, error) { if err != nil { return nil, err - } else { - - return child.Array() - } + return child.Array() } // Gets the value at key path and attempts to typecast the value into an array of objects. @@ -347,30 +327,24 @@ func (v *Object) GetObjectArray(keys ...string) ([]*Object, error) { if err != nil { return nil, err - } else { + } + array, err := child.Array() - array, err := child.Array() + if err != nil { + return nil, err + } + typedArray := make([]*Object, len(array)) + + for index, arrayItem := range array { + typedArrayItem, err := arrayItem. + Object() if err != nil { return nil, err - } else { - - typedArray := make([]*Object, len(array)) - - for index, arrayItem := range array { - typedArrayItem, err := arrayItem. - Object() - - if err != nil { - return nil, err - } else { - typedArray[index] = typedArrayItem - } - - } - return typedArray, nil } + typedArray[index] = typedArrayItem } + return typedArray, nil } // Gets the value at key path and attempts to typecast the value into an array of string. @@ -387,29 +361,23 @@ func (v *Object) GetStringArray(keys ...string) ([]string, error) { if err != nil { return nil, err - } else { + } + array, err := child.Array() - array, err := child.Array() + if err != nil { + return nil, err + } + typedArray := make([]string, len(array)) + + for index, arrayItem := range array { + typedArrayItem, err := arrayItem.String() if err != nil { return nil, err - } else { - - typedArray := make([]string, len(array)) - - for index, arrayItem := range array { - typedArrayItem, err := arrayItem.String() - - if err != nil { - return nil, err - } else { - typedArray[index] = typedArrayItem - } - - } - return typedArray, nil } + typedArray[index] = typedArrayItem } + return typedArray, nil } // Gets the value at key path and attempts to typecast the value into an array of numbers. @@ -424,29 +392,23 @@ func (v *Object) GetNumberArray(keys ...string) ([]json.Number, error) { if err != nil { return nil, err - } else { + } + array, err := child.Array() - array, err := child.Array() + if err != nil { + return nil, err + } + typedArray := make([]json.Number, len(array)) + + for index, arrayItem := range array { + typedArrayItem, err := arrayItem.Number() if err != nil { return nil, err - } else { - - typedArray := make([]json.Number, len(array)) - - for index, arrayItem := range array { - typedArrayItem, err := arrayItem.Number() - - if err != nil { - return nil, err - } else { - typedArray[index] = typedArrayItem - } - - } - return typedArray, nil } + typedArray[index] = typedArrayItem } + return typedArray, nil } // Gets the value at key path and attempts to typecast the value into an array of floats. 
@@ -456,29 +418,23 @@ func (v *Object) GetFloat64Array(keys ...string) ([]float64, error) { if err != nil { return nil, err - } else { + } + array, err := child.Array() - array, err := child.Array() + if err != nil { + return nil, err + } + typedArray := make([]float64, len(array)) + + for index, arrayItem := range array { + typedArrayItem, err := arrayItem.Float64() if err != nil { return nil, err - } else { - - typedArray := make([]float64, len(array)) - - for index, arrayItem := range array { - typedArrayItem, err := arrayItem.Float64() - - if err != nil { - return nil, err - } else { - typedArray[index] = typedArrayItem - } - - } - return typedArray, nil } + typedArray[index] = typedArrayItem } + return typedArray, nil } // Gets the value at key path and attempts to typecast the value into an array of ints. @@ -488,29 +444,23 @@ func (v *Object) GetInt64Array(keys ...string) ([]int64, error) { if err != nil { return nil, err - } else { + } + array, err := child.Array() - array, err := child.Array() + if err != nil { + return nil, err + } + typedArray := make([]int64, len(array)) + + for index, arrayItem := range array { + typedArrayItem, err := arrayItem.Int64() if err != nil { return nil, err - } else { - - typedArray := make([]int64, len(array)) - - for index, arrayItem := range array { - typedArrayItem, err := arrayItem.Int64() - - if err != nil { - return nil, err - } else { - typedArray[index] = typedArrayItem - } - - } - return typedArray, nil } + typedArray[index] = typedArrayItem } + return typedArray, nil } // Gets the value at key path and attempts to typecast the value into an array of bools. @@ -520,29 +470,23 @@ func (v *Object) GetBooleanArray(keys ...string) ([]bool, error) { if err != nil { return nil, err - } else { + } + array, err := child.Array() - array, err := child.Array() + if err != nil { + return nil, err + } + typedArray := make([]bool, len(array)) + + for index, arrayItem := range array { + typedArrayItem, err := arrayItem.Boolean() if err != nil { return nil, err - } else { - - typedArray := make([]bool, len(array)) - - for index, arrayItem := range array { - typedArrayItem, err := arrayItem.Boolean() - - if err != nil { - return nil, err - } else { - typedArray[index] = typedArrayItem - } - - } - return typedArray, nil } + typedArray[index] = typedArrayItem } + return typedArray, nil } // Gets the value at key path and attempts to typecast the value into an array of nulls. @@ -552,29 +496,23 @@ func (v *Object) GetNullArray(keys ...string) (int64, error) { if err != nil { return 0, err - } else { + } + array, err := child.Array() - array, err := child.Array() + if err != nil { + return 0, err + } + var length int64 = 0 + + for _, arrayItem := range array { + err := arrayItem.Null() if err != nil { return 0, err - } else { - - var length int64 = 0 - - for _, arrayItem := range array { - err := arrayItem.Null() - - if err != nil { - return 0, err - } else { - length++ - } - - } - return length, nil } + length++ } + return length, nil } // Returns an error if the value is not actually null @@ -585,15 +523,12 @@ func (v *Value) Null() error { switch v.data.(type) { case nil: valid = v.exists // Valid only if j also exists, since other values could possibly also be nil - break } if valid { return nil } - return ErrNotNull - } // Attempts to typecast the current value into an array. 
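The other recurring change in this file deletes break statements from type switches, as in the Null hunk above and the Array hunk that follows. Go switch cases never fall through (unlike C), so each break was a no-op and removing it changes nothing about how the valid flag is set. A self-contained illustration:

	package main

	import (
		"encoding/json"
		"fmt"
	)

	func main() {
		var data interface{} = json.Number("42")

		valid := false
		switch data.(type) {
		case json.Number:
			valid = true // no break needed: execution stops at the end of the case
		}

		fmt.Println(valid) // prints: true
	}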
@@ -607,24 +542,19 @@ func (v *Value) Array() ([]*Value, error) { switch v.data.(type) { case []interface{}: valid = true - break } // Unsure if this is a good way to use slices, it's probably not var slice []*Value if valid { - for _, element := range v.data.([]interface{}) { child := Value{element, true} slice = append(slice, &child) } - return slice, nil } - return slice, ErrNotArray - } // Attempts to typecast the current value into a number. @@ -638,7 +568,6 @@ func (v *Value) Number() (json.Number, error) { switch v.data.(type) { case json.Number: valid = true - break } if valid { @@ -687,7 +616,6 @@ func (v *Value) Boolean() (bool, error) { switch v.data.(type) { case bool: valid = true - break } if valid { @@ -709,7 +637,6 @@ func (v *Value) Object() (*Object, error) { switch v.data.(type) { case map[string]interface{}: valid = true - break } if valid { @@ -746,7 +673,6 @@ func (v *Value) ObjectArray() ([]*Object, error) { switch v.data.(type) { case []interface{}: valid = true - break } // Unsure if this is a good way to use slices, it's probably not @@ -782,7 +708,6 @@ func (v *Value) String() (string, error) { switch v.data.(type) { case string: valid = true - break } if valid { diff --git a/pkg/components/dynmap/dynmap_test.go b/pkg/components/dynmap/dynmap_test.go index cc002ea06e0..62d356bd67d 100644 --- a/pkg/components/dynmap/dynmap_test.go +++ b/pkg/components/dynmap/dynmap_test.go @@ -21,7 +21,7 @@ func NewAssert(t *testing.T) *Assert { } func (assert *Assert) True(value bool, message string) { - if value == false { + if !value { log.Panicln("Assert: ", message) } } @@ -60,6 +60,7 @@ func TestFirst(t *testing.T) { }` j, err := NewObjectFromBytes([]byte(testJSON)) + assert.True(err == nil, "failed to create new object from bytes") a, err := j.GetObject("address") assert.True(a != nil && err == nil, "failed to create json from string") @@ -76,10 +77,10 @@ func TestFirst(t *testing.T) { assert.True(s == "fallback", "must get string return fallback") s, err = j.GetString("name") - assert.True(s == "anton" && err == nil, "name shoud match") + assert.True(s == "anton" && err == nil, "name should match") s, err = j.GetString("address", "street") - assert.True(s == "Street 42" && err == nil, "street shoud match") + assert.True(s == "Street 42" && err == nil, "street should match") //log.Println("s: ", s.String()) _, err = j.GetNumber("age") @@ -108,6 +109,7 @@ func TestFirst(t *testing.T) { //log.Println("address: ", address) s, err = address.GetString("street") + assert.True(s == "Street 42" && err == nil, "street mismatching") addressAsString, err := j.GetString("address") assert.True(addressAsString == "" && err != nil, "address should not be an string") @@ -119,13 +121,13 @@ func TestFirst(t *testing.T) { assert.True(s == "" && err != nil, "nonexistent string fail") b, err := j.GetBoolean("true") - assert.True(b == true && err == nil, "bool true test") + assert.True(b && err == nil, "bool true test") b, err = j.GetBoolean("false") - assert.True(b == false && err == nil, "bool false test") + assert.True(!b && err == nil, "bool false test") b, err = j.GetBoolean("invalid_field") - assert.True(b == false && err != nil, "bool invalid test") + assert.True(!b && err != nil, "bool invalid test") list, err := j.GetValueArray("list") assert.True(list != nil && err == nil, "list should be an array") @@ -148,6 +150,7 @@ func TestFirst(t *testing.T) { //assert.True(element.IsObject() == true, "first fail") element, err := elementValue.Object() + assert.True(err == nil, "create element 
fail") s, err = element.GetString("street") assert.True(s == "Street 42" && err == nil, "second fail") @@ -232,6 +235,7 @@ func TestSecond(t *testing.T) { assert.True(fromName == "Tom Brady" && err == nil, "fromName mismatch") actions, err := dataItem.GetObjectArray("actions") + assert.True(err == nil, "get object from array failed") for index, action := range actions { diff --git a/pkg/components/imguploader/azureblobuploader.go b/pkg/components/imguploader/azureblobuploader.go index 40d2de836be..3c0ac5b8884 100644 --- a/pkg/components/imguploader/azureblobuploader.go +++ b/pkg/components/imguploader/azureblobuploader.go @@ -225,7 +225,7 @@ func (a *Auth) SignRequest(req *http.Request) { ) decodedKey, _ := base64.StdEncoding.DecodeString(a.Key) - sha256 := hmac.New(sha256.New, []byte(decodedKey)) + sha256 := hmac.New(sha256.New, decodedKey) sha256.Write([]byte(strToSign)) signature := base64.StdEncoding.EncodeToString(sha256.Sum(nil)) diff --git a/pkg/components/imguploader/azureblobuploader_test.go b/pkg/components/imguploader/azureblobuploader_test.go index 570e105b321..c0c7889a155 100644 --- a/pkg/components/imguploader/azureblobuploader_test.go +++ b/pkg/components/imguploader/azureblobuploader_test.go @@ -10,9 +10,11 @@ import ( func TestUploadToAzureBlob(t *testing.T) { SkipConvey("[Integration test] for external_image_store.azure_blob", t, func() { - err := setting.NewConfigContext(&setting.CommandLineArgs{ + cfg := setting.NewCfg() + err := cfg.Load(&setting.CommandLineArgs{ HomePath: "../../../", }) + So(err, ShouldBeNil) uploader, _ := NewImageUploader() diff --git a/pkg/components/imguploader/gcsuploader_test.go b/pkg/components/imguploader/gcsuploader_test.go index bdc21084dbf..58cb21c184c 100644 --- a/pkg/components/imguploader/gcsuploader_test.go +++ b/pkg/components/imguploader/gcsuploader_test.go @@ -10,7 +10,8 @@ import ( func TestUploadToGCS(t *testing.T) { SkipConvey("[Integration test] for external_image_store.gcs", t, func() { - setting.NewConfigContext(&setting.CommandLineArgs{ + cfg := setting.NewCfg() + cfg.Load(&setting.CommandLineArgs{ HomePath: "../../../", }) diff --git a/pkg/components/imguploader/imguploader.go b/pkg/components/imguploader/imguploader.go index 52a31f9f606..93f69cadd46 100644 --- a/pkg/components/imguploader/imguploader.go +++ b/pkg/components/imguploader/imguploader.go @@ -3,9 +3,10 @@ package imguploader import ( "context" "fmt" - "github.com/grafana/grafana/pkg/log" "regexp" + "github.com/grafana/grafana/pkg/log" + "github.com/grafana/grafana/pkg/setting" ) @@ -24,7 +25,7 @@ func NewImageUploader() (ImageUploader, error) { switch setting.ImageUploadProvider { case "s3": - s3sec, err := setting.Cfg.GetSection("external_image_storage.s3") + s3sec, err := setting.Raw.GetSection("external_image_storage.s3") if err != nil { return nil, err } @@ -51,7 +52,7 @@ func NewImageUploader() (ImageUploader, error) { return NewS3Uploader(region, bucket, path, "public-read", accessKey, secretKey), nil case "webdav": - webdavSec, err := setting.Cfg.GetSection("external_image_storage.webdav") + webdavSec, err := setting.Raw.GetSection("external_image_storage.webdav") if err != nil { return nil, err } @@ -67,7 +68,7 @@ func NewImageUploader() (ImageUploader, error) { return NewWebdavImageUploader(url, username, password, public_url) case "gcs": - gcssec, err := setting.Cfg.GetSection("external_image_storage.gcs") + gcssec, err := setting.Raw.GetSection("external_image_storage.gcs") if err != nil { return nil, err } @@ -78,7 +79,7 @@ func NewImageUploader() 
(ImageUploader, error) { return NewGCSUploader(keyFile, bucketName, path), nil case "azure_blob": - azureBlobSec, err := setting.Cfg.GetSection("external_image_storage.azure_blob") + azureBlobSec, err := setting.Raw.GetSection("external_image_storage.azure_blob") if err != nil { return nil, err } diff --git a/pkg/components/imguploader/imguploader_test.go b/pkg/components/imguploader/imguploader_test.go index b0311dac975..570e36a47e3 100644 --- a/pkg/components/imguploader/imguploader_test.go +++ b/pkg/components/imguploader/imguploader_test.go @@ -11,14 +11,16 @@ import ( func TestImageUploaderFactory(t *testing.T) { Convey("Can create image uploader for ", t, func() { Convey("S3ImageUploader config", func() { - setting.NewConfigContext(&setting.CommandLineArgs{ + cfg := setting.NewCfg() + cfg.Load(&setting.CommandLineArgs{ HomePath: "../../../", }) setting.ImageUploadProvider = "s3" Convey("with bucket url https://foo.bar.baz.s3-us-east-2.amazonaws.com", func() { - s3sec, err := setting.Cfg.GetSection("external_image_storage.s3") + s3sec, err := setting.Raw.GetSection("external_image_storage.s3") + So(err, ShouldBeNil) s3sec.NewKey("bucket_url", "https://foo.bar.baz.s3-us-east-2.amazonaws.com") s3sec.NewKey("access_key", "access_key") s3sec.NewKey("secret_key", "secret_key") @@ -36,7 +38,8 @@ func TestImageUploaderFactory(t *testing.T) { }) Convey("with bucket url https://s3.amazonaws.com/mybucket", func() { - s3sec, err := setting.Cfg.GetSection("external_image_storage.s3") + s3sec, err := setting.Raw.GetSection("external_image_storage.s3") + So(err, ShouldBeNil) s3sec.NewKey("bucket_url", "https://s3.amazonaws.com/my.bucket.com") s3sec.NewKey("access_key", "access_key") s3sec.NewKey("secret_key", "secret_key") @@ -54,16 +57,16 @@ func TestImageUploaderFactory(t *testing.T) { }) Convey("with bucket url https://s3-us-west-2.amazonaws.com/mybucket", func() { - s3sec, err := setting.Cfg.GetSection("external_image_storage.s3") + s3sec, err := setting.Raw.GetSection("external_image_storage.s3") + So(err, ShouldBeNil) s3sec.NewKey("bucket_url", "https://s3-us-west-2.amazonaws.com/my.bucket.com") s3sec.NewKey("access_key", "access_key") s3sec.NewKey("secret_key", "secret_key") uploader, err := NewImageUploader() - So(err, ShouldBeNil) - original, ok := uploader.(*S3Uploader) + original, ok := uploader.(*S3Uploader) So(ok, ShouldBeTrue) So(original.region, ShouldEqual, "us-west-2") So(original.bucket, ShouldEqual, "my.bucket.com") @@ -75,13 +78,15 @@ func TestImageUploaderFactory(t *testing.T) { Convey("Webdav uploader", func() { var err error - setting.NewConfigContext(&setting.CommandLineArgs{ + cfg := setting.NewCfg() + cfg.Load(&setting.CommandLineArgs{ HomePath: "../../../", }) setting.ImageUploadProvider = "webdav" - webdavSec, err := setting.Cfg.GetSection("external_image_storage.webdav") + webdavSec, err := cfg.Raw.GetSection("external_image_storage.webdav") + So(err, ShouldBeNil) webdavSec.NewKey("url", "webdavUrl") webdavSec.NewKey("username", "username") webdavSec.NewKey("password", "password") @@ -100,43 +105,45 @@ func TestImageUploaderFactory(t *testing.T) { Convey("GCS uploader", func() { var err error - setting.NewConfigContext(&setting.CommandLineArgs{ + cfg := setting.NewCfg() + cfg.Load(&setting.CommandLineArgs{ HomePath: "../../../", }) setting.ImageUploadProvider = "gcs" - gcpSec, err := setting.Cfg.GetSection("external_image_storage.gcs") + gcpSec, err := cfg.Raw.GetSection("external_image_storage.gcs") + So(err, ShouldBeNil) gcpSec.NewKey("key_file", 
"/etc/secrets/project-79a52befa3f6.json") gcpSec.NewKey("bucket", "project-grafana-east") uploader, err := NewImageUploader() - So(err, ShouldBeNil) - original, ok := uploader.(*GCSUploader) + original, ok := uploader.(*GCSUploader) So(ok, ShouldBeTrue) So(original.keyFile, ShouldEqual, "/etc/secrets/project-79a52befa3f6.json") So(original.bucket, ShouldEqual, "project-grafana-east") }) Convey("AzureBlobUploader config", func() { - setting.NewConfigContext(&setting.CommandLineArgs{ + cfg := setting.NewCfg() + cfg.Load(&setting.CommandLineArgs{ HomePath: "../../../", }) setting.ImageUploadProvider = "azure_blob" Convey("with container name", func() { - azureBlobSec, err := setting.Cfg.GetSection("external_image_storage.azure_blob") + azureBlobSec, err := cfg.Raw.GetSection("external_image_storage.azure_blob") + So(err, ShouldBeNil) azureBlobSec.NewKey("account_name", "account_name") azureBlobSec.NewKey("account_key", "account_key") azureBlobSec.NewKey("container_name", "container_name") uploader, err := NewImageUploader() - So(err, ShouldBeNil) - original, ok := uploader.(*AzureBlobUploader) + original, ok := uploader.(*AzureBlobUploader) So(ok, ShouldBeTrue) So(original.account_name, ShouldEqual, "account_name") So(original.account_key, ShouldEqual, "account_key") @@ -147,7 +154,8 @@ func TestImageUploaderFactory(t *testing.T) { Convey("Local uploader", func() { var err error - setting.NewConfigContext(&setting.CommandLineArgs{ + cfg := setting.NewCfg() + cfg.Load(&setting.CommandLineArgs{ HomePath: "../../../", }) diff --git a/pkg/components/imguploader/s3uploader_test.go b/pkg/components/imguploader/s3uploader_test.go index b02d4676b5e..0e43740ef9b 100644 --- a/pkg/components/imguploader/s3uploader_test.go +++ b/pkg/components/imguploader/s3uploader_test.go @@ -10,7 +10,8 @@ import ( func TestUploadToS3(t *testing.T) { SkipConvey("[Integration test] for external_image_store.s3", t, func() { - setting.NewConfigContext(&setting.CommandLineArgs{ + cfg := setting.NewCfg() + cfg.Load(&setting.CommandLineArgs{ HomePath: "../../../", }) diff --git a/pkg/components/imguploader/webdavuploader.go b/pkg/components/imguploader/webdavuploader.go index 53d75247c76..f5478ea8a2f 100644 --- a/pkg/components/imguploader/webdavuploader.go +++ b/pkg/components/imguploader/webdavuploader.go @@ -41,14 +41,20 @@ func (u *WebdavUploader) Upload(ctx context.Context, pa string) (string, error) url.Path = path.Join(url.Path, filename) imgData, err := ioutil.ReadFile(pa) + if err != nil { + return "", err + } + req, err := http.NewRequest("PUT", url.String(), bytes.NewReader(imgData)) + if err != nil { + return "", err + } if u.username != "" { req.SetBasicAuth(u.username, u.password) } res, err := netClient.Do(req) - if err != nil { return "", err } diff --git a/pkg/components/null/float.go b/pkg/components/null/float.go index 1e78946e878..4f783f2c584 100644 --- a/pkg/components/null/float.go +++ b/pkg/components/null/float.go @@ -50,7 +50,7 @@ func (f *Float) UnmarshalJSON(data []byte) error { } switch x := v.(type) { case float64: - f.Float64 = float64(x) + f.Float64 = x case map[string]interface{}: err = json.Unmarshal(data, &f.NullFloat64) case nil: @@ -106,6 +106,15 @@ func (f Float) String() string { return fmt.Sprintf("%1.3f", f.Float64) } +// FullString returns float as string in full precision +func (f Float) FullString() string { + if !f.Valid { + return "null" + } + + return fmt.Sprintf("%f", f.Float64) +} + // SetValid changes this Float's value and also sets it to be non-null. 
func (f *Float) SetValid(n float64) { f.Float64 = n diff --git a/pkg/extensions/main.go b/pkg/extensions/main.go new file mode 100644 index 00000000000..34ac9da7e86 --- /dev/null +++ b/pkg/extensions/main.go @@ -0,0 +1,3 @@ +package extensions + +import _ "github.com/pkg/errors" diff --git a/pkg/log/file.go b/pkg/log/file.go index 721db1e55b3..d137adbf3de 100644 --- a/pkg/log/file.go +++ b/pkg/log/file.go @@ -99,10 +99,7 @@ func (w *FileLogWriter) StartLogger() error { return err } w.mw.SetFd(fd) - if err = w.initFd(); err != nil { - return err - } - return nil + return w.initFd() } func (w *FileLogWriter) docheck(size int) { diff --git a/pkg/log/file_test.go b/pkg/log/file_test.go index 3e98e0786cc..97a3b8fe82f 100644 --- a/pkg/log/file_test.go +++ b/pkg/log/file_test.go @@ -32,7 +32,9 @@ func TestLogFile(t *testing.T) { Convey("Logging should add lines", func() { err := fileLogWrite.WriteLine("test1\n") + So(err, ShouldBeNil) err = fileLogWrite.WriteLine("test2\n") + So(err, ShouldBeNil) err = fileLogWrite.WriteLine("test3\n") So(err, ShouldBeNil) So(fileLogWrite.maxlines_curlines, ShouldEqual, 3) diff --git a/pkg/login/auth.go b/pkg/login/auth.go index 5527c7271d6..215a22cde33 100644 --- a/pkg/login/auth.go +++ b/pkg/login/auth.go @@ -8,23 +8,22 @@ import ( ) var ( - ErrInvalidCredentials = errors.New("Invalid Username or Password") - ErrTooManyLoginAttempts = errors.New("Too many consecutive incorrect login attempts for user. Login for user temporarily blocked") + ErrEmailNotAllowed = errors.New("Required email domain not fulfilled") + ErrInvalidCredentials = errors.New("Invalid Username or Password") + ErrNoEmail = errors.New("Login provider didn't return an email address") + ErrProviderDeniedRequest = errors.New("Login provider denied login request") + ErrSignUpNotAllowed = errors.New("Signup is not allowed for this adapter") + ErrTooManyLoginAttempts = errors.New("Too many consecutive incorrect login attempts for user. 
Login for user temporarily blocked") + ErrUsersQuotaReached = errors.New("Users quota reached") + ErrGettingUserQuota = errors.New("Error getting user quota") ) -type LoginUserQuery struct { - Username string - Password string - User *m.User - IpAddress string -} - func Init() { bus.AddHandler("auth", AuthenticateUser) loadLdapConfig() } -func AuthenticateUser(query *LoginUserQuery) error { +func AuthenticateUser(query *m.LoginUserQuery) error { if err := validateLoginAttempts(query.Username); err != nil { return err } diff --git a/pkg/login/auth_test.go b/pkg/login/auth_test.go index 59d3c8f2b33..932125c410e 100644 --- a/pkg/login/auth_test.go +++ b/pkg/login/auth_test.go @@ -151,7 +151,7 @@ func TestAuthenticateUser(t *testing.T) { } type authScenarioContext struct { - loginUserQuery *LoginUserQuery + loginUserQuery *m.LoginUserQuery grafanaLoginWasCalled bool ldapLoginWasCalled bool loginAttemptValidationWasCalled bool @@ -161,14 +161,14 @@ type authScenarioContext struct { type authScenarioFunc func(sc *authScenarioContext) func mockLoginUsingGrafanaDB(err error, sc *authScenarioContext) { - loginUsingGrafanaDB = func(query *LoginUserQuery) error { + loginUsingGrafanaDB = func(query *m.LoginUserQuery) error { sc.grafanaLoginWasCalled = true return err } } func mockLoginUsingLdap(enabled bool, err error, sc *authScenarioContext) { - loginUsingLdap = func(query *LoginUserQuery) (bool, error) { + loginUsingLdap = func(query *m.LoginUserQuery) (bool, error) { sc.ldapLoginWasCalled = true return enabled, err } @@ -182,7 +182,7 @@ func mockLoginAttemptValidation(err error, sc *authScenarioContext) { } func mockSaveInvalidLoginAttempt(sc *authScenarioContext) { - saveInvalidLoginAttempt = func(query *LoginUserQuery) { + saveInvalidLoginAttempt = func(query *m.LoginUserQuery) { sc.saveInvalidLoginAttemptWasCalled = true } } @@ -195,7 +195,7 @@ func authScenario(desc string, fn authScenarioFunc) { origSaveInvalidLoginAttempt := saveInvalidLoginAttempt sc := &authScenarioContext{ - loginUserQuery: &LoginUserQuery{ + loginUserQuery: &m.LoginUserQuery{ Username: "user", Password: "pwd", IpAddress: "192.168.1.1:56433", diff --git a/pkg/login/brute_force_login_protection.go b/pkg/login/brute_force_login_protection.go index 2ea93979c7a..d524c420540 100644 --- a/pkg/login/brute_force_login_protection.go +++ b/pkg/login/brute_force_login_protection.go @@ -9,8 +9,8 @@ import ( ) var ( - maxInvalidLoginAttempts int64 = 5 - loginAttemptsWindow time.Duration = time.Minute * 5 + maxInvalidLoginAttempts int64 = 5 + loginAttemptsWindow = time.Minute * 5 ) var validateLoginAttempts = func(username string) error { @@ -34,7 +34,7 @@ var validateLoginAttempts = func(username string) error { return nil } -var saveInvalidLoginAttempt = func(query *LoginUserQuery) { +var saveInvalidLoginAttempt = func(query *m.LoginUserQuery) { if setting.DisableBruteForceLoginProtection { return } diff --git a/pkg/login/brute_force_login_protection_test.go b/pkg/login/brute_force_login_protection_test.go index 5375134ba88..aca100760c7 100644 --- a/pkg/login/brute_force_login_protection_test.go +++ b/pkg/login/brute_force_login_protection_test.go @@ -50,7 +50,7 @@ func TestLoginAttemptsValidation(t *testing.T) { return nil }) - saveInvalidLoginAttempt(&LoginUserQuery{ + saveInvalidLoginAttempt(&m.LoginUserQuery{ Username: "user", Password: "pwd", IpAddress: "192.168.1.1:56433", @@ -103,7 +103,7 @@ func TestLoginAttemptsValidation(t *testing.T) { return nil }) - saveInvalidLoginAttempt(&LoginUserQuery{ + 
saveInvalidLoginAttempt(&m.LoginUserQuery{ Username: "user", Password: "pwd", IpAddress: "192.168.1.1:56433", diff --git a/pkg/login/ext_user.go b/pkg/login/ext_user.go new file mode 100644 index 00000000000..e1d5e3e3b48 --- /dev/null +++ b/pkg/login/ext_user.go @@ -0,0 +1,184 @@ +package login + +import ( + "github.com/grafana/grafana/pkg/bus" + "github.com/grafana/grafana/pkg/log" + m "github.com/grafana/grafana/pkg/models" + "github.com/grafana/grafana/pkg/services/quota" +) + +func init() { + bus.AddHandler("auth", UpsertUser) +} + +func UpsertUser(cmd *m.UpsertUserCommand) error { + extUser := cmd.ExternalUser + + userQuery := &m.GetUserByAuthInfoQuery{ + AuthModule: extUser.AuthModule, + AuthId: extUser.AuthId, + UserId: extUser.UserId, + Email: extUser.Email, + Login: extUser.Login, + } + err := bus.Dispatch(userQuery) + if err != m.ErrUserNotFound && err != nil { + return err + } + + if err != nil { + if !cmd.SignupAllowed { + log.Warn("Not allowing %s login, user not found in internal user database and allow signup = false", extUser.AuthModule) + return ErrInvalidCredentials + } + + limitReached, err := quota.QuotaReached(cmd.ReqContext, "user") + if err != nil { + log.Warn("Error getting user quota", "err", err) + return ErrGettingUserQuota + } + if limitReached { + return ErrUsersQuotaReached + } + + cmd.Result, err = createUser(extUser) + if err != nil { + return err + } + + if extUser.AuthModule != "" && extUser.AuthId != "" { + cmd2 := &m.SetAuthInfoCommand{ + UserId: cmd.Result.Id, + AuthModule: extUser.AuthModule, + AuthId: extUser.AuthId, + } + if err := bus.Dispatch(cmd2); err != nil { + return err + } + } + + } else { + cmd.Result = userQuery.Result + + err = updateUser(cmd.Result, extUser) + if err != nil { + return err + } + } + + return syncOrgRoles(cmd.Result, extUser) +} + +func createUser(extUser *m.ExternalUserInfo) (*m.User, error) { + cmd := &m.CreateUserCommand{ + Login: extUser.Login, + Email: extUser.Email, + Name: extUser.Name, + SkipOrgSetup: len(extUser.OrgRoles) > 0, + } + if err := bus.Dispatch(cmd); err != nil { + return nil, err + } + + return &cmd.Result, nil +} + +func updateUser(user *m.User, extUser *m.ExternalUserInfo) error { + // sync user info + updateCmd := &m.UpdateUserCommand{ + UserId: user.Id, + } + + needsUpdate := false + if extUser.Login != "" && extUser.Login != user.Login { + updateCmd.Login = extUser.Login + user.Login = extUser.Login + needsUpdate = true + } + + if extUser.Email != "" && extUser.Email != user.Email { + updateCmd.Email = extUser.Email + user.Email = extUser.Email + needsUpdate = true + } + + if extUser.Name != "" && extUser.Name != user.Name { + updateCmd.Name = extUser.Name + user.Name = extUser.Name + needsUpdate = true + } + + if !needsUpdate { + return nil + } + + log.Debug("Syncing user info", "id", user.Id, "update", updateCmd) + return bus.Dispatch(updateCmd) +} + +func syncOrgRoles(user *m.User, extUser *m.ExternalUserInfo) error { + // don't sync org roles if none are specified + if len(extUser.OrgRoles) == 0 { + return nil + } + + orgsQuery := &m.GetUserOrgListQuery{UserId: user.Id} + if err := bus.Dispatch(orgsQuery); err != nil { + return err + } + + handledOrgIds := map[int64]bool{} + deleteOrgIds := []int64{} + + // update existing org roles + for _, org := range orgsQuery.Result { + handledOrgIds[org.OrgId] = true + + if extUser.OrgRoles[org.OrgId] == "" { + deleteOrgIds = append(deleteOrgIds, org.OrgId) + } else if extUser.OrgRoles[org.OrgId] != org.Role { + // update role + cmd := 
&m.UpdateOrgUserCommand{OrgId: org.OrgId, UserId: user.Id, Role: extUser.OrgRoles[org.OrgId]} + if err := bus.Dispatch(cmd); err != nil { + return err + } + } + } + + // add any new org roles + for orgId, orgRole := range extUser.OrgRoles { + if _, exists := handledOrgIds[orgId]; exists { + continue + } + + // add role + cmd := &m.AddOrgUserCommand{UserId: user.Id, Role: orgRole, OrgId: orgId} + err := bus.Dispatch(cmd) + if err != nil && err != m.ErrOrgNotFound { + return err + } + } + + // delete any removed org roles + for _, orgId := range deleteOrgIds { + cmd := &m.RemoveOrgUserCommand{OrgId: orgId, UserId: user.Id} + if err := bus.Dispatch(cmd); err != nil { + return err + } + } + + // update user's default org if needed + if _, ok := extUser.OrgRoles[user.OrgId]; !ok { + for orgId := range extUser.OrgRoles { + user.OrgId = orgId + break + } + + return bus.Dispatch(&m.SetUsingOrgCommand{ + UserId: user.Id, + OrgId: user.OrgId, + }) + } + + return nil +} diff --git a/pkg/login/grafana_login.go b/pkg/login/grafana_login.go index 677ba776e4f..e8594fdd190 100644 --- a/pkg/login/grafana_login.go +++ b/pkg/login/grafana_login.go @@ -17,7 +17,7 @@ var validatePassword = func(providedPassword string, userPassword string, userSa return nil } -var loginUsingGrafanaDB = func(query *LoginUserQuery) error { +var loginUsingGrafanaDB = func(query *m.LoginUserQuery) error { userQuery := m.GetUserByLoginQuery{LoginOrEmail: query.Username} if err := bus.Dispatch(&userQuery); err != nil { diff --git a/pkg/login/grafana_login_test.go b/pkg/login/grafana_login_test.go index 88e52224113..90422678fd2 100644 --- a/pkg/login/grafana_login_test.go +++ b/pkg/login/grafana_login_test.go @@ -66,7 +66,7 @@ func TestGrafanaLogin(t *testing.T) { } type grafanaLoginScenarioContext struct { - loginUserQuery *LoginUserQuery + loginUserQuery *m.LoginUserQuery validatePasswordCalled bool } @@ -77,7 +77,7 @@ func grafanaLoginScenario(desc string, fn grafanaLoginScenarioFunc) { origValidatePassword := validatePassword sc := &grafanaLoginScenarioContext{ - loginUserQuery: &LoginUserQuery{ + loginUserQuery: &m.LoginUserQuery{ Username: "user", Password: "pwd", IpAddress: "192.168.1.1:56433", diff --git a/pkg/login/ldap.go b/pkg/login/ldap.go index be3babac02e..49b92648561 100644 --- a/pkg/login/ldap.go +++ b/pkg/login/ldap.go @@ -24,10 +24,9 @@ type ILdapConn interface { } type ILdapAuther interface { - Login(query *LoginUserQuery) error - SyncSignedInUser(signedInUser *m.SignedInUser) error - GetGrafanaUserFor(ldapUser *LdapUserInfo) (*m.User, error) - SyncOrgRoles(user *m.User, ldapUser *LdapUserInfo) error + Login(query *m.LoginUserQuery) error + SyncUser(query *m.LoginUserQuery) error + GetGrafanaUserFor(ctx *m.ReqContext, ldapUser *LdapUserInfo) (*m.User, error) } type ldapAuther struct { @@ -51,12 +50,12 @@ func (a *ldapAuther) Dial() error { if a.server.RootCACert != "" { certPool = x509.NewCertPool() for _, caCertFile := range strings.Split(a.server.RootCACert, " ") { - if pem, err := ioutil.ReadFile(caCertFile); err != nil { + pem, err := ioutil.ReadFile(caCertFile) + if err != nil { return err - } else { - if !certPool.AppendCertsFromPEM(pem) { - return errors.New("Failed to append CA certificate " + caCertFile) - } + } + if !certPool.AppendCertsFromPEM(pem) { + return errors.New("Failed to append CA certificate " + caCertFile) } } } @@ -89,7 +88,8 @@ func (a *ldapAuther) Dial() error { return err } -func (a *ldapAuther) Login(query *LoginUserQuery) error { +func (a *ldapAuther) Login(query *m.LoginUserQuery) 
error { + // connect to ldap server if err := a.Dial(); err != nil { return err } @@ -101,206 +101,105 @@ func (a *ldapAuther) Login(query *LoginUserQuery) error { } // find user entry & attributes - if ldapUser, err := a.searchForUser(query.Username); err != nil { + ldapUser, err := a.searchForUser(query.Username) + if err != nil { return err - } else { - a.log.Debug("Ldap User found", "info", spew.Sdump(ldapUser)) + } - // check if a second user bind is needed - if a.requireSecondBind { - if err := a.secondBind(ldapUser, query.Password); err != nil { - return err - } - } + a.log.Debug("Ldap User found", "info", spew.Sdump(ldapUser)) - if grafanaUser, err := a.GetGrafanaUserFor(ldapUser); err != nil { + // check if a second user bind is needed + if a.requireSecondBind { + err = a.secondBind(ldapUser, query.Password) + if err != nil { return err - } else { - if syncErr := a.syncInfoAndOrgRoles(grafanaUser, ldapUser); syncErr != nil { - return syncErr - } - query.User = grafanaUser - return nil } } + + grafanaUser, err := a.GetGrafanaUserFor(query.ReqContext, ldapUser) + if err != nil { + return err + } + + query.User = grafanaUser + return nil } -func (a *ldapAuther) SyncSignedInUser(signedInUser *m.SignedInUser) error { - grafanaUser := m.User{ - Id: signedInUser.UserId, - Login: signedInUser.Login, - Email: signedInUser.Email, - Name: signedInUser.Name, - } - - if err := a.Dial(); err != nil { +func (a *ldapAuther) SyncUser(query *m.LoginUserQuery) error { + // connect to ldap server + err := a.Dial() + if err != nil { return err } - defer a.conn.Close() - if err := a.serverBind(); err != nil { + + err = a.serverBind() + if err != nil { return err } - if ldapUser, err := a.searchForUser(signedInUser.Login); err != nil { + // find user entry & attributes + ldapUser, err := a.searchForUser(query.Username) + if err != nil { a.log.Error("Failed searching for user in ldap", "error", err) - return err - } else { - if err := a.syncInfoAndOrgRoles(&grafanaUser, ldapUser); err != nil { - return err + } + + a.log.Debug("Ldap User found", "info", spew.Sdump(ldapUser)) + + grafanaUser, err := a.GetGrafanaUserFor(query.ReqContext, ldapUser) + if err != nil { + return err + } + + query.User = grafanaUser + return nil +} + +func (a *ldapAuther) GetGrafanaUserFor(ctx *m.ReqContext, ldapUser *LdapUserInfo) (*m.User, error) { + extUser := &m.ExternalUserInfo{ + AuthModule: "ldap", + AuthId: ldapUser.DN, + Name: fmt.Sprintf("%s %s", ldapUser.FirstName, ldapUser.LastName), + Login: ldapUser.Username, + Email: ldapUser.Email, + OrgRoles: map[int64]m.RoleType{}, + } + + for _, group := range a.server.LdapGroups { + // only use the first match for each org + if extUser.OrgRoles[group.OrgId] != "" { + continue } - a.log.Debug("Got Ldap User Info", "user", spew.Sdump(ldapUser)) + if ldapUser.isMemberOf(group.GroupDN) { + extUser.OrgRoles[group.OrgId] = group.OrgRole + } } - return nil -} - -// Sync info for ldap user and grafana user -func (a *ldapAuther) syncInfoAndOrgRoles(user *m.User, ldapUser *LdapUserInfo) error { - // sync user details - if err := a.syncUserInfo(user, ldapUser); err != nil { - return err - } - // sync org roles - if err := a.SyncOrgRoles(user, ldapUser); err != nil { - return err - } - - return nil -} - -func (a *ldapAuther) GetGrafanaUserFor(ldapUser *LdapUserInfo) (*m.User, error) { // validate that the user has access // if there are no ldap group mappings access is true // otherwise a single group must match - access := len(a.server.LdapGroups) == 0 - for _, ldapGroup := range 
a.server.LdapGroups { - if ldapUser.isMemberOf(ldapGroup.GroupDN) { - access = true - break - } - } - - if !access { - a.log.Info("Ldap Auth: user does not belong in any of the specified ldap groups", "username", ldapUser.Username, "groups", ldapUser.MemberOf) + if len(a.server.LdapGroups) > 0 && len(extUser.OrgRoles) < 1 { + a.log.Info( + "Ldap Auth: user does not belong in any of the specified ldap groups", + "username", ldapUser.Username, + "groups", ldapUser.MemberOf) return nil, ErrInvalidCredentials } - // get user from grafana db - userQuery := m.GetUserByLoginQuery{LoginOrEmail: ldapUser.Username} - if err := bus.Dispatch(&userQuery); err != nil { - if err == m.ErrUserNotFound && setting.LdapAllowSignup { - return a.createGrafanaUser(ldapUser) - } else if err == m.ErrUserNotFound { - a.log.Warn("Not allowing LDAP login, user not found in internal user database, and ldap allow signup = false") - return nil, ErrInvalidCredentials - } else { - return nil, err - } + // add/update user in grafana + userQuery := &m.UpsertUserCommand{ + ReqContext: ctx, + ExternalUser: extUser, + SignupAllowed: setting.LdapAllowSignup, } - - return userQuery.Result, nil - -} -func (a *ldapAuther) createGrafanaUser(ldapUser *LdapUserInfo) (*m.User, error) { - cmd := m.CreateUserCommand{ - Login: ldapUser.Username, - Email: ldapUser.Email, - Name: fmt.Sprintf("%s %s", ldapUser.FirstName, ldapUser.LastName), - } - - if err := bus.Dispatch(&cmd); err != nil { + err := bus.Dispatch(userQuery) + if err != nil { return nil, err } - return &cmd.Result, nil -} - -func (a *ldapAuther) syncUserInfo(user *m.User, ldapUser *LdapUserInfo) error { - var name = fmt.Sprintf("%s %s", ldapUser.FirstName, ldapUser.LastName) - if user.Email == ldapUser.Email && user.Name == name { - return nil - } - - a.log.Debug("Syncing user info", "username", ldapUser.Username) - updateCmd := m.UpdateUserCommand{} - updateCmd.UserId = user.Id - updateCmd.Login = user.Login - updateCmd.Email = ldapUser.Email - updateCmd.Name = fmt.Sprintf("%s %s", ldapUser.FirstName, ldapUser.LastName) - return bus.Dispatch(&updateCmd) -} - -func (a *ldapAuther) SyncOrgRoles(user *m.User, ldapUser *LdapUserInfo) error { - if len(a.server.LdapGroups) == 0 { - a.log.Warn("No group mappings defined") - return nil - } - - orgsQuery := m.GetUserOrgListQuery{UserId: user.Id} - if err := bus.Dispatch(&orgsQuery); err != nil { - return err - } - - handledOrgIds := map[int64]bool{} - - // update or remove org roles - for _, org := range orgsQuery.Result { - match := false - handledOrgIds[org.OrgId] = true - - for _, group := range a.server.LdapGroups { - if org.OrgId != group.OrgId { - continue - } - - if ldapUser.isMemberOf(group.GroupDN) { - match = true - if org.Role != group.OrgRole { - // update role - cmd := m.UpdateOrgUserCommand{OrgId: org.OrgId, UserId: user.Id, Role: group.OrgRole} - if err := bus.Dispatch(&cmd); err != nil { - return err - } - } - // ignore subsequent ldap group mapping matches - break - } - } - - // remove role if no mappings match - if !match { - cmd := m.RemoveOrgUserCommand{OrgId: org.OrgId, UserId: user.Id} - if err := bus.Dispatch(&cmd); err != nil { - return err - } - } - } - - // add missing org roles - for _, group := range a.server.LdapGroups { - if !ldapUser.isMemberOf(group.GroupDN) { - continue - } - - if _, exists := handledOrgIds[group.OrgId]; exists { - continue - } - - // add role - cmd := m.AddOrgUserCommand{UserId: user.Id, Role: group.OrgRole, OrgId: group.OrgId} - err := bus.Dispatch(&cmd) - if err != nil && err != 
m.ErrOrgNotFound { - return err - } - - // mark this group has handled so we do not process it again - handledOrgIds[group.OrgId] = true - } - - return nil + return userQuery.Result, nil } func (a *ldapAuther) serverBind() error { @@ -404,9 +303,10 @@ func (a *ldapAuther) searchForUser(username string) (*LdapUserInfo, error) { var groupSearchResult *ldap.SearchResult for _, groupSearchBase := range a.server.GroupSearchBaseDNs { var filter_replace string - filter_replace = getLdapAttr(a.server.GroupSearchFilterUserAttribute, searchResult) if a.server.GroupSearchFilterUserAttribute == "" { filter_replace = getLdapAttr(a.server.Attr.Username, searchResult) + } else { + filter_replace = getLdapAttr(a.server.GroupSearchFilterUserAttribute, searchResult) } filter := strings.Replace(a.server.GroupSearchFilter, "%s", ldap.EscapeFilter(filter_replace), -1) @@ -448,6 +348,9 @@ func (a *ldapAuther) searchForUser(username string) (*LdapUserInfo, error) { } func getLdapAttrN(name string, result *ldap.SearchResult, n int) string { + if name == "DN" { + return result.Entries[0].DN + } for _, attr := range result.Entries[n].Attributes { if attr.Name == name { if len(attr.Values) > 0 { @@ -470,7 +373,3 @@ func getLdapAttrArray(name string, result *ldap.SearchResult) []string { } return []string{} } - -func createUserFromLdapInfo() error { - return nil -} diff --git a/pkg/login/ldap_login.go b/pkg/login/ldap_login.go index b74b69db036..5974e19d691 100644 --- a/pkg/login/ldap_login.go +++ b/pkg/login/ldap_login.go @@ -1,10 +1,11 @@ package login import ( + m "github.com/grafana/grafana/pkg/models" "github.com/grafana/grafana/pkg/setting" ) -var loginUsingLdap = func(query *LoginUserQuery) (bool, error) { +var loginUsingLdap = func(query *m.LoginUserQuery) (bool, error) { if !setting.LdapEnabled { return false, nil } diff --git a/pkg/login/ldap_login_test.go b/pkg/login/ldap_login_test.go index 6af125566e8..6067a063795 100644 --- a/pkg/login/ldap_login_test.go +++ b/pkg/login/ldap_login_test.go @@ -79,7 +79,7 @@ func TestLdapLogin(t *testing.T) { ldapLoginScenario("When login", func(sc *ldapLoginScenarioContext) { sc.withLoginResult(false) - enabled, err := loginUsingLdap(&LoginUserQuery{ + enabled, err := loginUsingLdap(&m.LoginUserQuery{ Username: "user", Password: "pwd", }) @@ -117,7 +117,7 @@ type mockLdapAuther struct { loginCalled bool } -func (a *mockLdapAuther) Login(query *LoginUserQuery) error { +func (a *mockLdapAuther) Login(query *m.LoginUserQuery) error { a.loginCalled = true if !a.validLogin { @@ -127,20 +127,16 @@ func (a *mockLdapAuther) Login(query *LoginUserQuery) error { return nil } -func (a *mockLdapAuther) SyncSignedInUser(signedInUser *m.SignedInUser) error { +func (a *mockLdapAuther) SyncUser(query *m.LoginUserQuery) error { return nil } -func (a *mockLdapAuther) GetGrafanaUserFor(ldapUser *LdapUserInfo) (*m.User, error) { +func (a *mockLdapAuther) GetGrafanaUserFor(ctx *m.ReqContext, ldapUser *LdapUserInfo) (*m.User, error) { return nil, nil } -func (a *mockLdapAuther) SyncOrgRoles(user *m.User, ldapUser *LdapUserInfo) error { - return nil -} - type ldapLoginScenarioContext struct { - loginUserQuery *LoginUserQuery + loginUserQuery *m.LoginUserQuery ldapAuthenticatorMock *mockLdapAuther } @@ -151,7 +147,7 @@ func ldapLoginScenario(desc string, fn ldapLoginScenarioFunc) { origNewLdapAuthenticator := NewLdapAuthenticator sc := &ldapLoginScenarioContext{ - loginUserQuery: &LoginUserQuery{ + loginUserQuery: &m.LoginUserQuery{ Username: "user", Password: "pwd", IpAddress: 
"192.168.1.1:56433", diff --git a/pkg/login/ldap_test.go b/pkg/login/ldap_test.go index 8677bbeae42..34932926406 100644 --- a/pkg/login/ldap_test.go +++ b/pkg/login/ldap_test.go @@ -18,7 +18,7 @@ func TestLdapAuther(t *testing.T) { ldapAuther := NewLdapAuthenticator(&LdapServerConf{ LdapGroups: []*LdapGroupToOrgRole{{}}, }) - _, err := ldapAuther.GetGrafanaUserFor(&LdapUserInfo{}) + _, err := ldapAuther.GetGrafanaUserFor(nil, &LdapUserInfo{}) So(err, ShouldEqual, ErrInvalidCredentials) }) @@ -34,7 +34,7 @@ func TestLdapAuther(t *testing.T) { sc.userQueryReturns(user1) - result, err := ldapAuther.GetGrafanaUserFor(&LdapUserInfo{}) + result, err := ldapAuther.GetGrafanaUserFor(nil, &LdapUserInfo{}) So(err, ShouldBeNil) So(result, ShouldEqual, user1) }) @@ -48,7 +48,21 @@ func TestLdapAuther(t *testing.T) { sc.userQueryReturns(user1) - result, err := ldapAuther.GetGrafanaUserFor(&LdapUserInfo{MemberOf: []string{"cn=users"}}) + result, err := ldapAuther.GetGrafanaUserFor(nil, &LdapUserInfo{MemberOf: []string{"cn=users"}}) + So(err, ShouldBeNil) + So(result, ShouldEqual, user1) + }) + + ldapAutherScenario("Given group match with different case", func(sc *scenarioContext) { + ldapAuther := NewLdapAuthenticator(&LdapServerConf{ + LdapGroups: []*LdapGroupToOrgRole{ + {GroupDN: "cn=users", OrgRole: "Admin"}, + }, + }) + + sc.userQueryReturns(user1) + + result, err := ldapAuther.GetGrafanaUserFor(nil, &LdapUserInfo{MemberOf: []string{"CN=users"}}) So(err, ShouldBeNil) So(result, ShouldEqual, user1) }) @@ -64,7 +78,8 @@ func TestLdapAuther(t *testing.T) { sc.userQueryReturns(nil) - result, err := ldapAuther.GetGrafanaUserFor(&LdapUserInfo{ + result, err := ldapAuther.GetGrafanaUserFor(nil, &LdapUserInfo{ + DN: "torkelo", Username: "torkelo", Email: "my@email.com", MemberOf: []string{"cn=editor"}, @@ -72,11 +87,6 @@ func TestLdapAuther(t *testing.T) { So(err, ShouldBeNil) - Convey("Should create new user", func() { - So(sc.createUserCmd.Login, ShouldEqual, "torkelo") - So(sc.createUserCmd.Email, ShouldEqual, "my@email.com") - }) - Convey("Should return new user", func() { So(result.Login, ShouldEqual, "torkelo") }) @@ -95,7 +105,7 @@ func TestLdapAuther(t *testing.T) { }) sc.userOrgsQueryReturns([]*m.UserOrgDTO{}) - err := ldapAuther.SyncOrgRoles(&m.User{}, &LdapUserInfo{ + _, err := ldapAuther.GetGrafanaUserFor(nil, &LdapUserInfo{ MemberOf: []string{"cn=users"}, }) @@ -114,7 +124,7 @@ func TestLdapAuther(t *testing.T) { }) sc.userOrgsQueryReturns([]*m.UserOrgDTO{{OrgId: 1, Role: m.ROLE_EDITOR}}) - err := ldapAuther.SyncOrgRoles(&m.User{}, &LdapUserInfo{ + _, err := ldapAuther.GetGrafanaUserFor(nil, &LdapUserInfo{ MemberOf: []string{"cn=users"}, }) @@ -122,24 +132,29 @@ func TestLdapAuther(t *testing.T) { So(err, ShouldBeNil) So(sc.updateOrgUserCmd, ShouldNotBeNil) So(sc.updateOrgUserCmd.Role, ShouldEqual, m.ROLE_ADMIN) + So(sc.setUsingOrgCmd.OrgId, ShouldEqual, 1) }) }) ldapAutherScenario("given current org role is removed in ldap", func(sc *scenarioContext) { ldapAuther := NewLdapAuthenticator(&LdapServerConf{ LdapGroups: []*LdapGroupToOrgRole{ - {GroupDN: "cn=users", OrgId: 1, OrgRole: "Admin"}, + {GroupDN: "cn=users", OrgId: 2, OrgRole: "Admin"}, }, }) - sc.userOrgsQueryReturns([]*m.UserOrgDTO{{OrgId: 1, Role: m.ROLE_EDITOR}}) - err := ldapAuther.SyncOrgRoles(&m.User{}, &LdapUserInfo{ - MemberOf: []string{"cn=other"}, + sc.userOrgsQueryReturns([]*m.UserOrgDTO{ + {OrgId: 1, Role: m.ROLE_EDITOR}, + {OrgId: 2, Role: m.ROLE_EDITOR}, + }) + _, err := ldapAuther.GetGrafanaUserFor(nil, &LdapUserInfo{ + 
MemberOf: []string{"cn=users"}, }) Convey("Should remove org role", func() { So(err, ShouldBeNil) So(sc.removeOrgUserCmd, ShouldNotBeNil) + So(sc.setUsingOrgCmd.OrgId, ShouldEqual, 2) }) }) @@ -152,7 +167,7 @@ func TestLdapAuther(t *testing.T) { }) sc.userOrgsQueryReturns([]*m.UserOrgDTO{{OrgId: 1, Role: m.ROLE_EDITOR}}) - err := ldapAuther.SyncOrgRoles(&m.User{}, &LdapUserInfo{ + _, err := ldapAuther.GetGrafanaUserFor(nil, &LdapUserInfo{ MemberOf: []string{"cn=users"}, }) @@ -160,6 +175,7 @@ func TestLdapAuther(t *testing.T) { So(err, ShouldBeNil) So(sc.removeOrgUserCmd, ShouldBeNil) So(sc.updateOrgUserCmd, ShouldNotBeNil) + So(sc.setUsingOrgCmd.OrgId, ShouldEqual, 1) }) }) @@ -172,13 +188,14 @@ func TestLdapAuther(t *testing.T) { }) sc.userOrgsQueryReturns([]*m.UserOrgDTO{{OrgId: 1, Role: m.ROLE_ADMIN}}) - err := ldapAuther.SyncOrgRoles(&m.User{}, &LdapUserInfo{ + _, err := ldapAuther.GetGrafanaUserFor(nil, &LdapUserInfo{ MemberOf: []string{"cn=admins"}, }) Convey("Should take first match, and ignore subsequent matches", func() { So(err, ShouldBeNil) So(sc.updateOrgUserCmd, ShouldBeNil) + So(sc.setUsingOrgCmd.OrgId, ShouldEqual, 1) }) }) @@ -191,19 +208,20 @@ func TestLdapAuther(t *testing.T) { }) sc.userOrgsQueryReturns([]*m.UserOrgDTO{}) - err := ldapAuther.SyncOrgRoles(&m.User{}, &LdapUserInfo{ + _, err := ldapAuther.GetGrafanaUserFor(nil, &LdapUserInfo{ MemberOf: []string{"cn=admins"}, }) Convey("Should take first match, and ignore subsequent matches", func() { So(err, ShouldBeNil) So(sc.addOrgUserCmd.Role, ShouldEqual, m.ROLE_ADMIN) + So(sc.setUsingOrgCmd.OrgId, ShouldEqual, 1) }) }) }) - Convey("When calling SyncSignedInUser", t, func() { + Convey("When calling SyncUser", t, func() { mockLdapConnection := &mockLdapConn{} ldapAuther := NewLdapAuthenticator( @@ -243,17 +261,20 @@ func TestLdapAuther(t *testing.T) { ldapAutherScenario("When ldapUser found call syncInfo and orgRoles", func(sc *scenarioContext) { // arrange - signedInUser := &m.SignedInUser{ - Email: "roel@test.net", - UserId: 1, - Name: "Roel Gerrits", - Login: "roelgerrits", + query := &m.LoginUserQuery{ + Username: "roelgerrits", } + sc.userQueryReturns(&m.User{ + Id: 1, + Email: "roel@test.net", + Name: "Roel Gerrits", + Login: "roelgerrits", + }) sc.userOrgsQueryReturns([]*m.UserOrgDTO{}) // act - syncErrResult := ldapAuther.SyncSignedInUser(signedInUser) + syncErrResult := ldapAuther.SyncUser(query) // assert So(dialCalled, ShouldBeTrue) @@ -299,6 +320,19 @@ func ldapAutherScenario(desc string, fn scenarioFunc) { sc := &scenarioContext{} + bus.AddHandler("test", UpsertUser) + + bus.AddHandler("test", func(cmd *m.GetUserByAuthInfoQuery) error { + sc.getUserByAuthInfoQuery = cmd + sc.getUserByAuthInfoQuery.Result = &m.User{Login: cmd.Login} + return nil + }) + + bus.AddHandler("test", func(cmd *m.GetUserOrgListQuery) error { + sc.getUserOrgListQuery = cmd + return nil + }) + bus.AddHandler("test", func(cmd *m.CreateUserCommand) error { sc.createUserCmd = cmd sc.createUserCmd.Result = m.User{Login: cmd.Login} @@ -325,26 +359,36 @@ func ldapAutherScenario(desc string, fn scenarioFunc) { return nil }) + bus.AddHandler("test", func(cmd *m.SetUsingOrgCommand) error { + sc.setUsingOrgCmd = cmd + return nil + }) + fn(sc) }) } type scenarioContext struct { - createUserCmd *m.CreateUserCommand - addOrgUserCmd *m.AddOrgUserCommand - updateOrgUserCmd *m.UpdateOrgUserCommand - removeOrgUserCmd *m.RemoveOrgUserCommand - updateUserCmd *m.UpdateUserCommand + getUserByAuthInfoQuery *m.GetUserByAuthInfoQuery + getUserOrgListQuery 
*m.GetUserOrgListQuery + createUserCmd *m.CreateUserCommand + addOrgUserCmd *m.AddOrgUserCommand + updateOrgUserCmd *m.UpdateOrgUserCommand + removeOrgUserCmd *m.RemoveOrgUserCommand + updateUserCmd *m.UpdateUserCommand + setUsingOrgCmd *m.SetUsingOrgCommand } func (sc *scenarioContext) userQueryReturns(user *m.User) { - bus.AddHandler("test", func(query *m.GetUserByLoginQuery) error { + bus.AddHandler("test", func(query *m.GetUserByAuthInfoQuery) error { if user == nil { return m.ErrUserNotFound - } else { - query.Result = user - return nil } + query.Result = user + return nil + }) + bus.AddHandler("test", func(query *m.SetAuthInfoCommand) error { + return nil }) } diff --git a/pkg/login/ldap_user.go b/pkg/login/ldap_user.go index 9f1cf3c96b6..3651d9e5e23 100644 --- a/pkg/login/ldap_user.go +++ b/pkg/login/ldap_user.go @@ -1,5 +1,9 @@ package login +import ( + "strings" +) + type LdapUserInfo struct { DN string FirstName string @@ -15,7 +19,7 @@ func (u *LdapUserInfo) isMemberOf(group string) bool { } for _, member := range u.MemberOf { - if member == group { + if strings.EqualFold(member, group) { return true } } diff --git a/pkg/metrics/graphitebridge/graphite.go b/pkg/metrics/graphitebridge/graphite.go index 68fb544fc7c..5b61f078e6c 100644 --- a/pkg/metrics/graphitebridge/graphite.go +++ b/pkg/metrics/graphitebridge/graphite.go @@ -55,7 +55,7 @@ const ( AbortOnError ) -var metricCategoryPrefix []string = []string{ +var metricCategoryPrefix = []string{ "proxy_", "api_", "page_", @@ -66,7 +66,7 @@ var metricCategoryPrefix []string = []string{ "go_", "process_"} -var trimMetricPrefix []string = []string{"grafana_"} +var trimMetricPrefix = []string{"grafana_"} // Config defines the Graphite bridge config. type Config struct { @@ -295,11 +295,7 @@ func writeMetric(buf *bufio.Writer, m model.Metric, mf *dto.MetricFamily) error } } - if err = addExtentionConventionForRollups(buf, mf, m); err != nil { - return err - } - - return nil + return addExtentionConventionForRollups(buf, mf, m) } func addExtentionConventionForRollups(buf *bufio.Writer, mf *dto.MetricFamily, m model.Metric) error { diff --git a/pkg/metrics/init.go b/pkg/metrics/init.go deleted file mode 100644 index 833b148d319..00000000000 --- a/pkg/metrics/init.go +++ /dev/null @@ -1,38 +0,0 @@ -package metrics - -import ( - "context" - - ini "gopkg.in/ini.v1" - - "github.com/grafana/grafana/pkg/log" - "github.com/grafana/grafana/pkg/metrics/graphitebridge" -) - -var metricsLogger log.Logger = log.New("metrics") - -type logWrapper struct { - logger log.Logger -} - -func (lw *logWrapper) Println(v ...interface{}) { - lw.logger.Info("graphite metric bridge", v...) 
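The ldap_user.go change above swaps the plain == comparison for strings.EqualFold, so group membership checks now tolerate case differences in LDAP DNs (which compare case-insensitively in practice). A minimal self-contained sketch of that comparison, outside the patch:

package main

import (
	"fmt"
	"strings"
)

// isMemberOf mirrors the patched loop: scan the user's memberOf values,
// ignoring case when comparing against the configured group DN.
func isMemberOf(memberOf []string, group string) bool {
	for _, member := range memberOf {
		if strings.EqualFold(member, group) {
			return true
		}
	}
	return false
}

func main() {
	groups := []string{"CN=users,DC=grafana,DC=org"}
	fmt.Println(isMemberOf(groups, "cn=users,dc=grafana,dc=org"))  // true
	fmt.Println(isMemberOf(groups, "cn=admins,dc=grafana,dc=org")) // false
}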
-} - -func Init(file *ini.File) { - cfg := ReadSettings(file) - internalInit(cfg) -} - -func internalInit(settings *MetricSettings) { - initMetricVars(settings) - - if settings.GraphiteBridgeConfig != nil { - bridge, err := graphitebridge.NewBridge(settings.GraphiteBridgeConfig) - if err != nil { - metricsLogger.Error("failed to create graphite bridge", "error", err) - } else { - go bridge.Run(context.Background()) - } - } -} diff --git a/pkg/metrics/metrics.go b/pkg/metrics/metrics.go index 4d4a11d0faa..83505826910 100644 --- a/pkg/metrics/metrics.go +++ b/pkg/metrics/metrics.go @@ -54,6 +54,7 @@ var ( M_Alerting_Active_Alerts prometheus.Gauge M_StatTotal_Dashboards prometheus.Gauge M_StatTotal_Users prometheus.Gauge + M_StatActive_Users prometheus.Gauge M_StatTotal_Orgs prometheus.Gauge M_StatTotal_Playlists prometheus.Gauge M_Grafana_Version *prometheus.GaugeVec @@ -253,6 +254,12 @@ func init() { Namespace: exporterName, }) + M_StatActive_Users = prometheus.NewGauge(prometheus.GaugeOpts{ + Name: "stat_active_users", + Help: "number of active users", + Namespace: exporterName, + }) + M_StatTotal_Orgs = prometheus.NewGauge(prometheus.GaugeOpts{ Name: "stat_total_orgs", Help: "total amount of orgs", @@ -270,10 +277,9 @@ func init() { Help: "Information about the Grafana", Namespace: exporterName, }, []string{"version"}) - } -func initMetricVars(settings *MetricSettings) { +func initMetricVars() { prometheus.MustRegister( M_Instance_Start, M_Page_Status, @@ -305,45 +311,25 @@ func initMetricVars(settings *MetricSettings) { M_Alerting_Active_Alerts, M_StatTotal_Dashboards, M_StatTotal_Users, + M_StatActive_Users, M_StatTotal_Orgs, M_StatTotal_Playlists, M_Grafana_Version) - go instrumentationLoop(settings) } -func instrumentationLoop(settings *MetricSettings) chan struct{} { - M_Instance_Start.Inc() - - onceEveryDayTick := time.NewTicker(time.Hour * 24) - secondTicker := time.NewTicker(time.Second * time.Duration(settings.IntervalSeconds)) - - for { - select { - case <-onceEveryDayTick.C: - sendUsageStats() - case <-secondTicker.C: - updateTotalStats() - } - } -} - -var metricPublishCounter int64 = 0 - func updateTotalStats() { - metricPublishCounter++ - if metricPublishCounter == 1 || metricPublishCounter%10 == 0 { - statsQuery := models.GetSystemStatsQuery{} - if err := bus.Dispatch(&statsQuery); err != nil { - metricsLogger.Error("Failed to get system stats", "error", err) - return - } - - M_StatTotal_Dashboards.Set(float64(statsQuery.Result.Dashboards)) - M_StatTotal_Users.Set(float64(statsQuery.Result.Users)) - M_StatTotal_Playlists.Set(float64(statsQuery.Result.Playlists)) - M_StatTotal_Orgs.Set(float64(statsQuery.Result.Orgs)) + statsQuery := models.GetSystemStatsQuery{} + if err := bus.Dispatch(&statsQuery); err != nil { + metricsLogger.Error("Failed to get system stats", "error", err) + return } + + M_StatTotal_Dashboards.Set(float64(statsQuery.Result.Dashboards)) + M_StatTotal_Users.Set(float64(statsQuery.Result.Users)) + M_StatActive_Users.Set(float64(statsQuery.Result.ActiveUsers)) + M_StatTotal_Playlists.Set(float64(statsQuery.Result.Playlists)) + M_StatTotal_Orgs.Set(float64(statsQuery.Result.Orgs)) } func sendUsageStats() { @@ -403,6 +389,6 @@ func sendUsageStats() { out, _ := json.MarshalIndent(report, "", " ") data := bytes.NewBuffer(out) - client := http.Client{Timeout: time.Duration(5 * time.Second)} + client := http.Client{Timeout: 5 * time.Second} go client.Post("https://stats.grafana.org/grafana-usage-report", "application/json", data) } diff --git 
a/pkg/metrics/service.go b/pkg/metrics/service.go
new file mode 100644
index 00000000000..ec38e0acfec
--- /dev/null
+++ b/pkg/metrics/service.go
@@ -0,0 +1,71 @@
+package metrics
+
+import (
+	"context"
+	"time"
+
+	"github.com/grafana/grafana/pkg/log"
+	"github.com/grafana/grafana/pkg/metrics/graphitebridge"
+	"github.com/grafana/grafana/pkg/registry"
+	"github.com/grafana/grafana/pkg/setting"
+)
+
+var metricsLogger log.Logger = log.New("metrics")
+
+type logWrapper struct {
+	logger log.Logger
+}
+
+func (lw *logWrapper) Println(v ...interface{}) {
+	lw.logger.Info("graphite metric bridge", v...)
+}
+
+func init() {
+	registry.RegisterService(&InternalMetricsService{})
+	initMetricVars()
+}
+
+type InternalMetricsService struct {
+	Cfg *setting.Cfg `inject:""`
+
+	enabled         bool
+	intervalSeconds int64
+	graphiteCfg     *graphitebridge.Config
+}
+
+func (im *InternalMetricsService) Init() error {
+	return im.readSettings()
+}
+
+func (im *InternalMetricsService) Run(ctx context.Context) error {
+	// Start Graphite Bridge
+	if im.graphiteCfg != nil {
+		bridge, err := graphitebridge.NewBridge(im.graphiteCfg)
+		if err != nil {
+			metricsLogger.Error("failed to create graphite bridge", "error", err)
+		} else {
+			go bridge.Run(ctx)
+		}
+	}
+
+	M_Instance_Start.Inc()
+
+	// set the total stats gauges before we publish metrics
+	updateTotalStats()
+
+	onceEveryDayTick := time.NewTicker(time.Hour * 24)
+	everyMinuteTicker := time.NewTicker(time.Minute)
+	defer onceEveryDayTick.Stop()
+	defer everyMinuteTicker.Stop()
+
+	for {
+		select {
+		case <-onceEveryDayTick.C:
+			sendUsageStats()
+		case <-everyMinuteTicker.C:
+			updateTotalStats()
+		case <-ctx.Done():
+			return ctx.Err()
+		}
+	}
+}
diff --git a/pkg/metrics/settings.go b/pkg/metrics/settings.go
index 5e51f85768a..58b84a7192f 100644
--- a/pkg/metrics/settings.go
+++ b/pkg/metrics/settings.go
@@ -1,67 +1,53 @@
 package metrics
 
 import (
+	"fmt"
 	"strings"
 	"time"
 
 	"github.com/grafana/grafana/pkg/metrics/graphitebridge"
 	"github.com/grafana/grafana/pkg/setting"
 	"github.com/prometheus/client_golang/prometheus"
-	ini "gopkg.in/ini.v1"
 )
 
-type MetricSettings struct {
-	Enabled              bool
-	IntervalSeconds      int64
-	GraphiteBridgeConfig *graphitebridge.Config
-}
-
-func ReadSettings(file *ini.File) *MetricSettings {
-	var settings = &MetricSettings{
-		Enabled: false,
+func (im *InternalMetricsService) readSettings() error {
+	var section, err = im.Cfg.Raw.GetSection("metrics")
+	if err != nil {
+		return fmt.Errorf("Unable to find metrics config section %v", err)
 	}
 
-	var section, err = file.GetSection("metrics")
-	if err != nil {
-		metricsLogger.Crit("Unable to find metrics config section", "error", err)
+	im.enabled = section.Key("enabled").MustBool(false)
+	im.intervalSeconds = section.Key("interval_seconds").MustInt64(10)
+
+	if !im.enabled {
 		return nil
 	}
 
-	settings.Enabled = section.Key("enabled").MustBool(false)
-	settings.IntervalSeconds = section.Key("interval_seconds").MustInt64(10)
-
-	if !settings.Enabled {
-		return settings
+	if err := im.parseGraphiteSettings(); err != nil {
+		return fmt.Errorf("Unable to parse metrics graphite section, %v", err)
 	}
 
-	cfg, err := parseGraphiteSettings(settings, file)
-	if err != nil {
-		metricsLogger.Crit("Unable to parse metrics graphite section", "error", err)
-		return nil
-	}
-
-	settings.GraphiteBridgeConfig = cfg
-
-	return settings
+	return nil
 }
 
-func parseGraphiteSettings(settings *MetricSettings, file *ini.File) (*graphitebridge.Config, error) {
-	graphiteSection, err := setting.Cfg.GetSection("metrics.graphite")
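The new pkg/metrics/service.go above replaces the old free-running instrumentationLoop with a registry service whose Run method respects context cancellation. A trimmed, runnable sketch of that ticker-plus-ctx.Done() shape, with the stats calls stubbed out:

package main

import (
	"context"
	"fmt"
	"time"
)

// run mirrors the shape of InternalMetricsService.Run: two tickers drive
// periodic work, and ctx.Done() gives the loop a clean shutdown path.
func run(ctx context.Context) error {
	onceEveryDayTick := time.NewTicker(time.Hour * 24)
	everyMinuteTicker := time.NewTicker(time.Minute)
	defer onceEveryDayTick.Stop()
	defer everyMinuteTicker.Stop()

	for {
		select {
		case <-onceEveryDayTick.C:
			fmt.Println("send usage stats") // stand-in for sendUsageStats()
		case <-everyMinuteTicker.C:
			fmt.Println("update total stats") // stand-in for updateTotalStats()
		case <-ctx.Done():
			return ctx.Err() // server shutdown cancels this context
		}
	}
}

func main() {
	ctx, cancel := context.WithTimeout(context.Background(), 2*time.Second)
	defer cancel()
	fmt.Println(run(ctx)) // after ~2s: context deadline exceeded
}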
+func (im *InternalMetricsService) parseGraphiteSettings() error { + graphiteSection, err := im.Cfg.Raw.GetSection("metrics.graphite") + if err != nil { - return nil, nil + return nil } address := graphiteSection.Key("address").String() if address == "" { - return nil, nil + return nil } - cfg := &graphitebridge.Config{ + bridgeCfg := &graphitebridge.Config{ URL: address, Prefix: graphiteSection.Key("prefix").MustString("prod.grafana.%(instance_name)s"), CountersAsDelta: true, Gatherer: prometheus.DefaultGatherer, - Interval: time.Duration(settings.IntervalSeconds) * time.Second, + Interval: time.Duration(im.intervalSeconds) * time.Second, Timeout: 10 * time.Second, Logger: &logWrapper{logger: metricsLogger}, ErrorHandling: graphitebridge.ContinueOnError, @@ -74,6 +60,8 @@ func parseGraphiteSettings(settings *MetricSettings, file *ini.File) (*graphiteb prefix = "prod.grafana.%(instance_name)s." } - cfg.Prefix = strings.Replace(prefix, "%(instance_name)s", safeInstanceName, -1) - return cfg, nil + bridgeCfg.Prefix = strings.Replace(prefix, "%(instance_name)s", safeInstanceName, -1) + + im.graphiteCfg = bridgeCfg + return nil } diff --git a/pkg/middleware/auth_proxy.go b/pkg/middleware/auth_proxy.go index e1801404453..144a0ae3a69 100644 --- a/pkg/middleware/auth_proxy.go +++ b/pkg/middleware/auth_proxy.go @@ -1,8 +1,10 @@ package middleware import ( - "errors" "fmt" + "net" + "net/mail" + "reflect" "strings" "time" @@ -14,6 +16,8 @@ import ( "github.com/grafana/grafana/pkg/setting" ) +var AUTH_PROXY_SESSION_VAR = "authProxyHeaderValue" + func initContextWithAuthProxy(ctx *m.ReqContext, orgID int64) bool { if !setting.AuthProxyEnabled { return false @@ -25,45 +29,119 @@ func initContextWithAuthProxy(ctx *m.ReqContext, orgID int64) bool { } // if auth proxy ip(s) defined, check if request comes from one of those - if err := checkAuthenticationProxy(ctx, proxyHeaderValue); err != nil { + if err := checkAuthenticationProxy(ctx.Req.RemoteAddr, proxyHeaderValue); err != nil { ctx.Handle(407, "Proxy authentication required", err) return true } - query := getSignedInUserQueryForProxyAuth(proxyHeaderValue) - query.OrgId = orgID - if err := bus.Dispatch(query); err != nil { - if err != m.ErrUserNotFound { - ctx.Handle(500, "Failed to find user specified in auth proxy header", err) - return true - } - - if !setting.AuthProxyAutoSignUp { - return false - } - - cmd := getCreateUserCommandForProxyAuth(proxyHeaderValue) - if setting.LdapEnabled { - cmd.SkipOrgSetup = true - } - - if err := bus.Dispatch(cmd); err != nil { - ctx.Handle(500, "Failed to create user specified in auth proxy header", err) - return true - } - query = &m.GetSignedInUserQuery{UserId: cmd.Result.Id, OrgId: orgID} - if err := bus.Dispatch(query); err != nil { - ctx.Handle(500, "Failed find user after creation", err) - return true - } - } - // initialize session if err := ctx.Session.Start(ctx.Context); err != nil { log.Error(3, "Failed to start session", err) return false } + query := &m.GetSignedInUserQuery{OrgId: orgID} + + // if this session has already been authenticated by authProxy just load the user + sessProxyValue := ctx.Session.Get(AUTH_PROXY_SESSION_VAR) + if sessProxyValue != nil && sessProxyValue.(string) == proxyHeaderValue && getRequestUserId(ctx) > 0 { + // if we're using ldap, sync user periodically + if setting.LdapEnabled { + syncQuery := &m.LoginUserQuery{ + ReqContext: ctx, + Username: proxyHeaderValue, + } + + if err := syncGrafanaUserWithLdapUser(syncQuery); err != nil { + if err == login.ErrInvalidCredentials { 
+ ctx.Handle(500, "Unable to authenticate user", err) + return false + } + + ctx.Handle(500, "Failed to sync user", err) + return false + } + } + + query.UserId = getRequestUserId(ctx) + // if we're using ldap, pass authproxy login name to ldap user sync + } else if setting.LdapEnabled { + ctx.Session.Delete(session.SESS_KEY_LASTLDAPSYNC) + + syncQuery := &m.LoginUserQuery{ + ReqContext: ctx, + Username: proxyHeaderValue, + } + + if err := syncGrafanaUserWithLdapUser(syncQuery); err != nil { + if err == login.ErrInvalidCredentials { + ctx.Handle(500, "Unable to authenticate user", err) + return false + } + + ctx.Handle(500, "Failed to sync user", err) + return false + } + + if syncQuery.User == nil { + ctx.Handle(500, "Failed to sync user", nil) + return false + } + + query.UserId = syncQuery.User.Id + // no ldap, just use the info we have + } else { + extUser := &m.ExternalUserInfo{ + AuthModule: "authproxy", + AuthId: proxyHeaderValue, + } + + if setting.AuthProxyHeaderProperty == "username" { + extUser.Login = proxyHeaderValue + + // only set Email if it can be parsed as an email address + emailAddr, emailErr := mail.ParseAddress(proxyHeaderValue) + if emailErr == nil { + extUser.Email = emailAddr.Address + } + } else if setting.AuthProxyHeaderProperty == "email" { + extUser.Email = proxyHeaderValue + extUser.Login = proxyHeaderValue + } else { + ctx.Handle(500, "Auth proxy header property invalid", nil) + return true + } + + for _, field := range []string{"Name", "Email", "Login"} { + if setting.AuthProxyHeaders[field] == "" { + continue + } + + if val := ctx.Req.Header.Get(setting.AuthProxyHeaders[field]); val != "" { + reflect.ValueOf(extUser).Elem().FieldByName(field).SetString(val) + } + } + + // add/update user in grafana + cmd := &m.UpsertUserCommand{ + ReqContext: ctx, + ExternalUser: extUser, + SignupAllowed: setting.AuthProxyAutoSignUp, + } + err := bus.Dispatch(cmd) + if err != nil { + ctx.Handle(500, "Failed to login as user specified in auth proxy header", err) + return true + } + + query.UserId = cmd.Result.Id + } + + if err := bus.Dispatch(query); err != nil { + ctx.Handle(500, "Failed to find user", err) + return true + } + // Make sure that we cannot share a session between different users! 
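The auth proxy hunk above copies configured headers into ExternalUserInfo by matching exported struct field names via reflection. A reduced sketch of that technique; the header names and the faked request are hypothetical illustrations, not Grafana's actual configuration:

package main

import (
	"fmt"
	"net/http"
	"reflect"
)

type ExternalUserInfo struct {
	Name  string
	Email string
	Login string
}

func main() {
	// Hypothetical stand-in for setting.AuthProxyHeaders: field name -> header name.
	authProxyHeaders := map[string]string{
		"Name":  "X-WEBAUTH-NAME",
		"Email": "X-WEBAUTH-EMAIL",
	}

	req, _ := http.NewRequest("GET", "/", nil)
	req.Header.Set("X-WEBAUTH-NAME", "Torkel")

	extUser := &ExternalUserInfo{Login: "torkelo"}
	for _, field := range []string{"Name", "Email", "Login"} {
		if authProxyHeaders[field] == "" {
			continue // no header configured for this field
		}
		if val := req.Header.Get(authProxyHeaders[field]); val != "" {
			// FieldByName works here because the map keys match exported field names.
			reflect.ValueOf(extUser).Elem().FieldByName(field).SetString(val)
		}
	}

	fmt.Printf("%+v\n", *extUser) // {Name:Torkel Email: Login:torkelo}
}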
if getRequestUserId(ctx) > 0 && getRequestUserId(ctx) != query.Result.UserId { // remove session @@ -77,16 +155,7 @@ func initContextWithAuthProxy(ctx *m.ReqContext, orgID int64) bool { } } - // When ldap is enabled, sync userinfo and org roles - if err := syncGrafanaUserWithLdapUser(ctx, query); err != nil { - if err == login.ErrInvalidCredentials { - ctx.Handle(500, "Unable to authenticate user", err) - return false - } - - ctx.Handle(500, "Failed to sync user", err) - return false - } + ctx.Session.Set(AUTH_PROXY_SESSION_VAR, proxyHeaderValue) ctx.SignedInUser = query.Result ctx.IsSignedIn = true @@ -95,81 +164,51 @@ func initContextWithAuthProxy(ctx *m.ReqContext, orgID int64) bool { return true } -var syncGrafanaUserWithLdapUser = func(ctx *m.ReqContext, query *m.GetSignedInUserQuery) error { - if !setting.LdapEnabled { - return nil - } - +var syncGrafanaUserWithLdapUser = func(query *m.LoginUserQuery) error { expireEpoch := time.Now().Add(time.Duration(-setting.AuthProxyLdapSyncTtl) * time.Minute).Unix() var lastLdapSync int64 - if lastLdapSyncInSession := ctx.Session.Get(session.SESS_KEY_LASTLDAPSYNC); lastLdapSyncInSession != nil { + if lastLdapSyncInSession := query.ReqContext.Session.Get(session.SESS_KEY_LASTLDAPSYNC); lastLdapSyncInSession != nil { lastLdapSync = lastLdapSyncInSession.(int64) } if lastLdapSync < expireEpoch { ldapCfg := login.LdapCfg + if len(ldapCfg.Servers) < 1 { + return fmt.Errorf("No LDAP servers available") + } + for _, server := range ldapCfg.Servers { author := login.NewLdapAuthenticator(server) - if err := author.SyncSignedInUser(query.Result); err != nil { + if err := author.SyncUser(query); err != nil { return err } } - ctx.Session.Set(session.SESS_KEY_LASTLDAPSYNC, time.Now().Unix()) + query.ReqContext.Session.Set(session.SESS_KEY_LASTLDAPSYNC, time.Now().Unix()) } return nil } -func checkAuthenticationProxy(ctx *m.ReqContext, proxyHeaderValue string) error { +func checkAuthenticationProxy(remoteAddr string, proxyHeaderValue string) error { if len(strings.TrimSpace(setting.AuthProxyWhitelist)) == 0 { return nil } + proxies := strings.Split(setting.AuthProxyWhitelist, ",") - remoteAddrSplit := strings.Split(ctx.Req.RemoteAddr, ":") - sourceIP := remoteAddrSplit[0] - - found := false - for _, proxyIP := range proxies { - if sourceIP == strings.TrimSpace(proxyIP) { - found = true - break - } - } - - if !found { - msg := fmt.Sprintf("Request for user (%s) is not from the authentication proxy", proxyHeaderValue) - err := errors.New(msg) + sourceIP, _, err := net.SplitHostPort(remoteAddr) + if err != nil { return err } - return nil -} - -func getSignedInUserQueryForProxyAuth(headerVal string) *m.GetSignedInUserQuery { - query := m.GetSignedInUserQuery{} - if setting.AuthProxyHeaderProperty == "username" { - query.Login = headerVal - } else if setting.AuthProxyHeaderProperty == "email" { - query.Email = headerVal - } else { - panic("Auth proxy header property invalid") + // Compare allowed IP addresses to actual address + for _, proxyIP := range proxies { + if sourceIP == strings.TrimSpace(proxyIP) { + return nil + } } - return &query -} -func getCreateUserCommandForProxyAuth(headerVal string) *m.CreateUserCommand { - cmd := m.CreateUserCommand{} - if setting.AuthProxyHeaderProperty == "username" { - cmd.Login = headerVal - cmd.Email = headerVal - } else if setting.AuthProxyHeaderProperty == "email" { - cmd.Email = headerVal - cmd.Login = headerVal - } else { - panic("Auth proxy header property invalid") - } - return &cmd + return fmt.Errorf("Request for 
user (%s) from %s is not from the authentication proxy", proxyHeaderValue, sourceIP) } diff --git a/pkg/middleware/auth_proxy_test.go b/pkg/middleware/auth_proxy_test.go index b3c011bd870..47ed2f71a79 100644 --- a/pkg/middleware/auth_proxy_test.go +++ b/pkg/middleware/auth_proxy_test.go @@ -26,57 +26,71 @@ func TestAuthProxyWithLdapEnabled(t *testing.T) { return &mockLdapAuther } - signedInUser := m.SignedInUser{} - query := m.GetSignedInUserQuery{Result: &signedInUser} - - Convey("When session variable lastLdapSync not set, call syncSignedInUser and set lastLdapSync", func() { + Convey("When user logs in, call SyncUser", func() { // arrange - sess := mockSession{} + sess := newMockSession() ctx := m.ReqContext{Session: &sess} So(sess.Get(session.SESS_KEY_LASTLDAPSYNC), ShouldBeNil) // act - syncGrafanaUserWithLdapUser(&ctx, &query) + syncGrafanaUserWithLdapUser(&m.LoginUserQuery{ + ReqContext: &ctx, + Username: "test", + }) // assert - So(mockLdapAuther.syncSignedInUserCalled, ShouldBeTrue) + So(mockLdapAuther.syncUserCalled, ShouldBeTrue) So(sess.Get(session.SESS_KEY_LASTLDAPSYNC), ShouldBeGreaterThan, 0) }) Convey("When session variable not expired, don't sync and don't change session var", func() { // arrange - sess := mockSession{} + sess := newMockSession() ctx := m.ReqContext{Session: &sess} now := time.Now().Unix() sess.Set(session.SESS_KEY_LASTLDAPSYNC, now) + sess.Set(AUTH_PROXY_SESSION_VAR, "test") // act - syncGrafanaUserWithLdapUser(&ctx, &query) + syncGrafanaUserWithLdapUser(&m.LoginUserQuery{ + ReqContext: &ctx, + Username: "test", + }) // assert So(sess.Get(session.SESS_KEY_LASTLDAPSYNC), ShouldEqual, now) - So(mockLdapAuther.syncSignedInUserCalled, ShouldBeFalse) + So(mockLdapAuther.syncUserCalled, ShouldBeFalse) }) Convey("When lastldapsync is expired, session variable should be updated", func() { // arrange - sess := mockSession{} + sess := newMockSession() ctx := m.ReqContext{Session: &sess} expiredTime := time.Now().Add(time.Duration(-120) * time.Minute).Unix() sess.Set(session.SESS_KEY_LASTLDAPSYNC, expiredTime) + sess.Set(AUTH_PROXY_SESSION_VAR, "test") // act - syncGrafanaUserWithLdapUser(&ctx, &query) + syncGrafanaUserWithLdapUser(&m.LoginUserQuery{ + ReqContext: &ctx, + Username: "test", + }) // assert So(sess.Get(session.SESS_KEY_LASTLDAPSYNC), ShouldBeGreaterThan, expiredTime) - So(mockLdapAuther.syncSignedInUserCalled, ShouldBeTrue) + So(mockLdapAuther.syncUserCalled, ShouldBeTrue) }) }) } type mockSession struct { - value interface{} + value map[interface{}]interface{} +} + +func newMockSession() mockSession { + session := mockSession{} + session.value = make(map[interface{}]interface{}) + return session } func (s *mockSession) Start(c *macaron.Context) error { @@ -84,15 +98,16 @@ func (s *mockSession) Start(c *macaron.Context) error { } func (s *mockSession) Set(k interface{}, v interface{}) error { - s.value = v + s.value[k] = v return nil } func (s *mockSession) Get(k interface{}) interface{} { - return s.value + return s.value[k] } func (s *mockSession) Delete(k interface{}) interface{} { + delete(s.value, k) return nil } @@ -113,21 +128,18 @@ func (s *mockSession) RegenerateId(c *macaron.Context) error { } type mockLdapAuthenticator struct { - syncSignedInUserCalled bool + syncUserCalled bool } -func (a *mockLdapAuthenticator) Login(query *login.LoginUserQuery) error { +func (a *mockLdapAuthenticator) Login(query *m.LoginUserQuery) error { return nil } -func (a *mockLdapAuthenticator) SyncSignedInUser(signedInUser *m.SignedInUser) error { - 
a.syncSignedInUserCalled = true +func (a *mockLdapAuthenticator) SyncUser(query *m.LoginUserQuery) error { + a.syncUserCalled = true return nil } -func (a *mockLdapAuthenticator) GetGrafanaUserFor(ldapUser *login.LdapUserInfo) (*m.User, error) { +func (a *mockLdapAuthenticator) GetGrafanaUserFor(ctx *m.ReqContext, ldapUser *login.LdapUserInfo) (*m.User, error) { return nil, nil } -func (a *mockLdapAuthenticator) SyncOrgRoles(user *m.User, ldapUser *login.LdapUserInfo) error { - return nil -} diff --git a/pkg/middleware/middleware.go b/pkg/middleware/middleware.go index b5b244d5bff..93db49ed880 100644 --- a/pkg/middleware/middleware.go +++ b/pkg/middleware/middleware.go @@ -8,7 +8,6 @@ import ( "github.com/grafana/grafana/pkg/bus" "github.com/grafana/grafana/pkg/components/apikeygen" "github.com/grafana/grafana/pkg/log" - l "github.com/grafana/grafana/pkg/login" m "github.com/grafana/grafana/pkg/models" "github.com/grafana/grafana/pkg/services/session" "github.com/grafana/grafana/pkg/setting" @@ -165,7 +164,7 @@ func initContextWithBasicAuth(ctx *m.ReqContext, orgId int64) bool { user := loginQuery.Result - loginUserQuery := l.LoginUserQuery{Username: username, Password: password, User: user} + loginUserQuery := m.LoginUserQuery{Username: username, Password: password, User: user} if err := bus.Dispatch(&loginUserQuery); err != nil { ctx.JsonApiErr(401, "Invalid username or password", err) return true diff --git a/pkg/middleware/middleware_test.go b/pkg/middleware/middleware_test.go index c8e9e535cfa..b827751b1a5 100644 --- a/pkg/middleware/middleware_test.go +++ b/pkg/middleware/middleware_test.go @@ -9,7 +9,6 @@ import ( ms "github.com/go-macaron/session" "github.com/grafana/grafana/pkg/bus" - l "github.com/grafana/grafana/pkg/login" m "github.com/grafana/grafana/pkg/models" "github.com/grafana/grafana/pkg/services/session" "github.com/grafana/grafana/pkg/setting" @@ -72,7 +71,7 @@ func TestMiddlewareContext(t *testing.T) { return nil }) - bus.AddHandler("test", func(loginUserQuery *l.LoginUserQuery) error { + bus.AddHandler("test", func(loginUserQuery *m.LoginUserQuery) error { return nil }) @@ -177,12 +176,18 @@ func TestMiddlewareContext(t *testing.T) { setting.AuthProxyEnabled = true setting.AuthProxyHeaderName = "X-WEBAUTH-USER" setting.AuthProxyHeaderProperty = "username" + setting.LdapEnabled = false bus.AddHandler("test", func(query *m.GetSignedInUserQuery) error { query.Result = &m.SignedInUser{OrgId: 2, UserId: 12} return nil }) + bus.AddHandler("test", func(cmd *m.UpsertUserCommand) error { + cmd.Result = &m.User{Id: 12} + return nil + }) + sc.fakeReq("GET", "/") sc.req.Header.Add("X-WEBAUTH-USER", "torkelo") sc.exec() @@ -199,18 +204,18 @@ func TestMiddlewareContext(t *testing.T) { setting.AuthProxyHeaderName = "X-WEBAUTH-USER" setting.AuthProxyHeaderProperty = "username" setting.AuthProxyAutoSignUp = true + setting.LdapEnabled = false bus.AddHandler("test", func(query *m.GetSignedInUserQuery) error { if query.UserId > 0 { query.Result = &m.SignedInUser{OrgId: 4, UserId: 33} return nil - } else { - return m.ErrUserNotFound } + return m.ErrUserNotFound }) - bus.AddHandler("test", func(cmd *m.CreateUserCommand) error { - cmd.Result = m.User{Id: 33} + bus.AddHandler("test", func(cmd *m.UpsertUserCommand) error { + cmd.Result = &m.User{Id: 33} return nil }) @@ -226,11 +231,11 @@ func TestMiddlewareContext(t *testing.T) { }) }) - middlewareScenario("When auth_proxy is enabled and request RemoteAddr is not trusted", func(sc *scenarioContext) { + middlewareScenario("When auth_proxy is 
enabled and IPv4 request RemoteAddr is not trusted", func(sc *scenarioContext) { setting.AuthProxyEnabled = true setting.AuthProxyHeaderName = "X-WEBAUTH-USER" setting.AuthProxyHeaderProperty = "username" - setting.AuthProxyWhitelist = "192.168.1.1, 192.168.2.1" + setting.AuthProxyWhitelist = "192.168.1.1, 2001::23" sc.fakeReq("GET", "/") sc.req.Header.Add("X-WEBAUTH-USER", "torkelo") @@ -239,6 +244,24 @@ func TestMiddlewareContext(t *testing.T) { Convey("should return 407 status code", func() { So(sc.resp.Code, ShouldEqual, 407) + So(sc.resp.Body.String(), ShouldContainSubstring, "Request for user (torkelo) from 192.168.3.1 is not from the authentication proxy") + }) + }) + + middlewareScenario("When auth_proxy is enabled and IPv6 request RemoteAddr is not trusted", func(sc *scenarioContext) { + setting.AuthProxyEnabled = true + setting.AuthProxyHeaderName = "X-WEBAUTH-USER" + setting.AuthProxyHeaderProperty = "username" + setting.AuthProxyWhitelist = "192.168.1.1, 2001::23" + + sc.fakeReq("GET", "/") + sc.req.Header.Add("X-WEBAUTH-USER", "torkelo") + sc.req.RemoteAddr = "[2001:23]:12345" + sc.exec() + + Convey("should return 407 status code", func() { + So(sc.resp.Code, ShouldEqual, 407) + So(sc.resp.Body.String(), ShouldContainSubstring, "Request for user (torkelo) from 2001:23 is not from the authentication proxy") }) }) @@ -246,16 +269,21 @@ func TestMiddlewareContext(t *testing.T) { setting.AuthProxyEnabled = true setting.AuthProxyHeaderName = "X-WEBAUTH-USER" setting.AuthProxyHeaderProperty = "username" - setting.AuthProxyWhitelist = "192.168.1.1, 192.168.2.1" + setting.AuthProxyWhitelist = "192.168.1.1, 2001::23" bus.AddHandler("test", func(query *m.GetSignedInUserQuery) error { query.Result = &m.SignedInUser{OrgId: 4, UserId: 33} return nil }) + bus.AddHandler("test", func(cmd *m.UpsertUserCommand) error { + cmd.Result = &m.User{Id: 33} + return nil + }) + sc.fakeReq("GET", "/") sc.req.Header.Add("X-WEBAUTH-USER", "torkelo") - sc.req.RemoteAddr = "192.168.2.1:12345" + sc.req.RemoteAddr = "[2001::23]:12345" sc.exec() Convey("Should init context with user info", func() { @@ -271,6 +299,11 @@ func TestMiddlewareContext(t *testing.T) { setting.AuthProxyHeaderProperty = "username" setting.AuthProxyWhitelist = "" + bus.AddHandler("test", func(query *m.UpsertUserCommand) error { + query.Result = &m.User{Id: 32} + return nil + }) + bus.AddHandler("test", func(query *m.GetSignedInUserQuery) error { query.Result = &m.SignedInUser{OrgId: 4, UserId: 32} return nil @@ -301,11 +334,17 @@ func TestMiddlewareContext(t *testing.T) { setting.LdapEnabled = true called := false - syncGrafanaUserWithLdapUser = func(ctx *m.ReqContext, query *m.GetSignedInUserQuery) error { + syncGrafanaUserWithLdapUser = func(query *m.LoginUserQuery) error { called = true + query.User = &m.User{Id: 32} return nil } + bus.AddHandler("test", func(query *m.UpsertUserCommand) error { + query.Result = &m.User{Id: 32} + return nil + }) + bus.AddHandler("test", func(query *m.GetSignedInUserQuery) error { query.Result = &m.SignedInUser{OrgId: 4, UserId: 32} return nil diff --git a/pkg/middleware/recovery.go b/pkg/middleware/recovery.go index ec289387aa4..456bc91354e 100644 --- a/pkg/middleware/recovery.go +++ b/pkg/middleware/recovery.go @@ -35,7 +35,7 @@ var ( slash = []byte("/") ) -// stack returns a nicely formated stack frame, skipping skip frames +// stack returns a nicely formatted stack frame, skipping skip frames func stack(skip int) []byte { buf := new(bytes.Buffer) // the returned data // As we loop, we open files 
and read them. These variables record the currently diff --git a/pkg/middleware/render_auth.go b/pkg/middleware/render_auth.go index 6c338becbda..c382eb8e707 100644 --- a/pkg/middleware/render_auth.go +++ b/pkg/middleware/render_auth.go @@ -31,8 +31,6 @@ func initContextWithRenderAuth(ctx *m.ReqContext) bool { return true } -type renderContextFunc func(key string) (string, error) - func AddRenderAuthKey(orgId int64, userId int64, orgRole m.RoleType) string { renderKeysLock.Lock() diff --git a/pkg/models/alert.go b/pkg/models/alert.go index 88b49350b97..b72d87e94b2 100644 --- a/pkg/models/alert.go +++ b/pkg/models/alert.go @@ -34,8 +34,8 @@ const ( ) var ( - ErrCannotChangeStateOnPausedAlert error = fmt.Errorf("Cannot change state on pause alert") - ErrRequiresNewState error = fmt.Errorf("update alert state requires a new state.") + ErrCannotChangeStateOnPausedAlert = fmt.Errorf("Cannot change state on pause alert") + ErrRequiresNewState = fmt.Errorf("update alert state requires a new state.") ) func (s AlertStateType) IsValid() bool { diff --git a/pkg/models/dashboard_acl.go b/pkg/models/dashboard_acl.go index 5b91b2a70b4..5fc09bd16b5 100644 --- a/pkg/models/dashboard_acl.go +++ b/pkg/models/dashboard_acl.go @@ -56,7 +56,10 @@ type DashboardAclInfoDTO struct { UserId int64 `json:"userId"` UserLogin string `json:"userLogin"` UserEmail string `json:"userEmail"` + UserAvatarUrl string `json:"userAvatarUrl"` TeamId int64 `json:"teamId"` + TeamEmail string `json:"teamEmail"` + TeamAvatarUrl string `json:"teamAvatarUrl"` Team string `json:"team"` Role *RoleType `json:"role,omitempty"` Permission PermissionType `json:"permission"` @@ -66,6 +69,7 @@ type DashboardAclInfoDTO struct { Slug string `json:"slug"` IsFolder bool `json:"isFolder"` Url string `json:"url"` + Inherited bool `json:"inherited"` } func (dto *DashboardAclInfoDTO) hasSameRoleAs(other *DashboardAclInfoDTO) bool { diff --git a/pkg/models/dashboards.go b/pkg/models/dashboards.go index 4b771038df6..eb44c1bc582 100644 --- a/pkg/models/dashboards.go +++ b/pkg/models/dashboards.go @@ -13,26 +13,27 @@ import ( // Typed errors var ( - ErrDashboardNotFound = errors.New("Dashboard not found") - ErrDashboardFolderNotFound = errors.New("Folder not found") - ErrDashboardSnapshotNotFound = errors.New("Dashboard snapshot not found") - ErrDashboardWithSameUIDExists = errors.New("A dashboard with the same uid already exists") - ErrDashboardWithSameNameInFolderExists = errors.New("A dashboard with the same name in the folder already exists") - ErrDashboardVersionMismatch = errors.New("The dashboard has been changed by someone else") - ErrDashboardTitleEmpty = errors.New("Dashboard title cannot be empty") - ErrDashboardFolderCannotHaveParent = errors.New("A Dashboard Folder cannot be added to another folder") - ErrDashboardContainsInvalidAlertData = errors.New("Invalid alert data. 
Cannot save dashboard") - ErrDashboardFailedToUpdateAlertData = errors.New("Failed to save alert data") - ErrDashboardsWithSameSlugExists = errors.New("Multiple dashboards with the same slug exists") - ErrDashboardFailedGenerateUniqueUid = errors.New("Failed to generate unique dashboard id") - ErrDashboardTypeMismatch = errors.New("Dashboard cannot be changed to a folder") - ErrDashboardFolderWithSameNameAsDashboard = errors.New("Folder name cannot be the same as one of its dashboards") - ErrDashboardWithSameNameAsFolder = errors.New("Dashboard name cannot be the same as folder") - ErrDashboardFolderNameExists = errors.New("A folder with that name already exists") - ErrDashboardUpdateAccessDenied = errors.New("Access denied to save dashboard") - ErrDashboardInvalidUid = errors.New("uid contains illegal characters") - ErrDashboardUidToLong = errors.New("uid to long. max 40 characters") - RootFolderName = "General" + ErrDashboardNotFound = errors.New("Dashboard not found") + ErrDashboardFolderNotFound = errors.New("Folder not found") + ErrDashboardSnapshotNotFound = errors.New("Dashboard snapshot not found") + ErrDashboardWithSameUIDExists = errors.New("A dashboard with the same uid already exists") + ErrDashboardWithSameNameInFolderExists = errors.New("A dashboard with the same name in the folder already exists") + ErrDashboardVersionMismatch = errors.New("The dashboard has been changed by someone else") + ErrDashboardTitleEmpty = errors.New("Dashboard title cannot be empty") + ErrDashboardFolderCannotHaveParent = errors.New("A Dashboard Folder cannot be added to another folder") + ErrDashboardContainsInvalidAlertData = errors.New("Invalid alert data. Cannot save dashboard") + ErrDashboardFailedToUpdateAlertData = errors.New("Failed to save alert data") + ErrDashboardsWithSameSlugExists = errors.New("Multiple dashboards with the same slug exists") + ErrDashboardFailedGenerateUniqueUid = errors.New("Failed to generate unique dashboard id") + ErrDashboardTypeMismatch = errors.New("Dashboard cannot be changed to a folder") + ErrDashboardFolderWithSameNameAsDashboard = errors.New("Folder name cannot be the same as one of its dashboards") + ErrDashboardWithSameNameAsFolder = errors.New("Dashboard name cannot be the same as folder") + ErrDashboardFolderNameExists = errors.New("A folder with that name already exists") + ErrDashboardUpdateAccessDenied = errors.New("Access denied to save dashboard") + ErrDashboardInvalidUid = errors.New("uid contains illegal characters") + ErrDashboardUidToLong = errors.New("uid to long. 
max 40 characters") + ErrDashboardCannotSaveProvisionedDashboard = errors.New("Cannot save provisioned dashboard") + RootFolderName = "General" ) type UpdatePluginDashboardError struct { @@ -157,7 +158,7 @@ func NewDashboardFromJson(data *simplejson.Json) *Dashboard { return dash } -// GetDashboardModel turns the command into the savable model +// GetDashboardModel turns the command into the saveable model func (cmd *SaveDashboardCommand) GetDashboardModel() *Dashboard { dash := NewDashboardFromJson(cmd.Dashboard) userId := cmd.UserId @@ -209,14 +210,14 @@ func GetDashboardFolderUrl(isFolder bool, uid string, slug string) string { return GetDashboardUrl(uid, slug) } -// Return the html url for a dashboard +// GetDashboardUrl return the html url for a dashboard func GetDashboardUrl(uid string, slug string) string { return fmt.Sprintf("%s/d/%s/%s", setting.AppSubUrl, uid, slug) } -// Return the full url for a dashboard +// GetFullDashboardUrl return the full url for a dashboard func GetFullDashboardUrl(uid string, slug string) string { - return fmt.Sprintf("%s%s", setting.AppUrl, GetDashboardUrl(uid, slug)) + return fmt.Sprintf("%sd/%s/%s", setting.AppUrl, uid, slug) } // GetFolderUrl return the html url for a folder @@ -224,6 +225,10 @@ func GetFolderUrl(folderUid string, slug string) string { return fmt.Sprintf("%s/dashboards/f/%s/%s", setting.AppSubUrl, folderUid, slug) } +type ValidateDashboardBeforeSaveResult struct { + IsParentFolderChanged bool +} + // // COMMANDS // @@ -268,6 +273,7 @@ type ValidateDashboardBeforeSaveCommand struct { OrgId int64 Dashboard *Dashboard Overwrite bool + Result *ValidateDashboardBeforeSaveResult } // @@ -317,6 +323,12 @@ type GetDashboardSlugByIdQuery struct { Result string } +type IsDashboardProvisionedQuery struct { + DashboardId int64 + + Result bool +} + type GetProvisionedDashboardDataQuery struct { Name string diff --git a/pkg/models/dashboards_test.go b/pkg/models/dashboards_test.go index ad865b575bb..69bc8ab7bd9 100644 --- a/pkg/models/dashboards_test.go +++ b/pkg/models/dashboards_test.go @@ -4,11 +4,24 @@ import ( "testing" "github.com/grafana/grafana/pkg/components/simplejson" + "github.com/grafana/grafana/pkg/setting" . 
"github.com/smartystreets/goconvey/convey" ) func TestDashboardModel(t *testing.T) { + Convey("Generate full dashboard url", t, func() { + setting.AppUrl = "http://grafana.local/" + fullUrl := GetFullDashboardUrl("uid", "my-dashboard") + So(fullUrl, ShouldEqual, "http://grafana.local/d/uid/my-dashboard") + }) + + Convey("Generate relative dashboard url", t, func() { + setting.AppUrl = "" + fullUrl := GetDashboardUrl("uid", "my-dashboard") + So(fullUrl, ShouldEqual, "/d/uid/my-dashboard") + }) + Convey("When generating slug", t, func() { dashboard := NewDashboard("Grafana Play Home") dashboard.UpdateSlug() diff --git a/pkg/models/datasource.go b/pkg/models/datasource.go index f2236ad8477..b7e3e3eaa17 100644 --- a/pkg/models/datasource.go +++ b/pkg/models/datasource.go @@ -58,7 +58,7 @@ type DataSource struct { Updated time.Time } -var knownDatasourcePlugins map[string]bool = map[string]bool{ +var knownDatasourcePlugins = map[string]bool{ DS_ES: true, DS_GRAPHITE: true, DS_INFLUXDB: true, diff --git a/pkg/models/datasource_cache.go b/pkg/models/datasource_cache.go index b4a4e7f8a4d..66ba66e4d39 100644 --- a/pkg/models/datasource_cache.go +++ b/pkg/models/datasource_cache.go @@ -33,7 +33,7 @@ func (ds *DataSource) GetHttpClient() (*http.Client, error) { } return &http.Client{ - Timeout: time.Duration(30 * time.Second), + Timeout: 30 * time.Second, Transport: transport, }, nil } diff --git a/pkg/models/folders.go b/pkg/models/folders.go index c61620a11fc..f4dd7e5b776 100644 --- a/pkg/models/folders.go +++ b/pkg/models/folders.go @@ -32,7 +32,7 @@ type Folder struct { HasAcl bool } -// GetDashboardModel turns the command into the savable model +// GetDashboardModel turns the command into the saveable model func (cmd *CreateFolderCommand) GetDashboardModel(orgId int64, userId int64) *Dashboard { dashFolder := NewDashboardFolder(strings.TrimSpace(cmd.Title)) dashFolder.OrgId = orgId @@ -89,3 +89,12 @@ type UpdateFolderCommand struct { Result *Folder } + +// +// QUERIES +// + +type HasEditPermissionInFoldersQuery struct { + SignedInUser *SignedInUser + Result bool +} diff --git a/pkg/models/notifications.go b/pkg/models/notifications.go index 089d7c4360d..4b25ecb4dc7 100644 --- a/pkg/models/notifications.go +++ b/pkg/models/notifications.go @@ -19,12 +19,13 @@ type SendEmailCommandSync struct { } type SendWebhookSync struct { - Url string - User string - Password string - Body string - HttpMethod string - HttpHeader map[string]string + Url string + User string + Password string + Body string + HttpMethod string + HttpHeader map[string]string + ContentType string } type SendResetPasswordEmailCommand struct { diff --git a/pkg/models/org_user.go b/pkg/models/org_user.go index ca32cc50060..9231d18cfd6 100644 --- a/pkg/models/org_user.go +++ b/pkg/models/org_user.go @@ -48,9 +48,9 @@ func (r *RoleType) UnmarshalJSON(data []byte) error { *r = RoleType(str) - if (*r).IsValid() == false { + if !(*r).IsValid() { if (*r) != "" { - return errors.New(fmt.Sprintf("JSON validation error: invalid role value: %s", *r)) + return fmt.Errorf("JSON validation error: invalid role value: %s", *r) } *r = ROLE_VIEWER diff --git a/pkg/models/user_auth.go b/pkg/models/user_auth.go new file mode 100644 index 00000000000..0ecd144d52c --- /dev/null +++ b/pkg/models/user_auth.go @@ -0,0 +1,72 @@ +package models + +import ( + "time" +) + +type UserAuth struct { + Id int64 + UserId int64 + AuthModule string + AuthId string + Created time.Time +} + +type ExternalUserInfo struct { + AuthModule string + AuthId string + UserId int64 
+ Email string + Login string + Name string + OrgRoles map[int64]RoleType +} + +// --------------------- +// COMMANDS + +type UpsertUserCommand struct { + ReqContext *ReqContext + ExternalUser *ExternalUserInfo + SignupAllowed bool + + Result *User +} + +type SetAuthInfoCommand struct { + AuthModule string + AuthId string + UserId int64 +} + +type DeleteAuthInfoCommand struct { + UserAuth *UserAuth +} + +// ---------------------- +// QUERIES + +type LoginUserQuery struct { + ReqContext *ReqContext + Username string + Password string + User *User + IpAddress string +} + +type GetUserByAuthInfoQuery struct { + AuthModule string + AuthId string + UserId int64 + Email string + Login string + + Result *User +} + +type GetAuthInfoQuery struct { + AuthModule string + AuthId string + + Result *UserAuth +} diff --git a/pkg/plugins/dashboard_importer.go b/pkg/plugins/dashboard_importer.go index fb4d63a1fe4..1364fded987 100644 --- a/pkg/plugins/dashboard_importer.go +++ b/pkg/plugins/dashboard_importer.go @@ -148,11 +148,11 @@ func (this *DashTemplateEvaluator) evalValue(source *simplejson.Json) interface{ switch v := sourceValue.(type) { case string: interpolated := this.varRegex.ReplaceAllStringFunc(v, func(match string) string { - if replacement, exists := this.variables[match]; exists { + replacement, exists := this.variables[match] + if exists { return replacement - } else { - return match } + return match }) return interpolated case bool: diff --git a/pkg/plugins/dashboard_importer_test.go b/pkg/plugins/dashboard_importer_test.go index 549b3bb4cf9..6f31b49f99d 100644 --- a/pkg/plugins/dashboard_importer_test.go +++ b/pkg/plugins/dashboard_importer_test.go @@ -1,7 +1,6 @@ package plugins import ( - "context" "io/ioutil" "testing" @@ -88,13 +87,14 @@ func TestDashboardImport(t *testing.T) { func pluginScenario(desc string, t *testing.T, fn func()) { Convey("Given a plugin", t, func() { - setting.Cfg = ini.Empty() - sec, _ := setting.Cfg.NewSection("plugin.test-app") + setting.Raw = ini.Empty() + sec, _ := setting.Raw.NewSection("plugin.test-app") sec.NewKey("path", "../../tests/test-app") - err := initPlugins(context.Background()) + + pm := &PluginManager{} + err := pm.Init() So(err, ShouldBeNil) - Convey(desc, fn) }) } diff --git a/pkg/plugins/dashboards_test.go b/pkg/plugins/dashboards_test.go index 8573d452409..c422a1431c0 100644 --- a/pkg/plugins/dashboards_test.go +++ b/pkg/plugins/dashboards_test.go @@ -1,7 +1,6 @@ package plugins import ( - "context" "testing" "github.com/grafana/grafana/pkg/bus" @@ -15,10 +14,12 @@ import ( func TestPluginDashboards(t *testing.T) { Convey("When asking plugin dashboard info", t, func() { - setting.Cfg = ini.Empty() - sec, _ := setting.Cfg.NewSection("plugin.test-app") + setting.Raw = ini.Empty() + sec, _ := setting.Raw.NewSection("plugin.test-app") sec.NewKey("path", "../../tests/test-app") - err := initPlugins(context.Background()) + + pm := &PluginManager{} + err := pm.Init() So(err, ShouldBeNil) diff --git a/pkg/plugins/dashboards_updater.go b/pkg/plugins/dashboards_updater.go index 835e8873810..ebe11ed32d4 100644 --- a/pkg/plugins/dashboards_updater.go +++ b/pkg/plugins/dashboards_updater.go @@ -1,8 +1,6 @@ package plugins import ( - "time" - "github.com/grafana/grafana/pkg/bus" m "github.com/grafana/grafana/pkg/models" ) @@ -11,10 +9,8 @@ func init() { bus.AddEventListener(handlePluginStateChanged) } -func updateAppDashboards() { - time.Sleep(time.Second * 5) - - plog.Debug("Looking for App Dashboard Updates") +func (pm *PluginManager) 
updateAppDashboards() { + pm.log.Debug("Looking for App Dashboard Updates") query := m.GetPluginSettingsQuery{OrgId: 0} @@ -38,23 +34,24 @@ func updateAppDashboards() { } func autoUpdateAppDashboard(pluginDashInfo *PluginDashboardInfoDTO, orgId int64) error { - if dash, err := loadPluginDashboard(pluginDashInfo.PluginId, pluginDashInfo.Path); err != nil { + dash, err := loadPluginDashboard(pluginDashInfo.PluginId, pluginDashInfo.Path) + if err != nil { return err - } else { - plog.Info("Auto updating App dashboard", "dashboard", dash.Title, "newRev", pluginDashInfo.Revision, "oldRev", pluginDashInfo.ImportedRevision) - updateCmd := ImportDashboardCommand{ - OrgId: orgId, - PluginId: pluginDashInfo.PluginId, - Overwrite: true, - Dashboard: dash.Data, - User: &m.SignedInUser{UserId: 0, OrgRole: m.ROLE_ADMIN}, - Path: pluginDashInfo.Path, - } - - if err := bus.Dispatch(&updateCmd); err != nil { - return err - } } + plog.Info("Auto updating App dashboard", "dashboard", dash.Title, "newRev", pluginDashInfo.Revision, "oldRev", pluginDashInfo.ImportedRevision) + updateCmd := ImportDashboardCommand{ + OrgId: orgId, + PluginId: pluginDashInfo.PluginId, + Overwrite: true, + Dashboard: dash.Data, + User: &m.SignedInUser{UserId: 0, OrgRole: m.ROLE_ADMIN}, + Path: pluginDashInfo.Path, + } + + if err := bus.Dispatch(&updateCmd); err != nil { + return err + } + return nil } @@ -122,15 +119,14 @@ func handlePluginStateChanged(event *m.PluginStateChangedEvent) error { if err := bus.Dispatch(&query); err != nil { return err - } else { - for _, dash := range query.Result { - deleteCmd := m.DeleteDashboardCommand{OrgId: dash.OrgId, Id: dash.Id} + } + for _, dash := range query.Result { + deleteCmd := m.DeleteDashboardCommand{OrgId: dash.OrgId, Id: dash.Id} - plog.Info("Deleting plugin dashboard", "pluginId", event.PluginId, "dashboard", dash.Slug) + plog.Info("Deleting plugin dashboard", "pluginId", event.PluginId, "dashboard", dash.Slug) - if err := bus.Dispatch(&deleteCmd); err != nil { - return err - } + if err := bus.Dispatch(&deleteCmd); err != nil { + return err } } } diff --git a/pkg/plugins/datasource/wrapper/datasource_plugin_wrapper_test.go b/pkg/plugins/datasource/wrapper/datasource_plugin_wrapper_test.go index 834e8238e3a..7ada6fb6b03 100644 --- a/pkg/plugins/datasource/wrapper/datasource_plugin_wrapper_test.go +++ b/pkg/plugins/datasource/wrapper/datasource_plugin_wrapper_test.go @@ -74,7 +74,7 @@ func TestMappingRowValue(t *testing.T) { boolRowValue, _ := dpw.mapRowValue(&datasource.RowValue{Kind: datasource.RowValue_TYPE_BOOL, BoolValue: true}) haveBool, ok := boolRowValue.(bool) - if !ok || haveBool != true { + if !ok || !haveBool { t.Fatalf("Expected true, was %v", haveBool) } diff --git a/pkg/plugins/datasource_plugin.go b/pkg/plugins/datasource_plugin.go index 37ce175efe4..114b71deefc 100644 --- a/pkg/plugins/datasource_plugin.go +++ b/pkg/plugins/datasource_plugin.go @@ -76,7 +76,7 @@ func composeBinaryName(executable, os, arch string) string { return fmt.Sprintf("%s_%s_%s%s", executable, os, strings.ToLower(arch), extension) } -func (p *DataSourcePlugin) initBackendPlugin(ctx context.Context, log log.Logger) error { +func (p *DataSourcePlugin) startBackendPlugin(ctx context.Context, log log.Logger) error { p.log = log.New("plugin-id", p.Id) err := p.spawnSubProcess() diff --git a/pkg/plugins/models.go b/pkg/plugins/models.go index 541b37c8a8a..9677c21ef04 100644 --- a/pkg/plugins/models.go +++ b/pkg/plugins/models.go @@ -69,7 +69,7 @@ func (pb *PluginBase) registerPlugin(pluginDir string) 
error { for _, include := range pb.Includes { if include.Role == "" { - include.Role = m.RoleType(m.ROLE_VIEWER) + include.Role = m.ROLE_VIEWER } } diff --git a/pkg/plugins/plugins.go b/pkg/plugins/plugins.go index 417f565dd0c..5096bf5cebc 100644 --- a/pkg/plugins/plugins.go +++ b/pkg/plugins/plugins.go @@ -11,8 +11,10 @@ import ( "path/filepath" "reflect" "strings" + "time" "github.com/grafana/grafana/pkg/log" + "github.com/grafana/grafana/pkg/registry" "github.com/grafana/grafana/pkg/setting" "github.com/grafana/grafana/pkg/util" ) @@ -39,30 +41,12 @@ type PluginManager struct { log log.Logger } -func NewPluginManager(ctx context.Context) (*PluginManager, error) { - err := initPlugins(ctx) - - if err != nil { - return nil, err - } - - return &PluginManager{ - log: log.New("plugins"), - }, nil +func init() { + registry.RegisterService(&PluginManager{}) } -func (p *PluginManager) Run(ctx context.Context) error { - <-ctx.Done() - - for _, p := range DataSources { - p.Kill() - } - - p.log.Info("Stopped Plugins", "error", ctx.Err()) - return ctx.Err() -} - -func initPlugins(ctx context.Context) error { +func (pm *PluginManager) Init() error { + pm.log = log.New("plugins") plog = log.New("plugins") DataSources = map[string]*DataSourcePlugin{} @@ -76,7 +60,7 @@ func initPlugins(ctx context.Context) error { "app": AppPlugin{}, } - plog.Info("Starting plugin search") + pm.log.Info("Starting plugin search") scan(path.Join(setting.StaticRootPath, "app/plugins")) // check if plugins dir exists @@ -99,13 +83,6 @@ func initPlugins(ctx context.Context) error { } for _, ds := range DataSources { - if ds.Backend { - err := ds.initBackendPlugin(ctx, plog) - if err != nil { - plog.Error("Failed to init plugin.", "error", err, "plugin", ds.Id) - } - } - ds.initFrontendPlugin() } @@ -113,14 +90,49 @@ func initPlugins(ctx context.Context) error { app.initApp() } - go StartPluginUpdateChecker() - go updateAppDashboards() + return nil +} + +func (pm *PluginManager) startBackendPlugins(ctx context.Context) error { + for _, ds := range DataSources { + if ds.Backend { + if err := ds.startBackendPlugin(ctx, plog); err != nil { + pm.log.Error("Failed to init plugin.", "error", err, "plugin", ds.Id) + } + } + } return nil } +func (pm *PluginManager) Run(ctx context.Context) error { + pm.startBackendPlugins(ctx) + pm.updateAppDashboards() + pm.checkForUpdates() + + ticker := time.NewTicker(time.Minute * 10) + run := true + + for run { + select { + case <-ticker.C: + pm.checkForUpdates() + case <-ctx.Done(): + run = false + break + } + } + + // kill backend plugins + for _, p := range DataSources { + p.Kill() + } + + return ctx.Err() +} + func checkPluginPaths() error { - for _, section := range setting.Cfg.Sections() { + for _, section := range setting.Raw.Sections() { if strings.HasPrefix(section.Name(), "plugin.") { path := section.Key("path").String() if path != "" { @@ -193,11 +205,11 @@ func (scanner *PluginScanner) loadPluginJson(pluginJsonFilePath string) error { } var loader PluginLoader - if pluginGoType, exists := PluginTypes[pluginCommon.Type]; !exists { + pluginGoType, exists := PluginTypes[pluginCommon.Type] + if !exists { return errors.New("Unknown plugin type " + pluginCommon.Type) - } else { - loader = reflect.New(reflect.TypeOf(pluginGoType)).Interface().(PluginLoader) } + loader = reflect.New(reflect.TypeOf(pluginGoType)).Interface().(PluginLoader) reader.Seek(0, 0) return loader.Load(jsonParser, currentDir) @@ -218,9 +230,9 @@ func GetPluginMarkdown(pluginId string, name string) ([]byte, error) {
return make([]byte, 0), nil } - if data, err := ioutil.ReadFile(path); err != nil { + data, err := ioutil.ReadFile(path) + if err != nil { return nil, err - } else { - return data, nil } + return data, nil } diff --git a/pkg/plugins/plugins_test.go b/pkg/plugins/plugins_test.go index 4d3ccb4502b..fa68ae4389d 100644 --- a/pkg/plugins/plugins_test.go +++ b/pkg/plugins/plugins_test.go @@ -1,7 +1,6 @@ package plugins import ( - "context" "path/filepath" "testing" @@ -12,10 +11,12 @@ import ( func TestPluginScans(t *testing.T) { - Convey("When scaning for plugins", t, func() { + Convey("When scanning for plugins", t, func() { setting.StaticRootPath, _ = filepath.Abs("../../public/") - setting.Cfg = ini.Empty() - err := initPlugins(context.Background()) + setting.Raw = ini.Empty() + + pm := &PluginManager{} + err := pm.Init() So(err, ShouldBeNil) So(len(DataSources), ShouldBeGreaterThan, 1) @@ -27,10 +28,12 @@ func TestPluginScans(t *testing.T) { }) Convey("When reading app plugin definition", t, func() { - setting.Cfg = ini.Empty() - sec, _ := setting.Cfg.NewSection("plugin.nginx-app") + setting.Raw = ini.Empty() + sec, _ := setting.Raw.NewSection("plugin.nginx-app") sec.NewKey("path", "../../tests/test-app") - err := initPlugins(context.Background()) + + pm := &PluginManager{} + err := pm.Init() So(err, ShouldBeNil) So(len(Apps), ShouldBeGreaterThan, 0) diff --git a/pkg/plugins/queries.go b/pkg/plugins/queries.go index 5ae1825a88f..5bd412d2cc9 100644 --- a/pkg/plugins/queries.go +++ b/pkg/plugins/queries.go @@ -37,7 +37,7 @@ func GetPluginSettings(orgId int64) (map[string]*m.PluginSettingInfoDTO, error) // if it's included in app check app settings if pluginDef.IncludedInAppId != "" { - // app componets are by default disabled + // app components are by default disabled opt.Enabled = false if appSettings, ok := pluginMap[pluginDef.IncludedInAppId]; ok { diff --git a/pkg/plugins/update_checker.go b/pkg/plugins/update_checker.go index 68ccdeaf840..e61f4cf1df7 100644 --- a/pkg/plugins/update_checker.go +++ b/pkg/plugins/update_checker.go @@ -13,7 +13,7 @@ import ( ) var ( - httpClient http.Client = http.Client{Timeout: time.Duration(10 * time.Second)} + httpClient = http.Client{Timeout: 10 * time.Second} ) type GrafanaNetPlugin struct { @@ -26,23 +26,6 @@ type GithubLatest struct { Testing string `json:"testing"` } -func StartPluginUpdateChecker() { - if !setting.CheckForUpdates { - return - } - - // do one check directly - go checkForUpdates() - - ticker := time.NewTicker(time.Minute * 10) - for { - select { - case <-ticker.C: - checkForUpdates() - } - } -} - func getAllExternalPluginSlugs() string { var result []string for _, plug := range Plugins { @@ -56,8 +39,12 @@ func getAllExternalPluginSlugs() string { return strings.Join(result, ",") } -func checkForUpdates() { - log.Trace("Checking for updates") +func (pm *PluginManager) checkForUpdates() { + if !setting.CheckForUpdates { + return + } + + pm.log.Debug("Checking for updates") pluginSlugs := getAllExternalPluginSlugs() resp, err := httpClient.Get("https://grafana.com/api/plugins/versioncheck?slugIn=" + pluginSlugs + "&grafanaVersion=" + setting.BuildVersion) diff --git a/pkg/registry/registry.go b/pkg/registry/registry.go new file mode 100644 index 00000000000..ba3229d6df6 --- /dev/null +++ b/pkg/registry/registry.go @@ -0,0 +1,33 @@ +package registry + +import ( + "context" +) + +var services = []Service{} + +func RegisterService(srv Service) { + services = append(services, srv) +} + +func GetServices() []Service { + return services +} 
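For context, services registered this way are consumed by the server at startup: it calls Init() on every registered service and spawns Run() for those that implement the BackgroundService interface defined just below, skipping anything that reports IsDisabled(). A minimal sketch of that consuming loop (the runServices helper is illustrative, not part of this diff):

package main

import (
	"context"

	"github.com/grafana/grafana/pkg/registry"
	"golang.org/x/sync/errgroup"
)

// runServices initializes every registered service, then runs the
// background ones until the parent context is cancelled.
func runServices(ctx context.Context) error {
	eg, ctx := errgroup.WithContext(ctx)
	for _, svc := range registry.GetServices() {
		if registry.IsDisabled(svc) {
			continue // e.g. the alerting service when alerting is turned off
		}
		if err := svc.Init(); err != nil {
			return err
		}
		if bg, ok := svc.(registry.BackgroundService); ok {
			// bg is freshly declared each iteration, so it is safe to capture
			eg.Go(func() error { return bg.Run(ctx) })
		}
	}
	return eg.Wait()
}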
+ +type Service interface { + Init() error +} + +// Useful for alerting service +type CanBeDisabled interface { + IsDisabled() bool +} + +type BackgroundService interface { + Run(ctx context.Context) error +} + +func IsDisabled(srv Service) bool { + canBeDisabled, ok := srv.(CanBeDisabled) + return ok && canBeDisabled.IsDisabled() +} diff --git a/pkg/services/alerting/commands.go b/pkg/services/alerting/commands.go index 2c145614751..02186d697ee 100644 --- a/pkg/services/alerting/commands.go +++ b/pkg/services/alerting/commands.go @@ -13,11 +13,7 @@ func init() { func validateDashboardAlerts(cmd *m.ValidateDashboardAlertsCommand) error { extractor := NewDashAlertExtractor(cmd.Dashboard, cmd.OrgId) - if _, err := extractor.GetAlerts(); err != nil { - return err - } - - return nil + return extractor.ValidateAlerts() } func updateDashboardAlerts(cmd *m.UpdateDashboardAlertsCommand) error { @@ -29,15 +25,12 @@ func updateDashboardAlerts(cmd *m.UpdateDashboardAlertsCommand) error { extractor := NewDashAlertExtractor(cmd.Dashboard, cmd.OrgId) - if alerts, err := extractor.GetAlerts(); err != nil { - return err - } else { - saveAlerts.Alerts = alerts - } - - if err := bus.Dispatch(&saveAlerts); err != nil { + alerts, err := extractor.GetAlerts() + if err != nil { return err } - return nil + saveAlerts.Alerts = alerts + + return bus.Dispatch(&saveAlerts) } diff --git a/pkg/services/alerting/conditions/evaluator.go b/pkg/services/alerting/conditions/evaluator.go index 1b8fb952f65..8d7ca57f010 100644 --- a/pkg/services/alerting/conditions/evaluator.go +++ b/pkg/services/alerting/conditions/evaluator.go @@ -9,8 +9,8 @@ import ( ) var ( - defaultTypes []string = []string{"gt", "lt"} - rangedTypes []string = []string{"within_range", "outside_range"} + defaultTypes = []string{"gt", "lt"} + rangedTypes = []string{"within_range", "outside_range"} ) type AlertEvaluator interface { @@ -20,7 +20,7 @@ type AlertEvaluator interface { type NoValueEvaluator struct{} func (e *NoValueEvaluator) Eval(reducedValue null.Float) bool { - return reducedValue.Valid == false + return !reducedValue.Valid } type ThresholdEvaluator struct { @@ -45,7 +45,7 @@ func newThresholdEvaluator(typ string, model *simplejson.Json) (*ThresholdEvalua } func (e *ThresholdEvaluator) Eval(reducedValue null.Float) bool { - if reducedValue.Valid == false { + if !reducedValue.Valid { return false } @@ -88,7 +88,7 @@ func newRangedEvaluator(typ string, model *simplejson.Json) (*RangedEvaluator, e } func (e *RangedEvaluator) Eval(reducedValue null.Float) bool { - if reducedValue.Valid == false { + if !reducedValue.Valid { return false } diff --git a/pkg/services/alerting/conditions/query.go b/pkg/services/alerting/conditions/query.go index d499c5e8532..7d1a276c42e 100644 --- a/pkg/services/alerting/conditions/query.go +++ b/pkg/services/alerting/conditions/query.go @@ -53,7 +53,7 @@ func (c *QueryCondition) Eval(context *alerting.EvalContext) (*alerting.Conditio reducedValue := c.Reducer.Reduce(series) evalMatch := c.Evaluator.Eval(reducedValue) - if reducedValue.Valid == false { + if !reducedValue.Valid { emptySerieCount++ } diff --git a/pkg/services/alerting/engine.go b/pkg/services/alerting/engine.go index a6f97333d1b..c23cf56ae47 100644 --- a/pkg/services/alerting/engine.go +++ b/pkg/services/alerting/engine.go @@ -11,12 +11,14 @@ import ( "github.com/benbjohnson/clock" "github.com/grafana/grafana/pkg/log" + "github.com/grafana/grafana/pkg/registry" + "github.com/grafana/grafana/pkg/setting" "golang.org/x/sync/errgroup" ) -type Engine struct 
{ - execQueue chan *Job - clock clock.Clock +type AlertingService struct { + execQueue chan *Job + //clock clock.Clock ticker *Ticker scheduler Scheduler evalHandler EvalHandler @@ -25,35 +27,41 @@ type Engine struct { resultHandler ResultHandler } -func NewEngine() *Engine { - e := &Engine{ - ticker: NewTicker(time.Now(), time.Second*0, clock.New()), - execQueue: make(chan *Job, 1000), - scheduler: NewScheduler(), - evalHandler: NewEvalHandler(), - ruleReader: NewRuleReader(), - log: log.New("alerting.engine"), - resultHandler: NewResultHandler(), - } +func init() { + registry.RegisterService(&AlertingService{}) +} +func NewEngine() *AlertingService { + e := &AlertingService{} + e.Init() return e } -func (e *Engine) Run(ctx context.Context) error { - e.log.Info("Initializing Alerting") +func (e *AlertingService) IsDisabled() bool { + return !setting.AlertingEnabled || !setting.ExecuteAlerts +} +func (e *AlertingService) Init() error { + e.ticker = NewTicker(time.Now(), time.Second*0, clock.New()) + e.execQueue = make(chan *Job, 1000) + e.scheduler = NewScheduler() + e.evalHandler = NewEvalHandler() + e.ruleReader = NewRuleReader() + e.log = log.New("alerting.engine") + e.resultHandler = NewResultHandler() + return nil +} + +func (e *AlertingService) Run(ctx context.Context) error { alertGroup, ctx := errgroup.WithContext(ctx) - alertGroup.Go(func() error { return e.alertingTicker(ctx) }) alertGroup.Go(func() error { return e.runJobDispatcher(ctx) }) err := alertGroup.Wait() - - e.log.Info("Stopped Alerting", "reason", err) return err } -func (e *Engine) alertingTicker(grafanaCtx context.Context) error { +func (e *AlertingService) alertingTicker(grafanaCtx context.Context) error { defer func() { if err := recover(); err != nil { e.log.Error("Scheduler Panic: stopping alertingTicker", "error", err, "stack", log.Stack(1)) @@ -78,7 +86,7 @@ func (e *Engine) alertingTicker(grafanaCtx context.Context) error { } } -func (e *Engine) runJobDispatcher(grafanaCtx context.Context) error { +func (e *AlertingService) runJobDispatcher(grafanaCtx context.Context) error { dispatcherGroup, alertCtx := errgroup.WithContext(grafanaCtx) for { @@ -92,13 +100,13 @@ func (e *Engine) runJobDispatcher(grafanaCtx context.Context) error { } var ( - unfinishedWorkTimeout time.Duration = time.Second * 5 + unfinishedWorkTimeout = time.Second * 5 // TODO: Make alertTimeout and alertMaxAttempts configurable in the config file. 
- alertTimeout time.Duration = time.Second * 30 - alertMaxAttempts int = 3 + alertTimeout = time.Second * 30 + alertMaxAttempts = 3 ) -func (e *Engine) processJobWithRetry(grafanaCtx context.Context, job *Job) error { +func (e *AlertingService) processJobWithRetry(grafanaCtx context.Context, job *Job) error { defer func() { if err := recover(); err != nil { e.log.Error("Alert Panic", "error", err, "stack", log.Stack(1)) @@ -133,7 +141,7 @@ func (e *Engine) processJobWithRetry(grafanaCtx context.Context, job *Job) error } } -func (e *Engine) endJob(err error, cancelChan chan context.CancelFunc, job *Job) error { +func (e *AlertingService) endJob(err error, cancelChan chan context.CancelFunc, job *Job) error { job.Running = false close(cancelChan) for cancelFn := range cancelChan { @@ -142,7 +150,7 @@ func (e *Engine) endJob(err error, cancelChan chan context.CancelFunc, job *Job) return err } -func (e *Engine) processJob(attemptID int, attemptChan chan int, cancelChan chan context.CancelFunc, job *Job) { +func (e *AlertingService) processJob(attemptID int, attemptChan chan int, cancelChan chan context.CancelFunc, job *Job) { defer func() { if err := recover(); err != nil { e.log.Error("Alert Panic", "error", err, "stack", log.Stack(1)) diff --git a/pkg/services/alerting/engine_test.go b/pkg/services/alerting/engine_test.go index 64f954c6dd5..63108bbb9aa 100644 --- a/pkg/services/alerting/engine_test.go +++ b/pkg/services/alerting/engine_test.go @@ -10,7 +10,7 @@ import ( ) type FakeEvalHandler struct { - SuccessCallID int // 0 means never sucess + SuccessCallID int // 0 means never success CallNb int } @@ -87,7 +87,7 @@ func TestEngineProcessJob(t *testing.T) { Convey("Should trigger as many retries as needed", func() { - Convey("never sucess -> max retries number", func() { + Convey("never success -> max retries number", func() { expectedAttempts := alertMaxAttempts evalHandler := NewFakeEvalHandler(0) engine.evalHandler = evalHandler @@ -96,7 +96,7 @@ func TestEngineProcessJob(t *testing.T) { So(evalHandler.CallNb, ShouldEqual, expectedAttempts) }) - Convey("always sucess -> never retry", func() { + Convey("always success -> never retry", func() { expectedAttempts := 1 evalHandler := NewFakeEvalHandler(1) engine.evalHandler = evalHandler @@ -105,7 +105,7 @@ func TestEngineProcessJob(t *testing.T) { So(evalHandler.CallNb, ShouldEqual, expectedAttempts) }) - Convey("some errors before sucess -> some retries", func() { + Convey("some errors before success -> some retries", func() { expectedAttempts := int(math.Ceil(float64(alertMaxAttempts) / 2)) evalHandler := NewFakeEvalHandler(expectedAttempts) engine.evalHandler = evalHandler diff --git a/pkg/services/alerting/eval_context.go b/pkg/services/alerting/eval_context.go index 91d0e179a14..d0441d379b7 100644 --- a/pkg/services/alerting/eval_context.go +++ b/pkg/services/alerting/eval_context.go @@ -106,11 +106,11 @@ func (c *EvalContext) GetRuleUrl() (string, error) { return setting.AppUrl, nil } - if ref, err := c.GetDashboardUID(); err != nil { + ref, err := c.GetDashboardUID() + if err != nil { return "", err - } else { - return fmt.Sprintf(urlFormat, m.GetFullDashboardUrl(ref.Uid, ref.Slug), c.Rule.PanelId, c.Rule.OrgId), nil } + return fmt.Sprintf(urlFormat, m.GetFullDashboardUrl(ref.Uid, ref.Slug), c.Rule.PanelId, c.Rule.OrgId), nil } func (c *EvalContext) GetNewState() m.AlertStateType { diff --git a/pkg/services/alerting/extractor.go b/pkg/services/alerting/extractor.go index 2ae26c1a382..e1c1bfacb2e 100644 --- 
a/pkg/services/alerting/extractor.go +++ b/pkg/services/alerting/extractor.go @@ -11,76 +11,78 @@ import ( m "github.com/grafana/grafana/pkg/models" ) +// DashAlertExtractor extracts alerts from the dashboard json type DashAlertExtractor struct { Dash *m.Dashboard - OrgId int64 + OrgID int64 log log.Logger } -func NewDashAlertExtractor(dash *m.Dashboard, orgId int64) *DashAlertExtractor { +// NewDashAlertExtractor returns a new DashAlertExtractor +func NewDashAlertExtractor(dash *m.Dashboard, orgID int64) *DashAlertExtractor { return &DashAlertExtractor{ Dash: dash, - OrgId: orgId, + OrgID: orgID, log: log.New("alerting.extractor"), } } -func (e *DashAlertExtractor) lookupDatasourceId(dsName string) (*m.DataSource, error) { +func (e *DashAlertExtractor) lookupDatasourceID(dsName string) (*m.DataSource, error) { if dsName == "" { - query := &m.GetDataSourcesQuery{OrgId: e.OrgId} + query := &m.GetDataSourcesQuery{OrgId: e.OrgID} if err := bus.Dispatch(query); err != nil { return nil, err - } else { - for _, ds := range query.Result { - if ds.IsDefault { - return ds, nil - } + } + + for _, ds := range query.Result { + if ds.IsDefault { + return ds, nil } } } else { - query := &m.GetDataSourceByNameQuery{Name: dsName, OrgId: e.OrgId} + query := &m.GetDataSourceByNameQuery{Name: dsName, OrgId: e.OrgID} if err := bus.Dispatch(query); err != nil { return nil, err - } else { - return query.Result, nil } + + return query.Result, nil } return nil, errors.New("Could not find datasource id for " + dsName) } -func findPanelQueryByRefId(panel *simplejson.Json, refId string) *simplejson.Json { +func findPanelQueryByRefID(panel *simplejson.Json, refID string) *simplejson.Json { for _, targetsObj := range panel.Get("targets").MustArray() { target := simplejson.NewFromAny(targetsObj) - if target.Get("refId").MustString() == refId { + if target.Get("refId").MustString() == refID { return target } } return nil } -func copyJson(in *simplejson.Json) (*simplejson.Json, error) { - rawJson, err := in.MarshalJSON() +func copyJSON(in *simplejson.Json) (*simplejson.Json, error) { + rawJSON, err := in.MarshalJSON() if err != nil { return nil, err } - return simplejson.NewJson(rawJson) + return simplejson.NewJson(rawJSON) } -func (e *DashAlertExtractor) GetAlertFromPanels(jsonWithPanels *simplejson.Json) ([]*m.Alert, error) { +func (e *DashAlertExtractor) getAlertFromPanels(jsonWithPanels *simplejson.Json, validateAlertFunc func(*m.Alert) bool) ([]*m.Alert, error) { alerts := make([]*m.Alert, 0) for _, panelObj := range jsonWithPanels.Get("panels").MustArray() { panel := simplejson.NewFromAny(panelObj) - collapsedJson, collapsed := panel.CheckGet("collapsed") + collapsedJSON, collapsed := panel.CheckGet("collapsed") // check if the panel is collapsed - if collapsed && collapsedJson.MustBool() { + if collapsed && collapsedJSON.MustBool() { // extract alerts from sub panels for collapsed panels - als, err := e.GetAlertFromPanels(panel) + als, err := e.getAlertFromPanels(panel, validateAlertFunc) if err != nil { return nil, err } @@ -95,14 +97,14 @@ func (e *DashAlertExtractor) GetAlertFromPanels(jsonWithPanels *simplejson.Json) continue } - panelId, err := panel.Get("id").Int64() + panelID, err := panel.Get("id").Int64() if err != nil { return nil, fmt.Errorf("panel id is required. 
err %v", err) } // backward compatibility check, can be removed later enabled, hasEnabled := jsonAlert.CheckGet("enabled") - if hasEnabled && enabled.MustBool() == false { + if hasEnabled && !enabled.MustBool() { continue } @@ -113,8 +115,8 @@ func (e *DashAlertExtractor) GetAlertFromPanels(jsonWithPanels *simplejson.Json) alert := &m.Alert{ DashboardId: e.Dash.Id, - OrgId: e.OrgId, - PanelId: panelId, + OrgId: e.OrgID, + PanelId: panelID, Id: jsonAlert.Get("id").MustInt64(), Name: jsonAlert.Get("name").MustString(), Handler: jsonAlert.Get("handler").MustInt64(), @@ -126,11 +128,11 @@ func (e *DashAlertExtractor) GetAlertFromPanels(jsonWithPanels *simplejson.Json) jsonCondition := simplejson.NewFromAny(condition) jsonQuery := jsonCondition.Get("query") - queryRefId := jsonQuery.Get("params").MustArray()[0].(string) - panelQuery := findPanelQueryByRefId(panel, queryRefId) + queryRefID := jsonQuery.Get("params").MustArray()[0].(string) + panelQuery := findPanelQueryByRefID(panel, queryRefID) if panelQuery == nil { - reason := fmt.Sprintf("Alert on PanelId: %v refers to query(%s) that cannot be found", alert.PanelId, queryRefId) + reason := fmt.Sprintf("Alert on PanelId: %v refers to query(%s) that cannot be found", alert.PanelId, queryRefID) return nil, ValidationError{Reason: reason} } @@ -141,12 +143,13 @@ func (e *DashAlertExtractor) GetAlertFromPanels(jsonWithPanels *simplejson.Json) dsName = panel.Get("datasource").MustString() } - if datasource, err := e.lookupDatasourceId(dsName); err != nil { + datasource, err := e.lookupDatasourceID(dsName) + if err != nil { return nil, err - } else { - jsonQuery.SetPath([]string{"datasourceId"}, datasource.Id) } + jsonQuery.SetPath([]string{"datasourceId"}, datasource.Id) + if interval, err := panel.Get("interval").String(); err == nil { panelQuery.Set("interval", interval) } @@ -162,21 +165,28 @@ func (e *DashAlertExtractor) GetAlertFromPanels(jsonWithPanels *simplejson.Json) return nil, err } - if alert.ValidToSave() { - alerts = append(alerts, alert) - } else { + if !validateAlertFunc(alert) { e.log.Debug("Invalid Alert Data. Dashboard, Org or Panel ID is not correct", "alertName", alert.Name, "panelId", alert.PanelId) return nil, m.ErrDashboardContainsInvalidAlertData } + + alerts = append(alerts, alert) } return alerts, nil } -func (e *DashAlertExtractor) GetAlerts() ([]*m.Alert, error) { - e.log.Debug("GetAlerts") +func validateAlertRule(alert *m.Alert) bool { + return alert.ValidToSave() +} - dashboardJson, err := copyJson(e.Dash.Data) +// GetAlerts extracts alerts from the dashboard json and does full validation on the alert json data +func (e *DashAlertExtractor) GetAlerts() ([]*m.Alert, error) { + return e.extractAlerts(validateAlertRule) +} + +func (e *DashAlertExtractor) extractAlerts(validateFunc func(alert *m.Alert) bool) ([]*m.Alert, error) { + dashboardJSON, err := copyJSON(e.Dash.Data) if err != nil { return nil, err } @@ -185,11 +195,11 @@ func (e *DashAlertExtractor) GetAlerts() ([]*m.Alert, error) { // We extract alerts from rows to be backwards compatible // with the old dashboard json model. - rows := dashboardJson.Get("rows").MustArray() + rows := dashboardJSON.Get("rows").MustArray() if len(rows) > 0 { for _, rowObj := range rows { row := simplejson.NewFromAny(rowObj) - a, err := e.GetAlertFromPanels(row) + a, err := e.getAlertFromPanels(row, validateFunc) if err != nil { return nil, err } @@ -197,7 +207,7 @@ func (e *DashAlertExtractor) GetAlerts() ([]*m.Alert, error) { alerts = append(alerts, a...) 
} } else { - a, err := e.GetAlertFromPanels(dashboardJson) + a, err := e.getAlertFromPanels(dashboardJSON, validateFunc) if err != nil { return nil, err } @@ -208,3 +218,10 @@ func (e *DashAlertExtractor) GetAlerts() ([]*m.Alert, error) { e.log.Debug("Extracted alerts from dashboard", "alertCount", len(alerts)) return alerts, nil } + +// ValidateAlerts validates alerts in the dashboard json but does not require a valid dashboard id +// in the first validation pass +func (e *DashAlertExtractor) ValidateAlerts() error { + _, err := e.extractAlerts(func(alert *m.Alert) bool { return alert.OrgId != 0 && alert.PanelId != 0 }) + return err +} diff --git a/pkg/services/alerting/extractor_test.go b/pkg/services/alerting/extractor_test.go index 3bda6c771fb..861e9b9cbfc 100644 --- a/pkg/services/alerting/extractor_test.go +++ b/pkg/services/alerting/extractor_test.go @@ -240,5 +240,26 @@ func TestAlertRuleExtraction(t *testing.T) { So(len(alerts), ShouldEqual, 4) }) }) + + Convey("Parse and validate dashboard without id and containing an alert", func() { + json, err := ioutil.ReadFile("./test-data/dash-without-id.json") + So(err, ShouldBeNil) + + dashJSON, err := simplejson.NewJson(json) + So(err, ShouldBeNil) + dash := m.NewDashboardFromJson(dashJSON) + extractor := NewDashAlertExtractor(dash, 1) + + err = extractor.ValidateAlerts() + + Convey("Should validate without error", func() { + So(err, ShouldBeNil) + }) + + Convey("Should fail on save", func() { + _, err := extractor.GetAlerts() + So(err, ShouldEqual, m.ErrDashboardContainsInvalidAlertData) + }) + }) }) } diff --git a/pkg/services/alerting/notifier.go b/pkg/services/alerting/notifier.go index af9ba52a52a..1d5affbd3ec 100644 --- a/pkg/services/alerting/notifier.go +++ b/pkg/services/alerting/notifier.go @@ -87,17 +87,17 @@ func (n *notificationService) uploadImage(context *EvalContext) (err error) { IsAlertContext: true, } - if ref, err := context.GetDashboardUID(); err != nil { + ref, err := context.GetDashboardUID() + if err != nil { return err - } else { - renderOpts.Path = fmt.Sprintf("d-solo/%s/%s?panelId=%d", ref.Uid, ref.Slug, context.Rule.PanelId) } + renderOpts.Path = fmt.Sprintf("d-solo/%s/%s?panelId=%d", ref.Uid, ref.Slug, context.Rule.PanelId) - if imagePath, err := renderer.RenderToPng(renderOpts); err != nil { + imagePath, err := renderer.RenderToPng(renderOpts) + if err != nil { return err - } else { - context.ImageOnDiskPath = imagePath } + context.ImageOnDiskPath = imagePath context.ImagePublicUrl, err = uploader.Upload(context.Ctx, context.ImageOnDiskPath) if err != nil { @@ -117,12 +117,12 @@ func (n *notificationService) getNeededNotifiers(orgId int64, notificationIds [] var result []Notifier for _, notification := range query.Result { - if not, err := n.createNotifierFor(notification); err != nil { + not, err := n.createNotifierFor(notification) + if err != nil { return nil, err - } else { - if not.ShouldNotify(context) { - result = append(result, not) - } + } + if not.ShouldNotify(context) { + result = append(result, not) } } @@ -140,7 +140,7 @@ func (n *notificationService) createNotifierFor(model *m.AlertNotification) (Not type NotifierFactory func(notification *m.AlertNotification) (Notifier, error) -var notifierFactories map[string]*NotifierPlugin = make(map[string]*NotifierPlugin) +var notifierFactories = make(map[string]*NotifierPlugin) func RegisterNotifier(plugin *NotifierPlugin) { notifierFactories[plugin.Type] = plugin diff --git a/pkg/services/alerting/notifiers/dingding.go 
b/pkg/services/alerting/notifiers/dingding.go index e32b9d34f91..14eacef5831 100644 --- a/pkg/services/alerting/notifiers/dingding.go +++ b/pkg/services/alerting/notifiers/dingding.go @@ -72,7 +72,10 @@ func (this *DingDingNotifier) Notify(evalContext *alerting.EvalContext) error { this.log.Error("Failed to create Json data", "error", err, "dingding", this.Name) } - body, _ := bodyJSON.MarshalJSON() + body, err := bodyJSON.MarshalJSON() + if err != nil { + return err + } cmd := &m.SendWebhookSync{ Url: this.Url, diff --git a/pkg/services/alerting/notifiers/discord.go b/pkg/services/alerting/notifiers/discord.go new file mode 100644 index 00000000000..3ffa7484870 --- /dev/null +++ b/pkg/services/alerting/notifiers/discord.go @@ -0,0 +1,173 @@ +package notifiers + +import ( + "bytes" + "io" + "mime/multipart" + "os" + "strconv" + "strings" + + "github.com/grafana/grafana/pkg/bus" + "github.com/grafana/grafana/pkg/components/simplejson" + "github.com/grafana/grafana/pkg/log" + m "github.com/grafana/grafana/pkg/models" + "github.com/grafana/grafana/pkg/services/alerting" + "github.com/grafana/grafana/pkg/setting" +) + +func init() { + alerting.RegisterNotifier(&alerting.NotifierPlugin{ + Type: "discord", + Name: "Discord", + Description: "Sends notifications to Discord", + Factory: NewDiscordNotifier, + OptionsTemplate: ` +

+      <h3 class="page-heading">Discord settings</h3>
+      <div class="gf-form max-width-30">
+        <span class="gf-form-label width-10">Webhook URL</span>
+        <input type="text" required class="gf-form-input max-width-30" ng-model="ctrl.model.settings.url"></input>
+      </div>
+ `, + }) +} + +func NewDiscordNotifier(model *m.AlertNotification) (alerting.Notifier, error) { + url := model.Settings.Get("url").MustString() + if url == "" { + return nil, alerting.ValidationError{Reason: "Could not find webhook url property in settings"} + } + + return &DiscordNotifier{ + NotifierBase: NewNotifierBase(model.Id, model.IsDefault, model.Name, model.Type, model.Settings), + WebhookURL: url, + log: log.New("alerting.notifier.discord"), + }, nil +} + +type DiscordNotifier struct { + NotifierBase + WebhookURL string + log log.Logger +} + +func (this *DiscordNotifier) Notify(evalContext *alerting.EvalContext) error { + this.log.Info("Sending alert notification to", "webhook_url", this.WebhookURL) + + ruleUrl, err := evalContext.GetRuleUrl() + if err != nil { + this.log.Error("Failed get rule link", "error", err) + return err + } + + bodyJSON := simplejson.New() + bodyJSON.Set("username", "Grafana") + + fields := make([]map[string]interface{}, 0) + + for _, evt := range evalContext.EvalMatches { + + fields = append(fields, map[string]interface{}{ + "name": evt.Metric, + "value": evt.Value.FullString(), + "inline": true, + }) + } + + footer := map[string]interface{}{ + "text": "Grafana v" + setting.BuildVersion, + "icon_url": "https://grafana.com/assets/img/fav32.png", + } + + color, _ := strconv.ParseInt(strings.TrimLeft(evalContext.GetStateModel().Color, "#"), 16, 0) + + embed := simplejson.New() + embed.Set("title", evalContext.GetNotificationTitle()) + //Discord takes integer for color + embed.Set("color", color) + embed.Set("url", ruleUrl) + embed.Set("description", evalContext.Rule.Message) + embed.Set("type", "rich") + embed.Set("fields", fields) + embed.Set("footer", footer) + + var image map[string]interface{} + var embeddedImage = false + + if evalContext.ImagePublicUrl != "" { + image = map[string]interface{}{ + "url": evalContext.ImagePublicUrl, + } + embed.Set("image", image) + } else { + image = map[string]interface{}{ + "url": "attachment://graph.png", + } + embed.Set("image", image) + embeddedImage = true + } + + bodyJSON.Set("embeds", []interface{}{embed}) + + json, _ := bodyJSON.MarshalJSON() + + content_type := "application/json" + + var body []byte + + if embeddedImage { + + var b bytes.Buffer + + w := multipart.NewWriter(&b) + + f, err := os.Open(evalContext.ImageOnDiskPath) + + if err != nil { + this.log.Error("Can't open graph file", err) + return err + } + + defer f.Close() + + fw, err := w.CreateFormField("payload_json") + if err != nil { + return err + } + + if _, err = fw.Write([]byte(string(json))); err != nil { + return err + } + + fw, err = w.CreateFormFile("file", "graph.png") + if err != nil { + return err + } + + if _, err = io.Copy(fw, f); err != nil { + return err + } + + w.Close() + + body = b.Bytes() + content_type = w.FormDataContentType() + + } else { + body = json + } + + cmd := &m.SendWebhookSync{ + Url: this.WebhookURL, + Body: string(body), + HttpMethod: "POST", + ContentType: content_type, + } + + if err := bus.DispatchCtx(evalContext.Ctx, cmd); err != nil { + this.log.Error("Failed to send notification to Discord", "error", err) + return err + } + + return nil +} diff --git a/pkg/services/alerting/notifiers/discord_test.go b/pkg/services/alerting/notifiers/discord_test.go new file mode 100644 index 00000000000..fe925aab362 --- /dev/null +++ b/pkg/services/alerting/notifiers/discord_test.go @@ -0,0 +1,52 @@ +package notifiers + +import ( + "testing" + + "github.com/grafana/grafana/pkg/components/simplejson" + m 
"github.com/grafana/grafana/pkg/models" + . "github.com/smartystreets/goconvey/convey" +) + +func TestDiscordNotifier(t *testing.T) { + Convey("Telegram notifier tests", t, func() { + + Convey("Parsing alert notification from settings", func() { + Convey("empty settings should return error", func() { + json := `{ }` + + settingsJSON, _ := simplejson.NewJson([]byte(json)) + model := &m.AlertNotification{ + Name: "discord_testing", + Type: "discord", + Settings: settingsJSON, + } + + _, err := NewDiscordNotifier(model) + So(err, ShouldNotBeNil) + }) + + Convey("settings should trigger incident", func() { + json := ` + { + "url": "https://web.hook/" + }` + + settingsJSON, _ := simplejson.NewJson([]byte(json)) + model := &m.AlertNotification{ + Name: "discord_testing", + Type: "discord", + Settings: settingsJSON, + } + + not, err := NewDiscordNotifier(model) + discordNotifier := not.(*DiscordNotifier) + + So(err, ShouldBeNil) + So(discordNotifier.Name, ShouldEqual, "discord_testing") + So(discordNotifier.Type, ShouldEqual, "discord") + So(discordNotifier.WebhookURL, ShouldEqual, "https://web.hook/") + }) + }) + }) +} diff --git a/pkg/services/alerting/notifiers/hipchat.go b/pkg/services/alerting/notifiers/hipchat.go index f1f63d42a04..58e1b7bd71e 100644 --- a/pkg/services/alerting/notifiers/hipchat.go +++ b/pkg/services/alerting/notifiers/hipchat.go @@ -111,7 +111,7 @@ func (this *HipChatNotifier) Notify(evalContext *alerting.EvalContext) error { } message := "" - if evalContext.Rule.State != models.AlertStateOK { //dont add message when going back to alert state ok. + if evalContext.Rule.State != models.AlertStateOK { //don't add message when going back to alert state ok. message += " " + evalContext.Rule.Message } diff --git a/pkg/services/alerting/notifiers/line.go b/pkg/services/alerting/notifiers/line.go index 4fbaa2d543e..4814662f3a9 100644 --- a/pkg/services/alerting/notifiers/line.go +++ b/pkg/services/alerting/notifiers/line.go @@ -90,7 +90,7 @@ func (this *LineNotifier) createAlert(evalContext *alerting.EvalContext) error { } if err := bus.DispatchCtx(evalContext.Ctx, cmd); err != nil { - this.log.Error("Failed to send notification to LINE", "error", err, "body", string(body)) + this.log.Error("Failed to send notification to LINE", "error", err, "body", body) return err } diff --git a/pkg/services/alerting/notifiers/opsgenie.go b/pkg/services/alerting/notifiers/opsgenie.go index 5d8b15160c4..f0f5142cf05 100644 --- a/pkg/services/alerting/notifiers/opsgenie.go +++ b/pkg/services/alerting/notifiers/opsgenie.go @@ -41,7 +41,7 @@ func init() { } var ( - opsgenieAlertURL string = "https://api.opsgenie.com/v2/alerts" + opsgenieAlertURL = "https://api.opsgenie.com/v2/alerts" ) func NewOpsGenieNotifier(model *m.AlertNotification) (alerting.Notifier, error) { diff --git a/pkg/services/alerting/notifiers/pagerduty.go b/pkg/services/alerting/notifiers/pagerduty.go index 58484051432..02219b2203d 100644 --- a/pkg/services/alerting/notifiers/pagerduty.go +++ b/pkg/services/alerting/notifiers/pagerduty.go @@ -40,7 +40,7 @@ func init() { } var ( - pagerdutyEventApiUrl string = "https://events.pagerduty.com/v2/enqueue" + pagerdutyEventApiUrl = "https://events.pagerduty.com/v2/enqueue" ) func NewPagerdutyNotifier(model *m.AlertNotification) (alerting.Notifier, error) { diff --git a/pkg/services/alerting/notifiers/slack.go b/pkg/services/alerting/notifiers/slack.go index e051a71740a..a8139b62726 100644 --- a/pkg/services/alerting/notifiers/slack.go +++ b/pkg/services/alerting/notifiers/slack.go @@ 
-129,7 +129,7 @@ func (this *SlackNotifier) Notify(evalContext *alerting.EvalContext) error { } message := this.Mention - if evalContext.Rule.State != m.AlertStateOK { //dont add message when going back to alert state ok. + if evalContext.Rule.State != m.AlertStateOK { //don't add message when going back to alert state ok. message += " " + evalContext.Rule.Message } image_url := "" diff --git a/pkg/services/alerting/notifiers/teams.go b/pkg/services/alerting/notifiers/teams.go index 43d628a4415..4e34e16ab51 100644 --- a/pkg/services/alerting/notifiers/teams.go +++ b/pkg/services/alerting/notifiers/teams.go @@ -13,7 +13,7 @@ func init() { alerting.RegisterNotifier(&alerting.NotifierPlugin{ Type: "teams", Name: "Microsoft Teams", - Description: "Sends notifications using Incomming Webhook connector to Microsoft Teams", + Description: "Sends notifications using Incoming Webhook connector to Microsoft Teams", Factory: NewTeamsNotifier, OptionsTemplate: `
[teams.go OptionsTemplate markup is truncated in this extract; only the "Teams settings" heading text survives]
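The teams.go hunk above shows the registration shape every notifier in this package shares: an init() hands alerting.RegisterNotifier a NotifierPlugin whose Factory builds the concrete notifier from stored settings. A condensed sketch of that skeleton, mirroring the discord.go code earlier in this diff (the "mychannel" type is illustrative, not part of this change):

```go
package notifiers

import (
	"github.com/grafana/grafana/pkg/bus"
	"github.com/grafana/grafana/pkg/log"
	m "github.com/grafana/grafana/pkg/models"
	"github.com/grafana/grafana/pkg/services/alerting"
)

func init() {
	alerting.RegisterNotifier(&alerting.NotifierPlugin{
		Type:            "mychannel", // illustrative only
		Name:            "My Channel",
		Description:     "Skeleton showing the shared notifier shape",
		Factory:         NewMyChannelNotifier,
		OptionsTemplate: `<h3 class="page-heading">My Channel settings</h3>`,
	})
}

// The factory validates stored settings and wraps them in a Notifier.
func NewMyChannelNotifier(model *m.AlertNotification) (alerting.Notifier, error) {
	url := model.Settings.Get("url").MustString()
	if url == "" {
		return nil, alerting.ValidationError{Reason: "Could not find url property in settings"}
	}

	return &MyChannelNotifier{
		NotifierBase: NewNotifierBase(model.Id, model.IsDefault, model.Name, model.Type, model.Settings),
		URL:          url,
		log:          log.New("alerting.notifier.mychannel"),
	}, nil
}

type MyChannelNotifier struct {
	NotifierBase
	URL string
	log log.Logger
}

// Notify dispatches a synchronous webhook command on the bus, as the real notifiers do.
func (this *MyChannelNotifier) Notify(evalContext *alerting.EvalContext) error {
	cmd := &m.SendWebhookSync{
		Url:        this.URL,
		Body:       evalContext.GetNotificationTitle(),
		HttpMethod: "POST",
	}
	return bus.DispatchCtx(evalContext.Ctx, cmd)
}
```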
diff --git a/pkg/services/alerting/notifiers/telegram.go b/pkg/services/alerting/notifiers/telegram.go index 5cbdad60906..ca24c996914 100644 --- a/pkg/services/alerting/notifiers/telegram.go +++ b/pkg/services/alerting/notifiers/telegram.go @@ -3,13 +3,14 @@ package notifiers import ( "bytes" "fmt" + "io" + "mime/multipart" + "os" + "github.com/grafana/grafana/pkg/bus" "github.com/grafana/grafana/pkg/log" m "github.com/grafana/grafana/pkg/models" "github.com/grafana/grafana/pkg/services/alerting" - "io" - "mime/multipart" - "os" ) const ( @@ -17,7 +18,7 @@ const ( ) var ( - telegramApiUrl string = "https://api.telegram.org/bot%s/%s" + telegramApiUrl = "https://api.telegram.org/bot%s/%s" ) func init() { @@ -90,9 +91,8 @@ func (this *TelegramNotifier) buildMessage(evalContext *alerting.EvalContext, se cmd, err := this.buildMessageInlineImage(evalContext) if err == nil { return cmd - } else { - this.log.Error("Could not generate Telegram message with inline image.", "err", err) } + this.log.Error("Could not generate Telegram message with inline image.", "err", err) } return this.buildMessageLinkedImage(evalContext) @@ -133,6 +133,9 @@ func (this *TelegramNotifier) buildMessageInlineImage(evalContext *alerting.Eval } ruleUrl, err := evalContext.GetRuleUrl() + if err != nil { + return nil, err + } metrics := generateMetricsMessage(evalContext) message := generateImageCaption(evalContext, ruleUrl, metrics) @@ -219,7 +222,7 @@ func appendIfPossible(message string, extra string, sizeLimit int) string { func (this *TelegramNotifier) Notify(evalContext *alerting.EvalContext) error { var cmd *m.SendWebhookSync - if evalContext.ImagePublicUrl == "" && this.UploadImage == true { + if evalContext.ImagePublicUrl == "" && this.UploadImage { cmd = this.buildMessage(evalContext, true) } else { cmd = this.buildMessage(evalContext, false) diff --git a/pkg/services/alerting/notifiers/telegram_test.go b/pkg/services/alerting/notifiers/telegram_test.go index 05be787dced..98c8d884ad0 100644 --- a/pkg/services/alerting/notifiers/telegram_test.go +++ b/pkg/services/alerting/notifiers/telegram_test.go @@ -100,7 +100,7 @@ func TestTelegramNotifier(t *testing.T) { So(caption, ShouldContainSubstring, "Some kind of message that is too long for appending to our pretty little message, this line is actually exactly 197 chars long and I will get there in the end I promise ") }) - Convey("Metrics should be skipped if they dont fit", func() { + Convey("Metrics should be skipped if they don't fit", func() { evalContext := alerting.NewEvalContext(nil, &alerting.Rule{ Name: "This is an alarm", Message: "Some kind of message that is too long for appending to our pretty little message, this line is actually exactly 197 chars long and I will get there in the end I ", diff --git a/pkg/services/alerting/notifiers/victorops.go b/pkg/services/alerting/notifiers/victorops.go index 4b4db553cde..a753ca3cbf6 100644 --- a/pkg/services/alerting/notifiers/victorops.go +++ b/pkg/services/alerting/notifiers/victorops.go @@ -83,27 +83,6 @@ func (this *VictoropsNotifier) Notify(evalContext *alerting.EvalContext) error { return nil } - fields := make([]map[string]interface{}, 0) - fieldLimitCount := 4 - for index, evt := range evalContext.EvalMatches { - fields = append(fields, map[string]interface{}{ - "title": evt.Metric, - "value": evt.Value, - "short": true, - }) - if index > fieldLimitCount { - break - } - } - - if evalContext.Error != nil { - fields = append(fields, map[string]interface{}{ - "title": "Error message", - "value": 
evalContext.Error.Error(), - "short": false, - }) - } - messageType := evalContext.Rule.State if evalContext.Rule.State == models.AlertStateAlerting { // translate 'Alerting' to 'CRITICAL' (Victorops analog) messageType = AlertStateCritical diff --git a/pkg/services/alerting/reader.go b/pkg/services/alerting/reader.go index 45f0c65d4fb..627159c286b 100644 --- a/pkg/services/alerting/reader.go +++ b/pkg/services/alerting/reader.go @@ -16,7 +16,7 @@ type RuleReader interface { type DefaultRuleReader struct { sync.RWMutex - serverID string + //serverID string serverPosition int clusterSize int log log.Logger diff --git a/pkg/services/alerting/result_handler.go b/pkg/services/alerting/result_handler.go index 8f9deb758a6..0c92fc32110 100644 --- a/pkg/services/alerting/result_handler.go +++ b/pkg/services/alerting/result_handler.go @@ -56,7 +56,7 @@ func (handler *DefaultResultHandler) Handle(evalContext *EvalContext) error { if err := bus.Dispatch(cmd); err != nil { if err == m.ErrCannotChangeStateOnPausedAlert { - handler.log.Error("Cannot change state on alert thats pause", "error", err) + handler.log.Error("Cannot change state on alert that's paused", "error", err) return err } @@ -77,7 +77,7 @@ func (handler *DefaultResultHandler) Handle(evalContext *EvalContext) error { Text: "", NewState: string(evalContext.Rule.State), PrevState: string(evalContext.PrevAlertState), - Epoch: time.Now().Unix(), + Epoch: time.Now().UnixNano() / int64(time.Millisecond), Data: annotationData, } diff --git a/pkg/services/alerting/rule.go b/pkg/services/alerting/rule.go index bdf53798e34..018d138dbe4 100644 --- a/pkg/services/alerting/rule.go +++ b/pkg/services/alerting/rule.go @@ -55,8 +55,8 @@ func (e ValidationError) Error() string { } var ( - ValueFormatRegex = regexp.MustCompile("^\\d+") - UnitFormatRegex = regexp.MustCompile("\\w{1}$") + ValueFormatRegex = regexp.MustCompile(`^\d+`) + UnitFormatRegex = regexp.MustCompile(`\w{1}$`) ) var unitMultiplier = map[string]int{ @@ -103,25 +103,25 @@ func NewRuleFromDBAlert(ruleDef *m.Alert) (*Rule, error) { for _, v := range ruleDef.Settings.Get("notifications").MustArray() { jsonModel := simplejson.NewFromAny(v) - if id, err := jsonModel.Get("id").Int64(); err != nil { + id, err := jsonModel.Get("id").Int64() + if err != nil { return nil, ValidationError{Reason: "Invalid notification schema", DashboardId: model.DashboardId, Alertid: model.Id, PanelId: model.PanelId} - } else { - model.Notifications = append(model.Notifications, id) } + model.Notifications = append(model.Notifications, id) } for index, condition := range ruleDef.Settings.Get("conditions").MustArray() { conditionModel := simplejson.NewFromAny(condition) conditionType := conditionModel.Get("type").MustString() - if factory, exist := conditionFactories[conditionType]; !exist { + factory, exist := conditionFactories[conditionType] + if !exist { return nil, ValidationError{Reason: "Unknown alert condition: " + conditionType, DashboardId: model.DashboardId, Alertid: model.Id, PanelId: model.PanelId} - } else { - if queryCondition, err := factory(conditionModel, index); err != nil { - return nil, ValidationError{Err: err, DashboardId: model.DashboardId, Alertid: model.Id, PanelId: model.PanelId} - } else { - model.Conditions = append(model.Conditions, queryCondition) - } } + queryCondition, err := factory(conditionModel, index) + if err != nil { + return nil, ValidationError{Err: err, DashboardId: model.DashboardId, Alertid: model.Id, PanelId: model.PanelId} + } + model.Conditions = 
append(model.Conditions, queryCondition) } if len(model.Conditions) == 0 { @@ -133,7 +133,7 @@ func NewRuleFromDBAlert(ruleDef *m.Alert) (*Rule, error) { type ConditionFactory func(model *simplejson.Json, index int) (Condition, error) -var conditionFactories map[string]ConditionFactory = make(map[string]ConditionFactory) +var conditionFactories = make(map[string]ConditionFactory) func RegisterCondition(typeName string, factory ConditionFactory) { conditionFactories[typeName] = factory diff --git a/pkg/services/alerting/scheduler.go b/pkg/services/alerting/scheduler.go index 151f802ec15..b7555ae8d89 100644 --- a/pkg/services/alerting/scheduler.go +++ b/pkg/services/alerting/scheduler.go @@ -15,7 +15,7 @@ type SchedulerImpl struct { func NewScheduler() Scheduler { return &SchedulerImpl{ - jobs: make(map[int64]*Job, 0), + jobs: make(map[int64]*Job), log: log.New("alerting.scheduler"), } } @@ -23,7 +23,7 @@ func NewScheduler() Scheduler { func (s *SchedulerImpl) Update(rules []*Rule) { s.log.Debug("Scheduling update", "ruleCount", len(rules)) - jobs := make(map[int64]*Job, 0) + jobs := make(map[int64]*Job) for i, rule := range rules { var job *Job @@ -58,7 +58,7 @@ func (s *SchedulerImpl) Tick(tickTime time.Time, execQueue chan *Job) { if job.OffsetWait && now%job.Offset == 0 { job.OffsetWait = false - s.enque(job, execQueue) + s.enqueue(job, execQueue) continue } @@ -66,13 +66,13 @@ func (s *SchedulerImpl) Tick(tickTime time.Time, execQueue chan *Job) { if job.Offset > 0 { job.OffsetWait = true } else { - s.enque(job, execQueue) + s.enqueue(job, execQueue) } } } } -func (s *SchedulerImpl) enque(job *Job, execQueue chan *Job) { +func (s *SchedulerImpl) enqueue(job *Job, execQueue chan *Job) { s.log.Debug("Scheduler: Putting job on to exec queue", "name", job.Rule.Name, "id", job.Rule.Id) execQueue <- job } diff --git a/pkg/services/alerting/test-data/dash-without-id.json b/pkg/services/alerting/test-data/dash-without-id.json new file mode 100644 index 00000000000..e0a212695d8 --- /dev/null +++ b/pkg/services/alerting/test-data/dash-without-id.json @@ -0,0 +1,281 @@ +{ + "title": "Influxdb", + "tags": [ + "apa" + ], + "style": "dark", + "timezone": "browser", + "editable": true, + "hideControls": false, + "sharedCrosshair": false, + "rows": [ + { + "collapse": false, + "editable": true, + "height": "450px", + "panels": [ + { + "alert": { + "conditions": [ + { + "evaluator": { + "params": [ + 10 + ], + "type": "gt" + }, + "query": { + "params": [ + "B", + "5m", + "now" + ] + }, + "reducer": { + "params": [], + "type": "avg" + }, + "type": "query" + } + ], + "frequency": "3s", + "handler": 1, + "name": "Influxdb", + "noDataState": "no_data", + "notifications": [ + { + "id": 6 + } + ] + }, + "alerting": {}, + "aliasColors": { + "logins.count.count": "#890F02" + }, + "bars": false, + "datasource": "InfluxDB", + "editable": true, + "error": false, + "fill": 1, + "grid": {}, + "id": 1, + "interval": ">10s", + "isNew": true, + "legend": { + "avg": false, + "current": false, + "max": false, + "min": false, + "show": true, + "total": false, + "values": false + }, + "lines": true, + "linewidth": 2, + "links": [], + "nullPointMode": "connected", + "percentage": false, + "pointradius": 5, + "points": false, + "renderer": "flot", + "seriesOverrides": [], + "span": 10, + "stack": false, + "steppedLine": false, + "targets": [ + { + "groupBy": [ + { + "params": [ + "$interval" + ], + "type": "time" + }, + { + "params": [ + "datacenter" + ], + "type": "tag" + }, + { + "params": [ + "none" + ], + "type": "fill" 
+ } + ], + "hide": false, + "measurement": "logins.count", + "policy": "default", + "query": "SELECT 8 * count(\"value\") FROM \"logins.count\" WHERE $timeFilter GROUP BY time($interval), \"datacenter\" fill(none)", + "rawQuery": true, + "refId": "B", + "resultFormat": "time_series", + "select": [ + [ + { + "params": [ + "value" + ], + "type": "field" + }, + { + "params": [], + "type": "count" + } + ] + ], + "tags": [] + }, + { + "groupBy": [ + { + "params": [ + "$interval" + ], + "type": "time" + }, + { + "params": [ + "null" + ], + "type": "fill" + } + ], + "hide": true, + "measurement": "cpu", + "policy": "default", + "refId": "A", + "resultFormat": "time_series", + "select": [ + [ + { + "params": [ + "value" + ], + "type": "field" + }, + { + "params": [], + "type": "mean" + } + ], + [ + { + "params": [ + "value" + ], + "type": "field" + }, + { + "params": [], + "type": "sum" + } + ] + ], + "tags": [] + } + ], + "thresholds": [ + { + "colorMode": "critical", + "fill": true, + "line": true, + "op": "gt", + "value": 10 + } + ], + "timeFrom": null, + "timeShift": null, + "title": "Panel Title", + "tooltip": { + "msResolution": false, + "ordering": "alphabetical", + "shared": true, + "sort": 0, + "value_type": "cumulative" + }, + "type": "graph", + "xaxis": { + "mode": "time", + "name": null, + "show": true, + "values": [] + }, + "yaxes": [ + { + "format": "short", + "logBase": 1, + "max": null, + "min": null, + "show": true + }, + { + "format": "short", + "logBase": 1, + "max": null, + "min": null, + "show": true + } + ] + }, + { + "editable": true, + "error": false, + "id": 2, + "isNew": true, + "limit": 10, + "links": [], + "show": "current", + "span": 2, + "stateFilter": [ + "alerting" + ], + "title": "Alert status", + "type": "alertlist" + } + ], + "title": "Row" + } + ], + "time": { + "from": "now-5m", + "to": "now" + }, + "timepicker": { + "now": true, + "refresh_intervals": [ + "5s", + "10s", + "30s", + "1m", + "5m", + "15m", + "30m", + "1h", + "2h", + "1d" + ], + "time_options": [ + "5m", + "15m", + "1h", + "6h", + "12h", + "24h", + "2d", + "7d", + "30d" + ] + }, + "templating": { + "list": [] + }, + "annotations": { + "list": [] + }, + "schemaVersion": 13, + "version": 120, + "links": [], + "gnetId": null + } diff --git a/pkg/services/alerting/test-data/influxdb-alert.json b/pkg/services/alerting/test-data/influxdb-alert.json index 79ca355c5a1..fd6feb31a47 100644 --- a/pkg/services/alerting/test-data/influxdb-alert.json +++ b/pkg/services/alerting/test-data/influxdb-alert.json @@ -279,4 +279,4 @@ "version": 120, "links": [], "gnetId": null - } \ No newline at end of file + } diff --git a/pkg/services/annotations/annotations.go b/pkg/services/annotations/annotations.go index a6cd7a33318..5cebb3d2df9 100644 --- a/pkg/services/annotations/annotations.go +++ b/pkg/services/annotations/annotations.go @@ -13,6 +13,7 @@ type ItemQuery struct { OrgId int64 `json:"orgId"` From int64 `json:"from"` To int64 `json:"to"` + UserId int64 `json:"userId"` AlertId int64 `json:"alertId"` DashboardId int64 `json:"dashboardId"` PanelId int64 `json:"panelId"` @@ -63,6 +64,8 @@ type Item struct { PrevState string `json:"prevState"` NewState string `json:"newState"` Epoch int64 `json:"epoch"` + Created int64 `json:"created"` + Updated int64 `json:"updated"` Tags []string `json:"tags"` Data *simplejson.Json `json:"data"` @@ -80,6 +83,8 @@ type ItemDTO struct { UserId int64 `json:"userId"` NewState string `json:"newState"` PrevState string `json:"prevState"` + Created int64 `json:"created"` + Updated int64 
`json:"updated"`
 	Time     int64            `json:"time"`
 	Text     string           `json:"text"`
 	RegionId int64            `json:"regionId"`
diff --git a/pkg/services/cleanup/cleanup.go b/pkg/services/cleanup/cleanup.go
index 5e9efeea3b0..69bc7695dea 100644
--- a/pkg/services/cleanup/cleanup.go
+++ b/pkg/services/cleanup/cleanup.go
@@ -7,60 +7,52 @@ import (
 	"path"
 	"time"
 
-	"golang.org/x/sync/errgroup"
-
 	"github.com/grafana/grafana/pkg/bus"
 	"github.com/grafana/grafana/pkg/log"
 	m "github.com/grafana/grafana/pkg/models"
+	"github.com/grafana/grafana/pkg/registry"
 	"github.com/grafana/grafana/pkg/setting"
 )
 
 type CleanUpService struct {
 	log log.Logger
+	Cfg *setting.Cfg `inject:""`
 }
 
-func NewCleanUpService() *CleanUpService {
-	return &CleanUpService{
-		log: log.New("cleanup"),
-	}
+func init() {
+	registry.RegisterService(&CleanUpService{})
 }
 
-func (service *CleanUpService) Run(ctx context.Context) error {
-	service.log.Info("Initializing CleanUpService")
-
-	g, _ := errgroup.WithContext(ctx)
-	g.Go(func() error { return service.start(ctx) })
-
-	err := g.Wait()
-	service.log.Info("Stopped CleanUpService", "reason", err)
-	return err
+func (srv *CleanUpService) Init() error {
+	srv.log = log.New("cleanup")
+	return nil
 }
 
-func (service *CleanUpService) start(ctx context.Context) error {
-	service.cleanUpTmpFiles()
+func (srv *CleanUpService) Run(ctx context.Context) error {
+	srv.cleanUpTmpFiles()
 
 	ticker := time.NewTicker(time.Minute * 10)
 	for {
 		select {
 		case <-ticker.C:
-			service.cleanUpTmpFiles()
-			service.deleteExpiredSnapshots()
-			service.deleteExpiredDashboardVersions()
-			service.deleteOldLoginAttempts()
+			srv.cleanUpTmpFiles()
+			srv.deleteExpiredSnapshots()
+			srv.deleteExpiredDashboardVersions()
+			srv.deleteOldLoginAttempts()
 		case <-ctx.Done():
 			return ctx.Err()
 		}
 	}
 }
 
-func (service *CleanUpService) cleanUpTmpFiles() {
-	if _, err := os.Stat(setting.ImagesDir); os.IsNotExist(err) {
+func (srv *CleanUpService) cleanUpTmpFiles() {
+	if _, err := os.Stat(srv.Cfg.ImagesDir); os.IsNotExist(err) {
 		return
 	}
 
-	files, err := ioutil.ReadDir(setting.ImagesDir)
+	files, err := ioutil.ReadDir(srv.Cfg.ImagesDir)
 	if err != nil {
-		service.log.Error("Problem reading image dir", "error", err)
+		srv.log.Error("Problem reading image dir", "error", err)
 		return
 	}
 
@@ -72,36 +64,36 @@ func (service *CleanUpService) cleanUpTmpFiles() {
 	}
 
 	for _, file := range toDelete {
-		fullPath := path.Join(setting.ImagesDir, file.Name())
+		fullPath := path.Join(srv.Cfg.ImagesDir, file.Name())
 		err := os.Remove(fullPath)
 		if err != nil {
-			service.log.Error("Failed to delete temp file", "file", file.Name(), "error", err)
+			srv.log.Error("Failed to delete temp file", "file", file.Name(), "error", err)
 		}
 	}
 
-	service.log.Debug("Found old rendered image to delete", "deleted", len(toDelete), "keept", len(files))
+	srv.log.Debug("Found old rendered images to delete", "deleted", len(toDelete), "kept", len(files))
 }
 
-func (service *CleanUpService) deleteExpiredSnapshots() {
+func (srv *CleanUpService) deleteExpiredSnapshots() {
 	cmd := m.DeleteExpiredSnapshotsCommand{}
 	if err := bus.Dispatch(&cmd); err != nil {
-		service.log.Error("Failed to delete expired snapshots", "error", err.Error())
+		srv.log.Error("Failed to delete expired snapshots", "error", err.Error())
 	} else {
-		service.log.Debug("Deleted expired snapshots", "rows affected", cmd.DeletedRows)
+		srv.log.Debug("Deleted expired snapshots", "rows affected", cmd.DeletedRows)
 	}
 }
 
-func (service *CleanUpService) deleteExpiredDashboardVersions() {
+func (srv *CleanUpService) deleteExpiredDashboardVersions() {
 	cmd :=
m.DeleteExpiredVersionsCommand{} if err := bus.Dispatch(&cmd); err != nil { - service.log.Error("Failed to delete expired dashboard versions", "error", err.Error()) + srv.log.Error("Failed to delete expired dashboard versions", "error", err.Error()) } else { - service.log.Debug("Deleted old/expired dashboard versions", "rows affected", cmd.DeletedRows) + srv.log.Debug("Deleted old/expired dashboard versions", "rows affected", cmd.DeletedRows) } } -func (service *CleanUpService) deleteOldLoginAttempts() { - if setting.DisableBruteForceLoginProtection { +func (srv *CleanUpService) deleteOldLoginAttempts() { + if srv.Cfg.DisableBruteForceLoginProtection { return } @@ -109,8 +101,8 @@ func (service *CleanUpService) deleteOldLoginAttempts() { OlderThan: time.Now().Add(time.Minute * -10), } if err := bus.Dispatch(&cmd); err != nil { - service.log.Error("Problem deleting expired login attempts", "error", err.Error()) + srv.log.Error("Problem deleting expired login attempts", "error", err.Error()) } else { - service.log.Debug("Deleted expired login attempts", "rows affected", cmd.DeletedRows) + srv.log.Debug("Deleted expired login attempts", "rows affected", cmd.DeletedRows) } } diff --git a/pkg/services/dashboards/dashboard_service.go b/pkg/services/dashboards/dashboard_service.go index 02a6ffc8330..278421e6be7 100644 --- a/pkg/services/dashboards/dashboard_service.go +++ b/pkg/services/dashboards/dashboard_service.go @@ -57,7 +57,7 @@ func (dr *dashboardServiceImpl) GetProvisionedDashboardData(name string) ([]*mod return cmd.Result, nil } -func (dr *dashboardServiceImpl) buildSaveDashboardCommand(dto *SaveDashboardDTO, validateAlerts bool) (*models.SaveDashboardCommand, error) { +func (dr *dashboardServiceImpl) buildSaveDashboardCommand(dto *SaveDashboardDTO, validateAlerts bool, validateProvisionedDashboard bool) (*models.SaveDashboardCommand, error) { dash := dto.Dashboard dash.Title = strings.TrimSpace(dash.Title) @@ -103,6 +103,29 @@ func (dr *dashboardServiceImpl) buildSaveDashboardCommand(dto *SaveDashboardDTO, return nil, err } + if validateBeforeSaveCmd.Result.IsParentFolderChanged { + folderGuardian := guardian.New(dash.FolderId, dto.OrgId, dto.User) + if canSave, err := folderGuardian.CanSave(); err != nil || !canSave { + if err != nil { + return nil, err + } + return nil, models.ErrDashboardUpdateAccessDenied + } + } + + if validateProvisionedDashboard { + isDashboardProvisioned := &models.IsDashboardProvisionedQuery{DashboardId: dash.Id} + err := bus.Dispatch(isDashboardProvisioned) + + if err != nil { + return nil, err + } + + if isDashboardProvisioned.Result { + return nil, models.ErrDashboardCannotSaveProvisionedDashboard + } + } + guard := guardian.New(dash.GetDashboardIdForSavePermissionCheck(), dto.OrgId, dto.User) if canSave, err := guard.CanSave(); err != nil || !canSave { if err != nil { @@ -148,7 +171,7 @@ func (dr *dashboardServiceImpl) SaveProvisionedDashboard(dto *SaveDashboardDTO, UserId: 0, OrgRole: models.ROLE_ADMIN, } - cmd, err := dr.buildSaveDashboardCommand(dto, true) + cmd, err := dr.buildSaveDashboardCommand(dto, true, false) if err != nil { return nil, err } @@ -178,7 +201,7 @@ func (dr *dashboardServiceImpl) SaveFolderForProvisionedDashboards(dto *SaveDash UserId: 0, OrgRole: models.ROLE_ADMIN, } - cmd, err := dr.buildSaveDashboardCommand(dto, false) + cmd, err := dr.buildSaveDashboardCommand(dto, false, false) if err != nil { return nil, err } @@ -197,7 +220,7 @@ func (dr *dashboardServiceImpl) SaveFolderForProvisionedDashboards(dto *SaveDash } func (dr 
*dashboardServiceImpl) SaveDashboard(dto *SaveDashboardDTO) (*models.Dashboard, error) { - cmd, err := dr.buildSaveDashboardCommand(dto, true) + cmd, err := dr.buildSaveDashboardCommand(dto, true, true) if err != nil { return nil, err } @@ -216,7 +239,7 @@ func (dr *dashboardServiceImpl) SaveDashboard(dto *SaveDashboardDTO) (*models.Da } func (dr *dashboardServiceImpl) ImportDashboard(dto *SaveDashboardDTO) (*models.Dashboard, error) { - cmd, err := dr.buildSaveDashboardCommand(dto, false) + cmd, err := dr.buildSaveDashboardCommand(dto, false, true) if err != nil { return nil, err } diff --git a/pkg/services/dashboards/dashboard_service_test.go b/pkg/services/dashboards/dashboard_service_test.go index 965b10655b3..f9d487f625c 100644 --- a/pkg/services/dashboards/dashboard_service_test.go +++ b/pkg/services/dashboards/dashboard_service_test.go @@ -14,7 +14,9 @@ import ( func TestDashboardService(t *testing.T) { Convey("Dashboard service tests", t, func() { - service := dashboardServiceImpl{} + bus.ClearBusHandlers() + + service := &dashboardServiceImpl{} origNewDashboardGuardian := guardian.New guardian.MockDashboardGuardian(&guardian.FakeDashboardGuardian{CanSaveValue: true}) @@ -51,6 +53,12 @@ func TestDashboardService(t *testing.T) { }) bus.AddHandler("test", func(cmd *models.ValidateDashboardBeforeSaveCommand) error { + cmd.Result = &models.ValidateDashboardBeforeSaveResult{} + return nil + }) + + bus.AddHandler("test", func(cmd *models.IsDashboardProvisionedQuery) error { + cmd.Result = false return nil }) @@ -72,12 +80,42 @@ func TestDashboardService(t *testing.T) { dto.Dashboard.SetUid(tc.Uid) dto.User = &models.SignedInUser{} - _, err := service.buildSaveDashboardCommand(dto, true) + _, err := service.buildSaveDashboardCommand(dto, true, false) So(err, ShouldEqual, tc.Error) } }) + Convey("Should return validation error if dashboard is provisioned", func() { + provisioningValidated := false + bus.AddHandler("test", func(cmd *models.IsDashboardProvisionedQuery) error { + provisioningValidated = true + cmd.Result = true + return nil + }) + + bus.AddHandler("test", func(cmd *models.ValidateDashboardAlertsCommand) error { + return nil + }) + + bus.AddHandler("test", func(cmd *models.ValidateDashboardBeforeSaveCommand) error { + cmd.Result = &models.ValidateDashboardBeforeSaveResult{} + return nil + }) + + dto.Dashboard = models.NewDashboard("Dash") + dto.Dashboard.SetId(3) + dto.User = &models.SignedInUser{UserId: 1} + _, err := service.SaveDashboard(dto) + So(provisioningValidated, ShouldBeTrue) + So(err, ShouldEqual, models.ErrDashboardCannotSaveProvisionedDashboard) + }) + Convey("Should return validation error if alert data is invalid", func() { + bus.AddHandler("test", func(cmd *models.IsDashboardProvisionedQuery) error { + cmd.Result = false + return nil + }) + bus.AddHandler("test", func(cmd *models.ValidateDashboardAlertsCommand) error { return errors.New("error") }) @@ -88,6 +126,80 @@ func TestDashboardService(t *testing.T) { }) }) + Convey("Save provisioned dashboard validation", func() { + dto := &SaveDashboardDTO{} + + Convey("Should not return validation error if dashboard is provisioned", func() { + provisioningValidated := false + bus.AddHandler("test", func(cmd *models.IsDashboardProvisionedQuery) error { + provisioningValidated = true + cmd.Result = true + return nil + }) + + bus.AddHandler("test", func(cmd *models.ValidateDashboardAlertsCommand) error { + return nil + }) + + bus.AddHandler("test", func(cmd *models.ValidateDashboardBeforeSaveCommand) error { + 
cmd.Result = &models.ValidateDashboardBeforeSaveResult{} + return nil + }) + + bus.AddHandler("test", func(cmd *models.SaveProvisionedDashboardCommand) error { + return nil + }) + + bus.AddHandler("test", func(cmd *models.UpdateDashboardAlertsCommand) error { + return nil + }) + + dto.Dashboard = models.NewDashboard("Dash") + dto.Dashboard.SetId(3) + dto.User = &models.SignedInUser{UserId: 1} + _, err := service.SaveProvisionedDashboard(dto, nil) + So(err, ShouldBeNil) + So(provisioningValidated, ShouldBeFalse) + }) + }) + + Convey("Import dashboard validation", func() { + dto := &SaveDashboardDTO{} + + Convey("Should return validation error if dashboard is provisioned", func() { + provisioningValidated := false + bus.AddHandler("test", func(cmd *models.IsDashboardProvisionedQuery) error { + provisioningValidated = true + cmd.Result = true + return nil + }) + + bus.AddHandler("test", func(cmd *models.ValidateDashboardAlertsCommand) error { + return nil + }) + + bus.AddHandler("test", func(cmd *models.ValidateDashboardBeforeSaveCommand) error { + cmd.Result = &models.ValidateDashboardBeforeSaveResult{} + return nil + }) + + bus.AddHandler("test", func(cmd *models.SaveProvisionedDashboardCommand) error { + return nil + }) + + bus.AddHandler("test", func(cmd *models.UpdateDashboardAlertsCommand) error { + return nil + }) + + dto.Dashboard = models.NewDashboard("Dash") + dto.Dashboard.SetId(3) + dto.User = &models.SignedInUser{UserId: 1} + _, err := service.ImportDashboard(dto) + So(provisioningValidated, ShouldBeTrue) + So(err, ShouldEqual, models.ErrDashboardCannotSaveProvisionedDashboard) + }) + }) + Reset(func() { guardian.New = origNewDashboardGuardian }) diff --git a/pkg/services/dashboards/folder_service.go b/pkg/services/dashboards/folder_service.go index ae92952056e..b521b0e5213 100644 --- a/pkg/services/dashboards/folder_service.go +++ b/pkg/services/dashboards/folder_service.go @@ -104,7 +104,7 @@ func (dr *dashboardServiceImpl) CreateFolder(cmd *models.CreateFolderCommand) er User: dr.user, } - saveDashboardCmd, err := dr.buildSaveDashboardCommand(dto, false) + saveDashboardCmd, err := dr.buildSaveDashboardCommand(dto, false, false) if err != nil { return toFolderError(err) } @@ -141,7 +141,7 @@ func (dr *dashboardServiceImpl) UpdateFolder(existingUid string, cmd *models.Upd Overwrite: cmd.Overwrite, } - saveDashboardCmd, err := dr.buildSaveDashboardCommand(dto, false) + saveDashboardCmd, err := dr.buildSaveDashboardCommand(dto, false, false) if err != nil { return toFolderError(err) } diff --git a/pkg/services/dashboards/folder_service_test.go b/pkg/services/dashboards/folder_service_test.go index 6c0413d1878..4c9cecd3352 100644 --- a/pkg/services/dashboards/folder_service_test.go +++ b/pkg/services/dashboards/folder_service_test.go @@ -32,6 +32,7 @@ func TestFolderService(t *testing.T) { }) bus.AddHandler("test", func(cmd *models.ValidateDashboardBeforeSaveCommand) error { + cmd.Result = &models.ValidateDashboardBeforeSaveResult{} return models.ErrDashboardUpdateAccessDenied }) @@ -92,6 +93,7 @@ func TestFolderService(t *testing.T) { }) bus.AddHandler("test", func(cmd *models.ValidateDashboardBeforeSaveCommand) error { + cmd.Result = &models.ValidateDashboardBeforeSaveResult{} return nil }) @@ -108,11 +110,19 @@ func TestFolderService(t *testing.T) { return nil }) + provisioningValidated := false + + bus.AddHandler("test", func(query *models.IsDashboardProvisionedQuery) error { + provisioningValidated = true + return nil + }) + Convey("When creating folder should not return 
access denied error", func() { err := service.CreateFolder(&models.CreateFolderCommand{ Title: "Folder", }) So(err, ShouldBeNil) + So(provisioningValidated, ShouldBeFalse) }) Convey("When updating folder should not return access denied error", func() { @@ -121,6 +131,7 @@ func TestFolderService(t *testing.T) { Title: "Folder", }) So(err, ShouldBeNil) + So(provisioningValidated, ShouldBeFalse) }) Convey("When deleting folder by uid should not return access denied error", func() { diff --git a/pkg/services/guardian/guardian.go b/pkg/services/guardian/guardian.go index 811b38cac86..bf455adc7ca 100644 --- a/pkg/services/guardian/guardian.go +++ b/pkg/services/guardian/guardian.go @@ -113,7 +113,7 @@ func (g *dashboardGuardianImpl) checkAcl(permission m.PermissionType, acl []*m.D return false, err } - // evalute team rules + // evaluate team rules for _, p := range acl { for _, ug := range teams { if ug.Id == p.TeamId && p.Permission >= permission { @@ -154,12 +154,7 @@ func (g *dashboardGuardianImpl) CheckPermissionBeforeUpdate(permission m.Permiss // validate overridden permissions to be higher for _, a := range acl { for _, existingPerm := range existingPermissions { - // handle default permissions - if existingPerm.DashboardId == -1 { - existingPerm.DashboardId = g.dashId - } - - if a.DashboardId == existingPerm.DashboardId { + if !existingPerm.Inherited { continue } @@ -173,7 +168,7 @@ func (g *dashboardGuardianImpl) CheckPermissionBeforeUpdate(permission m.Permiss return true, nil } - return g.checkAcl(permission, acl) + return g.checkAcl(permission, existingPermissions) } // GetAcl returns dashboard acl @@ -187,13 +182,6 @@ func (g *dashboardGuardianImpl) GetAcl() ([]*m.DashboardAclInfoDTO, error) { return nil, err } - for _, a := range query.Result { - // handle default permissions - if a.DashboardId == -1 { - a.DashboardId = g.dashId - } - } - g.acl = query.Result return g.acl, nil } diff --git a/pkg/services/guardian/guardian_test.go b/pkg/services/guardian/guardian_test.go index bb7e6bd1a72..5e56b1d88c3 100644 --- a/pkg/services/guardian/guardian_test.go +++ b/pkg/services/guardian/guardian_test.go @@ -2,710 +2,666 @@ package guardian import ( "fmt" + "runtime" "testing" - "github.com/grafana/grafana/pkg/bus" - m "github.com/grafana/grafana/pkg/models" . 
"github.com/smartystreets/goconvey/convey" ) -func TestGuardian(t *testing.T) { - Convey("Guardian permission tests", t, func() { - orgRoleScenario("Given user has admin org role", m.ROLE_ADMIN, func(sc *scenarioContext) { - canAdmin, _ := sc.g.CanAdmin() - canEdit, _ := sc.g.CanEdit() - canSave, _ := sc.g.CanSave() - canView, _ := sc.g.CanView() - So(canAdmin, ShouldBeTrue) - So(canEdit, ShouldBeTrue) - So(canSave, ShouldBeTrue) - So(canView, ShouldBeTrue) - - Convey("When trying to update permissions", func() { - Convey("With duplicate user permissions should return error", func() { - p := []*m.DashboardAcl{ - {OrgId: 1, DashboardId: 1, UserId: 1, Permission: m.PERMISSION_VIEW}, - {OrgId: 1, DashboardId: 1, UserId: 1, Permission: m.PERMISSION_ADMIN}, - } - _, err := sc.g.CheckPermissionBeforeUpdate(m.PERMISSION_ADMIN, p) - So(err, ShouldEqual, ErrGuardianPermissionExists) - }) - - Convey("With duplicate team permissions should return error", func() { - p := []*m.DashboardAcl{ - {OrgId: 1, DashboardId: 1, TeamId: 1, Permission: m.PERMISSION_VIEW}, - {OrgId: 1, DashboardId: 1, TeamId: 1, Permission: m.PERMISSION_ADMIN}, - } - _, err := sc.g.CheckPermissionBeforeUpdate(m.PERMISSION_ADMIN, p) - So(err, ShouldEqual, ErrGuardianPermissionExists) - }) - - Convey("With duplicate everyone with editor role permission should return error", func() { - r := m.ROLE_EDITOR - p := []*m.DashboardAcl{ - {OrgId: 1, DashboardId: 1, Role: &r, Permission: m.PERMISSION_VIEW}, - {OrgId: 1, DashboardId: 1, Role: &r, Permission: m.PERMISSION_ADMIN}, - } - _, err := sc.g.CheckPermissionBeforeUpdate(m.PERMISSION_ADMIN, p) - So(err, ShouldEqual, ErrGuardianPermissionExists) - }) - - Convey("With duplicate everyone with viewer role permission should return error", func() { - r := m.ROLE_VIEWER - p := []*m.DashboardAcl{ - {OrgId: 1, DashboardId: 1, Role: &r, Permission: m.PERMISSION_VIEW}, - {OrgId: 1, DashboardId: 1, Role: &r, Permission: m.PERMISSION_ADMIN}, - } - _, err := sc.g.CheckPermissionBeforeUpdate(m.PERMISSION_ADMIN, p) - So(err, ShouldEqual, ErrGuardianPermissionExists) - }) - - Convey("With everyone with admin role permission should return error", func() { - r := m.ROLE_ADMIN - p := []*m.DashboardAcl{ - {OrgId: 1, DashboardId: 1, Role: &r, Permission: m.PERMISSION_ADMIN}, - } - _, err := sc.g.CheckPermissionBeforeUpdate(m.PERMISSION_ADMIN, p) - So(err, ShouldEqual, ErrGuardianPermissionExists) - }) - }) - - Convey("Given default permissions", func() { - editor := m.ROLE_EDITOR - viewer := m.ROLE_VIEWER - existingPermissions := []*m.DashboardAclInfoDTO{ - {OrgId: 1, DashboardId: -1, Role: &editor, Permission: m.PERMISSION_EDIT}, - {OrgId: 1, DashboardId: -1, Role: &viewer, Permission: m.PERMISSION_VIEW}, - } - - bus.AddHandler("test", func(query *m.GetDashboardAclInfoListQuery) error { - query.Result = existingPermissions - return nil - }) - - Convey("When trying to update dashboard permissions without everyone with role editor can edit should be allowed", func() { - r := m.ROLE_VIEWER - p := []*m.DashboardAcl{ - {OrgId: 1, DashboardId: 1, Role: &r, Permission: m.PERMISSION_VIEW}, - } - ok, _ := sc.g.CheckPermissionBeforeUpdate(m.PERMISSION_ADMIN, p) - So(ok, ShouldBeTrue) - }) - - Convey("When trying to update dashboard permissions without everyone with role viewer can view should be allowed", func() { - r := m.ROLE_EDITOR - p := []*m.DashboardAcl{ - {OrgId: 1, DashboardId: 1, Role: &r, Permission: m.PERMISSION_EDIT}, - } - ok, _ := sc.g.CheckPermissionBeforeUpdate(m.PERMISSION_ADMIN, p) - So(ok, 
ShouldBeTrue) - }) - }) - - Convey("Given parent folder has user admin permission", func() { - existingPermissions := []*m.DashboardAclInfoDTO{ - {OrgId: 1, DashboardId: 2, UserId: 1, Permission: m.PERMISSION_ADMIN}, - } - - bus.AddHandler("test", func(query *m.GetDashboardAclInfoListQuery) error { - query.Result = existingPermissions - return nil - }) - - Convey("When trying to update dashboard permissions with admin user permission should return error", func() { - p := []*m.DashboardAcl{ - {OrgId: 1, DashboardId: 3, UserId: 1, Permission: m.PERMISSION_ADMIN}, - } - _, err := sc.g.CheckPermissionBeforeUpdate(m.PERMISSION_ADMIN, p) - So(err, ShouldEqual, ErrGuardianOverride) - }) - - Convey("When trying to update dashboard permissions with edit user permission should return error", func() { - p := []*m.DashboardAcl{ - {OrgId: 1, DashboardId: 3, UserId: 1, Permission: m.PERMISSION_EDIT}, - } - _, err := sc.g.CheckPermissionBeforeUpdate(m.PERMISSION_ADMIN, p) - So(err, ShouldEqual, ErrGuardianOverride) - }) - - Convey("When trying to update dashboard permissions with view user permission should return error", func() { - p := []*m.DashboardAcl{ - {OrgId: 1, DashboardId: 3, UserId: 1, Permission: m.PERMISSION_VIEW}, - } - _, err := sc.g.CheckPermissionBeforeUpdate(m.PERMISSION_ADMIN, p) - So(err, ShouldEqual, ErrGuardianOverride) - }) - }) - - Convey("Given parent folder has user edit permission", func() { - existingPermissions := []*m.DashboardAclInfoDTO{ - {OrgId: 1, DashboardId: 2, UserId: 1, Permission: m.PERMISSION_EDIT}, - } - - bus.AddHandler("test", func(query *m.GetDashboardAclInfoListQuery) error { - query.Result = existingPermissions - return nil - }) - - Convey("When trying to update dashboard permissions with admin user permission should be allowed", func() { - p := []*m.DashboardAcl{ - {OrgId: 1, DashboardId: 3, UserId: 1, Permission: m.PERMISSION_ADMIN}, - } - ok, _ := sc.g.CheckPermissionBeforeUpdate(m.PERMISSION_ADMIN, p) - So(ok, ShouldBeTrue) - }) - - Convey("When trying to update dashboard permissions with edit user permission should return error", func() { - p := []*m.DashboardAcl{ - {OrgId: 1, DashboardId: 3, UserId: 1, Permission: m.PERMISSION_EDIT}, - } - _, err := sc.g.CheckPermissionBeforeUpdate(m.PERMISSION_ADMIN, p) - So(err, ShouldEqual, ErrGuardianOverride) - }) - - Convey("When trying to update dashboard permissions with view user permission should return error", func() { - p := []*m.DashboardAcl{ - {OrgId: 1, DashboardId: 3, UserId: 1, Permission: m.PERMISSION_VIEW}, - } - _, err := sc.g.CheckPermissionBeforeUpdate(m.PERMISSION_ADMIN, p) - So(err, ShouldEqual, ErrGuardianOverride) - }) - }) - - Convey("Given parent folder has user view permission", func() { - existingPermissions := []*m.DashboardAclInfoDTO{ - {OrgId: 1, DashboardId: 2, UserId: 1, Permission: m.PERMISSION_VIEW}, - } - - bus.AddHandler("test", func(query *m.GetDashboardAclInfoListQuery) error { - query.Result = existingPermissions - return nil - }) - - Convey("When trying to update dashboard permissions with admin user permission should be allowed", func() { - p := []*m.DashboardAcl{ - {OrgId: 1, DashboardId: 3, UserId: 1, Permission: m.PERMISSION_ADMIN}, - } - ok, _ := sc.g.CheckPermissionBeforeUpdate(m.PERMISSION_ADMIN, p) - So(ok, ShouldBeTrue) - }) - - Convey("When trying to update dashboard permissions with edit user permission should be allowed", func() { - p := []*m.DashboardAcl{ - {OrgId: 1, DashboardId: 3, UserId: 1, Permission: m.PERMISSION_EDIT}, - } - ok, _ := 
sc.g.CheckPermissionBeforeUpdate(m.PERMISSION_ADMIN, p) - So(ok, ShouldBeTrue) - }) - - Convey("When trying to update dashboard permissions with view user permission should return error", func() { - p := []*m.DashboardAcl{ - {OrgId: 1, DashboardId: 3, UserId: 1, Permission: m.PERMISSION_VIEW}, - } - _, err := sc.g.CheckPermissionBeforeUpdate(m.PERMISSION_ADMIN, p) - So(err, ShouldEqual, ErrGuardianOverride) - }) - }) - - Convey("Given parent folder has team admin permission", func() { - existingPermissions := []*m.DashboardAclInfoDTO{ - {OrgId: 1, DashboardId: 2, TeamId: 1, Permission: m.PERMISSION_ADMIN}, - } - - bus.AddHandler("test", func(query *m.GetDashboardAclInfoListQuery) error { - query.Result = existingPermissions - return nil - }) - - Convey("When trying to update dashboard permissions with admin team permission should return error", func() { - p := []*m.DashboardAcl{ - {OrgId: 1, DashboardId: 3, TeamId: 1, Permission: m.PERMISSION_ADMIN}, - } - _, err := sc.g.CheckPermissionBeforeUpdate(m.PERMISSION_ADMIN, p) - So(err, ShouldEqual, ErrGuardianOverride) - }) - - Convey("When trying to update dashboard permissions with edit team permission should return error", func() { - p := []*m.DashboardAcl{ - {OrgId: 1, DashboardId: 3, TeamId: 1, Permission: m.PERMISSION_EDIT}, - } - _, err := sc.g.CheckPermissionBeforeUpdate(m.PERMISSION_ADMIN, p) - So(err, ShouldEqual, ErrGuardianOverride) - }) - - Convey("When trying to update dashboard permissions with view team permission should return error", func() { - p := []*m.DashboardAcl{ - {OrgId: 1, DashboardId: 3, TeamId: 1, Permission: m.PERMISSION_VIEW}, - } - _, err := sc.g.CheckPermissionBeforeUpdate(m.PERMISSION_ADMIN, p) - So(err, ShouldEqual, ErrGuardianOverride) - }) - }) - - Convey("Given parent folder has team edit permission", func() { - existingPermissions := []*m.DashboardAclInfoDTO{ - {OrgId: 1, DashboardId: 2, TeamId: 1, Permission: m.PERMISSION_EDIT}, - } - - bus.AddHandler("test", func(query *m.GetDashboardAclInfoListQuery) error { - query.Result = existingPermissions - return nil - }) - - Convey("When trying to update dashboard permissions with admin team permission should be allowed", func() { - p := []*m.DashboardAcl{ - {OrgId: 1, DashboardId: 3, TeamId: 1, Permission: m.PERMISSION_ADMIN}, - } - ok, _ := sc.g.CheckPermissionBeforeUpdate(m.PERMISSION_ADMIN, p) - So(ok, ShouldBeTrue) - }) - - Convey("When trying to update dashboard permissions with edit team permission should return error", func() { - p := []*m.DashboardAcl{ - {OrgId: 1, DashboardId: 3, TeamId: 1, Permission: m.PERMISSION_EDIT}, - } - _, err := sc.g.CheckPermissionBeforeUpdate(m.PERMISSION_ADMIN, p) - So(err, ShouldEqual, ErrGuardianOverride) - }) - - Convey("When trying to update dashboard permissions with view team permission should return error", func() { - p := []*m.DashboardAcl{ - {OrgId: 1, DashboardId: 3, TeamId: 1, Permission: m.PERMISSION_VIEW}, - } - _, err := sc.g.CheckPermissionBeforeUpdate(m.PERMISSION_ADMIN, p) - So(err, ShouldEqual, ErrGuardianOverride) - }) - }) - - Convey("Given parent folder has team view permission", func() { - existingPermissions := []*m.DashboardAclInfoDTO{ - {OrgId: 1, DashboardId: 2, TeamId: 1, Permission: m.PERMISSION_VIEW}, - } - - bus.AddHandler("test", func(query *m.GetDashboardAclInfoListQuery) error { - query.Result = existingPermissions - return nil - }) - - Convey("When trying to update dashboard permissions with admin team permission should be allowed", func() { - p := []*m.DashboardAcl{ - {OrgId: 1, 
DashboardId: 3, TeamId: 1, Permission: m.PERMISSION_ADMIN}, - } - ok, _ := sc.g.CheckPermissionBeforeUpdate(m.PERMISSION_ADMIN, p) - So(ok, ShouldBeTrue) - }) - - Convey("When trying to update dashboard permissions with edit team permission should be allowed", func() { - p := []*m.DashboardAcl{ - {OrgId: 1, DashboardId: 3, TeamId: 1, Permission: m.PERMISSION_EDIT}, - } - ok, _ := sc.g.CheckPermissionBeforeUpdate(m.PERMISSION_ADMIN, p) - So(ok, ShouldBeTrue) - }) - - Convey("When trying to update dashboard permissions with view team permission should return error", func() { - p := []*m.DashboardAcl{ - {OrgId: 1, DashboardId: 3, TeamId: 1, Permission: m.PERMISSION_VIEW}, - } - _, err := sc.g.CheckPermissionBeforeUpdate(m.PERMISSION_ADMIN, p) - So(err, ShouldEqual, ErrGuardianOverride) - }) - }) - - Convey("Given parent folder has editor role with edit permission", func() { - r := m.ROLE_EDITOR - existingPermissions := []*m.DashboardAclInfoDTO{ - {OrgId: 1, DashboardId: 2, Role: &r, Permission: m.PERMISSION_EDIT}, - } - - bus.AddHandler("test", func(query *m.GetDashboardAclInfoListQuery) error { - query.Result = existingPermissions - return nil - }) - - Convey("When trying to update dashboard permissions with everyone with editor role can admin permission should be allowed", func() { - p := []*m.DashboardAcl{ - {OrgId: 1, DashboardId: 3, Role: &r, Permission: m.PERMISSION_ADMIN}, - } - ok, _ := sc.g.CheckPermissionBeforeUpdate(m.PERMISSION_ADMIN, p) - So(ok, ShouldBeTrue) - }) - - Convey("When trying to update dashboard permissions with everyone with editor role can edit permission should return error", func() { - p := []*m.DashboardAcl{ - {OrgId: 1, DashboardId: 3, Role: &r, Permission: m.PERMISSION_EDIT}, - } - _, err := sc.g.CheckPermissionBeforeUpdate(m.PERMISSION_ADMIN, p) - So(err, ShouldEqual, ErrGuardianOverride) - }) - - Convey("When trying to update dashboard permissions with everyone with editor role can view permission should return error", func() { - p := []*m.DashboardAcl{ - {OrgId: 1, DashboardId: 3, Role: &r, Permission: m.PERMISSION_VIEW}, - } - _, err := sc.g.CheckPermissionBeforeUpdate(m.PERMISSION_ADMIN, p) - So(err, ShouldEqual, ErrGuardianOverride) - }) - }) - - Convey("Given parent folder has editor role with view permission", func() { - r := m.ROLE_EDITOR - existingPermissions := []*m.DashboardAclInfoDTO{ - {OrgId: 1, DashboardId: 2, Role: &r, Permission: m.PERMISSION_VIEW}, - } - - bus.AddHandler("test", func(query *m.GetDashboardAclInfoListQuery) error { - query.Result = existingPermissions - return nil - }) - - Convey("When trying to update dashboard permissions with everyone with viewer role can admin permission should be allowed", func() { - p := []*m.DashboardAcl{ - {OrgId: 1, DashboardId: 3, Role: &r, Permission: m.PERMISSION_ADMIN}, - } - ok, _ := sc.g.CheckPermissionBeforeUpdate(m.PERMISSION_ADMIN, p) - So(ok, ShouldBeTrue) - }) - - Convey("When trying to update dashboard permissions with everyone with viewer role can edit permission should be allowed", func() { - p := []*m.DashboardAcl{ - {OrgId: 1, DashboardId: 3, Role: &r, Permission: m.PERMISSION_EDIT}, - } - ok, _ := sc.g.CheckPermissionBeforeUpdate(m.PERMISSION_ADMIN, p) - So(ok, ShouldBeTrue) - }) - - Convey("When trying to update dashboard permissions with everyone with viewer role can view permission should return error", func() { - p := []*m.DashboardAcl{ - {OrgId: 1, DashboardId: 3, Role: &r, Permission: m.PERMISSION_VIEW}, - } - _, err := sc.g.CheckPermissionBeforeUpdate(m.PERMISSION_ADMIN, p) - 
So(err, ShouldEqual, ErrGuardianOverride) - }) - }) - }) - - orgRoleScenario("Given user has editor org role", m.ROLE_EDITOR, func(sc *scenarioContext) { - everyoneWithRoleScenario(m.ROLE_EDITOR, m.PERMISSION_ADMIN, sc, func(sc *scenarioContext) { - canAdmin, _ := sc.g.CanAdmin() - canEdit, _ := sc.g.CanEdit() - canSave, _ := sc.g.CanSave() - canView, _ := sc.g.CanView() - So(canAdmin, ShouldBeTrue) - So(canEdit, ShouldBeTrue) - So(canSave, ShouldBeTrue) - So(canView, ShouldBeTrue) - }) - - everyoneWithRoleScenario(m.ROLE_EDITOR, m.PERMISSION_EDIT, sc, func(sc *scenarioContext) { - canAdmin, _ := sc.g.CanAdmin() - canEdit, _ := sc.g.CanEdit() - canSave, _ := sc.g.CanSave() - canView, _ := sc.g.CanView() - So(canAdmin, ShouldBeFalse) - So(canEdit, ShouldBeTrue) - So(canSave, ShouldBeTrue) - So(canView, ShouldBeTrue) - }) - - everyoneWithRoleScenario(m.ROLE_EDITOR, m.PERMISSION_VIEW, sc, func(sc *scenarioContext) { - canAdmin, _ := sc.g.CanAdmin() - canEdit, _ := sc.g.CanEdit() - canSave, _ := sc.g.CanSave() - canView, _ := sc.g.CanView() - So(canAdmin, ShouldBeFalse) - So(canEdit, ShouldBeFalse) - So(canSave, ShouldBeFalse) - So(canView, ShouldBeTrue) - }) - - everyoneWithRoleScenario(m.ROLE_VIEWER, m.PERMISSION_ADMIN, sc, func(sc *scenarioContext) { - canAdmin, _ := sc.g.CanAdmin() - canEdit, _ := sc.g.CanEdit() - canSave, _ := sc.g.CanSave() - canView, _ := sc.g.CanView() - So(canAdmin, ShouldBeFalse) - So(canEdit, ShouldBeFalse) - So(canSave, ShouldBeFalse) - So(canView, ShouldBeFalse) - }) - - everyoneWithRoleScenario(m.ROLE_VIEWER, m.PERMISSION_EDIT, sc, func(sc *scenarioContext) { - canAdmin, _ := sc.g.CanAdmin() - canEdit, _ := sc.g.CanEdit() - canSave, _ := sc.g.CanSave() - canView, _ := sc.g.CanView() - So(canAdmin, ShouldBeFalse) - So(canEdit, ShouldBeFalse) - So(canSave, ShouldBeFalse) - So(canView, ShouldBeFalse) - }) - - everyoneWithRoleScenario(m.ROLE_VIEWER, m.PERMISSION_VIEW, sc, func(sc *scenarioContext) { - canAdmin, _ := sc.g.CanAdmin() - canEdit, _ := sc.g.CanEdit() - canSave, _ := sc.g.CanSave() - canView, _ := sc.g.CanView() - So(canAdmin, ShouldBeFalse) - So(canEdit, ShouldBeFalse) - So(canSave, ShouldBeFalse) - So(canView, ShouldBeFalse) - }) - - userWithPermissionScenario(m.PERMISSION_ADMIN, sc, func(sc *scenarioContext) { - canAdmin, _ := sc.g.CanAdmin() - canEdit, _ := sc.g.CanEdit() - canSave, _ := sc.g.CanSave() - canView, _ := sc.g.CanView() - So(canAdmin, ShouldBeTrue) - So(canEdit, ShouldBeTrue) - So(canSave, ShouldBeTrue) - So(canView, ShouldBeTrue) - }) - - userWithPermissionScenario(m.PERMISSION_EDIT, sc, func(sc *scenarioContext) { - canAdmin, _ := sc.g.CanAdmin() - canEdit, _ := sc.g.CanEdit() - canSave, _ := sc.g.CanSave() - canView, _ := sc.g.CanView() - So(canAdmin, ShouldBeFalse) - So(canEdit, ShouldBeTrue) - So(canSave, ShouldBeTrue) - So(canView, ShouldBeTrue) - }) - - userWithPermissionScenario(m.PERMISSION_VIEW, sc, func(sc *scenarioContext) { - canAdmin, _ := sc.g.CanAdmin() - canEdit, _ := sc.g.CanEdit() - canSave, _ := sc.g.CanSave() - canView, _ := sc.g.CanView() - So(canAdmin, ShouldBeFalse) - So(canEdit, ShouldBeFalse) - So(canSave, ShouldBeFalse) - So(canView, ShouldBeTrue) - }) - - teamWithPermissionScenario(m.PERMISSION_ADMIN, sc, func(sc *scenarioContext) { - canAdmin, _ := sc.g.CanAdmin() - canEdit, _ := sc.g.CanEdit() - canSave, _ := sc.g.CanSave() - canView, _ := sc.g.CanView() - So(canAdmin, ShouldBeTrue) - So(canEdit, ShouldBeTrue) - So(canSave, ShouldBeTrue) - So(canView, ShouldBeTrue) - }) - - 
teamWithPermissionScenario(m.PERMISSION_EDIT, sc, func(sc *scenarioContext) { - canAdmin, _ := sc.g.CanAdmin() - canEdit, _ := sc.g.CanEdit() - canSave, _ := sc.g.CanSave() - canView, _ := sc.g.CanView() - So(canAdmin, ShouldBeFalse) - So(canEdit, ShouldBeTrue) - So(canSave, ShouldBeTrue) - So(canView, ShouldBeTrue) - }) - - teamWithPermissionScenario(m.PERMISSION_VIEW, sc, func(sc *scenarioContext) { - canAdmin, _ := sc.g.CanAdmin() - canEdit, _ := sc.g.CanEdit() - canSave, _ := sc.g.CanSave() - canView, _ := sc.g.CanView() - So(canAdmin, ShouldBeFalse) - So(canEdit, ShouldBeFalse) - So(canSave, ShouldBeFalse) - So(canView, ShouldBeTrue) - }) - - Convey("When trying to update permissions should return false", func() { - p := []*m.DashboardAcl{ - {OrgId: 1, DashboardId: 1, UserId: 1, Permission: m.PERMISSION_VIEW}, - {OrgId: 1, DashboardId: 1, UserId: 1, Permission: m.PERMISSION_ADMIN}, - } - ok, _ := sc.g.CheckPermissionBeforeUpdate(m.PERMISSION_ADMIN, p) - So(ok, ShouldBeFalse) - }) - }) - - orgRoleScenario("Given user has viewer org role", m.ROLE_VIEWER, func(sc *scenarioContext) { - everyoneWithRoleScenario(m.ROLE_EDITOR, m.PERMISSION_ADMIN, sc, func(sc *scenarioContext) { - canAdmin, _ := sc.g.CanAdmin() - canEdit, _ := sc.g.CanEdit() - canSave, _ := sc.g.CanSave() - canView, _ := sc.g.CanView() - So(canAdmin, ShouldBeFalse) - So(canEdit, ShouldBeFalse) - So(canSave, ShouldBeFalse) - So(canView, ShouldBeFalse) - }) - - everyoneWithRoleScenario(m.ROLE_EDITOR, m.PERMISSION_EDIT, sc, func(sc *scenarioContext) { - canAdmin, _ := sc.g.CanAdmin() - canEdit, _ := sc.g.CanEdit() - canSave, _ := sc.g.CanSave() - canView, _ := sc.g.CanView() - So(canAdmin, ShouldBeFalse) - So(canEdit, ShouldBeFalse) - So(canSave, ShouldBeFalse) - So(canView, ShouldBeFalse) - }) - - everyoneWithRoleScenario(m.ROLE_EDITOR, m.PERMISSION_VIEW, sc, func(sc *scenarioContext) { - canAdmin, _ := sc.g.CanAdmin() - canEdit, _ := sc.g.CanEdit() - canSave, _ := sc.g.CanSave() - canView, _ := sc.g.CanView() - So(canAdmin, ShouldBeFalse) - So(canEdit, ShouldBeFalse) - So(canSave, ShouldBeFalse) - So(canView, ShouldBeFalse) - }) - - everyoneWithRoleScenario(m.ROLE_VIEWER, m.PERMISSION_ADMIN, sc, func(sc *scenarioContext) { - canAdmin, _ := sc.g.CanAdmin() - canEdit, _ := sc.g.CanEdit() - canSave, _ := sc.g.CanSave() - canView, _ := sc.g.CanView() - So(canAdmin, ShouldBeTrue) - So(canEdit, ShouldBeTrue) - So(canSave, ShouldBeTrue) - So(canView, ShouldBeTrue) - }) - - everyoneWithRoleScenario(m.ROLE_VIEWER, m.PERMISSION_EDIT, sc, func(sc *scenarioContext) { - canAdmin, _ := sc.g.CanAdmin() - canEdit, _ := sc.g.CanEdit() - canSave, _ := sc.g.CanSave() - canView, _ := sc.g.CanView() - So(canAdmin, ShouldBeFalse) - So(canEdit, ShouldBeTrue) - So(canSave, ShouldBeTrue) - So(canView, ShouldBeTrue) - }) - - everyoneWithRoleScenario(m.ROLE_VIEWER, m.PERMISSION_VIEW, sc, func(sc *scenarioContext) { - canAdmin, _ := sc.g.CanAdmin() - canEdit, _ := sc.g.CanEdit() - canSave, _ := sc.g.CanSave() - canView, _ := sc.g.CanView() - So(canAdmin, ShouldBeFalse) - So(canEdit, ShouldBeFalse) - So(canSave, ShouldBeFalse) - So(canView, ShouldBeTrue) - }) - - userWithPermissionScenario(m.PERMISSION_ADMIN, sc, func(sc *scenarioContext) { - canAdmin, _ := sc.g.CanAdmin() - canEdit, _ := sc.g.CanEdit() - canSave, _ := sc.g.CanSave() - canView, _ := sc.g.CanView() - So(canAdmin, ShouldBeTrue) - So(canEdit, ShouldBeTrue) - So(canSave, ShouldBeTrue) - So(canView, ShouldBeTrue) - }) - - userWithPermissionScenario(m.PERMISSION_EDIT, sc, func(sc 
*scenarioContext) {
-		canAdmin, _ := sc.g.CanAdmin()
-		canEdit, _ := sc.g.CanEdit()
-		canSave, _ := sc.g.CanSave()
-		canView, _ := sc.g.CanView()
-		So(canAdmin, ShouldBeFalse)
-		So(canEdit, ShouldBeTrue)
-		So(canSave, ShouldBeTrue)
-		So(canView, ShouldBeTrue)
-	})
-
-	userWithPermissionScenario(m.PERMISSION_VIEW, sc, func(sc *scenarioContext) {
-		canAdmin, _ := sc.g.CanAdmin()
-		canEdit, _ := sc.g.CanEdit()
-		canSave, _ := sc.g.CanSave()
-		canView, _ := sc.g.CanView()
-		So(canAdmin, ShouldBeFalse)
-		So(canEdit, ShouldBeFalse)
-		So(canSave, ShouldBeFalse)
-		So(canView, ShouldBeTrue)
-	})
-
-	Convey("When trying to update permissions should return false", func() {
-		p := []*m.DashboardAcl{
-			{OrgId: 1, DashboardId: 1, UserId: 1, Permission: m.PERMISSION_VIEW},
-			{OrgId: 1, DashboardId: 1, UserId: 1, Permission: m.PERMISSION_ADMIN},
-		}
-		ok, _ := sc.g.CheckPermissionBeforeUpdate(m.PERMISSION_ADMIN, p)
-		So(ok, ShouldBeFalse)
-	})
+var (
+	orgID              = int64(1)
+	defaultDashboardID = int64(-1)
+	dashboardID        = int64(1)
+	parentFolderID     = int64(2)
+	childDashboardID   = int64(3)
+	userID             = int64(1)
+	otherUserID        = int64(2)
+	teamID             = int64(1)
+	otherTeamID        = int64(2)
+	adminRole          = m.ROLE_ADMIN
+	editorRole         = m.ROLE_EDITOR
+	viewerRole         = m.ROLE_VIEWER
+)
+
+func TestGuardianAdmin(t *testing.T) {
+	Convey("Guardian admin org role tests", t, func() {
+		orgRoleScenario("Given user has admin org role", t, m.ROLE_ADMIN, func(sc *scenarioContext) {
+			// dashboard has default permissions
+			sc.defaultPermissionScenario(USER, m.PERMISSION_ADMIN, FULL_ACCESS)
+
+			// dashboard has user with permission
+			sc.dashboardPermissionScenario(USER, m.PERMISSION_ADMIN, FULL_ACCESS)
+			sc.dashboardPermissionScenario(USER, m.PERMISSION_EDIT, FULL_ACCESS)
+			sc.dashboardPermissionScenario(USER, m.PERMISSION_VIEW, FULL_ACCESS)
+
+			// dashboard has team with permission
+			sc.dashboardPermissionScenario(TEAM, m.PERMISSION_ADMIN, FULL_ACCESS)
+			sc.dashboardPermissionScenario(TEAM, m.PERMISSION_EDIT, FULL_ACCESS)
+			sc.dashboardPermissionScenario(TEAM, m.PERMISSION_VIEW, FULL_ACCESS)
+
+			// dashboard has editor role with permission
+			sc.dashboardPermissionScenario(EDITOR, m.PERMISSION_ADMIN, FULL_ACCESS)
+			sc.dashboardPermissionScenario(EDITOR, m.PERMISSION_EDIT, FULL_ACCESS)
+			sc.dashboardPermissionScenario(EDITOR, m.PERMISSION_VIEW, FULL_ACCESS)
+
+			// dashboard has viewer role with permission
+			sc.dashboardPermissionScenario(VIEWER, m.PERMISSION_ADMIN, FULL_ACCESS)
+			sc.dashboardPermissionScenario(VIEWER, m.PERMISSION_EDIT, FULL_ACCESS)
+			sc.dashboardPermissionScenario(VIEWER, m.PERMISSION_VIEW, FULL_ACCESS)
+
+			// parent folder has user with permission
+			sc.parentFolderPermissionScenario(USER, m.PERMISSION_ADMIN, FULL_ACCESS)
+			sc.parentFolderPermissionScenario(USER, m.PERMISSION_EDIT, FULL_ACCESS)
+			sc.parentFolderPermissionScenario(USER, m.PERMISSION_VIEW, FULL_ACCESS)
+
+			// parent folder has team with permission
+			sc.parentFolderPermissionScenario(TEAM, m.PERMISSION_ADMIN, FULL_ACCESS)
+			sc.parentFolderPermissionScenario(TEAM, m.PERMISSION_EDIT, FULL_ACCESS)
+			sc.parentFolderPermissionScenario(TEAM, m.PERMISSION_VIEW, FULL_ACCESS)
+
+			// parent folder has editor role with permission
+			sc.parentFolderPermissionScenario(EDITOR, m.PERMISSION_ADMIN, FULL_ACCESS)
+			sc.parentFolderPermissionScenario(EDITOR, m.PERMISSION_EDIT, FULL_ACCESS)
+			sc.parentFolderPermissionScenario(EDITOR, m.PERMISSION_VIEW, FULL_ACCESS)
+
+			// parent folder has viewer role with permission
+			sc.parentFolderPermissionScenario(VIEWER, m.PERMISSION_ADMIN, FULL_ACCESS)
+			sc.parentFolderPermissionScenario(VIEWER, m.PERMISSION_EDIT, FULL_ACCESS)
+			sc.parentFolderPermissionScenario(VIEWER, m.PERMISSION_VIEW, FULL_ACCESS)
 		})
 	})
 }
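The FULL_ACCESS / EDITOR_ACCESS / VIEWER_ACCESS / NO_ACCESS values asserted above and below read as a bitmask over the four guardian checks; the actual const block lives in a test helper file that is not part of this hunk. A hedged reconstruction of the shape implied by verifyExpectedPermissionsFlags further down (names match the test code, exact definitions assumed):

```go
// Assumed reconstruction; the real declarations are outside this diff.
type permissionFlags uint8

const (
	NO_ACCESS permissionFlags = 1 << iota // distinct marker for "no capability at all"
	CAN_ADMIN
	CAN_EDIT
	CAN_SAVE
	CAN_VIEW

	FULL_ACCESS   = CAN_ADMIN | CAN_EDIT | CAN_SAVE | CAN_VIEW
	EDITOR_ACCESS = CAN_EDIT | CAN_SAVE | CAN_VIEW
	VIEWER_ACCESS = CAN_VIEW
)

// noAccess reports whether no capability bit is set; verifyExpectedPermissionsFlags
// normalizes such a result to the explicit NO_ACCESS marker before comparing.
func (f permissionFlags) noAccess() bool {
	return f&(CAN_ADMIN|CAN_EDIT|CAN_SAVE|CAN_VIEW) == 0
}

func (f permissionFlags) canAdmin() bool {
	return f&CAN_ADMIN != 0
}
```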
 
-type scenarioContext struct {
-	g DashboardGuardian
+func TestGuardianEditor(t *testing.T) {
+	Convey("Guardian editor org role tests", t, func() {
+		orgRoleScenario("Given user has editor org role", t, m.ROLE_EDITOR, func(sc *scenarioContext) {
+			// dashboard has user with permission
+			sc.dashboardPermissionScenario(USER, m.PERMISSION_ADMIN, FULL_ACCESS)
+			sc.dashboardPermissionScenario(USER, m.PERMISSION_EDIT, EDITOR_ACCESS)
+			sc.dashboardPermissionScenario(USER, m.PERMISSION_VIEW, CAN_VIEW)
+
+			// dashboard has team with permission
+			sc.dashboardPermissionScenario(TEAM, m.PERMISSION_ADMIN, FULL_ACCESS)
+			sc.dashboardPermissionScenario(TEAM, m.PERMISSION_EDIT, EDITOR_ACCESS)
+			sc.dashboardPermissionScenario(TEAM, m.PERMISSION_VIEW, CAN_VIEW)
+
+			// dashboard has editor role with permission
+			sc.dashboardPermissionScenario(EDITOR, m.PERMISSION_ADMIN, FULL_ACCESS)
+			sc.dashboardPermissionScenario(EDITOR, m.PERMISSION_EDIT, EDITOR_ACCESS)
+			sc.dashboardPermissionScenario(EDITOR, m.PERMISSION_VIEW, VIEWER_ACCESS)
+
+			// dashboard has viewer role with permission
+			sc.dashboardPermissionScenario(VIEWER, m.PERMISSION_ADMIN, NO_ACCESS)
+			sc.dashboardPermissionScenario(VIEWER, m.PERMISSION_EDIT, NO_ACCESS)
+			sc.dashboardPermissionScenario(VIEWER, m.PERMISSION_VIEW, NO_ACCESS)
+
+			// parent folder has user with permission
+			sc.parentFolderPermissionScenario(USER, m.PERMISSION_ADMIN, FULL_ACCESS)
+			sc.parentFolderPermissionScenario(USER, m.PERMISSION_EDIT, EDITOR_ACCESS)
+			sc.parentFolderPermissionScenario(USER, m.PERMISSION_VIEW, VIEWER_ACCESS)
+
+			// parent folder has team with permission
+			sc.parentFolderPermissionScenario(TEAM, m.PERMISSION_ADMIN, FULL_ACCESS)
+			sc.parentFolderPermissionScenario(TEAM, m.PERMISSION_EDIT, EDITOR_ACCESS)
+			sc.parentFolderPermissionScenario(TEAM, m.PERMISSION_VIEW, VIEWER_ACCESS)
+
+			// parent folder has editor role with permission
+			sc.parentFolderPermissionScenario(EDITOR, m.PERMISSION_ADMIN, FULL_ACCESS)
+			sc.parentFolderPermissionScenario(EDITOR, m.PERMISSION_EDIT, EDITOR_ACCESS)
+			sc.parentFolderPermissionScenario(EDITOR, m.PERMISSION_VIEW, VIEWER_ACCESS)
+
+			// parent folder has viewer role with permission
+			sc.parentFolderPermissionScenario(VIEWER, m.PERMISSION_ADMIN, NO_ACCESS)
+			sc.parentFolderPermissionScenario(VIEWER, m.PERMISSION_EDIT, NO_ACCESS)
+			sc.parentFolderPermissionScenario(VIEWER, m.PERMISSION_VIEW, NO_ACCESS)
+		})
+	})
 }
 
-type scenarioFunc func(c *scenarioContext)
+func TestGuardianViewer(t *testing.T) {
+	Convey("Guardian viewer org role tests", t, func() {
+		orgRoleScenario("Given user has viewer org role", t, m.ROLE_VIEWER, func(sc *scenarioContext) {
+			// dashboard has user with permission
+			sc.dashboardPermissionScenario(USER, m.PERMISSION_ADMIN, FULL_ACCESS)
+			sc.dashboardPermissionScenario(USER, m.PERMISSION_EDIT, EDITOR_ACCESS)
+			sc.dashboardPermissionScenario(USER, m.PERMISSION_VIEW, VIEWER_ACCESS)
+
+			// dashboard has team with permission
+			sc.dashboardPermissionScenario(TEAM, m.PERMISSION_ADMIN, FULL_ACCESS)
+			sc.dashboardPermissionScenario(TEAM, m.PERMISSION_EDIT, EDITOR_ACCESS)
+			sc.dashboardPermissionScenario(TEAM, m.PERMISSION_VIEW, VIEWER_ACCESS)
+
+			// dashboard has editor role with permission
+			sc.dashboardPermissionScenario(EDITOR, m.PERMISSION_ADMIN, NO_ACCESS)
+			sc.dashboardPermissionScenario(EDITOR, m.PERMISSION_EDIT, NO_ACCESS)
+			sc.dashboardPermissionScenario(EDITOR, m.PERMISSION_VIEW, NO_ACCESS)
+
+			// dashboard has viewer role with permission
+			sc.dashboardPermissionScenario(VIEWER, m.PERMISSION_ADMIN, FULL_ACCESS)
+			sc.dashboardPermissionScenario(VIEWER, m.PERMISSION_EDIT, EDITOR_ACCESS)
+			sc.dashboardPermissionScenario(VIEWER, m.PERMISSION_VIEW, VIEWER_ACCESS)
+
+			// parent folder has user with permission
+			sc.parentFolderPermissionScenario(USER, m.PERMISSION_ADMIN, FULL_ACCESS)
+			sc.parentFolderPermissionScenario(USER, m.PERMISSION_EDIT, EDITOR_ACCESS)
+			sc.parentFolderPermissionScenario(USER, m.PERMISSION_VIEW, VIEWER_ACCESS)
+
+			// parent folder has team with permission
+			sc.parentFolderPermissionScenario(TEAM, m.PERMISSION_ADMIN, FULL_ACCESS)
+			sc.parentFolderPermissionScenario(TEAM, m.PERMISSION_EDIT, EDITOR_ACCESS)
+			sc.parentFolderPermissionScenario(TEAM, m.PERMISSION_VIEW, VIEWER_ACCESS)
+
+			// parent folder has editor role with permission
+			sc.parentFolderPermissionScenario(EDITOR, m.PERMISSION_ADMIN, NO_ACCESS)
+			sc.parentFolderPermissionScenario(EDITOR, m.PERMISSION_EDIT, NO_ACCESS)
+			sc.parentFolderPermissionScenario(EDITOR, m.PERMISSION_VIEW, NO_ACCESS)
+
+			// parent folder has viewer role with permission
+			sc.parentFolderPermissionScenario(VIEWER, m.PERMISSION_ADMIN, FULL_ACCESS)
+			sc.parentFolderPermissionScenario(VIEWER, m.PERMISSION_EDIT, EDITOR_ACCESS)
+			sc.parentFolderPermissionScenario(VIEWER, m.PERMISSION_VIEW, VIEWER_ACCESS)
+		})
+	})
+}
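Each scenario helper that follows first records its caller via runtime.Caller(1). Because every assertion runs inside shared helpers, a bare failure would point at the helper's own line; capturing callerFile and callerLine lets reportFailure name the exact sc.*PermissionScenario call that set up the failing expectation. The mechanism in isolation (the helper and message below are illustrative, not from this diff):

```go
package main

import (
	"fmt"
	"runtime"
)

// verify stands in for the scenario helpers: it notes where it was invoked
// so a failure can be attributed to that call site instead of this function.
func verify(ok bool) {
	_, callerFile, callerLine, _ := runtime.Caller(1)
	if !ok {
		fmt.Printf("expectation failed, scenario defined at %s:%d\n", callerFile, callerLine)
	}
}

func main() {
	verify(true)
	verify(false) // failure message points at this line, not at verify()
}
```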
+ // dashboard has editor role with permission + sc.dashboardPermissionScenario(EDITOR, m.PERMISSION_ADMIN, NO_ACCESS) + sc.dashboardPermissionScenario(EDITOR, m.PERMISSION_EDIT, NO_ACCESS) + sc.dashboardPermissionScenario(EDITOR, m.PERMISSION_VIEW, NO_ACCESS) + + // dashboard has viewer role with permission + sc.dashboardPermissionScenario(VIEWER, m.PERMISSION_ADMIN, FULL_ACCESS) + sc.dashboardPermissionScenario(VIEWER, m.PERMISSION_EDIT, EDITOR_ACCESS) + sc.dashboardPermissionScenario(VIEWER, m.PERMISSION_VIEW, VIEWER_ACCESS) + + // parent folder has user with permission + sc.parentFolderPermissionScenario(USER, m.PERMISSION_ADMIN, FULL_ACCESS) + sc.parentFolderPermissionScenario(USER, m.PERMISSION_EDIT, EDITOR_ACCESS) + sc.parentFolderPermissionScenario(USER, m.PERMISSION_VIEW, VIEWER_ACCESS) + + // parent folder has team with permission + sc.parentFolderPermissionScenario(TEAM, m.PERMISSION_ADMIN, FULL_ACCESS) + sc.parentFolderPermissionScenario(TEAM, m.PERMISSION_EDIT, EDITOR_ACCESS) + sc.parentFolderPermissionScenario(TEAM, m.PERMISSION_VIEW, VIEWER_ACCESS) + + // parent folder has editor role with permission + sc.parentFolderPermissionScenario(EDITOR, m.PERMISSION_ADMIN, NO_ACCESS) + sc.parentFolderPermissionScenario(EDITOR, m.PERMISSION_EDIT, NO_ACCESS) + sc.parentFolderPermissionScenario(EDITOR, m.PERMISSION_VIEW, NO_ACCESS) + + // parent folder has viewer role with permission + sc.parentFolderPermissionScenario(VIEWER, m.PERMISSION_ADMIN, FULL_ACCESS) + sc.parentFolderPermissionScenario(VIEWER, m.PERMISSION_EDIT, EDITOR_ACCESS) + sc.parentFolderPermissionScenario(VIEWER, m.PERMISSION_VIEW, VIEWER_ACCESS) + }) + }) +} + +func (sc *scenarioContext) defaultPermissionScenario(pt permissionType, permission m.PermissionType, flag permissionFlags) { + _, callerFile, callerLine, _ := runtime.Caller(1) + sc.callerFile = callerFile + sc.callerLine = callerLine + existingPermissions := []*m.DashboardAclInfoDTO{ + toDto(newEditorRolePermission(defaultDashboardID, m.PERMISSION_EDIT)), + toDto(newViewerRolePermission(defaultDashboardID, m.PERMISSION_VIEW)), } - Convey(desc, func() { - fn(sc) + permissionScenario("and existing permissions are the default permissions (everyone with editor role can edit, everyone with viewer role can view)", dashboardID, sc, existingPermissions, func(sc *scenarioContext) { + sc.expectedFlags = flag + sc.verifyExpectedPermissionsFlags() + sc.verifyDuplicatePermissionsShouldNotBeAllowed() + sc.verifyUpdateDashboardPermissionsShouldBeAllowed(pt) + sc.verifyUpdateDashboardPermissionsShouldNotBeAllowed(pt) }) } -func permissionScenario(desc string, sc *scenarioContext, permissions []*m.DashboardAclInfoDTO, fn scenarioFunc) { - bus.ClearBusHandlers() +func (sc *scenarioContext) dashboardPermissionScenario(pt permissionType, permission m.PermissionType, flag permissionFlags) { + _, callerFile, callerLine, _ := runtime.Caller(1) + sc.callerFile = callerFile + sc.callerLine = callerLine + var existingPermissions []*m.DashboardAclInfoDTO - bus.AddHandler("test", func(query *m.GetDashboardAclInfoListQuery) error { - query.Result = permissions - return nil + switch pt { + case USER: + existingPermissions = []*m.DashboardAclInfoDTO{{OrgId: orgID, DashboardId: dashboardID, UserId: userID, Permission: permission}} + case TEAM: + existingPermissions = []*m.DashboardAclInfoDTO{{OrgId: orgID, DashboardId: dashboardID, TeamId: teamID, Permission: permission}} + case EDITOR: + existingPermissions = []*m.DashboardAclInfoDTO{{OrgId: orgID, DashboardId: dashboardID, Role: &editorRole,
Permission: permission}} + case VIEWER: + existingPermissions = []*m.DashboardAclInfoDTO{{OrgId: orgID, DashboardId: dashboardID, Role: &viewerRole, Permission: permission}} + } + + permissionScenario(fmt.Sprintf("and %s has permission to %s dashboard", pt.String(), permission.String()), dashboardID, sc, existingPermissions, func(sc *scenarioContext) { + sc.expectedFlags = flag + sc.verifyExpectedPermissionsFlags() + sc.verifyDuplicatePermissionsShouldNotBeAllowed() + sc.verifyUpdateDashboardPermissionsShouldBeAllowed(pt) + sc.verifyUpdateDashboardPermissionsShouldNotBeAllowed(pt) }) +} - teams := []*m.Team{} +func (sc *scenarioContext) parentFolderPermissionScenario(pt permissionType, permission m.PermissionType, flag permissionFlags) { + _, callerFile, callerLine, _ := runtime.Caller(1) + sc.callerFile = callerFile + sc.callerLine = callerLine + var folderPermissionList []*m.DashboardAclInfoDTO - for _, p := range permissions { - if p.TeamId > 0 { - teams = append(teams, &m.Team{Id: p.TeamId}) + switch pt { + case USER: + folderPermissionList = []*m.DashboardAclInfoDTO{{OrgId: orgID, DashboardId: parentFolderID, UserId: userID, Permission: permission, Inherited: true}} + case TEAM: + folderPermissionList = []*m.DashboardAclInfoDTO{{OrgId: orgID, DashboardId: parentFolderID, TeamId: teamID, Permission: permission, Inherited: true}} + case EDITOR: + folderPermissionList = []*m.DashboardAclInfoDTO{{OrgId: orgID, DashboardId: parentFolderID, Role: &editorRole, Permission: permission, Inherited: true}} + case VIEWER: + folderPermissionList = []*m.DashboardAclInfoDTO{{OrgId: orgID, DashboardId: parentFolderID, Role: &viewerRole, Permission: permission, Inherited: true}} + } + + permissionScenario(fmt.Sprintf("and parent folder has %s with permission to %s", pt.String(), permission.String()), childDashboardID, sc, folderPermissionList, func(sc *scenarioContext) { + sc.expectedFlags = flag + sc.verifyExpectedPermissionsFlags() + sc.verifyDuplicatePermissionsShouldNotBeAllowed() + sc.verifyUpdateChildDashboardPermissionsShouldBeAllowed(pt, permission) + sc.verifyUpdateChildDashboardPermissionsShouldNotBeAllowed(pt, permission) + sc.verifyUpdateChildDashboardPermissionsWithOverrideShouldBeAllowed(pt, permission) + sc.verifyUpdateChildDashboardPermissionsWithOverrideShouldNotBeAllowed(pt, permission) + }) +} + +func (sc *scenarioContext) verifyExpectedPermissionsFlags() { + canAdmin, _ := sc.g.CanAdmin() + canEdit, _ := sc.g.CanEdit() + canSave, _ := sc.g.CanSave() + canView, _ := sc.g.CanView() + + tc := fmt.Sprintf("should have permissions to %s", sc.expectedFlags.String()) + Convey(tc, func() { + var actualFlag permissionFlags + + if canAdmin { + actualFlag |= CAN_ADMIN } - } - bus.AddHandler("test", func(query *m.GetTeamsByUserQuery) error { - query.Result = teams - return nil - }) + if canEdit { + actualFlag |= CAN_EDIT + } - Convey(desc, func() { - fn(sc) + if canSave { + actualFlag |= CAN_SAVE + } + + if canView { + actualFlag |= CAN_VIEW + } + + if actualFlag.noAccess() { + actualFlag = NO_ACCESS + } + + if sc.expectedFlags&actualFlag != sc.expectedFlags { + sc.reportFailure(tc, sc.expectedFlags.String(), actualFlag.String()) + } + + sc.reportSuccess() }) } -func userWithPermissionScenario(permission m.PermissionType, sc *scenarioContext, fn scenarioFunc) { - p := []*m.DashboardAclInfoDTO{ - {OrgId: 1, DashboardId: 1, UserId: 1, Permission: permission}, +func (sc *scenarioContext) verifyDuplicatePermissionsShouldNotBeAllowed() { + if !sc.expectedFlags.canAdmin() { + return } - 
permissionScenario(fmt.Sprintf("and user has permission to %s item", permission), sc, p, fn) + + tc := "When updating dashboard permissions with duplicate permission for user should not be allowed" + Convey(tc, func() { + p := []*m.DashboardAcl{ + newDefaultUserPermission(dashboardID, m.PERMISSION_VIEW), + newDefaultUserPermission(dashboardID, m.PERMISSION_ADMIN), + } + sc.updatePermissions = p + _, err := sc.g.CheckPermissionBeforeUpdate(m.PERMISSION_ADMIN, p) + + if err != ErrGuardianPermissionExists { + sc.reportFailure(tc, ErrGuardianPermissionExists, err) + } + sc.reportSuccess() + }) + + tc = "When updating dashboard permissions with duplicate permission for team should not be allowed" + Convey(tc, func() { + p := []*m.DashboardAcl{ + newDefaultTeamPermission(dashboardID, m.PERMISSION_VIEW), + newDefaultTeamPermission(dashboardID, m.PERMISSION_ADMIN), + } + sc.updatePermissions = p + _, err := sc.g.CheckPermissionBeforeUpdate(m.PERMISSION_ADMIN, p) + + if err != ErrGuardianPermissionExists { + sc.reportFailure(tc, ErrGuardianPermissionExists, err) + } + sc.reportSuccess() + }) + + tc = "When updating dashboard permissions with duplicate permission for editor role should not be allowed" + Convey(tc, func() { + p := []*m.DashboardAcl{ + newEditorRolePermission(dashboardID, m.PERMISSION_VIEW), + newEditorRolePermission(dashboardID, m.PERMISSION_ADMIN), + } + sc.updatePermissions = p + _, err := sc.g.CheckPermissionBeforeUpdate(m.PERMISSION_ADMIN, p) + + if err != ErrGuardianPermissionExists { + sc.reportFailure(tc, ErrGuardianPermissionExists, err) + } + sc.reportSuccess() + }) + + tc = "When updating dashboard permissions with duplicate permission for viewer role should not be allowed" + Convey(tc, func() { + p := []*m.DashboardAcl{ + newViewerRolePermission(dashboardID, m.PERMISSION_VIEW), + newViewerRolePermission(dashboardID, m.PERMISSION_ADMIN), + } + sc.updatePermissions = p + _, err := sc.g.CheckPermissionBeforeUpdate(m.PERMISSION_ADMIN, p) + + if err != ErrGuardianPermissionExists { + sc.reportFailure(tc, ErrGuardianPermissionExists, err) + } + sc.reportSuccess() + }) + + tc = "When updating dashboard permissions with duplicate permission for admin role should not be allowed" + Convey(tc, func() { + p := []*m.DashboardAcl{ + newAdminRolePermission(dashboardID, m.PERMISSION_ADMIN), + } + sc.updatePermissions = p + _, err := sc.g.CheckPermissionBeforeUpdate(m.PERMISSION_ADMIN, p) + + if err != ErrGuardianPermissionExists { + sc.reportFailure(tc, ErrGuardianPermissionExists, err) + } + sc.reportSuccess() + }) } -func teamWithPermissionScenario(permission m.PermissionType, sc *scenarioContext, fn scenarioFunc) { - p := []*m.DashboardAclInfoDTO{ - {OrgId: 1, DashboardId: 1, TeamId: 1, Permission: permission}, +func (sc *scenarioContext) verifyUpdateDashboardPermissionsShouldBeAllowed(pt permissionType) { + if !sc.expectedFlags.canAdmin() { + return + } + + for _, p := range []m.PermissionType{m.PERMISSION_ADMIN, m.PERMISSION_EDIT, m.PERMISSION_VIEW} { + tc := fmt.Sprintf("When updating dashboard permissions with %s permissions should be allowed", p.String()) + + Convey(tc, func() { + permissionList := []*m.DashboardAcl{} + switch pt { + case USER: + permissionList = []*m.DashboardAcl{ + newEditorRolePermission(dashboardID, p), + newViewerRolePermission(dashboardID, p), + newCustomUserPermission(dashboardID, otherUserID, p), + newDefaultTeamPermission(dashboardID, p), + } + case TEAM: + permissionList = []*m.DashboardAcl{ + newEditorRolePermission(dashboardID, p), + 
newViewerRolePermission(dashboardID, p), + newDefaultUserPermission(dashboardID, p), + newCustomTeamPermission(dashboardID, otherTeamID, p), + } + case EDITOR, VIEWER: + permissionList = []*m.DashboardAcl{ + newEditorRolePermission(dashboardID, p), + newViewerRolePermission(dashboardID, p), + newDefaultUserPermission(dashboardID, p), + newDefaultTeamPermission(dashboardID, p), + } + } + + sc.updatePermissions = permissionList + ok, err := sc.g.CheckPermissionBeforeUpdate(m.PERMISSION_ADMIN, permissionList) + + if err != nil { + sc.reportFailure(tc, nil, err) + } + if !ok { + sc.reportFailure(tc, false, true) + } + sc.reportSuccess() + }) } - permissionScenario(fmt.Sprintf("and team has permission to %s item", permission), sc, p, fn) } -func everyoneWithRoleScenario(role m.RoleType, permission m.PermissionType, sc *scenarioContext, fn scenarioFunc) { - p := []*m.DashboardAclInfoDTO{ - {OrgId: 1, DashboardId: 1, UserId: -1, Role: &role, Permission: permission}, +func (sc *scenarioContext) verifyUpdateDashboardPermissionsShouldNotBeAllowed(pt permissionType) { + if sc.expectedFlags.canAdmin() { + return + } + + for _, p := range []m.PermissionType{m.PERMISSION_ADMIN, m.PERMISSION_EDIT, m.PERMISSION_VIEW} { + tc := fmt.Sprintf("When updating dashboard permissions with %s permissions should NOT be allowed", p.String()) + + Convey(tc, func() { + permissionList := []*m.DashboardAcl{ + newEditorRolePermission(dashboardID, p), + newViewerRolePermission(dashboardID, p), + } + switch pt { + case USER: + permissionList = append(permissionList, []*m.DashboardAcl{ + newCustomUserPermission(dashboardID, otherUserID, p), + newDefaultTeamPermission(dashboardID, p), + }...) + case TEAM: + permissionList = append(permissionList, []*m.DashboardAcl{ + newDefaultUserPermission(dashboardID, p), + newCustomTeamPermission(dashboardID, otherTeamID, p), + }...) 
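+ // no additional entries for EDITOR and VIEWER: the editor and viewer role permissions above already cover those cases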
+ } + + sc.updatePermissions = permissionList + ok, err := sc.g.CheckPermissionBeforeUpdate(m.PERMISSION_ADMIN, permissionList) + + if err != nil { + sc.reportFailure(tc, nil, err) + } + if ok { + sc.reportFailure(tc, true, false) + } + sc.reportSuccess() + }) + } +} + +func (sc *scenarioContext) verifyUpdateChildDashboardPermissionsShouldBeAllowed(pt permissionType, parentFolderPermission m.PermissionType) { + if !sc.expectedFlags.canAdmin() { + return + } + + for _, p := range []m.PermissionType{m.PERMISSION_ADMIN, m.PERMISSION_EDIT, m.PERMISSION_VIEW} { + tc := fmt.Sprintf("When updating child dashboard permissions with %s permissions should be allowed", p.String()) + + Convey(tc, func() { + permissionList := []*m.DashboardAcl{} + switch pt { + case USER: + permissionList = []*m.DashboardAcl{ + newEditorRolePermission(childDashboardID, p), + newViewerRolePermission(childDashboardID, p), + newCustomUserPermission(childDashboardID, otherUserID, p), + newDefaultTeamPermission(childDashboardID, p), + } + case TEAM: + permissionList = []*m.DashboardAcl{ + newEditorRolePermission(childDashboardID, p), + newViewerRolePermission(childDashboardID, p), + newDefaultUserPermission(childDashboardID, p), + newCustomTeamPermission(childDashboardID, otherTeamID, p), + } + case EDITOR: + permissionList = []*m.DashboardAcl{ + newViewerRolePermission(childDashboardID, p), + newDefaultUserPermission(childDashboardID, p), + newDefaultTeamPermission(childDashboardID, p), + } + + // permission to update is higher than parent folder permission + if p > parentFolderPermission { + permissionList = append(permissionList, newEditorRolePermission(childDashboardID, p)) + } + case VIEWER: + permissionList = []*m.DashboardAcl{ + newEditorRolePermission(childDashboardID, p), + newDefaultUserPermission(childDashboardID, p), + newDefaultTeamPermission(childDashboardID, p), + } + + // permission to update is higher than parent folder permission + if p > parentFolderPermission { + permissionList = append(permissionList, newViewerRolePermission(childDashboardID, p)) + } + } + + sc.updatePermissions = permissionList + ok, err := sc.g.CheckPermissionBeforeUpdate(m.PERMISSION_ADMIN, permissionList) + + if err != nil { + sc.reportFailure(tc, nil, err) + } + if !ok { + sc.reportFailure(tc, false, true) + } + sc.reportSuccess() + }) + } +} + +func (sc *scenarioContext) verifyUpdateChildDashboardPermissionsShouldNotBeAllowed(pt permissionType, parentFolderPermission m.PermissionType) { + if sc.expectedFlags.canAdmin() { + return + } + + for _, p := range []m.PermissionType{m.PERMISSION_ADMIN, m.PERMISSION_EDIT, m.PERMISSION_VIEW} { + tc := fmt.Sprintf("When updating child dashboard permissions with %s permissions should NOT be allowed", p.String()) + + Convey(tc, func() { + permissionList := []*m.DashboardAcl{} + switch pt { + case USER: + permissionList = []*m.DashboardAcl{ + newEditorRolePermission(childDashboardID, p), + newViewerRolePermission(childDashboardID, p), + newCustomUserPermission(childDashboardID, otherUserID, p), + newDefaultTeamPermission(childDashboardID, p), + } + case TEAM: + permissionList = []*m.DashboardAcl{ + newEditorRolePermission(childDashboardID, p), + newViewerRolePermission(childDashboardID, p), + newDefaultUserPermission(childDashboardID, p), + newCustomTeamPermission(childDashboardID, otherTeamID, p), + } + case EDITOR: + permissionList = []*m.DashboardAcl{ + newViewerRolePermission(childDashboardID, p), + newDefaultUserPermission(childDashboardID, p), + newDefaultTeamPermission(childDashboardID, 
p), + } + + // permission to update is higher than parent folder permission + if p > parentFolderPermission { + permissionList = append(permissionList, newEditorRolePermission(childDashboardID, p)) + } + case VIEWER: + permissionList = []*m.DashboardAcl{ + newEditorRolePermission(childDashboardID, p), + newDefaultUserPermission(childDashboardID, p), + newDefaultTeamPermission(childDashboardID, p), + } + + // permission to update is higher than parent folder permission + if p > parentFolderPermission { + permissionList = append(permissionList, newViewerRolePermission(childDashboardID, p)) + } + } + + sc.updatePermissions = permissionList + ok, err := sc.g.CheckPermissionBeforeUpdate(m.PERMISSION_ADMIN, permissionList) + + if err != nil { + sc.reportFailure(tc, nil, err) + } + if ok { + sc.reportFailure(tc, true, false) + } + sc.reportSuccess() + }) + } +} + +func (sc *scenarioContext) verifyUpdateChildDashboardPermissionsWithOverrideShouldBeAllowed(pt permissionType, parentFolderPermission m.PermissionType) { + if !sc.expectedFlags.canAdmin() { + return + } + + for _, p := range []m.PermissionType{m.PERMISSION_ADMIN, m.PERMISSION_EDIT, m.PERMISSION_VIEW} { + // skip when permission to update is higher than parent folder permission + if p > parentFolderPermission { + continue + } + + tc := fmt.Sprintf("When updating child dashboard permissions overriding parent %s permission with %s permission should NOT be allowed", pt.String(), p.String()) + + Convey(tc, func() { + permissionList := []*m.DashboardAcl{} + switch pt { + case USER: + permissionList = []*m.DashboardAcl{ + newDefaultUserPermission(childDashboardID, p), + } + case TEAM: + permissionList = []*m.DashboardAcl{ + newDefaultTeamPermission(childDashboardID, p), + } + case EDITOR: + permissionList = []*m.DashboardAcl{ + newEditorRolePermission(childDashboardID, p), + } + case VIEWER: + permissionList = []*m.DashboardAcl{ + newViewerRolePermission(childDashboardID, p), + } + } + + sc.updatePermissions = permissionList + _, err := sc.g.CheckPermissionBeforeUpdate(m.PERMISSION_ADMIN, permissionList) + + if err != ErrGuardianOverride { + sc.reportFailure(tc, ErrGuardianOverride, err) + } + sc.reportSuccess() + }) + } +} + +func (sc *scenarioContext) verifyUpdateChildDashboardPermissionsWithOverrideShouldNotBeAllowed(pt permissionType, parentFolderPermission m.PermissionType) { + if !sc.expectedFlags.canAdmin() { + return + } + + for _, p := range []m.PermissionType{m.PERMISSION_ADMIN, m.PERMISSION_EDIT, m.PERMISSION_VIEW} { + // skip when permission to update is lower than or equal to parent folder permission + if p <= parentFolderPermission { + continue + } + + tc := fmt.Sprintf("When updating child dashboard permissions overriding parent %s permission with %s permission should be allowed", pt.String(), p.String()) + + Convey(tc, func() { + permissionList := []*m.DashboardAcl{} + switch pt { + case USER: + permissionList = []*m.DashboardAcl{ + newDefaultUserPermission(childDashboardID, p), + } + case TEAM: + permissionList = []*m.DashboardAcl{ + newDefaultTeamPermission(childDashboardID, p), + } + case EDITOR: + permissionList = []*m.DashboardAcl{ + newEditorRolePermission(childDashboardID, p), + } + case VIEWER: + permissionList = []*m.DashboardAcl{ + newViewerRolePermission(childDashboardID, p), + } + } + + _, err := sc.g.CheckPermissionBeforeUpdate(m.PERMISSION_ADMIN, permissionList) + if err != nil { + sc.reportFailure(tc, nil, err) + } + sc.updatePermissions = permissionList + ok, err := sc.g.CheckPermissionBeforeUpdate(m.PERMISSION_ADMIN, permissionList)
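+ // second check, now with sc.updatePermissions recorded for failure reporting; overriding an inherited permission with a higher one is expected to succeed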
+ + if err != nil { + sc.reportFailure(tc, nil, err) + } + if !ok { + sc.reportFailure(tc, false, true) + } + sc.reportSuccess() + }) } - permissionScenario(fmt.Sprintf("and everyone with %s role can %s item", role, permission), sc, p, fn) } diff --git a/pkg/services/guardian/guardian_util_test.go b/pkg/services/guardian/guardian_util_test.go new file mode 100644 index 00000000000..b065c4194ad --- /dev/null +++ b/pkg/services/guardian/guardian_util_test.go @@ -0,0 +1,256 @@ +package guardian + +import ( + "bytes" + "fmt" + "strings" + "testing" + + "github.com/grafana/grafana/pkg/bus" + m "github.com/grafana/grafana/pkg/models" + . "github.com/smartystreets/goconvey/convey" +) + +type scenarioContext struct { + t *testing.T + orgRoleScenario string + permissionScenario string + g DashboardGuardian + givenUser *m.SignedInUser + givenDashboardID int64 + givenPermissions []*m.DashboardAclInfoDTO + givenTeams []*m.Team + updatePermissions []*m.DashboardAcl + expectedFlags permissionFlags + callerFile string + callerLine int +} + +type scenarioFunc func(c *scenarioContext) + +func orgRoleScenario(desc string, t *testing.T, role m.RoleType, fn scenarioFunc) { + user := &m.SignedInUser{ + UserId: userID, + OrgId: orgID, + OrgRole: role, + } + guard := New(dashboardID, orgID, user) + sc := &scenarioContext{ + t: t, + orgRoleScenario: desc, + givenUser: user, + givenDashboardID: dashboardID, + g: guard, + } + + Convey(desc, func() { + fn(sc) + }) +} + +func permissionScenario(desc string, dashboardID int64, sc *scenarioContext, permissions []*m.DashboardAclInfoDTO, fn scenarioFunc) { + bus.ClearBusHandlers() + + bus.AddHandler("test", func(query *m.GetDashboardAclInfoListQuery) error { + if query.OrgId != sc.givenUser.OrgId { + sc.reportFailure("Invalid organization id for GetDashboardAclInfoListQuery", sc.givenUser.OrgId, query.OrgId) + } + if query.DashboardId != sc.givenDashboardID { + sc.reportFailure("Invalid dashboard id for GetDashboardAclInfoListQuery", sc.givenDashboardID, query.DashboardId) + } + + query.Result = permissions + return nil + }) + + teams := []*m.Team{} + + for _, p := range permissions { + if p.TeamId > 0 { + teams = append(teams, &m.Team{Id: p.TeamId}) + } + } + + bus.AddHandler("test", func(query *m.GetTeamsByUserQuery) error { + if query.OrgId != sc.givenUser.OrgId { + sc.reportFailure("Invalid organization id for GetTeamsByUserQuery", sc.givenUser.OrgId, query.OrgId) + } + if query.UserId != sc.givenUser.UserId { + sc.reportFailure("Invalid user id for GetTeamsByUserQuery", sc.givenUser.UserId, query.UserId) + } + + query.Result = teams + return nil + }) + + sc.permissionScenario = desc + sc.g = New(dashboardID, sc.givenUser.OrgId, sc.givenUser) + sc.givenDashboardID = dashboardID + sc.givenPermissions = permissions + sc.givenTeams = teams + + Convey(desc, func() { + fn(sc) + }) +} + +type permissionType uint8 + +const ( + USER permissionType = 1 << iota + TEAM + EDITOR + VIEWER +) + +func (p permissionType) String() string { + names := map[uint8]string{ + uint8(USER): "user", + uint8(TEAM): "team", + uint8(EDITOR): "editor role", + uint8(VIEWER): "viewer role", + } + return names[uint8(p)] +} + +type permissionFlags uint8 + +const ( + NO_ACCESS permissionFlags = 1 << iota + CAN_ADMIN + CAN_EDIT + CAN_SAVE + CAN_VIEW + FULL_ACCESS = CAN_ADMIN | CAN_EDIT | CAN_SAVE | CAN_VIEW + EDITOR_ACCESS = CAN_EDIT | CAN_SAVE | CAN_VIEW + VIEWER_ACCESS = CAN_VIEW +) + +func (flag permissionFlags) canAdmin() bool { + return flag&CAN_ADMIN != 0 +} + +func (flag permissionFlags) 
canEdit() bool { + return flag&CAN_EDIT != 0 +} + +func (flag permissionFlags) canSave() bool { + return flag&CAN_SAVE != 0 +} + +func (flag permissionFlags) canView() bool { + return flag&CAN_VIEW != 0 +} + +func (flag permissionFlags) noAccess() bool { + return flag&(CAN_ADMIN|CAN_EDIT|CAN_SAVE|CAN_VIEW) == 0 +} + +func (f permissionFlags) String() string { + r := []string{} + + if f.canAdmin() { + r = append(r, "admin") + } + + if f.canEdit() { + r = append(r, "edit") + } + + if f.canSave() { + r = append(r, "save") + } + + if f.canView() { + r = append(r, "view") + } + + if f.noAccess() { + r = append(r, "") + } + + return strings.Join(r[:], ", ") +} + +func (sc *scenarioContext) reportSuccess() { + So(true, ShouldBeTrue) +} + +func (sc *scenarioContext) reportFailure(desc string, expected interface{}, actual interface{}) { + var buf bytes.Buffer + buf.WriteString("\n") + buf.WriteString(sc.orgRoleScenario) + buf.WriteString(" ") + buf.WriteString(sc.permissionScenario) + buf.WriteString("\n ") + buf.WriteString(desc) + buf.WriteString("\n") + buf.WriteString(fmt.Sprintf("Source test: %s:%d\n", sc.callerFile, sc.callerLine)) + buf.WriteString(fmt.Sprintf("Expected: %v\n", expected)) + buf.WriteString(fmt.Sprintf("Actual: %v\n", actual)) + buf.WriteString("Context:") + buf.WriteString(fmt.Sprintf("\n Given user: orgRole=%s, id=%d, orgId=%d", sc.givenUser.OrgRole, sc.givenUser.UserId, sc.givenUser.OrgId)) + buf.WriteString(fmt.Sprintf("\n Given dashboard id: %d", sc.givenDashboardID)) + + for i, p := range sc.givenPermissions { + r := "" + if p.Role != nil { + r = string(*p.Role) + } + buf.WriteString(fmt.Sprintf("\n Given permission (%d): dashboardId=%d, userId=%d, teamId=%d, role=%v, permission=%s", i, p.DashboardId, p.UserId, p.TeamId, r, p.Permission.String())) + } + + for i, t := range sc.givenTeams { + buf.WriteString(fmt.Sprintf("\n Given team (%d): id=%d", i, t.Id)) + } + + for i, p := range sc.updatePermissions { + r := "" + if p.Role != nil { + r = string(*p.Role) + } + buf.WriteString(fmt.Sprintf("\n Update permission (%d): dashboardId=%d, userId=%d, teamId=%d, role=%v, permission=%s", i, p.DashboardId, p.UserId, p.TeamId, r, p.Permission.String())) + } + + sc.t.Fatalf(buf.String()) +} + +func newCustomUserPermission(dashboardID int64, userID int64, permission m.PermissionType) *m.DashboardAcl { + return &m.DashboardAcl{OrgId: orgID, DashboardId: dashboardID, UserId: userID, Permission: permission} +} + +func newDefaultUserPermission(dashboardID int64, permission m.PermissionType) *m.DashboardAcl { + return newCustomUserPermission(dashboardID, userID, permission) +} + +func newCustomTeamPermission(dashboardID int64, teamID int64, permission m.PermissionType) *m.DashboardAcl { + return &m.DashboardAcl{OrgId: orgID, DashboardId: dashboardID, TeamId: teamID, Permission: permission} +} + +func newDefaultTeamPermission(dashboardID int64, permission m.PermissionType) *m.DashboardAcl { + return newCustomTeamPermission(dashboardID, teamID, permission) +} + +func newAdminRolePermission(dashboardID int64, permission m.PermissionType) *m.DashboardAcl { + return &m.DashboardAcl{OrgId: orgID, DashboardId: dashboardID, Role: &adminRole, Permission: permission} +} + +func newEditorRolePermission(dashboardID int64, permission m.PermissionType) *m.DashboardAcl { + return &m.DashboardAcl{OrgId: orgID, DashboardId: dashboardID, Role: &editorRole, Permission: permission} +} + +func newViewerRolePermission(dashboardID int64, permission m.PermissionType) *m.DashboardAcl { + return 
&m.DashboardAcl{OrgId: orgID, DashboardId: dashboardID, Role: &viewerRole, Permission: permission} +} + +func toDto(acl *m.DashboardAcl) *m.DashboardAclInfoDTO { + return &m.DashboardAclInfoDTO{ + OrgId: acl.OrgId, + DashboardId: acl.DashboardId, + UserId: acl.UserId, + TeamId: acl.TeamId, + Role: acl.Role, + Permission: acl.Permission, + PermissionName: acl.Permission.String(), + } +} diff --git a/pkg/services/notifications/mailer.go b/pkg/services/notifications/mailer.go index 7fbf39ee41d..4730ef7f0f1 100644 --- a/pkg/services/notifications/mailer.go +++ b/pkg/services/notifications/mailer.go @@ -7,51 +7,18 @@ package notifications import ( "bytes" "crypto/tls" - "errors" "fmt" "html/template" "net" "strconv" - "strings" - "github.com/grafana/grafana/pkg/log" m "github.com/grafana/grafana/pkg/models" "github.com/grafana/grafana/pkg/setting" - "gopkg.in/gomail.v2" + gomail "gopkg.in/mail.v2" ) -var mailQueue chan *Message - -func initMailQueue() { - mailQueue = make(chan *Message, 10) - go processMailQueue() -} - -func processMailQueue() { - for { - select { - case msg := <-mailQueue: - num, err := send(msg) - tos := strings.Join(msg.To, "; ") - info := "" - if err != nil { - if len(msg.Info) > 0 { - info = ", info: " + msg.Info - } - log.Error(4, fmt.Sprintf("Async sent email %d succeed, not send emails: %s%s err: %s", num, tos, info, err)) - } else { - log.Trace(fmt.Sprintf("Async sent email %d succeed, sent emails: %s%s", num, tos, info)) - } - } - } -} - -var addToMailQueue = func(msg *Message) { - mailQueue <- msg -} - -func send(msg *Message) (int, error) { - dialer, err := createDialer() +func (ns *NotificationService) send(msg *Message) (int, error) { + dialer, err := ns.createDialer() if err != nil { return 0, err } @@ -75,8 +42,8 @@ func send(msg *Message) (int, error) { return len(msg.To), nil } -func createDialer() (*gomail.Dialer, error) { - host, port, err := net.SplitHostPort(setting.Smtp.Host) +func (ns *NotificationService) createDialer() (*gomail.Dialer, error) { + host, port, err := net.SplitHostPort(ns.Cfg.Smtp.Host) if err != nil { return nil, err @@ -87,30 +54,31 @@ func createDialer() (*gomail.Dialer, error) { } tlsconfig := &tls.Config{ - InsecureSkipVerify: setting.Smtp.SkipVerify, + InsecureSkipVerify: ns.Cfg.Smtp.SkipVerify, ServerName: host, } - if setting.Smtp.CertFile != "" { - cert, err := tls.LoadX509KeyPair(setting.Smtp.CertFile, setting.Smtp.KeyFile) + if ns.Cfg.Smtp.CertFile != "" { + cert, err := tls.LoadX509KeyPair(ns.Cfg.Smtp.CertFile, ns.Cfg.Smtp.KeyFile) if err != nil { return nil, fmt.Errorf("Could not load cert or key file. 
error: %v", err) } tlsconfig.Certificates = []tls.Certificate{cert} } - d := gomail.NewDialer(host, iPort, setting.Smtp.User, setting.Smtp.Password) + d := gomail.NewDialer(host, iPort, ns.Cfg.Smtp.User, ns.Cfg.Smtp.Password) d.TLSConfig = tlsconfig - if setting.Smtp.EhloIdentity != "" { - d.LocalName = setting.Smtp.EhloIdentity + + if ns.Cfg.Smtp.EhloIdentity != "" { + d.LocalName = ns.Cfg.Smtp.EhloIdentity } else { d.LocalName = setting.InstanceName } return d, nil } -func buildEmailMessage(cmd *m.SendEmailCommand) (*Message, error) { - if !setting.Smtp.Enabled { +func (ns *NotificationService) buildEmailMessage(cmd *m.SendEmailCommand) (*Message, error) { + if !ns.Cfg.Smtp.Enabled { return nil, m.ErrSmtpNotEnabled } @@ -135,7 +103,7 @@ func buildEmailMessage(cmd *m.SendEmailCommand) (*Message, error) { subjectText, hasSubject := subjectData["value"] if !hasSubject { - return nil, errors.New(fmt.Sprintf("Missing subject in Template %s", cmd.Template)) + return nil, fmt.Errorf("Missing subject in Template %s", cmd.Template) } subjectTmpl, err := template.New("subject").Parse(subjectText.(string)) @@ -154,7 +122,7 @@ func buildEmailMessage(cmd *m.SendEmailCommand) (*Message, error) { return &Message{ To: cmd.To, - From: fmt.Sprintf("%s <%s>", setting.Smtp.FromName, setting.Smtp.FromAddress), + From: fmt.Sprintf("%s <%s>", ns.Cfg.Smtp.FromName, ns.Cfg.Smtp.FromAddress), Subject: subject, Body: buffer.String(), EmbededFiles: cmd.EmbededFiles, diff --git a/pkg/services/notifications/notifications.go b/pkg/services/notifications/notifications.go index 25eb2b5936a..14d362c5e1e 100644 --- a/pkg/services/notifications/notifications.go +++ b/pkg/services/notifications/notifications.go @@ -7,11 +7,13 @@ import ( "html/template" "net/url" "path/filepath" + "strings" "github.com/grafana/grafana/pkg/bus" "github.com/grafana/grafana/pkg/events" "github.com/grafana/grafana/pkg/log" m "github.com/grafana/grafana/pkg/models" + "github.com/grafana/grafana/pkg/registry" "github.com/grafana/grafana/pkg/setting" "github.com/grafana/grafana/pkg/util" ) @@ -21,33 +23,46 @@ var tmplResetPassword = "reset_password.html" var tmplSignUpStarted = "signup_started.html" var tmplWelcomeOnSignUp = "welcome_on_signup.html" -func Init() error { - initMailQueue() - initWebhookQueue() +func init() { + registry.RegisterService(&NotificationService{}) +} - bus.AddHandler("email", sendResetPasswordEmail) - bus.AddHandler("email", validateResetPasswordCode) - bus.AddHandler("email", sendEmailCommandHandler) +type NotificationService struct { + Bus bus.Bus `inject:""` + Cfg *setting.Cfg `inject:""` - bus.AddCtxHandler("email", sendEmailCommandHandlerSync) + mailQueue chan *Message + webhookQueue chan *Webhook + log log.Logger +} - bus.AddCtxHandler("webhook", SendWebhookSync) +func (ns *NotificationService) Init() error { + ns.log = log.New("notifications") + ns.mailQueue = make(chan *Message, 10) + ns.webhookQueue = make(chan *Webhook, 10) - bus.AddEventListener(signUpStartedHandler) - bus.AddEventListener(signUpCompletedHandler) + ns.Bus.AddHandler(ns.sendResetPasswordEmail) + ns.Bus.AddHandler(ns.validateResetPasswordCode) + ns.Bus.AddHandler(ns.sendEmailCommandHandler) + + ns.Bus.AddCtxHandler(ns.sendEmailCommandHandlerSync) + ns.Bus.AddCtxHandler(ns.SendWebhookSync) + + ns.Bus.AddEventListener(ns.signUpStartedHandler) + ns.Bus.AddEventListener(ns.signUpCompletedHandler) mailTemplates = template.New("name") mailTemplates.Funcs(template.FuncMap{ "Subject": subjectTemplateFunc, }) - templatePattern := 
filepath.Join(setting.StaticRootPath, setting.Smtp.TemplatesPattern) + templatePattern := filepath.Join(setting.StaticRootPath, ns.Cfg.Smtp.TemplatesPattern) _, err := mailTemplates.ParseGlob(templatePattern) if err != nil { return err } - if !util.IsEmail(setting.Smtp.FromAddress) { + if !util.IsEmail(ns.Cfg.Smtp.FromAddress) { return errors.New("Invalid email address for SMTP from_address config") } @@ -58,14 +73,44 @@ func Init() error { return nil } -func SendWebhookSync(ctx context.Context, cmd *m.SendWebhookSync) error { - return sendWebRequestSync(ctx, &Webhook{ - Url: cmd.Url, - User: cmd.User, - Password: cmd.Password, - Body: cmd.Body, - HttpMethod: cmd.HttpMethod, - HttpHeader: cmd.HttpHeader, +func (ns *NotificationService) Run(ctx context.Context) error { + for { + select { + case webhook := <-ns.webhookQueue: + err := ns.sendWebRequestSync(context.Background(), webhook) + + if err != nil { + ns.log.Error("Failed to send web request", "error", err) + } + case msg := <-ns.mailQueue: + num, err := ns.send(msg) + tos := strings.Join(msg.To, "; ") + info := "" + if err != nil { + if len(msg.Info) > 0 { + info = ", info: " + msg.Info + } + ns.log.Error(fmt.Sprintf("Async email send failed, sent: %d, recipients: %s%s, error: %s", num, tos, info, err)) + } else { + ns.log.Debug(fmt.Sprintf("Async email sent, count: %d, recipients: %s%s", num, tos, info)) + } + case <-ctx.Done(): + return ctx.Err() + } + } + + return nil +} + +func (ns *NotificationService) SendWebhookSync(ctx context.Context, cmd *m.SendWebhookSync) error { + return ns.sendWebRequestSync(ctx, &Webhook{ + Url: cmd.Url, + User: cmd.User, + Password: cmd.Password, + Body: cmd.Body, + HttpMethod: cmd.HttpMethod, + HttpHeader: cmd.HttpHeader, + ContentType: cmd.ContentType, }) } @@ -74,8 +119,8 @@ func subjectTemplateFunc(obj map[string]interface{}, value string) string { return "" } -func sendEmailCommandHandlerSync(ctx context.Context, cmd *m.SendEmailCommandSync) error { - message, err := buildEmailMessage(&m.SendEmailCommand{ +func (ns *NotificationService) sendEmailCommandHandlerSync(ctx context.Context, cmd *m.SendEmailCommandSync) error { + message, err := ns.buildEmailMessage(&m.SendEmailCommand{ Data: cmd.Data, Info: cmd.Info, Template: cmd.Template, @@ -88,25 +133,23 @@ func sendEmailCommandHandlerSyn return err } - _, err = send(message) - + _, err = ns.send(message) return err } -func sendEmailCommandHandler(cmd *m.SendEmailCommand) error { - message, err := buildEmailMessage(cmd) +func (ns *NotificationService) sendEmailCommandHandler(cmd *m.SendEmailCommand) error { + message, err := ns.buildEmailMessage(cmd) if err != nil { return err } - addToMailQueue(message) - + ns.mailQueue <- message return nil } -func sendResetPasswordEmail(cmd *m.SendResetPasswordEmailCommand) error { - return sendEmailCommandHandler(&m.SendEmailCommand{ +func (ns *NotificationService) sendResetPasswordEmail(cmd *m.SendResetPasswordEmailCommand) error { + return ns.sendEmailCommandHandler(&m.SendEmailCommand{ To: []string{cmd.User.Email}, Template: tmplResetPassword, Data: map[string]interface{}{ @@ -116,7 +159,7 @@ func sendResetPasswordEmail(cmd *m.SendResetPasswordEmailCommand) error { }) } -func validateResetPasswordCode(query *m.ValidateResetPasswordCodeQuery) error { +func (ns *NotificationService) validateResetPasswordCode(query *m.ValidateResetPasswordCodeQuery) error { login := getLoginForEmailCode(query.Code) if login == "" { return m.ErrInvalidEmailCode @@ -135,18 +178,18 @@
func validateResetPasswordCode(query *m.ValidateResetPasswordCodeQuery) error { return nil } -func signUpStartedHandler(evt *events.SignUpStarted) error { +func (ns *NotificationService) signUpStartedHandler(evt *events.SignUpStarted) error { if !setting.VerifyEmailEnabled { return nil } - log.Info("User signup started: %s", evt.Email) + ns.log.Info("User signup started", "email", evt.Email) if evt.Email == "" { return nil } - err := sendEmailCommandHandler(&m.SendEmailCommand{ + err := ns.sendEmailCommandHandler(&m.SendEmailCommand{ To: []string{evt.Email}, Template: tmplSignUpStarted, Data: map[string]interface{}{ @@ -155,6 +198,7 @@ func signUpStartedHandler(evt *events.SignUpStarted) error { "SignUpUrl": setting.ToAbsUrl(fmt.Sprintf("signup/?email=%s&code=%s", url.QueryEscape(evt.Email), url.QueryEscape(evt.Code))), }, }) + if err != nil { return err } @@ -163,12 +207,12 @@ func signUpStartedHandler(evt *events.SignUpStarted) error { return bus.Dispatch(&emailSentCmd) } -func signUpCompletedHandler(evt *events.SignUpCompleted) error { - if evt.Email == "" || !setting.Smtp.SendWelcomeEmailOnSignUp { +func (ns *NotificationService) signUpCompletedHandler(evt *events.SignUpCompleted) error { + if evt.Email == "" || !ns.Cfg.Smtp.SendWelcomeEmailOnSignUp { return nil } - return sendEmailCommandHandler(&m.SendEmailCommand{ + return ns.sendEmailCommandHandler(&m.SendEmailCommand{ To: []string{evt.Email}, Template: tmplWelcomeOnSignUp, Data: map[string]interface{}{ diff --git a/pkg/services/notifications/notifications_test.go b/pkg/services/notifications/notifications_test.go index 3a5ff5fedb7..504c10c22ec 100644 --- a/pkg/services/notifications/notifications_test.go +++ b/pkg/services/notifications/notifications_test.go @@ -3,6 +3,7 @@ package notifications import ( "testing" + "github.com/grafana/grafana/pkg/bus" m "github.com/grafana/grafana/pkg/models" "github.com/grafana/grafana/pkg/setting" . 
"github.com/smartystreets/goconvey/convey" @@ -17,25 +18,24 @@ type testTriggeredAlert struct { func TestNotifications(t *testing.T) { Convey("Given the notifications service", t, func() { - //bus.ClearBusHandlers() - setting.StaticRootPath = "../../../public/" - setting.Smtp.Enabled = true - setting.Smtp.TemplatesPattern = "emails/*.html" - setting.Smtp.FromAddress = "from@address.com" - setting.Smtp.FromName = "Grafana Admin" - err := Init() + ns := &NotificationService{} + ns.Bus = bus.New() + ns.Cfg = setting.NewCfg() + ns.Cfg.Smtp.Enabled = true + ns.Cfg.Smtp.TemplatesPattern = "emails/*.html" + ns.Cfg.Smtp.FromAddress = "from@address.com" + ns.Cfg.Smtp.FromName = "Grafana Admin" + + err := ns.Init() So(err, ShouldBeNil) - var sentMsg *Message - addToMailQueue = func(msg *Message) { - sentMsg = msg - } - Convey("When sending reset email password", func() { - err := sendResetPasswordEmail(&m.SendResetPasswordEmailCommand{User: &m.User{Email: "asd@asd.com"}}) + err := ns.sendResetPasswordEmail(&m.SendResetPasswordEmailCommand{User: &m.User{Email: "asd@asd.com"}}) So(err, ShouldBeNil) + + sentMsg := <-ns.mailQueue So(sentMsg.Body, ShouldContainSubstring, "body") So(sentMsg.Subject, ShouldEqual, "Reset your Grafana password - asd@asd.com") So(sentMsg.Body, ShouldNotContainSubstring, "Subject") diff --git a/pkg/services/notifications/send_email_integration_test.go b/pkg/services/notifications/send_email_integration_test.go index a9a5215d3ca..201f86036d3 100644 --- a/pkg/services/notifications/send_email_integration_test.go +++ b/pkg/services/notifications/send_email_integration_test.go @@ -12,23 +12,19 @@ import ( func TestEmailIntegrationTest(t *testing.T) { SkipConvey("Given the notifications service", t, func() { - bus.ClearBusHandlers() - setting.StaticRootPath = "../../../public/" - setting.Smtp.Enabled = true - setting.Smtp.TemplatesPattern = "emails/*.html" - setting.Smtp.FromAddress = "from@address.com" - setting.Smtp.FromName = "Grafana Admin" setting.BuildVersion = "4.0.0" - err := Init() - So(err, ShouldBeNil) + ns := &NotificationService{} + ns.Bus = bus.New() + ns.Cfg = setting.NewCfg() + ns.Cfg.Smtp.Enabled = true + ns.Cfg.Smtp.TemplatesPattern = "emails/*.html" + ns.Cfg.Smtp.FromAddress = "from@address.com" + ns.Cfg.Smtp.FromName = "Grafana Admin" - addToMailQueue = func(msg *Message) { - So(msg.From, ShouldEqual, "Grafana Admin ") - So(msg.To[0], ShouldEqual, "asdf@asdf.com") - ioutil.WriteFile("../../../tmp/test_email.html", []byte(msg.Body), 0777) - } + err := ns.Init() + So(err, ShouldBeNil) Convey("When sending reset email password", func() { cmd := &m.SendEmailCommand{ @@ -59,8 +55,13 @@ func TestEmailIntegrationTest(t *testing.T) { Template: "alert_notification.html", } - err := sendEmailCommandHandler(cmd) + err := ns.sendEmailCommandHandler(cmd) So(err, ShouldBeNil) + + sentMsg := <-ns.mailQueue + So(sentMsg.From, ShouldEqual, "Grafana Admin ") + So(sentMsg.To[0], ShouldEqual, "asdf@asdf.com") + ioutil.WriteFile("../../../tmp/test_email.html", []byte(sentMsg.Body), 0777) }) }) } diff --git a/pkg/services/notifications/webhook.go b/pkg/services/notifications/webhook.go index dff5aa4924a..a236a1d1c4e 100644 --- a/pkg/services/notifications/webhook.go +++ b/pkg/services/notifications/webhook.go @@ -11,17 +11,17 @@ import ( "golang.org/x/net/context/ctxhttp" - "github.com/grafana/grafana/pkg/log" "github.com/grafana/grafana/pkg/util" ) type Webhook struct { - Url string - User string - Password string - Body string - HttpMethod string - HttpHeader map[string]string + 
Url string + User string + Password string + Body string + HttpMethod string + HttpHeader map[string]string + ContentType string } var netTransport = &http.Transport{ @@ -37,32 +37,8 @@ var netClient = &http.Client{ Transport: netTransport, } -var ( - webhookQueue chan *Webhook - webhookLog log.Logger -) - -func initWebhookQueue() { - webhookLog = log.New("notifications.webhook") - webhookQueue = make(chan *Webhook, 10) - go processWebhookQueue() -} - -func processWebhookQueue() { - for { - select { - case webhook := <-webhookQueue: - err := sendWebRequestSync(context.Background(), webhook) - - if err != nil { - webhookLog.Error("Failed to send webrequest ", "error", err) - } - } - } -} - -func sendWebRequestSync(ctx context.Context, webhook *Webhook) error { - webhookLog.Debug("Sending webhook", "url", webhook.Url, "http method", webhook.HttpMethod) +func (ns *NotificationService) sendWebRequestSync(ctx context.Context, webhook *Webhook) error { + ns.log.Debug("Sending webhook", "url", webhook.Url, "http method", webhook.HttpMethod) if webhook.HttpMethod == "" { webhook.HttpMethod = http.MethodPost @@ -73,8 +49,13 @@ func sendWebRequestSync(ctx context.Context, webhook *Webhook) error { return err } - request.Header.Add("Content-Type", "application/json") + if webhook.ContentType == "" { + webhook.ContentType = "application/json" + } + + request.Header.Add("Content-Type", webhook.ContentType) request.Header.Add("User-Agent", "Grafana") + if webhook.User != "" && webhook.Password != "" { request.Header.Add("Authorization", util.GetBasicAuthHeader(webhook.User, webhook.Password)) } @@ -98,10 +79,6 @@ func sendWebRequestSync(ctx context.Context, webhook *Webhook) error { return err } - webhookLog.Debug("Webhook failed", "statuscode", resp.Status, "body", string(body)) + ns.log.Debug("Webhook failed", "statuscode", resp.Status, "body", string(body)) return fmt.Errorf("Webhook response status %v", resp.Status) } - -var addToWebhookQueue = func(msg *Webhook) { - webhookQueue <- msg -} diff --git a/pkg/services/provisioning/dashboards/config_reader.go b/pkg/services/provisioning/dashboards/config_reader.go index 9030ba609b9..4f9577f82db 100644 --- a/pkg/services/provisioning/dashboards/config_reader.go +++ b/pkg/services/provisioning/dashboards/config_reader.go @@ -58,7 +58,7 @@ func (cr *configReader) readConfig() ([]*DashboardsAsConfig, error) { files, err := ioutil.ReadDir(cr.path) if err != nil { - cr.log.Error("cant read dashboard provisioning files from directory", "path", cr.path) + cr.log.Error("can't read dashboard provisioning files from directory", "path", cr.path) return dashboards, nil } @@ -69,7 +69,7 @@ func (cr *configReader) readConfig() ([]*DashboardsAsConfig, error) { parsedDashboards, err := cr.parseConfigs(file) if err != nil { - + return nil, err } if len(parsedDashboards) > 0 { diff --git a/pkg/services/provisioning/dashboards/config_reader_test.go b/pkg/services/provisioning/dashboards/config_reader_test.go index ecbf6435c36..72664c37990 100644 --- a/pkg/services/provisioning/dashboards/config_reader_test.go +++ b/pkg/services/provisioning/dashboards/config_reader_test.go @@ -8,9 +8,9 @@ import ( ) var ( - simpleDashboardConfig string = "./test-configs/dashboards-from-disk" - oldVersion string = "./test-configs/version-0" - brokenConfigs string = "./test-configs/broken-configs" + simpleDashboardConfig = "./test-configs/dashboards-from-disk" + oldVersion = "./test-configs/version-0" + brokenConfigs = "./test-configs/broken-configs" ) func TestDashboardsAsConfig(t 
*testing.T) { diff --git a/pkg/services/provisioning/dashboards/dashboard.go b/pkg/services/provisioning/dashboards/dashboard.go index a5349517bbe..a856565bf01 100644 --- a/pkg/services/provisioning/dashboards/dashboard.go +++ b/pkg/services/provisioning/dashboards/dashboard.go @@ -10,19 +10,16 @@ import ( type DashboardProvisioner struct { cfgReader *configReader log log.Logger - ctx context.Context } -func Provision(ctx context.Context, configDirectory string) (*DashboardProvisioner, error) { +func NewDashboardProvisioner(configDirectory string) *DashboardProvisioner { log := log.New("provisioning.dashboard") d := &DashboardProvisioner{ cfgReader: &configReader{path: configDirectory, log: log}, log: log, - ctx: ctx, } - err := d.Provision(ctx) - return d, err + return d } func (provider *DashboardProvisioner) Provision(ctx context.Context) error { diff --git a/pkg/services/provisioning/dashboards/file_reader.go b/pkg/services/provisioning/dashboards/file_reader.go index de0a49d34d9..e5186e12f06 100644 --- a/pkg/services/provisioning/dashboards/file_reader.go +++ b/pkg/services/provisioning/dashboards/file_reader.go @@ -19,9 +19,9 @@ import ( ) var ( - checkDiskForChangesInterval time.Duration = time.Second * 3 + checkDiskForChangesInterval = time.Second * 3 - ErrFolderNameMissing error = errors.New("Folder name missing") + ErrFolderNameMissing = errors.New("Folder name missing") ) type fileReader struct { @@ -235,7 +235,6 @@ func getOrCreateFolderId(cfg *DashboardsAsConfig, service dashboards.DashboardPr func resolveSymlink(fileinfo os.FileInfo, path string) (os.FileInfo, error) { checkFilepath, err := filepath.EvalSymlinks(path) if path != checkFilepath { - path = checkFilepath fi, err := os.Lstat(checkFilepath) if err != nil { return nil, err diff --git a/pkg/services/provisioning/dashboards/types.go b/pkg/services/provisioning/dashboards/types.go index f742b321552..4a55351d3e4 100644 --- a/pkg/services/provisioning/dashboards/types.go +++ b/pkg/services/provisioning/dashboards/types.go @@ -55,9 +55,6 @@ func createDashboardJson(data *simplejson.Json, lastModified time.Time, cfg *Das dash.OrgId = cfg.OrgId dash.Dashboard.OrgId = cfg.OrgId dash.Dashboard.FolderId = folderId - if !cfg.Editable { - dash.Dashboard.Data.Set("editable", cfg.Editable) - } if dash.Dashboard.Title == "" { return nil, models.ErrDashboardTitleEmpty diff --git a/pkg/services/provisioning/datasources/config_reader.go b/pkg/services/provisioning/datasources/config_reader.go index 58ed5472a6b..4b8931f0ed3 100644 --- a/pkg/services/provisioning/datasources/config_reader.go +++ b/pkg/services/provisioning/datasources/config_reader.go @@ -19,7 +19,7 @@ func (cr *configReader) readConfig(path string) ([]*DatasourcesAsConfig, error) files, err := ioutil.ReadDir(path) if err != nil { - cr.log.Error("cant read datasource provisioning files from directory", "path", path) + cr.log.Error("can't read datasource provisioning files from directory", "path", path) return datasources, nil } diff --git a/pkg/services/provisioning/datasources/config_reader_test.go b/pkg/services/provisioning/datasources/config_reader_test.go index 3198329e0ae..89ecc5a0b68 100644 --- a/pkg/services/provisioning/datasources/config_reader_test.go +++ b/pkg/services/provisioning/datasources/config_reader_test.go @@ -11,14 +11,14 @@ import ( ) var ( - logger log.Logger = log.New("fake.log") - oneDatasourcesConfig string = "" - twoDatasourcesConfig string = "./test-configs/two-datasources" - twoDatasourcesConfigPurgeOthers string = 
"./test-configs/insert-two-delete-two" - doubleDatasourcesConfig string = "./test-configs/double-default" - allProperties string = "./test-configs/all-properties" - versionZero string = "./test-configs/version-0" - brokenYaml string = "./test-configs/broken-yaml" + logger log.Logger = log.New("fake.log") + + twoDatasourcesConfig = "./test-configs/two-datasources" + twoDatasourcesConfigPurgeOthers = "./test-configs/insert-two-delete-two" + doubleDatasourcesConfig = "./test-configs/double-default" + allProperties = "./test-configs/all-properties" + versionZero = "./test-configs/version-0" + brokenYaml = "./test-configs/broken-yaml" fakeRepo *fakeRepository ) diff --git a/pkg/services/provisioning/provisioning.go b/pkg/services/provisioning/provisioning.go index b41ec37b797..9044ae97389 100644 --- a/pkg/services/provisioning/provisioning.go +++ b/pkg/services/provisioning/provisioning.go @@ -2,34 +2,40 @@ package provisioning import ( "context" + "fmt" "path" - "path/filepath" + "github.com/grafana/grafana/pkg/registry" "github.com/grafana/grafana/pkg/services/provisioning/dashboards" "github.com/grafana/grafana/pkg/services/provisioning/datasources" - ini "gopkg.in/ini.v1" + "github.com/grafana/grafana/pkg/setting" ) -func Init(ctx context.Context, homePath string, cfg *ini.File) error { - provisioningPath := makeAbsolute(cfg.Section("paths").Key("provisioning").String(), homePath) +func init() { + registry.RegisterService(&ProvisioningService{}) +} - datasourcePath := path.Join(provisioningPath, "datasources") +type ProvisioningService struct { + Cfg *setting.Cfg `inject:""` +} + +func (ps *ProvisioningService) Init() error { + datasourcePath := path.Join(ps.Cfg.ProvisioningPath, "datasources") if err := datasources.Provision(datasourcePath); err != nil { - return err - } - - dashboardPath := path.Join(provisioningPath, "dashboards") - _, err := dashboards.Provision(ctx, dashboardPath) - if err != nil { - return err + return fmt.Errorf("Datasource provisioning error: %v", err) } return nil } -func makeAbsolute(path string, root string) string { - if filepath.IsAbs(path) { - return path +func (ps *ProvisioningService) Run(ctx context.Context) error { + dashboardPath := path.Join(ps.Cfg.ProvisioningPath, "dashboards") + dashProvisioner := dashboards.NewDashboardProvisioner(dashboardPath) + + if err := dashProvisioner.Provision(ctx); err != nil { + return err } - return filepath.Join(root, path) + + <-ctx.Done() + return ctx.Err() } diff --git a/pkg/services/search/handlers.go b/pkg/services/search/handlers.go index cf194c320bb..9d40697f489 100644 --- a/pkg/services/search/handlers.go +++ b/pkg/services/search/handlers.go @@ -5,13 +5,23 @@ import ( "github.com/grafana/grafana/pkg/bus" m "github.com/grafana/grafana/pkg/models" + "github.com/grafana/grafana/pkg/registry" ) -func Init() { - bus.AddHandler("search", searchHandler) +func init() { + registry.RegisterService(&SearchService{}) } -func searchHandler(query *Query) error { +type SearchService struct { + Bus bus.Bus `inject:""` +} + +func (s *SearchService) Init() error { + s.Bus.AddHandler(s.searchHandler) + return nil +} + +func (s *SearchService) searchHandler(query *Query) error { dashQuery := FindPersistedDashboardsQuery{ Title: query.Title, SignedInUser: query.SignedInUser, diff --git a/pkg/services/search/handlers_test.go b/pkg/services/search/handlers_test.go index fc223b2ef4b..5cf934cbc92 100644 --- a/pkg/services/search/handlers_test.go +++ b/pkg/services/search/handlers_test.go @@ -12,6 +12,7 @@ func TestSearch(t *testing.T) { 
Convey("Given search query", t, func() { query := Query{Limit: 2000, SignedInUser: &m.SignedInUser{IsGrafanaAdmin: true}} + ss := &SearchService{} bus.AddHandler("test", func(query *FindPersistedDashboardsQuery) error { query.Result = HitList{ @@ -35,7 +36,7 @@ func TestSearch(t *testing.T) { }) Convey("That is empty", func() { - err := searchHandler(&query) + err := ss.searchHandler(&query) So(err, ShouldBeNil) Convey("should return sorted results", func() { diff --git a/pkg/services/sqlstore/alert_notification.go b/pkg/services/sqlstore/alert_notification.go index ae691c7166c..651241f7714 100644 --- a/pkg/services/sqlstore/alert_notification.go +++ b/pkg/services/sqlstore/alert_notification.go @@ -23,12 +23,7 @@ func DeleteAlertNotification(cmd *m.DeleteAlertNotificationCommand) error { return inTransaction(func(sess *DBSession) error { sql := "DELETE FROM alert_notification WHERE alert_notification.org_id = ? AND alert_notification.id = ?" _, err := sess.Exec(sql, cmd.OrgId, cmd.Id) - - if err != nil { - return err - } - - return nil + return err }) } diff --git a/pkg/services/sqlstore/alert_notification_test.go b/pkg/services/sqlstore/alert_notification_test.go index d37062fb58f..761114978a8 100644 --- a/pkg/services/sqlstore/alert_notification_test.go +++ b/pkg/services/sqlstore/alert_notification_test.go @@ -21,7 +21,7 @@ func TestAlertNotificationSQLAccess(t *testing.T) { } err := GetAlertNotifications(cmd) - fmt.Printf("errror %v", err) + fmt.Printf("error %v", err) So(err, ShouldBeNil) So(cmd.Result, ShouldBeNil) }) diff --git a/pkg/services/sqlstore/annotation.go b/pkg/services/sqlstore/annotation.go index 76f1819a18c..1710679cea1 100644 --- a/pkg/services/sqlstore/annotation.go +++ b/pkg/services/sqlstore/annotation.go @@ -5,6 +5,7 @@ import ( "errors" "fmt" "strings" + "time" "github.com/grafana/grafana/pkg/models" "github.com/grafana/grafana/pkg/services/annotations" @@ -17,18 +18,24 @@ func (r *SqlAnnotationRepo) Save(item *annotations.Item) error { return inTransaction(func(sess *DBSession) error { tags := models.ParseTagPairs(item.Tags) item.Tags = models.JoinTagPairs(tags) + item.Created = time.Now().UnixNano() / int64(time.Millisecond) + item.Updated = item.Created + if item.Epoch == 0 { + item.Epoch = item.Created + } + if _, err := sess.Table("annotation").Insert(item); err != nil { return err } if item.Tags != nil { - if tags, err := r.ensureTagsExist(sess, tags); err != nil { + tags, err := r.ensureTagsExist(sess, tags) + if err != nil { return err - } else { - for _, tag := range tags { - if _, err := sess.Exec("INSERT INTO annotation_tag (annotation_id, tag_id) VALUES(?,?)", item.Id, tag.Id); err != nil { - return err - } + } + for _, tag := range tags { + if _, err := sess.Exec("INSERT INTO annotation_tag (annotation_id, tag_id) VALUES(?,?)", item.Id, tag.Id); err != nil { + return err } } } @@ -79,6 +86,7 @@ func (r *SqlAnnotationRepo) Update(item *annotations.Item) error { return errors.New("Annotation not found") } + existing.Updated = time.Now().UnixNano() / int64(time.Millisecond) existing.Epoch = item.Epoch existing.Text = item.Text if item.RegionId != 0 { @@ -86,27 +94,24 @@ func (r *SqlAnnotationRepo) Update(item *annotations.Item) error { } if item.Tags != nil { - if tags, err := r.ensureTagsExist(sess, models.ParseTagPairs(item.Tags)); err != nil { + tags, err := r.ensureTagsExist(sess, models.ParseTagPairs(item.Tags)) + if err != nil { return err - } else { - if _, err := sess.Exec("DELETE FROM annotation_tag WHERE annotation_id = ?", existing.Id); err 
!= nil { + } + if _, err := sess.Exec("DELETE FROM annotation_tag WHERE annotation_id = ?", existing.Id); err != nil { + return err + } + for _, tag := range tags { + if _, err := sess.Exec("INSERT INTO annotation_tag (annotation_id, tag_id) VALUES(?,?)", existing.Id, tag.Id); err != nil { return err } - for _, tag := range tags { - if _, err := sess.Exec("INSERT INTO annotation_tag (annotation_id, tag_id) VALUES(?,?)", existing.Id, tag.Id); err != nil { - return err - } - } } } existing.Tags = item.Tags - if _, err := sess.Table("annotation").Id(existing.Id).Cols("epoch", "text", "region_id", "tags").Update(existing); err != nil { - return err - } - - return nil + _, err = sess.Table("annotation").Id(existing.Id).Cols("epoch", "text", "region_id", "updated", "tags").Update(existing) + return err }) } @@ -127,6 +132,8 @@ func (r *SqlAnnotationRepo) Find(query *annotations.ItemQuery) ([]*annotations.I annotation.text, annotation.tags, annotation.data, + annotation.created, + annotation.updated, usr.email, usr.login, alert.name as alert_name @@ -164,6 +171,11 @@ func (r *SqlAnnotationRepo) Find(query *annotations.ItemQuery) ([]*annotations.I params = append(params, query.PanelId) } + if query.UserId != 0 { + sql.WriteString(` AND annotation.user_id = ?`) + params = append(params, query.UserId) + } + if query.From > 0 && query.To > 0 { sql.WriteString(` AND annotation.epoch BETWEEN ? AND ?`) params = append(params, query.From, query.To) @@ -171,6 +183,8 @@ func (r *SqlAnnotationRepo) Find(query *annotations.ItemQuery) ([]*annotations.I if query.Type == "alert" { sql.WriteString(` AND annotation.alert_id > 0`) + } else if query.Type == "annotation" { + sql.WriteString(` AND annotation.alert_id = 0`) } if len(query.Tags) > 0 { @@ -202,7 +216,7 @@ func (r *SqlAnnotationRepo) Find(query *annotations.ItemQuery) ([]*annotations.I } if query.Limit == 0 { - query.Limit = 10 + query.Limit = 100 } sql.WriteString(fmt.Sprintf(" ORDER BY epoch DESC LIMIT %v", query.Limit)) diff --git a/pkg/services/sqlstore/annotation_test.go b/pkg/services/sqlstore/annotation_test.go index d5cee110b9a..949ed8135ba 100644 --- a/pkg/services/sqlstore/annotation_test.go +++ b/pkg/services/sqlstore/annotation_test.go @@ -79,6 +79,12 @@ func TestAnnotations(t *testing.T) { Convey("Can read tags", func() { So(items[0].Tags, ShouldResemble, []string{"outage", "error", "type:outage", "server:server-1"}) }) + + Convey("Has created and updated values", func() { + So(items[0].Created, ShouldBeGreaterThan, 0) + So(items[0].Updated, ShouldBeGreaterThan, 0) + So(items[0].Updated, ShouldEqual, items[0].Created) + }) }) Convey("Can query for annotation by id", func() { @@ -231,6 +237,10 @@ func TestAnnotations(t *testing.T) { So(items[0].Tags, ShouldResemble, []string{"newtag1", "newtag2"}) So(items[0].Text, ShouldEqual, "something new") }) + + Convey("Updated time has increased", func() { + So(items[0].Updated, ShouldBeGreaterThan, items[0].Created) + }) }) Convey("Can delete annotation", func() { @@ -246,6 +256,7 @@ func TestAnnotations(t *testing.T) { annotationId := items[0].Id err = repo.Delete(&annotations.DeleteParams{Id: annotationId}) + So(err, ShouldBeNil) items, err = repo.Find(query) So(err, ShouldBeNil) diff --git a/pkg/services/sqlstore/apikey.go b/pkg/services/sqlstore/apikey.go index 0532f636625..9d41b5c809e 100644 --- a/pkg/services/sqlstore/apikey.go +++ b/pkg/services/sqlstore/apikey.go @@ -55,7 +55,7 @@ func GetApiKeyById(query *m.GetApiKeyByIdQuery) error { if err != nil { return err - } else if has == false { + } 
else if !has { return m.ErrInvalidApiKey } @@ -69,7 +69,7 @@ func GetApiKeyByName(query *m.GetApiKeyByNameQuery) error { if err != nil { return err - } else if has == false { + } else if !has { return m.ErrInvalidApiKey } diff --git a/pkg/services/sqlstore/dashboard.go b/pkg/services/sqlstore/dashboard.go index 8a89c3d942c..aff532bb3b5 100644 --- a/pkg/services/sqlstore/dashboard.go +++ b/pkg/services/sqlstore/dashboard.go @@ -24,6 +24,7 @@ func init() { bus.AddHandler("sql", GetDashboardPermissionsForUser) bus.AddHandler("sql", GetDashboardsBySlug) bus.AddHandler("sql", ValidateDashboardBeforeSave) + bus.AddHandler("sql", HasEditPermissionInFolders) } var generateNewUid func() string = util.GenerateShortUid @@ -63,7 +64,7 @@ func saveDashboard(sess *DBSession, cmd *m.SaveDashboardCommand) error { } // do not allow plugin dashboard updates without overwrite flag - if existing.PluginId != "" && cmd.Overwrite == false { + if existing.PluginId != "" && !cmd.Overwrite { return m.UpdatePluginDashboardError{PluginId: existing.PluginId} } } @@ -77,7 +78,7 @@ func saveDashboard(sess *DBSession, cmd *m.SaveDashboardCommand) error { } parentVersion := dash.Version - affectedRows := int64(0) + var affectedRows int64 var err error if dash.Id == 0 { @@ -172,7 +173,7 @@ func GetDashboard(query *m.GetDashboardQuery) error { if err != nil { return err - } else if has == false { + } else if !has { return m.ErrDashboardNotFound } @@ -308,7 +309,7 @@ func DeleteDashboard(cmd *m.DeleteDashboardCommand) error { has, err := sess.Get(&dashboard) if err != nil { return err - } else if has == false { + } else if !has { return m.ErrDashboardNotFound } @@ -347,12 +348,7 @@ func GetDashboards(query *m.GetDashboardsQuery) error { err := x.In("id", query.DashboardIds).Find(&dashboards) query.Result = dashboards - - if err != nil { - return err - } - - return nil + return err } // GetDashboardPermissionsForUser returns the maximum permission the specified user has for a dashboard(s) @@ -431,12 +427,7 @@ func GetDashboardsByPluginId(query *m.GetDashboardsByPluginIdQuery) error { err := x.Where(whereExpr, query.OrgId, query.PluginId).Find(&dashboards) query.Result = dashboards - - if err != nil { - return err - } - - return nil + return err } type DashboardSlugDTO struct { @@ -451,7 +442,7 @@ func GetDashboardSlugById(query *m.GetDashboardSlugByIdQuery) error { if err != nil { return err - } else if exists == false { + } else if !exists { return m.ErrDashboardNotFound } @@ -479,7 +470,7 @@ func GetDashboardUIDById(query *m.GetDashboardRefByIdQuery) error { if err != nil { return err - } else if exists == false { + } else if !exists { return m.ErrDashboardNotFound } @@ -544,6 +535,10 @@ func getExistingDashboardByIdOrUidForUpdate(sess *DBSession, cmd *m.ValidateDash dash.SetId(existingByUid.Id) dash.SetUid(existingByUid.Uid) existing = existingByUid + + if !dash.IsFolder { + cmd.Result.IsParentFolderChanged = true + } } if (existing.IsFolder && !dash.IsFolder) || @@ -551,6 +546,10 @@ func getExistingDashboardByIdOrUidForUpdate(sess *DBSession, cmd *m.ValidateDash return m.ErrDashboardTypeMismatch } + if !dash.IsFolder && dash.FolderId != existing.FolderId { + cmd.Result.IsParentFolderChanged = true + } + // check if someone else has written in between if dash.Version != existing.Version { if cmd.Overwrite { @@ -561,7 +560,7 @@ func getExistingDashboardByIdOrUidForUpdate(sess *DBSession, cmd *m.ValidateDash } // do not allow plugin dashboard updates without overwrite flag - if existing.PluginId != "" && cmd.Overwrite ==
false { + if existing.PluginId != "" && !cmd.Overwrite { return m.UpdatePluginDashboardError{PluginId: existing.PluginId} } @@ -586,6 +585,10 @@ func getExistingDashboardByTitleAndFolder(sess *DBSession, cmd *m.ValidateDashbo return m.ErrDashboardFolderWithSameNameAsDashboard } + if !dash.IsFolder && (dash.FolderId != existing.FolderId || dash.Id == 0) { + cmd.Result.IsParentFolderChanged = true + } + if cmd.Overwrite { dash.SetId(existing.Id) dash.SetUid(existing.Uid) @@ -599,6 +602,7 @@ func getExistingDashboardByTitleAndFolder(sess *DBSession, cmd *m.ValidateDashbo } func ValidateDashboardBeforeSave(cmd *m.ValidateDashboardBeforeSaveCommand) (err error) { + cmd.Result = &m.ValidateDashboardBeforeSaveResult{} return inTransaction(func(sess *DBSession) error { if err = getExistingDashboardByIdOrUidForUpdate(sess, cmd); err != nil { return err @@ -611,3 +615,27 @@ func ValidateDashboardBeforeSave(cmd *m.ValidateDashboardBeforeSaveCommand) (err return nil }) } + +func HasEditPermissionInFolders(query *m.HasEditPermissionInFoldersQuery) error { + if query.SignedInUser.HasRole(m.ROLE_EDITOR) { + query.Result = true + return nil + } + + builder := &SqlBuilder{} + builder.Write("SELECT COUNT(dashboard.id) AS count FROM dashboard WHERE dashboard.org_id = ? AND dashboard.is_folder = ?", query.SignedInUser.OrgId, dialect.BooleanStr(true)) + builder.writeDashboardPermissionFilter(query.SignedInUser, m.PERMISSION_EDIT) + + type folderCount struct { + Count int64 + } + + resp := make([]*folderCount, 0) + if err := x.Sql(builder.GetSqlString(), builder.params...).Find(&resp); err != nil { + return err + } + + query.Result = len(resp) > 0 && resp[0].Count > 0 + + return nil +} diff --git a/pkg/services/sqlstore/dashboard_acl.go b/pkg/services/sqlstore/dashboard_acl.go index ae91d1d41f3..0b195c4562b 100644 --- a/pkg/services/sqlstore/dashboard_acl.go +++ b/pkg/services/sqlstore/dashboard_acl.go @@ -35,10 +35,8 @@ func UpdateDashboardAcl(cmd *m.UpdateDashboardAclCommand) error { // Update dashboard HasAcl flag dashboard := m.Dashboard{HasAcl: true} - if _, err := sess.Cols("has_acl").Where("id=?", cmd.DashboardId).Update(&dashboard); err != nil { - return err - } - return nil + _, err = sess.Cols("has_acl").Where("id=?", cmd.DashboardId).Update(&dashboard) + return err }) } @@ -69,7 +67,8 @@ func GetDashboardAclInfoList(query *m.GetDashboardAclInfoListQuery) error { '' as title, '' as slug, '' as uid,` + - falseStr + ` AS is_folder + falseStr + ` AS is_folder,` + + falseStr + ` AS inherited FROM dashboard_acl as da WHERE da.dashboard_id = -1` query.Result = make([]*m.DashboardAclInfoDTO, 0) @@ -92,10 +91,12 @@ func GetDashboardAclInfoList(query *m.GetDashboardAclInfoListQuery) error { u.login AS user_login, u.email AS user_email, ug.name AS team, + ug.email AS team_email, d.title, d.slug, d.uid, - d.is_folder + d.is_folder, + CASE WHEN (da.dashboard_id = -1 AND d.folder_id > 0) OR da.dashboard_id = d.folder_id THEN ` + dialect.BooleanStr(true) + ` ELSE ` + falseStr + ` END AS inherited FROM dashboard as d LEFT JOIN dashboard folder on folder.id = d.folder_id LEFT JOIN dashboard_acl AS da ON diff --git a/pkg/services/sqlstore/dashboard_acl_test.go b/pkg/services/sqlstore/dashboard_acl_test.go index 8fbb9c0d813..a034a0565a3 100644 --- a/pkg/services/sqlstore/dashboard_acl_test.go +++ b/pkg/services/sqlstore/dashboard_acl_test.go @@ -26,6 +26,22 @@ func TestDashboardAclDataAccess(t *testing.T) { }) Convey("Given dashboard folder with default permissions", func() { + Convey("When reading folder acl should 
include default acl", func() { + query := m.GetDashboardAclInfoListQuery{DashboardId: savedFolder.Id, OrgId: 1} + + err := GetDashboardAclInfoList(&query) + So(err, ShouldBeNil) + + So(len(query.Result), ShouldEqual, 2) + defaultPermissionsId := -1 + So(query.Result[0].DashboardId, ShouldEqual, defaultPermissionsId) + So(*query.Result[0].Role, ShouldEqual, m.ROLE_VIEWER) + So(query.Result[0].Inherited, ShouldBeFalse) + So(query.Result[1].DashboardId, ShouldEqual, defaultPermissionsId) + So(*query.Result[1].Role, ShouldEqual, m.ROLE_EDITOR) + So(query.Result[1].Inherited, ShouldBeFalse) + }) + Convey("When reading dashboard acl should include acl for parent folder", func() { query := m.GetDashboardAclInfoListQuery{DashboardId: childDash.Id, OrgId: 1} @@ -36,8 +52,10 @@ func TestDashboardAclDataAccess(t *testing.T) { defaultPermissionsId := -1 So(query.Result[0].DashboardId, ShouldEqual, defaultPermissionsId) So(*query.Result[0].Role, ShouldEqual, m.ROLE_VIEWER) + So(query.Result[0].Inherited, ShouldBeTrue) So(query.Result[1].DashboardId, ShouldEqual, defaultPermissionsId) So(*query.Result[1].Role, ShouldEqual, m.ROLE_EDITOR) + So(query.Result[1].Inherited, ShouldBeTrue) }) }) @@ -94,7 +112,9 @@ func TestDashboardAclDataAccess(t *testing.T) { So(len(query.Result), ShouldEqual, 2) So(query.Result[0].DashboardId, ShouldEqual, savedFolder.Id) + So(query.Result[0].Inherited, ShouldBeTrue) So(query.Result[1].DashboardId, ShouldEqual, childDash.Id) + So(query.Result[1].Inherited, ShouldBeFalse) }) }) }) @@ -118,9 +138,12 @@ func TestDashboardAclDataAccess(t *testing.T) { So(len(query.Result), ShouldEqual, 3) So(query.Result[0].DashboardId, ShouldEqual, defaultPermissionsId) So(*query.Result[0].Role, ShouldEqual, m.ROLE_VIEWER) + So(query.Result[0].Inherited, ShouldBeTrue) So(query.Result[1].DashboardId, ShouldEqual, defaultPermissionsId) So(*query.Result[1].Role, ShouldEqual, m.ROLE_EDITOR) + So(query.Result[1].Inherited, ShouldBeTrue) So(query.Result[2].DashboardId, ShouldEqual, childDash.Id) + So(query.Result[2].Inherited, ShouldBeFalse) }) }) @@ -131,6 +154,7 @@ func TestDashboardAclDataAccess(t *testing.T) { DashboardId: savedFolder.Id, Permission: m.PERMISSION_EDIT, }) + So(err, ShouldBeNil) q1 := &m.GetDashboardAclInfoListQuery{DashboardId: savedFolder.Id, OrgId: 1} err = GetDashboardAclInfoList(q1) @@ -209,8 +233,10 @@ func TestDashboardAclDataAccess(t *testing.T) { defaultPermissionsId := -1 So(query.Result[0].DashboardId, ShouldEqual, defaultPermissionsId) So(*query.Result[0].Role, ShouldEqual, m.ROLE_VIEWER) + So(query.Result[0].Inherited, ShouldBeFalse) So(query.Result[1].DashboardId, ShouldEqual, defaultPermissionsId) So(*query.Result[1].Role, ShouldEqual, m.ROLE_EDITOR) + So(query.Result[1].Inherited, ShouldBeFalse) }) }) }) diff --git a/pkg/services/sqlstore/dashboard_folder_test.go b/pkg/services/sqlstore/dashboard_folder_test.go index 4c92c097931..cdd107c3e90 100644 --- a/pkg/services/sqlstore/dashboard_folder_test.go +++ b/pkg/services/sqlstore/dashboard_folder_test.go @@ -221,7 +221,6 @@ func TestDashboardFolderDataAccess(t *testing.T) { }) Convey("Given two dashboard folders", func() { - folder1 := insertTestDashboard("1 test dash folder", 1, 0, true, "prod") folder2 := insertTestDashboard("2 test dash folder", 1, 0, true, "prod") insertTestDashboard("folder in another org", 2, 0, true, "prod") @@ -264,6 +263,15 @@ func TestDashboardFolderDataAccess(t *testing.T) { So(query.Result[1].DashboardId, ShouldEqual, folder2.Id) So(query.Result[1].Permission, ShouldEqual, 
m.PERMISSION_ADMIN) }) + + Convey("should have edit permission in folders", func() { + query := &m.HasEditPermissionInFoldersQuery{ + SignedInUser: &m.SignedInUser{UserId: adminUser.Id, OrgId: 1, OrgRole: m.ROLE_ADMIN}, + } + err := HasEditPermissionInFolders(query) + So(err, ShouldBeNil) + So(query.Result, ShouldBeTrue) + }) }) Convey("Editor users", func() { @@ -310,6 +318,14 @@ func TestDashboardFolderDataAccess(t *testing.T) { So(query.Result[0].Id, ShouldEqual, folder2.Id) }) + Convey("should have edit permission in folders", func() { + query := &m.HasEditPermissionInFoldersQuery{ + SignedInUser: &m.SignedInUser{UserId: editorUser.Id, OrgId: 1, OrgRole: m.ROLE_EDITOR}, + } + err := HasEditPermissionInFolders(query) + So(err, ShouldBeNil) + So(query.Result, ShouldBeTrue) + }) }) Convey("Viewer users", func() { @@ -353,6 +369,41 @@ func TestDashboardFolderDataAccess(t *testing.T) { So(len(query.Result), ShouldEqual, 1) So(query.Result[0].Id, ShouldEqual, folder1.Id) }) + + Convey("should not have edit permission in folders", func() { + query := &m.HasEditPermissionInFoldersQuery{ + SignedInUser: &m.SignedInUser{UserId: viewerUser.Id, OrgId: 1, OrgRole: m.ROLE_VIEWER}, + } + err := HasEditPermissionInFolders(query) + So(err, ShouldBeNil) + So(query.Result, ShouldBeFalse) + }) + + Convey("and admin permission is given for user with org role viewer in one dashboard folder", func() { + testHelperUpdateDashboardAcl(folder1.Id, m.DashboardAcl{DashboardId: folder1.Id, OrgId: 1, UserId: viewerUser.Id, Permission: m.PERMISSION_ADMIN}) + + Convey("should have edit permission in folders", func() { + query := &m.HasEditPermissionInFoldersQuery{ + SignedInUser: &m.SignedInUser{UserId: viewerUser.Id, OrgId: 1, OrgRole: m.ROLE_VIEWER}, + } + err := HasEditPermissionInFolders(query) + So(err, ShouldBeNil) + So(query.Result, ShouldBeTrue) + }) + }) + + Convey("and edit permission is given for user with org role viewer in one dashboard folder", func() { + testHelperUpdateDashboardAcl(folder1.Id, m.DashboardAcl{DashboardId: folder1.Id, OrgId: 1, UserId: viewerUser.Id, Permission: m.PERMISSION_EDIT}) + + Convey("should have edit permission in folders", func() { + query := &m.HasEditPermissionInFoldersQuery{ + SignedInUser: &m.SignedInUser{UserId: viewerUser.Id, OrgId: 1, OrgRole: m.ROLE_VIEWER}, + } + err := HasEditPermissionInFolders(query) + So(err, ShouldBeNil) + So(query.Result, ShouldBeTrue) + }) + }) }) }) }) diff --git a/pkg/services/sqlstore/dashboard_provisioning.go b/pkg/services/sqlstore/dashboard_provisioning.go index 69409c3b873..33fbb01c5b7 100644 --- a/pkg/services/sqlstore/dashboard_provisioning.go +++ b/pkg/services/sqlstore/dashboard_provisioning.go @@ -8,6 +8,7 @@ import ( func init() { bus.AddHandler("sql", GetProvisionedDashboardDataQuery) bus.AddHandler("sql", SaveProvisionedDashboard) + bus.AddHandler("sql", GetProvisionedDataByDashboardId) } type DashboardExtras struct { @@ -17,6 +18,19 @@ type DashboardExtras struct { Value string } +func GetProvisionedDataByDashboardId(cmd *models.IsDashboardProvisionedQuery) error { + result := &models.DashboardProvisioning{} + + exist, err := x.Where("dashboard_id = ?", cmd.DashboardId).Get(result) + if err != nil { + return err + } + + cmd.Result = exist + + return nil +} + func SaveProvisionedDashboard(cmd *models.SaveProvisionedDashboardCommand) error { return inTransaction(func(sess *DBSession) error { err := saveDashboard(sess, cmd.DashboardCmd) diff --git a/pkg/services/sqlstore/dashboard_provisioning_test.go 
b/pkg/services/sqlstore/dashboard_provisioning_test.go index b752173b67d..7ef45df3152 100644 --- a/pkg/services/sqlstore/dashboard_provisioning_test.go +++ b/pkg/services/sqlstore/dashboard_provisioning_test.go @@ -50,6 +50,23 @@ func TestDashboardProvisioningTest(t *testing.T) { So(query.Result[0].DashboardId, ShouldEqual, dashId) So(query.Result[0].Updated, ShouldEqual, now.Unix()) }) + + Convey("Can query for one provisioned dashboard", func() { + query := &models.IsDashboardProvisionedQuery{DashboardId: cmd.Result.Id} + + err := GetProvisionedDataByDashboardId(query) + So(err, ShouldBeNil) + + So(query.Result, ShouldBeTrue) + }) + + Convey("Can query for non-provisioned dashboard", func() { + query := &models.IsDashboardProvisionedQuery{DashboardId: 3000} + + err := GetProvisionedDataByDashboardId(query) + So(err, ShouldBeNil) + So(query.Result, ShouldBeFalse) + }) }) }) } diff --git a/pkg/services/sqlstore/dashboard_service_integration_test.go b/pkg/services/sqlstore/dashboard_service_integration_test.go index d005270c33c..a9658f7ab76 100644 --- a/pkg/services/sqlstore/dashboard_service_integration_test.go +++ b/pkg/services/sqlstore/dashboard_service_integration_test.go @@ -19,7 +19,6 @@ func TestIntegratedDashboardService(t *testing.T) { var testOrgId int64 = 1 Convey("Given saved folders and dashboards in organization A", func() { - bus.AddHandler("test", func(cmd *models.ValidateDashboardAlertsCommand) error { return nil }) @@ -28,6 +27,11 @@ func TestIntegratedDashboardService(t *testing.T) { return nil }) + bus.AddHandler("test", func(cmd *models.IsDashboardProvisionedQuery) error { + cmd.Result = false + return nil + }) + savedFolder := saveTestFolder("Saved folder", testOrgId) savedDashInFolder := saveTestDashboard("Saved dash in folder", testOrgId, savedFolder.Id) saveTestDashboard("Other saved dash in folder", testOrgId, savedFolder.Id) @@ -74,7 +78,7 @@ func TestIntegratedDashboardService(t *testing.T) { Convey("Given organization B", func() { var otherOrgId int64 = 2 - Convey("When saving a dashboard with id that are saved in organization A", func() { + Convey("When creating a dashboard with same id as dashboard in organization A", func() { cmd := models.SaveDashboardCommand{ OrgId: otherOrgId, Dashboard: simplejson.NewFromAny(map[string]interface{}{ @@ -93,7 +97,7 @@ func TestIntegratedDashboardService(t *testing.T) { }) permissionScenario("Given user has permission to save", true, func(sc *dashboardPermissionScenarioContext) { - Convey("When saving a dashboard with uid that are saved in organization A", func() { + Convey("When creating a dashboard with same uid as dashboard in organization A", func() { var otherOrgId int64 = 2 cmd := models.SaveDashboardCommand{ OrgId: otherOrgId, @@ -106,7 +110,7 @@ func TestIntegratedDashboardService(t *testing.T) { res := callSaveWithResult(cmd) - Convey("It should create dashboard in other organization", func() { + Convey("It should create a new dashboard in organization B", func() { So(res, ShouldNotBeNil) query := models.GetDashboardQuery{OrgId: otherOrgId, Uid: savedDashInFolder.Uid} @@ -126,7 +130,7 @@ func TestIntegratedDashboardService(t *testing.T) { permissionScenario("Given user has no permission to save", false, func(sc *dashboardPermissionScenarioContext) { - Convey("When trying to create a new dashboard in the General folder", func() { + Convey("When creating a new dashboard in the General folder", func() { cmd := models.SaveDashboardCommand{ OrgId: testOrgId, Dashboard: simplejson.NewFromAny(map[string]interface{}{ @@
-138,7 +142,7 @@ func TestIntegratedDashboardService(t *testing.T) { err := callSaveWithError(cmd) - Convey("It should call dashboard guardian with correct arguments and result in access denied error", func() { + Convey("It should create dashboard guardian for General Folder with correct arguments and result in access denied error", func() { So(err, ShouldNotBeNil) So(err, ShouldEqual, models.ErrDashboardUpdateAccessDenied) @@ -148,7 +152,7 @@ func TestIntegratedDashboardService(t *testing.T) { }) }) - Convey("When trying to create a new dashboard in other folder", func() { + Convey("When creating a new dashboard in other folder", func() { cmd := models.SaveDashboardCommand{ OrgId: testOrgId, Dashboard: simplejson.NewFromAny(map[string]interface{}{ @@ -161,7 +165,7 @@ func TestIntegratedDashboardService(t *testing.T) { err := callSaveWithError(cmd) - Convey("It should call dashboard guardian with correct arguments and rsult in access denied error", func() { + Convey("It should create dashboard guardian for other folder with correct arguments and result in access denied error", func() { So(err, ShouldNotBeNil) So(err, ShouldEqual, models.ErrDashboardUpdateAccessDenied) @@ -171,7 +175,54 @@ func TestIntegratedDashboardService(t *testing.T) { }) }) - Convey("When trying to update a dashboard by existing id in the General folder", func() { + Convey("When creating a new dashboard by existing title in folder", func() { + cmd := models.SaveDashboardCommand{ + OrgId: testOrgId, + Dashboard: simplejson.NewFromAny(map[string]interface{}{ + "title": savedDashInFolder.Title, + }), + FolderId: savedFolder.Id, + UserId: 10000, + Overwrite: true, + } + + err := callSaveWithError(cmd) + + Convey("It should create dashboard guardian for folder with correct arguments and result in access denied error", func() { + So(err, ShouldNotBeNil) + So(err, ShouldEqual, models.ErrDashboardUpdateAccessDenied) + + So(sc.dashboardGuardianMock.DashId, ShouldEqual, savedFolder.Id) + So(sc.dashboardGuardianMock.OrgId, ShouldEqual, cmd.OrgId) + So(sc.dashboardGuardianMock.User.UserId, ShouldEqual, cmd.UserId) + }) + }) + + Convey("When creating a new dashboard by existing uid in folder", func() { + cmd := models.SaveDashboardCommand{ + OrgId: testOrgId, + Dashboard: simplejson.NewFromAny(map[string]interface{}{ + "uid": savedDashInFolder.Uid, + "title": "New dash", + }), + FolderId: savedFolder.Id, + UserId: 10000, + Overwrite: true, + } + + err := callSaveWithError(cmd) + + Convey("It should create dashboard guardian for folder with correct arguments and result in access denied error", func() { + So(err, ShouldNotBeNil) + So(err, ShouldEqual, models.ErrDashboardUpdateAccessDenied) + + So(sc.dashboardGuardianMock.DashId, ShouldEqual, savedFolder.Id) + So(sc.dashboardGuardianMock.OrgId, ShouldEqual, cmd.OrgId) + So(sc.dashboardGuardianMock.User.UserId, ShouldEqual, cmd.UserId) + }) + }) + + Convey("When updating a dashboard by existing id in the General folder", func() { cmd := models.SaveDashboardCommand{ OrgId: testOrgId, Dashboard: simplejson.NewFromAny(map[string]interface{}{ @@ -185,7 +236,7 @@ func TestIntegratedDashboardService(t *testing.T) { err := callSaveWithError(cmd) - Convey("It should call dashboard guardian with correct arguments and result in access denied error", func() { + Convey("It should create dashboard guardian for dashboard with correct arguments and result in access denied error", func() { So(err, ShouldNotBeNil) So(err, ShouldEqual, models.ErrDashboardUpdateAccessDenied) @@ -195,7 +246,7 @@ func
TestIntegratedDashboardService(t *testing.T) { }) }) - Convey("When trying to update a dashboard by existing id in other folder", func() { + Convey("When updating a dashboard by existing id in other folder", func() { cmd := models.SaveDashboardCommand{ OrgId: testOrgId, Dashboard: simplejson.NewFromAny(map[string]interface{}{ @@ -209,7 +260,7 @@ func TestIntegratedDashboardService(t *testing.T) { err := callSaveWithError(cmd) - Convey("It should call dashboard guardian with correct arguments and result in access denied error", func() { + Convey("It should create dashboard guardian for dashboard with correct arguments and result in access denied error", func() { So(err, ShouldNotBeNil) So(err, ShouldEqual, models.ErrDashboardUpdateAccessDenied) @@ -218,6 +269,102 @@ func TestIntegratedDashboardService(t *testing.T) { So(sc.dashboardGuardianMock.User.UserId, ShouldEqual, cmd.UserId) }) }) + + Convey("When moving a dashboard by existing id to other folder from General folder", func() { + cmd := models.SaveDashboardCommand{ + OrgId: testOrgId, + Dashboard: simplejson.NewFromAny(map[string]interface{}{ + "id": savedDashInGeneralFolder.Id, + "title": "Dash", + }), + FolderId: otherSavedFolder.Id, + UserId: 10000, + Overwrite: true, + } + + err := callSaveWithError(cmd) + + Convey("It should create dashboard guardian for other folder with correct arguments and result in access denied error", func() { + So(err, ShouldNotBeNil) + So(err, ShouldEqual, models.ErrDashboardUpdateAccessDenied) + + So(sc.dashboardGuardianMock.DashId, ShouldEqual, otherSavedFolder.Id) + So(sc.dashboardGuardianMock.OrgId, ShouldEqual, cmd.OrgId) + So(sc.dashboardGuardianMock.User.UserId, ShouldEqual, cmd.UserId) + }) + }) + + Convey("When moving a dashboard by existing id to the General folder from other folder", func() { + cmd := models.SaveDashboardCommand{ + OrgId: testOrgId, + Dashboard: simplejson.NewFromAny(map[string]interface{}{ + "id": savedDashInFolder.Id, + "title": "Dash", + }), + FolderId: 0, + UserId: 10000, + Overwrite: true, + } + + err := callSaveWithError(cmd) + + Convey("It should create dashboard guardian for General folder with correct arguments and result in access denied error", func() { + So(err, ShouldNotBeNil) + So(err, ShouldEqual, models.ErrDashboardUpdateAccessDenied) + + So(sc.dashboardGuardianMock.DashId, ShouldEqual, 0) + So(sc.dashboardGuardianMock.OrgId, ShouldEqual, cmd.OrgId) + So(sc.dashboardGuardianMock.User.UserId, ShouldEqual, cmd.UserId) + }) + }) + + Convey("When moving a dashboard by existing uid to other folder from General folder", func() { + cmd := models.SaveDashboardCommand{ + OrgId: testOrgId, + Dashboard: simplejson.NewFromAny(map[string]interface{}{ + "uid": savedDashInGeneralFolder.Uid, + "title": "Dash", + }), + FolderId: otherSavedFolder.Id, + UserId: 10000, + Overwrite: true, + } + + err := callSaveWithError(cmd) + + Convey("It should create dashboard guardian for other folder with correct arguments and result in access denied error", func() { + So(err, ShouldNotBeNil) + So(err, ShouldEqual, models.ErrDashboardUpdateAccessDenied) + + So(sc.dashboardGuardianMock.DashId, ShouldEqual, otherSavedFolder.Id) + So(sc.dashboardGuardianMock.OrgId, ShouldEqual, cmd.OrgId) + So(sc.dashboardGuardianMock.User.UserId, ShouldEqual, cmd.UserId) + }) + }) + + Convey("When moving a dashboard by existing uid to the General folder from other folder", func() { + cmd := models.SaveDashboardCommand{ + OrgId: testOrgId, + Dashboard: simplejson.NewFromAny(map[string]interface{}{ + "uid": 
savedDashInFolder.Uid, + "title": "Dash", + }), + FolderId: 0, + UserId: 10000, + Overwrite: true, + } + + err := callSaveWithError(cmd) + + Convey("It should create dashboard guardian for General folder with correct arguments and result in access denied error", func() { + So(err, ShouldNotBeNil) + So(err, ShouldEqual, models.ErrDashboardUpdateAccessDenied) + + So(sc.dashboardGuardianMock.DashId, ShouldEqual, 0) + So(sc.dashboardGuardianMock.OrgId, ShouldEqual, cmd.OrgId) + So(sc.dashboardGuardianMock.User.UserId, ShouldEqual, cmd.UserId) + }) + }) }) // Given user has permission to save @@ -668,7 +815,7 @@ func TestIntegratedDashboardService(t *testing.T) { }) }) - Convey("When trying to update existing folder to a dashboard using id", func() { + Convey("When updating existing folder to a dashboard using id", func() { cmd := models.SaveDashboardCommand{ OrgId: 1, Dashboard: simplejson.NewFromAny(map[string]interface{}{ @@ -687,7 +834,7 @@ func TestIntegratedDashboardService(t *testing.T) { }) }) - Convey("When trying to update existing dashboard to a folder using id", func() { + Convey("When updating existing dashboard to a folder using id", func() { cmd := models.SaveDashboardCommand{ OrgId: 1, Dashboard: simplejson.NewFromAny(map[string]interface{}{ @@ -706,7 +853,7 @@ func TestIntegratedDashboardService(t *testing.T) { }) }) - Convey("When trying to update existing folder to a dashboard using uid", func() { + Convey("When updating existing folder to a dashboard using uid", func() { cmd := models.SaveDashboardCommand{ OrgId: 1, Dashboard: simplejson.NewFromAny(map[string]interface{}{ @@ -725,7 +872,7 @@ func TestIntegratedDashboardService(t *testing.T) { }) }) - Convey("When trying to update existing dashboard to a folder using uid", func() { + Convey("When updating existing dashboard to a folder using uid", func() { cmd := models.SaveDashboardCommand{ OrgId: 1, Dashboard: simplejson.NewFromAny(map[string]interface{}{ @@ -744,7 +891,7 @@ func TestIntegratedDashboardService(t *testing.T) { }) }) - Convey("When trying to update existing folder to a dashboard using title", func() { + Convey("When updating existing folder to a dashboard using title", func() { cmd := models.SaveDashboardCommand{ OrgId: 1, Dashboard: simplejson.NewFromAny(map[string]interface{}{ @@ -762,7 +909,7 @@ func TestIntegratedDashboardService(t *testing.T) { }) }) - Convey("When trying to update existing dashboard to a folder using title", func() { + Convey("When updating existing dashboard to a folder using title", func() { cmd := models.SaveDashboardCommand{ OrgId: 1, Dashboard: simplejson.NewFromAny(map[string]interface{}{ @@ -850,23 +997,6 @@ func callSaveWithError(cmd models.SaveDashboardCommand) error { return err } -func dashboardServiceScenario(desc string, mock *guardian.FakeDashboardGuardian, fn scenarioFunc) { - Convey(desc, func() { - origNewDashboardGuardian := guardian.New - guardian.MockDashboardGuardian(mock) - - sc := &scenarioContext{ - dashboardGuardianMock: mock, - } - - defer func() { - guardian.New = origNewDashboardGuardian - }() - - fn(sc) - }) -} - func saveTestDashboard(title string, orgId int64, folderId int64) *models.Dashboard { cmd := models.SaveDashboardCommand{ OrgId: orgId, diff --git a/pkg/services/sqlstore/dashboard_snapshot.go b/pkg/services/sqlstore/dashboard_snapshot.go index 9e82bbb2c83..2e2ea8a4783 100644 --- a/pkg/services/sqlstore/dashboard_snapshot.go +++ b/pkg/services/sqlstore/dashboard_snapshot.go @@ -80,7 +80,7 @@ func GetDashboardSnapshot(query 
*m.GetDashboardSnapshotQuery) error { if err != nil { return err - } else if has == false { + } else if !has { return m.ErrDashboardSnapshotNotFound } diff --git a/pkg/services/sqlstore/dashboard_test.go b/pkg/services/sqlstore/dashboard_test.go index 9124a686236..6d7c7a93e47 100644 --- a/pkg/services/sqlstore/dashboard_test.go +++ b/pkg/services/sqlstore/dashboard_test.go @@ -104,9 +104,8 @@ func TestDashboardDataAccess(t *testing.T) { timesCalled += 1 if timesCalled <= 2 { return savedDash.Uid - } else { - return util.GenerateShortUid() } + return util.GenerateShortUid() } cmd := m.SaveDashboardCommand{ OrgId: 1, diff --git a/pkg/services/sqlstore/migrations/annotation_mig.go b/pkg/services/sqlstore/migrations/annotation_mig.go index 8d2bf94bc42..7fac0001e5b 100644 --- a/pkg/services/sqlstore/migrations/annotation_mig.go +++ b/pkg/services/sqlstore/migrations/annotation_mig.go @@ -90,4 +90,29 @@ func addAnnotationMig(mg *Migrator) { Sqlite(updateTextFieldSql). Postgres(updateTextFieldSql). Mysql(updateTextFieldSql)) + + // + // Add a 'created' & 'updated' column + // + mg.AddMigration("Add created time to annotation table", NewAddColumnMigration(table, &Column{ + Name: "created", Type: DB_BigInt, Nullable: true, Default: "0", + })) + mg.AddMigration("Add updated time to annotation table", NewAddColumnMigration(table, &Column{ + Name: "updated", Type: DB_BigInt, Nullable: true, Default: "0", + })) + mg.AddMigration("Add index for created in annotation table", NewAddIndexMigration(table, &Index{ + Cols: []string{"org_id", "created"}, Type: IndexType, + })) + mg.AddMigration("Add index for updated in annotation table", NewAddIndexMigration(table, &Index{ + Cols: []string{"org_id", "updated"}, Type: IndexType, + })) + + // + // Convert epoch saved as seconds to milliseconds + // + updateEpochSql := "UPDATE annotation SET epoch = (epoch*1000) where epoch < 9999999999" + mg.AddMigration("Convert existing annotations from seconds to milliseconds", new(RawSqlMigration). + Sqlite(updateEpochSql). + Postgres(updateEpochSql). + Mysql(updateEpochSql)) } diff --git a/pkg/services/sqlstore/migrations/migrations.go b/pkg/services/sqlstore/migrations/migrations.go index 282f98e7318..58ac6256f41 100644 --- a/pkg/services/sqlstore/migrations/migrations.go +++ b/pkg/services/sqlstore/migrations/migrations.go @@ -30,6 +30,7 @@ func AddMigrations(mg *Migrator) { addDashboardAclMigrations(mg) addTagMigration(mg) addLoginAttemptMigrations(mg) + addUserAuthMigrations(mg) } func addMigrationLogMigrations(mg *Migrator) { diff --git a/pkg/services/sqlstore/migrations/migrations_test.go b/pkg/services/sqlstore/migrations/migrations_test.go index 51aea0bbdef..53b398124af 100644 --- a/pkg/services/sqlstore/migrations/migrations_test.go +++ b/pkg/services/sqlstore/migrations/migrations_test.go @@ -8,11 +8,8 @@ import ( "github.com/grafana/grafana/pkg/services/sqlstore/sqlutil" . 
"github.com/smartystreets/goconvey/convey" - //"github.com/grafana/grafana/pkg/log" ) -var indexTypes = []string{"Unknown", "INDEX", "UNIQUE INDEX"} - func TestMigrations(t *testing.T) { testDBs := []sqlutil.TestDB{ sqlutil.TestDB_Sqlite3, @@ -30,7 +27,7 @@ func TestMigrations(t *testing.T) { sqlutil.CleanDB(x) - has, err := x.SQL(sql).Get(&r) + _, err = x.SQL(sql).Get(&r) So(err, ShouldNotBeNil) mg := NewMigrator(x) @@ -39,7 +36,7 @@ func TestMigrations(t *testing.T) { err = mg.Start() So(err, ShouldBeNil) - has, err = x.SQL(sql).Get(&r) + has, err := x.SQL(sql).Get(&r) So(err, ShouldBeNil) So(has, ShouldBeTrue) expectedMigrations := mg.MigrationsCount() - 2 //we currently skip to migrations. We should rewrite skipped migrations to write in the log as well. until then we have to keep this diff --git a/pkg/services/sqlstore/migrations/stats_mig.go b/pkg/services/sqlstore/migrations/stats_mig.go index 7e10eeb9f90..c47b8202c53 100644 --- a/pkg/services/sqlstore/migrations/stats_mig.go +++ b/pkg/services/sqlstore/migrations/stats_mig.go @@ -2,37 +2,38 @@ package migrations import . "github.com/grafana/grafana/pkg/services/sqlstore/migrator" -func addStatsMigrations(mg *Migrator) { - statTable := Table{ - Name: "stat", - Columns: []*Column{ - {Name: "id", Type: DB_Int, IsPrimaryKey: true, IsAutoIncrement: true}, - {Name: "metric", Type: DB_Varchar, Length: 20, Nullable: false}, - {Name: "type", Type: DB_Int, Nullable: false}, - }, - Indices: []*Index{ - {Cols: []string{"metric"}, Type: UniqueIndex}, - }, - } - - // create table - mg.AddMigration("create stat table", NewAddTableMigration(statTable)) - - // create indices - mg.AddMigration("add index stat.metric", NewAddIndexMigration(statTable, statTable.Indices[0])) - - statValue := Table{ - Name: "stat_value", - Columns: []*Column{ - {Name: "id", Type: DB_Int, IsPrimaryKey: true, IsAutoIncrement: true}, - {Name: "value", Type: DB_Double, Nullable: false}, - {Name: "time", Type: DB_DateTime, Nullable: false}, - }, - } - - // create table - mg.AddMigration("create stat_value table", NewAddTableMigration(statValue)) -} +// commented out because of the deadcode CI check +//func addStatsMigrations(mg *Migrator) { +// statTable := Table{ +// Name: "stat", +// Columns: []*Column{ +// {Name: "id", Type: DB_Int, IsPrimaryKey: true, IsAutoIncrement: true}, +// {Name: "metric", Type: DB_Varchar, Length: 20, Nullable: false}, +// {Name: "type", Type: DB_Int, Nullable: false}, +// }, +// Indices: []*Index{ +// {Cols: []string{"metric"}, Type: UniqueIndex}, +// }, +// } +// +// // create table +// mg.AddMigration("create stat table", NewAddTableMigration(statTable)) +// +// // create indices +// mg.AddMigration("add index stat.metric", NewAddIndexMigration(statTable, statTable.Indices[0])) +// +// statValue := Table{ +// Name: "stat_value", +// Columns: []*Column{ +// {Name: "id", Type: DB_Int, IsPrimaryKey: true, IsAutoIncrement: true}, +// {Name: "value", Type: DB_Double, Nullable: false}, +// {Name: "time", Type: DB_DateTime, Nullable: false}, +// }, +// } +// +// // create table +// mg.AddMigration("create stat_value table", NewAddTableMigration(statValue)) +//} func addTestDataMigrations(mg *Migrator) { testData := Table{ diff --git a/pkg/services/sqlstore/migrations/user_auth_mig.go b/pkg/services/sqlstore/migrations/user_auth_mig.go new file mode 100644 index 00000000000..953883376c3 --- /dev/null +++ b/pkg/services/sqlstore/migrations/user_auth_mig.go @@ -0,0 +1,29 @@ +package migrations + +import . 
"github.com/grafana/grafana/pkg/services/sqlstore/migrator" + +func addUserAuthMigrations(mg *Migrator) { + userAuthV1 := Table{ + Name: "user_auth", + Columns: []*Column{ + {Name: "id", Type: DB_BigInt, IsPrimaryKey: true, IsAutoIncrement: true}, + {Name: "user_id", Type: DB_BigInt, Nullable: false}, + {Name: "auth_module", Type: DB_NVarchar, Length: 190, Nullable: false}, + {Name: "auth_id", Type: DB_NVarchar, Length: 100, Nullable: false}, + {Name: "created", Type: DB_DateTime, Nullable: false}, + }, + Indices: []*Index{ + {Cols: []string{"auth_module", "auth_id"}}, + }, + } + + // create table + mg.AddMigration("create user auth table", NewAddTableMigration(userAuthV1)) + // add indices + addTableIndicesMigrations(mg, "v1", userAuthV1) + + mg.AddMigration("alter user_auth.auth_id to length 190", new(RawSqlMigration). + Sqlite("SELECT 0 WHERE 0;"). + Postgres("ALTER TABLE user_auth ALTER COLUMN auth_id TYPE VARCHAR(190);"). + Mysql("ALTER TABLE user_auth MODIFY auth_id VARCHAR(190);")) +} diff --git a/pkg/services/sqlstore/migrator/dialect.go b/pkg/services/sqlstore/migrator/dialect.go index 064b5981063..dadc7248844 100644 --- a/pkg/services/sqlstore/migrator/dialect.go +++ b/pkg/services/sqlstore/migrator/dialect.go @@ -84,8 +84,7 @@ func (db *BaseDialect) DateTimeFunc(value string) string { } func (b *BaseDialect) CreateTableSql(table *Table) string { - var sql string - sql = "CREATE TABLE IF NOT EXISTS " + sql := "CREATE TABLE IF NOT EXISTS " sql += b.dialect.Quote(table.Name) + " (\n" pkList := table.PrimaryKeys @@ -162,8 +161,7 @@ func (db *BaseDialect) RenameTable(oldName string, newName string) string { func (db *BaseDialect) DropIndexSql(tableName string, index *Index) string { quote := db.dialect.Quote - var name string - name = index.XName(tableName) + name := index.XName(tableName) return fmt.Sprintf("DROP INDEX %v ON %s", quote(name), quote(tableName)) } diff --git a/pkg/services/sqlstore/migrator/migrations.go b/pkg/services/sqlstore/migrator/migrations.go index 2fec8825fa4..4e6402a14e5 100644 --- a/pkg/services/sqlstore/migrator/migrations.go +++ b/pkg/services/sqlstore/migrator/migrations.go @@ -1,7 +1,6 @@ package migrator import ( - "fmt" "strings" ) @@ -113,7 +112,7 @@ func NewDropIndexMigration(table Table, index *Index) *DropIndexMigration { func (m *DropIndexMigration) Sql(dialect Dialect) string { if m.index.Name == "" { - m.index.Name = fmt.Sprintf("%s", strings.Join(m.index.Cols, "_")) + m.index.Name = strings.Join(m.index.Cols, "_") } return dialect.DropIndexSql(m.tableName, m.index) } @@ -180,7 +179,7 @@ type CopyTableDataMigration struct { targetTable string sourceCols []string targetCols []string - colMap map[string]string + //colMap map[string]string } func NewCopyTableDataMigration(targetTable string, sourceTable string, colMap map[string]string) *CopyTableDataMigration { diff --git a/pkg/services/sqlstore/migrator/migrator.go b/pkg/services/sqlstore/migrator/migrator.go index 0fde3f27c01..cd00cb16712 100644 --- a/pkg/services/sqlstore/migrator/migrator.go +++ b/pkg/services/sqlstore/migrator/migrator.go @@ -97,17 +97,15 @@ func (mg *Migrator) Start() error { mg.Logger.Debug("Executing", "sql", sql) err := mg.inTransaction(func(sess *xorm.Session) error { - - if err := mg.exec(m, sess); err != nil { + err := mg.exec(m, sess) + if err != nil { mg.Logger.Error("Exec failed", "error", err, "sql", sql) record.Error = err.Error() sess.Insert(&record) return err - } else { - record.Success = true - sess.Insert(&record) } - + record.Success = true + 
sess.Insert(&record) return nil }) diff --git a/pkg/services/sqlstore/migrator/mysql_dialect.go b/pkg/services/sqlstore/migrator/mysql_dialect.go index 1968558dbb8..300224135f0 100644 --- a/pkg/services/sqlstore/migrator/mysql_dialect.go +++ b/pkg/services/sqlstore/migrator/mysql_dialect.go @@ -66,8 +66,8 @@ func (db *Mysql) SqlType(c *Column) string { res = c.Type } - var hasLen1 bool = (c.Length > 0) - var hasLen2 bool = (c.Length2 > 0) + var hasLen1 = (c.Length > 0) + var hasLen2 = (c.Length2 > 0) if res == DB_BigInt && !hasLen1 && !hasLen2 { c.Length = 20 diff --git a/pkg/services/sqlstore/migrator/postgres_dialect.go b/pkg/services/sqlstore/migrator/postgres_dialect.go index 8de26194411..e167aa33122 100644 --- a/pkg/services/sqlstore/migrator/postgres_dialect.go +++ b/pkg/services/sqlstore/migrator/postgres_dialect.go @@ -45,9 +45,8 @@ func (b *Postgres) Default(col *Column) string { if col.Type == DB_Bool { if col.Default == "0" { return "FALSE" - } else { - return "TRUE" } + return "TRUE" } return col.Default } @@ -92,8 +91,8 @@ func (db *Postgres) SqlType(c *Column) string { res = t } - var hasLen1 bool = (c.Length > 0) - var hasLen2 bool = (c.Length2 > 0) + var hasLen1 = (c.Length > 0) + var hasLen2 = (c.Length2 > 0) if hasLen2 { res += "(" + strconv.Itoa(c.Length) + "," + strconv.Itoa(c.Length2) + ")" } else if hasLen1 { diff --git a/pkg/services/sqlstore/migrator/types.go b/pkg/services/sqlstore/migrator/types.go index d42eba0f58a..62ec74e7b9f 100644 --- a/pkg/services/sqlstore/migrator/types.go +++ b/pkg/services/sqlstore/migrator/types.go @@ -46,7 +46,7 @@ type Index struct { func (index *Index) XName(tableName string) string { if index.Name == "" { - index.Name = fmt.Sprintf("%s", strings.Join(index.Cols, "_")) + index.Name = strings.Join(index.Cols, "_") } if !strings.HasPrefix(index.Name, "UQE_") && diff --git a/pkg/services/sqlstore/playlist.go b/pkg/services/sqlstore/playlist.go index 67720cbadb8..7b726880b9e 100644 --- a/pkg/services/sqlstore/playlist.go +++ b/pkg/services/sqlstore/playlist.go @@ -22,6 +22,9 @@ func CreatePlaylist(cmd *m.CreatePlaylistCommand) error { } _, err := x.Insert(&playlist) + if err != nil { + return err + } playlistItems := make([]m.PlaylistItem, 0) for _, item := range cmd.Items { diff --git a/pkg/services/sqlstore/plugin_setting.go b/pkg/services/sqlstore/plugin_setting.go index 172995872eb..676d26fad56 100644 --- a/pkg/services/sqlstore/plugin_setting.go +++ b/pkg/services/sqlstore/plugin_setting.go @@ -36,7 +36,7 @@ func GetPluginSettingById(query *m.GetPluginSettingByIdQuery) error { has, err := x.Get(&pluginSetting) if err != nil { return err - } else if has == false { + } else if !has { return m.ErrPluginSettingNotFound } query.Result = &pluginSetting @@ -48,6 +48,9 @@ func UpdatePluginSetting(cmd *m.UpdatePluginSettingCmd) error { var pluginSetting m.PluginSetting exists, err := sess.Where("org_id=? 
and plugin_id=?", cmd.OrgId, cmd.PluginId).Get(&pluginSetting) + if err != nil { + return err + } sess.UseBool("enabled") sess.UseBool("pinned") if !exists { @@ -72,34 +75,33 @@ func UpdatePluginSetting(cmd *m.UpdatePluginSettingCmd) error { _, err = sess.Insert(&pluginSetting) return err - } else { - for key, data := range cmd.SecureJsonData { - encryptedData, err := util.Encrypt([]byte(data), setting.SecretKey) - if err != nil { - return err - } - - pluginSetting.SecureJsonData[key] = encryptedData - } - - // add state change event on commit success - if pluginSetting.Enabled != cmd.Enabled { - sess.events = append(sess.events, &m.PluginStateChangedEvent{ - PluginId: cmd.PluginId, - OrgId: cmd.OrgId, - Enabled: cmd.Enabled, - }) - } - - pluginSetting.Updated = time.Now() - pluginSetting.Enabled = cmd.Enabled - pluginSetting.JsonData = cmd.JsonData - pluginSetting.Pinned = cmd.Pinned - pluginSetting.PluginVersion = cmd.PluginVersion - - _, err = sess.Id(pluginSetting.Id).Update(&pluginSetting) - return err } + for key, data := range cmd.SecureJsonData { + encryptedData, err := util.Encrypt([]byte(data), setting.SecretKey) + if err != nil { + return err + } + + pluginSetting.SecureJsonData[key] = encryptedData + } + + // add state change event on commit success + if pluginSetting.Enabled != cmd.Enabled { + sess.events = append(sess.events, &m.PluginStateChangedEvent{ + PluginId: cmd.PluginId, + OrgId: cmd.OrgId, + Enabled: cmd.Enabled, + }) + } + + pluginSetting.Updated = time.Now() + pluginSetting.Enabled = cmd.Enabled + pluginSetting.JsonData = cmd.JsonData + pluginSetting.Pinned = cmd.Pinned + pluginSetting.PluginVersion = cmd.PluginVersion + + _, err = sess.Id(pluginSetting.Id).Update(&pluginSetting) + return err }) } diff --git a/pkg/services/sqlstore/preferences.go b/pkg/services/sqlstore/preferences.go index 399b23f3ffa..885837764fc 100644 --- a/pkg/services/sqlstore/preferences.go +++ b/pkg/services/sqlstore/preferences.go @@ -72,6 +72,9 @@ func SavePreferences(cmd *m.SavePreferencesCommand) error { var prefs m.Preferences exists, err := sess.Where("org_id=? AND user_id=?", cmd.OrgId, cmd.UserId).Get(&prefs) + if err != nil { + return err + } if !exists { prefs = m.Preferences{ @@ -85,14 +88,13 @@ func SavePreferences(cmd *m.SavePreferencesCommand) error { } _, err = sess.Insert(&prefs) return err - } else { - prefs.HomeDashboardId = cmd.HomeDashboardId - prefs.Timezone = cmd.Timezone - prefs.Theme = cmd.Theme - prefs.Updated = time.Now() - prefs.Version += 1 - _, err := sess.Id(prefs.Id).AllCols().Update(&prefs) - return err } + prefs.HomeDashboardId = cmd.HomeDashboardId + prefs.Timezone = cmd.Timezone + prefs.Theme = cmd.Theme + prefs.Updated = time.Now() + prefs.Version += 1 + _, err = sess.Id(prefs.Id).AllCols().Update(&prefs) + return err }) } diff --git a/pkg/services/sqlstore/quota.go b/pkg/services/sqlstore/quota.go index 3db3fc2657e..539555ddc50 100644 --- a/pkg/services/sqlstore/quota.go +++ b/pkg/services/sqlstore/quota.go @@ -31,7 +31,7 @@ func GetOrgQuotaByTarget(query *m.GetOrgQuotaByTargetQuery) error { has, err := x.Get("a) if err != nil { return err - } else if has == false { + } else if !has { quota.Limit = query.Default } @@ -108,7 +108,7 @@ func UpdateOrgQuota(cmd *m.UpdateOrgQuotaCmd) error { return err } quota.Limit = cmd.Limit - if has == false { + if !has { quota.Created = time.Now() //No quota in the DB for this target, so create a new one. 
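// The plugin_setting and preferences fixes above add a previously missing error
// check after the initial Get; the quota code here already follows the same
// get-then-upsert shape: read the row, insert when absent, update when present.
// The bare pattern, with a hypothetical Quota struct over the same xorm session
// API (time import assumed):
type Quota struct { // hypothetical, trimmed to the fields the sketch touches
	Id      int64
	OrgId   int64
	Target  string
	Limit   int64
	Created time.Time
	Updated time.Time
}

func upsertQuota(sess *xorm.Session, q *Quota) error {
	has, err := sess.Where("org_id=? AND target=?", q.OrgId, q.Target).Get(q)
	if err != nil {
		return err // the early return the fixes above introduce
	}
	if !has {
		q.Created = time.Now() // no quota in the DB for this target, create a new one
		_, err = sess.Insert(q)
		return err
	}
	q.Updated = time.Now()
	_, err = sess.Id(q.Id).Update(q)
	return err
}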
if _, err := sess.Insert("a); err != nil { @@ -133,7 +133,7 @@ func GetUserQuotaByTarget(query *m.GetUserQuotaByTargetQuery) error { has, err := x.Get("a) if err != nil { return err - } else if has == false { + } else if !has { quota.Limit = query.Default } @@ -210,7 +210,7 @@ func UpdateUserQuota(cmd *m.UpdateUserQuotaCmd) error { return err } quota.Limit = cmd.Limit - if has == false { + if !has { quota.Created = time.Now() //No quota in the DB for this target, so create a new one. if _, err := sess.Insert("a); err != nil { diff --git a/pkg/services/sqlstore/sqlstore.go b/pkg/services/sqlstore/sqlstore.go index 6aace350193..cac0c54226c 100644 --- a/pkg/services/sqlstore/sqlstore.go +++ b/pkg/services/sqlstore/sqlstore.go @@ -77,7 +77,7 @@ func EnsureAdminUser() { log.Info("Created default admin user: %v", setting.AdminUser) } -func NewEngine() { +func NewEngine() *xorm.Engine { x, err := getEngine() if err != nil { @@ -91,6 +91,8 @@ func NewEngine() { sqlog.Error("Fail to initialize orm engine", "error", err) os.Exit(1) } + + return x } func SetEngine(engine *xorm.Engine) (err error) { @@ -121,7 +123,7 @@ func getEngine() (*xorm.Engine, error) { } cnnstr = fmt.Sprintf("%s:%s@%s(%s)/%s?collation=utf8mb4_unicode_ci&allowNativePasswords=true", - DbCfg.User, DbCfg.Pwd, protocol, DbCfg.Host, DbCfg.Name) + url.QueryEscape(DbCfg.User), url.QueryEscape(DbCfg.Pwd), protocol, DbCfg.Host, url.PathEscape(DbCfg.Name)) if DbCfg.SslMode == "true" || DbCfg.SslMode == "skip-verify" { tlsCert, err := makeCert("custom", DbCfg) @@ -140,13 +142,17 @@ func getEngine() (*xorm.Engine, error) { if len(fields) > 1 && len(strings.TrimSpace(fields[1])) > 0 { port = fields[1] } - if DbCfg.Pwd == "" { - DbCfg.Pwd = "''" - } - if DbCfg.User == "" { - DbCfg.User = "''" - } - cnnstr = fmt.Sprintf("user=%s password=%s host=%s port=%s dbname=%s sslmode=%s sslcert=%s sslkey=%s sslrootcert=%s", DbCfg.User, DbCfg.Pwd, host, port, DbCfg.Name, DbCfg.SslMode, DbCfg.ClientCertPath, DbCfg.ClientKeyPath, DbCfg.CaCertPath) + cnnstr = fmt.Sprintf("user='%s' password='%s' host='%s' port='%s' dbname='%s' sslmode='%s' sslcert='%s' sslkey='%s' sslrootcert='%s'", + strings.Replace(DbCfg.User, `'`, `\'`, -1), + strings.Replace(DbCfg.Pwd, `'`, `\'`, -1), + strings.Replace(host, `'`, `\'`, -1), + strings.Replace(port, `'`, `\'`, -1), + strings.Replace(DbCfg.Name, `'`, `\'`, -1), + strings.Replace(DbCfg.SslMode, `'`, `\'`, -1), + strings.Replace(DbCfg.ClientCertPath, `'`, `\'`, -1), + strings.Replace(DbCfg.ClientKeyPath, `'`, `\'`, -1), + strings.Replace(DbCfg.CaCertPath, `'`, `\'`, -1), + ) case "sqlite3": if !filepath.IsAbs(DbCfg.Path) { DbCfg.Path = filepath.Join(setting.DataPath, DbCfg.Path) @@ -166,7 +172,7 @@ func getEngine() (*xorm.Engine, error) { engine.SetMaxOpenConns(DbCfg.MaxOpenConn) engine.SetMaxIdleConns(DbCfg.MaxIdleConn) engine.SetConnMaxLifetime(time.Second * time.Duration(DbCfg.ConnMaxLifetime)) - debugSql := setting.Cfg.Section("database").Key("log_queries").MustBool(false) + debugSql := setting.Raw.Section("database").Key("log_queries").MustBool(false) if !debugSql { engine.SetLogger(&xorm.DiscardLogger{}) } else { @@ -179,7 +185,7 @@ func getEngine() (*xorm.Engine, error) { } func LoadConfig() { - sec := setting.Cfg.Section("database") + sec := setting.Raw.Section("database") cfgURL := sec.Key("url").String() if len(cfgURL) != 0 { @@ -258,7 +264,7 @@ func InitTestDB(t *testing.T) *xorm.Engine { // x.ShowSQL() if err != nil { - t.Fatalf("Failed to init in memory sqllite3 db %v", err) + t.Fatalf("Failed to init test 
database: %v", err) } sqlutil.CleanDB(x) @@ -269,3 +275,19 @@ func InitTestDB(t *testing.T) *xorm.Engine { return x } + +func IsTestDbMySql() bool { + if db, present := os.LookupEnv("GRAFANA_TEST_DB"); present { + return db == dbMySql + } + + return false +} + +func IsTestDbPostgres() bool { + if db, present := os.LookupEnv("GRAFANA_TEST_DB"); present { + return db == dbPostgres + } + + return false +} diff --git a/pkg/services/sqlstore/stats.go b/pkg/services/sqlstore/stats.go index cfe2d88c82c..173a1e56634 100644 --- a/pkg/services/sqlstore/stats.go +++ b/pkg/services/sqlstore/stats.go @@ -13,16 +13,12 @@ func init() { bus.AddHandler("sql", GetAdminStats) } -var activeUserTimeLimit time.Duration = time.Hour * 24 * 30 +var activeUserTimeLimit = time.Hour * 24 * 30 func GetDataSourceStats(query *m.GetDataSourceStatsQuery) error { var rawSql = `SELECT COUNT(*) as count, type FROM data_source GROUP BY type` query.Result = make([]*m.DataSourceStats, 0) err := x.SQL(rawSql).Find(&query.Result) - if err != nil { - return err - } - return err } @@ -68,6 +64,7 @@ func GetSystemStats(query *m.GetSystemStatsQuery) error { } query.Result = &stats + return err } diff --git a/pkg/services/sqlstore/team.go b/pkg/services/sqlstore/team.go index d238301c7ce..b3ff4c81e7c 100644 --- a/pkg/services/sqlstore/team.go +++ b/pkg/services/sqlstore/team.go @@ -210,11 +210,7 @@ func GetTeamsByUser(query *m.GetTeamsByUserQuery) error { sess.Where("team.org_id=? and team_member.user_id=?", query.OrgId, query.UserId) err := sess.Find(&query.Result) - if err != nil { - return err - } - - return nil + return err } // AddTeamMember adds a user to a team diff --git a/pkg/services/sqlstore/team_test.go b/pkg/services/sqlstore/team_test.go index f136411eeba..f4b022906da 100644 --- a/pkg/services/sqlstore/team_test.go +++ b/pkg/services/sqlstore/team_test.go @@ -74,6 +74,7 @@ func TestTeamCommandsAndQueries(t *testing.T) { Convey("Should be able to return all teams a user is member of", func() { groupId := group2.Result.Id err := AddTeamMember(&m.AddTeamMemberCommand{OrgId: testOrgId, TeamId: groupId, UserId: userIds[0]}) + So(err, ShouldBeNil) query := &m.GetTeamsByUserQuery{OrgId: testOrgId, UserId: userIds[0]} err = GetTeamsByUser(query) @@ -103,7 +104,7 @@ func TestTeamCommandsAndQueries(t *testing.T) { err = AddTeamMember(&m.AddTeamMemberCommand{OrgId: testOrgId, TeamId: groupId, UserId: userIds[2]}) So(err, ShouldBeNil) err = testHelperUpdateDashboardAcl(1, m.DashboardAcl{DashboardId: 1, OrgId: testOrgId, Permission: m.PERMISSION_EDIT, TeamId: groupId}) - + So(err, ShouldBeNil) err = DeleteTeam(&m.DeleteTeamCommand{OrgId: testOrgId, Id: groupId}) So(err, ShouldBeNil) diff --git a/pkg/services/sqlstore/temp_user.go b/pkg/services/sqlstore/temp_user.go index 43e1f027057..e93ba2fd641 100644 --- a/pkg/services/sqlstore/temp_user.go +++ b/pkg/services/sqlstore/temp_user.go @@ -126,7 +126,7 @@ func GetTempUserByCode(query *m.GetTempUserByCodeQuery) error { if err != nil { return err - } else if has == false { + } else if !has { return m.ErrTempUserNotFound } diff --git a/pkg/services/sqlstore/user.go b/pkg/services/sqlstore/user.go index f42ff5fb2ed..f19019d28a4 100644 --- a/pkg/services/sqlstore/user.go +++ b/pkg/services/sqlstore/user.go @@ -47,10 +47,9 @@ func getOrgIdForNewUser(cmd *m.CreateUserCommand, sess *DBSession) (int64, error } if has { return org.Id, nil - } else { - org.Name = "Main Org." - org.Id = 1 } + org.Name = "Main Org." 
+ org.Id = 1 } else { org.Name = cmd.OrgName if len(org.Name) == 0 { @@ -154,7 +153,7 @@ func GetUserById(query *m.GetUserByIdQuery) error { if err != nil { return err - } else if has == false { + } else if !has { return m.ErrUserNotFound } @@ -168,18 +167,16 @@ func GetUserByLogin(query *m.GetUserByLoginQuery) error { return m.ErrUserNotFound } - user := new(m.User) - // Try and find the user by login first. // It's not sufficient to assume that a LoginOrEmail with an "@" is an email. - user = &m.User{Login: query.LoginOrEmail} + user := &m.User{Login: query.LoginOrEmail} has, err := x.Get(user) if err != nil { return err } - if has == false && strings.Contains(query.LoginOrEmail, "@") { + if !has && strings.Contains(query.LoginOrEmail, "@") { // If the user wasn't found, and it contains an "@" fallback to finding the // user by email. user = &m.User{Email: query.LoginOrEmail} @@ -188,7 +185,7 @@ func GetUserByLogin(query *m.GetUserByLoginQuery) error { if err != nil { return err - } else if has == false { + } else if !has { return m.ErrUserNotFound } @@ -202,14 +199,12 @@ func GetUserByEmail(query *m.GetUserByEmailQuery) error { return m.ErrUserNotFound } - user := new(m.User) - - user = &m.User{Email: query.Email} + user := &m.User{Email: query.Email} has, err := x.Get(user) if err != nil { return err - } else if has == false { + } else if !has { return m.ErrUserNotFound } @@ -253,11 +248,8 @@ func ChangeUserPassword(cmd *m.ChangeUserPasswordCommand) error { Updated: time.Now(), } - if _, err := sess.Id(cmd.UserId).Update(&user); err != nil { - return err - } - - return nil + _, err := sess.Id(cmd.UserId).Update(&user) + return err }) } @@ -271,11 +263,8 @@ func UpdateUserLastSeenAt(cmd *m.UpdateUserLastSeenAtCommand) error { LastSeenAt: time.Now(), } - if _, err := sess.Id(cmd.UserId).Update(&user); err != nil { - return err - } - - return nil + _, err := sess.Id(cmd.UserId).Update(&user) + return err }) } @@ -295,11 +284,12 @@ func SetUsingOrg(cmd *m.SetUsingOrgCommand) error { } return inTransaction(func(sess *DBSession) error { - user := m.User{} - sess.Id(cmd.UserId).Get(&user) + user := m.User{ + Id: cmd.UserId, + OrgId: cmd.OrgId, + } - user.OrgId = cmd.OrgId - _, err := sess.Id(user.Id).Update(&user) + _, err := sess.Id(cmd.UserId).Update(&user) return err }) } @@ -310,7 +300,7 @@ func GetUserProfile(query *m.GetUserProfileQuery) error { if err != nil { return err - } else if has == false { + } else if !has { return m.ErrUserNotFound } @@ -333,6 +323,7 @@ func GetUserOrgList(query *m.GetUserOrgListQuery) error { sess.Join("INNER", "org", "org_user.org_id=org.id") sess.Where("org_user.user_id=?", query.UserId) sess.Cols("org.name", "org_user.role", "org_user.org_id") + sess.OrderBy("org.name") err := sess.Find(&query.Result) return err } @@ -444,6 +435,7 @@ func DeleteUser(cmd *m.DeleteUserCommand) error { "DELETE FROM dashboard_acl WHERE user_id = ?", "DELETE FROM preferences WHERE user_id = ?", "DELETE FROM team_member WHERE user_id = ?", + "DELETE FROM user_auth WHERE user_id = ?", } for _, sql := range deletes { @@ -478,10 +470,7 @@ func SetUserHelpFlag(cmd *m.SetUserHelpFlagCommand) error { Updated: time.Now(), } - if _, err := sess.Id(cmd.UserId).Cols("help_flags1").Update(&user); err != nil { - return err - } - - return nil + _, err := sess.Id(cmd.UserId).Cols("help_flags1").Update(&user) + return err }) } diff --git a/pkg/services/sqlstore/user_auth.go b/pkg/services/sqlstore/user_auth.go new file mode 100644 index 00000000000..aec828451a4 --- /dev/null +++ 
b/pkg/services/sqlstore/user_auth.go @@ -0,0 +1,148 @@ +package sqlstore + +import ( + "time" + + "github.com/grafana/grafana/pkg/bus" + m "github.com/grafana/grafana/pkg/models" +) + +func init() { + bus.AddHandler("sql", GetUserByAuthInfo) + bus.AddHandler("sql", GetAuthInfo) + bus.AddHandler("sql", SetAuthInfo) + bus.AddHandler("sql", DeleteAuthInfo) +} + +func GetUserByAuthInfo(query *m.GetUserByAuthInfoQuery) error { + user := &m.User{} + has := false + var err error + authQuery := &m.GetAuthInfoQuery{} + + // Try to find the user by auth module and id first + if query.AuthModule != "" && query.AuthId != "" { + authQuery.AuthModule = query.AuthModule + authQuery.AuthId = query.AuthId + + err = GetAuthInfo(authQuery) + if err != m.ErrUserNotFound { + if err != nil { + return err + } + + // if user id was specified and doesn't match the user_auth entry, remove it + if query.UserId != 0 && query.UserId != authQuery.Result.UserId { + err = DeleteAuthInfo(&m.DeleteAuthInfoCommand{ + UserAuth: authQuery.Result, + }) + if err != nil { + sqlog.Error("Error removing user_auth entry", "error", err) + } + + authQuery.Result = nil + } else { + has, err = x.Id(authQuery.Result.UserId).Get(user) + if err != nil { + return err + } + + if !has { + // if the user has been deleted then remove the entry + err = DeleteAuthInfo(&m.DeleteAuthInfoCommand{ + UserAuth: authQuery.Result, + }) + if err != nil { + sqlog.Error("Error removing user_auth entry", "error", err) + } + + authQuery.Result = nil + } + } + } + } + + // If not found, try to find the user by id + if !has && query.UserId != 0 { + has, err = x.Id(query.UserId).Get(user) + if err != nil { + return err + } + } + + // If not found, try to find the user by email address + if !has && query.Email != "" { + user = &m.User{Email: query.Email} + has, err = x.Get(user) + if err != nil { + return err + } + } + + // If not found, try to find the user by login + if !has && query.Login != "" { + user = &m.User{Login: query.Login} + has, err = x.Get(user) + if err != nil { + return err + } + } + + // No user found + if !has { + return m.ErrUserNotFound + } + + // create authInfo record to link accounts + if authQuery.Result == nil && query.AuthModule != "" && query.AuthId != "" { + cmd2 := &m.SetAuthInfoCommand{ + UserId: user.Id, + AuthModule: query.AuthModule, + AuthId: query.AuthId, + } + if err := SetAuthInfo(cmd2); err != nil { + return err + } + } + + query.Result = user + return nil +} + +func GetAuthInfo(query *m.GetAuthInfoQuery) error { + userAuth := &m.UserAuth{ + AuthModule: query.AuthModule, + AuthId: query.AuthId, + } + has, err := x.Get(userAuth) + if err != nil { + return err + } + if !has { + return m.ErrUserNotFound + } + + query.Result = userAuth + return nil +} + +func SetAuthInfo(cmd *m.SetAuthInfoCommand) error { + return inTransaction(func(sess *DBSession) error { + authUser := &m.UserAuth{ + UserId: cmd.UserId, + AuthModule: cmd.AuthModule, + AuthId: cmd.AuthId, + Created: time.Now(), + } + + _, err := sess.Insert(authUser) + return err + }) +} + +func DeleteAuthInfo(cmd *m.DeleteAuthInfoCommand) error { + return inTransaction(func(sess *DBSession) error { + _, err := sess.Delete(cmd.UserAuth) + return err + }) +} diff --git a/pkg/services/sqlstore/user_auth_test.go b/pkg/services/sqlstore/user_auth_test.go new file mode 100644 index 00000000000..882e0c7afa5 --- /dev/null +++ b/pkg/services/sqlstore/user_auth_test.go @@ -0,0 +1,131 @@ +package sqlstore + +import ( + "fmt" + "testing" + + . 
"github.com/smartystreets/goconvey/convey" + + m "github.com/grafana/grafana/pkg/models" +) + +func TestUserAuth(t *testing.T) { + InitTestDB(t) + + Convey("Given 5 users", t, func() { + var err error + var cmd *m.CreateUserCommand + users := []m.User{} + for i := 0; i < 5; i++ { + cmd = &m.CreateUserCommand{ + Email: fmt.Sprint("user", i, "@test.com"), + Name: fmt.Sprint("user", i), + Login: fmt.Sprint("loginuser", i), + } + err = CreateUser(cmd) + So(err, ShouldBeNil) + users = append(users, cmd.Result) + } + + Reset(func() { + _, err := x.Exec("DELETE FROM org_user WHERE 1=1") + So(err, ShouldBeNil) + _, err = x.Exec("DELETE FROM org WHERE 1=1") + So(err, ShouldBeNil) + _, err = x.Exec("DELETE FROM " + dialect.Quote("user") + " WHERE 1=1") + So(err, ShouldBeNil) + _, err = x.Exec("DELETE FROM user_auth WHERE 1=1") + So(err, ShouldBeNil) + }) + + Convey("Can find existing user", func() { + // By Login + login := "loginuser0" + + query := &m.GetUserByAuthInfoQuery{Login: login} + err = GetUserByAuthInfo(query) + + So(err, ShouldBeNil) + So(query.Result.Login, ShouldEqual, login) + + // By ID + id := query.Result.Id + + query = &m.GetUserByAuthInfoQuery{UserId: id} + err = GetUserByAuthInfo(query) + + So(err, ShouldBeNil) + So(query.Result.Id, ShouldEqual, id) + + // By Email + email := "user1@test.com" + + query = &m.GetUserByAuthInfoQuery{Email: email} + err = GetUserByAuthInfo(query) + + So(err, ShouldBeNil) + So(query.Result.Email, ShouldEqual, email) + + // Don't find nonexistent user + email = "nonexistent@test.com" + + query = &m.GetUserByAuthInfoQuery{Email: email} + err = GetUserByAuthInfo(query) + + So(err, ShouldEqual, m.ErrUserNotFound) + So(query.Result, ShouldBeNil) + }) + + Convey("Can set & locate by AuthModule and AuthId", func() { + // get nonexistent user_auth entry + query := &m.GetUserByAuthInfoQuery{AuthModule: "test", AuthId: "test"} + err = GetUserByAuthInfo(query) + + So(err, ShouldEqual, m.ErrUserNotFound) + So(query.Result, ShouldBeNil) + + // create user_auth entry + login := "loginuser0" + + query.Login = login + err = GetUserByAuthInfo(query) + + So(err, ShouldBeNil) + So(query.Result.Login, ShouldEqual, login) + + // get via user_auth + query = &m.GetUserByAuthInfoQuery{AuthModule: "test", AuthId: "test"} + err = GetUserByAuthInfo(query) + + So(err, ShouldBeNil) + So(query.Result.Login, ShouldEqual, login) + + // get with non-matching id + id := query.Result.Id + + query.UserId = id + 1 + err = GetUserByAuthInfo(query) + + So(err, ShouldBeNil) + So(query.Result.Login, ShouldEqual, "loginuser1") + + // get via user_auth + query = &m.GetUserByAuthInfoQuery{AuthModule: "test", AuthId: "test"} + err = GetUserByAuthInfo(query) + + So(err, ShouldBeNil) + So(query.Result.Login, ShouldEqual, "loginuser1") + + // remove user + _, err = x.Exec("DELETE FROM "+dialect.Quote("user")+" WHERE id=?", query.Result.Id) + So(err, ShouldBeNil) + + // get via user_auth for deleted user + query = &m.GetUserByAuthInfoQuery{AuthModule: "test", AuthId: "test"} + err = GetUserByAuthInfo(query) + + So(err, ShouldEqual, m.ErrUserNotFound) + So(query.Result, ShouldBeNil) + }) + }) +} diff --git a/pkg/setting/setting.go b/pkg/setting/setting.go index 5b79e866964..f53b1f852d5 100644 --- a/pkg/setting/setting.go +++ b/pkg/setting/setting.go @@ -39,23 +39,24 @@ const ( var ( // App settings. 
- Env string = DEV + Env = DEV AppUrl string AppSubUrl string InstanceName string // build - BuildVersion string - BuildCommit string - BuildStamp int64 + BuildVersion string + BuildCommit string + BuildStamp int64 + Enterprise bool + ApplicationName string // Paths - LogsPath string - HomePath string - DataPath string - PluginsPath string - ProvisioningPath string - CustomInitPath = "conf/custom.ini" + LogsPath string + HomePath string + DataPath string + PluginsPath string + CustomInitPath = "conf/custom.ini" // Log settings. LogModes []string @@ -123,6 +124,7 @@ var ( AuthProxyAutoSignUp bool AuthProxyLdapSyncTtl int AuthProxyWhitelist string + AuthProxyHeaders map[string]string // Basic Auth BasicAuthEnabled bool @@ -135,7 +137,7 @@ var ( SessionConnMaxLifetime int64 // Global setting objects. - Cfg *ini.File + Raw *ini.File ConfRootPath string IsWindows bool @@ -156,10 +158,7 @@ var ( // LDAP LdapEnabled bool LdapConfigFile string - LdapAllowSignup bool = true - - // SMTP email settings - Smtp SmtpSettings + LdapAllowSignup = true // QUOTA Quota QuotaSettings @@ -168,6 +167,9 @@ var ( AlertingEnabled bool ExecuteAlerts bool + // Explore UI + ExploreEnabled bool + // logger logger log.Logger @@ -182,6 +184,19 @@ var ( ImageUploadProvider string ) +type Cfg struct { + Raw *ini.File + + // Paths + ProvisioningPath string + + // SMTP email settings + Smtp SmtpSettings + + ImagesDir string + DisableBruteForceLoginProtection bool +} + type CommandLineArgs struct { Config string HomePath string @@ -223,9 +238,9 @@ func shouldRedactURLKey(s string) bool { return strings.Contains(uppercased, "DATABASE_URL") } -func applyEnvVariableOverrides() { +func applyEnvVariableOverrides(file *ini.File) error { appliedEnvOverrides = make([]string, 0) - for _, section := range Cfg.Sections() { + for _, section := range file.Sections() { for _, key := range section.Keys() { sectionName := strings.ToUpper(strings.Replace(section.Name(), ".", "_", -1)) keyName := strings.ToUpper(strings.Replace(key.Name(), ".", "_", -1)) @@ -238,7 +253,10 @@ func applyEnvVariableOverrides() { envValue = "*********" } if shouldRedactURLKey(envKey) { - u, _ := url.Parse(envValue) + u, err := url.Parse(envValue) + if err != nil { + return fmt.Errorf("could not parse environment variable. key: %s, value: %s. error: %v", envKey, envValue, err) + } ui := u.User if ui != nil { _, exists := ui.Password() @@ -252,11 +270,13 @@ func applyEnvVariableOverrides() { } } } + + return nil } -func applyCommandLineDefaultProperties(props map[string]string) { +func applyCommandLineDefaultProperties(props map[string]string, file *ini.File) { appliedCommandLineProperties = make([]string, 0) - for _, section := range Cfg.Sections() { + for _, section := range file.Sections() { for _, key := range section.Keys() { keyString := fmt.Sprintf("default.%s.%s", section.Name(), key.Name()) value, exists := props[keyString] @@ -271,8 +291,8 @@ func applyCommandLineDefaultProperties(props map[string]string) { } } -func applyCommandLineProperties(props map[string]string) { - for _, section := range Cfg.Sections() { +func applyCommandLineProperties(props map[string]string, file *ini.File) { + for _, section := range file.Sections() { sectionName := section.Name() + "." 
if section.Name() == ini.DEFAULT_SECTION { sectionName = "" @@ -331,15 +351,15 @@ func evalEnvVarExpression(value string) string { }) } -func evalConfigValues() { - for _, section := range Cfg.Sections() { +func evalConfigValues(file *ini.File) { + for _, section := range file.Sections() { for _, key := range section.Keys() { key.SetValue(evalEnvVarExpression(key.Value())) } } } -func loadSpecifedConfigFile(configFile string) error { +func loadSpecifedConfigFile(configFile string, masterFile *ini.File) error { if configFile == "" { configFile = filepath.Join(HomePath, CustomInitPath) // return without error if custom file does not exist @@ -361,9 +381,9 @@ func loadSpecifedConfigFile(configFile string) error { continue } - defaultSec, err := Cfg.GetSection(section.Name()) + defaultSec, err := masterFile.GetSection(section.Name()) if err != nil { - defaultSec, _ = Cfg.NewSection(section.Name()) + defaultSec, _ = masterFile.NewSection(section.Name()) } defaultKey, err := defaultSec.GetKey(key.Name()) if err != nil { @@ -377,7 +397,7 @@ func loadSpecifedConfigFile(configFile string) error { return nil } -func loadConfiguration(args *CommandLineArgs) { +func loadConfiguration(args *CommandLineArgs) (*ini.File, error) { var err error // load config defaults @@ -391,39 +411,44 @@ func loadConfiguration(args *CommandLineArgs) { } // load defaults - Cfg, err = ini.Load(defaultConfigFile) + parsedFile, err := ini.Load(defaultConfigFile) if err != nil { fmt.Println(fmt.Sprintf("Failed to parse defaults.ini, %v", err)) os.Exit(1) - return + return nil, err } - Cfg.BlockMode = false + parsedFile.BlockMode = false // command line props commandLineProps := getCommandLineProperties(args.Args) // load default overrides - applyCommandLineDefaultProperties(commandLineProps) + applyCommandLineDefaultProperties(commandLineProps, parsedFile) // load specified config file - err = loadSpecifedConfigFile(args.Config) + err = loadSpecifedConfigFile(args.Config, parsedFile) if err != nil { - initLogging() + initLogging(parsedFile) log.Fatal(3, err.Error()) } // apply environment overrides - applyEnvVariableOverrides() + err = applyEnvVariableOverrides(parsedFile) + if err != nil { + return nil, err + } // apply command line overrides - applyCommandLineProperties(commandLineProps) + applyCommandLineProperties(commandLineProps, parsedFile) // evaluate config values containing environment variables - evalConfigValues() + evalConfigValues(parsedFile) // update data path and logging config - DataPath = makeAbsolute(Cfg.Section("paths").Key("data").String(), HomePath) - initLogging() + DataPath = makeAbsolute(parsedFile.Section("paths").Key("data").String(), HomePath) + initLogging(parsedFile) + + return parsedFile, err } func pathExists(path string) bool { @@ -455,7 +480,7 @@ func setHomePath(args *CommandLineArgs) { } } -var skipStaticRootValidation bool = false +var skipStaticRootValidation = false func validateStaticRootPath() error { if skipStaticRootValidation { @@ -469,15 +494,33 @@ func validateStaticRootPath() error { return nil } -func NewConfigContext(args *CommandLineArgs) error { - setHomePath(args) - loadConfiguration(args) +func NewCfg() *Cfg { + return &Cfg{} +} - Env = Cfg.Section("").Key("app_mode").MustString("development") - InstanceName = Cfg.Section("").Key("instance_name").MustString("unknown_instance_name") - PluginsPath = makeAbsolute(Cfg.Section("paths").Key("plugins").String(), HomePath) - ProvisioningPath = makeAbsolute(Cfg.Section("paths").Key("provisioning").String(), HomePath) - server := 
Cfg.Section("server") +func (cfg *Cfg) Load(args *CommandLineArgs) error { + setHomePath(args) + + iniFile, err := loadConfiguration(args) + if err != nil { + return err + } + + cfg.Raw = iniFile + + // Temporary keep global, to make refactor in steps + Raw = cfg.Raw + + ApplicationName = "Grafana" + if Enterprise { + ApplicationName += " Enterprise" + } + + Env = iniFile.Section("").Key("app_mode").MustString("development") + InstanceName = iniFile.Section("").Key("instance_name").MustString("unknown_instance_name") + PluginsPath = makeAbsolute(iniFile.Section("paths").Key("plugins").String(), HomePath) + cfg.ProvisioningPath = makeAbsolute(iniFile.Section("paths").Key("provisioning").String(), HomePath) + server := iniFile.Section("server") AppUrl, AppSubUrl = parseAppUrlAndSubUrl(server) Protocol = HTTP @@ -505,27 +548,28 @@ func NewConfigContext(args *CommandLineArgs) error { } // read data proxy settings - dataproxy := Cfg.Section("dataproxy") + dataproxy := iniFile.Section("dataproxy") DataProxyLogging = dataproxy.Key("logging").MustBool(false) // read security settings - security := Cfg.Section("security") + security := iniFile.Section("security") SecretKey = security.Key("secret_key").String() LogInRememberDays = security.Key("login_remember_days").MustInt() CookieUserName = security.Key("cookie_username").String() CookieRememberName = security.Key("cookie_remember_name").String() DisableGravatar = security.Key("disable_gravatar").MustBool(true) - DisableBruteForceLoginProtection = security.Key("disable_brute_force_login_protection").MustBool(false) + cfg.DisableBruteForceLoginProtection = security.Key("disable_brute_force_login_protection").MustBool(false) + DisableBruteForceLoginProtection = cfg.DisableBruteForceLoginProtection // read snapshots settings - snapshots := Cfg.Section("snapshots") + snapshots := iniFile.Section("snapshots") ExternalSnapshotUrl = snapshots.Key("external_snapshot_url").String() ExternalSnapshotName = snapshots.Key("external_snapshot_name").String() ExternalEnabled = snapshots.Key("external_enabled").MustBool(true) SnapShotRemoveExpired = snapshots.Key("snapshot_remove_expired").MustBool(true) // read dashboard settings - dashboards := Cfg.Section("dashboards") + dashboards := iniFile.Section("dashboards") DashboardVersionsToKeep = dashboards.Key("versions_to_keep").MustInt(20) // read data source proxy white list @@ -538,7 +582,7 @@ func NewConfigContext(args *CommandLineArgs) error { AdminUser = security.Key("admin_user").String() AdminPassword = security.Key("admin_password").String() - users := Cfg.Section("users") + users := iniFile.Section("users") AllowUserSignUp = users.Key("allow_sign_up").MustBool(true) AllowUserOrgCreate = users.Key("allow_org_create").MustBool(true) AutoAssignOrg = users.Key("auto_assign_org").MustBool(true) @@ -552,17 +596,17 @@ func NewConfigContext(args *CommandLineArgs) error { ViewersCanEdit = users.Key("viewers_can_edit").MustBool(false) // auth - auth := Cfg.Section("auth") + auth := iniFile.Section("auth") DisableLoginForm = auth.Key("disable_login_form").MustBool(false) DisableSignoutMenu = auth.Key("disable_signout_menu").MustBool(false) // anonymous access - AnonymousEnabled = Cfg.Section("auth.anonymous").Key("enabled").MustBool(false) - AnonymousOrgName = Cfg.Section("auth.anonymous").Key("org_name").String() - AnonymousOrgRole = Cfg.Section("auth.anonymous").Key("org_role").String() + AnonymousEnabled = iniFile.Section("auth.anonymous").Key("enabled").MustBool(false) + AnonymousOrgName = 
iniFile.Section("auth.anonymous").Key("org_name").String() + AnonymousOrgRole = iniFile.Section("auth.anonymous").Key("org_role").String() // auth proxy - authProxy := Cfg.Section("auth.proxy") + authProxy := iniFile.Section("auth.proxy") AuthProxyEnabled = authProxy.Key("enabled").MustBool(false) AuthProxyHeaderName = authProxy.Key("header_name").String() AuthProxyHeaderProperty = authProxy.Key("header_property").String() @@ -570,61 +614,73 @@ func NewConfigContext(args *CommandLineArgs) error { AuthProxyLdapSyncTtl = authProxy.Key("ldap_sync_ttl").MustInt() AuthProxyWhitelist = authProxy.Key("whitelist").String() + AuthProxyHeaders = make(map[string]string) + for _, propertyAndHeader := range util.SplitString(authProxy.Key("headers").String()) { + split := strings.SplitN(propertyAndHeader, ":", 2) + if len(split) == 2 { + AuthProxyHeaders[split[0]] = split[1] + } + } + // basic auth - authBasic := Cfg.Section("auth.basic") + authBasic := iniFile.Section("auth.basic") BasicAuthEnabled = authBasic.Key("enabled").MustBool(true) // global plugin settings - PluginAppsSkipVerifyTLS = Cfg.Section("plugins").Key("app_tls_skip_verify_insecure").MustBool(false) + PluginAppsSkipVerifyTLS = iniFile.Section("plugins").Key("app_tls_skip_verify_insecure").MustBool(false) // PhantomJS rendering - ImagesDir = filepath.Join(DataPath, "png") + cfg.ImagesDir = filepath.Join(DataPath, "png") + ImagesDir = cfg.ImagesDir PhantomDir = filepath.Join(HomePath, "tools/phantomjs") - analytics := Cfg.Section("analytics") + analytics := iniFile.Section("analytics") ReportingEnabled = analytics.Key("reporting_enabled").MustBool(true) CheckForUpdates = analytics.Key("check_for_updates").MustBool(true) GoogleAnalyticsId = analytics.Key("google_analytics_ua_id").String() GoogleTagManagerId = analytics.Key("google_tag_manager_id").String() - ldapSec := Cfg.Section("auth.ldap") + ldapSec := iniFile.Section("auth.ldap") LdapEnabled = ldapSec.Key("enabled").MustBool(false) LdapConfigFile = ldapSec.Key("config_file").String() LdapAllowSignup = ldapSec.Key("allow_sign_up").MustBool(true) - alerting := Cfg.Section("alerting") + alerting := iniFile.Section("alerting") AlertingEnabled = alerting.Key("enabled").MustBool(true) ExecuteAlerts = alerting.Key("execute_alerts").MustBool(true) - readSessionConfig() - readSmtpSettings() - readQuotaSettings() + explore := iniFile.Section("explore") + ExploreEnabled = explore.Key("enabled").MustBool(false) - if VerifyEmailEnabled && !Smtp.Enabled { + cfg.readSessionConfig() + cfg.readSmtpSettings() + cfg.readQuotaSettings() + + if VerifyEmailEnabled && !cfg.Smtp.Enabled { log.Warn("require_email_validation is enabled but smtp is disabled") } // check old key name - GrafanaComUrl = Cfg.Section("grafana_net").Key("url").MustString("") + GrafanaComUrl = iniFile.Section("grafana_net").Key("url").MustString("") if GrafanaComUrl == "" { - GrafanaComUrl = Cfg.Section("grafana_com").Key("url").MustString("https://grafana.com") + GrafanaComUrl = iniFile.Section("grafana_com").Key("url").MustString("https://grafana.com") } - imageUploadingSection := Cfg.Section("external_image_storage") + imageUploadingSection := iniFile.Section("external_image_storage") ImageUploadProvider = imageUploadingSection.Key("provider").MustString("") return nil } -func readSessionConfig() { - sec := Cfg.Section("session") +func (cfg *Cfg) readSessionConfig() { + sec := cfg.Raw.Section("session") SessionOptions = session.Options{} SessionOptions.Provider = sec.Key("provider").In("memory", []string{"memory", "file", "redis", 
"mysql", "postgres", "memcache"}) SessionOptions.ProviderConfig = strings.Trim(sec.Key("provider_config").String(), "\" ") SessionOptions.CookieName = sec.Key("cookie_name").MustString("grafana_sess") SessionOptions.CookiePath = AppSubUrl SessionOptions.Secure = sec.Key("cookie_secure").MustBool() - SessionOptions.Gclifetime = Cfg.Section("session").Key("gc_interval_time").MustInt64(86400) - SessionOptions.Maxlifetime = Cfg.Section("session").Key("session_life_time").MustInt64(86400) + SessionOptions.Gclifetime = cfg.Raw.Section("session").Key("gc_interval_time").MustInt64(86400) + SessionOptions.Maxlifetime = cfg.Raw.Section("session").Key("session_life_time").MustInt64(86400) SessionOptions.IDLength = 16 if SessionOptions.Provider == "file" { @@ -636,21 +692,21 @@ func readSessionConfig() { SessionOptions.CookiePath = "/" } - SessionConnMaxLifetime = Cfg.Section("session").Key("conn_max_lifetime").MustInt64(14400) + SessionConnMaxLifetime = cfg.Raw.Section("session").Key("conn_max_lifetime").MustInt64(14400) } -func initLogging() { +func initLogging(file *ini.File) { // split on comma - LogModes = strings.Split(Cfg.Section("log").Key("mode").MustString("console"), ",") + LogModes = strings.Split(file.Section("log").Key("mode").MustString("console"), ",") // also try space if len(LogModes) == 1 { - LogModes = strings.Split(Cfg.Section("log").Key("mode").MustString("console"), " ") + LogModes = strings.Split(file.Section("log").Key("mode").MustString("console"), " ") } - LogsPath = makeAbsolute(Cfg.Section("paths").Key("logs").String(), HomePath) - log.ReadLoggingConfig(LogModes, LogsPath, Cfg) + LogsPath = makeAbsolute(file.Section("paths").Key("logs").String(), HomePath) + log.ReadLoggingConfig(LogModes, LogsPath, file) } -func LogConfigurationInfo() { +func (cfg *Cfg) LogConfigSources() { var text bytes.Buffer for _, file := range configFiles { @@ -674,6 +730,6 @@ func LogConfigurationInfo() { logger.Info("Path Data", "path", DataPath) logger.Info("Path Logs", "path", LogsPath) logger.Info("Path Plugins", "path", PluginsPath) - logger.Info("Path Provisioning", "path", ProvisioningPath) + logger.Info("Path Provisioning", "path", cfg.ProvisioningPath) logger.Info("App mode " + Env) } diff --git a/pkg/setting/setting_quota.go b/pkg/setting/setting_quota.go index 49769d9930f..c3a509219db 100644 --- a/pkg/setting/setting_quota.go +++ b/pkg/setting/setting_quota.go @@ -63,9 +63,9 @@ type QuotaSettings struct { Global *GlobalQuota } -func readQuotaSettings() { +func (cfg *Cfg) readQuotaSettings() { // set global defaults. 
- quota := Cfg.Section("quota") + quota := cfg.Raw.Section("quota") Quota.Enabled = quota.Key("enabled").MustBool(false) // per ORG Limits diff --git a/pkg/setting/setting_smtp.go b/pkg/setting/setting_smtp.go index 9d8b8a529a5..5df774dc691 100644 --- a/pkg/setting/setting_smtp.go +++ b/pkg/setting/setting_smtp.go @@ -16,20 +16,20 @@ type SmtpSettings struct { TemplatesPattern string } -func readSmtpSettings() { - sec := Cfg.Section("smtp") - Smtp.Enabled = sec.Key("enabled").MustBool(false) - Smtp.Host = sec.Key("host").String() - Smtp.User = sec.Key("user").String() - Smtp.Password = sec.Key("password").String() - Smtp.CertFile = sec.Key("cert_file").String() - Smtp.KeyFile = sec.Key("key_file").String() - Smtp.FromAddress = sec.Key("from_address").String() - Smtp.FromName = sec.Key("from_name").String() - Smtp.EhloIdentity = sec.Key("ehlo_identity").String() - Smtp.SkipVerify = sec.Key("skip_verify").MustBool(false) +func (cfg *Cfg) readSmtpSettings() { + sec := cfg.Raw.Section("smtp") + cfg.Smtp.Enabled = sec.Key("enabled").MustBool(false) + cfg.Smtp.Host = sec.Key("host").String() + cfg.Smtp.User = sec.Key("user").String() + cfg.Smtp.Password = sec.Key("password").String() + cfg.Smtp.CertFile = sec.Key("cert_file").String() + cfg.Smtp.KeyFile = sec.Key("key_file").String() + cfg.Smtp.FromAddress = sec.Key("from_address").String() + cfg.Smtp.FromName = sec.Key("from_name").String() + cfg.Smtp.EhloIdentity = sec.Key("ehlo_identity").String() + cfg.Smtp.SkipVerify = sec.Key("skip_verify").MustBool(false) - emails := Cfg.Section("emails") - Smtp.SendWelcomeEmailOnSignUp = emails.Key("welcome_email_on_sign_up").MustBool(false) - Smtp.TemplatesPattern = emails.Key("templates_pattern").MustString("emails/*.html") + emails := cfg.Raw.Section("emails") + cfg.Smtp.SendWelcomeEmailOnSignUp = emails.Key("welcome_email_on_sign_up").MustBool(false) + cfg.Smtp.TemplatesPattern = emails.Key("templates_pattern").MustString("emails/*.html") } diff --git a/pkg/setting/setting_test.go b/pkg/setting/setting_test.go index 640a1648340..9de22c86811 100644 --- a/pkg/setting/setting_test.go +++ b/pkg/setting/setting_test.go @@ -15,7 +15,8 @@ func TestLoadingSettings(t *testing.T) { skipStaticRootValidation = true Convey("Given the default ini files", func() { - err := NewConfigContext(&CommandLineArgs{HomePath: "../../"}) + cfg := NewCfg() + err := cfg.Load(&CommandLineArgs{HomePath: "../../"}) So(err, ShouldBeNil) So(AdminUser, ShouldEqual, "admin") @@ -23,7 +24,9 @@ func TestLoadingSettings(t *testing.T) { Convey("Should be able to override via environment variables", func() { os.Setenv("GF_SECURITY_ADMIN_USER", "superduper") - NewConfigContext(&CommandLineArgs{HomePath: "../../"}) + + cfg := NewCfg() + cfg.Load(&CommandLineArgs{HomePath: "../../"}) So(AdminUser, ShouldEqual, "superduper") So(DataPath, ShouldEqual, filepath.Join(HomePath, "data")) @@ -32,14 +35,27 @@ func TestLoadingSettings(t *testing.T) { Convey("Should replace password when defined in environment", func() { os.Setenv("GF_SECURITY_ADMIN_PASSWORD", "supersecret") - NewConfigContext(&CommandLineArgs{HomePath: "../../"}) + + cfg := NewCfg() + cfg.Load(&CommandLineArgs{HomePath: "../../"}) So(appliedEnvOverrides, ShouldContain, "GF_SECURITY_ADMIN_PASSWORD=*********") }) + Convey("Should return an error when url is invalid", func() { + os.Setenv("GF_DATABASE_URL", "postgres.%31://grafana:secret@postgres:5432/grafana") + + cfg := NewCfg() + err := cfg.Load(&CommandLineArgs{HomePath: "../../"}) + + So(err, ShouldNotBeNil) + }) + Convey("Should 
replace password in URL when url environment is defined", func() { os.Setenv("GF_DATABASE_URL", "mysql://user:secret@localhost:3306/database") - NewConfigContext(&CommandLineArgs{HomePath: "../../"}) + + cfg := NewCfg() + cfg.Load(&CommandLineArgs{HomePath: "../../"}) So(appliedEnvOverrides, ShouldContain, "GF_DATABASE_URL=mysql://user:-redacted-@localhost:3306/database") }) @@ -54,14 +70,16 @@ func TestLoadingSettings(t *testing.T) { Convey("Should be able to override via command line", func() { if runtime.GOOS == "windows" { - NewConfigContext(&CommandLineArgs{ + cfg := NewCfg() + cfg.Load(&CommandLineArgs{ HomePath: "../../", Args: []string{`cfg:paths.data=c:\tmp\data`, `cfg:paths.logs=c:\tmp\logs`}, }) So(DataPath, ShouldEqual, `c:\tmp\data`) So(LogsPath, ShouldEqual, `c:\tmp\logs`) } else { - NewConfigContext(&CommandLineArgs{ + cfg := NewCfg() + cfg.Load(&CommandLineArgs{ HomePath: "../../", Args: []string{"cfg:paths.data=/tmp/data", "cfg:paths.logs=/tmp/logs"}, }) @@ -72,7 +90,8 @@ func TestLoadingSettings(t *testing.T) { }) Convey("Should be able to override defaults via command line", func() { - NewConfigContext(&CommandLineArgs{ + cfg := NewCfg() + cfg.Load(&CommandLineArgs{ HomePath: "../../", Args: []string{ "cfg:default.server.domain=test2", @@ -85,7 +104,8 @@ func TestLoadingSettings(t *testing.T) { Convey("Defaults can be overridden in specified config file", func() { if runtime.GOOS == "windows" { - NewConfigContext(&CommandLineArgs{ + cfg := NewCfg() + cfg.Load(&CommandLineArgs{ HomePath: "../../", Config: filepath.Join(HomePath, "tests/config-files/override_windows.ini"), Args: []string{`cfg:default.paths.data=c:\tmp\data`}, @@ -93,7 +113,8 @@ func TestLoadingSettings(t *testing.T) { So(DataPath, ShouldEqual, `c:\tmp\override`) } else { - NewConfigContext(&CommandLineArgs{ + cfg := NewCfg() + cfg.Load(&CommandLineArgs{ HomePath: "../../", Config: filepath.Join(HomePath, "tests/config-files/override.ini"), Args: []string{"cfg:default.paths.data=/tmp/data"}, @@ -105,7 +126,8 @@ func TestLoadingSettings(t *testing.T) { Convey("Command line overrides specified config file", func() { if runtime.GOOS == "windows" { - NewConfigContext(&CommandLineArgs{ + cfg := NewCfg() + cfg.Load(&CommandLineArgs{ HomePath: "../../", Config: filepath.Join(HomePath, "tests/config-files/override_windows.ini"), Args: []string{`cfg:paths.data=c:\tmp\data`}, @@ -113,7 +135,8 @@ func TestLoadingSettings(t *testing.T) { So(DataPath, ShouldEqual, `c:\tmp\data`) } else { - NewConfigContext(&CommandLineArgs{ + cfg := NewCfg() + cfg.Load(&CommandLineArgs{ HomePath: "../../", Config: filepath.Join(HomePath, "tests/config-files/override.ini"), Args: []string{"cfg:paths.data=/tmp/data"}, @@ -126,7 +149,8 @@ func TestLoadingSettings(t *testing.T) { Convey("Can use environment variables in config values", func() { if runtime.GOOS == "windows" { os.Setenv("GF_DATA_PATH", `c:\tmp\env_override`) - NewConfigContext(&CommandLineArgs{ + cfg := NewCfg() + cfg.Load(&CommandLineArgs{ HomePath: "../../", Args: []string{"cfg:paths.data=${GF_DATA_PATH}"}, }) @@ -134,7 +158,8 @@ func TestLoadingSettings(t *testing.T) { So(DataPath, ShouldEqual, `c:\tmp\env_override`) } else { os.Setenv("GF_DATA_PATH", "/tmp/env_override") - NewConfigContext(&CommandLineArgs{ + cfg := NewCfg() + cfg.Load(&CommandLineArgs{ HomePath: "../../", Args: []string{"cfg:paths.data=${GF_DATA_PATH}"}, }) @@ -144,7 +169,8 @@ func TestLoadingSettings(t *testing.T) { }) Convey("instance_name default to hostname even if hostname env is empty", func() { - 
NewConfigContext(&CommandLineArgs{ + cfg := NewCfg() + cfg.Load(&CommandLineArgs{ HomePath: "../../", }) diff --git a/pkg/social/generic_oauth.go b/pkg/social/generic_oauth.go index b92d64ad9fc..8c02076096d 100644 --- a/pkg/social/generic_oauth.go +++ b/pkg/social/generic_oauth.go @@ -182,7 +182,7 @@ func (s *SocialGenericOAuth) UserInfo(client *http.Client, token *oauth2.Token) var data UserInfoJson var err error - if s.extractToken(&data, token) != true { + if !s.extractToken(&data, token) { response, err := HttpGet(client, s.apiUrl) if err != nil { return nil, fmt.Errorf("Error getting user info: %s", err) diff --git a/pkg/social/grafana_com_oauth.go b/pkg/social/grafana_com_oauth.go index d3614520d61..87601788c3f 100644 --- a/pkg/social/grafana_com_oauth.go +++ b/pkg/social/grafana_com_oauth.go @@ -51,6 +51,7 @@ func (s *SocialGrafanaCom) IsOrganizationMember(organizations []OrgRecord) bool func (s *SocialGrafanaCom) UserInfo(client *http.Client, token *oauth2.Token) (*BasicUserInfo, error) { var data struct { + Id int `json:"id"` Name string `json:"name"` Login string `json:"username"` Email string `json:"email"` @@ -69,6 +70,7 @@ func (s *SocialGrafanaCom) UserInfo(client *http.Client, token *oauth2.Token) (* } userInfo := &BasicUserInfo{ + Id: fmt.Sprintf("%d", data.Id), Name: data.Name, Login: data.Login, Email: data.Email, diff --git a/pkg/social/social.go b/pkg/social/social.go index b763e2d71b2..adbe5a912d9 100644 --- a/pkg/social/social.go +++ b/pkg/social/social.go @@ -14,6 +14,7 @@ import ( ) type BasicUserInfo struct { + Id string Name string Email string Login string @@ -57,7 +58,7 @@ func NewOAuthService() { allOauthes := []string{"github", "google", "generic_oauth", "grafananet", "grafana_com"} for _, name := range allOauthes { - sec := setting.Cfg.Section("auth." + name) + sec := setting.Raw.Section("auth." 
+ name) info := &setting.OAuthInfo{ ClientId: sec.Key("client_id").String(), ClientSecret: sec.Key("client_secret").String(), diff --git a/pkg/tracing/tracing.go b/pkg/tracing/tracing.go index 921996d155d..79b01f70c9b 100644 --- a/pkg/tracing/tracing.go +++ b/pkg/tracing/tracing.go @@ -32,7 +32,7 @@ func Init(file *ini.File) (io.Closer, error) { func parseSettings(file *ini.File) *TracingSettings { settings := &TracingSettings{} - var section, err = setting.Cfg.GetSection("tracing.jaeger") + var section, err = setting.Raw.GetSection("tracing.jaeger") if err != nil { return settings } diff --git a/pkg/tsdb/cloudwatch/annotation_query.go b/pkg/tsdb/cloudwatch/annotation_query.go index 287f4e770ef..e0d9158435e 100644 --- a/pkg/tsdb/cloudwatch/annotation_query.go +++ b/pkg/tsdb/cloudwatch/annotation_query.go @@ -72,7 +72,7 @@ func (e *CloudWatchExecutor) executeAnnotationQuery(ctx context.Context, queryCo MetricName: aws.String(metricName), Dimensions: qd, Statistic: aws.String(s), - Period: aws.Int64(int64(period)), + Period: aws.Int64(period), } resp, err := svc.DescribeAlarmsForMetric(params) if err != nil { @@ -88,7 +88,7 @@ func (e *CloudWatchExecutor) executeAnnotationQuery(ctx context.Context, queryCo MetricName: aws.String(metricName), Dimensions: qd, ExtendedStatistic: aws.String(s), - Period: aws.Int64(int64(period)), + Period: aws.Int64(period), } resp, err := svc.DescribeAlarmsForMetric(params) if err != nil { diff --git a/pkg/tsdb/cloudwatch/cloudwatch.go b/pkg/tsdb/cloudwatch/cloudwatch.go index 3879dce4ea6..499a3ed6e03 100644 --- a/pkg/tsdb/cloudwatch/cloudwatch.go +++ b/pkg/tsdb/cloudwatch/cloudwatch.go @@ -71,15 +71,12 @@ func (e *CloudWatchExecutor) Query(ctx context.Context, dsInfo *models.DataSourc switch queryType { case "metricFindQuery": result, err = e.executeMetricFindQuery(ctx, queryContext) - break case "annotationQuery": result, err = e.executeAnnotationQuery(ctx, queryContext) - break case "timeSeriesQuery": fallthrough default: result, err = e.executeTimeSeriesQuery(ctx, queryContext) - break } return result, err @@ -274,7 +271,7 @@ func parseQuery(model *simplejson.Json) (*CloudWatchQuery, error) { } } - period := 300 + var period int if regexp.MustCompile(`^\d+$`).Match([]byte(p)) { period, err = strconv.Atoi(p) if err != nil { diff --git a/pkg/tsdb/cloudwatch/credentials.go b/pkg/tsdb/cloudwatch/credentials.go index 06848323fbb..8b32c76daa3 100644 --- a/pkg/tsdb/cloudwatch/credentials.go +++ b/pkg/tsdb/cloudwatch/credentials.go @@ -23,7 +23,7 @@ type cache struct { expiration *time.Time } -var awsCredentialCache map[string]cache = make(map[string]cache) +var awsCredentialCache = make(map[string]cache) var credentialCacheLock sync.RWMutex func GetCredentials(dsInfo *DatasourceInfo) (*credentials.Credentials, error) { diff --git a/pkg/tsdb/cloudwatch/metric_find_query.go b/pkg/tsdb/cloudwatch/metric_find_query.go index c82cff390c3..a7d33645b9b 100644 --- a/pkg/tsdb/cloudwatch/metric_find_query.go +++ b/pkg/tsdb/cloudwatch/metric_find_query.go @@ -175,25 +175,18 @@ func (e *CloudWatchExecutor) executeMetricFindQuery(ctx context.Context, queryCo switch subType { case "regions": data, err = e.handleGetRegions(ctx, parameters, queryContext) - break case "namespaces": data, err = e.handleGetNamespaces(ctx, parameters, queryContext) - break case "metrics": data, err = e.handleGetMetrics(ctx, parameters, queryContext) - break case "dimension_keys": data, err = e.handleGetDimensions(ctx, parameters, queryContext) - break case "dimension_values": data, err = 
e.handleGetDimensionValues(ctx, parameters, queryContext) - break case "ebs_volume_ids": data, err = e.handleGetEbsVolumeIds(ctx, parameters, queryContext) - break case "ec2_instance_attribute": data, err = e.handleGetEc2InstanceAttribute(ctx, parameters, queryContext) - break } transformToTable(data, queryResult) @@ -229,9 +222,8 @@ func parseMultiSelectValue(input string) []string { trimValues[i] = strings.TrimSpace(v) } return trimValues - } else { - return []string{trimmedInput} } + return []string{trimmedInput} } // Whenever this list is updated, frontend list should also be updated. @@ -261,7 +253,7 @@ func (e *CloudWatchExecutor) handleGetNamespaces(ctx context.Context, parameters keys = append(keys, strings.Split(customNamespaces, ",")...) } - sort.Sort(sort.StringSlice(keys)) + sort.Strings(keys) result := make([]suggestData, 0) for _, key := range keys { @@ -290,7 +282,7 @@ func (e *CloudWatchExecutor) handleGetMetrics(ctx context.Context, parameters *s return nil, errors.New("Unable to call AWS API") } } - sort.Sort(sort.StringSlice(namespaceMetrics)) + sort.Strings(namespaceMetrics) result := make([]suggestData, 0) for _, name := range namespaceMetrics { @@ -319,7 +311,7 @@ func (e *CloudWatchExecutor) handleGetDimensions(ctx context.Context, parameters return nil, errors.New("Unable to call AWS API") } } - sort.Sort(sort.StringSlice(dimensionValues)) + sort.Strings(dimensionValues) result := make([]suggestData, 0) for _, name := range dimensionValues { @@ -573,11 +565,7 @@ func getAllMetrics(cwData *DatasourceInfo) (cloudwatch.ListMetricsOutput, error) } return !lastPage }) - if err != nil { - return resp, err - } - - return resp, nil + return resp, err } var metricsCacheLock sync.Mutex diff --git a/pkg/tsdb/cloudwatch/metric_find_query_test.go b/pkg/tsdb/cloudwatch/metric_find_query_test.go index bf87e7b7d41..e3903e8027e 100644 --- a/pkg/tsdb/cloudwatch/metric_find_query_test.go +++ b/pkg/tsdb/cloudwatch/metric_find_query_test.go @@ -181,10 +181,7 @@ func TestCloudWatchMetrics(t *testing.T) { } func TestParseMultiSelectValue(t *testing.T) { - - var values []string - - values = parseMultiSelectValue(" i-someInstance ") + values := parseMultiSelectValue(" i-someInstance ") assert.Equal(t, []string{"i-someInstance"}, values) values = parseMultiSelectValue("{i-05}") diff --git a/pkg/tsdb/influxdb/model_parser.go b/pkg/tsdb/influxdb/model_parser.go index deb2f15e3ce..f1113511bae 100644 --- a/pkg/tsdb/influxdb/model_parser.go +++ b/pkg/tsdb/influxdb/model_parser.go @@ -40,6 +40,9 @@ func (qp *InfluxdbQueryParser) Parse(model *simplejson.Json, dsInfo *models.Data } parsedInterval, err := tsdb.GetIntervalFrom(dsInfo, model, time.Millisecond*1) + if err != nil { + return nil, err + } return &Query{ Measurement: measurement, diff --git a/pkg/tsdb/influxdb/query.go b/pkg/tsdb/influxdb/query.go index 0a16a507877..0637a5bbb44 100644 --- a/pkg/tsdb/influxdb/query.go +++ b/pkg/tsdb/influxdb/query.go @@ -12,8 +12,8 @@ import ( ) var ( - regexpOperatorPattern *regexp.Regexp = regexp.MustCompile(`^\/.*\/$`) - regexpMeasurementPattern *regexp.Regexp = regexp.MustCompile(`^\/.*\/$`) + regexpOperatorPattern = regexp.MustCompile(`^\/.*\/$`) + regexpMeasurementPattern = regexp.MustCompile(`^\/.*\/$`) ) func (query *Query) Build(queryContext *tsdb.TsdbQuery) (string, error) { @@ -62,9 +62,8 @@ func (query *Query) renderTags() []string { } } - textValue := "" - // quote value unless regex or number + var textValue string if tag.Operator == "=~" || tag.Operator == "!~" { textValue = tag.Value } else 
if tag.Operator == "<" || tag.Operator == ">" { @@ -107,7 +106,7 @@ func (query *Query) renderSelectors(queryContext *tsdb.TsdbQuery) string { } func (query *Query) renderMeasurement() string { - policy := "" + var policy string if query.Policy == "" || query.Policy == "default" { policy = "" } else { diff --git a/pkg/tsdb/influxdb/response_parser_test.go b/pkg/tsdb/influxdb/response_parser_test.go index a517cf4d71f..d8ec6e145c7 100644 --- a/pkg/tsdb/influxdb/response_parser_test.go +++ b/pkg/tsdb/influxdb/response_parser_test.go @@ -13,7 +13,8 @@ func TestInfluxdbResponseParser(t *testing.T) { Convey("Response parser", func() { parser := &ResponseParser{} - setting.NewConfigContext(&setting.CommandLineArgs{ + cfg := setting.NewCfg() + cfg.Load(&setting.CommandLineArgs{ HomePath: "../../../", }) diff --git a/pkg/tsdb/interval.go b/pkg/tsdb/interval.go index e26d39f3986..49904f27a37 100644 --- a/pkg/tsdb/interval.go +++ b/pkg/tsdb/interval.go @@ -10,10 +10,10 @@ import ( ) var ( - defaultRes int64 = 1500 - defaultMinInterval time.Duration = 1 * time.Millisecond - year time.Duration = time.Hour * 24 * 365 - day time.Duration = time.Hour * 24 + defaultRes int64 = 1500 + defaultMinInterval = time.Millisecond * 1 + year = time.Hour * 24 * 365 + day = time.Hour * 24 ) type Interval struct { diff --git a/pkg/tsdb/interval_test.go b/pkg/tsdb/interval_test.go index 1e36e5428fe..941b08dd554 100644 --- a/pkg/tsdb/interval_test.go +++ b/pkg/tsdb/interval_test.go @@ -10,7 +10,8 @@ import ( func TestInterval(t *testing.T) { Convey("Default interval ", t, func() { - setting.NewConfigContext(&setting.CommandLineArgs{ + cfg := setting.NewCfg() + cfg.Load(&setting.CommandLineArgs{ HomePath: "../../", }) diff --git a/pkg/tsdb/mssql/macros.go b/pkg/tsdb/mssql/macros.go index 9d41cd03255..bb9489cd654 100644 --- a/pkg/tsdb/mssql/macros.go +++ b/pkg/tsdb/mssql/macros.go @@ -82,11 +82,11 @@ func (m *MsSqlMacroEngine) evaluateMacro(name string, args []string) (string, er if len(args) == 0 { return "", fmt.Errorf("missing time column argument for macro %v", name) } - return fmt.Sprintf("%s >= DATEADD(s, %d, '1970-01-01') AND %s <= DATEADD(s, %d, '1970-01-01')", args[0], uint64(m.TimeRange.GetFromAsMsEpoch()/1000), args[0], uint64(m.TimeRange.GetToAsMsEpoch()/1000)), nil + return fmt.Sprintf("%s >= DATEADD(s, %d, '1970-01-01') AND %s <= DATEADD(s, %d, '1970-01-01')", args[0], m.TimeRange.GetFromAsSecondsEpoch(), args[0], m.TimeRange.GetToAsSecondsEpoch()), nil case "__timeFrom": - return fmt.Sprintf("DATEADD(second, %d, '1970-01-01')", uint64(m.TimeRange.GetFromAsMsEpoch()/1000)), nil + return fmt.Sprintf("DATEADD(second, %d, '1970-01-01')", m.TimeRange.GetFromAsSecondsEpoch()), nil case "__timeTo": - return fmt.Sprintf("DATEADD(second, %d, '1970-01-01')", uint64(m.TimeRange.GetToAsMsEpoch()/1000)), nil + return fmt.Sprintf("DATEADD(second, %d, '1970-01-01')", m.TimeRange.GetToAsSecondsEpoch()), nil case "__timeGroup": if len(args) < 2 { return "", fmt.Errorf("macro %v needs time column and interval", name) @@ -113,11 +113,11 @@ func (m *MsSqlMacroEngine) evaluateMacro(name string, args []string) (string, er if len(args) == 0 { return "", fmt.Errorf("missing time column argument for macro %v", name) } - return fmt.Sprintf("%s >= %d AND %s <= %d", args[0], uint64(m.TimeRange.GetFromAsMsEpoch()/1000), args[0], uint64(m.TimeRange.GetToAsMsEpoch()/1000)), nil + return fmt.Sprintf("%s >= %d AND %s <= %d", args[0], m.TimeRange.GetFromAsSecondsEpoch(), args[0], m.TimeRange.GetToAsSecondsEpoch()), nil case 
"__unixEpochFrom": - return fmt.Sprintf("%d", uint64(m.TimeRange.GetFromAsMsEpoch()/1000)), nil + return fmt.Sprintf("%d", m.TimeRange.GetFromAsSecondsEpoch()), nil case "__unixEpochTo": - return fmt.Sprintf("%d", uint64(m.TimeRange.GetToAsMsEpoch()/1000)), nil + return fmt.Sprintf("%d", m.TimeRange.GetToAsSecondsEpoch()), nil default: return "", fmt.Errorf("Unknown macro %v", name) } diff --git a/pkg/tsdb/mssql/macros_test.go b/pkg/tsdb/mssql/macros_test.go index 12a9b0d82be..ae0d4f67d2b 100644 --- a/pkg/tsdb/mssql/macros_test.go +++ b/pkg/tsdb/mssql/macros_test.go @@ -1,6 +1,8 @@ package mssql import ( + "fmt" + "strconv" "testing" "time" @@ -13,112 +15,213 @@ import ( func TestMacroEngine(t *testing.T) { Convey("MacroEngine", t, func() { engine := &MsSqlMacroEngine{} - timeRange := &tsdb.TimeRange{From: "5m", To: "now"} query := &tsdb.Query{ Model: simplejson.New(), } - Convey("interpolate __time function", func() { - sql, err := engine.Interpolate(query, nil, "select $__time(time_column)") - So(err, ShouldBeNil) + Convey("Given a time range between 2018-04-12 00:00 and 2018-04-12 00:05", func() { + from := time.Date(2018, 4, 12, 18, 0, 0, 0, time.UTC) + to := from.Add(5 * time.Minute) + timeRange := tsdb.NewFakeTimeRange("5m", "now", to) - So(sql, ShouldEqual, "select time_column AS time") + Convey("interpolate __time function", func() { + sql, err := engine.Interpolate(query, nil, "select $__time(time_column)") + So(err, ShouldBeNil) + + So(sql, ShouldEqual, "select time_column AS time") + }) + + Convey("interpolate __timeEpoch function", func() { + sql, err := engine.Interpolate(query, nil, "select $__timeEpoch(time_column)") + So(err, ShouldBeNil) + + So(sql, ShouldEqual, "select DATEDIFF(second, '1970-01-01', time_column) AS time") + }) + + Convey("interpolate __timeEpoch function wrapped in aggregation", func() { + sql, err := engine.Interpolate(query, nil, "select min($__timeEpoch(time_column))") + So(err, ShouldBeNil) + + So(sql, ShouldEqual, "select min(DATEDIFF(second, '1970-01-01', time_column) AS time)") + }) + + Convey("interpolate __timeFilter function", func() { + sql, err := engine.Interpolate(query, timeRange, "WHERE $__timeFilter(time_column)") + So(err, ShouldBeNil) + + So(sql, ShouldEqual, fmt.Sprintf("WHERE time_column >= DATEADD(s, %d, '1970-01-01') AND time_column <= DATEADD(s, %d, '1970-01-01')", from.Unix(), to.Unix())) + }) + + Convey("interpolate __timeGroup function", func() { + sql, err := engine.Interpolate(query, timeRange, "GROUP BY $__timeGroup(time_column,'5m')") + So(err, ShouldBeNil) + + So(sql, ShouldEqual, "GROUP BY CAST(ROUND(DATEDIFF(second, '1970-01-01', time_column)/300.0, 0) as bigint)*300") + }) + + Convey("interpolate __timeGroup function with spaces around arguments", func() { + sql, err := engine.Interpolate(query, timeRange, "GROUP BY $__timeGroup(time_column , '5m')") + So(err, ShouldBeNil) + + So(sql, ShouldEqual, "GROUP BY CAST(ROUND(DATEDIFF(second, '1970-01-01', time_column)/300.0, 0) as bigint)*300") + }) + + Convey("interpolate __timeGroup function with fill (value = NULL)", func() { + _, err := engine.Interpolate(query, timeRange, "GROUP BY $__timeGroup(time_column,'5m', NULL)") + + fill := query.Model.Get("fill").MustBool() + fillNull := query.Model.Get("fillNull").MustBool() + fillInterval := query.Model.Get("fillInterval").MustInt() + + So(err, ShouldBeNil) + So(fill, ShouldBeTrue) + So(fillNull, ShouldBeTrue) + So(fillInterval, ShouldEqual, 5*time.Minute.Seconds()) + }) + + Convey("interpolate __timeGroup function with fill 
(value = float)", func() { + _, err := engine.Interpolate(query, timeRange, "GROUP BY $__timeGroup(time_column,'5m', 1.5)") + + fill := query.Model.Get("fill").MustBool() + fillValue := query.Model.Get("fillValue").MustFloat64() + fillInterval := query.Model.Get("fillInterval").MustInt() + + So(err, ShouldBeNil) + So(fill, ShouldBeTrue) + So(fillValue, ShouldEqual, 1.5) + So(fillInterval, ShouldEqual, 5*time.Minute.Seconds()) + }) + + Convey("interpolate __timeFrom function", func() { + sql, err := engine.Interpolate(query, timeRange, "select $__timeFrom(time_column)") + So(err, ShouldBeNil) + + So(sql, ShouldEqual, fmt.Sprintf("select DATEADD(second, %d, '1970-01-01')", from.Unix())) + }) + + Convey("interpolate __timeTo function", func() { + sql, err := engine.Interpolate(query, timeRange, "select $__timeTo(time_column)") + So(err, ShouldBeNil) + + So(sql, ShouldEqual, fmt.Sprintf("select DATEADD(second, %d, '1970-01-01')", to.Unix())) + }) + + Convey("interpolate __unixEpochFilter function", func() { + sql, err := engine.Interpolate(query, timeRange, "select $__unixEpochFilter(time_column)") + So(err, ShouldBeNil) + + So(sql, ShouldEqual, fmt.Sprintf("select time_column >= %d AND time_column <= %d", from.Unix(), to.Unix())) + }) + + Convey("interpolate __unixEpochFrom function", func() { + sql, err := engine.Interpolate(query, timeRange, "select $__unixEpochFrom()") + So(err, ShouldBeNil) + + So(sql, ShouldEqual, fmt.Sprintf("select %d", from.Unix())) + }) + + Convey("interpolate __unixEpochTo function", func() { + sql, err := engine.Interpolate(query, timeRange, "select $__unixEpochTo()") + So(err, ShouldBeNil) + + So(sql, ShouldEqual, fmt.Sprintf("select %d", to.Unix())) + }) }) - Convey("interpolate __timeEpoch function", func() { - sql, err := engine.Interpolate(query, nil, "select $__timeEpoch(time_column)") - So(err, ShouldBeNil) + Convey("Given a time range between 1960-02-01 07:00 and 1965-02-03 08:00", func() { + from := time.Date(1960, 2, 1, 7, 0, 0, 0, time.UTC) + to := time.Date(1965, 2, 3, 8, 0, 0, 0, time.UTC) + timeRange := tsdb.NewTimeRange(strconv.FormatInt(from.UnixNano()/int64(time.Millisecond), 10), strconv.FormatInt(to.UnixNano()/int64(time.Millisecond), 10)) - So(sql, ShouldEqual, "select DATEDIFF(second, '1970-01-01', time_column) AS time") + Convey("interpolate __timeFilter function", func() { + sql, err := engine.Interpolate(query, timeRange, "WHERE $__timeFilter(time_column)") + So(err, ShouldBeNil) + + So(sql, ShouldEqual, fmt.Sprintf("WHERE time_column >= DATEADD(s, %d, '1970-01-01') AND time_column <= DATEADD(s, %d, '1970-01-01')", from.Unix(), to.Unix())) + }) + + Convey("interpolate __timeFrom function", func() { + sql, err := engine.Interpolate(query, timeRange, "select $__timeFrom(time_column)") + So(err, ShouldBeNil) + + So(sql, ShouldEqual, fmt.Sprintf("select DATEADD(second, %d, '1970-01-01')", from.Unix())) + }) + + Convey("interpolate __timeTo function", func() { + sql, err := engine.Interpolate(query, timeRange, "select $__timeTo(time_column)") + So(err, ShouldBeNil) + + So(sql, ShouldEqual, fmt.Sprintf("select DATEADD(second, %d, '1970-01-01')", to.Unix())) + }) + + Convey("interpolate __unixEpochFilter function", func() { + sql, err := engine.Interpolate(query, timeRange, "select $__unixEpochFilter(time_column)") + So(err, ShouldBeNil) + + So(sql, ShouldEqual, fmt.Sprintf("select time_column >= %d AND time_column <= %d", from.Unix(), to.Unix())) + }) + + Convey("interpolate __unixEpochFrom function", func() { + sql, err := 
engine.Interpolate(query, timeRange, "select $__unixEpochFrom()") + So(err, ShouldBeNil) + + So(sql, ShouldEqual, fmt.Sprintf("select %d", from.Unix())) + }) + + Convey("interpolate __unixEpochTo function", func() { + sql, err := engine.Interpolate(query, timeRange, "select $__unixEpochTo()") + So(err, ShouldBeNil) + + So(sql, ShouldEqual, fmt.Sprintf("select %d", to.Unix())) + }) }) - Convey("interpolate __timeEpoch function wrapped in aggregation", func() { - sql, err := engine.Interpolate(query, nil, "select min($__timeEpoch(time_column))") - So(err, ShouldBeNil) + Convey("Given a time range between 1960-02-01 07:00 and 1980-02-03 08:00", func() { + from := time.Date(1960, 2, 1, 7, 0, 0, 0, time.UTC) + to := time.Date(1980, 2, 3, 8, 0, 0, 0, time.UTC) + timeRange := tsdb.NewTimeRange(strconv.FormatInt(from.UnixNano()/int64(time.Millisecond), 10), strconv.FormatInt(to.UnixNano()/int64(time.Millisecond), 10)) - So(sql, ShouldEqual, "select min(DATEDIFF(second, '1970-01-01', time_column) AS time)") - }) + Convey("interpolate __timeFilter function", func() { + sql, err := engine.Interpolate(query, timeRange, "WHERE $__timeFilter(time_column)") + So(err, ShouldBeNil) - Convey("interpolate __timeFilter function", func() { - sql, err := engine.Interpolate(query, timeRange, "WHERE $__timeFilter(time_column)") - So(err, ShouldBeNil) + So(sql, ShouldEqual, fmt.Sprintf("WHERE time_column >= DATEADD(s, %d, '1970-01-01') AND time_column <= DATEADD(s, %d, '1970-01-01')", from.Unix(), to.Unix())) + }) - So(sql, ShouldEqual, "WHERE time_column >= DATEADD(s, 18446744066914186738, '1970-01-01') AND time_column <= DATEADD(s, 18446744066914187038, '1970-01-01')") - }) + Convey("interpolate __timeFrom function", func() { + sql, err := engine.Interpolate(query, timeRange, "select $__timeFrom(time_column)") + So(err, ShouldBeNil) - Convey("interpolate __timeGroup function", func() { - sql, err := engine.Interpolate(query, timeRange, "GROUP BY $__timeGroup(time_column,'5m')") - So(err, ShouldBeNil) + So(sql, ShouldEqual, fmt.Sprintf("select DATEADD(second, %d, '1970-01-01')", from.Unix())) + }) - So(sql, ShouldEqual, "GROUP BY CAST(ROUND(DATEDIFF(second, '1970-01-01', time_column)/300.0, 0) as bigint)*300") - }) + Convey("interpolate __timeTo function", func() { + sql, err := engine.Interpolate(query, timeRange, "select $__timeTo(time_column)") + So(err, ShouldBeNil) - Convey("interpolate __timeGroup function with spaces around arguments", func() { - sql, err := engine.Interpolate(query, timeRange, "GROUP BY $__timeGroup(time_column , '5m')") - So(err, ShouldBeNil) + So(sql, ShouldEqual, fmt.Sprintf("select DATEADD(second, %d, '1970-01-01')", to.Unix())) + }) - So(sql, ShouldEqual, "GROUP BY CAST(ROUND(DATEDIFF(second, '1970-01-01', time_column)/300.0, 0) as bigint)*300") - }) + Convey("interpolate __unixEpochFilter function", func() { + sql, err := engine.Interpolate(query, timeRange, "select $__unixEpochFilter(time_column)") + So(err, ShouldBeNil) - Convey("interpolate __timeGroup function with fill (value = NULL)", func() { - _, err := engine.Interpolate(query, timeRange, "GROUP BY $__timeGroup(time_column,'5m', NULL)") + So(sql, ShouldEqual, fmt.Sprintf("select time_column >= %d AND time_column <= %d", from.Unix(), to.Unix())) + }) - fill := query.Model.Get("fill").MustBool() - fillNull := query.Model.Get("fillNull").MustBool() - fillInterval := query.Model.Get("fillInterval").MustInt() + Convey("interpolate __unixEpochFrom function", func() { + sql, err := engine.Interpolate(query, timeRange, "select 
$__unixEpochFrom()") + So(err, ShouldBeNil) - So(err, ShouldBeNil) - So(fill, ShouldBeTrue) - So(fillNull, ShouldBeTrue) - So(fillInterval, ShouldEqual, 5*time.Minute.Seconds()) - }) + So(sql, ShouldEqual, fmt.Sprintf("select %d", from.Unix())) + }) - Convey("interpolate __timeGroup function with fill (value = float)", func() { - _, err := engine.Interpolate(query, timeRange, "GROUP BY $__timeGroup(time_column,'5m', 1.5)") + Convey("interpolate __unixEpochTo function", func() { + sql, err := engine.Interpolate(query, timeRange, "select $__unixEpochTo()") + So(err, ShouldBeNil) - fill := query.Model.Get("fill").MustBool() - fillValue := query.Model.Get("fillValue").MustFloat64() - fillInterval := query.Model.Get("fillInterval").MustInt() - - So(err, ShouldBeNil) - So(fill, ShouldBeTrue) - So(fillValue, ShouldEqual, 1.5) - So(fillInterval, ShouldEqual, 5*time.Minute.Seconds()) - }) - - Convey("interpolate __timeFrom function", func() { - sql, err := engine.Interpolate(query, timeRange, "select $__timeFrom(time_column)") - So(err, ShouldBeNil) - - So(sql, ShouldEqual, "select DATEADD(second, 18446744066914186738, '1970-01-01')") - }) - - Convey("interpolate __timeTo function", func() { - sql, err := engine.Interpolate(query, timeRange, "select $__timeTo(time_column)") - So(err, ShouldBeNil) - - So(sql, ShouldEqual, "select DATEADD(second, 18446744066914187038, '1970-01-01')") - }) - - Convey("interpolate __unixEpochFilter function", func() { - sql, err := engine.Interpolate(query, timeRange, "select $__unixEpochFilter(time_column)") - So(err, ShouldBeNil) - - So(sql, ShouldEqual, "select time_column >= 18446744066914186738 AND time_column <= 18446744066914187038") - }) - - Convey("interpolate __unixEpochFrom function", func() { - sql, err := engine.Interpolate(query, timeRange, "select $__unixEpochFrom()") - So(err, ShouldBeNil) - - So(sql, ShouldEqual, "select 18446744066914186738") - }) - - Convey("interpolate __unixEpochTo function", func() { - sql, err := engine.Interpolate(query, timeRange, "select $__unixEpochTo()") - So(err, ShouldBeNil) - - So(sql, ShouldEqual, "select 18446744066914187038") + So(sql, ShouldEqual, fmt.Sprintf("select %d", to.Unix())) + }) }) }) } diff --git a/pkg/tsdb/mssql/mssql.go b/pkg/tsdb/mssql/mssql.go index 2638fd8bb40..eb71259b46b 100644 --- a/pkg/tsdb/mssql/mssql.go +++ b/pkg/tsdb/mssql/mssql.go @@ -8,8 +8,6 @@ import ( "strconv" "strings" - "time" - "math" _ "github.com/denisenkom/go-mssqldb" @@ -147,7 +145,7 @@ func (e MssqlQueryEndpoint) getTypedRowData(types []*sql.ColumnType, rows *core. // convert types not handled by denisenkom/go-mssqldb // unhandled types are returned as []byte for i := 0; i < len(types); i++ { - if value, ok := values[i].([]byte); ok == true { + if value, ok := values[i].([]byte); ok { switch types[i].DatabaseTypeName() { case "MONEY", "SMALLMONEY", "DECIMAL": if v, err := strconv.ParseFloat(string(value), 64); err == nil { @@ -211,7 +209,7 @@ func (e MssqlQueryEndpoint) transformToTimeSeries(query *tsdb.Query, rows *core. fillValue := null.Float{} if fillMissing { fillInterval = query.Model.Get("fillInterval").MustFloat64() * 1000 - if query.Model.Get("fillNull").MustBool(false) == false { + if !query.Model.Get("fillNull").MustBool(false) { fillValue.Float64 = query.Model.Get("fillValue").MustFloat64() fillValue.Valid = true } @@ -231,19 +229,22 @@ func (e MssqlQueryEndpoint) transformToTimeSeries(query *tsdb.Query, rows *core. 
return err
 		}

+		// converts the column named time to a unix timestamp in milliseconds to make
+		// native mssql datetime types and epoch dates work in
+		// annotation and table queries.
+		tsdb.ConvertSqlTimeColumnToEpochMs(values, timeIndex)
+
 		switch columnValue := values[timeIndex].(type) {
 		case int64:
-			timestamp = float64(columnValue * 1000)
+			timestamp = float64(columnValue)
 		case float64:
-			timestamp = columnValue * 1000
-		case time.Time:
-			timestamp = (float64(columnValue.Unix()) * 1000) + float64(columnValue.Nanosecond()/1e6) // in case someone is trying to map times beyond 2262 :D
+			timestamp = columnValue
 		default:
-			return fmt.Errorf("Invalid type for column time, must be of type timestamp or unix timestamp")
+			return fmt.Errorf("Invalid type for column time, must be of type timestamp or unix timestamp, got: %T %v", columnValue, columnValue)
 		}

 		if metricIndex >= 0 {
-			if columnValue, ok := values[metricIndex].(string); ok == true {
+			if columnValue, ok := values[metricIndex].(string); ok {
 				metric = columnValue
 			} else {
 				return fmt.Errorf("Column metric must be of type CHAR, VARCHAR, NCHAR or NVARCHAR. metric column name: %s type: %s but datatype is %T", columnNames[metricIndex], columnTypes[metricIndex].DatabaseTypeName(), values[metricIndex])
@@ -255,22 +256,16 @@ func (e MssqlQueryEndpoint) transformToTimeSeries(query *tsdb.Query, rows *core.
 				continue
 			}

-			switch columnValue := values[i].(type) {
-			case int64:
-				value = null.FloatFrom(float64(columnValue))
-			case float64:
-				value = null.FloatFrom(columnValue)
-			case nil:
-				value.Valid = false
-			default:
-				return fmt.Errorf("Value column must have numeric datatype, column: %s type: %T value: %v", col, columnValue, columnValue)
+			if value, err = tsdb.ConvertSqlValueColumnToFloat(col, values[i]); err != nil {
+				return err
 			}
+
 			if metricIndex == -1 {
 				metric = col
 			}

 			series, exist := pointsBySeries[metric]
-			if exist == false {
+			if !exist {
 				series = &tsdb.TimeSeries{Name: metric}
 				pointsBySeries[metric] = series
 				seriesByQueryOrder.PushBack(metric)
@@ -278,7 +273,7 @@
 			if fillMissing {
 				var intervalStart float64
-				if exist == false {
+				if !exist {
 					intervalStart = float64(tsdbQuery.TimeRange.MustGetFrom().UnixNano() / 1e6)
 				} else {
 					intervalStart = series.Points[len(series.Points)-1][1].Float64 + fillInterval
diff --git a/pkg/tsdb/mssql/mssql_test.go b/pkg/tsdb/mssql/mssql_test.go
index 4bd1e3a8ad7..e62d30a6325 100644
--- a/pkg/tsdb/mssql/mssql_test.go
+++ b/pkg/tsdb/mssql/mssql_test.go
@@ -16,13 +16,13 @@ import (
 )

 // To run this test, remove the Skip from SkipConvey
-// and set up a MSSQL db named grafanatest and a user/password grafana/Password!
+// The tests require an MSSQL db named grafanatest and a user/password grafana/Password!
 // Use the docker/blocks/mssql_tests/docker-compose.yaml to spin up a
 // preconfigured MSSQL server suitable for running these tests.
-// Thers's also a dashboard.json in same directory that you can import to Grafana
+// There is also a dashboard.json in the same directory that you can import to Grafana
 // once you've created a datasource for the test server/database.
 // If needed, change the variable below to the IP address of the database.
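 // For example, assuming Docker Compose is available on the machine running
 // the tests, a setup along these lines should bring up the preconfigured
 // server from the repository root:
 //
 //   docker-compose -f docker/blocks/mssql_tests/docker-compose.yaml up -d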
-var serverIP string = "localhost" +var serverIP = "localhost" func TestMSSQL(t *testing.T) { SkipConvey("MSSQL", t, func() { @@ -188,10 +188,8 @@ func TestMSSQL(t *testing.T) { }) } - for _, s := range series { - _, err = sess.Insert(s) - So(err, ShouldBeNil) - } + _, err = sess.InsertMulti(series) + So(err, ShouldBeNil) Convey("When doing a metric query using timeGroup", func() { query := &tsdb.TsdbQuery{ @@ -312,10 +310,18 @@ func TestMSSQL(t *testing.T) { Convey("Given a table with metrics having multiple values and measurements", func() { type metric_values struct { - Time time.Time - Measurement string - ValueOne int64 `xorm:"integer 'valueOne'"` - ValueTwo int64 `xorm:"integer 'valueTwo'"` + Time time.Time + TimeInt64 int64 `xorm:"bigint 'timeInt64' not null"` + TimeInt64Nullable *int64 `xorm:"bigint 'timeInt64Nullable' null"` + TimeFloat64 float64 `xorm:"float 'timeFloat64' not null"` + TimeFloat64Nullable *float64 `xorm:"float 'timeFloat64Nullable' null"` + TimeInt32 int32 `xorm:"int(11) 'timeInt32' not null"` + TimeInt32Nullable *int32 `xorm:"int(11) 'timeInt32Nullable' null"` + TimeFloat32 float32 `xorm:"float(11) 'timeFloat32' not null"` + TimeFloat32Nullable *float32 `xorm:"float(11) 'timeFloat32Nullable' null"` + Measurement string + ValueOne int64 `xorm:"integer 'valueOne'"` + ValueTwo int64 `xorm:"integer 'valueTwo'"` } if exist, err := sess.IsTableExist(metric_values{}); err != nil || exist { @@ -330,26 +336,219 @@ func TestMSSQL(t *testing.T) { return rand.Int63n(max-min) + min } + var tInitial time.Time + series := []*metric_values{} - for _, t := range genTimeRangeByInterval(fromStart.Add(-30*time.Minute), 90*time.Minute, 5*time.Minute) { - series = append(series, &metric_values{ - Time: t, - Measurement: "Metric A", - ValueOne: rnd(0, 100), - ValueTwo: rnd(0, 100), - }) - series = append(series, &metric_values{ - Time: t, - Measurement: "Metric B", - ValueOne: rnd(0, 100), - ValueTwo: rnd(0, 100), - }) + for i, t := range genTimeRangeByInterval(fromStart.Add(-30*time.Minute), 90*time.Minute, 5*time.Minute) { + if i == 0 { + tInitial = t + } + tSeconds := t.Unix() + tSecondsInt32 := int32(tSeconds) + tSecondsFloat32 := float32(tSeconds) + tMilliseconds := tSeconds * 1e3 + tMillisecondsFloat := float64(tMilliseconds) + first := metric_values{ + Time: t, + TimeInt64: tMilliseconds, + TimeInt64Nullable: &(tMilliseconds), + TimeFloat64: tMillisecondsFloat, + TimeFloat64Nullable: &tMillisecondsFloat, + TimeInt32: tSecondsInt32, + TimeInt32Nullable: &tSecondsInt32, + TimeFloat32: tSecondsFloat32, + TimeFloat32Nullable: &tSecondsFloat32, + Measurement: "Metric A", + ValueOne: rnd(0, 100), + ValueTwo: rnd(0, 100), + } + second := first + second.Measurement = "Metric B" + second.ValueOne = rnd(0, 100) + second.ValueTwo = rnd(0, 100) + + series = append(series, &first) + series = append(series, &second) } - for _, s := range series { - _, err = sess.Insert(s) + _, err = sess.InsertMulti(series) + So(err, ShouldBeNil) + + Convey("When doing a metric query using epoch (int64) as time column and value column (int64) should return metric with time in milliseconds", func() { + query := &tsdb.TsdbQuery{ + Queries: []*tsdb.Query{ + { + Model: simplejson.NewFromAny(map[string]interface{}{ + "rawSql": `SELECT TOP 1 timeInt64 as time, timeInt64 FROM metric_values ORDER BY time`, + "format": "time_series", + }), + RefId: "A", + }, + }, + } + + resp, err := endpoint.Query(nil, nil, query) So(err, ShouldBeNil) - } + queryResult := resp.Results["A"] + So(queryResult.Error, ShouldBeNil) + + 
So(len(queryResult.Series), ShouldEqual, 1) + So(queryResult.Series[0].Points[0][1].Float64, ShouldEqual, float64(tInitial.UnixNano()/1e6)) + }) + + Convey("When doing a metric query using epoch (int64 nullable) as time column and value column (int64 nullable) should return metric with time in milliseconds", func() { + query := &tsdb.TsdbQuery{ + Queries: []*tsdb.Query{ + { + Model: simplejson.NewFromAny(map[string]interface{}{ + "rawSql": `SELECT TOP 1 timeInt64Nullable as time, timeInt64Nullable FROM metric_values ORDER BY time`, + "format": "time_series", + }), + RefId: "A", + }, + }, + } + + resp, err := endpoint.Query(nil, nil, query) + So(err, ShouldBeNil) + queryResult := resp.Results["A"] + So(queryResult.Error, ShouldBeNil) + + So(len(queryResult.Series), ShouldEqual, 1) + So(queryResult.Series[0].Points[0][1].Float64, ShouldEqual, float64(tInitial.UnixNano()/1e6)) + }) + + Convey("When doing a metric query using epoch (float64) as time column and value column (float64) should return metric with time in milliseconds", func() { + query := &tsdb.TsdbQuery{ + Queries: []*tsdb.Query{ + { + Model: simplejson.NewFromAny(map[string]interface{}{ + "rawSql": `SELECT TOP 1 timeFloat64 as time, timeFloat64 FROM metric_values ORDER BY time`, + "format": "time_series", + }), + RefId: "A", + }, + }, + } + + resp, err := endpoint.Query(nil, nil, query) + So(err, ShouldBeNil) + queryResult := resp.Results["A"] + So(queryResult.Error, ShouldBeNil) + + So(len(queryResult.Series), ShouldEqual, 1) + So(queryResult.Series[0].Points[0][1].Float64, ShouldEqual, float64(tInitial.UnixNano()/1e6)) + }) + + Convey("When doing a metric query using epoch (float64 nullable) as time column and value column (float64 nullable) should return metric with time in milliseconds", func() { + query := &tsdb.TsdbQuery{ + Queries: []*tsdb.Query{ + { + Model: simplejson.NewFromAny(map[string]interface{}{ + "rawSql": `SELECT TOP 1 timeFloat64Nullable as time, timeFloat64Nullable FROM metric_values ORDER BY time`, + "format": "time_series", + }), + RefId: "A", + }, + }, + } + + resp, err := endpoint.Query(nil, nil, query) + So(err, ShouldBeNil) + queryResult := resp.Results["A"] + So(queryResult.Error, ShouldBeNil) + + So(len(queryResult.Series), ShouldEqual, 1) + So(queryResult.Series[0].Points[0][1].Float64, ShouldEqual, float64(tInitial.UnixNano()/1e6)) + }) + + Convey("When doing a metric query using epoch (int32) as time column and value column (int32) should return metric with time in milliseconds", func() { + query := &tsdb.TsdbQuery{ + Queries: []*tsdb.Query{ + { + Model: simplejson.NewFromAny(map[string]interface{}{ + "rawSql": `SELECT TOP 1 timeInt32 as time, timeInt32 FROM metric_values ORDER BY time`, + "format": "time_series", + }), + RefId: "A", + }, + }, + } + + resp, err := endpoint.Query(nil, nil, query) + So(err, ShouldBeNil) + queryResult := resp.Results["A"] + So(queryResult.Error, ShouldBeNil) + + So(len(queryResult.Series), ShouldEqual, 1) + So(queryResult.Series[0].Points[0][1].Float64, ShouldEqual, float64(tInitial.UnixNano()/1e6)) + }) + + Convey("When doing a metric query using epoch (int32 nullable) as time column and value column (int32 nullable) should return metric with time in milliseconds", func() { + query := &tsdb.TsdbQuery{ + Queries: []*tsdb.Query{ + { + Model: simplejson.NewFromAny(map[string]interface{}{ + "rawSql": `SELECT TOP 1 timeInt32Nullable as time, timeInt32Nullable FROM metric_values ORDER BY time`, + "format": "time_series", + }), + RefId: "A", + }, + }, + } + + resp, err := 
endpoint.Query(nil, nil, query) + So(err, ShouldBeNil) + queryResult := resp.Results["A"] + So(queryResult.Error, ShouldBeNil) + + So(len(queryResult.Series), ShouldEqual, 1) + So(queryResult.Series[0].Points[0][1].Float64, ShouldEqual, float64(tInitial.UnixNano()/1e6)) + }) + + Convey("When doing a metric query using epoch (float32) as time column and value column (float32) should return metric with time in milliseconds", func() { + query := &tsdb.TsdbQuery{ + Queries: []*tsdb.Query{ + { + Model: simplejson.NewFromAny(map[string]interface{}{ + "rawSql": `SELECT TOP 1 timeFloat32 as time, timeFloat32 FROM metric_values ORDER BY time`, + "format": "time_series", + }), + RefId: "A", + }, + }, + } + + resp, err := endpoint.Query(nil, nil, query) + So(err, ShouldBeNil) + queryResult := resp.Results["A"] + So(queryResult.Error, ShouldBeNil) + + So(len(queryResult.Series), ShouldEqual, 1) + So(queryResult.Series[0].Points[0][1].Float64, ShouldEqual, float64(float64(float32(tInitial.Unix())))*1e3) + }) + + Convey("When doing a metric query using epoch (float32 nullable) as time column and value column (float32 nullable) should return metric with time in milliseconds", func() { + query := &tsdb.TsdbQuery{ + Queries: []*tsdb.Query{ + { + Model: simplejson.NewFromAny(map[string]interface{}{ + "rawSql": `SELECT TOP 1 timeFloat32Nullable as time, timeFloat32Nullable FROM metric_values ORDER BY time`, + "format": "time_series", + }), + RefId: "A", + }, + }, + } + + resp, err := endpoint.Query(nil, nil, query) + So(err, ShouldBeNil) + queryResult := resp.Results["A"] + So(queryResult.Error, ShouldBeNil) + + So(len(queryResult.Series), ShouldEqual, 1) + So(queryResult.Series[0].Points[0][1].Float64, ShouldEqual, float64(float64(float32(tInitial.Unix())))*1e3) + }) Convey("When doing a metric query grouping by time and select metric column should return correct series", func() { query := &tsdb.TsdbQuery{ @@ -476,7 +675,6 @@ func TestMSSQL(t *testing.T) { resp, err := endpoint.Query(nil, nil, query) queryResult := resp.Results["A"] So(err, ShouldBeNil) - fmt.Println("query", "sql", queryResult.Meta) So(queryResult.Error, ShouldBeNil) So(len(queryResult.Series), ShouldEqual, 4) @@ -696,7 +894,7 @@ func TestMSSQL(t *testing.T) { columns := queryResult.Tables[0].Rows[0] //Should be in milliseconds - So(columns[0].(float64), ShouldEqual, float64(dt.Unix()*1000)) + So(columns[0].(float64), ShouldEqual, float64(dt.UnixNano()/1e6)) }) Convey("When doing an annotation query with a time column in epoch second format should return ms", func() { @@ -850,15 +1048,15 @@ func TestMSSQL(t *testing.T) { func InitMSSQLTestDB(t *testing.T) *xorm.Engine { x, err := xorm.NewEngine(sqlutil.TestDB_Mssql.DriverName, strings.Replace(sqlutil.TestDB_Mssql.ConnStr, "localhost", serverIP, 1)) + if err != nil { + t.Fatalf("Failed to init mssql db %v", err) + } + x.DatabaseTZ = time.UTC x.TZLocation = time.UTC // x.ShowSQL() - if err != nil { - t.Fatalf("Failed to init mssql db %v", err) - } - return x } diff --git a/pkg/tsdb/mysql/macros.go b/pkg/tsdb/mysql/macros.go index a292f209429..fadcbe4edbc 100644 --- a/pkg/tsdb/mysql/macros.go +++ b/pkg/tsdb/mysql/macros.go @@ -77,11 +77,11 @@ func (m *MySqlMacroEngine) evaluateMacro(name string, args []string) (string, er if len(args) == 0 { return "", fmt.Errorf("missing time column argument for macro %v", name) } - return fmt.Sprintf("%s >= FROM_UNIXTIME(%d) AND %s <= FROM_UNIXTIME(%d)", args[0], uint64(m.TimeRange.GetFromAsMsEpoch()/1000), args[0], 
uint64(m.TimeRange.GetToAsMsEpoch()/1000)), nil + return fmt.Sprintf("%s >= FROM_UNIXTIME(%d) AND %s <= FROM_UNIXTIME(%d)", args[0], m.TimeRange.GetFromAsSecondsEpoch(), args[0], m.TimeRange.GetToAsSecondsEpoch()), nil case "__timeFrom": - return fmt.Sprintf("FROM_UNIXTIME(%d)", uint64(m.TimeRange.GetFromAsMsEpoch()/1000)), nil + return fmt.Sprintf("FROM_UNIXTIME(%d)", m.TimeRange.GetFromAsSecondsEpoch()), nil case "__timeTo": - return fmt.Sprintf("FROM_UNIXTIME(%d)", uint64(m.TimeRange.GetToAsMsEpoch()/1000)), nil + return fmt.Sprintf("FROM_UNIXTIME(%d)", m.TimeRange.GetToAsSecondsEpoch()), nil case "__timeGroup": if len(args) < 2 { return "", fmt.Errorf("macro %v needs time column and interval", name) @@ -108,11 +108,11 @@ func (m *MySqlMacroEngine) evaluateMacro(name string, args []string) (string, er if len(args) == 0 { return "", fmt.Errorf("missing time column argument for macro %v", name) } - return fmt.Sprintf("%s >= %d AND %s <= %d", args[0], uint64(m.TimeRange.GetFromAsMsEpoch()/1000), args[0], uint64(m.TimeRange.GetToAsMsEpoch()/1000)), nil + return fmt.Sprintf("%s >= %d AND %s <= %d", args[0], m.TimeRange.GetFromAsSecondsEpoch(), args[0], m.TimeRange.GetToAsSecondsEpoch()), nil case "__unixEpochFrom": - return fmt.Sprintf("%d", uint64(m.TimeRange.GetFromAsMsEpoch()/1000)), nil + return fmt.Sprintf("%d", m.TimeRange.GetFromAsSecondsEpoch()), nil case "__unixEpochTo": - return fmt.Sprintf("%d", uint64(m.TimeRange.GetToAsMsEpoch()/1000)), nil + return fmt.Sprintf("%d", m.TimeRange.GetToAsSecondsEpoch()), nil default: return "", fmt.Errorf("Unknown macro %v", name) } diff --git a/pkg/tsdb/mysql/macros_test.go b/pkg/tsdb/mysql/macros_test.go index a89ba16ab78..66ec143eac8 100644 --- a/pkg/tsdb/mysql/macros_test.go +++ b/pkg/tsdb/mysql/macros_test.go @@ -1,7 +1,10 @@ package mysql import ( + "fmt" + "strconv" "testing" + "time" "github.com/grafana/grafana/pkg/tsdb" . 
"github.com/smartystreets/goconvey/convey" @@ -11,79 +14,179 @@ func TestMacroEngine(t *testing.T) { Convey("MacroEngine", t, func() { engine := &MySqlMacroEngine{} query := &tsdb.Query{} - timeRange := &tsdb.TimeRange{From: "5m", To: "now"} - Convey("interpolate __time function", func() { - sql, err := engine.Interpolate(query, timeRange, "select $__time(time_column)") - So(err, ShouldBeNil) + Convey("Given a time range between 2018-04-12 00:00 and 2018-04-12 00:05", func() { + from := time.Date(2018, 4, 12, 18, 0, 0, 0, time.UTC) + to := from.Add(5 * time.Minute) + timeRange := tsdb.NewFakeTimeRange("5m", "now", to) - So(sql, ShouldEqual, "select UNIX_TIMESTAMP(time_column) as time_sec") + Convey("interpolate __time function", func() { + sql, err := engine.Interpolate(query, timeRange, "select $__time(time_column)") + So(err, ShouldBeNil) + + So(sql, ShouldEqual, "select UNIX_TIMESTAMP(time_column) as time_sec") + }) + + Convey("interpolate __time function wrapped in aggregation", func() { + sql, err := engine.Interpolate(query, timeRange, "select min($__time(time_column))") + So(err, ShouldBeNil) + + So(sql, ShouldEqual, "select min(UNIX_TIMESTAMP(time_column) as time_sec)") + }) + + Convey("interpolate __timeGroup function", func() { + + sql, err := engine.Interpolate(query, timeRange, "GROUP BY $__timeGroup(time_column,'5m')") + So(err, ShouldBeNil) + + So(sql, ShouldEqual, "GROUP BY cast(cast(UNIX_TIMESTAMP(time_column)/(300) as signed)*300 as signed)") + }) + + Convey("interpolate __timeGroup function with spaces around arguments", func() { + + sql, err := engine.Interpolate(query, timeRange, "GROUP BY $__timeGroup(time_column , '5m')") + So(err, ShouldBeNil) + + So(sql, ShouldEqual, "GROUP BY cast(cast(UNIX_TIMESTAMP(time_column)/(300) as signed)*300 as signed)") + }) + + Convey("interpolate __timeFilter function", func() { + sql, err := engine.Interpolate(query, timeRange, "WHERE $__timeFilter(time_column)") + So(err, ShouldBeNil) + + So(sql, ShouldEqual, fmt.Sprintf("WHERE time_column >= FROM_UNIXTIME(%d) AND time_column <= FROM_UNIXTIME(%d)", from.Unix(), to.Unix())) + }) + + Convey("interpolate __timeFrom function", func() { + sql, err := engine.Interpolate(query, timeRange, "select $__timeFrom(time_column)") + So(err, ShouldBeNil) + + So(sql, ShouldEqual, fmt.Sprintf("select FROM_UNIXTIME(%d)", from.Unix())) + }) + + Convey("interpolate __timeTo function", func() { + sql, err := engine.Interpolate(query, timeRange, "select $__timeTo(time_column)") + So(err, ShouldBeNil) + + So(sql, ShouldEqual, fmt.Sprintf("select FROM_UNIXTIME(%d)", to.Unix())) + }) + + Convey("interpolate __unixEpochFilter function", func() { + sql, err := engine.Interpolate(query, timeRange, "select $__unixEpochFilter(time)") + So(err, ShouldBeNil) + + So(sql, ShouldEqual, fmt.Sprintf("select time >= %d AND time <= %d", from.Unix(), to.Unix())) + }) + + Convey("interpolate __unixEpochFrom function", func() { + sql, err := engine.Interpolate(query, timeRange, "select $__unixEpochFrom()") + So(err, ShouldBeNil) + + So(sql, ShouldEqual, fmt.Sprintf("select %d", from.Unix())) + }) + + Convey("interpolate __unixEpochTo function", func() { + sql, err := engine.Interpolate(query, timeRange, "select $__unixEpochTo()") + So(err, ShouldBeNil) + + So(sql, ShouldEqual, fmt.Sprintf("select %d", to.Unix())) + }) }) - Convey("interpolate __time function wrapped in aggregation", func() { - sql, err := engine.Interpolate(query, timeRange, "select min($__time(time_column))") - So(err, ShouldBeNil) + Convey("Given a time 
range between 1960-02-01 07:00 and 1965-02-03 08:00", func() { + from := time.Date(1960, 2, 1, 7, 0, 0, 0, time.UTC) + to := time.Date(1965, 2, 3, 8, 0, 0, 0, time.UTC) + timeRange := tsdb.NewTimeRange(strconv.FormatInt(from.UnixNano()/int64(time.Millisecond), 10), strconv.FormatInt(to.UnixNano()/int64(time.Millisecond), 10)) - So(sql, ShouldEqual, "select min(UNIX_TIMESTAMP(time_column) as time_sec)") + Convey("interpolate __timeFilter function", func() { + sql, err := engine.Interpolate(query, timeRange, "WHERE $__timeFilter(time_column)") + So(err, ShouldBeNil) + + So(sql, ShouldEqual, fmt.Sprintf("WHERE time_column >= FROM_UNIXTIME(%d) AND time_column <= FROM_UNIXTIME(%d)", from.Unix(), to.Unix())) + }) + + Convey("interpolate __timeFrom function", func() { + sql, err := engine.Interpolate(query, timeRange, "select $__timeFrom(time_column)") + So(err, ShouldBeNil) + + So(sql, ShouldEqual, fmt.Sprintf("select FROM_UNIXTIME(%d)", from.Unix())) + }) + + Convey("interpolate __timeTo function", func() { + sql, err := engine.Interpolate(query, timeRange, "select $__timeTo(time_column)") + So(err, ShouldBeNil) + + So(sql, ShouldEqual, fmt.Sprintf("select FROM_UNIXTIME(%d)", to.Unix())) + }) + + Convey("interpolate __unixEpochFilter function", func() { + sql, err := engine.Interpolate(query, timeRange, "select $__unixEpochFilter(time)") + So(err, ShouldBeNil) + + So(sql, ShouldEqual, fmt.Sprintf("select time >= %d AND time <= %d", from.Unix(), to.Unix())) + }) + + Convey("interpolate __unixEpochFrom function", func() { + sql, err := engine.Interpolate(query, timeRange, "select $__unixEpochFrom()") + So(err, ShouldBeNil) + + So(sql, ShouldEqual, fmt.Sprintf("select %d", from.Unix())) + }) + + Convey("interpolate __unixEpochTo function", func() { + sql, err := engine.Interpolate(query, timeRange, "select $__unixEpochTo()") + So(err, ShouldBeNil) + + So(sql, ShouldEqual, fmt.Sprintf("select %d", to.Unix())) + }) }) - Convey("interpolate __timeFilter function", func() { - sql, err := engine.Interpolate(query, timeRange, "WHERE $__timeFilter(time_column)") - So(err, ShouldBeNil) + Convey("Given a time range between 1960-02-01 07:00 and 1980-02-03 08:00", func() { + from := time.Date(1960, 2, 1, 7, 0, 0, 0, time.UTC) + to := time.Date(1980, 2, 3, 8, 0, 0, 0, time.UTC) + timeRange := tsdb.NewTimeRange(strconv.FormatInt(from.UnixNano()/int64(time.Millisecond), 10), strconv.FormatInt(to.UnixNano()/int64(time.Millisecond), 10)) - So(sql, ShouldEqual, "WHERE time_column >= FROM_UNIXTIME(18446744066914186738) AND time_column <= FROM_UNIXTIME(18446744066914187038)") + Convey("interpolate __timeFilter function", func() { + sql, err := engine.Interpolate(query, timeRange, "WHERE $__timeFilter(time_column)") + So(err, ShouldBeNil) + + So(sql, ShouldEqual, fmt.Sprintf("WHERE time_column >= FROM_UNIXTIME(%d) AND time_column <= FROM_UNIXTIME(%d)", from.Unix(), to.Unix())) + }) + + Convey("interpolate __timeFrom function", func() { + sql, err := engine.Interpolate(query, timeRange, "select $__timeFrom(time_column)") + So(err, ShouldBeNil) + + So(sql, ShouldEqual, fmt.Sprintf("select FROM_UNIXTIME(%d)", from.Unix())) + }) + + Convey("interpolate __timeTo function", func() { + sql, err := engine.Interpolate(query, timeRange, "select $__timeTo(time_column)") + So(err, ShouldBeNil) + + So(sql, ShouldEqual, fmt.Sprintf("select FROM_UNIXTIME(%d)", to.Unix())) + }) + + Convey("interpolate __unixEpochFilter function", func() { + sql, err := engine.Interpolate(query, timeRange, "select $__unixEpochFilter(time)") + So(err, 
ShouldBeNil) + + So(sql, ShouldEqual, fmt.Sprintf("select time >= %d AND time <= %d", from.Unix(), to.Unix())) + }) + + Convey("interpolate __unixEpochFrom function", func() { + sql, err := engine.Interpolate(query, timeRange, "select $__unixEpochFrom()") + So(err, ShouldBeNil) + + So(sql, ShouldEqual, fmt.Sprintf("select %d", from.Unix())) + }) + + Convey("interpolate __unixEpochTo function", func() { + sql, err := engine.Interpolate(query, timeRange, "select $__unixEpochTo()") + So(err, ShouldBeNil) + + So(sql, ShouldEqual, fmt.Sprintf("select %d", to.Unix())) + }) }) - - Convey("interpolate __timeFrom function", func() { - sql, err := engine.Interpolate(query, timeRange, "select $__timeFrom(time_column)") - So(err, ShouldBeNil) - - So(sql, ShouldEqual, "select FROM_UNIXTIME(18446744066914186738)") - }) - - Convey("interpolate __timeGroup function", func() { - - sql, err := engine.Interpolate(query, timeRange, "GROUP BY $__timeGroup(time_column,'5m')") - So(err, ShouldBeNil) - - So(sql, ShouldEqual, "GROUP BY cast(cast(UNIX_TIMESTAMP(time_column)/(300) as signed)*300 as signed)") - }) - - Convey("interpolate __timeGroup function with spaces around arguments", func() { - - sql, err := engine.Interpolate(query, timeRange, "GROUP BY $__timeGroup(time_column , '5m')") - So(err, ShouldBeNil) - - So(sql, ShouldEqual, "GROUP BY cast(cast(UNIX_TIMESTAMP(time_column)/(300) as signed)*300 as signed)") - }) - - Convey("interpolate __timeTo function", func() { - sql, err := engine.Interpolate(query, timeRange, "select $__timeTo(time_column)") - So(err, ShouldBeNil) - - So(sql, ShouldEqual, "select FROM_UNIXTIME(18446744066914187038)") - }) - - Convey("interpolate __unixEpochFilter function", func() { - sql, err := engine.Interpolate(query, timeRange, "select $__unixEpochFilter(18446744066914186738)") - So(err, ShouldBeNil) - - So(sql, ShouldEqual, "select 18446744066914186738 >= 18446744066914186738 AND 18446744066914186738 <= 18446744066914187038") - }) - - Convey("interpolate __unixEpochFrom function", func() { - sql, err := engine.Interpolate(query, timeRange, "select $__unixEpochFrom()") - So(err, ShouldBeNil) - - So(sql, ShouldEqual, "select 18446744066914186738") - }) - - Convey("interpolate __unixEpochTo function", func() { - sql, err := engine.Interpolate(query, timeRange, "select $__unixEpochTo()") - So(err, ShouldBeNil) - - So(sql, ShouldEqual, "select 18446744066914187038") - }) - }) } diff --git a/pkg/tsdb/mysql/mysql.go b/pkg/tsdb/mysql/mysql.go index 483974c55a4..7eceaffdb09 100644 --- a/pkg/tsdb/mysql/mysql.go +++ b/pkg/tsdb/mysql/mysql.go @@ -8,7 +8,6 @@ import ( "math" "reflect" "strconv" - "time" "github.com/go-sql-driver/mysql" "github.com/go-xorm/core" @@ -219,7 +218,7 @@ func (e MysqlQueryEndpoint) transformToTimeSeries(query *tsdb.Query, rows *core. fillValue := null.Float{} if fillMissing { fillInterval = query.Model.Get("fillInterval").MustFloat64() * 1000 - if query.Model.Get("fillNull").MustBool(false) == false { + if !query.Model.Get("fillNull").MustBool(false) { fillValue.Float64 = query.Model.Get("fillValue").MustFloat64() fillValue.Valid = true } @@ -239,19 +238,22 @@ func (e MysqlQueryEndpoint) transformToTimeSeries(query *tsdb.Query, rows *core. return err } + // converts column named time to unix timestamp in milliseconds to make + // native mysql datetime types and epoch dates work in + // annotation and table queries. 
+ tsdb.ConvertSqlTimeColumnToEpochMs(values, timeIndex) + switch columnValue := values[timeIndex].(type) { case int64: - timestamp = float64(columnValue * 1000) + timestamp = float64(columnValue) case float64: - timestamp = columnValue * 1000 - case time.Time: - timestamp = float64(columnValue.UnixNano() / 1e6) + timestamp = columnValue default: - return fmt.Errorf("Invalid type for column time, must be of type timestamp or unix timestamp, got: %T %v", columnValue, columnValue) + return fmt.Errorf("Invalid type for column time/time_sec, must be of type timestamp or unix timestamp, got: %T %v", columnValue, columnValue) } if metricIndex >= 0 { - if columnValue, ok := values[metricIndex].(string); ok == true { + if columnValue, ok := values[metricIndex].(string); ok { metric = columnValue } else { return fmt.Errorf("Column metric must be of type char,varchar or text, got: %T %v", values[metricIndex], values[metricIndex]) @@ -263,22 +265,16 @@ func (e MysqlQueryEndpoint) transformToTimeSeries(query *tsdb.Query, rows *core. continue } - switch columnValue := values[i].(type) { - case int64: - value = null.FloatFrom(float64(columnValue)) - case float64: - value = null.FloatFrom(columnValue) - case nil: - value.Valid = false - default: - return fmt.Errorf("Value column must have numeric datatype, column: %s type: %T value: %v", col, columnValue, columnValue) + if value, err = tsdb.ConvertSqlValueColumnToFloat(col, values[i]); err != nil { + return err } + if metricIndex == -1 { metric = col } series, exist := pointsBySeries[metric] - if exist == false { + if !exist { series = &tsdb.TimeSeries{Name: metric} pointsBySeries[metric] = series seriesByQueryOrder.PushBack(metric) @@ -286,7 +282,7 @@ func (e MysqlQueryEndpoint) transformToTimeSeries(query *tsdb.Query, rows *core. if fillMissing { var intervalStart float64 - if exist == false { + if !exist { intervalStart = float64(tsdbQuery.TimeRange.MustGetFrom().UnixNano() / 1e6) } else { intervalStart = series.Points[len(series.Points)-1][1].Float64 + fillInterval diff --git a/pkg/tsdb/mysql/mysql_test.go b/pkg/tsdb/mysql/mysql_test.go index 750704c9965..29c5b72b408 100644 --- a/pkg/tsdb/mysql/mysql_test.go +++ b/pkg/tsdb/mysql/mysql_test.go @@ -3,25 +3,36 @@ package mysql import ( "fmt" "math/rand" + "strings" "testing" "time" "github.com/go-xorm/xorm" "github.com/grafana/grafana/pkg/components/simplejson" "github.com/grafana/grafana/pkg/log" + "github.com/grafana/grafana/pkg/services/sqlstore" "github.com/grafana/grafana/pkg/services/sqlstore/sqlutil" "github.com/grafana/grafana/pkg/tsdb" . "github.com/smartystreets/goconvey/convey" ) -// To run this test, remove the Skip from SkipConvey -// and set up a MySQL db named grafana_tests and a user/password grafana/password +// To run this test, set runMySqlTests=true +// Or from the command line: GRAFANA_TEST_DB=mysql go test -v ./pkg/tsdb/mysql +// The tests require a MySQL db named grafana_ds_tests and a user/password grafana/password // Use the docker/blocks/mysql_tests/docker-compose.yaml to spin up a // preconfigured MySQL server suitable for running these tests. -// Thers's also a dashboard.json in same directory that you can import to Grafana +// There is also a dashboard.json in the same directory that you can import to Grafana // once you've created a datasource for the test server/database.
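The runMySqlTests/GRAFANA_TEST_DB gating mentioned in the comments above relies on sqlstore.IsTestDbMySql(). A plausible minimal form of that check, stated as an assumption (the real helper lives in pkg/services/sqlstore and may differ in detail):

package sqlstoresketch // hypothetical package, for illustration only

import "os"

// isTestDbMySql reports whether the GRAFANA_TEST_DB environment variable
// selects MySQL, mirroring the gate TestMySQL uses below.
func isTestDbMySql() bool {
	if db, present := os.LookupEnv("GRAFANA_TEST_DB"); present {
		return db == "mysql"
	}
	return false
}

With a gate like this, GRAFANA_TEST_DB=mysql go test -v ./pkg/tsdb/mysql runs the suite against the dockerized database, while a plain go test ./... skips it.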
func TestMySQL(t *testing.T) { - SkipConvey("MySQL", t, func() { + // change to true to run the MySQL tests + runMySqlTests := false + // runMySqlTests := true + + if !(sqlstore.IsTestDbMySql() || runMySqlTests) { + t.Skip() + } + + Convey("MySQL", t, func() { x := InitMySQLTestDB(t) endpoint := &MysqlQueryEndpoint{ @@ -35,7 +46,7 @@ func TestMySQL(t *testing.T) { sess := x.NewSession() defer sess.Close() - fromStart := time.Date(2018, 3, 15, 13, 0, 0, 0, time.Local) + fromStart := time.Date(2018, 3, 15, 13, 0, 0, 0, time.UTC) Convey("Given a table with different native data types", func() { if exists, err := sess.IsTableExist("mysql_types"); err != nil || exists { @@ -121,9 +132,8 @@ func TestMySQL(t *testing.T) { So(column[7].(float64), ShouldEqual, 1.11) So(column[8].(float64), ShouldEqual, 2.22) So(*column[9].(*float32), ShouldEqual, 3.33) - _, offset := time.Now().Zone() - So(column[10].(time.Time), ShouldHappenWithin, time.Duration(10*time.Second), time.Now().Add(time.Duration(offset)*time.Second)) - So(column[11].(time.Time), ShouldHappenWithin, time.Duration(10*time.Second), time.Now().Add(time.Duration(offset)*time.Second)) + So(column[10].(time.Time), ShouldHappenWithin, time.Duration(10*time.Second), time.Now()) + So(column[11].(time.Time), ShouldHappenWithin, time.Duration(10*time.Second), time.Now()) So(column[12].(string), ShouldEqual, "11:11:11") So(column[13].(int64), ShouldEqual, 2018) So(*column[14].(*[]byte), ShouldHaveSameTypeAs, []byte{1}) @@ -137,8 +147,8 @@ func TestMySQL(t *testing.T) { So(column[22].(string), ShouldEqual, "longblob") So(column[23].(string), ShouldEqual, "val2") So(column[24].(string), ShouldEqual, "a,b") - So(column[25].(time.Time).Format("2006-01-02T00:00:00Z"), ShouldEqual, time.Now().Format("2006-01-02T00:00:00Z")) - So(column[26].(float64), ShouldEqual, float64(1514764861000)) + So(column[25].(time.Time).Format("2006-01-02T00:00:00Z"), ShouldEqual, time.Now().UTC().Format("2006-01-02T00:00:00Z")) + So(column[26].(float64), ShouldEqual, float64(1.514764861123456*1e12)) So(column[27], ShouldEqual, nil) So(column[28], ShouldEqual, nil) So(column[29], ShouldEqual, "") @@ -177,10 +187,8 @@ func TestMySQL(t *testing.T) { }) } - for _, s := range series { - _, err = sess.Insert(s) - So(err, ShouldBeNil) - } + _, err = sess.InsertMulti(series) + So(err, ShouldBeNil) Convey("When doing a metric query using timeGroup", func() { query := &tsdb.TsdbQuery{ @@ -301,10 +309,19 @@ func TestMySQL(t *testing.T) { Convey("Given a table with metrics having multiple values and measurements", func() { type metric_values struct { - Time time.Time - Measurement string - ValueOne int64 `xorm:"integer 'valueOne'"` - ValueTwo int64 `xorm:"integer 'valueTwo'"` + Time time.Time `xorm:"datetime 'time' not null"` + TimeNullable *time.Time `xorm:"datetime(6) 'timeNullable' null"` + TimeInt64 int64 `xorm:"bigint(20) 'timeInt64' not null"` + TimeInt64Nullable *int64 `xorm:"bigint(20) 'timeInt64Nullable' null"` + TimeFloat64 float64 `xorm:"double 'timeFloat64' not null"` + TimeFloat64Nullable *float64 `xorm:"double 'timeFloat64Nullable' null"` + TimeInt32 int32 `xorm:"int(11) 'timeInt32' not null"` + TimeInt32Nullable *int32 `xorm:"int(11) 'timeInt32Nullable' null"` + TimeFloat32 float32 `xorm:"double 'timeFloat32' not null"` + TimeFloat32Nullable *float32 `xorm:"double 'timeFloat32Nullable' null"` + Measurement string + ValueOne int64 `xorm:"integer 'valueOne'"` + ValueTwo int64 `xorm:"integer 'valueTwo'"` } if exist, err := sess.IsTableExist(metric_values{}); err != nil || 
exist { @@ -319,26 +336,265 @@ func TestMySQL(t *testing.T) { return rand.Int63n(max-min) + min } + var tInitial time.Time + series := []*metric_values{} - for _, t := range genTimeRangeByInterval(fromStart.Add(-30*time.Minute), 90*time.Minute, 5*time.Minute) { - series = append(series, &metric_values{ - Time: t, - Measurement: "Metric A", - ValueOne: rnd(0, 100), - ValueTwo: rnd(0, 100), - }) - series = append(series, &metric_values{ - Time: t, - Measurement: "Metric B", - ValueOne: rnd(0, 100), - ValueTwo: rnd(0, 100), - }) + for i, t := range genTimeRangeByInterval(fromStart.Add(-30*time.Minute), 90*time.Minute, 5*time.Minute) { + if i == 0 { + tInitial = t + } + tSeconds := t.Unix() + tSecondsInt32 := int32(tSeconds) + tSecondsFloat32 := float32(tSeconds) + tMilliseconds := tSeconds * 1e3 + tMillisecondsFloat := float64(tMilliseconds) + t2 := t + first := metric_values{ + Time: t, + TimeNullable: &t2, + TimeInt64: tMilliseconds, + TimeInt64Nullable: &(tMilliseconds), + TimeFloat64: tMillisecondsFloat, + TimeFloat64Nullable: &tMillisecondsFloat, + TimeInt32: tSecondsInt32, + TimeInt32Nullable: &tSecondsInt32, + TimeFloat32: tSecondsFloat32, + TimeFloat32Nullable: &tSecondsFloat32, + Measurement: "Metric A", + ValueOne: rnd(0, 100), + ValueTwo: rnd(0, 100), + } + second := first + second.Measurement = "Metric B" + second.ValueOne = rnd(0, 100) + second.ValueTwo = rnd(0, 100) + + series = append(series, &first) + series = append(series, &second) } - for _, s := range series { - _, err := sess.Insert(s) + _, err = sess.InsertMulti(series) + So(err, ShouldBeNil) + + Convey("When doing a metric query using time as time column should return metric with time in milliseconds", func() { + query := &tsdb.TsdbQuery{ + Queries: []*tsdb.Query{ + { + Model: simplejson.NewFromAny(map[string]interface{}{ + "rawSql": `SELECT time, valueOne FROM metric_values ORDER BY time LIMIT 1`, + "format": "time_series", + }), + RefId: "A", + }, + }, + } + + resp, err := endpoint.Query(nil, nil, query) So(err, ShouldBeNil) - } + queryResult := resp.Results["A"] + So(queryResult.Error, ShouldBeNil) + + So(len(queryResult.Series), ShouldEqual, 1) + So(queryResult.Series[0].Points[0][1].Float64, ShouldEqual, float64(tInitial.UnixNano()/1e6)) + }) + + Convey("When doing a metric query using time (nullable) as time column should return metric with time in milliseconds", func() { + query := &tsdb.TsdbQuery{ + Queries: []*tsdb.Query{ + { + Model: simplejson.NewFromAny(map[string]interface{}{ + "rawSql": `SELECT timeNullable as time, valueOne FROM metric_values ORDER BY time LIMIT 1`, + "format": "time_series", + }), + RefId: "A", + }, + }, + } + + resp, err := endpoint.Query(nil, nil, query) + So(err, ShouldBeNil) + queryResult := resp.Results["A"] + So(queryResult.Error, ShouldBeNil) + + So(len(queryResult.Series), ShouldEqual, 1) + So(queryResult.Series[0].Points[0][1].Float64, ShouldEqual, float64(tInitial.UnixNano()/1e6)) + }) + + Convey("When doing a metric query using epoch (int64) as time column and value column (int64) should return metric with time in milliseconds", func() { + query := &tsdb.TsdbQuery{ + Queries: []*tsdb.Query{ + { + Model: simplejson.NewFromAny(map[string]interface{}{ + "rawSql": `SELECT timeInt64 as time, timeInt64 FROM metric_values ORDER BY time LIMIT 1`, + "format": "time_series", + }), + RefId: "A", + }, + }, + } + + resp, err := endpoint.Query(nil, nil, query) + So(err, ShouldBeNil) + queryResult := resp.Results["A"] + So(queryResult.Error, ShouldBeNil) + + So(len(queryResult.Series), 
ShouldEqual, 1) + So(queryResult.Series[0].Points[0][1].Float64, ShouldEqual, float64(tInitial.UnixNano()/1e6)) + }) + + Convey("When doing a metric query using epoch (int64 nullable) as time column and value column (int64 nullable) should return metric with time in milliseconds", func() { + query := &tsdb.TsdbQuery{ + Queries: []*tsdb.Query{ + { + Model: simplejson.NewFromAny(map[string]interface{}{ + "rawSql": `SELECT timeInt64Nullable as time, timeInt64Nullable FROM metric_values ORDER BY time LIMIT 1`, + "format": "time_series", + }), + RefId: "A", + }, + }, + } + + resp, err := endpoint.Query(nil, nil, query) + So(err, ShouldBeNil) + queryResult := resp.Results["A"] + So(queryResult.Error, ShouldBeNil) + + So(len(queryResult.Series), ShouldEqual, 1) + So(queryResult.Series[0].Points[0][1].Float64, ShouldEqual, float64(tInitial.UnixNano()/1e6)) + }) + + Convey("When doing a metric query using epoch (float64) as time column and value column (float64) should return metric with time in milliseconds", func() { + query := &tsdb.TsdbQuery{ + Queries: []*tsdb.Query{ + { + Model: simplejson.NewFromAny(map[string]interface{}{ + "rawSql": `SELECT timeFloat64 as time, timeFloat64 FROM metric_values ORDER BY time LIMIT 1`, + "format": "time_series", + }), + RefId: "A", + }, + }, + } + + resp, err := endpoint.Query(nil, nil, query) + So(err, ShouldBeNil) + queryResult := resp.Results["A"] + So(queryResult.Error, ShouldBeNil) + + So(len(queryResult.Series), ShouldEqual, 1) + So(queryResult.Series[0].Points[0][1].Float64, ShouldEqual, float64(tInitial.UnixNano()/1e6)) + }) + + Convey("When doing a metric query using epoch (float64 nullable) as time column and value column (float64 nullable) should return metric with time in milliseconds", func() { + query := &tsdb.TsdbQuery{ + Queries: []*tsdb.Query{ + { + Model: simplejson.NewFromAny(map[string]interface{}{ + "rawSql": `SELECT timeFloat64Nullable as time, timeFloat64Nullable FROM metric_values ORDER BY time LIMIT 1`, + "format": "time_series", + }), + RefId: "A", + }, + }, + } + + resp, err := endpoint.Query(nil, nil, query) + So(err, ShouldBeNil) + queryResult := resp.Results["A"] + So(queryResult.Error, ShouldBeNil) + + So(len(queryResult.Series), ShouldEqual, 1) + So(queryResult.Series[0].Points[0][1].Float64, ShouldEqual, float64(tInitial.UnixNano()/1e6)) + }) + + Convey("When doing a metric query using epoch (int32) as time column and value column (int32) should return metric with time in milliseconds", func() { + query := &tsdb.TsdbQuery{ + Queries: []*tsdb.Query{ + { + Model: simplejson.NewFromAny(map[string]interface{}{ + "rawSql": `SELECT timeInt32 as time, timeInt32 FROM metric_values ORDER BY time LIMIT 1`, + "format": "time_series", + }), + RefId: "A", + }, + }, + } + + resp, err := endpoint.Query(nil, nil, query) + So(err, ShouldBeNil) + queryResult := resp.Results["A"] + So(queryResult.Error, ShouldBeNil) + + So(len(queryResult.Series), ShouldEqual, 1) + So(queryResult.Series[0].Points[0][1].Float64, ShouldEqual, float64(tInitial.UnixNano()/1e6)) + }) + + Convey("When doing a metric query using epoch (int32 nullable) as time column and value column (int32 nullable) should return metric with time in milliseconds", func() { + query := &tsdb.TsdbQuery{ + Queries: []*tsdb.Query{ + { + Model: simplejson.NewFromAny(map[string]interface{}{ + "rawSql": `SELECT timeInt32Nullable as time, timeInt32Nullable FROM metric_values ORDER BY time LIMIT 1`, + "format": "time_series", + }), + RefId: "A", + }, + }, + } + + resp, err :=
endpoint.Query(nil, nil, query) + So(err, ShouldBeNil) + queryResult := resp.Results["A"] + So(queryResult.Error, ShouldBeNil) + + So(len(queryResult.Series), ShouldEqual, 1) + So(queryResult.Series[0].Points[0][1].Float64, ShouldEqual, float64(tInitial.UnixNano()/1e6)) + }) + + Convey("When doing a metric query using epoch (float32) as time column and value column (float32) should return metric with time in milliseconds", func() { + query := &tsdb.TsdbQuery{ + Queries: []*tsdb.Query{ + { + Model: simplejson.NewFromAny(map[string]interface{}{ + "rawSql": `SELECT timeFloat32 as time, timeFloat32 FROM metric_values ORDER BY time LIMIT 1`, + "format": "time_series", + }), + RefId: "A", + }, + }, + } + + resp, err := endpoint.Query(nil, nil, query) + So(err, ShouldBeNil) + queryResult := resp.Results["A"] + So(queryResult.Error, ShouldBeNil) + + So(len(queryResult.Series), ShouldEqual, 1) + So(queryResult.Series[0].Points[0][1].Float64, ShouldEqual, float64(float64(float32(tInitial.Unix())))*1e3) + }) + + Convey("When doing a metric query using epoch (float32 nullable) as time column and value column (float32 nullable) should return metric with time in milliseconds", func() { + query := &tsdb.TsdbQuery{ + Queries: []*tsdb.Query{ + { + Model: simplejson.NewFromAny(map[string]interface{}{ + "rawSql": `SELECT timeFloat32Nullable as time, timeFloat32Nullable FROM metric_values ORDER BY time LIMIT 1`, + "format": "time_series", + }), + RefId: "A", + }, + }, + } + + resp, err := endpoint.Query(nil, nil, query) + So(err, ShouldBeNil) + queryResult := resp.Results["A"] + So(queryResult.Error, ShouldBeNil) + + So(len(queryResult.Series), ShouldEqual, 1) + So(queryResult.Series[0].Points[0][1].Float64, ShouldEqual, float64(float64(float32(tInitial.Unix())))*1e3) + }) Convey("When doing a metric query grouping by time and select metric column should return correct series", func() { query := &tsdb.TsdbQuery{ @@ -647,16 +903,16 @@ func TestMySQL(t *testing.T) { } func InitMySQLTestDB(t *testing.T) *xorm.Engine { - x, err := xorm.NewEngine(sqlutil.TestDB_Mysql.DriverName, sqlutil.TestDB_Mysql.ConnStr+"&parseTime=true") - x.DatabaseTZ = time.Local - x.TZLocation = time.Local - - // x.ShowSQL() - + x, err := xorm.NewEngine(sqlutil.TestDB_Mysql.DriverName, strings.Replace(sqlutil.TestDB_Mysql.ConnStr, "/grafana_tests", "/grafana_ds_tests", 1)) if err != nil { t.Fatalf("Failed to init mysql db %v", err) } + x.DatabaseTZ = time.UTC + x.TZLocation = time.UTC + + // x.ShowSQL() + return x } diff --git a/pkg/tsdb/opentsdb/opentsdb.go b/pkg/tsdb/opentsdb/opentsdb.go index 692b891eddd..16da764de54 100644 --- a/pkg/tsdb/opentsdb/opentsdb.go +++ b/pkg/tsdb/opentsdb/opentsdb.go @@ -83,6 +83,10 @@ func (e *OpenTsdbExecutor) createRequest(dsInfo *models.DataSource, data OpenTsd u.Path = path.Join(u.Path, "api/query") postData, err := json.Marshal(data) + if err != nil { + plog.Info("Failed marshalling data", "error", err) + return nil, fmt.Errorf("Failed to create request. 
error: %v", err) + } req, err := http.NewRequest(http.MethodPost, u.String(), strings.NewReader(string(postData))) if err != nil { diff --git a/pkg/tsdb/opentsdb/opentsdb_test.go b/pkg/tsdb/opentsdb/opentsdb_test.go index 094deb9e8ec..fe03599f54d 100644 --- a/pkg/tsdb/opentsdb/opentsdb_test.go +++ b/pkg/tsdb/opentsdb/opentsdb_test.go @@ -35,7 +35,7 @@ func TestOpenTsdbExecutor(t *testing.T) { }) - Convey("Build metric with downsampling diabled", func() { + Convey("Build metric with downsampling disabled", func() { query := &tsdb.Query{ Model: simplejson.New(), diff --git a/pkg/tsdb/postgres/macros.go b/pkg/tsdb/postgres/macros.go index 23daeebec5a..05e39f2c762 100644 --- a/pkg/tsdb/postgres/macros.go +++ b/pkg/tsdb/postgres/macros.go @@ -79,15 +79,15 @@ func (m *PostgresMacroEngine) evaluateMacro(name string, args []string) (string, } return fmt.Sprintf("extract(epoch from %s) as \"time\"", args[0]), nil case "__timeFilter": - // dont use to_timestamp in this macro for redshift compatibility #9566 if len(args) == 0 { return "", fmt.Errorf("missing time column argument for macro %v", name) } - return fmt.Sprintf("extract(epoch from %s) BETWEEN %d AND %d", args[0], uint64(m.TimeRange.GetFromAsMsEpoch()/1000), uint64(m.TimeRange.GetToAsMsEpoch()/1000)), nil + + return fmt.Sprintf("%s BETWEEN '%s' AND '%s'", args[0], m.TimeRange.GetFromAsTimeUTC().Format(time.RFC3339), m.TimeRange.GetToAsTimeUTC().Format(time.RFC3339)), nil case "__timeFrom": - return fmt.Sprintf("to_timestamp(%d)", uint64(m.TimeRange.GetFromAsMsEpoch()/1000)), nil + return fmt.Sprintf("'%s'", m.TimeRange.GetFromAsTimeUTC().Format(time.RFC3339)), nil case "__timeTo": - return fmt.Sprintf("to_timestamp(%d)", uint64(m.TimeRange.GetToAsMsEpoch()/1000)), nil + return fmt.Sprintf("'%s'", m.TimeRange.GetToAsTimeUTC().Format(time.RFC3339)), nil case "__timeGroup": if len(args) < 2 { return "", fmt.Errorf("macro %v needs time column and interval and optional fill value", name) @@ -114,11 +114,11 @@ func (m *PostgresMacroEngine) evaluateMacro(name string, args []string) (string, if len(args) == 0 { return "", fmt.Errorf("missing time column argument for macro %v", name) } - return fmt.Sprintf("%s >= %d AND %s <= %d", args[0], uint64(m.TimeRange.GetFromAsMsEpoch()/1000), args[0], uint64(m.TimeRange.GetToAsMsEpoch()/1000)), nil + return fmt.Sprintf("%s >= %d AND %s <= %d", args[0], m.TimeRange.GetFromAsSecondsEpoch(), args[0], m.TimeRange.GetToAsSecondsEpoch()), nil case "__unixEpochFrom": - return fmt.Sprintf("%d", uint64(m.TimeRange.GetFromAsMsEpoch()/1000)), nil + return fmt.Sprintf("%d", m.TimeRange.GetFromAsSecondsEpoch()), nil case "__unixEpochTo": - return fmt.Sprintf("%d", uint64(m.TimeRange.GetToAsMsEpoch()/1000)), nil + return fmt.Sprintf("%d", m.TimeRange.GetToAsSecondsEpoch()), nil default: return "", fmt.Errorf("Unknown macro %v", name) } diff --git a/pkg/tsdb/postgres/macros_test.go b/pkg/tsdb/postgres/macros_test.go index b18acced963..c3c15691e42 100644 --- a/pkg/tsdb/postgres/macros_test.go +++ b/pkg/tsdb/postgres/macros_test.go @@ -1,7 +1,10 @@ package postgres import ( + "fmt" + "strconv" "testing" + "time" "github.com/grafana/grafana/pkg/tsdb" . 
"github.com/smartystreets/goconvey/convey" @@ -9,81 +12,181 @@ import ( func TestMacroEngine(t *testing.T) { Convey("MacroEngine", t, func() { - engine := &PostgresMacroEngine{} + engine := NewPostgresMacroEngine() query := &tsdb.Query{} - timeRange := &tsdb.TimeRange{From: "5m", To: "now"} - Convey("interpolate __time function", func() { - sql, err := engine.Interpolate(query, timeRange, "select $__time(time_column)") - So(err, ShouldBeNil) + Convey("Given a time range between 2018-04-12 00:00 and 2018-04-12 00:05", func() { + from := time.Date(2018, 4, 12, 18, 0, 0, 0, time.UTC) + to := from.Add(5 * time.Minute) + timeRange := tsdb.NewFakeTimeRange("5m", "now", to) - So(sql, ShouldEqual, "select time_column AS \"time\"") + Convey("interpolate __time function", func() { + sql, err := engine.Interpolate(query, timeRange, "select $__time(time_column)") + So(err, ShouldBeNil) + + So(sql, ShouldEqual, "select time_column AS \"time\"") + }) + + Convey("interpolate __time function wrapped in aggregation", func() { + sql, err := engine.Interpolate(query, timeRange, "select min($__time(time_column))") + So(err, ShouldBeNil) + + So(sql, ShouldEqual, "select min(time_column AS \"time\")") + }) + + Convey("interpolate __timeFilter function", func() { + sql, err := engine.Interpolate(query, timeRange, "WHERE $__timeFilter(time_column)") + So(err, ShouldBeNil) + + So(sql, ShouldEqual, fmt.Sprintf("WHERE time_column BETWEEN '%s' AND '%s'", from.Format(time.RFC3339), to.Format(time.RFC3339))) + }) + + Convey("interpolate __timeFrom function", func() { + sql, err := engine.Interpolate(query, timeRange, "select $__timeFrom(time_column)") + So(err, ShouldBeNil) + + So(sql, ShouldEqual, fmt.Sprintf("select '%s'", from.Format(time.RFC3339))) + }) + + Convey("interpolate __timeGroup function", func() { + + sql, err := engine.Interpolate(query, timeRange, "GROUP BY $__timeGroup(time_column,'5m')") + So(err, ShouldBeNil) + + So(sql, ShouldEqual, "GROUP BY (extract(epoch from time_column)/300)::bigint*300 AS time") + }) + + Convey("interpolate __timeGroup function with spaces between args", func() { + + sql, err := engine.Interpolate(query, timeRange, "GROUP BY $__timeGroup(time_column , '5m')") + So(err, ShouldBeNil) + + So(sql, ShouldEqual, "GROUP BY (extract(epoch from time_column)/300)::bigint*300 AS time") + }) + + Convey("interpolate __timeTo function", func() { + sql, err := engine.Interpolate(query, timeRange, "select $__timeTo(time_column)") + So(err, ShouldBeNil) + + So(sql, ShouldEqual, fmt.Sprintf("select '%s'", to.Format(time.RFC3339))) + }) + + Convey("interpolate __unixEpochFilter function", func() { + sql, err := engine.Interpolate(query, timeRange, "select $__unixEpochFilter(time)") + So(err, ShouldBeNil) + + So(sql, ShouldEqual, fmt.Sprintf("select time >= %d AND time <= %d", from.Unix(), to.Unix())) + }) + + Convey("interpolate __unixEpochFrom function", func() { + sql, err := engine.Interpolate(query, timeRange, "select $__unixEpochFrom()") + So(err, ShouldBeNil) + + So(sql, ShouldEqual, fmt.Sprintf("select %d", from.Unix())) + }) + + Convey("interpolate __unixEpochTo function", func() { + sql, err := engine.Interpolate(query, timeRange, "select $__unixEpochTo()") + So(err, ShouldBeNil) + + So(sql, ShouldEqual, fmt.Sprintf("select %d", to.Unix())) + }) }) - Convey("interpolate __time function wrapped in aggregation", func() { - sql, err := engine.Interpolate(query, timeRange, "select min($__time(time_column))") - So(err, ShouldBeNil) + Convey("Given a time range between 1960-02-01 07:00 and 
1965-02-03 08:00", func() { + from := time.Date(1960, 2, 1, 7, 0, 0, 0, time.UTC) + to := time.Date(1965, 2, 3, 8, 0, 0, 0, time.UTC) + timeRange := tsdb.NewTimeRange(strconv.FormatInt(from.UnixNano()/int64(time.Millisecond), 10), strconv.FormatInt(to.UnixNano()/int64(time.Millisecond), 10)) - So(sql, ShouldEqual, "select min(time_column AS \"time\")") + Convey("interpolate __timeFilter function", func() { + sql, err := engine.Interpolate(query, timeRange, "WHERE $__timeFilter(time_column)") + So(err, ShouldBeNil) + + So(sql, ShouldEqual, fmt.Sprintf("WHERE time_column BETWEEN '%s' AND '%s'", from.Format(time.RFC3339), to.Format(time.RFC3339))) + }) + + Convey("interpolate __timeFrom function", func() { + sql, err := engine.Interpolate(query, timeRange, "select $__timeFrom(time_column)") + So(err, ShouldBeNil) + + So(sql, ShouldEqual, fmt.Sprintf("select '%s'", from.Format(time.RFC3339))) + }) + + Convey("interpolate __timeTo function", func() { + sql, err := engine.Interpolate(query, timeRange, "select $__timeTo(time_column)") + So(err, ShouldBeNil) + + So(sql, ShouldEqual, fmt.Sprintf("select '%s'", to.Format(time.RFC3339))) + }) + + Convey("interpolate __unixEpochFilter function", func() { + sql, err := engine.Interpolate(query, timeRange, "select $__unixEpochFilter(time)") + So(err, ShouldBeNil) + + So(sql, ShouldEqual, fmt.Sprintf("select time >= %d AND time <= %d", from.Unix(), to.Unix())) + }) + + Convey("interpolate __unixEpochFrom function", func() { + sql, err := engine.Interpolate(query, timeRange, "select $__unixEpochFrom()") + So(err, ShouldBeNil) + + So(sql, ShouldEqual, fmt.Sprintf("select %d", from.Unix())) + }) + + Convey("interpolate __unixEpochTo function", func() { + sql, err := engine.Interpolate(query, timeRange, "select $__unixEpochTo()") + So(err, ShouldBeNil) + + So(sql, ShouldEqual, fmt.Sprintf("select %d", to.Unix())) + }) }) - Convey("interpolate __timeFilter function", func() { - sql, err := engine.Interpolate(query, timeRange, "WHERE $__timeFilter(time_column)") - So(err, ShouldBeNil) + Convey("Given a time range between 1960-02-01 07:00 and 1980-02-03 08:00", func() { + from := time.Date(1960, 2, 1, 7, 0, 0, 0, time.UTC) + to := time.Date(1980, 2, 3, 8, 0, 0, 0, time.UTC) + timeRange := tsdb.NewTimeRange(strconv.FormatInt(from.UnixNano()/int64(time.Millisecond), 10), strconv.FormatInt(to.UnixNano()/int64(time.Millisecond), 10)) - So(sql, ShouldEqual, "WHERE extract(epoch from time_column) BETWEEN 18446744066914186738 AND 18446744066914187038") + Convey("interpolate __timeFilter function", func() { + sql, err := engine.Interpolate(query, timeRange, "WHERE $__timeFilter(time_column)") + So(err, ShouldBeNil) + + So(sql, ShouldEqual, fmt.Sprintf("WHERE time_column BETWEEN '%s' AND '%s'", from.Format(time.RFC3339), to.Format(time.RFC3339))) + }) + + Convey("interpolate __timeFrom function", func() { + sql, err := engine.Interpolate(query, timeRange, "select $__timeFrom(time_column)") + So(err, ShouldBeNil) + + So(sql, ShouldEqual, fmt.Sprintf("select '%s'", from.Format(time.RFC3339))) + }) + + Convey("interpolate __timeTo function", func() { + sql, err := engine.Interpolate(query, timeRange, "select $__timeTo(time_column)") + So(err, ShouldBeNil) + + So(sql, ShouldEqual, fmt.Sprintf("select '%s'", to.Format(time.RFC3339))) + }) + + Convey("interpolate __unixEpochFilter function", func() { + sql, err := engine.Interpolate(query, timeRange, "select $__unixEpochFilter(time)") + So(err, ShouldBeNil) + + So(sql, ShouldEqual, fmt.Sprintf("select time >= %d AND time <= 
%d", from.Unix(), to.Unix())) + }) + + Convey("interpolate __unixEpochFrom function", func() { + sql, err := engine.Interpolate(query, timeRange, "select $__unixEpochFrom()") + So(err, ShouldBeNil) + + So(sql, ShouldEqual, fmt.Sprintf("select %d", from.Unix())) + }) + + Convey("interpolate __unixEpochTo function", func() { + sql, err := engine.Interpolate(query, timeRange, "select $__unixEpochTo()") + So(err, ShouldBeNil) + + So(sql, ShouldEqual, fmt.Sprintf("select %d", to.Unix())) + }) }) - - Convey("interpolate __timeFrom function", func() { - sql, err := engine.Interpolate(query, timeRange, "select $__timeFrom(time_column)") - So(err, ShouldBeNil) - - So(sql, ShouldEqual, "select to_timestamp(18446744066914186738)") - }) - - Convey("interpolate __timeGroup function", func() { - - sql, err := engine.Interpolate(query, timeRange, "GROUP BY $__timeGroup(time_column,'5m')") - So(err, ShouldBeNil) - - So(sql, ShouldEqual, "GROUP BY (extract(epoch from time_column)/300)::bigint*300 AS time") - }) - - Convey("interpolate __timeGroup function with spaces between args", func() { - - sql, err := engine.Interpolate(query, timeRange, "GROUP BY $__timeGroup(time_column , '5m')") - So(err, ShouldBeNil) - - So(sql, ShouldEqual, "GROUP BY (extract(epoch from time_column)/300)::bigint*300 AS time") - }) - - Convey("interpolate __timeTo function", func() { - sql, err := engine.Interpolate(query, timeRange, "select $__timeTo(time_column)") - So(err, ShouldBeNil) - - So(sql, ShouldEqual, "select to_timestamp(18446744066914187038)") - }) - - Convey("interpolate __unixEpochFilter function", func() { - sql, err := engine.Interpolate(query, timeRange, "select $__unixEpochFilter(18446744066914186738)") - So(err, ShouldBeNil) - - So(sql, ShouldEqual, "select 18446744066914186738 >= 18446744066914186738 AND 18446744066914186738 <= 18446744066914187038") - }) - - Convey("interpolate __unixEpochFrom function", func() { - sql, err := engine.Interpolate(query, timeRange, "select $__unixEpochFrom()") - So(err, ShouldBeNil) - - So(sql, ShouldEqual, "select 18446744066914186738") - }) - - Convey("interpolate __unixEpochTo function", func() { - sql, err := engine.Interpolate(query, timeRange, "select $__unixEpochTo()") - So(err, ShouldBeNil) - - So(sql, ShouldEqual, "select 18446744066914187038") - }) - }) } diff --git a/pkg/tsdb/postgres/postgres.go b/pkg/tsdb/postgres/postgres.go index 5f6b56ebcf1..fdf09216e51 100644 --- a/pkg/tsdb/postgres/postgres.go +++ b/pkg/tsdb/postgres/postgres.go @@ -7,7 +7,6 @@ import ( "math" "net/url" "strconv" - "time" "github.com/go-xorm/core" "github.com/grafana/grafana/pkg/components/null" @@ -132,7 +131,7 @@ func (e PostgresQueryEndpoint) getTypedRowData(rows *core.Rows) (tsdb.RowValues, // convert types not handled by lib/pq // unhandled types are returned as []byte for i := 0; i < len(types); i++ { - if value, ok := values[i].([]byte); ok == true { + if value, ok := values[i].([]byte); ok { switch types[i].DatabaseTypeName() { case "NUMERIC": if v, err := strconv.ParseFloat(string(value), 64); err == nil { @@ -199,7 +198,7 @@ func (e PostgresQueryEndpoint) transformToTimeSeries(query *tsdb.Query, rows *co fillValue := null.Float{} if fillMissing { fillInterval = query.Model.Get("fillInterval").MustFloat64() * 1000 - if query.Model.Get("fillNull").MustBool(false) == false { + if !query.Model.Get("fillNull").MustBool(false) { fillValue.Float64 = query.Model.Get("fillValue").MustFloat64() fillValue.Valid = true } @@ -219,19 +218,22 @@ func (e PostgresQueryEndpoint) 
transformToTimeSeries(query *tsdb.Query, rows *co return err } + // converts column named time to unix timestamp in milliseconds to make + // native postgres datetime types and epoch dates work in + // annotation and table queries. + tsdb.ConvertSqlTimeColumnToEpochMs(values, timeIndex) + switch columnValue := values[timeIndex].(type) { case int64: - timestamp = float64(columnValue * 1000) + timestamp = float64(columnValue) case float64: - timestamp = columnValue * 1000 - case time.Time: - timestamp = float64(columnValue.UnixNano() / 1e6) + timestamp = columnValue default: return fmt.Errorf("Invalid type for column time, must be of type timestamp or unix timestamp, got: %T %v", columnValue, columnValue) } if metricIndex >= 0 { - if columnValue, ok := values[metricIndex].(string); ok == true { + if columnValue, ok := values[metricIndex].(string); ok { metric = columnValue } else { return fmt.Errorf("Column metric must be of type char,varchar or text, got: %T %v", values[metricIndex], values[metricIndex]) @@ -243,22 +245,16 @@ func (e PostgresQueryEndpoint) transformToTimeSeries(query *tsdb.Query, rows *co continue } - switch columnValue := values[i].(type) { - case int64: - value = null.FloatFrom(float64(columnValue)) - case float64: - value = null.FloatFrom(columnValue) - case nil: - value.Valid = false - default: - return fmt.Errorf("Value column must have numeric datatype, column: %s type: %T value: %v", col, columnValue, columnValue) + if value, err = tsdb.ConvertSqlValueColumnToFloat(col, values[i]); err != nil { + return err } + if metricIndex == -1 { metric = col } series, exist := pointsBySeries[metric] - if exist == false { + if !exist { series = &tsdb.TimeSeries{Name: metric} pointsBySeries[metric] = series seriesByQueryOrder.PushBack(metric) @@ -266,7 +262,7 @@ func (e PostgresQueryEndpoint) transformToTimeSeries(query *tsdb.Query, rows *co if fillMissing { var intervalStart float64 - if exist == false { + if !exist { intervalStart = float64(tsdbQuery.TimeRange.MustGetFrom().UnixNano() / 1e6) } else { intervalStart = series.Points[len(series.Points)-1][1].Float64 + fillInterval diff --git a/pkg/tsdb/postgres/postgres_test.go b/pkg/tsdb/postgres/postgres_test.go index 3f2203ac7a4..7f24d5a2063 100644 --- a/pkg/tsdb/postgres/postgres_test.go +++ b/pkg/tsdb/postgres/postgres_test.go @@ -3,26 +3,37 @@ package postgres import ( "fmt" "math/rand" + "strings" "testing" "time" "github.com/go-xorm/xorm" "github.com/grafana/grafana/pkg/components/simplejson" "github.com/grafana/grafana/pkg/log" + "github.com/grafana/grafana/pkg/services/sqlstore" "github.com/grafana/grafana/pkg/services/sqlstore/sqlutil" "github.com/grafana/grafana/pkg/tsdb" _ "github.com/lib/pq" . "github.com/smartystreets/goconvey/convey" ) -// To run this test, remove the Skip from SkipConvey -// and set up a PostgreSQL db named grafanatest and a user/password grafanatest/grafanatest! +// To run this test, set runPostgresTests=true +// Or from the command line: GRAFANA_TEST_DB=postgres go test -v ./pkg/tsdb/postgres +// The tests require a PostgreSQL db named grafanadstest and a user/password grafanatest/grafanatest! // Use the docker/blocks/postgres_tests/docker-compose.yaml to spin up a // preconfigured Postgres server suitable for running these tests. -// Thers's also a dashboard.json in same directory that you can import to Grafana +// There is also a dashboard.json in the same directory that you can import to Grafana // once you've created a datasource for the test server/database.
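A note on the pre-1970 time ranges exercised by the macro tests above: the deleted expectations contained constants like 18446744066914186738 because the old macros forced epoch seconds through a uint64 cast, which wraps negative values (pre-1970 dates) into enormous bogus timestamps. A small self-contained illustration of that failure mode, using the 1960 date from the tests:

package main

import (
	"fmt"
	"time"
)

func main() {
	// Dates before 1970 have a negative Unix epoch.
	from := time.Date(1960, 2, 1, 7, 0, 0, 0, time.UTC)
	sec := from.Unix()
	fmt.Println(sec) // -312915600

	// The old macro code cast this through uint64, wrapping it around to
	// a huge, meaningless timestamp (2^64 + sec):
	fmt.Println(uint64(sec)) // 18446744073396636016

	// Emitting the signed value (GetFromAsSecondsEpoch) or an RFC3339
	// literal, as this diff now does, keeps pre-1970 dates intact:
	fmt.Println(from.Format(time.RFC3339)) // 1960-02-01T07:00:00Z
}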
func TestPostgres(t *testing.T) { - SkipConvey("PostgreSQL", t, func() { + // change to true to run the Postgres tests + runPostgresTests := false + // runPostgresTests := true + + if !(sqlstore.IsTestDbPostgres() || runPostgresTests) { + t.Skip() + } + + Convey("PostgreSQL", t, func() { x := InitPostgresTestDB(t) endpoint := &PostgresQueryEndpoint{ @@ -156,10 +167,8 @@ func TestPostgres(t *testing.T) { }) } - for _, s := range series { - _, err = sess.Insert(s) - So(err, ShouldBeNil) - } + _, err = sess.InsertMulti(series) + So(err, ShouldBeNil) Convey("When doing a metric query using timeGroup", func() { query := &tsdb.TsdbQuery{ @@ -280,10 +289,18 @@ func TestPostgres(t *testing.T) { Convey("Given a table with metrics having multiple values and measurements", func() { type metric_values struct { - Time time.Time - Measurement string - ValueOne int64 `xorm:"integer 'valueOne'"` - ValueTwo int64 `xorm:"integer 'valueTwo'"` + Time time.Time + TimeInt64 int64 `xorm:"bigint 'timeInt64' not null"` + TimeInt64Nullable *int64 `xorm:"bigint 'timeInt64Nullable' null"` + TimeFloat64 float64 `xorm:"double 'timeFloat64' not null"` + TimeFloat64Nullable *float64 `xorm:"double 'timeFloat64Nullable' null"` + TimeInt32 int32 `xorm:"int(11) 'timeInt32' not null"` + TimeInt32Nullable *int32 `xorm:"int(11) 'timeInt32Nullable' null"` + TimeFloat32 float32 `xorm:"double 'timeFloat32' not null"` + TimeFloat32Nullable *float32 `xorm:"double 'timeFloat32Nullable' null"` + Measurement string + ValueOne int64 `xorm:"integer 'valueOne'"` + ValueTwo int64 `xorm:"integer 'valueTwo'"` } if exist, err := sess.IsTableExist(metric_values{}); err != nil || exist { @@ -298,26 +315,219 @@ func TestPostgres(t *testing.T) { return rand.Int63n(max-min) + min } + var tInitial time.Time + series := []*metric_values{} - for _, t := range genTimeRangeByInterval(fromStart.Add(-30*time.Minute), 90*time.Minute, 5*time.Minute) { - series = append(series, &metric_values{ - Time: t, - Measurement: "Metric A", - ValueOne: rnd(0, 100), - ValueTwo: rnd(0, 100), - }) - series = append(series, &metric_values{ - Time: t, - Measurement: "Metric B", - ValueOne: rnd(0, 100), - ValueTwo: rnd(0, 100), - }) + for i, t := range genTimeRangeByInterval(fromStart.Add(-30*time.Minute), 90*time.Minute, 5*time.Minute) { + if i == 0 { + tInitial = t + } + tSeconds := t.Unix() + tSecondsInt32 := int32(tSeconds) + tSecondsFloat32 := float32(tSeconds) + tMilliseconds := tSeconds * 1e3 + tMillisecondsFloat := float64(tMilliseconds) + first := metric_values{ + Time: t, + TimeInt64: tMilliseconds, + TimeInt64Nullable: &(tMilliseconds), + TimeFloat64: tMillisecondsFloat, + TimeFloat64Nullable: &tMillisecondsFloat, + TimeInt32: tSecondsInt32, + TimeInt32Nullable: &tSecondsInt32, + TimeFloat32: tSecondsFloat32, + TimeFloat32Nullable: &tSecondsFloat32, + Measurement: "Metric A", + ValueOne: rnd(0, 100), + ValueTwo: rnd(0, 100), + } + second := first + second.Measurement = "Metric B" + second.ValueOne = rnd(0, 100) + second.ValueTwo = rnd(0, 100) + + series = append(series, &first) + series = append(series, &second) } - for _, s := range series { - _, err := sess.Insert(s) + _, err = sess.InsertMulti(series) + So(err, ShouldBeNil) + + Convey("When doing a metric query using epoch (int64) as time column and value column (int64) should return metric with time in milliseconds", func() { + query := &tsdb.TsdbQuery{ + Queries: []*tsdb.Query{ + { + Model: simplejson.NewFromAny(map[string]interface{}{ + "rawSql": `SELECT "timeInt64" as time, "timeInt64" FROM metric_values
ORDER BY time LIMIT 1`, + "format": "time_series", + }), + RefId: "A", + }, + }, + } + + resp, err := endpoint.Query(nil, nil, query) So(err, ShouldBeNil) - } + queryResult := resp.Results["A"] + So(queryResult.Error, ShouldBeNil) + + So(len(queryResult.Series), ShouldEqual, 1) + So(queryResult.Series[0].Points[0][1].Float64, ShouldEqual, float64(tInitial.UnixNano()/1e6)) + }) + + Convey("When doing a metric query using epoch (int64 nullable) as time column and value column (int64 nullable) should return metric with time in milliseconds", func() { + query := &tsdb.TsdbQuery{ + Queries: []*tsdb.Query{ + { + Model: simplejson.NewFromAny(map[string]interface{}{ + "rawSql": `SELECT "timeInt64Nullable" as time, "timeInt64Nullable" FROM metric_values ORDER BY time LIMIT 1`, + "format": "time_series", + }), + RefId: "A", + }, + }, + } + + resp, err := endpoint.Query(nil, nil, query) + So(err, ShouldBeNil) + queryResult := resp.Results["A"] + So(queryResult.Error, ShouldBeNil) + + So(len(queryResult.Series), ShouldEqual, 1) + So(queryResult.Series[0].Points[0][1].Float64, ShouldEqual, float64(tInitial.UnixNano()/1e6)) + }) + + Convey("When doing a metric query using epoch (float64) as time column and value column (float64) should return metric with time in milliseconds", func() { + query := &tsdb.TsdbQuery{ + Queries: []*tsdb.Query{ + { + Model: simplejson.NewFromAny(map[string]interface{}{ + "rawSql": `SELECT "timeFloat64" as time, "timeFloat64" FROM metric_values ORDER BY time LIMIT 1`, + "format": "time_series", + }), + RefId: "A", + }, + }, + } + + resp, err := endpoint.Query(nil, nil, query) + So(err, ShouldBeNil) + queryResult := resp.Results["A"] + So(queryResult.Error, ShouldBeNil) + + So(len(queryResult.Series), ShouldEqual, 1) + So(queryResult.Series[0].Points[0][1].Float64, ShouldEqual, float64(tInitial.UnixNano()/1e6)) + }) + + Convey("When doing a metric query using epoch (float64 nullable) as time column and value column (float64 nullable) should return metric with time in milliseconds", func() { + query := &tsdb.TsdbQuery{ + Queries: []*tsdb.Query{ + { + Model: simplejson.NewFromAny(map[string]interface{}{ + "rawSql": `SELECT "timeFloat64Nullable" as time, "timeFloat64Nullable" FROM metric_values ORDER BY time LIMIT 1`, + "format": "time_series", + }), + RefId: "A", + }, + }, + } + + resp, err := endpoint.Query(nil, nil, query) + So(err, ShouldBeNil) + queryResult := resp.Results["A"] + So(queryResult.Error, ShouldBeNil) + + So(len(queryResult.Series), ShouldEqual, 1) + So(queryResult.Series[0].Points[0][1].Float64, ShouldEqual, float64(tInitial.UnixNano()/1e6)) + }) + + Convey("When doing a metric query using epoch (int32) as time column and value column (int32) should return metric with time in milliseconds", func() { + query := &tsdb.TsdbQuery{ + Queries: []*tsdb.Query{ + { + Model: simplejson.NewFromAny(map[string]interface{}{ + "rawSql": `SELECT "timeInt32" as time, "timeInt32" FROM metric_values ORDER BY time LIMIT 1`, + "format": "time_series", + }), + RefId: "A", + }, + }, + } + + resp, err := endpoint.Query(nil, nil, query) + So(err, ShouldBeNil) + queryResult := resp.Results["A"] + So(queryResult.Error, ShouldBeNil) + + So(len(queryResult.Series), ShouldEqual, 1) + So(queryResult.Series[0].Points[0][1].Float64, ShouldEqual, float64(tInitial.UnixNano()/1e6)) + }) + + Convey("When doing a metric query using epoch (int32 nullable) as time column and value column (int32 nullable) should return metric with time in milliseconds", func() { + query := &tsdb.TsdbQuery{ + Queries: 
[]*tsdb.Query{ + { + Model: simplejson.NewFromAny(map[string]interface{}{ + "rawSql": `SELECT "timeInt32Nullable" as time, "timeInt32Nullable" FROM metric_values ORDER BY time LIMIT 1`, + "format": "time_series", + }), + RefId: "A", + }, + }, + } + + resp, err := endpoint.Query(nil, nil, query) + So(err, ShouldBeNil) + queryResult := resp.Results["A"] + So(queryResult.Error, ShouldBeNil) + + So(len(queryResult.Series), ShouldEqual, 1) + So(queryResult.Series[0].Points[0][1].Float64, ShouldEqual, float64(tInitial.UnixNano()/1e6)) + }) + + Convey("When doing a metric query using epoch (float32) as time column and value column (float32) should return metric with time in milliseconds", func() { + query := &tsdb.TsdbQuery{ + Queries: []*tsdb.Query{ + { + Model: simplejson.NewFromAny(map[string]interface{}{ + "rawSql": `SELECT "timeFloat32" as time, "timeFloat32" FROM metric_values ORDER BY time LIMIT 1`, + "format": "time_series", + }), + RefId: "A", + }, + }, + } + + resp, err := endpoint.Query(nil, nil, query) + So(err, ShouldBeNil) + queryResult := resp.Results["A"] + So(queryResult.Error, ShouldBeNil) + + So(len(queryResult.Series), ShouldEqual, 1) + So(queryResult.Series[0].Points[0][1].Float64, ShouldEqual, float64(float64(float32(tInitial.Unix())))*1e3) + }) + + Convey("When doing a metric query using epoch (float32 nullable) as time column and value column (float32 nullable) should return metric with time in milliseconds", func() { + query := &tsdb.TsdbQuery{ + Queries: []*tsdb.Query{ + { + Model: simplejson.NewFromAny(map[string]interface{}{ + "rawSql": `SELECT "timeFloat32Nullable" as time, "timeFloat32Nullable" FROM metric_values ORDER BY time LIMIT 1`, + "format": "time_series", + }), + RefId: "A", + }, + }, + } + + resp, err := endpoint.Query(nil, nil, query) + So(err, ShouldBeNil) + queryResult := resp.Results["A"] + So(queryResult.Error, ShouldBeNil) + + So(len(queryResult.Series), ShouldEqual, 1) + So(queryResult.Series[0].Points[0][1].Float64, ShouldEqual, float64(float64(float32(tInitial.Unix())))*1e3) + }) Convey("When doing a metric query grouping by time and select metric column should return correct series", func() { query := &tsdb.TsdbQuery{ @@ -473,7 +683,7 @@ func TestPostgres(t *testing.T) { columns := queryResult.Tables[0].Rows[0] //Should be in milliseconds - So(columns[0].(float64), ShouldEqual, float64(dt.Unix()*1000)) + So(columns[0].(float64), ShouldEqual, float64(dt.UnixNano()/1e6)) }) Convey("When doing an annotation query with a time column in epoch second format should return ms", func() { @@ -626,16 +836,16 @@ func TestPostgres(t *testing.T) { } func InitPostgresTestDB(t *testing.T) *xorm.Engine { - x, err := xorm.NewEngine(sqlutil.TestDB_Postgres.DriverName, sqlutil.TestDB_Postgres.ConnStr) + x, err := xorm.NewEngine(sqlutil.TestDB_Postgres.DriverName, strings.Replace(sqlutil.TestDB_Postgres.ConnStr, "dbname=grafanatest", "dbname=grafanadstest", 1)) + if err != nil { + t.Fatalf("Failed to init postgres db %v", err) + } + x.DatabaseTZ = time.UTC x.TZLocation = time.UTC // x.ShowSQL() - if err != nil { - t.Fatalf("Failed to init postgres db %v", err) - } - return x } diff --git a/pkg/tsdb/prometheus/prometheus.go b/pkg/tsdb/prometheus/prometheus.go index 1186fccbbf9..bf9fe9f152c 100644 --- a/pkg/tsdb/prometheus/prometheus.go +++ b/pkg/tsdb/prometheus/prometheus.go @@ -108,8 +108,8 @@ func (e *PrometheusExecutor) Query(ctx context.Context, dsInfo *models.DataSourc span, ctx := opentracing.StartSpanFromContext(ctx, "alerting.prometheus") span.SetTag("expr", 
query.Expr) - span.SetTag("start_unixnano", int64(query.Start.UnixNano())) - span.SetTag("stop_unixnano", int64(query.End.UnixNano())) + span.SetTag("start_unixnano", query.Start.UnixNano()) + span.SetTag("stop_unixnano", query.End.UnixNano()) defer span.Finish() value, err := client.QueryRange(ctx, query.Expr, timeRange) diff --git a/pkg/tsdb/sql_engine.go b/pkg/tsdb/sql_engine.go index 16370a4ea7f..274e5b05dc1 100644 --- a/pkg/tsdb/sql_engine.go +++ b/pkg/tsdb/sql_engine.go @@ -2,9 +2,12 @@ package tsdb import ( "context" + "fmt" "sync" "time" + "github.com/grafana/grafana/pkg/components/null" + "github.com/go-xorm/core" "github.com/go-xorm/xorm" "github.com/grafana/grafana/pkg/components/simplejson" @@ -51,7 +54,7 @@ func (e *DefaultSqlEngine) InitEngine(driverName string, dsInfo *models.DataSour defer engineCache.Unlock() if engine, present := engineCache.cache[dsInfo.Id]; present { - if version, _ := engineCache.versions[dsInfo.Id]; version == dsInfo.Version { + if version := engineCache.versions[dsInfo.Id]; version == dsInfo.Version { e.XormEngine = engine return nil } @@ -135,16 +138,16 @@ func (e *DefaultSqlEngine) Query( return result, nil } -// ConvertTimeColumnToEpochMs converts column named time to unix timestamp in milliseconds +// ConvertSqlTimeColumnToEpochMs converts column named time to unix timestamp in milliseconds // to make native datetime types and epoch dates work in annotation and table queries. func ConvertSqlTimeColumnToEpochMs(values RowValues, timeIndex int) { if timeIndex >= 0 { switch value := values[timeIndex].(type) { case time.Time: - values[timeIndex] = EpochPrecisionToMs(float64(value.Unix())) + values[timeIndex] = EpochPrecisionToMs(float64(value.UnixNano())) case *time.Time: if value != nil { - values[timeIndex] = EpochPrecisionToMs(float64((*value).Unix())) + values[timeIndex] = EpochPrecisionToMs(float64((*value).UnixNano())) } case int64: values[timeIndex] = int64(EpochPrecisionToMs(float64(value))) @@ -152,12 +155,142 @@ func ConvertSqlTimeColumnToEpochMs(values RowValues, timeIndex int) { if value != nil { values[timeIndex] = int64(EpochPrecisionToMs(float64(*value))) } + case uint64: + values[timeIndex] = int64(EpochPrecisionToMs(float64(value))) + case *uint64: + if value != nil { + values[timeIndex] = int64(EpochPrecisionToMs(float64(*value))) + } + case int32: + values[timeIndex] = int64(EpochPrecisionToMs(float64(value))) + case *int32: + if value != nil { + values[timeIndex] = int64(EpochPrecisionToMs(float64(*value))) + } + case uint32: + values[timeIndex] = int64(EpochPrecisionToMs(float64(value))) + case *uint32: + if value != nil { + values[timeIndex] = int64(EpochPrecisionToMs(float64(*value))) + } case float64: values[timeIndex] = EpochPrecisionToMs(value) case *float64: if value != nil { values[timeIndex] = EpochPrecisionToMs(*value) } + case float32: + values[timeIndex] = EpochPrecisionToMs(float64(value)) + case *float32: + if value != nil { + values[timeIndex] = EpochPrecisionToMs(float64(*value)) + } } } } + +// ConvertSqlValueColumnToFloat converts timeseries value column to float. 
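+// All Go integer and float types (and pointers to them) are accepted; a nil
+// pointer (SQL NULL) yields an invalid (null) Float rather than an error.
+// For example:
+//   v, _ := ConvertSqlValueColumnToFloat("value", int32(42))    // v.Valid == true, v.Float64 == 42
+//   v, _ = ConvertSqlValueColumnToFloat("value", (*int64)(nil)) // v.Valid == false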
+func ConvertSqlValueColumnToFloat(columnName string, columnValue interface{}) (null.Float, error) { + var value null.Float + + switch typedValue := columnValue.(type) { + case int: + value = null.FloatFrom(float64(typedValue)) + case *int: + if typedValue == nil { + value.Valid = false + } else { + value = null.FloatFrom(float64(*typedValue)) + } + case int64: + value = null.FloatFrom(float64(typedValue)) + case *int64: + if typedValue == nil { + value.Valid = false + } else { + value = null.FloatFrom(float64(*typedValue)) + } + case int32: + value = null.FloatFrom(float64(typedValue)) + case *int32: + if typedValue == nil { + value.Valid = false + } else { + value = null.FloatFrom(float64(*typedValue)) + } + case int16: + value = null.FloatFrom(float64(typedValue)) + case *int16: + if typedValue == nil { + value.Valid = false + } else { + value = null.FloatFrom(float64(*typedValue)) + } + case int8: + value = null.FloatFrom(float64(typedValue)) + case *int8: + if typedValue == nil { + value.Valid = false + } else { + value = null.FloatFrom(float64(*typedValue)) + } + case uint: + value = null.FloatFrom(float64(typedValue)) + case *uint: + if typedValue == nil { + value.Valid = false + } else { + value = null.FloatFrom(float64(*typedValue)) + } + case uint64: + value = null.FloatFrom(float64(typedValue)) + case *uint64: + if typedValue == nil { + value.Valid = false + } else { + value = null.FloatFrom(float64(*typedValue)) + } + case uint32: + value = null.FloatFrom(float64(typedValue)) + case *uint32: + if typedValue == nil { + value.Valid = false + } else { + value = null.FloatFrom(float64(*typedValue)) + } + case uint16: + value = null.FloatFrom(float64(typedValue)) + case *uint16: + if typedValue == nil { + value.Valid = false + } else { + value = null.FloatFrom(float64(*typedValue)) + } + case uint8: + value = null.FloatFrom(float64(typedValue)) + case *uint8: + if typedValue == nil { + value.Valid = false + } else { + value = null.FloatFrom(float64(*typedValue)) + } + case float64: + value = null.FloatFrom(typedValue) + case *float64: + value = null.FloatFromPtr(typedValue) + case float32: + value = null.FloatFrom(float64(typedValue)) + case *float32: + if typedValue == nil { + value.Valid = false + } else { + value = null.FloatFrom(float64(*typedValue)) + } + case nil: + value.Valid = false + default: + return null.NewFloat(0, false), fmt.Errorf("Value column must have numeric datatype, column: %s type: %T value: %v", columnName, typedValue, typedValue) + } + + return value, nil +} diff --git a/pkg/tsdb/sql_engine_test.go b/pkg/tsdb/sql_engine_test.go index 48aac2c4d45..ce1fb45de21 100644 --- a/pkg/tsdb/sql_engine_test.go +++ b/pkg/tsdb/sql_engine_test.go @@ -4,42 +4,278 @@ import ( "testing" "time" + "github.com/grafana/grafana/pkg/components/null" + . 
"github.com/smartystreets/goconvey/convey" ) func TestSqlEngine(t *testing.T) { Convey("SqlEngine", t, func() { - Convey("Given row values with time columns when converting them", func() { - dt := time.Date(2018, 3, 14, 21, 20, 6, 527e6, time.UTC) - fixtures := make([]interface{}, 8) - fixtures[0] = dt - fixtures[1] = dt.Unix() * 1000 - fixtures[2] = dt.Unix() - fixtures[3] = float64(dt.Unix() * 1000) - fixtures[4] = float64(dt.Unix()) + dt := time.Date(2018, 3, 14, 21, 20, 6, int(527345*time.Microsecond), time.UTC) - var nilDt *time.Time - var nilInt64 *int64 - var nilFloat64 *float64 - fixtures[5] = nilDt - fixtures[6] = nilInt64 - fixtures[7] = nilFloat64 + Convey("Given row values with time.Time as time columns", func() { + var nilPointer *time.Time + + fixtures := make([]interface{}, 3) + fixtures[0] = dt + fixtures[1] = &dt + fixtures[2] = nilPointer for i := range fixtures { ConvertSqlTimeColumnToEpochMs(fixtures, i) } - Convey("Should convert sql time columns to epoch time in ms ", func() { - expected := float64(dt.Unix() * 1000) + Convey("When converting them should return epoch time with millisecond precision ", func() { + expected := float64(dt.UnixNano()) / float64(time.Millisecond) So(fixtures[0].(float64), ShouldEqual, expected) - So(fixtures[1].(int64), ShouldEqual, expected) - So(fixtures[2].(int64), ShouldEqual, expected) - So(fixtures[3].(float64), ShouldEqual, expected) - So(fixtures[4].(float64), ShouldEqual, expected) + So(fixtures[1].(float64), ShouldEqual, expected) + So(fixtures[2], ShouldBeNil) + }) + }) - So(fixtures[5], ShouldBeNil) + Convey("Given row values with int64 as time columns", func() { + tSeconds := dt.Unix() + tMilliseconds := dt.UnixNano() / 1e6 + tNanoSeconds := dt.UnixNano() + var nilPointer *int64 + + fixtures := make([]interface{}, 7) + fixtures[0] = tSeconds + fixtures[1] = &tSeconds + fixtures[2] = tMilliseconds + fixtures[3] = &tMilliseconds + fixtures[4] = tNanoSeconds + fixtures[5] = &tNanoSeconds + fixtures[6] = nilPointer + + for i := range fixtures { + ConvertSqlTimeColumnToEpochMs(fixtures, i) + } + + Convey("When converting them should return epoch time with millisecond precision ", func() { + So(fixtures[0].(int64), ShouldEqual, tSeconds*1e3) + So(fixtures[1].(int64), ShouldEqual, tSeconds*1e3) + So(fixtures[2].(int64), ShouldEqual, tMilliseconds) + So(fixtures[3].(int64), ShouldEqual, tMilliseconds) + So(fixtures[4].(int64), ShouldEqual, tMilliseconds) + So(fixtures[5].(int64), ShouldEqual, tMilliseconds) So(fixtures[6], ShouldBeNil) - So(fixtures[7], ShouldBeNil) + }) + }) + + Convey("Given row values with uin64 as time columns", func() { + tSeconds := uint64(dt.Unix()) + tMilliseconds := uint64(dt.UnixNano() / 1e6) + tNanoSeconds := uint64(dt.UnixNano()) + var nilPointer *uint64 + + fixtures := make([]interface{}, 7) + fixtures[0] = tSeconds + fixtures[1] = &tSeconds + fixtures[2] = tMilliseconds + fixtures[3] = &tMilliseconds + fixtures[4] = tNanoSeconds + fixtures[5] = &tNanoSeconds + fixtures[6] = nilPointer + + for i := range fixtures { + ConvertSqlTimeColumnToEpochMs(fixtures, i) + } + + Convey("When converting them should return epoch time with millisecond precision ", func() { + So(fixtures[0].(int64), ShouldEqual, tSeconds*1e3) + So(fixtures[1].(int64), ShouldEqual, tSeconds*1e3) + So(fixtures[2].(int64), ShouldEqual, tMilliseconds) + So(fixtures[3].(int64), ShouldEqual, tMilliseconds) + So(fixtures[4].(int64), ShouldEqual, tMilliseconds) + So(fixtures[5].(int64), ShouldEqual, tMilliseconds) + So(fixtures[6], ShouldBeNil) + 
}) + }) + + Convey("Given row values with int32 as time columns", func() { + tSeconds := int32(dt.Unix()) + var nilInt *int32 + + fixtures := make([]interface{}, 3) + fixtures[0] = tSeconds + fixtures[1] = &tSeconds + fixtures[2] = nilInt + + for i := range fixtures { + ConvertSqlTimeColumnToEpochMs(fixtures, i) + } + + Convey("When converting them should return epoch time with millisecond precision ", func() { + So(fixtures[0].(int64), ShouldEqual, dt.Unix()*1e3) + So(fixtures[1].(int64), ShouldEqual, dt.Unix()*1e3) + So(fixtures[2], ShouldBeNil) + }) + }) + + Convey("Given row values with uint32 as time columns", func() { + tSeconds := uint32(dt.Unix()) + var nilInt *uint32 + + fixtures := make([]interface{}, 3) + fixtures[0] = tSeconds + fixtures[1] = &tSeconds + fixtures[2] = nilInt + + for i := range fixtures { + ConvertSqlTimeColumnToEpochMs(fixtures, i) + } + + Convey("When converting them should return epoch time with millisecond precision ", func() { + So(fixtures[0].(int64), ShouldEqual, dt.Unix()*1e3) + So(fixtures[1].(int64), ShouldEqual, dt.Unix()*1e3) + So(fixtures[2], ShouldBeNil) + }) + }) + + Convey("Given row values with float64 as time columns", func() { + tSeconds := float64(dt.UnixNano()) / float64(time.Second) + tMilliseconds := float64(dt.UnixNano()) / float64(time.Millisecond) + tNanoSeconds := float64(dt.UnixNano()) + var nilPointer *float64 + + fixtures := make([]interface{}, 7) + fixtures[0] = tSeconds + fixtures[1] = &tSeconds + fixtures[2] = tMilliseconds + fixtures[3] = &tMilliseconds + fixtures[4] = tNanoSeconds + fixtures[5] = &tNanoSeconds + fixtures[6] = nilPointer + + for i := range fixtures { + ConvertSqlTimeColumnToEpochMs(fixtures, i) + } + + Convey("When converting them should return epoch time with millisecond precision ", func() { + So(fixtures[0].(float64), ShouldEqual, tMilliseconds) + So(fixtures[1].(float64), ShouldEqual, tMilliseconds) + So(fixtures[2].(float64), ShouldEqual, tMilliseconds) + So(fixtures[3].(float64), ShouldEqual, tMilliseconds) + So(fixtures[4].(float64), ShouldEqual, tMilliseconds) + So(fixtures[5].(float64), ShouldEqual, tMilliseconds) + So(fixtures[6], ShouldBeNil) + }) + }) + + Convey("Given row values with float32 as time columns", func() { + tSeconds := float32(dt.Unix()) + var nilInt *float32 + + fixtures := make([]interface{}, 3) + fixtures[0] = tSeconds + fixtures[1] = &tSeconds + fixtures[2] = nilInt + + for i := range fixtures { + ConvertSqlTimeColumnToEpochMs(fixtures, i) + } + + Convey("When converting them should return epoch time with millisecond precision ", func() { + So(fixtures[0].(float64), ShouldEqual, float32(dt.Unix()*1e3)) + So(fixtures[1].(float64), ShouldEqual, float32(dt.Unix()*1e3)) + So(fixtures[2], ShouldBeNil) + }) + }) + + Convey("Given row with value columns", func() { + intValue := 1 + int64Value := int64(1) + int32Value := int32(1) + int16Value := int16(1) + int8Value := int8(1) + float64Value := float64(1) + float32Value := float32(1) + uintValue := uint(1) + uint64Value := uint64(1) + uint32Value := uint32(1) + uint16Value := uint16(1) + uint8Value := uint8(1) + + fixtures := make([]interface{}, 24) + fixtures[0] = intValue + fixtures[1] = &intValue + fixtures[2] = int64Value + fixtures[3] = &int64Value + fixtures[4] = int32Value + fixtures[5] = &int32Value + fixtures[6] = int16Value + fixtures[7] = &int16Value + fixtures[8] = int8Value + fixtures[9] = &int8Value + fixtures[10] = float64Value + fixtures[11] = &float64Value + fixtures[12] = float32Value + fixtures[13] = &float32Value + 
fixtures[14] = uintValue
+			fixtures[15] = &uintValue
+			fixtures[16] = uint64Value
+			fixtures[17] = &uint64Value
+			fixtures[18] = uint32Value
+			fixtures[19] = &uint32Value
+			fixtures[20] = uint16Value
+			fixtures[21] = &uint16Value
+			fixtures[22] = uint8Value
+			fixtures[23] = &uint8Value
+
+			var intNilPointer *int
+			var int64NilPointer *int64
+			var int32NilPointer *int32
+			var int16NilPointer *int16
+			var int8NilPointer *int8
+			var float64NilPointer *float64
+			var float32NilPointer *float32
+			var uintNilPointer *uint
+			var uint64NilPointer *uint64
+			var uint32NilPointer *uint32
+			var uint16NilPointer *uint16
+			var uint8NilPointer *uint8
+
+			nilPointerFixtures := make([]interface{}, 12)
+			nilPointerFixtures[0] = intNilPointer
+			nilPointerFixtures[1] = int64NilPointer
+			nilPointerFixtures[2] = int32NilPointer
+			nilPointerFixtures[3] = int16NilPointer
+			nilPointerFixtures[4] = int8NilPointer
+			nilPointerFixtures[5] = float64NilPointer
+			nilPointerFixtures[6] = float32NilPointer
+			nilPointerFixtures[7] = uintNilPointer
+			nilPointerFixtures[8] = uint64NilPointer
+			nilPointerFixtures[9] = uint32NilPointer
+			nilPointerFixtures[10] = uint16NilPointer
+			nilPointerFixtures[11] = uint8NilPointer
+
+			Convey("When converting values to float should return expected value", func() {
+				for _, f := range fixtures {
+					value, _ := ConvertSqlValueColumnToFloat("col", f)
+
+					if !value.Valid {
+						t.Fatalf("Failed to convert %T value, expected a valid float value", f)
+					}
+
+					if value.Float64 != null.FloatFrom(1).Float64 {
+						t.Fatalf("Failed to convert %T value, expected a float value of 1.000, but got %v", f, value)
+					}
+				}
+			})
+
+			Convey("When converting nil pointer values to float should return expected value", func() {
+				for _, f := range nilPointerFixtures {
+					value, err := ConvertSqlValueColumnToFloat("col", f)
+
+					if err != nil {
+						t.Fatalf("Failed to convert %T value, expected a nil error, but got %v", f, err)
+					}
+
+					if value.Valid {
+						t.Fatalf("Failed to convert %T value, expected an invalid float value", f)
+					}
+				}
 			})
 		})
 	})
diff --git a/pkg/tsdb/time_range.go b/pkg/tsdb/time_range.go
index fd0cb3f8e82..18e389e5993 100644
--- a/pkg/tsdb/time_range.go
+++ b/pkg/tsdb/time_range.go
@@ -15,6 +15,14 @@ func NewTimeRange(from, to string) *TimeRange {
 	}
 }

+func NewFakeTimeRange(from, to string, now time.Time) *TimeRange {
+	return &TimeRange{
+		From: from,
+		To:   to,
+		now:  now,
+	}
+}
+
 type TimeRange struct {
 	From string
 	To   string
@@ -25,24 +33,40 @@ func (tr *TimeRange) GetFromAsMsEpoch() int64 {
 	return tr.MustGetFrom().UnixNano() / int64(time.Millisecond)
 }

+func (tr *TimeRange) GetFromAsSecondsEpoch() int64 {
+	return tr.GetFromAsMsEpoch() / 1000
+}
+
+func (tr *TimeRange) GetFromAsTimeUTC() time.Time {
+	return tr.MustGetFrom().UTC()
+}
+
 func (tr *TimeRange) GetToAsMsEpoch() int64 {
 	return tr.MustGetTo().UnixNano() / int64(time.Millisecond)
 }

+func (tr *TimeRange) GetToAsSecondsEpoch() int64 {
+	return tr.GetToAsMsEpoch() / 1000
+}
+
+func (tr *TimeRange) GetToAsTimeUTC() time.Time {
+	return tr.MustGetTo().UTC()
+}
+
 func (tr *TimeRange) MustGetFrom() time.Time {
-	if res, err := tr.ParseFrom(); err != nil {
+	res, err := tr.ParseFrom()
+	if err != nil {
 		return time.Unix(0, 0)
-	} else {
-		return res
 	}
+	return res
 }

 func (tr *TimeRange) MustGetTo() time.Time {
-	if res, err := tr.ParseTo(); err != nil {
+	res, err := tr.ParseTo()
+	if err != nil {
 		return time.Unix(0, 0)
-	} else {
-		return res
 	}
+	return res
 }

 func tryParseUnixMsEpoch(val string) (time.Time, bool) {
@@ -92,9 +116,14 @@ func (tr
*TimeRange) ParseTo() (time.Time, error) { // EpochPrecisionToMs converts epoch precision to millisecond, if needed. // Only seconds to milliseconds supported right now func EpochPrecisionToMs(value float64) float64 { - if int64(value)/1e10 == 0 { - return float64(value * 1e3) + s := strconv.FormatFloat(value, 'e', -1, 64) + if strings.HasSuffix(s, "e+09") { + return value * float64(1e3) } - return float64(value) + if strings.HasSuffix(s, "e+18") { + return value / float64(time.Millisecond) + } + + return value } diff --git a/pkg/util/filepath.go b/pkg/util/filepath.go index 3ad8cac3147..d304236fcb1 100644 --- a/pkg/util/filepath.go +++ b/pkg/util/filepath.go @@ -65,9 +65,8 @@ func walk(path string, info os.FileInfo, resolvedPath string, symlinkPathsFollow if _, ok := symlinkPathsFollowed[path2]; ok { errMsg := "Potential SymLink Infinite Loop. Path: %v, Link To: %v" return fmt.Errorf(errMsg, resolvedPath, path2) - } else { - symlinkPathsFollowed[path2] = true } + symlinkPathsFollowed[path2] = true } info2, err := os.Lstat(path2) if err != nil { diff --git a/pkg/util/shortid_generator.go b/pkg/util/shortid_generator.go index d87b6f70fe6..f900cb8275e 100644 --- a/pkg/util/shortid_generator.go +++ b/pkg/util/shortid_generator.go @@ -17,11 +17,7 @@ func init() { // IsValidShortUid checks if short unique identifier contains valid characters func IsValidShortUid(uid string) bool { - if !validUidPattern(uid) { - return false - } - - return true + return validUidPattern(uid) } // GenerateShortUid generates a short unique identifier. diff --git a/public/app/containers/AlertRuleList/AlertRuleList.tsx b/public/app/containers/AlertRuleList/AlertRuleList.tsx index 9ecb9a177d7..b61c7fbaac3 100644 --- a/public/app/containers/AlertRuleList/AlertRuleList.tsx +++ b/public/app/containers/AlertRuleList/AlertRuleList.tsx @@ -1,4 +1,5 @@ import React from 'react'; +import { hot } from 'react-hot-loader'; import classNames from 'classnames'; import { inject, observer } from 'mobx-react'; import PageHeader from 'app/core/components/PageHeader/PageHeader'; @@ -173,3 +174,5 @@ export class AlertRuleItem extends React.Component { ); } } + +export default hot(module)(AlertRuleList); diff --git a/public/app/containers/Explore/ElapsedTime.tsx b/public/app/containers/Explore/ElapsedTime.tsx new file mode 100644 index 00000000000..9cd8f674186 --- /dev/null +++ b/public/app/containers/Explore/ElapsedTime.tsx @@ -0,0 +1,46 @@ +import React, { PureComponent } from 'react'; + +const INTERVAL = 150; + +export default class ElapsedTime extends PureComponent { + offset: number; + timer: number; + + state = { + elapsed: 0, + }; + + start() { + this.offset = Date.now(); + this.timer = window.setInterval(this.tick, INTERVAL); + } + + tick = () => { + const jetzt = Date.now(); + const elapsed = jetzt - this.offset; + this.setState({ elapsed }); + }; + + componentWillReceiveProps(nextProps) { + if (nextProps.time) { + clearInterval(this.timer); + } else if (this.props.time) { + this.start(); + } + } + + componentDidMount() { + this.start(); + } + + componentWillUnmount() { + clearInterval(this.timer); + } + + render() { + const { elapsed } = this.state; + const { className, time } = this.props; + const value = (time || elapsed) / 1000; + return {value.toFixed(1)}s; + } +} diff --git a/public/app/containers/Explore/Explore.tsx b/public/app/containers/Explore/Explore.tsx new file mode 100644 index 00000000000..40261ee635a --- /dev/null +++ b/public/app/containers/Explore/Explore.tsx @@ -0,0 +1,271 @@ +import React from 'react'; 
+import { hot } from 'react-hot-loader'; +import colors from 'app/core/utils/colors'; +import TimeSeries from 'app/core/time_series2'; + +import ElapsedTime from './ElapsedTime'; +import Legend from './Legend'; +import QueryRows from './QueryRows'; +import Graph from './Graph'; +import Table from './Table'; +import { DatasourceSrv } from 'app/features/plugins/datasource_srv'; +import { buildQueryOptions, ensureQueries, generateQueryKey, hasQuery } from './utils/query'; +import { decodePathComponent } from 'app/core/utils/location_util'; + +function makeTimeSeriesList(dataList, options) { + return dataList.map((seriesData, index) => { + const datapoints = seriesData.datapoints || []; + const alias = seriesData.target; + + const colorIndex = index % colors.length; + const color = colors[colorIndex]; + + const series = new TimeSeries({ + datapoints: datapoints, + alias: alias, + color: color, + unit: seriesData.unit, + }); + + if (datapoints && datapoints.length > 0) { + const last = datapoints[datapoints.length - 1][1]; + const from = options.range.from; + if (last - from < -10000) { + series.isOutsideRange = true; + } + } + + return series; + }); +} + +function parseInitialQueries(initial) { + if (!initial) { + return []; + } + try { + const parsed = JSON.parse(decodePathComponent(initial)); + return parsed.queries.map(q => q.query); + } catch (e) { + console.error(e); + return []; + } +} + +interface IExploreState { + datasource: any; + datasourceError: any; + datasourceLoading: any; + graphResult: any; + latency: number; + loading: any; + queries: any; + requestOptions: any; + showingGraph: boolean; + showingTable: boolean; + tableResult: any; +} + +// @observer +export class Explore extends React.Component { + datasourceSrv: DatasourceSrv; + + constructor(props) { + super(props); + const initialQueries = parseInitialQueries(props.routeParams.initial); + this.state = { + datasource: null, + datasourceError: null, + datasourceLoading: true, + graphResult: null, + latency: 0, + loading: false, + queries: ensureQueries(initialQueries), + requestOptions: null, + showingGraph: true, + showingTable: true, + tableResult: null, + }; + } + + async componentDidMount() { + const datasource = await this.props.datasourceSrv.get(); + const testResult = await datasource.testDatasource(); + if (testResult.status === 'success') { + this.setState({ datasource, datasourceError: null, datasourceLoading: false }, () => this.handleSubmit()); + } else { + this.setState({ datasource: null, datasourceError: testResult.message, datasourceLoading: false }); + } + } + + handleAddQueryRow = index => { + const { queries } = this.state; + const nextQueries = [ + ...queries.slice(0, index + 1), + { query: '', key: generateQueryKey() }, + ...queries.slice(index + 1), + ]; + this.setState({ queries: nextQueries }); + }; + + handleChangeQuery = (query, index) => { + const { queries } = this.state; + const nextQuery = { + ...queries[index], + query, + }; + const nextQueries = [...queries]; + nextQueries[index] = nextQuery; + this.setState({ queries: nextQueries }); + }; + + handleClickGraphButton = () => { + this.setState(state => ({ showingGraph: !state.showingGraph })); + }; + + handleClickTableButton = () => { + this.setState(state => ({ showingTable: !state.showingTable })); + }; + + handleRemoveQueryRow = index => { + const { queries } = this.state; + if (queries.length <= 1) { + return; + } + const nextQueries = [...queries.slice(0, index), ...queries.slice(index + 1)]; + this.setState({ queries: nextQueries }, () => 
this.handleSubmit()); + }; + + handleSubmit = () => { + const { showingGraph, showingTable } = this.state; + if (showingTable) { + this.runTableQuery(); + } + if (showingGraph) { + this.runGraphQuery(); + } + }; + + async runGraphQuery() { + const { datasource, queries } = this.state; + if (!hasQuery(queries)) { + return; + } + this.setState({ latency: 0, loading: true, graphResult: null }); + const now = Date.now(); + const options = buildQueryOptions({ + format: 'time_series', + interval: datasource.interval, + instant: false, + now, + queries: queries.map(q => q.query), + }); + try { + const res = await datasource.query(options); + const result = makeTimeSeriesList(res.data, options); + const latency = Date.now() - now; + this.setState({ latency, loading: false, graphResult: result, requestOptions: options }); + } catch (error) { + console.error(error); + this.setState({ loading: false, graphResult: error }); + } + } + + async runTableQuery() { + const { datasource, queries } = this.state; + if (!hasQuery(queries)) { + return; + } + this.setState({ latency: 0, loading: true, tableResult: null }); + const now = Date.now(); + const options = buildQueryOptions({ + format: 'table', + interval: datasource.interval, + instant: true, + now, + queries: queries.map(q => q.query), + }); + try { + const res = await datasource.query(options); + const tableModel = res.data[0]; + const latency = Date.now() - now; + this.setState({ latency, loading: false, tableResult: tableModel, requestOptions: options }); + } catch (error) { + console.error(error); + this.setState({ loading: false, tableResult: null }); + } + } + + request = url => { + const { datasource } = this.state; + return datasource.metadataRequest(url); + }; + + render() { + const { + datasource, + datasourceError, + datasourceLoading, + graphResult, + latency, + loading, + queries, + requestOptions, + showingGraph, + showingTable, + tableResult, + } = this.state; + const showingBoth = showingGraph && showingTable; + const graphHeight = showingBoth ? '200px' : null; + const graphButtonClassName = showingBoth || showingGraph ? 'btn m-r-1' : 'btn btn-inverse m-r-1'; + const tableButtonClassName = showingBoth || showingTable ? 'btn m-r-1' : 'btn btn-inverse m-r-1'; + return ( +
<div className="explore">
+        <div className="navbar">
+          <div>
+            <a className="navbar-page-btn">Explore</a>
+          </div>
+          <div className="navbar__spacer" />
+          <div className="navbar-buttons">
+            <button className={graphButtonClassName} onClick={this.handleClickGraphButton}>
+              Graph
+            </button>
+            <button className={tableButtonClassName} onClick={this.handleClickTableButton}>
+              Table
+            </button>
+          </div>
+        </div>
+        {datasourceLoading ? <div className="explore-container">Loading datasource...</div> : null}
+        {datasourceError ? <div className="explore-container">Error connecting to datasource.</div> : null}
+        {datasource ? (
+          <div className="explore-container">
+            <QueryRows
+              queries={queries}
+              request={this.request}
+              onAddQueryRow={this.handleAddQueryRow}
+              onChangeQuery={this.handleChangeQuery}
+              onExecuteQuery={this.handleSubmit}
+              onRemoveQueryRow={this.handleRemoveQueryRow}
+            />
+            <div className="m-t-1">
+              <button type="submit" className="btn btn-primary" onClick={this.handleSubmit}>
+                Run Query
+              </button>
+              {loading || latency ? <ElapsedTime time={latency} className="m-l-1" /> : null}
+            </div>
+            <main className="m-t-2">
+              {showingGraph ? (
+                <Graph data={graphResult} height={graphHeight} id="explore-graph" options={requestOptions} />
+              ) : null}
+              {showingGraph ? <Legend data={graphResult} /> : null}
+              {showingTable ? <Table data={tableResult} className="m-t-3" /> : null}
+            </main>
+          </div>
+        ) : null}
+      </div>
+    );
+  }
+}
+
+export default hot(module)(Explore);
diff --git a/public/app/containers/Explore/Graph.tsx b/public/app/containers/Explore/Graph.tsx
new file mode 100644
index 00000000000..0a13b39619d
--- /dev/null
+++ b/public/app/containers/Explore/Graph.tsx
@@ -0,0 +1,123 @@
+import $ from 'jquery';
+import React, { Component } from 'react';
+
+import TimeSeries from 'app/core/time_series2';
+
+import 'vendor/flot/jquery.flot';
+import 'vendor/flot/jquery.flot.time';
+
+// Copied from graph.ts
+function time_format(ticks, min, max) {
+  if (min && max && ticks) {
+    var range = max - min;
+    var secPerTick = range / ticks / 1000;
+    var oneDay = 86400000;
+    var oneYear = 31536000000;
+
+    if (secPerTick <= 45) {
+      return '%H:%M:%S';
+    }
+    if (secPerTick <= 7200 || range <= oneDay) {
+      return '%H:%M';
+    }
+    if (secPerTick <= 80000) {
+      return '%m/%d %H:%M';
+    }
+    if (secPerTick <= 2419200 || range <= oneYear) {
+      return '%m/%d';
+    }
+    return '%Y-%m';
+  }
+
+  return '%H:%M';
+}
+
+const FLOT_OPTIONS = {
+  legend: {
+    show: false,
+  },
+  series: {
+    lines: {
+      linewidth: 1,
+      zero: false,
+    },
+    shadowSize: 0,
+  },
+  grid: {
+    minBorderMargin: 0,
+    markings: [],
+    backgroundColor: null,
+    borderWidth: 0,
+    // hoverable: true,
+    clickable: true,
+    color: '#a1a1a1',
+    margin: { left: 0, right: 0 },
+    labelMarginX: 0,
+  },
+  // selection: {
+  //   mode: 'x',
+  //   color: '#666',
+  // },
+  // crosshair: {
+  //   mode: 'x',
+  // },
+};
+
+class Graph extends Component {
+  componentDidMount() {
+    this.draw();
+  }
+
+  componentDidUpdate(prevProps) {
+    if (
+      prevProps.data !== this.props.data ||
+      prevProps.options !== this.props.options ||
+      prevProps.height !== this.props.height
+    ) {
+      this.draw();
+    }
+  }
+
+  draw() {
+    const { data, options: userOptions } = this.props;
+    if (!data) {
+      return;
+    }
+    const series = data.map((ts: TimeSeries) => ({
+      label: ts.label,
+      data: ts.getFlotPairs('null'),
+    }));
+
+    const $el = $(`#${this.props.id}`);
+    const ticks = $el.width() / 100;
+    const min = userOptions.range.from.valueOf();
+    const max = userOptions.range.to.valueOf();
+    const dynamicOptions = {
+      xaxis: {
+        mode: 'time',
+        min: min,
+        max: max,
+        label: 'Datetime',
+        ticks: ticks,
+        timeformat: time_format(ticks, min, max),
+      },
+    };
+    const options = {
+      ...FLOT_OPTIONS,
+      ...dynamicOptions,
+      ...userOptions,
+    };
+    $.plot($el, series, options);
+  }
+
+  render() {
+    const style = {
+      height: this.props.height || '400px',
+      width: this.props.width || '100%',
+    };
+
+    return <div id={this.props.id} style={style} />;
+  }
+}
+
+export default Graph;
diff --git a/public/app/containers/Explore/Legend.tsx b/public/app/containers/Explore/Legend.tsx
new file mode 100644
index 00000000000..e00932fe566
--- /dev/null
+++ b/public/app/containers/Explore/Legend.tsx
@@ -0,0 +1,22 @@
+import React, { PureComponent } from 'react';
+
+const LegendItem = ({ series }) => (
+  <div className="graph-legend-series">
+    <i className="fa fa-minus" style={{ color: series.color }} />
+    <span className="graph-legend-alias">{series.alias}</span>
+  </div>
+);
+
+export default class Legend extends PureComponent {
+  render() {
+    const { className = '', data } = this.props;
+    const items = data || [];
+    return (
+      <div className={`${className} graph-legend`}>
+        {items.map(series => <LegendItem key={series.alias} series={series} />)}
+      </div>
+ ); + } +} diff --git a/public/app/containers/Explore/QueryField.tsx b/public/app/containers/Explore/QueryField.tsx new file mode 100644 index 00000000000..816473619fd --- /dev/null +++ b/public/app/containers/Explore/QueryField.tsx @@ -0,0 +1,562 @@ +import React from 'react'; +import ReactDOM from 'react-dom'; +import { Value } from 'slate'; +import { Editor } from 'slate-react'; +import Plain from 'slate-plain-serializer'; + +// dom also includes Element polyfills +import { getNextCharacter, getPreviousCousin } from './utils/dom'; +import BracesPlugin from './slate-plugins/braces'; +import ClearPlugin from './slate-plugins/clear'; +import NewlinePlugin from './slate-plugins/newline'; +import PluginPrism, { configurePrismMetricsTokens } from './slate-plugins/prism/index'; +import RunnerPlugin from './slate-plugins/runner'; +import debounce from './utils/debounce'; +import { processLabels, RATE_RANGES, cleanText } from './utils/prometheus'; + +import Typeahead from './Typeahead'; + +const EMPTY_METRIC = ''; +const TYPEAHEAD_DEBOUNCE = 300; + +function flattenSuggestions(s) { + return s ? s.reduce((acc, g) => acc.concat(g.items), []) : []; +} + +const getInitialValue = query => + Value.fromJSON({ + document: { + nodes: [ + { + object: 'block', + type: 'paragraph', + nodes: [ + { + object: 'text', + leaves: [ + { + text: query, + }, + ], + }, + ], + }, + ], + }, + }); + +class Portal extends React.Component { + node: any; + constructor(props) { + super(props); + this.node = document.createElement('div'); + this.node.classList.add(`query-field-portal-${props.index}`); + document.body.appendChild(this.node); + } + + componentWillUnmount() { + document.body.removeChild(this.node); + } + + render() { + return ReactDOM.createPortal(this.props.children, this.node); + } +} + +class QueryField extends React.Component { + menuEl: any; + plugins: any; + resetTimer: any; + + constructor(props, context) { + super(props, context); + + this.plugins = [ + BracesPlugin(), + ClearPlugin(), + RunnerPlugin({ handler: props.onPressEnter }), + NewlinePlugin(), + PluginPrism(), + ]; + + this.state = { + labelKeys: {}, + labelValues: {}, + metrics: props.metrics || [], + suggestions: [], + typeaheadIndex: 0, + typeaheadPrefix: '', + value: getInitialValue(props.initialQuery || ''), + }; + } + + componentDidMount() { + this.updateMenu(); + + if (this.props.metrics === undefined) { + this.fetchMetricNames(); + } + } + + componentWillUnmount() { + clearTimeout(this.resetTimer); + } + + componentDidUpdate() { + this.updateMenu(); + } + + componentWillReceiveProps(nextProps) { + if (nextProps.metrics && nextProps.metrics !== this.props.metrics) { + this.setState({ metrics: nextProps.metrics }, this.onMetricsReceived); + } + // initialQuery is null in case the user typed + if (nextProps.initialQuery !== null && nextProps.initialQuery !== this.props.initialQuery) { + this.setState({ value: getInitialValue(nextProps.initialQuery) }); + } + } + + onChange = ({ value }) => { + const changed = value.document !== this.state.value.document; + this.setState({ value }, () => { + if (changed) { + this.handleChangeQuery(); + } + }); + + window.requestAnimationFrame(this.handleTypeahead); + }; + + onMetricsReceived = () => { + if (!this.state.metrics) { + return; + } + configurePrismMetricsTokens(this.state.metrics); + // Trigger re-render + window.requestAnimationFrame(() => { + // Bogus edit to trigger highlighting + const change = this.state.value + .change() + .insertText(' ') + .deleteBackward(1); + this.onChange(change); + 
}); + }; + + request = url => { + if (this.props.request) { + return this.props.request(url); + } + return fetch(url); + }; + + handleChangeQuery = () => { + // Send text change to parent + const { onQueryChange } = this.props; + if (onQueryChange) { + onQueryChange(Plain.serialize(this.state.value)); + } + }; + + handleTypeahead = debounce(() => { + const selection = window.getSelection(); + if (selection.anchorNode) { + const wrapperNode = selection.anchorNode.parentElement; + const editorNode = wrapperNode.closest('.query-field'); + if (!editorNode || this.state.value.isBlurred) { + // Not inside this editor + return; + } + + const range = selection.getRangeAt(0); + const text = selection.anchorNode.textContent; + const offset = range.startOffset; + const prefix = cleanText(text.substr(0, offset)); + + // Determine candidates by context + const suggestionGroups = []; + const wrapperClasses = wrapperNode.classList; + let typeaheadContext = null; + + // Take first metric as lucky guess + const metricNode = editorNode.querySelector('.metric'); + + if (wrapperClasses.contains('context-range')) { + // Rate ranges + typeaheadContext = 'context-range'; + suggestionGroups.push({ + label: 'Range vector', + items: [...RATE_RANGES], + }); + } else if (wrapperClasses.contains('context-labels') && metricNode) { + const metric = metricNode.textContent; + const labelKeys = this.state.labelKeys[metric]; + if (labelKeys) { + if ((text && text.startsWith('=')) || wrapperClasses.contains('attr-value')) { + // Label values + const labelKeyNode = getPreviousCousin(wrapperNode, '.attr-name'); + if (labelKeyNode) { + const labelKey = labelKeyNode.textContent; + const labelValues = this.state.labelValues[metric][labelKey]; + typeaheadContext = 'context-label-values'; + suggestionGroups.push({ + label: 'Label values', + items: labelValues, + }); + } + } else { + // Label keys + typeaheadContext = 'context-labels'; + suggestionGroups.push({ label: 'Labels', items: labelKeys }); + } + } else { + this.fetchMetricLabels(metric); + } + } else if (wrapperClasses.contains('context-labels') && !metricNode) { + // Empty name queries + const defaultKeys = ['job', 'instance']; + // Munge all keys that we have seen together + const labelKeys = Object.keys(this.state.labelKeys).reduce((acc, metric) => { + return acc.concat(this.state.labelKeys[metric].filter(key => acc.indexOf(key) === -1)); + }, defaultKeys); + if ((text && text.startsWith('=')) || wrapperClasses.contains('attr-value')) { + // Label values + const labelKeyNode = getPreviousCousin(wrapperNode, '.attr-name'); + if (labelKeyNode) { + const labelKey = labelKeyNode.textContent; + if (this.state.labelValues[EMPTY_METRIC]) { + const labelValues = this.state.labelValues[EMPTY_METRIC][labelKey]; + typeaheadContext = 'context-label-values'; + suggestionGroups.push({ + label: 'Label values', + items: labelValues, + }); + } else { + // Can only query label values for now (API to query keys is under development) + this.fetchLabelValues(labelKey); + } + } + } else { + // Label keys + typeaheadContext = 'context-labels'; + suggestionGroups.push({ label: 'Labels', items: labelKeys }); + } + } else if (metricNode && wrapperClasses.contains('context-aggregation')) { + typeaheadContext = 'context-aggregation'; + const metric = metricNode.textContent; + const labelKeys = this.state.labelKeys[metric]; + if (labelKeys) { + suggestionGroups.push({ label: 'Labels', items: labelKeys }); + } else { + this.fetchMetricLabels(metric); + } + } else if ( + (this.state.metrics && 
((prefix && !wrapperClasses.contains('token')) || text.match(/[+\-*/^%]/))) || + wrapperClasses.contains('context-function') + ) { + // Need prefix for metrics + typeaheadContext = 'context-metrics'; + suggestionGroups.push({ + label: 'Metrics', + items: this.state.metrics, + }); + } + + let results = 0; + const filteredSuggestions = suggestionGroups.map(group => { + if (group.items) { + group.items = group.items.filter(c => c.length !== prefix.length && c.indexOf(prefix) > -1); + results += group.items.length; + } + return group; + }); + + console.log('handleTypeahead', selection.anchorNode, wrapperClasses, text, offset, prefix, typeaheadContext); + + this.setState({ + typeaheadPrefix: prefix, + typeaheadContext, + typeaheadText: text, + suggestions: results > 0 ? filteredSuggestions : [], + }); + } + }, TYPEAHEAD_DEBOUNCE); + + applyTypeahead(change, suggestion) { + const { typeaheadPrefix, typeaheadContext, typeaheadText } = this.state; + + // Modify suggestion based on context + switch (typeaheadContext) { + case 'context-labels': { + const nextChar = getNextCharacter(); + if (!nextChar || nextChar === '}' || nextChar === ',') { + suggestion += '='; + } + break; + } + + case 'context-label-values': { + // Always add quotes and remove existing ones instead + if (!(typeaheadText.startsWith('="') || typeaheadText.startsWith('"'))) { + suggestion = `"${suggestion}`; + } + if (getNextCharacter() !== '"') { + suggestion = `${suggestion}"`; + } + break; + } + + default: + } + + this.resetTypeahead(); + + // Remove the current, incomplete text and replace it with the selected suggestion + let backward = typeaheadPrefix.length; + const text = cleanText(typeaheadText); + const suffixLength = text.length - typeaheadPrefix.length; + const offset = typeaheadText.indexOf(typeaheadPrefix); + const midWord = typeaheadPrefix && ((suffixLength > 0 && offset > -1) || suggestion === typeaheadText); + const forward = midWord ? 
suffixLength + offset : 0; + + return ( + change + // TODO this line breaks if cursor was moved left and length is longer than whole prefix + .deleteBackward(backward) + .deleteForward(forward) + .insertText(suggestion) + .focus() + ); + } + + onKeyDown = (event, change) => { + if (this.menuEl) { + const { typeaheadIndex, suggestions } = this.state; + + switch (event.key) { + case 'Escape': { + if (this.menuEl) { + event.preventDefault(); + this.resetTypeahead(); + return true; + } + break; + } + + case 'Tab': { + // Dont blur input + event.preventDefault(); + if (!suggestions || suggestions.length === 0) { + return undefined; + } + + // Get the currently selected suggestion + const flattenedSuggestions = flattenSuggestions(suggestions); + const selected = Math.abs(typeaheadIndex); + const selectedIndex = selected % flattenedSuggestions.length || 0; + const suggestion = flattenedSuggestions[selectedIndex]; + + this.applyTypeahead(change, suggestion); + return true; + } + + case 'ArrowDown': { + // Select next suggestion + event.preventDefault(); + this.setState({ typeaheadIndex: typeaheadIndex + 1 }); + break; + } + + case 'ArrowUp': { + // Select previous suggestion + event.preventDefault(); + this.setState({ typeaheadIndex: Math.max(0, typeaheadIndex - 1) }); + break; + } + + default: { + // console.log('default key', event.key, event.which, event.charCode, event.locale, data.key); + break; + } + } + } + return undefined; + }; + + resetTypeahead = () => { + this.setState({ + suggestions: [], + typeaheadIndex: 0, + typeaheadPrefix: '', + typeaheadContext: null, + }); + }; + + async fetchLabelValues(key) { + const url = `/api/v1/label/${key}/values`; + try { + const res = await this.request(url); + const body = await (res.data || res.json()); + const pairs = this.state.labelValues[EMPTY_METRIC]; + const values = { + ...pairs, + [key]: body.data, + }; + // const labelKeys = { + // ...this.state.labelKeys, + // [EMPTY_METRIC]: keys, + // }; + const labelValues = { + ...this.state.labelValues, + [EMPTY_METRIC]: values, + }; + this.setState({ labelValues }, this.handleTypeahead); + } catch (e) { + if (this.props.onRequestError) { + this.props.onRequestError(e); + } else { + console.error(e); + } + } + } + + async fetchMetricLabels(name) { + const url = `/api/v1/series?match[]=${name}`; + try { + const res = await this.request(url); + const body = await (res.data || res.json()); + const { keys, values } = processLabels(body.data); + const labelKeys = { + ...this.state.labelKeys, + [name]: keys, + }; + const labelValues = { + ...this.state.labelValues, + [name]: values, + }; + this.setState({ labelKeys, labelValues }, this.handleTypeahead); + } catch (e) { + if (this.props.onRequestError) { + this.props.onRequestError(e); + } else { + console.error(e); + } + } + } + + async fetchMetricNames() { + const url = '/api/v1/label/__name__/values'; + try { + const res = await this.request(url); + const body = await (res.data || res.json()); + this.setState({ metrics: body.data }, this.onMetricsReceived); + } catch (error) { + if (this.props.onRequestError) { + this.props.onRequestError(error); + } else { + console.error(error); + } + } + } + + handleBlur = () => { + const { onBlur } = this.props; + // If we dont wait here, menu clicks wont work because the menu + // will be gone. 
+ this.resetTimer = setTimeout(this.resetTypeahead, 100); + if (onBlur) { + onBlur(); + } + }; + + handleFocus = () => { + const { onFocus } = this.props; + if (onFocus) { + onFocus(); + } + }; + + handleClickMenu = item => { + // Manually triggering change + const change = this.applyTypeahead(this.state.value.change(), item); + this.onChange(change); + }; + + updateMenu = () => { + const { suggestions } = this.state; + const menu = this.menuEl; + const selection = window.getSelection(); + const node = selection.anchorNode; + + // No menu, nothing to do + if (!menu) { + return; + } + + // No suggestions or blur, remove menu + const hasSuggesstions = suggestions && suggestions.length > 0; + if (!hasSuggesstions) { + menu.removeAttribute('style'); + return; + } + + // Align menu overlay to editor node + if (node) { + const rect = node.parentElement.getBoundingClientRect(); + menu.style.opacity = 1; + menu.style.top = `${rect.top + window.scrollY + rect.height + 4}px`; + menu.style.left = `${rect.left + window.scrollX - 2}px`; + } + }; + + menuRef = el => { + this.menuEl = el; + }; + + renderMenu = () => { + const { suggestions } = this.state; + const hasSuggesstions = suggestions && suggestions.length > 0; + if (!hasSuggesstions) { + return null; + } + + // Guard selectedIndex to be within the length of the suggestions + let selectedIndex = Math.max(this.state.typeaheadIndex, 0); + const flattenedSuggestions = flattenSuggestions(suggestions); + selectedIndex = selectedIndex % flattenedSuggestions.length || 0; + const selectedKeys = flattenedSuggestions.length > 0 ? [flattenedSuggestions[selectedIndex]] : []; + + // Create typeahead in DOM root so we can later position it absolutely + return ( + + + + ); + }; + + render() { + return ( +
<div className="query-field">
+        {this.renderMenu()}
+        <Editor
+          autoCorrect={false}
+          onBlur={this.handleBlur}
+          onFocus={this.handleFocus}
+          onChange={this.onChange}
+          onKeyDown={this.onKeyDown}
+          placeholder={this.props.placeholder}
+          plugins={this.plugins}
+          spellCheck={false}
+          value={this.state.value}
+        />
+      </div>
+ ); + } +} + +export default QueryField; diff --git a/public/app/containers/Explore/QueryRows.tsx b/public/app/containers/Explore/QueryRows.tsx new file mode 100644 index 00000000000..3940d16b2f6 --- /dev/null +++ b/public/app/containers/Explore/QueryRows.tsx @@ -0,0 +1,80 @@ +import React, { PureComponent } from 'react'; + +import QueryField from './QueryField'; + +class QueryRow extends PureComponent { + constructor(props) { + super(props); + this.state = { + edited: false, + query: props.query || '', + }; + } + + handleChangeQuery = value => { + const { index, onChangeQuery } = this.props; + const { query } = this.state; + const edited = query !== value; + this.setState({ edited, query: value }); + if (onChangeQuery) { + onChangeQuery(value, index); + } + }; + + handleClickAddButton = () => { + const { index, onAddQueryRow } = this.props; + if (onAddQueryRow) { + onAddQueryRow(index); + } + }; + + handleClickRemoveButton = () => { + const { index, onRemoveQueryRow } = this.props; + if (onRemoveQueryRow) { + onRemoveQueryRow(index); + } + }; + + handlePressEnter = () => { + const { onExecuteQuery } = this.props; + if (onExecuteQuery) { + onExecuteQuery(); + } + }; + + render() { + const { request } = this.props; + const { edited, query } = this.state; + return ( +
<div className="query-row">
+        <div className="query-row-tools">
+          <button className="btn btn-small" onClick={this.handleClickAddButton}>
+            <i className="fa fa-plus" />
+          </button>
+          <button className="btn btn-small" onClick={this.handleClickRemoveButton}>
+            <i className="fa fa-minus" />
+          </button>
+        </div>
+        <div className="query-field-wrapper">
+          <QueryField
+            initialQuery={edited ? null : query}
+            onPressEnter={this.handlePressEnter}
+            onQueryChange={this.handleChangeQuery}
+            request={request}
+          />
+        </div>
+      </div>
+    );
+  }
+}
+
+export default class QueryRows extends PureComponent {
+  render() {
+    const { className = '', queries, ...handlers } = this.props;
+    return (
+      <div className={className}>
+        {queries.map((q, index) => <QueryRow key={q.key} index={index} query={q.query} {...handlers} />)}
+      </div>
+ ); + } +} diff --git a/public/app/containers/Explore/Table.tsx b/public/app/containers/Explore/Table.tsx new file mode 100644 index 00000000000..7179a0fc89a --- /dev/null +++ b/public/app/containers/Explore/Table.tsx @@ -0,0 +1,24 @@ +import React, { PureComponent } from 'react'; +// import TableModel from 'app/core/table_model'; + +const EMPTY_TABLE = { + columns: [], + rows: [], +}; + +export default class Table extends PureComponent { + render() { + const { className = '', data } = this.props; + const tableModel = data || EMPTY_TABLE; + return ( +
<table className={`${className} table`}>
+        <thead>
+          <tr>{tableModel.columns.map(col => <th key={col.text}>{col.text}</th>)}</tr>
+        </thead>
+        <tbody>
+          {tableModel.rows.map((row, i) => (
+            <tr key={i}>{row.map((content, j) => <td key={j}>{content}</td>)}</tr>
+          ))}
+        </tbody>
+      </table>
+ ); + } +} diff --git a/public/app/containers/Explore/Typeahead.tsx b/public/app/containers/Explore/Typeahead.tsx new file mode 100644 index 00000000000..4943622fe4e --- /dev/null +++ b/public/app/containers/Explore/Typeahead.tsx @@ -0,0 +1,66 @@ +import React from 'react'; + +function scrollIntoView(el) { + if (!el || !el.offsetParent) { + return; + } + const container = el.offsetParent; + if (el.offsetTop > container.scrollTop + container.offsetHeight || el.offsetTop < container.scrollTop) { + container.scrollTop = el.offsetTop - container.offsetTop; + } +} + +class TypeaheadItem extends React.PureComponent { + el: any; + componentDidUpdate(prevProps) { + if (this.props.isSelected && !prevProps.isSelected) { + scrollIntoView(this.el); + } + } + + getRef = el => { + this.el = el; + }; + + render() { + const { isSelected, label, onClickItem } = this.props; + const className = isSelected ? 'typeahead-item typeahead-item__selected' : 'typeahead-item'; + const onClick = () => onClickItem(label); + return ( +
<li ref={this.getRef} className={className} onClick={onClick}>
+        {label}
+      </li>
+    );
+  }
+}
+
+class TypeaheadGroup extends React.PureComponent {
+  render() {
+    const { items, label, selected, onClickItem } = this.props;
+    return (
+      <li className="typeahead-group">
+        <div className="typeahead-group__title">{label}</div>
+        <ul className="typeahead-group__list">
+          {items.map(item => (
+            <TypeaheadItem key={item} onClickItem={onClickItem} isSelected={selected.indexOf(item) > -1} label={item} />
+          ))}
+        </ul>
+      </li>
+    );
+  }
+}
+
+class Typeahead extends React.PureComponent {
+  render() {
+    const { groupedItems, menuRef, selectedItems, onClickItem } = this.props;
+    return (
+      <ul className="typeahead" ref={menuRef}>
+        {groupedItems.map(g => (
+          <TypeaheadGroup key={g.label} onClickItem={onClickItem} selected={selectedItems} {...g} />
+        ))}
+      </ul>
    + ); + } +} + +export default Typeahead; diff --git a/public/app/containers/Explore/slate-plugins/braces.test.ts b/public/app/containers/Explore/slate-plugins/braces.test.ts new file mode 100644 index 00000000000..5c9a90ae034 --- /dev/null +++ b/public/app/containers/Explore/slate-plugins/braces.test.ts @@ -0,0 +1,47 @@ +import Plain from 'slate-plain-serializer'; + +import BracesPlugin from './braces'; + +declare global { + interface Window { + KeyboardEvent: any; + } +} + +describe('braces', () => { + const handler = BracesPlugin().onKeyDown; + + it('adds closing braces around empty value', () => { + const change = Plain.deserialize('').change(); + const event = new window.KeyboardEvent('keydown', { key: '(' }); + handler(event, change); + expect(Plain.serialize(change.value)).toEqual('()'); + }); + + it('adds closing braces around a value', () => { + const change = Plain.deserialize('foo').change(); + const event = new window.KeyboardEvent('keydown', { key: '(' }); + handler(event, change); + expect(Plain.serialize(change.value)).toEqual('(foo)'); + }); + + it('adds closing braces around the following value only', () => { + const change = Plain.deserialize('foo bar ugh').change(); + let event; + event = new window.KeyboardEvent('keydown', { key: '(' }); + handler(event, change); + expect(Plain.serialize(change.value)).toEqual('(foo) bar ugh'); + + // Wrap bar + change.move(5); + event = new window.KeyboardEvent('keydown', { key: '(' }); + handler(event, change); + expect(Plain.serialize(change.value)).toEqual('(foo) (bar) ugh'); + + // Create empty parens after (bar) + change.move(4); + event = new window.KeyboardEvent('keydown', { key: '(' }); + handler(event, change); + expect(Plain.serialize(change.value)).toEqual('(foo) (bar)() ugh'); + }); +}); diff --git a/public/app/containers/Explore/slate-plugins/braces.ts b/public/app/containers/Explore/slate-plugins/braces.ts new file mode 100644 index 00000000000..b92a224d111 --- /dev/null +++ b/public/app/containers/Explore/slate-plugins/braces.ts @@ -0,0 +1,51 @@ +const BRACES = { + '[': ']', + '{': '}', + '(': ')', +}; + +export default function BracesPlugin() { + return { + onKeyDown(event, change) { + const { value } = change; + if (!value.isCollapsed) { + return undefined; + } + + switch (event.key) { + case '{': + case '[': { + event.preventDefault(); + // Insert matching braces + change + .insertText(`${event.key}${BRACES[event.key]}`) + .move(-1) + .focus(); + return true; + } + + case '(': { + event.preventDefault(); + const text = value.anchorText.text; + const offset = value.anchorOffset; + const space = text.indexOf(' ', offset); + const length = space > 0 ? 
space : text.length; + const forward = length - offset; + // Insert matching braces + change + .insertText(event.key) + .move(forward) + .insertText(BRACES[event.key]) + .move(-1 - forward) + .focus(); + return true; + } + + default: { + break; + } + } + return undefined; + }, + }; +} diff --git a/public/app/containers/Explore/slate-plugins/clear.test.ts b/public/app/containers/Explore/slate-plugins/clear.test.ts new file mode 100644 index 00000000000..28ba371df14 --- /dev/null +++ b/public/app/containers/Explore/slate-plugins/clear.test.ts @@ -0,0 +1,38 @@ +import Plain from 'slate-plain-serializer'; + +import ClearPlugin from './clear'; + +describe('clear', () => { + const handler = ClearPlugin().onKeyDown; + + it('does not change the empty value', () => { + const change = Plain.deserialize('').change(); + const event = new window.KeyboardEvent('keydown', { + key: 'k', + ctrlKey: true, + }); + handler(event, change); + expect(Plain.serialize(change.value)).toEqual(''); + }); + + it('clears to the end of the line', () => { + const change = Plain.deserialize('foo').change(); + const event = new window.KeyboardEvent('keydown', { + key: 'k', + ctrlKey: true, + }); + handler(event, change); + expect(Plain.serialize(change.value)).toEqual(''); + }); + + it('clears from the middle to the end of the line', () => { + const change = Plain.deserialize('foo bar').change(); + change.move(4); + const event = new window.KeyboardEvent('keydown', { + key: 'k', + ctrlKey: true, + }); + handler(event, change); + expect(Plain.serialize(change.value)).toEqual('foo '); + }); +}); diff --git a/public/app/containers/Explore/slate-plugins/clear.ts b/public/app/containers/Explore/slate-plugins/clear.ts new file mode 100644 index 00000000000..5e2789bf544 --- /dev/null +++ b/public/app/containers/Explore/slate-plugins/clear.ts @@ -0,0 +1,22 @@ +// Clears the rest of the line after the caret +export default function ClearPlugin() { + return { + onKeyDown(event, change) { + const { value } = change; + if (!value.isCollapsed) { + return undefined; + } + + if (event.key === 'k' && event.ctrlKey) { + event.preventDefault(); + const text = value.anchorText.text; + const offset = value.anchorOffset; + const length = text.length; + const forward = length - offset; + change.deleteForward(forward); + return true; + } + return undefined; + }, + }; +} diff --git a/public/app/containers/Explore/slate-plugins/newline.ts b/public/app/containers/Explore/slate-plugins/newline.ts new file mode 100644 index 00000000000..cae8af3acb0 --- /dev/null +++ b/public/app/containers/Explore/slate-plugins/newline.ts @@ -0,0 +1,35 @@ +function getIndent(text) { + let offset = text.length - text.trimLeft().length; + if (offset) { + let indent = text[0]; + while (--offset) { + indent += text[0]; + } + return indent; + } + return ''; +} + +export default function NewlinePlugin() { + return { + onKeyDown(event, change) { + const { value } = change; + if (!value.isCollapsed) { + return undefined; + } + + if (event.key === 'Enter' && event.shiftKey) { + event.preventDefault(); + + const { startBlock } = value; + const currentLineText = startBlock.text; + const indent = getIndent(currentLineText); + + return change + .splitBlock() + .insertText(indent) + .focus(); + } + }, + }; +} diff --git a/public/app/containers/Explore/slate-plugins/prism/index.tsx b/public/app/containers/Explore/slate-plugins/prism/index.tsx new file mode 100644 index 00000000000..7c3fa296d8e --- /dev/null +++ b/public/app/containers/Explore/slate-plugins/prism/index.tsx @@ -0,0 
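The editor plugins above (braces, clear, newline) all share one contract: onKeyDown(event, change) either mutates the Slate change and returns true to stop further processing, or returns undefined to fall through to the next plugin and Slate's defaults. A minimal composition sketch, assuming slate-react's Editor component and a Plain-serialized value, neither of which appears in this diff:

import React from 'react';
import { Editor } from 'slate-react';
import Plain from 'slate-plain-serializer';

import BracesPlugin from './slate-plugins/braces';
import ClearPlugin from './slate-plugins/clear';
import NewlinePlugin from './slate-plugins/newline';

// Order matters: the first plugin whose onKeyDown returns true wins.
const plugins = [BracesPlugin(), ClearPlugin(), NewlinePlugin()];

// Hypothetical host component, not part of this change.
const QueryEditorSketch = () => (
  <Editor plugins={plugins} value={Plain.deserialize('')} onChange={() => {}} />
);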
+1,122 @@
+import React from 'react';
+import Prism from 'prismjs';
+
+import Promql from './promql';
+
+Prism.languages.promql = Promql;
+
+const TOKEN_MARK = 'prism-token';
+
+export function configurePrismMetricsTokens(metrics) {
+  Prism.languages.promql.metric = {
+    alias: 'variable',
+    pattern: new RegExp(`(?:^|\\s)(${metrics.join('|')})(?:$|\\s)`),
+  };
+}
+
+/**
+ * Code-highlighting plugin based on Prism and
+ * https://github.com/ianstormtaylor/slate/blob/master/examples/code-highlighting/index.js
+ *
+ * (Adapted to handle nested grammar definitions.)
+ */
+
+export default function PrismPlugin() {
+  return {
+    /**
+     * Render a Slate mark with appropriate CSS class names
+     *
+     * @param {Object} props
+     * @return {Element}
+     */
+
+    renderMark(props) {
+      const { children, mark } = props;
+      // Only apply spans to marks identified by this plugin
+      if (mark.type !== TOKEN_MARK) {
+        return undefined;
+      }
+      const className = `token ${mark.data.get('types')}`;
+      return <span className={className}>{children}</span>;
+    },
+
+    /**
+     * Decorate code blocks with Prism.js highlighting.
+     *
+     * @param {Node} node
+     * @return {Array}
+     */
+
+    decorateNode(node) {
+      if (node.type !== 'paragraph') {
+        return [];
+      }
+
+      const texts = node.getTexts().toArray();
+      const tstring = texts.map(t => t.text).join('\n');
+      const grammar = Prism.languages.promql;
+      const tokens = Prism.tokenize(tstring, grammar);
+      const decorations = [];
+      let startText = texts.shift();
+      let endText = startText;
+      let startOffset = 0;
+      let endOffset = 0;
+      let start = 0;
+
+      function processToken(token, acc?) {
+        // Accumulate token types down the tree
+        const types = `${acc || ''} ${token.type || ''} ${token.alias || ''}`;
+
+        // Add mark for token node
+        if (typeof token === 'string' || typeof token.content === 'string') {
+          startText = endText;
+          startOffset = endOffset;
+
+          const content = typeof token === 'string' ?
token : token.content; + const newlines = content.split('\n').length - 1; + const length = content.length - newlines; + const end = start + length; + + let available = startText.text.length - startOffset; + let remaining = length; + + endOffset = startOffset + remaining; + + while (available < remaining) { + endText = texts.shift(); + remaining = length - available; + available = endText.text.length; + endOffset = remaining; + } + + // Inject marks from up the tree (acc) as well + if (typeof token !== 'string' || acc) { + const range = { + anchorKey: startText.key, + anchorOffset: startOffset, + focusKey: endText.key, + focusOffset: endOffset, + marks: [{ type: TOKEN_MARK, data: { types } }], + }; + + decorations.push(range); + } + + start = end; + } else if (token.content && token.content.length) { + // Tokens can be nested + for (const subToken of token.content) { + processToken(subToken, types); + } + } + } + + // Process top-level tokens + for (const token of tokens) { + processToken(token); + } + + return decorations; + }, + }; +} diff --git a/public/app/containers/Explore/slate-plugins/prism/promql.ts b/public/app/containers/Explore/slate-plugins/prism/promql.ts new file mode 100644 index 00000000000..0f0be18cb6f --- /dev/null +++ b/public/app/containers/Explore/slate-plugins/prism/promql.ts @@ -0,0 +1,123 @@ +export const OPERATORS = ['by', 'group_left', 'group_right', 'ignoring', 'on', 'offset', 'without']; + +const AGGREGATION_OPERATORS = [ + 'sum', + 'min', + 'max', + 'avg', + 'stddev', + 'stdvar', + 'count', + 'count_values', + 'bottomk', + 'topk', + 'quantile', +]; + +export const FUNCTIONS = [ + ...AGGREGATION_OPERATORS, + 'abs', + 'absent', + 'ceil', + 'changes', + 'clamp_max', + 'clamp_min', + 'count_scalar', + 'day_of_month', + 'day_of_week', + 'days_in_month', + 'delta', + 'deriv', + 'drop_common_labels', + 'exp', + 'floor', + 'histogram_quantile', + 'holt_winters', + 'hour', + 'idelta', + 'increase', + 'irate', + 'label_replace', + 'ln', + 'log2', + 'log10', + 'minute', + 'month', + 'predict_linear', + 'rate', + 'resets', + 'round', + 'scalar', + 'sort', + 'sort_desc', + 'sqrt', + 'time', + 'vector', + 'year', + 'avg_over_time', + 'min_over_time', + 'max_over_time', + 'sum_over_time', + 'count_over_time', + 'quantile_over_time', + 'stddev_over_time', + 'stdvar_over_time', +]; + +const tokenizer = { + comment: { + pattern: /(^|[^\n])#.*/, + lookbehind: true, + }, + 'context-aggregation': { + pattern: /((by|without)\s*)\([^)]*\)/, // by () + lookbehind: true, + inside: { + 'label-key': { + pattern: /[^,\s][^,]*[^,\s]*/, + alias: 'attr-name', + }, + }, + }, + 'context-labels': { + pattern: /\{[^}]*(?=})/, + inside: { + 'label-key': { + pattern: /[a-z_]\w*(?=\s*(=|!=|=~|!~))/, + alias: 'attr-name', + }, + 'label-value': { + pattern: /"(?:\\.|[^\\"])*"/, + greedy: true, + alias: 'attr-value', + }, + }, + }, + function: new RegExp(`\\b(?:${FUNCTIONS.join('|')})(?=\\s*\\()`, 'i'), + 'context-range': [ + { + pattern: /\[[^\]]*(?=])/, // [1m] + inside: { + 'range-duration': { + pattern: /\b\d+[smhdwy]\b/i, + alias: 'number', + }, + }, + }, + { + pattern: /(offset\s+)\w+/, // offset 1m + lookbehind: true, + inside: { + 'range-duration': { + pattern: /\b\d+[smhdwy]\b/i, + alias: 'number', + }, + }, + }, + ], + number: /\b-?\d+((\.\d*)?([eE][+-]?\d+)?)?\b/, + operator: new RegExp(`/[-+*/=%^~]|&&?|\\|?\\||!=?|<(?:=>?|<|>)?|>[>=]?|\\b(?:${OPERATORS.join('|')})\\b`, 'i'), + punctuation: /[{};()`,.]/, +}; + +export default tokenizer; diff --git 
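Because promql.ts above exports a plain Prism grammar object, it can be exercised directly with Prism.tokenize, which is what PrismPlugin's decorateNode consumes. A small standalone sketch (the query string is illustrative):

import Prism from 'prismjs';
import Promql from './promql';

Prism.languages.promql = Promql;

// Top-level results are plain strings or Prism Token objects whose
// type/alias end up as the CSS classes applied by renderMark.
const tokens = Prism.tokenize('rate(http_requests_total{job="api"}[5m])', Prism.languages.promql);
for (const token of tokens) {
  if (typeof token !== 'string') {
    console.log(token.type, token.alias);
  }
}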
a/public/app/containers/Explore/slate-plugins/runner.ts b/public/app/containers/Explore/slate-plugins/runner.ts new file mode 100644 index 00000000000..44b5943c4a2 --- /dev/null +++ b/public/app/containers/Explore/slate-plugins/runner.ts @@ -0,0 +1,14 @@ +export default function RunnerPlugin({ handler }) { + return { + onKeyDown(event) { + // Handle enter + if (handler && event.key === 'Enter' && !event.shiftKey) { + // Submit on Enter + event.preventDefault(); + handler(event); + return true; + } + return undefined; + }, + }; +} diff --git a/public/app/containers/Explore/utils/debounce.ts b/public/app/containers/Explore/utils/debounce.ts new file mode 100644 index 00000000000..9f2bd35e116 --- /dev/null +++ b/public/app/containers/Explore/utils/debounce.ts @@ -0,0 +1,14 @@ +// Based on underscore.js debounce() +export default function debounce(func, wait) { + let timeout; + return function() { + const context = this; + const args = arguments; + const later = function() { + timeout = null; + func.apply(context, args); + }; + clearTimeout(timeout); + timeout = setTimeout(later, wait); + }; +} diff --git a/public/app/containers/Explore/utils/dom.ts b/public/app/containers/Explore/utils/dom.ts new file mode 100644 index 00000000000..6ba21b54c83 --- /dev/null +++ b/public/app/containers/Explore/utils/dom.ts @@ -0,0 +1,40 @@ +// Node.closest() polyfill +if ('Element' in window && !Element.prototype.closest) { + Element.prototype.closest = function(s) { + const matches = (this.document || this.ownerDocument).querySelectorAll(s); + let el = this; + let i; + // eslint-disable-next-line + do { + i = matches.length; + // eslint-disable-next-line + while (--i >= 0 && matches.item(i) !== el) {} + } while (i < 0 && (el = el.parentElement)); + return el; + }; +} + +export function getPreviousCousin(node, selector) { + let sibling = node.parentElement.previousSibling; + let el; + while (sibling) { + el = sibling.querySelector(selector); + if (el) { + return el; + } + sibling = sibling.previousSibling; + } + return undefined; +} + +export function getNextCharacter(global = window) { + const selection = global.getSelection(); + if (!selection.anchorNode) { + return null; + } + + const range = selection.getRangeAt(0); + const text = selection.anchorNode.textContent; + const offset = range.startOffset; + return text.substr(offset, 1); +} diff --git a/public/app/containers/Explore/utils/prometheus.ts b/public/app/containers/Explore/utils/prometheus.ts new file mode 100644 index 00000000000..30f9c25b8f7 --- /dev/null +++ b/public/app/containers/Explore/utils/prometheus.ts @@ -0,0 +1,20 @@ +export const RATE_RANGES = ['1m', '5m', '10m', '30m', '1h']; + +export function processLabels(labels) { + const values = {}; + labels.forEach(l => { + const { __name__, ...rest } = l; + Object.keys(rest).forEach(key => { + if (!values[key]) { + values[key] = []; + } + if (values[key].indexOf(rest[key]) === -1) { + values[key].push(rest[key]); + } + }); + }); + return { values, keys: Object.keys(values) }; +} + +// Strip syntax chars +export const cleanText = s => s.replace(/[{}[\]="(),!~+\-*/^%]/g, '').trim(); diff --git a/public/app/containers/Explore/utils/query.ts b/public/app/containers/Explore/utils/query.ts new file mode 100644 index 00000000000..d51c7339944 --- /dev/null +++ b/public/app/containers/Explore/utils/query.ts @@ -0,0 +1,31 @@ +export function buildQueryOptions({ format, interval, instant, now, queries }) { + const to = now; + const from = to - 1000 * 60 * 60 * 3; + return { + interval, + range: { + from, + 
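processLabels above folds a list of Prometheus label sets into per-key, deduplicated value lists, discarding the reserved __name__ label, while cleanText strips PromQL syntax characters before typeahead matching. A usage sketch with made-up series:

import { processLabels, cleanText } from './utils/prometheus';

const { keys, values } = processLabels([
  { __name__: 'up', job: 'api', instance: 'a:9090' },
  { __name__: 'up', job: 'api', instance: 'b:9090' },
]);
// keys            -> ['job', 'instance']
// values.instance -> ['a:9090', 'b:9090']

cleanText('{job="api"}'); // -> 'jobapi'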
to, + }, + targets: queries.map(expr => ({ + expr, + format, + instant, + })), + }; +} + +export function generateQueryKey(index = 0) { + return `Q-${Date.now()}-${Math.random()}-${index}`; +} + +export function ensureQueries(queries?) { + if (queries && typeof queries === 'object' && queries.length > 0 && typeof queries[0] === 'string') { + return queries.map((query, i) => ({ key: generateQueryKey(i), query })); + } + return [{ key: generateQueryKey(), query: '' }]; +} + +export function hasQuery(queries) { + return queries.some(q => q.query); +} diff --git a/public/app/containers/ManageDashboards/FolderPermissions.tsx b/public/app/containers/ManageDashboards/FolderPermissions.tsx index 9c82db1c18c..abbde63a179 100644 --- a/public/app/containers/ManageDashboards/FolderPermissions.tsx +++ b/public/app/containers/ManageDashboards/FolderPermissions.tsx @@ -1,4 +1,5 @@ import React, { Component } from 'react'; +import { hot } from 'react-hot-loader'; import { inject, observer } from 'mobx-react'; import { toJS } from 'mobx'; import IContainerProps from 'app/containers/IContainerProps'; @@ -15,6 +16,9 @@ export class FolderPermissions extends Component { constructor(props) { super(props); this.handleAddPermission = this.handleAddPermission.bind(this); + } + + componentDidMount() { this.loadStore(); } @@ -72,3 +76,5 @@ export class FolderPermissions extends Component { ); } } + +export default hot(module)(FolderPermissions); diff --git a/public/app/containers/ManageDashboards/FolderSettings.tsx b/public/app/containers/ManageDashboards/FolderSettings.tsx index e56659ad789..d25a7dc6c06 100644 --- a/public/app/containers/ManageDashboards/FolderSettings.tsx +++ b/public/app/containers/ManageDashboards/FolderSettings.tsx @@ -1,4 +1,5 @@ import React from 'react'; +import { hot } from 'react-hot-loader'; import { inject, observer } from 'mobx-react'; import { toJS } from 'mobx'; import PageHeader from 'app/core/components/PageHeader/PageHeader'; @@ -11,8 +12,7 @@ import appEvents from 'app/core/app_events'; export class FolderSettings extends React.Component { formSnapshot: any; - constructor(props) { - super(props); + componentDidMount() { this.loadStore(); } @@ -156,3 +156,5 @@ export class FolderSettings extends React.Component { ); } } + +export default hot(module)(FolderSettings); diff --git a/public/app/containers/ServerStats/ServerStats.tsx b/public/app/containers/ServerStats/ServerStats.tsx index e40b441d967..761b296855f 100644 --- a/public/app/containers/ServerStats/ServerStats.tsx +++ b/public/app/containers/ServerStats/ServerStats.tsx @@ -1,4 +1,5 @@ import React from 'react'; +import { hot } from 'react-hot-loader'; import { inject, observer } from 'mobx-react'; import PageHeader from 'app/core/components/PageHeader/PageHeader'; import IContainerProps from 'app/containers/IContainerProps'; @@ -43,3 +44,5 @@ function StatItem(stat) { ); } + +export default hot(module)(ServerStats); diff --git a/public/app/core/components/Permissions/AddPermissions.tsx b/public/app/core/components/Permissions/AddPermissions.tsx index 07ccfdbbef5..4dcd07ffb48 100644 --- a/public/app/core/components/Permissions/AddPermissions.tsx +++ b/public/app/core/components/Permissions/AddPermissions.tsx @@ -39,7 +39,7 @@ class AddPermissions extends Component { permissions.newItem.setUser(null, null); return; } - return permissions.newItem.setUser(user.id, user.login); + return permissions.newItem.setUser(user.id, user.login, user.avatarUrl); } teamPicked(team: Team) { @@ -48,7 +48,7 @@ class AddPermissions extends 
Component { permissions.newItem.setTeam(null, null); return; } - return permissions.newItem.setTeam(team.id, team.name); + return permissions.newItem.setTeam(team.id, team.name, team.avatarUrl); } permissionPicked(permission: OptionWithDescription) { diff --git a/public/app/core/components/Permissions/DisabledPermissionsListItem.tsx b/public/app/core/components/Permissions/DisabledPermissionsListItem.tsx index db45714136e..5e2497d983e 100644 --- a/public/app/core/components/Permissions/DisabledPermissionsListItem.tsx +++ b/public/app/core/components/Permissions/DisabledPermissionsListItem.tsx @@ -1,4 +1,4 @@ -import React, { Component } from 'react'; +import React, { Component } from 'react'; import DescriptionPicker from 'app/core/components/Picker/DescriptionPicker'; import { permissionOptions } from 'app/stores/PermissionsStore/PermissionsStore'; @@ -12,9 +12,12 @@ export default class DisabledPermissionListItem extends Component { return ( - - - + + + + + {item.name} + (Role) Can diff --git a/public/app/core/components/Permissions/Permissions.tsx b/public/app/core/components/Permissions/Permissions.tsx index 0a0572ed86e..dbdc1682f6b 100644 --- a/public/app/core/components/Permissions/Permissions.tsx +++ b/public/app/core/components/Permissions/Permissions.tsx @@ -15,9 +15,8 @@ export interface DashboardAcl { permissionName?: string; role?: string; icon?: string; - nameHtml?: string; + name?: string; inherited?: boolean; - sortName?: string; sortRank?: number; } diff --git a/public/app/core/components/Permissions/PermissionsList.tsx b/public/app/core/components/Permissions/PermissionsList.tsx index b215dad2391..a77235ecc30 100644 --- a/public/app/core/components/Permissions/PermissionsList.tsx +++ b/public/app/core/components/Permissions/PermissionsList.tsx @@ -1,4 +1,4 @@ -import React, { Component } from 'react'; +import React, { Component } from 'react'; import PermissionsListItem from './PermissionsListItem'; import DisabledPermissionsListItem from './DisabledPermissionsListItem'; import { observer } from 'mobx-react'; @@ -23,7 +23,7 @@ class PermissionsList extends Component { Admin Role', + name: 'Admin', permission: 4, icon: 'fa fa-fw fa-street-view', }} diff --git a/public/app/core/components/Permissions/PermissionsListItem.tsx b/public/app/core/components/Permissions/PermissionsListItem.tsx index 3140b8fcc0c..b0158525d52 100644 --- a/public/app/core/components/Permissions/PermissionsListItem.tsx +++ b/public/app/core/components/Permissions/PermissionsListItem.tsx @@ -1,4 +1,4 @@ -import React from 'react'; +import React from 'react'; import { observer } from 'mobx-react'; import DescriptionPicker from 'app/core/components/Picker/DescriptionPicker'; import { permissionOptions } from 'app/stores/PermissionsStore/PermissionsStore'; @@ -7,6 +7,30 @@ const setClassNameHelper = inherited => { return inherited ? 
'gf-form-disabled' : ''; }; +function ItemAvatar({ item }) { + if (item.userAvatarUrl) { + return ; + } + if (item.teamAvatarUrl) { + return ; + } + if (item.role === 'Editor') { + return ; + } + + return ; +} + +function ItemDescription({ item }) { + if (item.userId) { + return (User); + } + if (item.teamId) { + return (Team); + } + return (Role); +} + export default observer(({ item, removeItem, permissionChanged, itemIndex, folderInfo }) => { const handleRemoveItem = evt => { evt.preventDefault(); @@ -17,13 +41,15 @@ export default observer(({ item, removeItem, permissionChanged, itemIndex, folde permissionChanged(itemIndex, permissionOption.value, permissionOption.label); }; - const inheritedFromRoot = item.dashboardId === -1 && folderInfo && folderInfo.id === 0; + const inheritedFromRoot = item.dashboardId === -1 && !item.inherited; return ( - - - + + + + + {item.name} {item.inherited && diff --git a/public/app/core/components/ScrollBar/ScrollBar.tsx b/public/app/core/components/ScrollBar/ScrollBar.tsx index 7d9e015df94..24d17f67367 100644 --- a/public/app/core/components/ScrollBar/ScrollBar.tsx +++ b/public/app/core/components/ScrollBar/ScrollBar.tsx @@ -1,5 +1,5 @@ import React from 'react'; -import PerfectScrollbar from 'perfect-scrollbar'; +import baron from 'baron'; export interface Props { children: any; @@ -8,31 +8,36 @@ export interface Props { export default class ScrollBar extends React.Component { private container: any; - private ps: PerfectScrollbar; + private scrollbar: baron; constructor(props) { super(props); } componentDidMount() { - this.ps = new PerfectScrollbar(this.container, { - wheelPropagation: true, + this.scrollbar = baron({ + root: this.container.parentElement, + scroller: this.container, + bar: '.baron__bar', + barOnCls: '_scrollbar', + scrollingCls: '_scrolling', + track: '.baron__track', }); } componentDidUpdate() { - this.ps.update(); + this.scrollbar.update(); } componentWillUnmount() { - this.ps.destroy(); + this.scrollbar.dispose(); } // methods can be invoked by outside setScrollTop(top) { if (this.container) { this.container.scrollTop = top; - this.ps.update(); + this.scrollbar.update(); return true; } @@ -42,21 +47,31 @@ export default class ScrollBar extends React.Component { setScrollLeft(left) { if (this.container) { this.container.scrollLeft = left; - this.ps.update(); + this.scrollbar.update(); return true; } return false; } + update() { + this.scrollbar.update(); + } + handleRef = ref => { this.container = ref; }; render() { return ( -
-      {this.props.children}
+      <div className="baron baron__root">
+        <div className="baron__scroller" ref={this.handleRef}>
+          {this.props.children}
+        </div>
+
+        <div className="baron__track">
+          <div className="baron__bar" />
+        </div>
+      </div>
    ); } diff --git a/public/app/core/components/grafana_app.ts b/public/app/core/components/grafana_app.ts index 798a40cb1bf..89f25776a40 100644 --- a/public/app/core/components/grafana_app.ts +++ b/public/app/core/components/grafana_app.ts @@ -8,11 +8,23 @@ import appEvents from 'app/core/app_events'; import Drop from 'tether-drop'; import { createStore } from 'app/stores/store'; import colors from 'app/core/utils/colors'; +import { BackendSrv } from 'app/core/services/backend_srv'; +import { DatasourceSrv } from 'app/features/plugins/datasource_srv'; export class GrafanaCtrl { /** @ngInject */ - constructor($scope, alertSrv, utilSrv, $rootScope, $controller, contextSrv, bridgeSrv, backendSrv) { - createStore(backendSrv); + constructor( + $scope, + alertSrv, + utilSrv, + $rootScope, + $controller, + contextSrv, + bridgeSrv, + backendSrv: BackendSrv, + datasourceSrv: DatasourceSrv + ) { + createStore({ backendSrv, datasourceSrv }); $scope.init = function() { $scope.contextSrv = contextSrv; @@ -117,6 +129,14 @@ export function grafanaAppDirective(playlistSrv, contextSrv, $timeout, $rootScop appEvents.emit('toggle-kiosk-mode'); } + // check for 'inactive' url param for clean looks like kiosk, but with title + if (data.params.inactive) { + body.addClass('user-activity-low'); + + // for some reason, with this class it looks cleanest + body.addClass('sidemenu-open'); + } + // close all drops for (let drop of Drop.drops) { drop.destroy(); @@ -167,6 +187,7 @@ export function grafanaAppDirective(playlistSrv, contextSrv, $timeout, $rootScop if (sidemenuHidden) { sidemenuHidden = false; body.addClass('sidemenu-open'); + appEvents.emit('toggle-inactive-mode'); $timeout(function() { $rootScope.$broadcast('render'); }, 100); diff --git a/public/app/core/components/json_explorer/helpers.ts b/public/app/core/components/json_explorer/helpers.ts index 5b053792d73..c445e1b0667 100644 --- a/public/app/core/components/json_explorer/helpers.ts +++ b/public/app/core/components/json_explorer/helpers.ts @@ -2,7 +2,7 @@ // Licence MIT, Copyright (c) 2015 Mohsen Azimi /* - * Escapes `"` charachters from string + * Escapes `"` characters from string */ function escapeString(str: string): string { return str.replace('"', '"'); @@ -100,7 +100,7 @@ export function cssClass(className: string): string { } /* - * Creates a new DOM element wiht given type and class + * Creates a new DOM element with given type and class * TODO: move me to helpers */ export function createElement(type: string, className?: string, content?: Element | string): Element { diff --git a/public/app/core/components/json_explorer/json_explorer.ts b/public/app/core/components/json_explorer/json_explorer.ts index 9cc1b53bc82..790ed442d5c 100644 --- a/public/app/core/components/json_explorer/json_explorer.ts +++ b/public/app/core/components/json_explorer/json_explorer.ts @@ -146,7 +146,7 @@ export class JsonExplorer { } /* - * did we recieve a key argument? + * did we receive a key argument? 
* This means that the formatter was called as a sub formatter of a parent formatter */ private get hasKey(): boolean { diff --git a/public/app/core/components/scroll/page_scroll.ts b/public/app/core/components/scroll/page_scroll.ts new file mode 100644 index 00000000000..e6db344a4d6 --- /dev/null +++ b/public/app/core/components/scroll/page_scroll.ts @@ -0,0 +1,41 @@ +import coreModule from 'app/core/core_module'; +import appEvents from 'app/core/app_events'; + +export function pageScrollbar() { + return { + restrict: 'A', + link: function(scope, elem, attrs) { + let lastPos = 0; + + appEvents.on( + 'dash-scroll', + evt => { + if (evt.restore) { + elem[0].scrollTop = lastPos; + return; + } + + lastPos = elem[0].scrollTop; + + if (evt.animate) { + elem.animate({ scrollTop: evt.pos }, 500); + } else { + elem[0].scrollTop = evt.pos; + } + }, + scope + ); + + scope.$on('$routeChangeSuccess', () => { + lastPos = 0; + elem[0].scrollTop = 0; + elem[0].focus(); + }); + + elem[0].tabIndex = -1; + elem[0].focus(); + }, + }; +} + +coreModule.directive('pageScrollbar', pageScrollbar); diff --git a/public/app/core/components/scroll/scroll.ts b/public/app/core/components/scroll/scroll.ts index fbf5fd6cd37..3f9865e6dce 100644 --- a/public/app/core/components/scroll/scroll.ts +++ b/public/app/core/components/scroll/scroll.ts @@ -1,15 +1,44 @@ -import PerfectScrollbar from 'perfect-scrollbar'; +import $ from 'jquery'; +import baron from 'baron'; import coreModule from 'app/core/core_module'; import appEvents from 'app/core/app_events'; +const scrollBarHTML = ` +
+<div class="baron__track">
+  <div class="baron__bar"></div>
+</div>
    +`; + +const scrollRootClass = 'baron baron__root'; +const scrollerClass = 'baron__scroller'; + export function geminiScrollbar() { return { restrict: 'A', link: function(scope, elem, attrs) { - let scrollbar = new PerfectScrollbar(elem[0], { - wheelPropagation: true, - wheelSpeed: 3, - }); + let scrollRoot = elem.parent(); + let scroller = elem; + + if (attrs.grafanaScrollbar && attrs.grafanaScrollbar === 'scrollonroot') { + scrollRoot = scroller; + } + + scrollRoot.addClass(scrollRootClass); + $(scrollBarHTML).appendTo(scrollRoot); + elem.addClass(scrollerClass); + + let scrollParams = { + root: scrollRoot[0], + scroller: scroller[0], + bar: '.baron__bar', + barOnCls: '_scrollbar', + scrollingCls: '_scrolling', + track: '.baron__track', + direction: 'v', + }; + + let scrollbar = baron(scrollParams); + let lastPos = 0; appEvents.on( @@ -31,13 +60,24 @@ export function geminiScrollbar() { scope ); + // force updating dashboard width + appEvents.on('toggle-sidemenu', forceUpdate, scope); + appEvents.on('toggle-sidemenu-hidden', forceUpdate, scope); + appEvents.on('toggle-view-mode', forceUpdate, scope); + appEvents.on('toggle-kiosk-mode', forceUpdate, scope); + appEvents.on('toggle-inactive-mode', forceUpdate, scope); + + function forceUpdate() { + scrollbar.scroll(); + } + scope.$on('$routeChangeSuccess', () => { lastPos = 0; elem[0].scrollTop = 0; }); scope.$on('$destroy', () => { - scrollbar.destroy(); + scrollbar.dispose(); }); }, }; diff --git a/public/app/core/components/search/search.html b/public/app/core/components/search/search.html index acaf0730a6b..afb9e723cad 100644 --- a/public/app/core/components/search/search.html +++ b/public/app/core/components/search/search.html @@ -19,6 +19,7 @@
    +
    No dashboards matching your query were found.
    +
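Both scrollbar directives above stay position-aware through the dash-scroll application event rather than direct DOM coupling, so any code can drive them. A hedged sketch of the event shapes that page_scroll.ts handles:

import appEvents from 'app/core/app_events';

// Animate the scroller to the top...
appEvents.emit('dash-scroll', { animate: true, pos: 0 });

// ...jump without animation...
appEvents.emit('dash-scroll', { pos: 120 });

// ...or restore the last remembered position.
appEvents.emit('dash-scroll', { restore: true });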
    diff --git a/public/app/core/components/search/search_results.html b/public/app/core/components/search/search_results.html index 4e5bc88e0a9..7435f8d0b7e 100644 --- a/public/app/core/components/search/search_results.html +++ b/public/app/core/components/search/search_results.html @@ -20,7 +20,7 @@
    +
    \ No newline at end of file diff --git a/public/app/core/core.ts b/public/app/core/core.ts index 353d8762a9a..fb7021fe883 100644 --- a/public/app/core/core.ts +++ b/public/app/core/core.ts @@ -47,6 +47,7 @@ import { NavModelSrv, NavModel } from './nav_model_srv'; import { userPicker } from './components/user_picker'; import { teamPicker } from './components/team_picker'; import { geminiScrollbar } from './components/scroll/scroll'; +import { pageScrollbar } from './components/scroll/page_scroll'; import { gfPageDirective } from './components/gf_page'; import { orgSwitcher } from './components/org_switcher'; import { profiler } from './profiler'; @@ -85,6 +86,7 @@ export { userPicker, teamPicker, geminiScrollbar, + pageScrollbar, gfPageDirective, orgSwitcher, manageDashboardsDirective, diff --git a/public/app/core/directives/dash_class.js b/public/app/core/directives/dash_class.js deleted file mode 100644 index 4a139272632..00000000000 --- a/public/app/core/directives/dash_class.js +++ /dev/null @@ -1,36 +0,0 @@ -define([ - 'lodash', - 'jquery', - '../core_module', -], -function (_, $, coreModule) { - 'use strict'; - - coreModule.default.directive('dashClass', function() { - return { - link: function($scope, elem) { - - $scope.onAppEvent('panel-fullscreen-enter', function() { - elem.toggleClass('panel-in-fullscreen', true); - }); - - $scope.onAppEvent('panel-fullscreen-exit', function() { - elem.toggleClass('panel-in-fullscreen', false); - }); - - $scope.$watch('ctrl.dashboardViewState.state.editview', function(newValue) { - if (newValue) { - elem.toggleClass('dashboard-page--settings-opening', _.isString(newValue)); - setTimeout(function() { - elem.toggleClass('dashboard-page--settings-open', _.isString(newValue)); - }, 10); - } else { - elem.removeClass('dashboard-page--settings-opening'); - elem.removeClass('dashboard-page--settings-open'); - } - }); - } - }; - }); - -}); diff --git a/public/app/core/directives/dash_class.ts b/public/app/core/directives/dash_class.ts new file mode 100644 index 00000000000..031338d3c5b --- /dev/null +++ b/public/app/core/directives/dash_class.ts @@ -0,0 +1,31 @@ +import _ from 'lodash'; +import coreModule from '../core_module'; + +/** @ngInject */ +export function dashClass() { + return { + link: function($scope, elem) { + $scope.onAppEvent('panel-fullscreen-enter', function() { + elem.toggleClass('panel-in-fullscreen', true); + }); + + $scope.onAppEvent('panel-fullscreen-exit', function() { + elem.toggleClass('panel-in-fullscreen', false); + }); + + $scope.$watch('ctrl.dashboardViewState.state.editview', function(newValue) { + if (newValue) { + elem.toggleClass('dashboard-page--settings-opening', _.isString(newValue)); + setTimeout(function() { + elem.toggleClass('dashboard-page--settings-open', _.isString(newValue)); + }, 10); + } else { + elem.removeClass('dashboard-page--settings-opening'); + elem.removeClass('dashboard-page--settings-open'); + } + }); + }, + }; +} + +coreModule.directive('dashClass', dashClass); diff --git a/public/app/core/directives/dropdown_typeahead.js b/public/app/core/directives/dropdown_typeahead.js deleted file mode 100644 index 25772b4638a..00000000000 --- a/public/app/core/directives/dropdown_typeahead.js +++ /dev/null @@ -1,236 +0,0 @@ -define([ - 'lodash', - 'jquery', - '../core_module', -], -function (_, $, coreModule) { - 'use strict'; - - coreModule.default.directive('dropdownTypeahead', function($compile) { - - var inputTemplate = ''; - - var buttonTemplate = ''; - - return { - scope: { - menuItems: 
"=dropdownTypeahead", - dropdownTypeaheadOnSelect: "&dropdownTypeaheadOnSelect", - model: '=ngModel' - }, - link: function($scope, elem, attrs) { - var $input = $(inputTemplate); - var $button = $(buttonTemplate); - $input.appendTo(elem); - $button.appendTo(elem); - - if (attrs.linkText) { - $button.html(attrs.linkText); - } - - if (attrs.ngModel) { - $scope.$watch('model', function(newValue) { - _.each($scope.menuItems, function(item) { - _.each(item.submenu, function(subItem) { - if (subItem.value === newValue) { - $button.html(subItem.text); - } - }); - }); - }); - } - - var typeaheadValues = _.reduce($scope.menuItems, function(memo, value, index) { - if (!value.submenu) { - value.click = 'menuItemSelected(' + index + ')'; - memo.push(value.text); - } else { - _.each(value.submenu, function(item, subIndex) { - item.click = 'menuItemSelected(' + index + ',' + subIndex + ')'; - memo.push(value.text + ' ' + item.text); - }); - } - return memo; - }, []); - - $scope.menuItemSelected = function(index, subIndex) { - var menuItem = $scope.menuItems[index]; - var payload = {$item: menuItem}; - if (menuItem.submenu && subIndex !== void 0) { - payload.$subItem = menuItem.submenu[subIndex]; - } - $scope.dropdownTypeaheadOnSelect(payload); - }; - - $input.attr('data-provide', 'typeahead'); - $input.typeahead({ - source: typeaheadValues, - minLength: 1, - items: 10, - updater: function (value) { - var result = {}; - _.each($scope.menuItems, function(menuItem) { - _.each(menuItem.submenu, function(submenuItem) { - if (value === (menuItem.text + ' ' + submenuItem.text)) { - result.$subItem = submenuItem; - result.$item = menuItem; - } - }); - }); - - if (result.$item) { - $scope.$apply(function() { - $scope.dropdownTypeaheadOnSelect(result); - }); - } - - $input.trigger('blur'); - return ''; - } - }); - - $button.click(function() { - $button.hide(); - $input.show(); - $input.focus(); - }); - - $input.keyup(function() { - elem.toggleClass('open', $input.val() === ''); - }); - - $input.blur(function() { - $input.hide(); - $input.val(''); - $button.show(); - $button.focus(); - // clicking the function dropdown menu wont - // work if you remove class at once - setTimeout(function() { - elem.removeClass('open'); - }, 200); - }); - - $compile(elem.contents())($scope); - } - }; - }); - - coreModule.default.directive('dropdownTypeahead2', function($compile) { - - var inputTemplate = ''; - - var buttonTemplate = ''; - - return { - scope: { - menuItems: "=dropdownTypeahead2", - dropdownTypeaheadOnSelect: "&dropdownTypeaheadOnSelect", - model: '=ngModel' - }, - link: function($scope, elem, attrs) { - var $input = $(inputTemplate); - var $button = $(buttonTemplate); - $input.appendTo(elem); - $button.appendTo(elem); - - if (attrs.linkText) { - $button.html(attrs.linkText); - } - - if (attrs.ngModel) { - $scope.$watch('model', function(newValue) { - _.each($scope.menuItems, function(item) { - _.each(item.submenu, function(subItem) { - if (subItem.value === newValue) { - $button.html(subItem.text); - } - }); - }); - }); - } - - var typeaheadValues = _.reduce($scope.menuItems, function(memo, value, index) { - if (!value.submenu) { - value.click = 'menuItemSelected(' + index + ')'; - memo.push(value.text); - } else { - _.each(value.submenu, function(item, subIndex) { - item.click = 'menuItemSelected(' + index + ',' + subIndex + ')'; - memo.push(value.text + ' ' + item.text); - }); - } - return memo; - }, []); - - $scope.menuItemSelected = function(index, subIndex) { - var menuItem = $scope.menuItems[index]; - var 
payload = {$item: menuItem}; - if (menuItem.submenu && subIndex !== void 0) { - payload.$subItem = menuItem.submenu[subIndex]; - } - $scope.dropdownTypeaheadOnSelect(payload); - }; - - $input.attr('data-provide', 'typeahead'); - $input.typeahead({ - source: typeaheadValues, - minLength: 1, - items: 10, - updater: function (value) { - var result = {}; - _.each($scope.menuItems, function(menuItem) { - _.each(menuItem.submenu, function(submenuItem) { - if (value === (menuItem.text + ' ' + submenuItem.text)) { - result.$subItem = submenuItem; - result.$item = menuItem; - } - }); - }); - - if (result.$item) { - $scope.$apply(function() { - $scope.dropdownTypeaheadOnSelect(result); - }); - } - - $input.trigger('blur'); - return ''; - } - }); - - $button.click(function() { - $button.hide(); - $input.show(); - $input.focus(); - }); - - $input.keyup(function() { - elem.toggleClass('open', $input.val() === ''); - }); - - $input.blur(function() { - $input.hide(); - $input.val(''); - $button.show(); - $button.focus(); - // clicking the function dropdown menu wont - // work if you remove class at once - setTimeout(function() { - elem.removeClass('open'); - }, 200); - }); - - $compile(elem.contents())($scope); - } - }; - }); -}); diff --git a/public/app/core/directives/dropdown_typeahead.ts b/public/app/core/directives/dropdown_typeahead.ts new file mode 100644 index 00000000000..c9e44c5e786 --- /dev/null +++ b/public/app/core/directives/dropdown_typeahead.ts @@ -0,0 +1,244 @@ +import _ from 'lodash'; +import $ from 'jquery'; +import coreModule from '../core_module'; + +/** @ngInject */ +export function dropdownTypeahead($compile) { + let inputTemplate = + ''; + + let buttonTemplate = + ''; + + return { + scope: { + menuItems: '=dropdownTypeahead', + dropdownTypeaheadOnSelect: '&dropdownTypeaheadOnSelect', + model: '=ngModel', + }, + link: function($scope, elem, attrs) { + let $input = $(inputTemplate); + let $button = $(buttonTemplate); + $input.appendTo(elem); + $button.appendTo(elem); + + if (attrs.linkText) { + $button.html(attrs.linkText); + } + + if (attrs.ngModel) { + $scope.$watch('model', function(newValue) { + _.each($scope.menuItems, function(item) { + _.each(item.submenu, function(subItem) { + if (subItem.value === newValue) { + $button.html(subItem.text); + } + }); + }); + }); + } + + let typeaheadValues = _.reduce( + $scope.menuItems, + function(memo, value, index) { + if (!value.submenu) { + value.click = 'menuItemSelected(' + index + ')'; + memo.push(value.text); + } else { + _.each(value.submenu, function(item, subIndex) { + item.click = 'menuItemSelected(' + index + ',' + subIndex + ')'; + memo.push(value.text + ' ' + item.text); + }); + } + return memo; + }, + [] + ); + + $scope.menuItemSelected = function(index, subIndex) { + let menuItem = $scope.menuItems[index]; + let payload: any = { $item: menuItem }; + if (menuItem.submenu && subIndex !== void 0) { + payload.$subItem = menuItem.submenu[subIndex]; + } + $scope.dropdownTypeaheadOnSelect(payload); + }; + + $input.attr('data-provide', 'typeahead'); + $input.typeahead({ + source: typeaheadValues, + minLength: 1, + items: 10, + updater: function(value) { + let result: any = {}; + _.each($scope.menuItems, function(menuItem) { + _.each(menuItem.submenu, function(submenuItem) { + if (value === menuItem.text + ' ' + submenuItem.text) { + result.$subItem = submenuItem; + result.$item = menuItem; + } + }); + }); + + if (result.$item) { + $scope.$apply(function() { + $scope.dropdownTypeaheadOnSelect(result); + }); + } + + 
$input.trigger('blur'); + return ''; + }, + }); + + $button.click(function() { + $button.hide(); + $input.show(); + $input.focus(); + }); + + $input.keyup(function() { + elem.toggleClass('open', $input.val() === ''); + }); + + $input.blur(function() { + $input.hide(); + $input.val(''); + $button.show(); + $button.focus(); + // clicking the function dropdown menu won't + // work if you remove class at once + setTimeout(function() { + elem.removeClass('open'); + }, 200); + }); + + $compile(elem.contents())($scope); + }, + }; +} + +/** @ngInject */ +export function dropdownTypeahead2($compile) { + let inputTemplate = + ''; + + let buttonTemplate = + ''; + + return { + scope: { + menuItems: '=dropdownTypeahead2', + dropdownTypeaheadOnSelect: '&dropdownTypeaheadOnSelect', + model: '=ngModel', + }, + link: function($scope, elem, attrs) { + let $input = $(inputTemplate); + let $button = $(buttonTemplate); + $input.appendTo(elem); + $button.appendTo(elem); + + if (attrs.linkText) { + $button.html(attrs.linkText); + } + + if (attrs.ngModel) { + $scope.$watch('model', function(newValue) { + _.each($scope.menuItems, function(item) { + _.each(item.submenu, function(subItem) { + if (subItem.value === newValue) { + $button.html(subItem.text); + } + }); + }); + }); + } + + let typeaheadValues = _.reduce( + $scope.menuItems, + function(memo, value, index) { + if (!value.submenu) { + value.click = 'menuItemSelected(' + index + ')'; + memo.push(value.text); + } else { + _.each(value.submenu, function(item, subIndex) { + item.click = 'menuItemSelected(' + index + ',' + subIndex + ')'; + memo.push(value.text + ' ' + item.text); + }); + } + return memo; + }, + [] + ); + + $scope.menuItemSelected = function(index, subIndex) { + let menuItem = $scope.menuItems[index]; + let payload: any = { $item: menuItem }; + if (menuItem.submenu && subIndex !== void 0) { + payload.$subItem = menuItem.submenu[subIndex]; + } + $scope.dropdownTypeaheadOnSelect(payload); + }; + + $input.attr('data-provide', 'typeahead'); + $input.typeahead({ + source: typeaheadValues, + minLength: 1, + items: 10, + updater: function(value) { + let result: any = {}; + _.each($scope.menuItems, function(menuItem) { + _.each(menuItem.submenu, function(submenuItem) { + if (value === menuItem.text + ' ' + submenuItem.text) { + result.$subItem = submenuItem; + result.$item = menuItem; + } + }); + }); + + if (result.$item) { + $scope.$apply(function() { + $scope.dropdownTypeaheadOnSelect(result); + }); + } + + $input.trigger('blur'); + return ''; + }, + }); + + $button.click(function() { + $button.hide(); + $input.show(); + $input.focus(); + }); + + $input.keyup(function() { + elem.toggleClass('open', $input.val() === ''); + }); + + $input.blur(function() { + $input.hide(); + $input.val(''); + $button.show(); + $button.focus(); + // clicking the function dropdown menu won't + // work if you remove class at once + setTimeout(function() { + elem.removeClass('open'); + }, 200); + }); + + $compile(elem.contents())($scope); + }, + }; +} + +coreModule.directive('dropdownTypeahead', dropdownTypeahead); +coreModule.directive('dropdownTypeahead2', dropdownTypeahead2); diff --git a/public/app/core/directives/metric_segment.js b/public/app/core/directives/metric_segment.js deleted file mode 100644 index 7ba4a5a5259..00000000000 --- a/public/app/core/directives/metric_segment.js +++ /dev/null @@ -1,246 +0,0 @@ -define([ - 'lodash', - 'jquery', - '../core_module', -], -function (_, $, coreModule) { - 'use strict'; - - coreModule.default.directive('metricSegment', 
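The TypeScript port of dropdownTypeahead above keeps the original attribute API: the menu model binds through the directive attribute itself, selections surface via the $item/$subItem locals on the & callback, and link-text seeds the button label. A hypothetical template usage; controller names are illustrative:

// Angular template fragment; attribute names map to the scope bindings above.
const template = `
  <div class="dropdown"
       dropdown-typeahead="ctrl.menuItems"
       dropdown-typeahead-on-select="ctrl.onSelect($item, $subItem)"
       link-text="Add function">
  </div>
`;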
function($compile, $sce) { - var inputTemplate = ''; - - var linkTemplate = ''; - - var selectTemplate = ''; - - return { - scope: { - segment: "=", - getOptions: "&", - onChange: "&", - debounce: "@", - }, - link: function($scope, elem) { - var $input = $(inputTemplate); - var segment = $scope.segment; - var $button = $(segment.selectMode ? selectTemplate : linkTemplate); - var options = null; - var cancelBlur = null; - var linkMode = true; - var debounceLookup = $scope.debounce; - - $input.appendTo(elem); - $button.appendTo(elem); - - $scope.updateVariableValue = function(value) { - if (value === '' || segment.value === value) { - return; - } - - value = _.unescape(value); - - $scope.$apply(function() { - var selected = _.find($scope.altSegments, {value: value}); - if (selected) { - segment.value = selected.value; - segment.html = selected.html || selected.value; - segment.fake = false; - segment.expandable = selected.expandable; - - if (selected.type) { - segment.type = selected.type; - } - } - else if (segment.custom !== 'false') { - segment.value = value; - segment.html = $sce.trustAsHtml(value); - segment.expandable = true; - segment.fake = false; - } - - $scope.onChange(); - }); - }; - - $scope.switchToLink = function(fromClick) { - if (linkMode && !fromClick) { return; } - - clearTimeout(cancelBlur); - cancelBlur = null; - linkMode = true; - $input.hide(); - $button.show(); - $scope.updateVariableValue($input.val()); - }; - - $scope.inputBlur = function() { - // happens long before the click event on the typeahead options - // need to have long delay because the blur - cancelBlur = setTimeout($scope.switchToLink, 200); - }; - - $scope.source = function(query, callback) { - $scope.$apply(function() { - $scope.getOptions({ $query: query }).then(function(altSegments) { - $scope.altSegments = altSegments; - options = _.map($scope.altSegments, function(alt) { - return _.escape(alt.value); - }); - - // add custom values - if (segment.custom !== 'false') { - if (!segment.fake && _.indexOf(options, segment.value) === -1) { - options.unshift(segment.value); - } - } - - callback(options); - }); - }); - }; - - $scope.updater = function(value) { - if (value === segment.value) { - clearTimeout(cancelBlur); - $input.focus(); - return value; - } - - $input.val(value); - $scope.switchToLink(true); - - return value; - }; - - $scope.matcher = function(item) { - var str = this.query; - if (str[0] === '/') { str = str.substring(1); } - if (str[str.length - 1] === '/') { str = str.substring(0, str.length-1); } - try { - return item.toLowerCase().match(str.toLowerCase()); - } catch(e) { - return false; - } - }; - - $input.attr('data-provide', 'typeahead'); - $input.typeahead({ source: $scope.source, minLength: 0, items: 10000, updater: $scope.updater, matcher: $scope.matcher }); - - var typeahead = $input.data('typeahead'); - typeahead.lookup = function () { - this.query = this.$element.val() || ''; - var items = this.source(this.query, $.proxy(this.process, this)); - return items ? 
this.process(items) : items; - }; - - if (debounceLookup) { - typeahead.lookup = _.debounce(typeahead.lookup, 500, {leading: true}); - } - - $button.keydown(function(evt) { - // trigger typeahead on down arrow or enter key - if (evt.keyCode === 40 || evt.keyCode === 13) { - $button.click(); - } - }); - - $button.click(function() { - options = null; - $input.css('width', (Math.max($button.width(), 80) + 16) + 'px'); - - $button.hide(); - $input.show(); - $input.focus(); - - linkMode = false; - - var typeahead = $input.data('typeahead'); - if (typeahead) { - $input.val(''); - typeahead.lookup(); - } - }); - - $input.blur($scope.inputBlur); - - $compile(elem.contents())($scope); - } - }; - }); - - coreModule.default.directive('metricSegmentModel', function(uiSegmentSrv, $q) { - return { - template: '', - restrict: 'E', - scope: { - property: "=", - options: "=", - getOptions: "&", - onChange: "&", - }, - link: { - pre: function postLink($scope, elem, attrs) { - var cachedOptions; - - $scope.valueToSegment = function(value) { - var option = _.find($scope.options, {value: value}); - var segment = { - cssClass: attrs.cssClass, - custom: attrs.custom, - value: option ? option.text : value, - selectMode: attrs.selectMode, - }; - - return uiSegmentSrv.newSegment(segment); - }; - - $scope.getOptionsInternal = function() { - if ($scope.options) { - cachedOptions = $scope.options; - return $q.when(_.map($scope.options, function(option) { - return {value: option.text}; - })); - } else { - return $scope.getOptions().then(function(options) { - cachedOptions = options; - return _.map(options, function(option) { - if (option.html) { - return option; - } - return {value: option.text}; - }); - }); - } - }; - - $scope.onSegmentChange = function() { - if (cachedOptions) { - var option = _.find(cachedOptions, {text: $scope.segment.value}); - if (option && option.value !== $scope.property) { - $scope.property = option.value; - } else if (attrs.custom !== 'false') { - $scope.property = $scope.segment.value; - } - } else { - $scope.property = $scope.segment.value; - } - - // needs to call this after digest so - // property is synced with outerscope - $scope.$$postDigest(function() { - $scope.$apply(function() { - $scope.onChange(); - }); - }); - }; - - $scope.segment = $scope.valueToSegment($scope.property); - } - } - }; - }); -}); diff --git a/public/app/core/directives/metric_segment.ts b/public/app/core/directives/metric_segment.ts new file mode 100644 index 00000000000..3718d7fbd4a --- /dev/null +++ b/public/app/core/directives/metric_segment.ts @@ -0,0 +1,263 @@ +import _ from 'lodash'; +import $ from 'jquery'; +import coreModule from '../core_module'; + +/** @ngInject */ +export function metricSegment($compile, $sce) { + let inputTemplate = + ''; + + let linkTemplate = + ''; + + let selectTemplate = + ''; + + return { + scope: { + segment: '=', + getOptions: '&', + onChange: '&', + debounce: '@', + }, + link: function($scope, elem) { + let $input = $(inputTemplate); + let segment = $scope.segment; + let $button = $(segment.selectMode ? 
selectTemplate : linkTemplate); + let options = null; + let cancelBlur = null; + let linkMode = true; + let debounceLookup = $scope.debounce; + + $input.appendTo(elem); + $button.appendTo(elem); + + $scope.updateVariableValue = function(value) { + if (value === '' || segment.value === value) { + return; + } + + value = _.unescape(value); + + $scope.$apply(function() { + let selected = _.find($scope.altSegments, { value: value }); + if (selected) { + segment.value = selected.value; + segment.html = selected.html || selected.value; + segment.fake = false; + segment.expandable = selected.expandable; + + if (selected.type) { + segment.type = selected.type; + } + } else if (segment.custom !== 'false') { + segment.value = value; + segment.html = $sce.trustAsHtml(value); + segment.expandable = true; + segment.fake = false; + } + + $scope.onChange(); + }); + }; + + $scope.switchToLink = function(fromClick) { + if (linkMode && !fromClick) { + return; + } + + clearTimeout(cancelBlur); + cancelBlur = null; + linkMode = true; + $input.hide(); + $button.show(); + $scope.updateVariableValue($input.val()); + }; + + $scope.inputBlur = function() { + // happens long before the click event on the typeahead options + // need to have long delay because the blur + cancelBlur = setTimeout($scope.switchToLink, 200); + }; + + $scope.source = function(query, callback) { + $scope.$apply(function() { + $scope.getOptions({ $query: query }).then(function(altSegments) { + $scope.altSegments = altSegments; + options = _.map($scope.altSegments, function(alt) { + return _.escape(alt.value); + }); + + // add custom values + if (segment.custom !== 'false') { + if (!segment.fake && _.indexOf(options, segment.value) === -1) { + options.unshift(segment.value); + } + } + + callback(options); + }); + }); + }; + + $scope.updater = function(value) { + if (value === segment.value) { + clearTimeout(cancelBlur); + $input.focus(); + return value; + } + + $input.val(value); + $scope.switchToLink(true); + + return value; + }; + + $scope.matcher = function(item) { + let str = this.query; + if (str[0] === '/') { + str = str.substring(1); + } + if (str[str.length - 1] === '/') { + str = str.substring(0, str.length - 1); + } + try { + return item.toLowerCase().match(str.toLowerCase()); + } catch (e) { + return false; + } + }; + + $input.attr('data-provide', 'typeahead'); + $input.typeahead({ + source: $scope.source, + minLength: 0, + items: 10000, + updater: $scope.updater, + matcher: $scope.matcher, + }); + + let typeahead = $input.data('typeahead'); + typeahead.lookup = function() { + this.query = this.$element.val() || ''; + let items = this.source(this.query, $.proxy(this.process, this)); + return items ? 
this.process(items) : items; + }; + + if (debounceLookup) { + typeahead.lookup = _.debounce(typeahead.lookup, 500, { leading: true }); + } + + $button.keydown(function(evt) { + // trigger typeahead on down arrow or enter key + if (evt.keyCode === 40 || evt.keyCode === 13) { + $button.click(); + } + }); + + $button.click(function() { + options = null; + $input.css('width', Math.max($button.width(), 80) + 16 + 'px'); + + $button.hide(); + $input.show(); + $input.focus(); + + linkMode = false; + + let typeahead = $input.data('typeahead'); + if (typeahead) { + $input.val(''); + typeahead.lookup(); + } + }); + + $input.blur($scope.inputBlur); + + $compile(elem.contents())($scope); + }, + }; +} + +/** @ngInject */ +export function metricSegmentModel(uiSegmentSrv, $q) { + return { + template: + '', + restrict: 'E', + scope: { + property: '=', + options: '=', + getOptions: '&', + onChange: '&', + }, + link: { + pre: function postLink($scope, elem, attrs) { + let cachedOptions; + + $scope.valueToSegment = function(value) { + let option = _.find($scope.options, { value: value }); + let segment = { + cssClass: attrs.cssClass, + custom: attrs.custom, + value: option ? option.text : value, + selectMode: attrs.selectMode, + }; + + return uiSegmentSrv.newSegment(segment); + }; + + $scope.getOptionsInternal = function() { + if ($scope.options) { + cachedOptions = $scope.options; + return $q.when( + _.map($scope.options, function(option) { + return { value: option.text }; + }) + ); + } else { + return $scope.getOptions().then(function(options) { + cachedOptions = options; + return _.map(options, function(option) { + if (option.html) { + return option; + } + return { value: option.text }; + }); + }); + } + }; + + $scope.onSegmentChange = function() { + if (cachedOptions) { + let option = _.find(cachedOptions, { text: $scope.segment.value }); + if (option && option.value !== $scope.property) { + $scope.property = option.value; + } else if (attrs.custom !== 'false') { + $scope.property = $scope.segment.value; + } + } else { + $scope.property = $scope.segment.value; + } + + // needs to call this after digest so + // property is synced with outerscope + $scope.$$postDigest(function() { + $scope.$apply(function() { + $scope.onChange(); + }); + }); + }; + + $scope.segment = $scope.valueToSegment($scope.property); + }, + }, + }; +} + +coreModule.directive('metricSegment', metricSegment); +coreModule.directive('metricSegmentModel', metricSegmentModel); diff --git a/public/app/core/directives/value_select_dropdown.js b/public/app/core/directives/value_select_dropdown.js deleted file mode 100644 index a2bae1c34d3..00000000000 --- a/public/app/core/directives/value_select_dropdown.js +++ /dev/null @@ -1,283 +0,0 @@ -define([ - 'angular', - 'lodash', - '../core_module', -], -function (angular, _, coreModule) { - 'use strict'; - - coreModule.default.controller('ValueSelectDropdownCtrl', function($q) { - var vm = this; - - vm.show = function() { - vm.oldVariableText = vm.variable.current.text; - vm.highlightIndex = -1; - - vm.options = vm.variable.options; - vm.selectedValues = _.filter(vm.options, {selected: true}); - - vm.tags = _.map(vm.variable.tags, function(value) { - var tag = { text: value, selected: false }; - _.each(vm.variable.current.tags, function(tagObj) { - if (tagObj.text === value) { - tag = tagObj; - } - }); - return tag; - }); - - vm.search = { - query: '', - options: vm.options.slice(0, Math.min(vm.options.length, 1000)) - }; - - vm.dropdownVisible = true; - }; - - vm.updateLinkText = function() { - 
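metricSegmentModel, registered just above, wraps metricSegment for simple {value, text} option lists and writes the chosen value back to property after the digest. A hypothetical element usage; names are illustrative:

// Angular template fragment; attributes map to the directive scope above.
const template = `
  <metric-segment-model
    property="ctrl.panel.valueName"
    options="ctrl.valueNameOptions"
    on-change="ctrl.render()"
    css-class="width-10">
  </metric-segment-model>
`;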
var current = vm.variable.current; - - if (current.tags && current.tags.length) { - // filer out values that are in selected tags - var selectedAndNotInTag = _.filter(vm.variable.options, function(option) { - if (!option.selected) { return false; } - for (var i = 0; i < current.tags.length; i++) { - var tag = current.tags[i]; - if (_.indexOf(tag.values, option.value) !== -1) { - return false; - } - } - return true; - }); - - // convert values to text - var currentTexts = _.map(selectedAndNotInTag, 'text'); - - // join texts - vm.linkText = currentTexts.join(' + '); - if (vm.linkText.length > 0) { - vm.linkText += ' + '; - } - } else { - vm.linkText = vm.variable.current.text; - } - }; - - vm.clearSelections = function() { - _.each(vm.options, function(option) { - option.selected = false; - }); - - vm.selectionsChanged(false); - }; - - vm.selectTag = function(tag) { - tag.selected = !tag.selected; - var tagValuesPromise; - if (!tag.values) { - tagValuesPromise = vm.variable.getValuesForTag(tag.text); - } else { - tagValuesPromise = $q.when(tag.values); - } - - tagValuesPromise.then(function(values) { - tag.values = values; - tag.valuesText = values.join(' + '); - _.each(vm.options, function(option) { - if (_.indexOf(tag.values, option.value) !== -1) { - option.selected = tag.selected; - } - }); - - vm.selectionsChanged(false); - }); - }; - - vm.keyDown = function (evt) { - if (evt.keyCode === 27) { - vm.hide(); - } - if (evt.keyCode === 40) { - vm.moveHighlight(1); - } - if (evt.keyCode === 38) { - vm.moveHighlight(-1); - } - if (evt.keyCode === 13) { - if (vm.search.options.length === 0) { - vm.commitChanges(); - } else { - vm.selectValue(vm.search.options[vm.highlightIndex], {}, true, false); - } - } - if (evt.keyCode === 32) { - vm.selectValue(vm.search.options[vm.highlightIndex], {}, false, false); - } - }; - - vm.moveHighlight = function(direction) { - vm.highlightIndex = (vm.highlightIndex + direction) % vm.search.options.length; - }; - - vm.selectValue = function(option, event, commitChange, excludeOthers) { - if (!option) { return; } - - option.selected = vm.variable.multi ? 
!option.selected: true; - - commitChange = commitChange || false; - excludeOthers = excludeOthers || false; - - var setAllExceptCurrentTo = function(newValue) { - _.each(vm.options, function(other) { - if (option !== other) { other.selected = newValue; } - }); - }; - - // commit action (enter key), should not deselect it - if (commitChange) { - option.selected = true; - } - - if (option.text === 'All' || excludeOthers) { - setAllExceptCurrentTo(false); - commitChange = true; - } - else if (!vm.variable.multi) { - setAllExceptCurrentTo(false); - commitChange = true; - } else if (event.ctrlKey || event.metaKey || event.shiftKey) { - commitChange = true; - setAllExceptCurrentTo(false); - } - - vm.selectionsChanged(commitChange); - }; - - vm.selectionsChanged = function(commitChange) { - vm.selectedValues = _.filter(vm.options, {selected: true}); - - if (vm.selectedValues.length > 1) { - if (vm.selectedValues[0].text === 'All') { - vm.selectedValues[0].selected = false; - vm.selectedValues = vm.selectedValues.slice(1, vm.selectedValues.length); - } - } - - // validate selected tags - _.each(vm.tags, function(tag) { - if (tag.selected) { - _.each(tag.values, function(value) { - if (!_.find(vm.selectedValues, {value: value})) { - tag.selected = false; - } - }); - } - }); - - vm.selectedTags = _.filter(vm.tags, {selected: true}); - vm.variable.current.value = _.map(vm.selectedValues, 'value'); - vm.variable.current.text = _.map(vm.selectedValues, 'text').join(' + '); - vm.variable.current.tags = vm.selectedTags; - - if (!vm.variable.multi) { - vm.variable.current.value = vm.selectedValues[0].value; - } - - if (commitChange) { - vm.commitChanges(); - } - }; - - vm.commitChanges = function() { - // if we have a search query and no options use that - if (vm.search.options.length === 0 && vm.search.query.length > 0) { - vm.variable.current = {text: vm.search.query, value: vm.search.query}; - } - else if (vm.selectedValues.length === 0) { - // make sure one option is selected - vm.options[0].selected = true; - vm.selectionsChanged(false); - } - - vm.dropdownVisible = false; - vm.updateLinkText(); - - if (vm.variable.current.text !== vm.oldVariableText) { - vm.onUpdated(); - } - }; - - vm.queryChanged = function() { - vm.highlightIndex = -1; - vm.search.options = _.filter(vm.options, function(option) { - return option.text.toLowerCase().indexOf(vm.search.query.toLowerCase()) !== -1; - }); - - vm.search.options = vm.search.options.slice(0, Math.min(vm.search.options.length, 1000)); - }; - - vm.init = function() { - vm.selectedTags = vm.variable.current.tags || []; - vm.updateLinkText(); - }; - - }); - - coreModule.default.directive('valueSelectDropdown', function($compile, $window, $timeout, $rootScope) { - return { - scope: { variable: "=", onUpdated: "&"}, - templateUrl: 'public/app/partials/valueSelectDropdown.html', - controller: 'ValueSelectDropdownCtrl', - controllerAs: 'vm', - bindToController: true, - link: function(scope, elem) { - var bodyEl = angular.element($window.document.body); - var linkEl = elem.find('.variable-value-link'); - var inputEl = elem.find('input'); - - function openDropdown() { - inputEl.css('width', Math.max(linkEl.width(), 80) + 'px'); - - inputEl.show(); - linkEl.hide(); - - inputEl.focus(); - $timeout(function() { bodyEl.on('click', bodyOnClick); }, 0, false); - } - - function switchToLink() { - inputEl.hide(); - linkEl.show(); - bodyEl.off('click', bodyOnClick); - } - - function bodyOnClick (e) { - if (elem.has(e.target).length === 0) { - scope.$apply(function() { - 
scope.vm.commitChanges(); - }); - } - } - - scope.$watch('vm.dropdownVisible', function(newValue) { - if (newValue) { - openDropdown(); - } else { - switchToLink(); - } - }); - - var cleanUp = $rootScope.$on('template-variable-value-updated', function() { - scope.vm.updateLinkText(); - }); - - scope.$on("$destroy", function() { - cleanUp(); - }); - - scope.vm.init(); - }, - }; - }); - -}); diff --git a/public/app/core/directives/value_select_dropdown.ts b/public/app/core/directives/value_select_dropdown.ts new file mode 100644 index 00000000000..d6c6c3af5c5 --- /dev/null +++ b/public/app/core/directives/value_select_dropdown.ts @@ -0,0 +1,305 @@ +import angular from 'angular'; +import _ from 'lodash'; +import coreModule from '../core_module'; + +export class ValueSelectDropdownCtrl { + dropdownVisible: any; + highlightIndex: any; + linkText: any; + oldVariableText: any; + options: any; + search: any; + selectedTags: any; + selectedValues: any; + tags: any; + variable: any; + + hide: any; + onUpdated: any; + + /** @ngInject */ + constructor(private $q) {} + + show() { + this.oldVariableText = this.variable.current.text; + this.highlightIndex = -1; + + this.options = this.variable.options; + this.selectedValues = _.filter(this.options, { selected: true }); + + this.tags = _.map(this.variable.tags, value => { + let tag = { text: value, selected: false }; + _.each(this.variable.current.tags, tagObj => { + if (tagObj.text === value) { + tag = tagObj; + } + }); + return tag; + }); + + this.search = { + query: '', + options: this.options.slice(0, Math.min(this.options.length, 1000)), + }; + + this.dropdownVisible = true; + } + + updateLinkText() { + let current = this.variable.current; + + if (current.tags && current.tags.length) { + // filer out values that are in selected tags + let selectedAndNotInTag = _.filter(this.variable.options, option => { + if (!option.selected) { + return false; + } + for (let i = 0; i < current.tags.length; i++) { + let tag = current.tags[i]; + if (_.indexOf(tag.values, option.value) !== -1) { + return false; + } + } + return true; + }); + + // convert values to text + let currentTexts = _.map(selectedAndNotInTag, 'text'); + + // join texts + this.linkText = currentTexts.join(' + '); + if (this.linkText.length > 0) { + this.linkText += ' + '; + } + } else { + this.linkText = this.variable.current.text; + } + } + + clearSelections() { + _.each(this.options, option => { + option.selected = false; + }); + + this.selectionsChanged(false); + } + + selectTag(tag) { + tag.selected = !tag.selected; + let tagValuesPromise; + if (!tag.values) { + tagValuesPromise = this.variable.getValuesForTag(tag.text); + } else { + tagValuesPromise = this.$q.when(tag.values); + } + + tagValuesPromise.then(values => { + tag.values = values; + tag.valuesText = values.join(' + '); + _.each(this.options, option => { + if (_.indexOf(tag.values, option.value) !== -1) { + option.selected = tag.selected; + } + }); + + this.selectionsChanged(false); + }); + } + + keyDown(evt) { + if (evt.keyCode === 27) { + this.hide(); + } + if (evt.keyCode === 40) { + this.moveHighlight(1); + } + if (evt.keyCode === 38) { + this.moveHighlight(-1); + } + if (evt.keyCode === 13) { + if (this.search.options.length === 0) { + this.commitChanges(); + } else { + this.selectValue(this.search.options[this.highlightIndex], {}, true, false); + } + } + if (evt.keyCode === 32) { + this.selectValue(this.search.options[this.highlightIndex], {}, false, false); + } + } + + moveHighlight(direction) { + this.highlightIndex = 
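// Down/Up (keyCodes 40/38 in keyDown above) move this index through the
// filtered options; the modulo below wraps Down past the last option back
// to the top. Since JS `%` keeps the dividend's sign, a sketch of a variant
// that also wraps Up past index 0 (illustrative only, not part of this
// change) would be:
//   const n = this.search.options.length;
//   this.highlightIndex = ((this.highlightIndex + direction) % n + n) % n;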
(this.highlightIndex + direction) % this.search.options.length; + } + + selectValue(option, event, commitChange, excludeOthers) { + if (!option) { + return; + } + + option.selected = this.variable.multi ? !option.selected : true; + + commitChange = commitChange || false; + excludeOthers = excludeOthers || false; + + let setAllExceptCurrentTo = newValue => { + _.each(this.options, other => { + if (option !== other) { + other.selected = newValue; + } + }); + }; + + // commit action (enter key), should not deselect it + if (commitChange) { + option.selected = true; + } + + if (option.text === 'All' || excludeOthers) { + setAllExceptCurrentTo(false); + commitChange = true; + } else if (!this.variable.multi) { + setAllExceptCurrentTo(false); + commitChange = true; + } else if (event.ctrlKey || event.metaKey || event.shiftKey) { + commitChange = true; + setAllExceptCurrentTo(false); + } + + this.selectionsChanged(commitChange); + } + + selectionsChanged(commitChange) { + this.selectedValues = _.filter(this.options, { selected: true }); + + if (this.selectedValues.length > 1) { + if (this.selectedValues[0].text === 'All') { + this.selectedValues[0].selected = false; + this.selectedValues = this.selectedValues.slice(1, this.selectedValues.length); + } + } + + // validate selected tags + _.each(this.tags, tag => { + if (tag.selected) { + _.each(tag.values, value => { + if (!_.find(this.selectedValues, { value: value })) { + tag.selected = false; + } + }); + } + }); + + this.selectedTags = _.filter(this.tags, { selected: true }); + this.variable.current.value = _.map(this.selectedValues, 'value'); + this.variable.current.text = _.map(this.selectedValues, 'text').join(' + '); + this.variable.current.tags = this.selectedTags; + + if (!this.variable.multi) { + this.variable.current.value = this.selectedValues[0].value; + } + + if (commitChange) { + this.commitChanges(); + } + } + + commitChanges() { + // if we have a search query and no options use that + if (this.search.options.length === 0 && this.search.query.length > 0) { + this.variable.current = { text: this.search.query, value: this.search.query }; + } else if (this.selectedValues.length === 0) { + // make sure one option is selected + this.options[0].selected = true; + this.selectionsChanged(false); + } + + this.dropdownVisible = false; + this.updateLinkText(); + + if (this.variable.current.text !== this.oldVariableText) { + this.onUpdated(); + } + } + + queryChanged() { + this.highlightIndex = -1; + this.search.options = _.filter(this.options, option => { + return option.text.toLowerCase().indexOf(this.search.query.toLowerCase()) !== -1; + }); + + this.search.options = this.search.options.slice(0, Math.min(this.search.options.length, 1000)); + } + + init() { + this.selectedTags = this.variable.current.tags || []; + this.updateLinkText(); + } +} + +/** @ngInject */ +export function valueSelectDropdown($compile, $window, $timeout, $rootScope) { + return { + scope: { variable: '=', onUpdated: '&' }, + templateUrl: 'public/app/partials/valueSelectDropdown.html', + controller: 'ValueSelectDropdownCtrl', + controllerAs: 'vm', + bindToController: true, + link: function(scope, elem) { + let bodyEl = angular.element($window.document.body); + let linkEl = elem.find('.variable-value-link'); + let inputEl = elem.find('input'); + + function openDropdown() { + inputEl.css('width', Math.max(linkEl.width(), 80) + 'px'); + + inputEl.show(); + linkEl.hide(); + + inputEl.focus(); + $timeout( + function() { + bodyEl.on('click', bodyOnClick); + }, + 0, + false + 
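// The zero-delay $timeout defers attaching the body click handler until the
// click that opened the dropdown has finished bubbling, so the dropdown is
// not closed by the same click; the trailing `false` is AngularJS's
// invokeApply flag, which skips an unnecessary $digest for this DOM-only work.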
); + } + + function switchToLink() { + inputEl.hide(); + linkEl.show(); + bodyEl.off('click', bodyOnClick); + } + + function bodyOnClick(e) { + if (elem.has(e.target).length === 0) { + scope.$apply(function() { + scope.vm.commitChanges(); + }); + } + } + + scope.$watch('vm.dropdownVisible', newValue => { + if (newValue) { + openDropdown(); + } else { + switchToLink(); + } + }); + + let cleanUp = $rootScope.$on('template-variable-value-updated', () => { + scope.vm.updateLinkText(); + }); + + scope.$on('$destroy', () => { + cleanUp(); + }); + + scope.vm.init(); + }, + }; +} + +coreModule.controller('ValueSelectDropdownCtrl', ValueSelectDropdownCtrl); +coreModule.directive('valueSelectDropdown', valueSelectDropdown); diff --git a/public/app/core/services/all.js b/public/app/core/services/all.js deleted file mode 100644 index 0a973440c5e..00000000000 --- a/public/app/core/services/all.js +++ /dev/null @@ -1,13 +0,0 @@ -define([ - './alert_srv', - './util_srv', - './context_srv', - './timer', - './analytics', - './popover_srv', - './segment_srv', - './backend_srv', - './dynamic_directive_srv', - './bridge_srv' -], -function () {}); diff --git a/public/app/core/services/all.ts b/public/app/core/services/all.ts new file mode 100644 index 00000000000..989015d2872 --- /dev/null +++ b/public/app/core/services/all.ts @@ -0,0 +1,10 @@ +import './alert_srv'; +import './util_srv'; +import './context_srv'; +import './timer'; +import './analytics'; +import './popover_srv'; +import './segment_srv'; +import './backend_srv'; +import './dynamic_directive_srv'; +import './bridge_srv'; diff --git a/public/app/core/services/analytics.ts b/public/app/core/services/analytics.ts index 370773154e5..d1998f44cbc 100644 --- a/public/app/core/services/analytics.ts +++ b/public/app/core/services/analytics.ts @@ -7,7 +7,11 @@ export class Analytics { constructor(private $rootScope, private $location) {} gaInit() { - $.getScript('https://www.google-analytics.com/analytics.js'); // jQuery shortcut + $.ajax({ + url: 'https://www.google-analytics.com/analytics.js', + dataType: 'script', + cache: true, + }); var ga = ((window).ga = (window).ga || function() { @@ -15,6 +19,7 @@ export class Analytics { }); ga.l = +new Date(); ga('create', (config).googleAnalyticsId, 'auto'); + ga('set', 'anonymizeIp', true); return ga; } diff --git a/public/app/core/services/backend_srv.ts b/public/app/core/services/backend_srv.ts index 8b7ca518e8b..d582b6a3b18 100644 --- a/public/app/core/services/backend_srv.ts +++ b/public/app/core/services/backend_srv.ts @@ -170,7 +170,9 @@ export class BackendSrv { return this.$http(options) .then(response => { - appEvents.emit('ds-request-response', response); + if (!options.silent) { + appEvents.emit('ds-request-response', response); + } return response; }) .catch(err => { @@ -201,8 +203,9 @@ export class BackendSrv { if (err.data && !err.data.message && _.isString(err.data.error)) { err.data.message = err.data.error; } - - appEvents.emit('ds-request-error', err); + if (!options.silent) { + appEvents.emit('ds-request-error', err); + } throw err; }) .finally(() => { diff --git a/public/app/core/services/context_srv.ts b/public/app/core/services/context_srv.ts index 5a879895267..be8a0af7b7b 100644 --- a/public/app/core/services/context_srv.ts +++ b/public/app/core/services/context_srv.ts @@ -11,6 +11,7 @@ export class User { timezone: string; helpFlags1: number; lightTheme: boolean; + hasEditPermissionInFolders: boolean; constructor() { if (config.bootData.user) { @@ -28,6 +29,7 @@ export class ContextSrv 
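// hasEditPermissionInFolders mirrors the new User field added above: it is
// read from config.bootData.user and exposed on contextSrv so UI code can
// gate editing the way isEditor is used today. A hedged usage sketch
// (function name hypothetical, not from this diff):
//   if (contextSrv.hasEditPermissionInFolders) { showAddPanelButton(); }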
{ isEditor: any; sidemenu: any; sidemenuSmallBreakpoint = false; + hasEditPermissionInFolders: boolean; constructor() { this.sidemenu = store.getBool('grafana.sidemenu', true); @@ -44,6 +46,7 @@ export class ContextSrv { this.isSignedIn = this.user.isSignedIn; this.isGrafanaAdmin = this.user.isGrafanaAdmin; this.isEditor = this.hasRole('Editor') || this.hasRole('Admin'); + this.hasEditPermissionInFolders = this.user.hasEditPermissionInFolders; } hasRole(role) { diff --git a/public/app/core/services/keybindingSrv.ts b/public/app/core/services/keybindingSrv.ts index 0d468b6980f..94bf9efb31b 100644 --- a/public/app/core/services/keybindingSrv.ts +++ b/public/app/core/services/keybindingSrv.ts @@ -3,6 +3,7 @@ import _ from 'lodash'; import coreModule from 'app/core/core_module'; import appEvents from 'app/core/app_events'; +import { encodePathComponent } from 'app/core/utils/location_util'; import Mousetrap from 'mousetrap'; import 'mousetrap-global-bind'; @@ -10,9 +11,10 @@ import 'mousetrap-global-bind'; export class KeybindingSrv { helpModal: boolean; modalOpen = false; + timepickerOpen = false; /** @ngInject */ - constructor(private $rootScope, private $location) { + constructor(private $rootScope, private $location, private datasourceSrv) { // clear out all shortcuts on route change $rootScope.$on('$routeChangeSuccess', () => { Mousetrap.reset(); @@ -22,6 +24,8 @@ export class KeybindingSrv { this.setupGlobal(); appEvents.on('show-modal', () => (this.modalOpen = true)); + $rootScope.onAppEvent('timepickerOpen', () => (this.timepickerOpen = true)); + $rootScope.onAppEvent('timepickerClosed', () => (this.timepickerOpen = false)); } setupGlobal() { @@ -73,7 +77,12 @@ export class KeybindingSrv { appEvents.emit('hide-modal'); if (!this.modalOpen) { - this.$rootScope.appEvent('panel-change-view', { fullscreen: false, edit: false }); + if (this.timepickerOpen) { + this.$rootScope.appEvent('closeTimepicker'); + this.timepickerOpen = false; + } else { + this.$rootScope.appEvent('panel-change-view', { fullscreen: false, edit: false }); + } } else { this.modalOpen = false; } @@ -168,6 +177,17 @@ export class KeybindingSrv { } }); + this.bind('x', async () => { + if (dashboard.meta.focusPanelId) { + const panel = dashboard.getPanelById(dashboard.meta.focusPanelId); + const datasource = await this.datasourceSrv.get(panel.datasource); + if (datasource && datasource.supportsExplore) { + const exploreState = encodePathComponent(JSON.stringify(datasource.getExploreState(panel))); + this.$location.url(`/explore/${exploreState}`); + } + } + }); + // delete panel this.bind('p r', () => { if (dashboard.meta.focusPanelId && dashboard.meta.canEdit) { diff --git a/public/app/core/services/segment_srv.js b/public/app/core/services/segment_srv.js deleted file mode 100644 index 71d0cbfe7a9..00000000000 --- a/public/app/core/services/segment_srv.js +++ /dev/null @@ -1,111 +0,0 @@ -define([ - 'angular', - 'lodash', - '../core_module', -], -function (angular, _, coreModule) { - 'use strict'; - - coreModule.default.service('uiSegmentSrv', function($sce, templateSrv) { - var self = this; - - function MetricSegment(options) { - if (options === '*' || options.value === '*') { - this.value = '*'; - this.html = $sce.trustAsHtml(''); - this.type = options.type; - this.expandable = true; - return; - } - - if (_.isString(options)) { - this.value = options; - this.html = $sce.trustAsHtml(templateSrv.highlightVariablesAsHtml(this.value)); - return; - } - - // temp hack to work around legacy inconsistency in segment model - 
this.text = options.value; - - this.cssClass = options.cssClass; - this.custom = options.custom; - this.type = options.type; - this.fake = options.fake; - this.value = options.value; - this.selectMode = options.selectMode; - this.type = options.type; - this.expandable = options.expandable; - this.html = options.html || $sce.trustAsHtml(templateSrv.highlightVariablesAsHtml(this.value)); - } - - this.getSegmentForValue = function(value, fallbackText) { - if (value) { - return this.newSegment(value); - } else { - return this.newSegment({value: fallbackText, fake: true}); - } - }; - - this.newSelectMeasurement = function() { - return new MetricSegment({value: 'select measurement', fake: true}); - }; - - this.newFake = function(text, type, cssClass) { - return new MetricSegment({value: text, fake: true, type: type, cssClass: cssClass}); - }; - - this.newSegment = function(options) { - return new MetricSegment(options); - }; - - this.newKey = function(key) { - return new MetricSegment({value: key, type: 'key', cssClass: 'query-segment-key' }); - }; - - this.newKeyValue = function(value) { - return new MetricSegment({value: value, type: 'value', cssClass: 'query-segment-value' }); - }; - - this.newCondition = function(condition) { - return new MetricSegment({value: condition, type: 'condition', cssClass: 'query-keyword' }); - }; - - this.newOperator = function(op) { - return new MetricSegment({value: op, type: 'operator', cssClass: 'query-segment-operator' }); - }; - - this.newOperators = function(ops) { - return _.map(ops, function(op) { - return new MetricSegment({value: op, type: 'operator', cssClass: 'query-segment-operator' }); - }); - }; - - this.transformToSegments = function(addTemplateVars, variableTypeFilter) { - return function(results) { - var segments = _.map(results, function(segment) { - return self.newSegment({value: segment.text, expandable: segment.expandable}); - }); - - if (addTemplateVars) { - _.each(templateSrv.variables, function(variable) { - if (variableTypeFilter === void 0 || variableTypeFilter === variable.type) { - segments.unshift(self.newSegment({ type: 'value', value: '$' + variable.name, expandable: true })); - } - }); - } - - return segments; - }; - }; - - this.newSelectMetric = function() { - return new MetricSegment({value: 'select metric', fake: true}); - }; - - this.newPlusButton = function() { - return new MetricSegment({fake: true, html: '', type: 'plus-button', cssClass: 'query-part' }); - }; - - }); - -}); diff --git a/public/app/core/services/segment_srv.ts b/public/app/core/services/segment_srv.ts new file mode 100644 index 00000000000..042340e6102 --- /dev/null +++ b/public/app/core/services/segment_srv.ts @@ -0,0 +1,111 @@ +import _ from 'lodash'; +import coreModule from '../core_module'; + +/** @ngInject */ +export function uiSegmentSrv($sce, templateSrv) { + let self = this; + + function MetricSegment(options) { + if (options === '*' || options.value === '*') { + this.value = '*'; + this.html = $sce.trustAsHtml(''); + this.type = options.type; + this.expandable = true; + return; + } + + if (_.isString(options)) { + this.value = options; + this.html = $sce.trustAsHtml(templateSrv.highlightVariablesAsHtml(this.value)); + return; + } + + // temp hack to work around legacy inconsistency in segment model + this.text = options.value; + + this.cssClass = options.cssClass; + this.custom = options.custom; + this.type = options.type; + this.fake = options.fake; + this.value = options.value; + this.selectMode = options.selectMode; + this.type = options.type; + 
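// Note: `this.type = options.type` is assigned twice in this constructor,
// carried over verbatim from the deleted segment_srv.js above. The options
// form is what the factory helpers below produce, e.g. (illustrative sketch):
//   uiSegmentSrv.newOperator('=');
//   // -> { value: '=', type: 'operator', cssClass: 'query-segment-operator' }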
this.expandable = options.expandable; + this.html = options.html || $sce.trustAsHtml(templateSrv.highlightVariablesAsHtml(this.value)); + } + + this.getSegmentForValue = function(value, fallbackText) { + if (value) { + return this.newSegment(value); + } else { + return this.newSegment({ value: fallbackText, fake: true }); + } + }; + + this.newSelectMeasurement = function() { + return new MetricSegment({ value: 'select measurement', fake: true }); + }; + + this.newFake = function(text, type, cssClass) { + return new MetricSegment({ value: text, fake: true, type: type, cssClass: cssClass }); + }; + + this.newSegment = function(options) { + return new MetricSegment(options); + }; + + this.newKey = function(key) { + return new MetricSegment({ value: key, type: 'key', cssClass: 'query-segment-key' }); + }; + + this.newKeyValue = function(value) { + return new MetricSegment({ value: value, type: 'value', cssClass: 'query-segment-value' }); + }; + + this.newCondition = function(condition) { + return new MetricSegment({ value: condition, type: 'condition', cssClass: 'query-keyword' }); + }; + + this.newOperator = function(op) { + return new MetricSegment({ value: op, type: 'operator', cssClass: 'query-segment-operator' }); + }; + + this.newOperators = function(ops) { + return _.map(ops, function(op) { + return new MetricSegment({ value: op, type: 'operator', cssClass: 'query-segment-operator' }); + }); + }; + + this.transformToSegments = function(addTemplateVars, variableTypeFilter) { + return function(results) { + let segments = _.map(results, function(segment) { + return self.newSegment({ value: segment.text, expandable: segment.expandable }); + }); + + if (addTemplateVars) { + _.each(templateSrv.variables, function(variable) { + if (variableTypeFilter === void 0 || variableTypeFilter === variable.type) { + segments.unshift(self.newSegment({ type: 'value', value: '$' + variable.name, expandable: true })); + } + }); + } + + return segments; + }; + }; + + this.newSelectMetric = function() { + return new MetricSegment({ value: 'select metric', fake: true }); + }; + + this.newPlusButton = function() { + return new MetricSegment({ + fake: true, + html: '', + type: 'plus-button', + cssClass: 'query-part', + }); + }; +} + +coreModule.service('uiSegmentSrv', uiSegmentSrv); diff --git a/public/app/core/specs/file_export.jest.ts b/public/app/core/specs/file_export.jest.ts index bbb894094ff..82097227b97 100644 --- a/public/app/core/specs/file_export.jest.ts +++ b/public/app/core/specs/file_export.jest.ts @@ -30,17 +30,17 @@ describe('file_export', () => { it('should export points in proper order', () => { let text = fileExport.convertSeriesListToCsv(ctx.seriesList, ctx.timeFormat); const expectedText = - 'Series;Time;Value\n' + - 'series_1;1500026100;1\n' + - 'series_1;1500026200;2\n' + - 'series_1;1500026300;null\n' + - 'series_1;1500026400;null\n' + - 'series_1;1500026500;null\n' + - 'series_1;1500026600;6\n' + - 'series_2;1500026100;11\n' + - 'series_2;1500026200;12\n' + - 'series_2;1500026300;13\n' + - 'series_2;1500026500;15\n'; + '"Series";"Time";"Value"\r\n' + + '"series_1";"1500026100";1\r\n' + + '"series_1";"1500026200";2\r\n' + + '"series_1";"1500026300";null\r\n' + + '"series_1";"1500026400";null\r\n' + + '"series_1";"1500026500";null\r\n' + + '"series_1";"1500026600";6\r\n' + + '"series_2";"1500026100";11\r\n' + + '"series_2";"1500026200";12\r\n' + + '"series_2";"1500026300";13\r\n' + + '"series_2";"1500026500";15'; expect(text).toBe(expectedText); }); @@ -50,15 +50,79 @@ 
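// The expected strings in the specs below encode the new CSV conventions
// introduced in file_export.ts later in this diff: string fields are wrapped
// in double quotes, embedded quotes are doubled (RFC 4180 style), rows end
// in \r\n, and the final row has no trailing newline. For example the value
//   some string with " in the middle
// is emitted as
//   "some string with "" in the middle"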
describe('file_export', () => { it('should export points in proper order', () => { let text = fileExport.convertSeriesListToCsvColumns(ctx.seriesList, ctx.timeFormat); const expectedText = - 'Time;series_1;series_2\n' + - '1500026100;1;11\n' + - '1500026200;2;12\n' + - '1500026300;null;13\n' + - '1500026400;null;null\n' + - '1500026500;null;15\n' + - '1500026600;6;null\n'; + '"Time";"series_1";"series_2"\r\n' + + '"1500026100";1;11\r\n' + + '"1500026200";2;12\r\n' + + '"1500026300";null;13\r\n' + + '"1500026400";null;null\r\n' + + '"1500026500";null;15\r\n' + + '"1500026600";6;null'; expect(text).toBe(expectedText); }); }); + + describe('when exporting table data to csv', () => { + + it('should properly escape special characters and quote all string values', () => { + const inputTable = { + columns: [ + { title: 'integer_value' }, + { text: 'string_value' }, + { title: 'float_value' }, + { text: 'boolean_value' }, + ], + rows: [ + [123, 'some_string', 1.234, true], + [0o765, 'some string with " in the middle', 1e-2, false], + [0o765, 'some string with "" in the middle', 1e-2, false], + [0o765, 'some string with """ in the middle', 1e-2, false], + [0o765, '"some string with " at the beginning', 1e-2, false], + [0o765, 'some string with " at the end"', 1e-2, false], + [0x123, 'some string with \n in the middle', 10.01, false], + [0b1011, 'some string with ; in the middle', -12.34, true], + [123, 'some string with ;; in the middle', -12.34, true], + ], + }; + + const returnedText = fileExport.convertTableDataToCsv(inputTable, false); + + const expectedText = + '"integer_value";"string_value";"float_value";"boolean_value"\r\n' + + '123;"some_string";1.234;true\r\n' + + '501;"some string with "" in the middle";0.01;false\r\n' + + '501;"some string with """" in the middle";0.01;false\r\n' + + '501;"some string with """""" in the middle";0.01;false\r\n' + + '501;"""some string with "" at the beginning";0.01;false\r\n' + + '501;"some string with "" at the end""";0.01;false\r\n' + + '291;"some string with \n in the middle";10.01;false\r\n' + + '11;"some string with ; in the middle";-12.34;true\r\n' + + '123;"some string with ;; in the middle";-12.34;true'; + + expect(returnedText).toBe(expectedText); + }); + + it('should decode HTML encoded characters', function() { + const inputTable = { + columns: [ + { text: 'string_value' }, + ], + rows: [ + ['"&ä'], + ['"some html"'], + ['some text'] + ], + }; + + const returnedText = fileExport.convertTableDataToCsv(inputTable, false); + + const expectedText = + '"string_value"\r\n' + + '"""&ä"\r\n' + + '"""some html"""\r\n' + + '"some text"'; + + expect(returnedText).toBe(expectedText); + }); + }); }); diff --git a/public/app/core/specs/kbn.jest.ts b/public/app/core/specs/kbn.jest.ts index 9fad1f30694..68945068043 100644 --- a/public/app/core/specs/kbn.jest.ts +++ b/public/app/core/specs/kbn.jest.ts @@ -101,38 +101,88 @@ describeValueFormat('d', 245, 100, 0, '35 week'); describeValueFormat('d', 2456, 10, 0, '6.73 year'); describe('date time formats', function() { + const epoch = 1505634997920; + const utcTime = moment.utc(epoch); + const browserTime = moment(epoch); + it('should format as iso date', function() { - var str = kbn.valueFormats.dateTimeAsIso(1505634997920, 1); - expect(str).toBe(moment(1505634997920).format('YYYY-MM-DD HH:mm:ss')); + var expected = browserTime.format('YYYY-MM-DD HH:mm:ss'); + var actual = kbn.valueFormats.dateTimeAsIso(epoch); + expect(actual).toBe(expected); + }); + + it('should format as iso date (in UTC)', function() { + var 
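// The second argument in the assertions below is the new isUtc flag added to
// kbn.valueFormats.dateTimeAsIso/dateTimeAsUS/dateTimeFromNow further down
// in this diff, e.g. (sketch): kbn.valueFormats.dateTimeAsIso(epoch, true)
// formats via moment.utc(epoch) instead of the browser-local moment(epoch).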
expected = utcTime.format('YYYY-MM-DD HH:mm:ss'); + var actual = kbn.valueFormats.dateTimeAsIso(epoch, true); + expect(actual).toBe(expected); }); it('should format as iso date and skip date when today', function() { var now = moment(); - var str = kbn.valueFormats.dateTimeAsIso(now.valueOf(), 1); - expect(str).toBe(now.format('HH:mm:ss')); + var expected = now.format('HH:mm:ss'); + var actual = kbn.valueFormats.dateTimeAsIso(now.valueOf(), false); + expect(actual).toBe(expected); + }); + + it('should format as iso date (in UTC) and skip date when today', function() { + var now = moment.utc(); + var expected = now.format('HH:mm:ss'); + var actual = kbn.valueFormats.dateTimeAsIso(now.valueOf(), true); + expect(actual).toBe(expected); }); it('should format as US date', function() { - var str = kbn.valueFormats.dateTimeAsUS(1505634997920, 1); - expect(str).toBe(moment(1505634997920).format('MM/DD/YYYY h:mm:ss a')); + var expected = browserTime.format('MM/DD/YYYY h:mm:ss a'); + var actual = kbn.valueFormats.dateTimeAsUS(epoch, false); + expect(actual).toBe(expected); + }); + + it('should format as US date (in UTC)', function() { + var expected = utcTime.format('MM/DD/YYYY h:mm:ss a'); + var actual = kbn.valueFormats.dateTimeAsUS(epoch, true); + expect(actual).toBe(expected); }); it('should format as US date and skip date when today', function() { var now = moment(); - var str = kbn.valueFormats.dateTimeAsUS(now.valueOf(), 1); - expect(str).toBe(now.format('h:mm:ss a')); + var expected = now.format('h:mm:ss a'); + var actual = kbn.valueFormats.dateTimeAsUS(now.valueOf(), false); + expect(actual).toBe(expected); + }); + + it('should format as US date (in UTC) and skip date when today', function() { + var now = moment.utc(); + var expected = now.format('h:mm:ss a'); + var actual = kbn.valueFormats.dateTimeAsUS(now.valueOf(), true); + expect(actual).toBe(expected); }); it('should format as from now with days', function() { var daysAgo = moment().add(-7, 'd'); - var str = kbn.valueFormats.dateTimeFromNow(daysAgo.valueOf(), 1); - expect(str).toBe('7 days ago'); + var expected = '7 days ago'; + var actual = kbn.valueFormats.dateTimeFromNow(daysAgo.valueOf(), false); + expect(actual).toBe(expected); + }); + + it('should format as from now with days (in UTC)', function() { + var daysAgo = moment.utc().add(-7, 'd'); + var expected = '7 days ago'; + var actual = kbn.valueFormats.dateTimeFromNow(daysAgo.valueOf(), true); + expect(actual).toBe(expected); }); it('should format as from now with minutes', function() { var daysAgo = moment().add(-2, 'm'); - var str = kbn.valueFormats.dateTimeFromNow(daysAgo.valueOf(), 1); - expect(str).toBe('2 minutes ago'); + var expected = '2 minutes ago'; + var actual = kbn.valueFormats.dateTimeFromNow(daysAgo.valueOf(), false); + expect(actual).toBe(expected); + }); + + it('should format as from now with minutes (in UTC)', function() { + var daysAgo = moment.utc().add(-2, 'm'); + var expected = '2 minutes ago'; + var actual = kbn.valueFormats.dateTimeFromNow(daysAgo.valueOf(), true); + expect(actual).toBe(expected); }); }); diff --git a/public/app/core/specs/time_series.jest.ts b/public/app/core/specs/time_series.jest.ts index 5043953071a..6214c687add 100644 --- a/public/app/core/specs/time_series.jest.ts +++ b/public/app/core/specs/time_series.jest.ts @@ -281,6 +281,20 @@ describe('TimeSeries', function() { expect(series.zindex).toBe(2); }); }); + + describe('override color', function() { + beforeEach(function() { + series.applySeriesOverrides([{ alias: 'test', color: 
'#112233' }]); + }); + + it('should set color', function() { + expect(series.color).toBe('#112233'); + }); + + it('should set bars.fillColor', function() { + expect(series.bars.fillColor).toBe('#112233'); + }); + }); }); describe('value formatter', function() { diff --git a/public/app/core/time_series2.ts b/public/app/core/time_series2.ts index 3e02b1ec939..4da64850e59 100644 --- a/public/app/core/time_series2.ts +++ b/public/app/core/time_series2.ts @@ -99,6 +99,7 @@ export default class TimeSeries { this.alias = opts.alias; this.aliasEscaped = _.escape(opts.alias); this.color = opts.color; + this.bars = { fillColor: opts.color }; this.valueFormater = kbn.valueFormats.none; this.stats = {}; this.legend = true; @@ -112,11 +113,11 @@ export default class TimeSeries { dashLength: [], }; this.points = {}; - this.bars = {}; this.yaxis = 1; this.zindex = 0; this.nullPointMode = null; delete this.stack; + delete this.bars.show; for (var i = 0; i < overrides.length; i++) { var override = overrides[i]; @@ -168,7 +169,7 @@ export default class TimeSeries { this.fillBelowTo = override.fillBelowTo; } if (override.color !== void 0) { - this.color = override.color; + this.setColor(override.color); } if (override.transform !== void 0) { this.transform = override.transform; @@ -346,4 +347,9 @@ export default class TimeSeries { return false; } + + setColor(color) { + this.color = color; + this.bars.fillColor = color; + } } diff --git a/public/app/core/utils/css_loader.ts b/public/app/core/utils/css_loader.ts index 42f59a9c27c..ba8623df842 100644 --- a/public/app/core/utils/css_loader.ts +++ b/public/app/core/utils/css_loader.ts @@ -67,7 +67,7 @@ export function fetch(load): any { return ''; } - // dont reload styles loaded in the head + // don't reload styles loaded in the head for (var i = 0; i < linkHrefs.length; i++) { if (load.address === linkHrefs[i]) { return ''; diff --git a/public/app/core/utils/file_export.ts b/public/app/core/utils/file_export.ts index 670326fc068..f25d340a0be 100644 --- a/public/app/core/utils/file_export.ts +++ b/public/app/core/utils/file_export.ts @@ -1,59 +1,108 @@ -import _ from 'lodash'; +import { isBoolean, isNumber, sortedUniq, sortedIndexOf, unescape as htmlUnescaped } from 'lodash'; import moment from 'moment'; import { saveAs } from 'file-saver'; +import { isNullOrUndefined } from 'util'; const DEFAULT_DATETIME_FORMAT = 'YYYY-MM-DDTHH:mm:ssZ'; const POINT_TIME_INDEX = 1; const POINT_VALUE_INDEX = 0; +const END_COLUMN = ';'; +const END_ROW = '\r\n'; +const QUOTE = '"'; +const EXPORT_FILENAME = 'grafana_data_export.csv'; + +function csvEscaped(text) { + if (!text) { + return text; + } + + return text.split(QUOTE).join(QUOTE + QUOTE); +} + +const domParser = new DOMParser(); +function htmlDecoded(text) { + if (!text) { + return text; + } + + const regexp = /&[^;]+;/g; + function htmlDecoded(value) { + const parsedDom = domParser.parseFromString(value, 'text/html'); + return parsedDom.body.textContent; + } + return text.replace(regexp, htmlDecoded).replace(regexp, htmlDecoded); +} + +function formatSpecialHeader(useExcelHeader) { + return useExcelHeader ? 
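// With END_COLUMN = ';' and END_ROW = '\r\n' defined above, this yields the
// Excel delimiter hint as the first line of the file, e.g. (sketch):
//   formatSpecialHeader(true)   // -> 'sep=;\r\n'
//   formatSpecialHeader(false)  // -> ''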
`sep=${END_COLUMN}${END_ROW}` : ''; +} + +function formatRow(row, addEndRowDelimiter = true) { + let text = ''; + for (let i = 0; i < row.length; i += 1) { + if (isBoolean(row[i]) || isNullOrUndefined(row[i])) { + text += row[i]; + } else if (isNumber(row[i])) { + text += row[i].toLocaleString(); + } else { + text += `${QUOTE}${csvEscaped(htmlUnescaped(htmlDecoded(row[i])))}${QUOTE}`; + } + + if (i < row.length - 1) { + text += END_COLUMN; + } + } + return addEndRowDelimiter ? text + END_ROW : text; +} + export function convertSeriesListToCsv(seriesList, dateTimeFormat = DEFAULT_DATETIME_FORMAT, excel = false) { - var text = (excel ? 'sep=;\n' : '') + 'Series;Time;Value\n'; - _.each(seriesList, function(series) { - _.each(series.datapoints, function(dp) { - text += - series.alias + ';' + moment(dp[POINT_TIME_INDEX]).format(dateTimeFormat) + ';' + dp[POINT_VALUE_INDEX] + '\n'; - }); - }); + let text = formatSpecialHeader(excel) + formatRow(['Series', 'Time', 'Value']); + for (let seriesIndex = 0; seriesIndex < seriesList.length; seriesIndex += 1) { + for (let i = 0; i < seriesList[seriesIndex].datapoints.length; i += 1) { + text += formatRow( + [ + seriesList[seriesIndex].alias, + moment(seriesList[seriesIndex].datapoints[i][POINT_TIME_INDEX]).format(dateTimeFormat), + seriesList[seriesIndex].datapoints[i][POINT_VALUE_INDEX], + ], + i < seriesList[seriesIndex].datapoints.length - 1 || seriesIndex < seriesList.length - 1 + ); + } + } return text; } export function exportSeriesListToCsv(seriesList, dateTimeFormat = DEFAULT_DATETIME_FORMAT, excel = false) { - var text = convertSeriesListToCsv(seriesList, dateTimeFormat, excel); - saveSaveBlob(text, 'grafana_data_export.csv'); + let text = convertSeriesListToCsv(seriesList, dateTimeFormat, excel); + saveSaveBlob(text, EXPORT_FILENAME); } export function convertSeriesListToCsvColumns(seriesList, dateTimeFormat = DEFAULT_DATETIME_FORMAT, excel = false) { - let text = (excel ? 
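// formatRow (defined above) quotes and escapes strings, leaves booleans and
// null/undefined bare, and renders numbers via toLocaleString(), so the
// decimal separator is locale-dependent. A sketch assuming an en-US locale:
//   formatRow(['web-01', 1.5, true])  // -> '"web-01";1.5;true\r\n'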
'sep=;\n' : '') + 'Time;'; // add header - _.each(seriesList, function(series) { - text += series.alias + ';'; - }); - text = text.substring(0, text.length - 1); - text += '\n'; - + let text = + formatSpecialHeader(excel) + + formatRow( + ['Time'].concat( + seriesList.map(function(val) { + return val.alias; + }) + ) + ); // process data seriesList = mergeSeriesByTime(seriesList); - var dataArr = [[]]; - var sIndex = 1; - _.each(seriesList, function(series) { - var cIndex = 0; - dataArr.push([]); - _.each(series.datapoints, function(dp) { - dataArr[0][cIndex] = moment(dp[POINT_TIME_INDEX]).format(dateTimeFormat); - dataArr[sIndex][cIndex] = dp[POINT_VALUE_INDEX]; - cIndex++; - }); - sIndex++; - }); // make text - for (var i = 0; i < dataArr[0].length; i++) { - text += dataArr[0][i] + ';'; - for (var j = 1; j < dataArr.length; j++) { - text += dataArr[j][i] + ';'; - } - text = text.substring(0, text.length - 1); - text += '\n'; + for (let i = 0; i < seriesList[0].datapoints.length; i += 1) { + const timestamp = moment(seriesList[0].datapoints[i][POINT_TIME_INDEX]).format(dateTimeFormat); + text += formatRow( + [timestamp].concat( + seriesList.map(function(series) { + return series.datapoints[i][POINT_VALUE_INDEX]; + }) + ), + i < seriesList[0].datapoints.length - 1 + ); } return text; @@ -71,15 +120,15 @@ function mergeSeriesByTime(seriesList) { timestamps.push(seriesPoints[j][POINT_TIME_INDEX]); } } - timestamps = _.sortedUniq(timestamps.sort()); + timestamps = sortedUniq(timestamps.sort()); for (let i = 0; i < seriesList.length; i++) { let seriesPoints = seriesList[i].datapoints; - let seriesTimestamps = _.map(seriesPoints, p => p[POINT_TIME_INDEX]); + let seriesTimestamps = seriesPoints.map(p => p[POINT_TIME_INDEX]); let extendedSeries = []; let pointIndex; for (let j = 0; j < timestamps.length; j++) { - pointIndex = _.sortedIndexOf(seriesTimestamps, timestamps[j]); + pointIndex = sortedIndexOf(seriesTimestamps, timestamps[j]); if (pointIndex !== -1) { extendedSeries.push(seriesPoints[pointIndex]); } else { @@ -93,27 +142,26 @@ function mergeSeriesByTime(seriesList) { export function exportSeriesListToCsvColumns(seriesList, dateTimeFormat = DEFAULT_DATETIME_FORMAT, excel = false) { let text = convertSeriesListToCsvColumns(seriesList, dateTimeFormat, excel); - saveSaveBlob(text, 'grafana_data_export.csv'); + saveSaveBlob(text, EXPORT_FILENAME); +} + +export function convertTableDataToCsv(table, excel = false) { + let text = formatSpecialHeader(excel); + // add headline + text += formatRow(table.columns.map(val => val.title || val.text)); + // process data + for (let i = 0; i < table.rows.length; i += 1) { + text += formatRow(table.rows[i], i < table.rows.length - 1); + } + return text; } export function exportTableDataToCsv(table, excel = false) { - var text = excel ? 
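// convertTableDataToCsv (added below) replaces this removed inline loop:
// headers come from column.title || column.text and every row goes through
// formatRow, e.g. (illustrative):
//   convertTableDataToCsv({ columns: [{ text: 'host' }], rows: [['web-01']] });
//   // -> '"host"\r\n"web-01"'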
'sep=;\n' : ''; - // add header - _.each(table.columns, function(column) { - text += (column.title || column.text) + ';'; - }); - text += '\n'; - // process data - _.each(table.rows, function(row) { - _.each(row, function(value) { - text += value + ';'; - }); - text += '\n'; - }); - saveSaveBlob(text, 'grafana_data_export.csv'); + let text = convertTableDataToCsv(table, excel); + saveSaveBlob(text, EXPORT_FILENAME); } export function saveSaveBlob(payload, fname) { - var blob = new Blob([payload], { type: 'text/csv;charset=utf-8' }); + let blob = new Blob([payload], { type: 'text/csv;charset=utf-8;header=present;' }); saveAs(blob, fname); } diff --git a/public/app/core/utils/kbn.ts b/public/app/core/utils/kbn.ts index dcb04a3e38e..f1c782846fc 100644 --- a/public/app/core/utils/kbn.ts +++ b/public/app/core/utils/kbn.ts @@ -485,6 +485,7 @@ kbn.valueFormats.EHs = kbn.formatBuilders.decimalSIPrefix('H/s', 6); // Throughput kbn.valueFormats.ops = kbn.formatBuilders.simpleCountUnit('ops'); +kbn.valueFormats.reqps = kbn.formatBuilders.simpleCountUnit('reqps'); kbn.valueFormats.rps = kbn.formatBuilders.simpleCountUnit('rps'); kbn.valueFormats.wps = kbn.formatBuilders.simpleCountUnit('wps'); kbn.valueFormats.iops = kbn.formatBuilders.simpleCountUnit('iops'); @@ -595,7 +596,7 @@ kbn.valueFormats.radr = kbn.formatBuilders.decimalSIPrefix('R'); kbn.valueFormats.radsvh = kbn.formatBuilders.decimalSIPrefix('Sv/h'); // Concentration -kbn.valueFormats.conppm = kbn.formatBuilders.fixedUnit('ppm'); +kbn.valueFormats.ppm = kbn.formatBuilders.fixedUnit('ppm'); kbn.valueFormats.conppb = kbn.formatBuilders.fixedUnit('ppb'); kbn.valueFormats.conngm3 = kbn.formatBuilders.fixedUnit('ng/m3'); kbn.valueFormats.conngNm3 = kbn.formatBuilders.fixedUnit('ng/Nm3'); @@ -620,13 +621,13 @@ kbn.valueFormats.ms = function(size, decimals, scaledDecimals) { // Less than 1 min return kbn.toFixedScaled(size / 1000, decimals, scaledDecimals, 3, ' s'); } else if (Math.abs(size) < 3600000) { - // Less than 1 hour, devide in minutes + // Less than 1 hour, divide in minutes return kbn.toFixedScaled(size / 60000, decimals, scaledDecimals, 5, ' min'); } else if (Math.abs(size) < 86400000) { - // Less than one day, devide in hours + // Less than one day, divide in hours return kbn.toFixedScaled(size / 3600000, decimals, scaledDecimals, 7, ' hour'); } else if (Math.abs(size) < 31536000000) { - // Less than one year, devide in days + // Less than one year, divide in days return kbn.toFixedScaled(size / 86400000, decimals, scaledDecimals, 8, ' day'); } @@ -638,15 +639,15 @@ kbn.valueFormats.s = function(size, decimals, scaledDecimals) { return ''; } - // Less than 1 µs, devide in ns + // Less than 1 µs, divide in ns if (Math.abs(size) < 0.000001) { return kbn.toFixedScaled(size * 1e9, decimals, scaledDecimals - decimals, -9, ' ns'); } - // Less than 1 ms, devide in µs + // Less than 1 ms, divide in µs if (Math.abs(size) < 0.001) { return kbn.toFixedScaled(size * 1e6, decimals, scaledDecimals - decimals, -6, ' µs'); } - // Less than 1 second, devide in ms + // Less than 1 second, divide in ms if (Math.abs(size) < 1) { return kbn.toFixedScaled(size * 1e3, decimals, scaledDecimals - decimals, -3, ' ms'); } @@ -654,16 +655,16 @@ kbn.valueFormats.s = function(size, decimals, scaledDecimals) { if (Math.abs(size) < 60) { return kbn.toFixed(size, decimals) + ' s'; } else if (Math.abs(size) < 3600) { - // Less than 1 hour, devide in minutes + // Less than 1 hour, divide in minutes return kbn.toFixedScaled(size / 60, decimals, scaledDecimals, 1, ' 
min'); } else if (Math.abs(size) < 86400) { - // Less than one day, devide in hours + // Less than one day, divide in hours return kbn.toFixedScaled(size / 3600, decimals, scaledDecimals, 4, ' hour'); } else if (Math.abs(size) < 604800) { - // Less than one week, devide in days + // Less than one week, divide in days return kbn.toFixedScaled(size / 86400, decimals, scaledDecimals, 5, ' day'); } else if (Math.abs(size) < 31536000) { - // Less than one year, devide in week + // Less than one year, divide in week return kbn.toFixedScaled(size / 604800, decimals, scaledDecimals, 6, ' week'); } @@ -815,8 +816,8 @@ kbn.valueFormats.timeticks = function(size, decimals, scaledDecimals) { return kbn.valueFormats.s(size / 100, decimals, scaledDecimals); }; -kbn.valueFormats.dateTimeAsIso = function(epoch) { - var time = moment(epoch); +kbn.valueFormats.dateTimeAsIso = function(epoch, isUtc) { + var time = isUtc ? moment.utc(epoch) : moment(epoch); if (moment().isSame(epoch, 'day')) { return time.format('HH:mm:ss'); @@ -824,8 +825,8 @@ kbn.valueFormats.dateTimeAsIso = function(epoch) { return time.format('YYYY-MM-DD HH:mm:ss'); }; -kbn.valueFormats.dateTimeAsUS = function(epoch) { - var time = moment(epoch); +kbn.valueFormats.dateTimeAsUS = function(epoch, isUtc) { + var time = isUtc ? moment.utc(epoch) : moment(epoch); if (moment().isSame(epoch, 'day')) { return time.format('h:mm:ss a'); @@ -833,8 +834,9 @@ kbn.valueFormats.dateTimeAsUS = function(epoch) { return time.format('MM/DD/YYYY h:mm:ss a'); }; -kbn.valueFormats.dateTimeFromNow = function(epoch) { - return moment(epoch).fromNow(); +kbn.valueFormats.dateTimeFromNow = function(epoch, isUtc) { + var time = isUtc ? moment.utc(epoch) : moment(epoch); + return time.fromNow(); }; ///// FORMAT MENU ///// @@ -948,6 +950,7 @@ kbn.getUnitFormats = function() { text: 'throughput', submenu: [ { text: 'ops/sec (ops)', value: 'ops' }, + { text: 'requets/sec (rps)', value: 'reqps' }, { text: 'reads/sec (rps)', value: 'rps' }, { text: 'writes/sec (wps)', value: 'wps' }, { text: 'I/O ops/sec (iops)', value: 'iops' }, @@ -1099,7 +1102,7 @@ kbn.getUnitFormats = function() { { text: 'concentration', submenu: [ - { text: 'parts-per-million (ppm)', value: 'conppm' }, + { text: 'parts-per-million (ppm)', value: 'ppm' }, { text: 'parts-per-billion (ppb)', value: 'conppb' }, { text: 'nanogram per cubic metre (ng/m3)', value: 'conngm3' }, { text: 'nanogram per normal cubic metre (ng/Nm3)', value: 'conngNm3' }, diff --git a/public/app/core/utils/location_util.ts b/public/app/core/utils/location_util.ts index f8d6aa4ee5f..735272285ff 100644 --- a/public/app/core/utils/location_util.ts +++ b/public/app/core/utils/location_util.ts @@ -1,6 +1,11 @@ import config from 'app/core/config'; -const _stripBaseFromUrl = url => { +// Slash encoding for angular location provider, see https://github.com/angular/angular.js/issues/10479 +const SLASH = ''; +export const decodePathComponent = (pc: string) => decodeURIComponent(pc).replace(new RegExp(SLASH, 'g'), '/'); +export const encodePathComponent = (pc: string) => encodeURIComponent(pc.replace(/\//g, SLASH)); + +export const stripBaseFromUrl = url => { const appSubUrl = config.appSubUrl; const stripExtraChars = appSubUrl.endsWith('/') ? 
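// The new 'x' keybinding above uses these helpers to carry a JSON blob in
// the URL path: slashes are swapped for the reserved SLASH placeholder
// character before encodeURIComponent, and reversed on the way out.
// Round-trip sketch (payload shape hypothetical):
//   const state = encodePathComponent(JSON.stringify({ expr: 'rate(up[5m])' }));
//   $location.url(`/explore/${state}`);
//   const restored = JSON.parse(decodePathComponent(state));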
1 : 0; const urlWithoutBase = @@ -9,6 +14,4 @@ const _stripBaseFromUrl = url => { return urlWithoutBase; }; -export default { - stripBaseFromUrl: _stripBaseFromUrl, -}; +export default { stripBaseFromUrl }; diff --git a/public/app/dev.ts b/public/app/dev.ts new file mode 100644 index 00000000000..461ccd1d74e --- /dev/null +++ b/public/app/dev.ts @@ -0,0 +1,9 @@ +import app from './app'; + +/* +Import theme CSS based on env vars, e.g.: `env GRAFANA_THEME=light yarn start` +*/ +declare var GRAFANA_THEME: any; +require('../sass/grafana.' + GRAFANA_THEME + '.scss'); + +app.init(); diff --git a/public/app/features/alerting/alert_def.ts b/public/app/features/alerting/alert_def.ts index d86461780ba..797a67abfd8 100644 --- a/public/app/features/alerting/alert_def.ts +++ b/public/app/features/alerting/alert_def.ts @@ -124,7 +124,7 @@ function joinEvalMatches(matches, separator: string) { } function getAlertAnnotationInfo(ah) { - // backward compatability, can be removed in grafana 5.x + // backward compatibility, can be removed in grafana 5.x // old way stored evalMatches in data property directly, // new way stores it in evalMatches property on new data object diff --git a/public/app/features/alerting/specs/threshold_mapper_specs.ts b/public/app/features/alerting/specs/threshold_mapper_specs.ts index 3b284776b8d..1d68fce7050 100644 --- a/public/app/features/alerting/specs/threshold_mapper_specs.ts +++ b/public/app/features/alerting/specs/threshold_mapper_specs.ts @@ -4,7 +4,7 @@ import { ThresholdMapper } from '../threshold_mapper'; describe('ThresholdMapper', () => { describe('with greater than evaluator', () => { - it('can mapp query conditions to thresholds', () => { + it('can map query conditions to thresholds', () => { var panel: any = { type: 'graph', alert: { @@ -25,7 +25,7 @@ describe('ThresholdMapper', () => { }); describe('with outside range evaluator', () => { - it('can mapp query conditions to thresholds', () => { + it('can map query conditions to thresholds', () => { var panel: any = { type: 'graph', alert: { @@ -49,7 +49,7 @@ describe('ThresholdMapper', () => { }); describe('with inside range evaluator', () => { - it('can mapp query conditions to thresholds', () => { + it('can map query conditions to thresholds', () => { var panel: any = { type: 'graph', alert: { diff --git a/public/app/features/alerting/threshold_mapper.ts b/public/app/features/alerting/threshold_mapper.ts index 3025e13aacd..9142c74b6e3 100644 --- a/public/app/features/alerting/threshold_mapper.ts +++ b/public/app/features/alerting/threshold_mapper.ts @@ -1,9 +1,5 @@ export class ThresholdMapper { static alertToGraphThresholds(panel) { - if (panel.type !== 'graph') { - return false; - } - for (var i = 0; i < panel.alert.conditions.length; i++) { let condition = panel.alert.conditions[i]; if (condition.type !== 'query') { diff --git a/public/app/features/all.js b/public/app/features/all.js deleted file mode 100644 index 759be6c11d2..00000000000 --- a/public/app/features/all.js +++ /dev/null @@ -1,15 +0,0 @@ -define([ - './panellinks/module', - './dashlinks/module', - './annotations/all', - './templating/all', - './plugins/all', - './dashboard/all', - './playlist/all', - './snapshot/all', - './panel/all', - './org/all', - './admin/admin', - './alerting/all', - './styleguide/styleguide', -], function () {}); diff --git a/public/app/features/all.ts b/public/app/features/all.ts new file mode 100644 index 00000000000..df987a8b59b --- /dev/null +++ b/public/app/features/all.ts @@ -0,0 +1,13 @@ +import 
'./panellinks/module'; +import './dashlinks/module'; +import './annotations/all'; +import './templating/all'; +import './plugins/all'; +import './dashboard/all'; +import './playlist/all'; +import './snapshot/all'; +import './panel/all'; +import './org/all'; +import './admin/admin'; +import './alerting/all'; +import './styleguide/styleguide'; diff --git a/public/app/features/annotations/events_processing.ts b/public/app/features/annotations/events_processing.ts index 040bf6425c1..667285d7d43 100644 --- a/public/app/features/annotations/events_processing.ts +++ b/public/app/features/annotations/events_processing.ts @@ -56,7 +56,7 @@ function isStartOfRegion(event): boolean { export function dedupAnnotations(annotations) { let dedup = []; - // Split events by annotationId property existance + // Split events by annotationId property existence let events = _.partition(annotations, 'id'); let eventsById = _.groupBy(events[0], 'id'); diff --git a/public/app/features/dashboard/all.ts b/public/app/features/dashboard/all.ts index f2e2e3dcdc0..a8f491f3ddd 100644 --- a/public/app/features/dashboard/all.ts +++ b/public/app/features/dashboard/all.ts @@ -6,6 +6,7 @@ import './dashnav/dashnav'; import './submenu/submenu'; import './save_as_modal'; import './save_modal'; +import './save_provisioned_modal'; import './shareModalCtrl'; import './share_snapshot_ctrl'; import './dashboard_srv'; diff --git a/public/app/features/dashboard/change_tracker.ts b/public/app/features/dashboard/change_tracker.ts new file mode 100644 index 00000000000..745b76ce347 --- /dev/null +++ b/public/app/features/dashboard/change_tracker.ts @@ -0,0 +1,186 @@ +import angular from 'angular'; +import _ from 'lodash'; +import { DashboardModel } from './dashboard_model'; + +export class ChangeTracker { + current: any; + originalPath: any; + scope: any; + original: any; + next: any; + $window: any; + + /** @ngInject */ + constructor( + dashboard, + scope, + originalCopyDelay, + private $location, + $window, + private $timeout, + private contextSrv, + private $rootScope + ) { + this.$location = $location; + this.$window = $window; + + this.current = dashboard; + this.originalPath = $location.path(); + this.scope = scope; + + // register events + scope.onAppEvent('dashboard-saved', () => { + this.original = this.current.getSaveModelClone(); + this.originalPath = $location.path(); + }); + + $window.onbeforeunload = () => { + if (this.ignoreChanges()) { + return undefined; + } + if (this.hasChanges()) { + return 'There are unsaved changes to this dashboard'; + } + return undefined; + }; + + scope.$on('$locationChangeStart', (event, next) => { + // check if we should look for changes + if (this.originalPath === $location.path()) { + return true; + } + if (this.ignoreChanges()) { + return true; + } + + if (this.hasChanges()) { + event.preventDefault(); + this.next = next; + + this.$timeout(() => { + this.open_modal(); + }); + } + return false; + }); + + if (originalCopyDelay) { + this.$timeout(() => { + // wait for different services to patch the dashboard (missing properties) + this.original = dashboard.getSaveModelClone(); + }, originalCopyDelay); + } else { + this.original = dashboard.getSaveModelClone(); + } + } + + // for some dashboards and users + // changes should be ignored + ignoreChanges() { + if (!this.original) { + return true; + } + if (!this.contextSrv.isEditor) { + return true; + } + if (!this.current || !this.current.meta) { + return true; + } + + var meta = this.current.meta; + return !meta.canSave || meta.fromScript || 
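// i.e. unsaved-changes tracking is skipped when there is no original
// snapshot yet, for non-editors, for dashboards without meta, and for
// dashboards that cannot be saved or came from a script or file. For
// everything else, hasChanges() below compares angular.toJson() of the
// cleaned current model against the cleaned original snapshot.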
meta.fromFile; + } + + // remove stuff that should not count in diff + cleanDashboardFromIgnoredChanges(dashData) { + // need to new up the domain model class to get access to expand / collapse row logic + let model = new DashboardModel(dashData); + + // Expand all rows before making comparison. This is required because row expand / collapse + // change order of panel array and panel positions. + model.expandRows(); + + let dash = model.getSaveModelClone(); + + // ignore time and refresh + dash.time = 0; + dash.refresh = 0; + dash.schemaVersion = 0; + + // ignore iteration property + delete dash.iteration; + + dash.panels = _.filter(dash.panels, panel => { + if (panel.repeatPanelId) { + return false; + } + + // remove scopedVars + panel.scopedVars = null; + + // ignore panel legend sort + if (panel.legend) { + delete panel.legend.sort; + delete panel.legend.sortDesc; + } + + return true; + }); + + // ignore template variable values + _.each(dash.templating.list, function(value) { + value.current = null; + value.options = null; + value.filters = null; + }); + + return dash; + } + + hasChanges() { + let current = this.cleanDashboardFromIgnoredChanges(this.current.getSaveModelClone()); + let original = this.cleanDashboardFromIgnoredChanges(this.original); + + var currentTimepicker = _.find(current.nav, { type: 'timepicker' }); + var originalTimepicker = _.find(original.nav, { type: 'timepicker' }); + + if (currentTimepicker && originalTimepicker) { + currentTimepicker.now = originalTimepicker.now; + } + + var currentJson = angular.toJson(current, true); + var originalJson = angular.toJson(original, true); + + return currentJson !== originalJson; + } + + discardChanges() { + this.original = null; + this.gotoNext(); + } + + open_modal() { + this.$rootScope.appEvent('show-modal', { + templateHtml: '', + modalClass: 'modal--narrow confirm-modal', + }); + } + + saveChanges() { + var self = this; + var cancel = this.$rootScope.$on('dashboard-saved', () => { + cancel(); + this.$timeout(() => { + self.gotoNext(); + }); + }); + + this.$rootScope.appEvent('save-dashboard'); + } + + gotoNext() { + var baseLen = this.$location.absUrl().length - this.$location.url().length; + var nextUrl = this.next.substring(baseLen); + this.$location.url(nextUrl); + } +} diff --git a/public/app/features/dashboard/dashboard_model.ts b/public/app/features/dashboard/dashboard_model.ts index 3fa8ed9973a..8a300a80341 100644 --- a/public/app/features/dashboard/dashboard_model.ts +++ b/public/app/features/dashboard/dashboard_model.ts @@ -129,7 +129,7 @@ export class DashboardModel { this.meta = meta; } - // cleans meta data and other non peristent state + // cleans meta data and other non persistent state getSaveModelClone() { // make clone var copy: any = {}; @@ -606,7 +606,7 @@ export class DashboardModel { if (panel.gridPos.x + panel.gridPos.w * 2 <= GRID_COLUMN_COUNT) { newPanel.gridPos.x += panel.gridPos.w; } else { - // add bellow + // add below newPanel.gridPos.y += panel.gridPos.h; } @@ -649,6 +649,7 @@ export class DashboardModel { for (let panel of row.panels) { // make sure y is adjusted (in case row moved while collapsed) + // console.log('yDiff', yDiff); panel.gridPos.y -= yDiff; // insert after row this.panels.splice(insertPos, 0, new PanelModel(panel)); @@ -657,7 +658,7 @@ export class DashboardModel { yMax = Math.max(yMax, panel.gridPos.y + panel.gridPos.h); } - const pushDownAmount = yMax - row.gridPos.y; + const pushDownAmount = yMax - row.gridPos.y - 1; // push panels below down for (let panelIndex = 
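// pushDownAmount is how far panels below the expanded row must shift: the
// bottom edge (yMax) of the re-inserted panels minus the row's own y. The
// new `- 1` appears to compensate for the row header's single grid unit so
// it is not counted twice (a reading of the change, not stated in the diff).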
insertPos; panelIndex < this.panels.length; panelIndex++) { diff --git a/public/app/features/dashboard/dashboard_srv.ts b/public/app/features/dashboard/dashboard_srv.ts index 9d766fdfc3f..b1419df7376 100644 --- a/public/app/features/dashboard/dashboard_srv.ts +++ b/public/app/features/dashboard/dashboard_srv.ts @@ -100,11 +100,15 @@ export class DashboardSrv { .catch(this.handleSaveDashboardError.bind(this, clone, options)); } - saveDashboard(options, clone) { + saveDashboard(options?, clone?) { if (clone) { this.setCurrent(this.create(clone, this.dash.meta)); } + if (this.dash.meta.provisioned) { + return this.showDashboardProvisionedModal(); + } + if (!this.dash.meta.canSave && options.makeEditable !== true) { return Promise.resolve(); } @@ -120,6 +124,16 @@ export class DashboardSrv { return this.save(this.dash.getSaveModelClone(), options); } + saveJSONDashboard(json: string) { + return this.save(JSON.parse(json), {}); + } + + showDashboardProvisionedModal() { + this.$rootScope.appEvent('show-modal', { + templateHtml: '', + }); + } + showSaveAsModal() { this.$rootScope.appEvent('show-modal', { templateHtml: '', diff --git a/public/app/features/dashboard/dashgrid/AddPanelPanel.tsx b/public/app/features/dashboard/dashgrid/AddPanelPanel.tsx index aeb840c317a..094bc49b708 100644 --- a/public/app/features/dashboard/dashgrid/AddPanelPanel.tsx +++ b/public/app/features/dashboard/dashgrid/AddPanelPanel.tsx @@ -1,12 +1,13 @@ import React from 'react'; import _ from 'lodash'; - +import classNames from 'classnames'; import config from 'app/core/config'; import { PanelModel } from '../panel_model'; import { PanelContainer } from './PanelContainer'; import ScrollBar from 'app/core/components/ScrollBar/ScrollBar'; import store from 'app/core/store'; import { LS_PANEL_COPY_KEY } from 'app/core/constants'; +import Highlighter from 'react-highlight-words'; export interface AddPanelPanelProps { panel: PanelModel; @@ -16,21 +17,42 @@ export interface AddPanelPanelProps { export interface AddPanelPanelState { filter: string; panelPlugins: any[]; + copiedPanelPlugins: any[]; + tab: string; } export class AddPanelPanel extends React.Component { + private scrollbar: ScrollBar; + constructor(props) { super(props); this.handleCloseAddPanel = this.handleCloseAddPanel.bind(this); this.renderPanelItem = this.renderPanelItem.bind(this); + this.panelSizeChanged = this.panelSizeChanged.bind(this); this.state = { - panelPlugins: this.getPanelPlugins(), + panelPlugins: this.getPanelPlugins(''), + copiedPanelPlugins: this.getCopiedPanelPlugins(''), filter: '', + tab: 'Add', }; } - getPanelPlugins() { + componentDidMount() { + this.props.panel.events.on('panel-size-changed', this.panelSizeChanged); + } + + componentWillUnmount() { + this.props.panel.events.off('panel-size-changed', this.panelSizeChanged); + } + + panelSizeChanged() { + setTimeout(() => { + this.scrollbar.update(); + }); + } + + getPanelPlugins(filter) { let panels = _.chain(config.panels) .filter({ hideFromList: false }) .map(item => item) @@ -39,6 +61,19 @@ export class AddPanelPanel extends React.Component item) + .value(); + let copiedPanels = []; + let copiedPanelJson = store.get(LS_PANEL_COPY_KEY); if (copiedPanelJson) { let copiedPanel = JSON.parse(copiedPanelJson); @@ -48,12 +83,13 @@ export class AddPanelPanel extends React.Component { @@ -92,28 +128,117 @@ export class AddPanelPanel extends React.Component; + } + renderPanelItem(panel, index) { return (
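// Both tabs reuse one filter pipeline: filterChange() below re-derives
// panelPlugins and copiedPanelPlugins via filterPanels(), which keeps panels
// whose name matches new RegExp(filter, 'i'), and renderText() above wraps
// the matches with the Highlighter component imported from
// react-highlight-words. Sketch:
//   filterPanels([{ name: 'Graph' }, { name: 'Table' }], 'gra');
//   // -> [{ name: 'Graph' }]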
    this.onAddPanel(panel)} title={panel.name}> -
    {panel.name}
    +
    {this.renderText(panel.name)}
    ); } + noCopiedPanelPlugins() { + return
    No copied panels yet.
    ; + } + + filterChange(evt) { + this.setState({ + filter: evt.target.value, + panelPlugins: this.getPanelPlugins(evt.target.value), + copiedPanelPlugins: this.getCopiedPanelPlugins(evt.target.value), + }); + } + + filterPanels(panels, filter) { + let regex = new RegExp(filter, 'i'); + return panels.filter(panel => { + return regex.test(panel.name); + }); + } + + openCopy() { + this.setState({ + tab: 'Copy', + filter: '', + panelPlugins: this.getPanelPlugins(''), + copiedPanelPlugins: this.getCopiedPanelPlugins(''), + }); + } + + openAdd() { + this.setState({ + tab: 'Add', + filter: '', + panelPlugins: this.getPanelPlugins(''), + copiedPanelPlugins: this.getCopiedPanelPlugins(''), + }); + } + render() { + let addClass = classNames({ + 'active active--panel': this.state.tab === 'Add', + '': this.state.tab === 'Copy', + }); + + let copyClass = classNames({ + '': this.state.tab === 'Add', + 'active active--panel': this.state.tab === 'Copy', + }); + + let panelTab; + + if (this.state.tab === 'Add') { + panelTab = this.state.panelPlugins.map(this.renderPanelItem); + } else if (this.state.tab === 'Copy') { + if (this.state.copiedPanelPlugins.length > 0) { + panelTab = this.state.copiedPanelPlugins.map(this.renderPanelItem); + } else { + panelTab = this.noCopiedPanelPlugins(); + } + } + return ( -
    +
    New Panel - Select a visualization +
      +
    • +
      + Add +
      +
    • +
    • +
      + Paste +
      +
    • +
    - {this.state.panelPlugins.map(this.renderPanelItem)} + (this.scrollbar = element)} className="add-panel__items"> +
    + +
    + {panelTab} +
    ); diff --git a/public/app/features/dashboard/dashgrid/DashboardRow.tsx b/public/app/features/dashboard/dashgrid/DashboardRow.tsx index c2a84cb7da9..b133d4450bb 100644 --- a/public/app/features/dashboard/dashgrid/DashboardRow.tsx +++ b/public/app/features/dashboard/dashgrid/DashboardRow.tsx @@ -4,7 +4,6 @@ import { PanelModel } from '../panel_model'; import { PanelContainer } from './PanelContainer'; import templateSrv from 'app/features/templating/template_srv'; import appEvents from 'app/core/app_events'; -import config from 'app/core/config'; export interface DashboardRowProps { panel: PanelModel; @@ -95,7 +94,7 @@ export class DashboardRow extends React.Component { {title} ({hiddenPanels} hidden panels) - {config.bootData.user.orgRole !== 'Viewer' && ( + {this.dashboard.meta.canEdit === true && (
    diff --git a/public/app/features/dashboard/folder_picker/folder_picker.ts b/public/app/features/dashboard/folder_picker/folder_picker.ts index cbf23e3ea4b..b8ae18b14d3 100644 --- a/public/app/features/dashboard/folder_picker/folder_picker.ts +++ b/public/app/features/dashboard/folder_picker/folder_picker.ts @@ -19,9 +19,12 @@ export class FolderPickerCtrl { newFolderNameTouched: boolean; hasValidationError: boolean; validationError: any; + isEditor: boolean; /** @ngInject */ - constructor(private backendSrv, private validationSrv) { + constructor(private backendSrv, private validationSrv, private contextSrv) { + this.isEditor = this.contextSrv.isEditor; + if (!this.labelClass) { this.labelClass = 'width-7'; } @@ -38,19 +41,20 @@ export class FolderPickerCtrl { return this.backendSrv.get('api/search', params).then(result => { if ( - query === '' || - query.toLowerCase() === 'g' || - query.toLowerCase() === 'ge' || - query.toLowerCase() === 'gen' || - query.toLowerCase() === 'gene' || - query.toLowerCase() === 'gener' || - query.toLowerCase() === 'genera' || - query.toLowerCase() === 'general' + this.isEditor && + (query === '' || + query.toLowerCase() === 'g' || + query.toLowerCase() === 'ge' || + query.toLowerCase() === 'gen' || + query.toLowerCase() === 'gene' || + query.toLowerCase() === 'gener' || + query.toLowerCase() === 'genera' || + query.toLowerCase() === 'general') ) { result.unshift({ title: this.rootName, id: 0 }); } - if (this.enableCreateNew && query === '') { + if (this.isEditor && this.enableCreateNew && query === '') { result.unshift({ title: '-- New Folder --', id: -1 }); } diff --git a/public/app/features/dashboard/history/history.ts b/public/app/features/dashboard/history/history.ts index d9f0c087438..be6ad5af1ba 100644 --- a/public/app/features/dashboard/history/history.ts +++ b/public/app/features/dashboard/history/history.ts @@ -133,7 +133,7 @@ export class HistoryListCtrl { return this.historySrv .getHistoryList(this.dashboard, options) .then(revisions => { - // set formated dates & default values + // set formatted dates & default values for (let rev of revisions) { rev.createdDateString = this.formatDate(rev.created); rev.ageString = this.formatBasicDate(rev.created); diff --git a/public/app/features/dashboard/save_provisioned_modal.ts b/public/app/features/dashboard/save_provisioned_modal.ts new file mode 100644 index 00000000000..ba96ce0b0b9 --- /dev/null +++ b/public/app/features/dashboard/save_provisioned_modal.ts @@ -0,0 +1,77 @@ +import angular from 'angular'; +import { saveAs } from 'file-saver'; +import coreModule from 'app/core/core_module'; + +const template = ` + +`; + +export class SaveProvisionedDashboardModalCtrl { + dash: any; + dashboardJson: string; + dismiss: () => void; + + /** @ngInject */ + constructor(dashboardSrv) { + this.dash = dashboardSrv.getCurrent().getSaveModelClone(); + delete this.dash.id; + this.dashboardJson = JSON.stringify(this.dash, null, 2); + } + + save() { + var blob = new Blob([angular.toJson(this.dash, true)], { + type: 'application/json;charset=utf-8', + }); + saveAs(blob, this.dash.title + '-' + new Date().getTime() + '.json'); + } + + getJsonForClipboard() { + return this.dashboardJson; + } +} + +export function saveProvisionedDashboardModalDirective() { + return { + restrict: 'E', + template: template, + controller: SaveProvisionedDashboardModalCtrl, + bindToController: true, + controllerAs: 'ctrl', + scope: { dismiss: '&' }, + }; +} + +coreModule.directive('saveProvisionedDashboardModal', 
saveProvisionedDashboardModalDirective); diff --git a/public/app/features/dashboard/settings/settings.html b/public/app/features/dashboard/settings/settings.html index 5103fab8b75..46d84a7a2fd 100644 --- a/public/app/features/dashboard/settings/settings.html +++ b/public/app/features/dashboard/settings/settings.html @@ -87,12 +87,22 @@
    -
    -

    View JSON

    +
    +

    JSON Model

    +
+ The JSON Model below is the data structure that defines the dashboard, including settings, panel settings & layout, + queries, etc. +
    + +
    + +
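// Editor's note: a hedged sketch of the JSON Model save path -- the textarea
// contents are parsed and handed to the regular save pipeline via
// dashboardSrv.saveJSONDashboard() (added in dashboard_srv.ts earlier in this
// patch), after which settings.ts below reloads the route. The save() body here
// is a stand-in for the real service call, not Grafana's implementation.
class DashboardSrvSketch {
  save(clone: any, options: any): Promise<any> {
    return Promise.resolve(clone); // the real service sends the model to the backend
  }

  saveJSONDashboard(json: string): Promise<any> {
    // JSON.parse throws on malformed input, so an invalid model never reaches save()
    return this.save(JSON.parse(json), {});
  }
}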
    diff --git a/public/app/features/dashboard/settings/settings.ts b/public/app/features/dashboard/settings/settings.ts index e9d5c6180be..5acbbcf29c5 100755 --- a/public/app/features/dashboard/settings/settings.ts +++ b/public/app/features/dashboard/settings/settings.ts @@ -17,7 +17,14 @@ export class SettingsCtrl { hasUnsavedFolderChange: boolean; /** @ngInject */ - constructor(private $scope, private $location, private $rootScope, private backendSrv, private dashboardSrv) { + constructor( + private $scope, + private $route, + private $location, + private $rootScope, + private backendSrv, + private dashboardSrv + ) { // temp hack for annotations and variables editors // that rely on inherited scope $scope.dashboard = this.dashboard; @@ -30,7 +37,7 @@ export class SettingsCtrl { }); }); - this.canSaveAs = contextSrv.isEditor; + this.canSaveAs = this.dashboard.meta.canEdit && contextSrv.hasEditPermissionInFolders; this.canSave = this.dashboard.meta.canSave; this.canDelete = this.dashboard.meta.canSave; @@ -93,8 +100,8 @@ export class SettingsCtrl { } this.sections.push({ - title: 'View JSON', - id: 'view_json', + title: 'JSON Model', + id: 'dashboard_json', icon: 'gicon gicon-json', }); @@ -137,6 +144,12 @@ export class SettingsCtrl { this.dashboardSrv.saveDashboard(); } + saveDashboardJson() { + this.dashboardSrv.saveJSONDashboard(this.json).then(() => { + this.$route.reload(); + }); + } + onPostSave() { this.hasUnsavedFolderChange = false; } diff --git a/public/app/features/dashboard/specs/AddPanelPanel.jest.tsx b/public/app/features/dashboard/specs/AddPanelPanel.jest.tsx new file mode 100644 index 00000000000..872d9296d12 --- /dev/null +++ b/public/app/features/dashboard/specs/AddPanelPanel.jest.tsx @@ -0,0 +1,102 @@ +import React from 'react'; +import { AddPanelPanel } from './../dashgrid/AddPanelPanel'; +import { PanelModel } from '../panel_model'; +import { shallow } from 'enzyme'; +import config from '../../../core/config'; + +jest.mock('app/core/store', () => ({ + get: key => { + return null; + }, + delete: key => { + return null; + }, +})); + +describe('AddPanelPanel', () => { + let wrapper, dashboardMock, getPanelContainer, panel; + + beforeEach(() => { + config.panels = [ + { + id: 'singlestat', + hideFromList: false, + name: 'Singlestat', + sort: 2, + info: { + logos: { + small: '', + }, + }, + }, + { + id: 'hidden', + hideFromList: true, + name: 'Hidden', + sort: 100, + info: { + logos: { + small: '', + }, + }, + }, + { + id: 'graph', + hideFromList: false, + name: 'Graph', + sort: 1, + info: { + logos: { + small: '', + }, + }, + }, + { + id: 'alexander_zabbix', + hideFromList: false, + name: 'Zabbix', + sort: 100, + info: { + logos: { + small: '', + }, + }, + }, + { + id: 'piechart', + hideFromList: false, + name: 'Piechart', + sort: 100, + info: { + logos: { + small: '', + }, + }, + }, + ]; + + dashboardMock = { toggleRow: jest.fn() }; + + getPanelContainer = jest.fn().mockReturnValue({ + getDashboard: jest.fn().mockReturnValue(dashboardMock), + getPanelLoader: jest.fn(), + }); + + panel = new PanelModel({ collapsed: false }); + wrapper = shallow(); + }); + + it('should fetch all panels sorted with core plugins first', () => { + //console.log(wrapper.debug()); + //console.log(wrapper.find('.add-panel__item').get(0).props.title); + expect(wrapper.find('.add-panel__item').get(1).props.title).toBe('Singlestat'); + expect(wrapper.find('.add-panel__item').get(4).props.title).toBe('Piechart'); + }); + + it('should filter', () => { + wrapper.find('input').simulate('change', { 
target: { value: 'p' } }); + + expect(wrapper.find('.add-panel__item').get(1).props.title).toBe('Piechart'); + expect(wrapper.find('.add-panel__item').get(0).props.title).toBe('Graph'); + }); +}); diff --git a/public/app/features/dashboard/specs/DashboardRow.jest.tsx b/public/app/features/dashboard/specs/DashboardRow.jest.tsx index c0ac172aa26..8424346b0c5 100644 --- a/public/app/features/dashboard/specs/DashboardRow.jest.tsx +++ b/public/app/features/dashboard/specs/DashboardRow.jest.tsx @@ -2,17 +2,15 @@ import React from 'react'; import { shallow } from 'enzyme'; import { DashboardRow } from '../dashgrid/DashboardRow'; import { PanelModel } from '../panel_model'; -import config from '../../../core/config'; describe('DashboardRow', () => { let wrapper, panel, getPanelContainer, dashboardMock; beforeEach(() => { - dashboardMock = { toggleRow: jest.fn() }; - - config.bootData = { - user: { - orgRole: 'Admin', + dashboardMock = { + toggleRow: jest.fn(), + meta: { + canEdit: true, }, }; @@ -41,8 +39,8 @@ describe('DashboardRow', () => { expect(wrapper.find('.dashboard-row__actions .pointer')).toHaveLength(2); }); - it('should have zero actions as viewer', () => { - config.bootData.user.orgRole = 'Viewer'; + it('should have zero actions when cannot edit', () => { + dashboardMock.meta.canEdit = false; panel = new PanelModel({ collapsed: false }); wrapper = shallow(); expect(wrapper.find('.dashboard-row__actions .pointer')).toHaveLength(0); diff --git a/public/app/features/dashboard/specs/change_tracker.jest.ts b/public/app/features/dashboard/specs/change_tracker.jest.ts new file mode 100644 index 00000000000..5ec84aadbd0 --- /dev/null +++ b/public/app/features/dashboard/specs/change_tracker.jest.ts @@ -0,0 +1,99 @@ +import { ChangeTracker } from 'app/features/dashboard/change_tracker'; +import { contextSrv } from 'app/core/services/context_srv'; +import { DashboardModel } from '../dashboard_model'; +import { PanelModel } from '../panel_model'; + +jest.mock('app/core/services/context_srv', () => ({ + contextSrv: { + user: { orgId: 1 }, + }, +})); + +describe('ChangeTracker', () => { + let rootScope; + let location; + let timeout; + let tracker: ChangeTracker; + let dash; + let scope; + + beforeEach(() => { + dash = new DashboardModel({ + refresh: false, + panels: [ + { + id: 1, + type: 'graph', + gridPos: { x: 0, y: 0, w: 24, h: 6 }, + legend: { sortDesc: false }, + }, + { + id: 2, + type: 'row', + gridPos: { x: 0, y: 6, w: 24, h: 2 }, + collapsed: true, + panels: [ + { id: 3, type: 'graph', gridPos: { x: 0, y: 6, w: 12, h: 2 } }, + { id: 4, type: 'graph', gridPos: { x: 12, y: 6, w: 12, h: 2 } }, + ], + }, + { id: 5, type: 'row', gridPos: { x: 0, y: 6, w: 1, h: 1 } }, + ], + }); + + scope = { + appEvent: jest.fn(), + onAppEvent: jest.fn(), + $on: jest.fn(), + }; + + rootScope = { + appEvent: jest.fn(), + onAppEvent: jest.fn(), + $on: jest.fn(), + }; + + location = { + path: jest.fn(), + }; + + tracker = new ChangeTracker(dash, scope, undefined, location, window, timeout, contextSrv, rootScope); + }); + + it('No changes should not have changes', () => { + expect(tracker.hasChanges()).toBe(false); + }); + + it('Simple change should be registered', () => { + dash.title = 'google'; + expect(tracker.hasChanges()).toBe(true); + }); + + it('Should ignore a lot of changes', () => { + dash.time = { from: '1h' }; + dash.refresh = true; + dash.schemaVersion = 10; + expect(tracker.hasChanges()).toBe(false); + }); + + it('Should ignore .iteration changes', () => { + dash.iteration = new Date().getTime() + 
1; + expect(tracker.hasChanges()).toBe(false); + }); + + it('Should ignore row collapse change', () => { + dash.toggleRow(dash.panels[1]); + expect(tracker.hasChanges()).toBe(false); + }); + + it('Should ignore panel legend changes', () => { + dash.panels[0].legend.sortDesc = true; + dash.panels[0].legend.sort = 'avg'; + expect(tracker.hasChanges()).toBe(false); + }); + + it('Should ignore panel repeats', () => { + dash.panels.push(new PanelModel({ repeatPanelId: 10 })); + expect(tracker.hasChanges()).toBe(false); + }); +}); diff --git a/public/app/features/dashboard/specs/dashboard_import_ctrl.jest.ts b/public/app/features/dashboard/specs/dashboard_import_ctrl.jest.ts index 1cb59ef5bac..737eb360461 100644 --- a/public/app/features/dashboard/specs/dashboard_import_ctrl.jest.ts +++ b/public/app/features/dashboard/specs/dashboard_import_ctrl.jest.ts @@ -56,7 +56,7 @@ describe('DashboardImportCtrl', function() { }); }); - describe('when specifing grafana.com url', function() { + describe('when specifying grafana.com url', function() { beforeEach(function() { ctx.ctrl.gnetUrl = 'http://grafana.com/dashboards/123'; // setup api mock @@ -73,7 +73,7 @@ describe('DashboardImportCtrl', function() { }); }); - describe('when specifing dashbord id', function() { + describe('when specifying dashboard id', function() { beforeEach(function() { ctx.ctrl.gnetUrl = '2342'; // setup api mock diff --git a/public/app/features/dashboard/specs/dashboard_model.jest.ts b/public/app/features/dashboard/specs/dashboard_model.jest.ts index 99fe727c49d..6f0b45c9ba8 100644 --- a/public/app/features/dashboard/specs/dashboard_model.jest.ts +++ b/public/app/features/dashboard/specs/dashboard_model.jest.ts @@ -374,14 +374,14 @@ describe('DashboardModel', function() { { id: 2, type: 'row', - gridPos: { x: 0, y: 6, w: 24, h: 2 }, + gridPos: { x: 0, y: 6, w: 24, h: 1 }, collapsed: true, panels: [ - { id: 3, type: 'graph', gridPos: { x: 0, y: 2, w: 12, h: 2 } }, - { id: 4, type: 'graph', gridPos: { x: 12, y: 2, w: 12, h: 2 } }, + { id: 3, type: 'graph', gridPos: { x: 0, y: 7, w: 12, h: 2 } }, + { id: 4, type: 'graph', gridPos: { x: 12, y: 7, w: 12, h: 2 } }, ], }, - { id: 5, type: 'row', gridPos: { x: 0, y: 6, w: 1, h: 1 } }, + { id: 5, type: 'row', gridPos: { x: 0, y: 7, w: 1, h: 1 } }, ], }); dashboard.toggleRow(dashboard.panels[1]); @@ -399,7 +399,7 @@ describe('DashboardModel', function() { it('should position them below row', function() { expect(dashboard.panels[2].gridPos).toMatchObject({ x: 0, - y: 8, + y: 7, w: 12, h: 2, }); @@ -408,7 +408,7 @@ describe('DashboardModel', function() { it('should move panels below down', function() { expect(dashboard.panels[4].gridPos).toMatchObject({ x: 0, - y: 10, + y: 9, w: 1, h: 1, }); diff --git a/public/app/features/dashboard/specs/save_provisioned_modal.jest.ts b/public/app/features/dashboard/specs/save_provisioned_modal.jest.ts new file mode 100644 index 00000000000..ce921cee8c8 --- /dev/null +++ b/public/app/features/dashboard/specs/save_provisioned_modal.jest.ts @@ -0,0 +1,30 @@ +import { SaveProvisionedDashboardModalCtrl } from '../save_provisioned_modal'; + +describe('SaveProvisionedDashboardModalCtrl', () => { + var json = { + title: 'name', + id: 5, + }; + + var mockDashboardSrv = { + getCurrent: function() { + return { + id: 5, + meta: {}, + getSaveModelClone: function() { + return json; + }, + }; + }, + }; + + var ctrl = new SaveProvisionedDashboardModalCtrl(mockDashboardSrv); + + it('should remove id from dashboard model', () => { + expect(ctrl.dash.id).toBeUndefined(); + 
}); + + it('should remove id from dashboard model in clipboard json', () => { + expect(ctrl.getJsonForClipboard()).toBe(JSON.stringify({ title: 'name' }, null, 2)); + }); +}); diff --git a/public/app/features/dashboard/specs/time_srv_specs.ts b/public/app/features/dashboard/specs/time_srv_specs.ts index ca75f0ffcf9..6e180679ff2 100644 --- a/public/app/features/dashboard/specs/time_srv_specs.ts +++ b/public/app/features/dashboard/specs/time_srv_specs.ts @@ -44,7 +44,7 @@ describe('timeSrv', function() { expect(time.raw.to).to.be('now'); }); - it('should handle formated dates', function() { + it('should handle formatted dates', function() { ctx.$location.search({ from: '20140410T052010', to: '20140520T031022' }); ctx.service.init(_dashboard); var time = ctx.service.timeRange(true); @@ -52,7 +52,7 @@ describe('timeSrv', function() { expect(time.to.valueOf()).to.equal(new Date('2014-05-20T03:10:22Z').getTime()); }); - it('should handle formated dates without time', function() { + it('should handle formatted dates without time', function() { ctx.$location.search({ from: '20140410', to: '20140520' }); ctx.service.init(_dashboard); var time = ctx.service.timeRange(true); diff --git a/public/app/features/dashboard/specs/unsaved_changes_srv_specs.ts b/public/app/features/dashboard/specs/unsaved_changes_srv_specs.ts deleted file mode 100644 index 8bd639de681..00000000000 --- a/public/app/features/dashboard/specs/unsaved_changes_srv_specs.ts +++ /dev/null @@ -1,95 +0,0 @@ -import { describe, beforeEach, it, expect, sinon, angularMocks } from 'test/lib/common'; -import { Tracker } from 'app/features/dashboard/unsaved_changes_srv'; -import 'app/features/dashboard/dashboard_srv'; -import { contextSrv } from 'app/core/core'; - -describe('unsavedChangesSrv', function() { - var _dashboardSrv; - var _contextSrvStub = { isEditor: true }; - var _rootScope; - var _location; - var _timeout; - var _window; - var tracker; - var dash; - var scope; - - beforeEach(angularMocks.module('grafana.core')); - beforeEach(angularMocks.module('grafana.services')); - beforeEach( - angularMocks.module(function($provide) { - $provide.value('contextSrv', _contextSrvStub); - $provide.value('$window', {}); - }) - ); - - beforeEach( - angularMocks.inject(function($location, $rootScope, dashboardSrv, $timeout, $window) { - _dashboardSrv = dashboardSrv; - _rootScope = $rootScope; - _location = $location; - _timeout = $timeout; - _window = $window; - }) - ); - - beforeEach(function() { - dash = _dashboardSrv.create({ - refresh: false, - panels: [{ test: 'asd', legend: {} }], - rows: [ - { - panels: [{ test: 'asd', legend: {} }], - }, - ], - }); - scope = _rootScope.$new(); - scope.appEvent = sinon.spy(); - scope.onAppEvent = sinon.spy(); - - tracker = new Tracker(dash, scope, undefined, _location, _window, _timeout, contextSrv, _rootScope); - }); - - it('No changes should not have changes', function() { - expect(tracker.hasChanges()).to.be(false); - }); - - it('Simple change should be registered', function() { - dash.property = 'google'; - expect(tracker.hasChanges()).to.be(true); - }); - - it('Should ignore a lot of changes', function() { - dash.time = { from: '1h' }; - dash.refresh = true; - dash.schemaVersion = 10; - expect(tracker.hasChanges()).to.be(false); - }); - - it('Should ignore .iteration changes', () => { - dash.iteration = new Date().getTime() + 1; - expect(tracker.hasChanges()).to.be(false); - }); - - it.skip('Should ignore row collapse change', function() { - dash.rows[0].collapse = true; - 
expect(tracker.hasChanges()).to.be(false); - }); - - it('Should ignore panel legend changes', function() { - dash.panels[0].legend.sortDesc = true; - dash.panels[0].legend.sort = 'avg'; - expect(tracker.hasChanges()).to.be(false); - }); - - it.skip('Should ignore panel repeats', function() { - dash.rows[0].panels.push({ repeatPanelId: 10 }); - expect(tracker.hasChanges()).to.be(false); - }); - - it.skip('Should ignore row repeats', function() { - dash.addEmptyRow(); - dash.rows[1].repeatRowId = 10; - expect(tracker.hasChanges()).to.be(false); - }); -}); diff --git a/public/app/features/dashboard/timepicker/timepicker.ts b/public/app/features/dashboard/timepicker/timepicker.ts index 2434e691515..33cfff92e7f 100644 --- a/public/app/features/dashboard/timepicker/timepicker.ts +++ b/public/app/features/dashboard/timepicker/timepicker.ts @@ -22,7 +22,6 @@ export class TimePickerCtrl { refresh: any; isUtc: boolean; firstDayOfWeek: number; - closeDropdown: any; isOpen: boolean; /** @ngInject */ @@ -32,6 +31,7 @@ export class TimePickerCtrl { $rootScope.onAppEvent('shift-time-forward', () => this.move(1), $scope); $rootScope.onAppEvent('shift-time-backward', () => this.move(-1), $scope); $rootScope.onAppEvent('refresh', this.onRefresh.bind(this), $scope); + $rootScope.onAppEvent('closeTimepicker', this.openDropdown.bind(this), $scope); // init options this.panel = this.dashboard.timepicker; @@ -96,7 +96,7 @@ export class TimePickerCtrl { openDropdown() { if (this.isOpen) { - this.isOpen = false; + this.closeDropdown(); return; } @@ -112,6 +112,12 @@ export class TimePickerCtrl { this.refresh.options.unshift({ text: 'off' }); this.isOpen = true; + this.$rootScope.appEvent('timepickerOpen'); + } + + closeDropdown() { + this.isOpen = false; + this.$rootScope.appEvent('timepickerClosed'); } applyCustom() { @@ -120,7 +126,7 @@ export class TimePickerCtrl { } this.timeSrv.setTime(this.editTimeRaw); - this.isOpen = false; + this.closeDropdown(); } absoluteFromChanged() { @@ -143,7 +149,7 @@ export class TimePickerCtrl { } this.timeSrv.setTime(range); - this.isOpen = false; + this.closeDropdown(); } } diff --git a/public/app/features/dashboard/unsaved_changes_srv.ts b/public/app/features/dashboard/unsaved_changes_srv.ts index ebf0101cee0..0406e6a55d7 100644 --- a/public/app/features/dashboard/unsaved_changes_srv.ts +++ b/public/app/features/dashboard/unsaved_changes_srv.ts @@ -1,217 +1,10 @@ import angular from 'angular'; -import _ from 'lodash'; - -export class Tracker { - current: any; - originalPath: any; - scope: any; - original: any; - next: any; - $window: any; - - /** @ngInject */ - constructor( - dashboard, - scope, - originalCopyDelay, - private $location, - $window, - private $timeout, - private contextSrv, - private $rootScope - ) { - this.$location = $location; - this.$window = $window; - - this.current = dashboard; - this.originalPath = $location.path(); - this.scope = scope; - - // register events - scope.onAppEvent('dashboard-saved', () => { - this.original = this.current.getSaveModelClone(); - this.originalPath = $location.path(); - }); - - $window.onbeforeunload = () => { - if (this.ignoreChanges()) { - return null; - } - if (this.hasChanges()) { - return 'There are unsaved changes to this dashboard'; - } - return null; - }; - - scope.$on('$locationChangeStart', (event, next) => { - // check if we should look for changes - if (this.originalPath === $location.path()) { - return true; - } - if (this.ignoreChanges()) { - return true; - } - - if (this.hasChanges()) { - event.preventDefault(); 
- this.next = next; - - this.$timeout(() => { - this.open_modal(); - }); - } - return false; - }); - - if (originalCopyDelay) { - this.$timeout(() => { - // wait for different services to patch the dashboard (missing properties) - this.original = dashboard.getSaveModelClone(); - }, originalCopyDelay); - } else { - this.original = dashboard.getSaveModelClone(); - } - } - - // for some dashboards and users - // changes should be ignored - ignoreChanges() { - if (!this.original) { - return true; - } - if (!this.contextSrv.isEditor) { - return true; - } - if (!this.current || !this.current.meta) { - return true; - } - - var meta = this.current.meta; - return !meta.canSave || meta.fromScript || meta.fromFile; - } - - // remove stuff that should not count in diff - cleanDashboardFromIgnoredChanges(dash) { - // ignore time and refresh - dash.time = 0; - dash.refresh = 0; - dash.schemaVersion = 0; - - // ignore iteration property - delete dash.iteration; - - // filter row and panels properties that should be ignored - dash.rows = _.filter(dash.rows, function(row) { - if (row.repeatRowId) { - return false; - } - - row.panels = _.filter(row.panels, function(panel) { - if (panel.repeatPanelId) { - return false; - } - - // remove scopedVars - panel.scopedVars = null; - - // ignore span changes - panel.span = null; - - // ignore panel legend sort - if (panel.legend) { - delete panel.legend.sort; - delete panel.legend.sortDesc; - } - - return true; - }); - - // ignore collapse state - row.collapse = false; - return true; - }); - - dash.panels = _.filter(dash.panels, panel => { - if (panel.repeatPanelId) { - return false; - } - - // remove scopedVars - panel.scopedVars = null; - - // ignore panel legend sort - if (panel.legend) { - delete panel.legend.sort; - delete panel.legend.sortDesc; - } - - return true; - }); - - // ignore template variable values - _.each(dash.templating.list, function(value) { - value.current = null; - value.options = null; - value.filters = null; - }); - } - - hasChanges() { - var current = this.current.getSaveModelClone(); - var original = this.original; - - this.cleanDashboardFromIgnoredChanges(current); - this.cleanDashboardFromIgnoredChanges(original); - - var currentTimepicker = _.find(current.nav, { type: 'timepicker' }); - var originalTimepicker = _.find(original.nav, { type: 'timepicker' }); - - if (currentTimepicker && originalTimepicker) { - currentTimepicker.now = originalTimepicker.now; - } - - var currentJson = angular.toJson(current); - var originalJson = angular.toJson(original); - - return currentJson !== originalJson; - } - - discardChanges() { - this.original = null; - this.gotoNext(); - } - - open_modal() { - this.$rootScope.appEvent('show-modal', { - templateHtml: '', - modalClass: 'modal--narrow confirm-modal', - }); - } - - saveChanges() { - var self = this; - var cancel = this.$rootScope.$on('dashboard-saved', () => { - cancel(); - this.$timeout(() => { - self.gotoNext(); - }); - }); - - this.$rootScope.appEvent('save-dashboard'); - } - - gotoNext() { - var baseLen = this.$location.absUrl().length - this.$location.url().length; - var nextUrl = this.next.substring(baseLen); - this.$location.url(nextUrl); - } -} +import { ChangeTracker } from './change_tracker'; /** @ngInject */ export function unsavedChangesSrv($rootScope, $q, $location, $timeout, contextSrv, dashboardSrv, $window) { - this.Tracker = Tracker; this.init = function(dashboard, scope) { - this.tracker = new Tracker(dashboard, scope, 1000, $location, $window, $timeout, contextSrv, $rootScope); + 
this.tracker = new ChangeTracker(dashboard, scope, 1000, $location, $window, $timeout, contextSrv, $rootScope); return this.tracker; }; } diff --git a/public/app/features/dashboard/view_state_srv.ts b/public/app/features/dashboard/view_state_srv.ts index 576b8b6fce8..1ed2d61df71 100644 --- a/public/app/features/dashboard/view_state_srv.ts +++ b/public/app/features/dashboard/view_state_srv.ts @@ -38,7 +38,7 @@ export class DashboardViewState { }); // this marks changes to location during this digest cycle as not to add history item - // dont want url changes like adding orgId to add browser history + // don't want url changes like adding orgId to add browser history $location.replace(); this.update(this.getQueryStringState()); } @@ -196,9 +196,10 @@ export class DashboardViewState { this.oldTimeRange = ctrl.range; this.fullscreenPanel = panelScope; + // Firefox doesn't return scrollTop position properly if 'dash-scroll' is emitted after setViewMode() + this.$scope.appEvent('dash-scroll', { animate: false, pos: 0 }); this.dashboard.setViewMode(ctrl.panel, true, ctrl.editMode); this.$scope.appEvent('panel-fullscreen-enter', { panelId: ctrl.panel.id }); - this.$scope.appEvent('dash-scroll', { animate: false, pos: 0 }); } registerPanel(panelScope) { diff --git a/public/app/features/dashlinks/module.ts b/public/app/features/dashlinks/module.ts index 8fa110fe6b4..148d32f4399 100644 --- a/public/app/features/dashlinks/module.ts +++ b/public/app/features/dashlinks/module.ts @@ -15,7 +15,7 @@ function dashLinksContainer() { } /** @ngInject */ -function dashLink($compile, linkSrv) { +function dashLink($compile, $sanitize, linkSrv) { return { restrict: 'E', link: function(scope, elem) { @@ -49,10 +49,21 @@ function dashLink($compile, linkSrv) { var linkInfo = linkSrv.getAnchorInfo(link); span.text(linkInfo.title); anchor.attr('href', linkInfo.href); + sanitizeAnchor(); + + // tooltip + elem.find('a').tooltip({ + title: $sanitize(scope.link.tooltip), + html: true, + container: 'body', + }); + } + + function sanitizeAnchor() { + const anchorSanitized = $sanitize(anchor.parent().html()); + anchor.parent().html(anchorSanitized); } - // tooltip - elem.find('a').tooltip({ title: scope.link.tooltip, html: true, container: 'body' }); icon.attr('class', 'fa fa-fw ' + scope.link.icon); anchor.attr('target', scope.link.target); diff --git a/public/app/features/org/partials/newOrg.html b/public/app/features/org/partials/newOrg.html index 424c55d6eb7..9777107c31a 100644 --- a/public/app/features/org/partials/newOrg.html +++ b/public/app/features/org/partials/newOrg.html @@ -5,7 +5,7 @@ New Organization -

    Each organization contains their own dashboards, data sources and configuration, and cannot be shared between orgs. While users may belong to more than one, mutiple organization are most frequently used in multi-tenant deployments.

    +

Each organization contains its own dashboards, data sources, and configuration, which cannot be shared between orgs. While users may belong to more than one organization, multiple organizations are most frequently used in multi-tenant deployments.
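// Editor's note: a simplified sketch of the XSS hardening in dashlinks/module.ts
// above. Link titles and tooltips may contain user-controlled values, so the
// rendered anchor markup is passed through Angular's $sanitize before being
// re-inserted into the DOM. The type alias stands in for the injected service.
type SanitizeFn = (html: string) => string;

function sanitizeAnchorHtml(anchorParentHtml: string, $sanitize: SanitizeFn): string {
  // $sanitize strips script tags, inline event handlers and javascript: URLs
  return $sanitize(anchorParentHtml);
}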

    diff --git a/public/app/features/panel/all.js b/public/app/features/panel/all.js deleted file mode 100644 index aaa6d0d4ed0..00000000000 --- a/public/app/features/panel/all.js +++ /dev/null @@ -1,9 +0,0 @@ -define([ - './panel_header', - './panel_directive', - './solo_panel_ctrl', - './query_ctrl', - './panel_editor_tab', - './query_editor_row', - './query_troubleshooter', -], function () {}); diff --git a/public/app/features/panel/all.ts b/public/app/features/panel/all.ts new file mode 100644 index 00000000000..bdf1a097352 --- /dev/null +++ b/public/app/features/panel/all.ts @@ -0,0 +1,7 @@ +import './panel_header'; +import './panel_directive'; +import './solo_panel_ctrl'; +import './query_ctrl'; +import './panel_editor_tab'; +import './query_editor_row'; +import './query_troubleshooter'; diff --git a/public/app/features/panel/metrics_panel_ctrl.ts b/public/app/features/panel/metrics_panel_ctrl.ts index 177f0c7bf00..acf46a193e8 100644 --- a/public/app/features/panel/metrics_panel_ctrl.ts +++ b/public/app/features/panel/metrics_panel_ctrl.ts @@ -6,6 +6,7 @@ import { PanelCtrl } from 'app/features/panel/panel_ctrl'; import * as rangeUtil from 'app/core/utils/rangeutil'; import * as dateMath from 'app/core/utils/datemath'; +import { encodePathComponent } from 'app/core/utils/location_util'; import { metricsTabDirective } from './metrics_tab'; @@ -73,7 +74,7 @@ class MetricsPanelCtrl extends PanelCtrl { if (this.panel.snapshotData) { this.updateTimeRange(); var data = this.panel.snapshotData; - // backward compatability + // backward compatibility if (!_.isArray(data)) { data = data.data; } @@ -309,6 +310,24 @@ class MetricsPanelCtrl extends PanelCtrl { this.refresh(); } + getAdditionalMenuItems() { + const items = []; + if (this.datasource.supportsExplore) { + items.push({ + text: 'Explore', + click: 'ctrl.explore();', + icon: 'fa fa-fw fa-rocket', + shortcut: 'x', + }); + } + return items; + } + + explore() { + const exploreState = encodePathComponent(JSON.stringify(this.datasource.getExploreState(this.panel))); + this.$location.url(`/explore/${exploreState}`); + } + addQuery(target) { target.refId = this.dashboard.getNextQueryLetter(this.panel); diff --git a/public/app/features/panel/panel_ctrl.ts b/public/app/features/panel/panel_ctrl.ts index f54877c2c37..6402227164f 100644 --- a/public/app/features/panel/panel_ctrl.ts +++ b/public/app/features/panel/panel_ctrl.ts @@ -22,6 +22,7 @@ export class PanelCtrl { editorTabs: any; $scope: any; $injector: any; + $location: any; $timeout: any; fullscreen: boolean; inspector: any; @@ -35,6 +36,7 @@ export class PanelCtrl { constructor($scope, $injector) { this.$injector = $injector; + this.$location = $injector.get('$location'); this.$scope = $scope; this.$timeout = $injector.get('$timeout'); this.editorTabIndex = 0; @@ -161,6 +163,9 @@ export class PanelCtrl { shortcut: 'p s', }); + // Additional items from sub-class + menu.push(...this.getAdditionalMenuItems()); + let extendedMenu = this.getExtendedMenu(); menu.push({ text: 'More ...', @@ -194,8 +199,8 @@ export class PanelCtrl { }); menu.push({ - text: 'Add to Panel List', - click: 'ctrl.addToPanelList()', + text: 'Copy', + click: 'ctrl.copyPanel()', role: 'Editor', }); } @@ -209,6 +214,11 @@ export class PanelCtrl { return menu; } + // Override in sub-class to add items before extended menu + getAdditionalMenuItems() { + return []; + } + otherPanelInFullscreenMode() { return this.dashboard.meta.fullscreen && !this.fullscreen; } @@ -260,9 +270,9 @@ export class PanelCtrl { }); } - 
addToPanelList() { + copyPanel() { store.set(LS_PANEL_COPY_KEY, JSON.stringify(this.panel.getSaveModel())); - appEvents.emit('alert-success', ['Panel temporarily added to panel list']); + appEvents.emit('alert-success', ['Panel copied. Open Add Panel to paste']); } replacePanel(newPanel, oldPanel) { @@ -314,6 +324,7 @@ export class PanelCtrl { } var linkSrv = this.$injector.get('linkSrv'); + var sanitize = this.$injector.get('$sanitize'); var templateSrv = this.$injector.get('templateSrv'); var interpolatedMarkdown = templateSrv.replace(markdown, this.panel.scopedVars); var html = '
    '; @@ -336,7 +347,8 @@ export class PanelCtrl { html += ''; } - return html + '
    '; + html += '
    '; + return sanitize(html); } openInspector() { diff --git a/public/app/features/panel/panel_directive.ts b/public/app/features/panel/panel_directive.ts index dec7868a553..e549ca262d3 100644 --- a/public/app/features/panel/panel_directive.ts +++ b/public/app/features/panel/panel_directive.ts @@ -1,6 +1,7 @@ import angular from 'angular'; +import $ from 'jquery'; import Drop from 'tether-drop'; -import PerfectScrollbar from 'perfect-scrollbar'; +import baron from 'baron'; var module = angular.module('grafana.directives'); @@ -86,6 +87,9 @@ module.directive('grafanaPanel', function($rootScope, $document, $timeout) { function panelHeightUpdated() { panelContent.css({ height: ctrl.height + 'px' }); + } + + function resizeScrollableContent() { if (panelScrollbar) { panelScrollbar.update(); } @@ -100,9 +104,30 @@ module.directive('grafanaPanel', function($rootScope, $document, $timeout) { // update scrollbar after mounting ctrl.events.on('component-did-mount', () => { if (ctrl.__proto__.constructor.scrollable) { - panelScrollbar = new PerfectScrollbar(panelContent[0], { - wheelPropagation: true, + const scrollRootClass = 'baron baron__root baron__clipper panel-content--scrollable'; + const scrollerClass = 'baron__scroller'; + const scrollBarHTML = ` +
    +
    +
    + `; + + let scrollRoot = panelContent; + let scroller = panelContent.find(':first').find(':first'); + + scrollRoot.addClass(scrollRootClass); + $(scrollBarHTML).appendTo(scrollRoot); + scroller.addClass(scrollerClass); + + panelScrollbar = baron({ + root: scrollRoot[0], + scroller: scroller[0], + bar: '.baron__bar', + barOnCls: '_scrollbar', + scrollingCls: '_scrolling', }); + + panelScrollbar.scroll(); } }); @@ -110,6 +135,7 @@ module.directive('grafanaPanel', function($rootScope, $document, $timeout) { ctrl.calculatePanelHeight(); panelHeightUpdated(); $timeout(() => { + resizeScrollableContent(); ctrl.render(); }); }); @@ -199,7 +225,7 @@ module.directive('grafanaPanel', function($rootScope, $document, $timeout) { } if (panelScrollbar) { - panelScrollbar.update(); + panelScrollbar.dispose(); } }); }, diff --git a/public/app/features/playlist/all.js b/public/app/features/playlist/all.js deleted file mode 100644 index 3b07b0d74c5..00000000000 --- a/public/app/features/playlist/all.js +++ /dev/null @@ -1,7 +0,0 @@ -define([ - './playlists_ctrl', - './playlist_search', - './playlist_srv', - './playlist_edit_ctrl', - './playlist_routes' -], function () {}); diff --git a/public/app/features/playlist/all.ts b/public/app/features/playlist/all.ts new file mode 100644 index 00000000000..eb427b883ca --- /dev/null +++ b/public/app/features/playlist/all.ts @@ -0,0 +1,5 @@ +import './playlists_ctrl'; +import './playlist_search'; +import './playlist_srv'; +import './playlist_edit_ctrl'; +import './playlist_routes'; diff --git a/public/app/features/playlist/playlist_routes.js b/public/app/features/playlist/playlist_routes.js deleted file mode 100644 index 193b0b52b20..00000000000 --- a/public/app/features/playlist/playlist_routes.js +++ /dev/null @@ -1,39 +0,0 @@ -define([ - 'angular', - 'lodash' -], -function (angular) { - 'use strict'; - - var module = angular.module('grafana.routes'); - - module.config(function($routeProvider) { - $routeProvider - .when('/playlists', { - templateUrl: 'public/app/features/playlist/partials/playlists.html', - controllerAs: 'ctrl', - controller : 'PlaylistsCtrl' - }) - .when('/playlists/create', { - templateUrl: 'public/app/features/playlist/partials/playlist.html', - controllerAs: 'ctrl', - controller : 'PlaylistEditCtrl' - }) - .when('/playlists/edit/:id', { - templateUrl: 'public/app/features/playlist/partials/playlist.html', - controllerAs: 'ctrl', - controller : 'PlaylistEditCtrl' - }) - .when('/playlists/play/:id', { - templateUrl: 'public/app/features/playlist/partials/playlists.html', - controllerAs: 'ctrl', - controller : 'PlaylistsCtrl', - resolve: { - init: function(playlistSrv, $route) { - var playlistId = $route.current.params.id; - playlistSrv.start(playlistId); - } - } - }); - }); -}); diff --git a/public/app/features/playlist/playlist_routes.ts b/public/app/features/playlist/playlist_routes.ts new file mode 100644 index 00000000000..b898820e371 --- /dev/null +++ b/public/app/features/playlist/playlist_routes.ts @@ -0,0 +1,34 @@ +import angular from 'angular'; + +/** @ngInject */ +function grafanaRoutes($routeProvider) { + $routeProvider + .when('/playlists', { + templateUrl: 'public/app/features/playlist/partials/playlists.html', + controllerAs: 'ctrl', + controller: 'PlaylistsCtrl', + }) + .when('/playlists/create', { + templateUrl: 'public/app/features/playlist/partials/playlist.html', + controllerAs: 'ctrl', + controller: 'PlaylistEditCtrl', + }) + .when('/playlists/edit/:id', { + templateUrl: 
'public/app/features/playlist/partials/playlist.html', + controllerAs: 'ctrl', + controller: 'PlaylistEditCtrl', + }) + .when('/playlists/play/:id', { + templateUrl: 'public/app/features/playlist/partials/playlists.html', + controllerAs: 'ctrl', + controller: 'PlaylistsCtrl', + resolve: { + init: function(playlistSrv, $route) { + let playlistId = $route.current.params.id; + playlistSrv.start(playlistId); + }, + }, + }); +} + +angular.module('grafana.routes').config(grafanaRoutes); diff --git a/public/app/features/plugins/datasource_srv.ts b/public/app/features/plugins/datasource_srv.ts index fb7a9ece37a..aef43a4760b 100644 --- a/public/app/features/plugins/datasource_srv.ts +++ b/public/app/features/plugins/datasource_srv.ts @@ -15,7 +15,7 @@ export class DatasourceSrv { this.datasources = {}; } - get(name) { + get(name?) { if (!name) { return this.get(config.defaultDatasource); } diff --git a/public/app/features/plugins/partials/ds_http_settings.html b/public/app/features/plugins/partials/ds_http_settings.html index 03df677ba13..b9f5683129c 100644 --- a/public/app/features/plugins/partials/ds_http_settings.html +++ b/public/app/features/plugins/partials/ds_http_settings.html @@ -1,5 +1,3 @@ - -

    HTTP

    @@ -13,12 +11,12 @@

    Specify a complete HTTP URL (for example http://your_server:8080)

- Your access method is Direct, this means the URL + Your access method is Browser, which means the URL needs to be accessible from the browser. - Your access method is currently Proxy, this means the URL - needs to be accessible from the grafana backend. + Your access method is Server, which means the URL + needs to be accessible from the Grafana backend/server.
    @@ -27,14 +25,38 @@
    Access -
    - - - Direct = URL is used directly from browser
    - Proxy = Grafana backend will proxy the request -
    +
    +
    +
    + +
    +
    + +
    +
    +

+ Access mode controls how requests to the data source will be handled. + Server should be the preferred way if nothing else is stated. +

    +
    Server access mode (Default):
    +

+ All requests will be made from the browser to the Grafana backend/server, which in turn will forward the requests to the data source, + thereby circumventing possible Cross-Origin Resource Sharing (CORS) requirements. + The URL needs to be accessible from the Grafana backend/server if you select this access mode. +

    +
    Browser access mode:
    +

    + All requests will be made from the browser directly to the data source and may be subject to + Cross-Origin Resource Sharing (CORS) requirements. The URL needs to be accessible from the browser if you select this + access mode. +

    +
    @@ -135,4 +157,3 @@
    - diff --git a/public/app/features/templating/datasource_variable.ts b/public/app/features/templating/datasource_variable.ts index 0c5b226c372..4c326a94e3b 100644 --- a/public/app/features/templating/datasource_variable.ts +++ b/public/app/features/templating/datasource_variable.ts @@ -29,7 +29,7 @@ export class DatasourceVariable implements Variable { getSaveModel() { assignModelProperties(this.model, this, this.defaults); - // dont persist options + // don't persist options this.model.options = []; return this.model; } diff --git a/public/app/features/templating/editor_ctrl.ts b/public/app/features/templating/editor_ctrl.ts index 428770a21e5..75a84cca2bf 100644 --- a/public/app/features/templating/editor_ctrl.ts +++ b/public/app/features/templating/editor_ctrl.ts @@ -10,6 +10,7 @@ export class VariableEditorCtrl { $scope.ctrl = {}; $scope.namePattern = /^(?!__).*$/; $scope._ = _; + $scope.optionsLimit = 20; $scope.refreshOptions = [ { value: 0, text: 'Never' }, @@ -23,6 +24,8 @@ export class VariableEditorCtrl { { value: 2, text: 'Alphabetical (desc)' }, { value: 3, text: 'Numerical (asc)' }, { value: 4, text: 'Numerical (desc)' }, + { value: 5, text: 'Alphabetical (case-insensitive, asc)' }, + { value: 6, text: 'Alphabetical (case-insensitive, desc)' }, ]; $scope.hideOptions = [{ value: 0, text: '' }, { value: 1, text: 'Label' }, { value: 2, text: 'Variable' }]; @@ -94,6 +97,7 @@ export class VariableEditorCtrl { }; $scope.runQuery = function() { + $scope.optionsLimit = 20; return variableSrv.updateOptions($scope.current).catch(err => { if (err.data && err.data.message) { err.message = err.data.message; @@ -163,6 +167,10 @@ export class VariableEditorCtrl { $scope.removeVariable = function(variable) { variableSrv.removeVariable(variable); }; + + $scope.showMoreOptions = function() { + $scope.optionsLimit += 20; + }; } } diff --git a/public/app/features/templating/partials/editor.html b/public/app/features/templating/partials/editor.html index d904aeb4789..0d8b0ace327 100644 --- a/public/app/features/templating/partials/editor.html +++ b/public/app/features/templating/partials/editor.html @@ -280,11 +280,14 @@
    -
    Preview of values (shows max 20)
    +
    Preview of values
    -
    - {{option.text}} -
    +
    + {{option.text}} +
    +
    + Show more +
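// Editor's note: a minimal sketch of the preview paging added in editor_ctrl.ts
// and editor.html above: only the first optionsLimit values are rendered
// (presumably via Angular's limitTo filter) and "Show more" grows the limit by 20.
// The standalone variables below are illustrative, not the real controller scope.
let optionsLimit = 20;
const previewOptions: Array<{ text: string }> = []; // filled by runQuery() in the controller

function visiblePreviewOptions() {
  return previewOptions.slice(0, optionsLimit); // what the template's limit achieves
}

function showMoreOptions() {
  optionsLimit += 20; // mirrors $scope.showMoreOptions in the patch
}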
    diff --git a/public/app/features/templating/query_variable.ts b/public/app/features/templating/query_variable.ts index 58c7b692581..54bd7bb660c 100644 --- a/public/app/features/templating/query_variable.ts +++ b/public/app/features/templating/query_variable.ts @@ -197,6 +197,10 @@ export class QueryVariable implements Variable { return parseInt(matches[1], 10); } }); + } else if (sortType === 3) { + options = _.sortBy(options, opt => { + return _.toLower(opt.text); + }); } if (reverseSort) { diff --git a/public/app/features/templating/specs/adhoc_variable.jest.ts b/public/app/features/templating/specs/adhoc_variable.jest.ts index 863c8401c50..a7b20e8d029 100644 --- a/public/app/features/templating/specs/adhoc_variable.jest.ts +++ b/public/app/features/templating/specs/adhoc_variable.jest.ts @@ -2,7 +2,7 @@ import { AdhocVariable } from '../adhoc_variable'; describe('AdhocVariable', function() { describe('when serializing to url', function() { - it('should set return key value and op seperated by pipe', function() { + it('should set return key value and op separated by pipe', function() { var variable = new AdhocVariable({ filters: [ { key: 'key1', operator: '=', value: 'value1' }, diff --git a/public/app/features/templating/specs/query_variable.jest.ts b/public/app/features/templating/specs/query_variable.jest.ts index 7840d9e4242..39c51874586 100644 --- a/public/app/features/templating/specs/query_variable.jest.ts +++ b/public/app/features/templating/specs/query_variable.jest.ts @@ -40,11 +40,11 @@ describe('QueryVariable', () => { }); describe('can convert and sort metric names', () => { - var variable = new QueryVariable({}, null, null, null, null); - variable.sort = 3; // Numerical (asc) + const variable = new QueryVariable({}, null, null, null, null); + let input; - describe('can sort a mixed array of metric variables', () => { - var input = [ + beforeEach(() => { + input = [ { text: '0', value: '0' }, { text: '1', value: '1' }, { text: null, value: 3 }, @@ -58,11 +58,18 @@ describe('QueryVariable', () => { { text: '', value: undefined }, { text: undefined, value: '' }, ]; + }); + + describe('can sort a mixed array of metric variables in numeric order', () => { + let result; + + beforeEach(() => { + variable.sort = 3; // Numerical (asc) + result = variable.metricNamesToVariableValues(input); + }); - var result = variable.metricNamesToVariableValues(input); it('should return in same order', () => { var i = 0; - expect(result.length).toBe(11); expect(result[i++].text).toBe(''); expect(result[i++].text).toBe('0'); @@ -73,5 +80,26 @@ describe('QueryVariable', () => { expect(result[i++].text).toBe('6'); }); }); + + describe('can sort a mixed array of metric variables in alphabetical order', () => { + let result; + + beforeEach(() => { + variable.sort = 5; // Alphabetical CI (asc) + result = variable.metricNamesToVariableValues(input); + }); + + it('should return in same order', () => { + var i = 0; + expect(result.length).toBe(11); + expect(result[i++].text).toBe(''); + expect(result[i++].text).toBe('0'); + expect(result[i++].text).toBe('1'); + expect(result[i++].text).toBe('10'); + expect(result[i++].text).toBe('3'); + expect(result[i++].text).toBe('4'); + expect(result[i++].text).toBe('5'); + }); + }); }); }); diff --git a/public/app/features/templating/specs/template_srv.jest.ts b/public/app/features/templating/specs/template_srv.jest.ts index f28fbf9ac64..59915776b4f 100644 --- a/public/app/features/templating/specs/template_srv.jest.ts +++ 
b/public/app/features/templating/specs/template_srv.jest.ts @@ -136,6 +136,11 @@ describe('templateSrv', function() { var target = _templateSrv.replace('this=${test:pipe}', {}); expect(target).toBe('this=value1|value2'); }); + + it('should replace ${test:pipe} with piped value and $test with globbed value', function() { + var target = _templateSrv.replace('${test:pipe},$test', {}, 'glob'); + expect(target).toBe('value1|value2,{value1,value2}'); + }); }); describe('variable with all option', function() { @@ -164,6 +169,11 @@ describe('templateSrv', function() { var target = _templateSrv.replace('this.${test:glob}.filters', {}); expect(target).toBe('this.{value1,value2}.filters'); }); + + it('should replace ${test:pipe} with piped value and $test with globbed value', function() { + var target = _templateSrv.replace('${test:pipe},$test', {}, 'glob'); + expect(target).toBe('value1|value2,{value1,value2}'); + }); }); describe('variable with all option and custom value', function() { @@ -282,7 +292,7 @@ describe('templateSrv', function() { }); }); - describe('can hightlight variables in string', function() { + describe('can highlight variables in string', function() { beforeEach(function() { initTemplateSrv([{ type: 'query', name: 'test', current: { value: 'oogle' } }]); }); diff --git a/public/app/features/templating/template_srv.ts b/public/app/features/templating/template_srv.ts index 5b31072d140..99a9f53d547 100644 --- a/public/app/features/templating/template_srv.ts +++ b/public/app/features/templating/template_srv.ts @@ -74,6 +74,9 @@ export class TemplateSrv { if (typeof value === 'string') { return luceneEscape(value); } + if (value instanceof Array && value.length === 0) { + return '__empty__'; + } var quotedValues = _.map(value, function(val) { return '"' + luceneEscape(val) + '"'; }); @@ -179,16 +182,16 @@ export class TemplateSrv { return target; } - var variable, systemValue, value; + var variable, systemValue, value, fmt; this.regex.lastIndex = 0; return target.replace(this.regex, (match, var1, var2, fmt2, var3, fmt3) => { variable = this.index[var1 || var2 || var3]; - format = fmt2 || fmt3 || format; + fmt = fmt2 || fmt3 || format; if (scopedVars) { value = scopedVars[var1 || var2 || var3]; if (value) { - return this.formatValue(value.value, format, variable); + return this.formatValue(value.value, fmt, variable); } } @@ -198,19 +201,19 @@ export class TemplateSrv { systemValue = this.grafanaVariables[variable.current.value]; if (systemValue) { - return this.formatValue(systemValue, format, variable); + return this.formatValue(systemValue, fmt, variable); } value = variable.current.value; if (this.isAllValue(value)) { value = this.getAllValue(variable); - // skip formating of custom all values + // skip formatting of custom all values if (variable.allValue) { return value; } } - var res = this.formatValue(value, format, variable); + var res = this.formatValue(value, fmt, variable); return res; }); } diff --git a/public/app/partials/dashboard.html b/public/app/partials/dashboard.html index 210275d2200..9506587c515 100644 --- a/public/app/partials/dashboard.html +++ b/public/app/partials/dashboard.html @@ -1,18 +1,18 @@
    -
    - - +
    + + -
    - - +
    + + - - -
    -
    + + +
    +
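// Editor's note: a toy reproduction of the bug fixed in template_srv.ts earlier in
// this patch. Reassigning the shared `format` argument inside the replace()
// callback leaked the first variable's format into later matches, so
// '${test:pipe},$test' rendered both variables as pipe; a per-match local (fmt)
// keeps each match independent. The regex and formatter are simplified stand-ins.
function interpolateSketch(target: string, defaultFormat: string): string {
  const regex = /\$\{(\w+)(?::(\w+))?\}|\$(\w+)/g;
  return target.replace(regex, (match, var1, fmt2, var3) => {
    const fmt = fmt2 || defaultFormat; // local per match -- the essence of the fix
    return `${var1 || var3}(${fmt})`;
  });
}
// interpolateSketch('${test:pipe},$test', 'glob') -> 'test(pipe),test(glob)',
// in line with the expectations added to template_srv.jest.ts above.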
diff --git a/public/app/plugins/app/testdata/dashboards/graph_last_1h.json b/public/app/plugins/app/testdata/dashboards/graph_last_1h.json index c56d9e9216f..5a4459cd62c 100644 --- a/public/app/plugins/app/testdata/dashboards/graph_last_1h.json +++ b/public/app/plugins/app/testdata/dashboards/graph_last_1h.json @@ -392,7 +392,7 @@ "thresholds": [], "timeFrom": null, "timeShift": null, - "title": "2 yaxis and axis lables", + "title": "2 yaxis and axis labels", "tooltip": { "msResolution": false, "shared": true, @@ -894,7 +894,7 @@ "thresholds": [], "timeFrom": null, "timeShift": null, - "title": "Legend Table Single Series Should Take Minium Height", + "title": "Legend Table Single Series Should Take Minimum Height", "tooltip": { "shared": true, "sort": 0, diff --git a/public/app/plugins/datasource/elasticsearch/datasource.ts b/public/app/plugins/datasource/elasticsearch/datasource.ts index 7476a36405b..e3eccfb8029 100644 --- a/public/app/plugins/datasource/elasticsearch/datasource.ts +++ b/public/app/plugins/datasource/elasticsearch/datasource.ts @@ -395,6 +395,7 @@ export class ElasticDatasource { } if (query.find === 'terms') { + query.field = this.templateSrv.replace(query.field, {}, 'lucene'); query.query = this.templateSrv.replace(query.query || '*', {}, 'lucene'); return this.getTerms(query); } diff --git a/public/app/plugins/datasource/elasticsearch/elastic_response.ts b/public/app/plugins/datasource/elasticsearch/elastic_response.ts index ede5cb0ba3a..a378ab8b55f 100644 --- a/public/app/plugins/datasource/elasticsearch/elastic_response.ts +++ b/public/app/plugins/datasource/elasticsearch/elastic_response.ts @@ -175,7 +175,7 @@ export class ElasticResponse { } // This is quite complex - // neeed to recurise down the nested buckets to build series + // need to recurse down the nested buckets to build series processBuckets(aggs, target, seriesList, table, props, depth) { var bucket, aggDef, esAgg, aggId; var maxDepth = target.bucketAggs.length - 1; diff --git a/public/app/plugins/datasource/elasticsearch/partials/annotations.editor.html b/public/app/plugins/datasource/elasticsearch/partials/annotations.editor.html index d4e1e7d1b1c..a2e903f231c 100644 --- a/public/app/plugins/datasource/elasticsearch/partials/annotations.editor.html +++ b/public/app/plugins/datasource/elasticsearch/partials/annotations.editor.html @@ -27,7 +27,7 @@
    - Title (depricated) + Title (deprecated)
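// Editor's note: a hedged sketch of the ElasticDatasource change above. Before the
// patch only query.query was interpolated, so a template variable used as the
// *field* of a terms lookup reached Elasticsearch verbatim. The interface below is
// a stub illustrating the shape of the call, not the real templateSrv.
interface TemplateSrvLike {
  replace(target: string, scopedVars: object, format: string): string;
}

function prepareTermsQuery(query: { field: string; query?: string }, templateSrv: TemplateSrvLike) {
  return {
    field: templateSrv.replace(query.field, {}, 'lucene'), // newly interpolated by the patch
    query: templateSrv.replace(query.query || '*', {}, 'lucene'),
  };
}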
    diff --git a/public/app/plugins/datasource/elasticsearch/partials/config.html b/public/app/plugins/datasource/elasticsearch/partials/config.html index da23e9ddab1..def59518624 100644 --- a/public/app/plugins/datasource/elasticsearch/partials/config.html +++ b/public/app/plugins/datasource/elasticsearch/partials/config.html @@ -35,7 +35,7 @@
    - Min interval + Min time interval A lower limit for the auto group by time interval. Recommended to be set to write frequency, diff --git a/public/app/plugins/datasource/elasticsearch/specs/datasource_specs.ts b/public/app/plugins/datasource/elasticsearch/specs/datasource_specs.ts index 629621b8e60..558bccf3d0f 100644 --- a/public/app/plugins/datasource/elasticsearch/specs/datasource_specs.ts +++ b/public/app/plugins/datasource/elasticsearch/specs/datasource_specs.ts @@ -53,7 +53,7 @@ describe('ElasticDatasource', function() { }); }); - describe('When issueing metric query with interval pattern', function() { + describe('When issuing metric query with interval pattern', function() { var requestOptions, parts, header; beforeEach(function() { @@ -98,7 +98,7 @@ describe('ElasticDatasource', function() { }); }); - describe('When issueing document query', function() { + describe('When issuing document query', function() { var requestOptions, parts, header; beforeEach(function() { diff --git a/public/app/plugins/datasource/graphite/add_graphite_func.ts b/public/app/plugins/datasource/graphite/add_graphite_func.ts index f2a596c7071..444d30b5453 100644 --- a/public/app/plugins/datasource/graphite/add_graphite_func.ts +++ b/public/app/plugins/datasource/graphite/add_graphite_func.ts @@ -4,6 +4,7 @@ import $ from 'jquery'; import rst2html from 'rst2html'; import Drop from 'tether-drop'; +/** @ngInject */ export function graphiteAddFunc($compile) { const inputTemplate = ''; @@ -67,7 +68,7 @@ export function graphiteAddFunc($compile) { }); $input.blur(function() { - // clicking the function dropdown menu wont + // clicking the function dropdown menu won't // work if you remove class at once setTimeout(function() { $input.val(''); diff --git a/public/app/plugins/datasource/graphite/datasource.ts b/public/app/plugins/datasource/graphite/datasource.ts index 335cb400834..0b79673a14c 100644 --- a/public/app/plugins/datasource/graphite/datasource.ts +++ b/public/app/plugins/datasource/graphite/datasource.ts @@ -320,7 +320,7 @@ export function GraphiteDatasource(instanceSettings, $q, backendSrv, templateSrv method: 'GET', url: '/tags/autoComplete/tags', params: { - expr: _.map(expressions, expression => templateSrv.replace(expression)), + expr: _.map(expressions, expression => templateSrv.replace((expression || '').trim())), }, // for cancellations requestId: options.requestId, @@ -355,8 +355,8 @@ export function GraphiteDatasource(instanceSettings, $q, backendSrv, templateSrv method: 'GET', url: '/tags/autoComplete/values', params: { - expr: _.map(expressions, expression => templateSrv.replace(expression)), - tag: templateSrv.replace(tag), + expr: _.map(expressions, expression => templateSrv.replace((expression || '').trim())), + tag: templateSrv.replace((tag || '').trim()), }, // for cancellations requestId: options.requestId, @@ -453,7 +453,13 @@ export function GraphiteDatasource(instanceSettings, $q, backendSrv, templateSrv }; this.testDatasource = function() { - return this.metricFindQuery('*').then(function() { + let query = { + panelId: 3, + rangeRaw: { from: 'now-1h', to: 'now' }, + targets: [{ target: 'constantLine(100)' }], + maxDataPoints: 300, + }; + return this.query(query).then(function() { return { status: 'success', message: 'Data source is working' }; }); }; diff --git a/public/app/plugins/datasource/graphite/func_editor.ts b/public/app/plugins/datasource/graphite/func_editor.ts index 86135aef343..82a838e7660 100644 --- a/public/app/plugins/datasource/graphite/func_editor.ts +++ 
b/public/app/plugins/datasource/graphite/func_editor.ts @@ -3,6 +3,7 @@ import _ from 'lodash'; import $ from 'jquery'; import rst2html from 'rst2html'; +/** @ngInject */ export function graphiteFuncEditor($compile, templateSrv, popoverSrv) { const funcSpanTemplate = '{{func.def.name}}('; const paramTemplate = diff --git a/public/app/plugins/datasource/graphite/specs/datasource.jest.ts b/public/app/plugins/datasource/graphite/specs/datasource.jest.ts new file mode 100644 index 00000000000..dac6c2252d8 --- /dev/null +++ b/public/app/plugins/datasource/graphite/specs/datasource.jest.ts @@ -0,0 +1,300 @@ +import { GraphiteDatasource } from '../datasource'; +import moment from 'moment'; +import _ from 'lodash'; +import $q from 'q'; +import { TemplateSrvStub } from 'test/specs/helpers'; + +describe('graphiteDatasource', () => { + let ctx: any = { + backendSrv: {}, + $q: $q, + templateSrv: new TemplateSrvStub(), + }; + + beforeEach(function() { + ctx.instanceSettings = { url: [''], name: 'graphiteProd', jsonData: {} }; + ctx.ds = new GraphiteDatasource(ctx.instanceSettings, ctx.$q, ctx.backendSrv, ctx.templateSrv); + }); + + describe('When querying graphite with one target using query editor target spec', function() { + let query = { + panelId: 3, + rangeRaw: { from: 'now-1h', to: 'now' }, + targets: [{ target: 'prod1.count' }, { target: 'prod2.count' }], + maxDataPoints: 500, + }; + + let results; + let requestOptions; + + beforeEach(async () => { + ctx.backendSrv.datasourceRequest = function(options) { + requestOptions = options; + return ctx.$q.when({ + data: [{ target: 'prod1.count', datapoints: [[10, 1], [12, 1]] }], + }); + }; + + await ctx.ds.query(query).then(function(data) { + results = data; + }); + }); + + it('should generate the correct query', function() { + expect(requestOptions.url).toBe('/render'); + }); + + it('should set unique requestId', function() { + expect(requestOptions.requestId).toBe('graphiteProd.panelId.3'); + }); + + it('should query correctly', function() { + let params = requestOptions.data.split('&'); + expect(params).toContain('target=prod1.count'); + expect(params).toContain('target=prod2.count'); + expect(params).toContain('from=-1h'); + expect(params).toContain('until=now'); + }); + + it('should exclude undefined params', function() { + let params = requestOptions.data.split('&'); + expect(params).not.toContain('cacheTimeout=undefined'); + }); + + it('should return series list', function() { + expect(results.data.length).toBe(1); + expect(results.data[0].target).toBe('prod1.count'); + }); + + it('should convert to millisecond resolution', function() { + expect(results.data[0].datapoints[0][0]).toBe(10); + }); + }); + + describe('when fetching Graphite Events as annotations', () => { + let results; + + const options = { + annotation: { + tags: 'tag1', + }, + range: { + from: moment(1432288354), + to: moment(1432288401), + }, + rangeRaw: { from: 'now-24h', to: 'now' }, + }; + + describe('and tags are returned as string', () => { + const response = { + data: [ + { + when: 1507222850, + tags: 'tag1 tag2', + data: 'some text', + id: 2, + what: 'Event - deploy', + }, + ], + }; + + beforeEach(async () => { + ctx.backendSrv.datasourceRequest = function(options) { + return ctx.$q.when(response); + }; + + await ctx.ds.annotationQuery(options).then(function(data) { + results = data; + }); + }); + + it('should parse the tags string into an array', () => { + expect(_.isArray(results[0].tags)).toEqual(true); + expect(results[0].tags.length).toEqual(2); + 
expect(results[0].tags[0]).toEqual('tag1'); + expect(results[0].tags[1]).toEqual('tag2'); + }); + }); + + describe('and tags are returned as an array', () => { + const response = { + data: [ + { + when: 1507222850, + tags: ['tag1', 'tag2'], + data: 'some text', + id: 2, + what: 'Event - deploy', + }, + ], + }; + beforeEach(() => { + ctx.backendSrv.datasourceRequest = function(options) { + return ctx.$q.when(response); + }; + + ctx.ds.annotationQuery(options).then(function(data) { + results = data; + }); + // ctx.$rootScope.$apply(); + }); + + it('should parse the tags string into an array', () => { + expect(_.isArray(results[0].tags)).toEqual(true); + expect(results[0].tags.length).toEqual(2); + expect(results[0].tags[0]).toEqual('tag1'); + expect(results[0].tags[1]).toEqual('tag2'); + }); + }); + }); + + describe('building graphite params', function() { + it('should return empty array if no targets', function() { + let results = ctx.ds.buildGraphiteParams({ + targets: [{}], + }); + expect(results.length).toBe(0); + }); + + it('should uri escape targets', function() { + let results = ctx.ds.buildGraphiteParams({ + targets: [{ target: 'prod1.{test,test2}' }, { target: 'prod2.count' }], + }); + expect(results).toContain('target=prod1.%7Btest%2Ctest2%7D'); + }); + + it('should replace target placeholder', function() { + let results = ctx.ds.buildGraphiteParams({ + targets: [{ target: 'series1' }, { target: 'series2' }, { target: 'asPercent(#A,#B)' }], + }); + expect(results[2]).toBe('target=asPercent(series1%2Cseries2)'); + }); + + it('should replace target placeholder for hidden series', function() { + let results = ctx.ds.buildGraphiteParams({ + targets: [ + { target: 'series1', hide: true }, + { target: 'sumSeries(#A)', hide: true }, + { target: 'asPercent(#A,#B)' }, + ], + }); + expect(results[0]).toBe('target=' + encodeURIComponent('asPercent(series1,sumSeries(series1))')); + }); + + it('should replace target placeholder when nesting query references', function() { + let results = ctx.ds.buildGraphiteParams({ + targets: [{ target: 'series1' }, { target: 'sumSeries(#A)' }, { target: 'asPercent(#A,#B)' }], + }); + expect(results[2]).toBe('target=' + encodeURIComponent('asPercent(series1,sumSeries(series1))')); + }); + + it('should fix wrong minute interval parameters', function() { + let results = ctx.ds.buildGraphiteParams({ + targets: [{ target: "summarize(prod.25m.count, '25m', 'sum')" }], + }); + expect(results[0]).toBe('target=' + encodeURIComponent("summarize(prod.25m.count, '25min', 'sum')")); + }); + + it('should fix wrong month interval parameters', function() { + let results = ctx.ds.buildGraphiteParams({ + targets: [{ target: "summarize(prod.5M.count, '5M', 'sum')" }], + }); + expect(results[0]).toBe('target=' + encodeURIComponent("summarize(prod.5M.count, '5mon', 'sum')")); + }); + + it('should ignore empty targets', function() { + let results = ctx.ds.buildGraphiteParams({ + targets: [{ target: 'series1' }, { target: '' }], + }); + expect(results.length).toBe(2); + }); + }); + + describe('querying for template variables', () => { + let results; + let requestOptions; + + beforeEach(() => { + ctx.backendSrv.datasourceRequest = function(options) { + requestOptions = options; + return ctx.$q.when({ + data: ['backend_01', 'backend_02'], + }); + }; + }); + + it('should generate tags query', () => { + ctx.ds.metricFindQuery('tags()').then(data => { + results = data; + }); + + expect(requestOptions.url).toBe('/tags/autoComplete/tags'); + expect(requestOptions.params.expr).toEqual([]); 
+ expect(results).not.toBe(null); + }); + + it('should generate tags query with a filter expression', () => { + ctx.ds.metricFindQuery('tags(server=backend_01)').then(data => { + results = data; + }); + + expect(requestOptions.url).toBe('/tags/autoComplete/tags'); + expect(requestOptions.params.expr).toEqual(['server=backend_01']); + expect(results).not.toBe(null); + }); + + it('should generate tags query for an expression with whitespace after', () => { + ctx.ds.metricFindQuery('tags(server=backend_01 )').then(data => { + results = data; + }); + + expect(requestOptions.url).toBe('/tags/autoComplete/tags'); + expect(requestOptions.params.expr).toEqual(['server=backend_01']); + expect(results).not.toBe(null); + }); + + it('should generate tag values query for one tag', () => { + ctx.ds.metricFindQuery('tag_values(server)').then(data => { + results = data; + }); + + expect(requestOptions.url).toBe('/tags/autoComplete/values'); + expect(requestOptions.params.tag).toBe('server'); + expect(requestOptions.params.expr).toEqual([]); + expect(results).not.toBe(null); + }); + + it('should generate tag values query for a tag and expression', () => { + ctx.ds.metricFindQuery('tag_values(server,server=~backend*)').then(data => { + results = data; + }); + + expect(requestOptions.url).toBe('/tags/autoComplete/values'); + expect(requestOptions.params.tag).toBe('server'); + expect(requestOptions.params.expr).toEqual(['server=~backend*']); + expect(results).not.toBe(null); + }); + + it('should generate tag values query for a tag with whitespace after', () => { + ctx.ds.metricFindQuery('tag_values(server )').then(data => { + results = data; + }); + + expect(requestOptions.url).toBe('/tags/autoComplete/values'); + expect(requestOptions.params.tag).toBe('server'); + expect(requestOptions.params.expr).toEqual([]); + expect(results).not.toBe(null); + }); + + it('should generate tag values query for a tag and expression with whitespace after', () => { + ctx.ds.metricFindQuery('tag_values(server , server=~backend* )').then(data => { + results = data; + }); + + expect(requestOptions.url).toBe('/tags/autoComplete/values'); + expect(requestOptions.params.tag).toBe('server'); + expect(requestOptions.params.expr).toEqual(['server=~backend*']); + expect(results).not.toBe(null); + }); + }); +}); diff --git a/public/app/plugins/datasource/graphite/specs/datasource_specs.ts b/public/app/plugins/datasource/graphite/specs/datasource_specs.ts deleted file mode 100644 index b97b4a56b2b..00000000000 --- a/public/app/plugins/datasource/graphite/specs/datasource_specs.ts +++ /dev/null @@ -1,225 +0,0 @@ -import { describe, beforeEach, it, expect, angularMocks } from 'test/lib/common'; -import helpers from 'test/specs/helpers'; -import { GraphiteDatasource } from '../datasource'; -import moment from 'moment'; -import _ from 'lodash'; - -describe('graphiteDatasource', function() { - let ctx = new helpers.ServiceTestContext(); - let instanceSettings: any = { url: [''], name: 'graphiteProd', jsonData: {} }; - - beforeEach(angularMocks.module('grafana.core')); - beforeEach(angularMocks.module('grafana.services')); - beforeEach(ctx.providePhase(['backendSrv', 'templateSrv'])); - beforeEach( - angularMocks.inject(function($q, $rootScope, $httpBackend, $injector) { - ctx.$q = $q; - ctx.$httpBackend = $httpBackend; - ctx.$rootScope = $rootScope; - ctx.$injector = $injector; - $httpBackend.when('GET', /\.html$/).respond(''); - }) - ); - - beforeEach(function() { - ctx.ds = ctx.$injector.instantiate(GraphiteDatasource, { - instanceSettings: 
instanceSettings, - }); - }); - - describe('When querying graphite with one target using query editor target spec', function() { - let query = { - panelId: 3, - rangeRaw: { from: 'now-1h', to: 'now' }, - targets: [{ target: 'prod1.count' }, { target: 'prod2.count' }], - maxDataPoints: 500, - }; - - let results; - let requestOptions; - - beforeEach(function() { - ctx.backendSrv.datasourceRequest = function(options) { - requestOptions = options; - return ctx.$q.when({ - data: [{ target: 'prod1.count', datapoints: [[10, 1], [12, 1]] }], - }); - }; - - ctx.ds.query(query).then(function(data) { - results = data; - }); - ctx.$rootScope.$apply(); - }); - - it('should generate the correct query', function() { - expect(requestOptions.url).to.be('/render'); - }); - - it('should set unique requestId', function() { - expect(requestOptions.requestId).to.be('graphiteProd.panelId.3'); - }); - - it('should query correctly', function() { - let params = requestOptions.data.split('&'); - expect(params).to.contain('target=prod1.count'); - expect(params).to.contain('target=prod2.count'); - expect(params).to.contain('from=-1h'); - expect(params).to.contain('until=now'); - }); - - it('should exclude undefined params', function() { - let params = requestOptions.data.split('&'); - expect(params).to.not.contain('cacheTimeout=undefined'); - }); - - it('should return series list', function() { - expect(results.data.length).to.be(1); - expect(results.data[0].target).to.be('prod1.count'); - }); - - it('should convert to millisecond resolution', function() { - expect(results.data[0].datapoints[0][0]).to.be(10); - }); - }); - - describe('when fetching Graphite Events as annotations', () => { - let results; - - const options = { - annotation: { - tags: 'tag1', - }, - range: { - from: moment(1432288354), - to: moment(1432288401), - }, - rangeRaw: { from: 'now-24h', to: 'now' }, - }; - - describe('and tags are returned as string', () => { - const response = { - data: [ - { - when: 1507222850, - tags: 'tag1 tag2', - data: 'some text', - id: 2, - what: 'Event - deploy', - }, - ], - }; - - beforeEach(() => { - ctx.backendSrv.datasourceRequest = function(options) { - return ctx.$q.when(response); - }; - - ctx.ds.annotationQuery(options).then(function(data) { - results = data; - }); - ctx.$rootScope.$apply(); - }); - - it('should parse the tags string into an array', () => { - expect(_.isArray(results[0].tags)).to.eql(true); - expect(results[0].tags.length).to.eql(2); - expect(results[0].tags[0]).to.eql('tag1'); - expect(results[0].tags[1]).to.eql('tag2'); - }); - }); - - describe('and tags are returned as an array', () => { - const response = { - data: [ - { - when: 1507222850, - tags: ['tag1', 'tag2'], - data: 'some text', - id: 2, - what: 'Event - deploy', - }, - ], - }; - beforeEach(() => { - ctx.backendSrv.datasourceRequest = function(options) { - return ctx.$q.when(response); - }; - - ctx.ds.annotationQuery(options).then(function(data) { - results = data; - }); - ctx.$rootScope.$apply(); - }); - - it('should parse the tags string into an array', () => { - expect(_.isArray(results[0].tags)).to.eql(true); - expect(results[0].tags.length).to.eql(2); - expect(results[0].tags[0]).to.eql('tag1'); - expect(results[0].tags[1]).to.eql('tag2'); - }); - }); - }); - - describe('building graphite params', function() { - it('should return empty array if no targets', function() { - let results = ctx.ds.buildGraphiteParams({ - targets: [{}], - }); - expect(results.length).to.be(0); - }); - - it('should uri escape targets', function() { - 
let results = ctx.ds.buildGraphiteParams({ - targets: [{ target: 'prod1.{test,test2}' }, { target: 'prod2.count' }], - }); - expect(results).to.contain('target=prod1.%7Btest%2Ctest2%7D'); - }); - - it('should replace target placeholder', function() { - let results = ctx.ds.buildGraphiteParams({ - targets: [{ target: 'series1' }, { target: 'series2' }, { target: 'asPercent(#A,#B)' }], - }); - expect(results[2]).to.be('target=asPercent(series1%2Cseries2)'); - }); - - it('should replace target placeholder for hidden series', function() { - let results = ctx.ds.buildGraphiteParams({ - targets: [ - { target: 'series1', hide: true }, - { target: 'sumSeries(#A)', hide: true }, - { target: 'asPercent(#A,#B)' }, - ], - }); - expect(results[0]).to.be('target=' + encodeURIComponent('asPercent(series1,sumSeries(series1))')); - }); - - it('should replace target placeholder when nesting query references', function() { - let results = ctx.ds.buildGraphiteParams({ - targets: [{ target: 'series1' }, { target: 'sumSeries(#A)' }, { target: 'asPercent(#A,#B)' }], - }); - expect(results[2]).to.be('target=' + encodeURIComponent('asPercent(series1,sumSeries(series1))')); - }); - - it('should fix wrong minute interval parameters', function() { - let results = ctx.ds.buildGraphiteParams({ - targets: [{ target: "summarize(prod.25m.count, '25m', 'sum')" }], - }); - expect(results[0]).to.be('target=' + encodeURIComponent("summarize(prod.25m.count, '25min', 'sum')")); - }); - - it('should fix wrong month interval parameters', function() { - let results = ctx.ds.buildGraphiteParams({ - targets: [{ target: "summarize(prod.5M.count, '5M', 'sum')" }], - }); - expect(results[0]).to.be('target=' + encodeURIComponent("summarize(prod.5M.count, '5mon', 'sum')")); - }); - - it('should ignore empty targets', function() { - let results = ctx.ds.buildGraphiteParams({ - targets: [{ target: 'series1' }, { target: '' }], - }); - expect(results.length).to.be(2); - }); - }); -}); diff --git a/public/app/plugins/datasource/graphite/specs/query_ctrl_specs.ts b/public/app/plugins/datasource/graphite/specs/query_ctrl_specs.ts index f8b70b05940..b4f7718930f 100644 --- a/public/app/plugins/datasource/graphite/specs/query_ctrl_specs.ts +++ b/public/app/plugins/datasource/graphite/specs/query_ctrl_specs.ts @@ -97,7 +97,7 @@ describe('GraphiteQueryCtrl', function() { }); }); - describe('when initalizing target without metric expression and only function', function() { + describe('when initializing target without metric expression and only function', function() { beforeEach(function() { ctx.ctrl.target.target = 'asPercent(#A, #B)'; ctx.ctrl.datasource.metricFindQuery.returns(ctx.$q.when([])); @@ -130,7 +130,7 @@ describe('GraphiteQueryCtrl', function() { }); }); - describe('when initalizing target without metric expression and function with series-ref', function() { + describe('when initializing target without metric expression and function with series-ref', function() { beforeEach(function() { ctx.ctrl.target.target = 'asPercent(metric.node.count, #A)'; ctx.ctrl.datasource.metricFindQuery.returns(ctx.$q.when([])); @@ -146,7 +146,7 @@ describe('GraphiteQueryCtrl', function() { }); }); - describe('when getting altSegments and metricFindQuery retuns empty array', function() { + describe('when getting altSegments and metricFindQuery returns empty array', function() { beforeEach(function() { ctx.ctrl.target.target = 'test.count'; ctx.ctrl.datasource.metricFindQuery.returns(ctx.$q.when([])); diff --git 
a/public/app/plugins/datasource/influxdb/datasource.ts b/public/app/plugins/datasource/influxdb/datasource.ts index 1eff9bfa527..f971ac2f649 100644 --- a/public/app/plugins/datasource/influxdb/datasource.ts +++ b/public/app/plugins/datasource/influxdb/datasource.ts @@ -54,7 +54,7 @@ export default class InfluxDatasource { queryTargets.push(target); - // backward compatability + // backward compatibility scopedVars.interval = scopedVars.__interval; queryModel = new InfluxQuery(target, this.templateSrv, scopedVars); @@ -82,7 +82,7 @@ export default class InfluxDatasource { // replace templated variables allQueries = this.templateSrv.replace(allQueries, scopedVars); - return this._seriesQuery(allQueries).then((data): any => { + return this._seriesQuery(allQueries, options).then((data): any => { if (!data || !data.results) { return []; } @@ -135,7 +135,7 @@ export default class InfluxDatasource { var query = options.annotation.query.replace('$timeFilter', timeFilter); query = this.templateSrv.replace(query, null, 'regex'); - return this._seriesQuery(query).then(data => { + return this._seriesQuery(query, options).then(data => { if (!data || !data.results || !data.results[0]) { throw { message: 'No results in response from InfluxDB' }; } @@ -164,30 +164,30 @@ export default class InfluxDatasource { return false; } - metricFindQuery(query) { + metricFindQuery(query: string, options?: any) { var interpolated = this.templateSrv.replace(query, null, 'regex'); - return this._seriesQuery(interpolated).then(_.curry(this.responseParser.parse)(query)); + return this._seriesQuery(interpolated, options).then(_.curry(this.responseParser.parse)(query)); } getTagKeys(options) { var queryBuilder = new InfluxQueryBuilder({ measurement: '', tags: [] }, this.database); var query = queryBuilder.buildExploreQuery('TAG_KEYS'); - return this.metricFindQuery(query); + return this.metricFindQuery(query, options); } getTagValues(options) { var queryBuilder = new InfluxQueryBuilder({ measurement: '', tags: [] }, this.database); var query = queryBuilder.buildExploreQuery('TAG_VALUES', options.key); - return this.metricFindQuery(query); + return this.metricFindQuery(query, options); } - _seriesQuery(query) { + _seriesQuery(query: string, options?: any) { if (!query) { return this.$q.when({ results: [] }); } - return this._influxRequest('GET', '/query', { q: query, epoch: 'ms' }); + return this._influxRequest('GET', '/query', { q: query, epoch: 'ms' }, options); } serializeParams(params) { @@ -225,21 +225,21 @@ export default class InfluxDatasource { }); } - _influxRequest(method, url, data) { - var self = this; + _influxRequest(method: string, url: string, data: any, options?: any) { + const currentUrl = this.urls.shift(); + this.urls.push(currentUrl); - var currentUrl = self.urls.shift(); - self.urls.push(currentUrl); + let params: any = {}; - var params: any = {}; - - if (self.username) { - params.u = self.username; - params.p = self.password; + if (this.username) { + params.u = this.username; + params.p = this.password; } - if (self.database) { - params.db = self.database; + if (options && options.database) { + params.db = options.database; + } else if (this.database) { + params.db = this.database; } if (method === 'GET') { @@ -247,7 +247,7 @@ export default class InfluxDatasource { data = null; } - var options: any = { + let req: any = { method: method, url: currentUrl + url, params: params, @@ -257,15 +257,15 @@ export default class InfluxDatasource { paramSerializer: this.serializeParams, }; - options.headers = 
options.headers || {}; + req.headers = req.headers || {}; if (this.basicAuth || this.withCredentials) { - options.withCredentials = true; + req.withCredentials = true; } - if (self.basicAuth) { - options.headers.Authorization = self.basicAuth; + if (this.basicAuth) { + req.headers.Authorization = this.basicAuth; } - return this.backendSrv.datasourceRequest(options).then( + return this.backendSrv.datasourceRequest(req).then( result => { return result.data; }, diff --git a/public/app/plugins/datasource/influxdb/influx_query.ts b/public/app/plugins/datasource/influxdb/influx_query.ts index 656647b4413..2ef74170068 100644 --- a/public/app/plugins/datasource/influxdb/influx_query.ts +++ b/public/app/plugins/datasource/influxdb/influx_query.ts @@ -230,7 +230,7 @@ export default class InfluxQuery { for (i = 0; i < this.groupByParts.length; i++) { var part = this.groupByParts[i]; if (i > 0) { - // for some reason fill has no seperator + // for some reason fill has no separator groupBySection += part.def.type === 'fill' ? ' ' : ', '; } groupBySection += part.render(''); diff --git a/public/app/plugins/datasource/influxdb/influx_series.ts b/public/app/plugins/datasource/influxdb/influx_series.ts index a177ef3bb73..89e2d01b85b 100644 --- a/public/app/plugins/datasource/influxdb/influx_series.ts +++ b/public/app/plugins/datasource/influxdb/influx_series.ts @@ -151,11 +151,17 @@ export default class InfluxSeries { _.each(this.series, (series, seriesIndex) => { if (seriesIndex === 0) { - table.columns.push({ text: 'Time', type: 'time' }); + j = 0; + // Check that the first column is indeed 'time' + if (series.columns[0] === 'time') { + // Push this now before the tags and with the right type + table.columns.push({ text: 'Time', type: 'time' }); + j++; + } _.each(_.keys(series.tags), function(key) { table.columns.push({ text: key }); }); - for (j = 1; j < series.columns.length; j++) { + for (; j < series.columns.length; j++) { table.columns.push({ text: series.columns[j] }); } } diff --git a/public/app/plugins/datasource/influxdb/partials/annotations.editor.html b/public/app/plugins/datasource/influxdb/partials/annotations.editor.html index 2f54ff28275..48991426c1e 100644 --- a/public/app/plugins/datasource/influxdb/partials/annotations.editor.html +++ b/public/app/plugins/datasource/influxdb/partials/annotations.editor.html @@ -17,7 +17,7 @@
    - Title (depricated) + Title (deprecated)
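
Worth pausing on the influx_series.ts hunk above: getTable() previously assumed every response began with a time column, which broke rendering for responses such as SHOW CARDINALITY that return only a count. Below is a minimal sketch of the new column-building logic; the SeriesLike shape is a simplification assumed here, not the real InfluxSeries types.

```ts
// Minimal sketch of the updated getTable() column handling, assuming a
// simplified series shape rather than the real InfluxSeries types.
interface SeriesLike {
  columns: string[];
  tags?: { [key: string]: string };
}

function buildTableColumns(series: SeriesLike): { text: string; type?: string }[] {
  const columns: { text: string; type?: string }[] = [];
  let j = 0;
  // Only emit a typed 'Time' column when the response actually starts with
  // one, so time-less responses like SHOW CARDINALITY still render.
  if (series.columns[0] === 'time') {
    columns.push({ text: 'Time', type: 'time' });
    j++;
  }
  // Tag keys come next, then the remaining value columns.
  for (const key of Object.keys(series.tags || {})) {
    columns.push({ text: key });
  }
  for (; j < series.columns.length; j++) {
    columns.push({ text: series.columns[j] });
  }
  return columns;
}

// e.g. buildTableColumns({ columns: ['count'] }) -> [{ text: 'count' }]
```
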
    diff --git a/public/app/plugins/datasource/influxdb/partials/config.html b/public/app/plugins/datasource/influxdb/partials/config.html index 9cb6f5ba749..a70a1de98a4 100644 --- a/public/app/plugins/datasource/influxdb/partials/config.html +++ b/public/app/plugins/datasource/influxdb/partials/config.html @@ -23,6 +23,20 @@
    + +
    +
    +
    Database Access
    +

    + Setting the database for this datasource does not deny access to other databases. The InfluxDB query syntax allows + switching the database in the query. For example: + SHOW MEASUREMENTS ON _internal or SELECT * FROM "_internal".."database" LIMIT 10 +

    + To support data isolation and security, make sure appropriate permissions are configured in InfluxDB. +

    +
    +
    +
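
The warning above exists because the configured database is only a default, which is also how the updated _influxRequest() in the datasource diff earlier treats it: a per-request options object may carry its own database that takes precedence. A hedged sketch of that resolution order, with simplified standalone types assumed for illustration:

```ts
// Sketch of the db-parameter resolution added to _influxRequest(), with
// simplified standalone types; per-request options win over the default.
function resolveDbParam(
  options: { database?: string } | undefined,
  configuredDatabase: string | undefined
): { db?: string } {
  const params: { db?: string } = {};
  if (options && options.database) {
    // e.g. a template-variable query scoped to another database
    params.db = options.database;
  } else if (configuredDatabase) {
    params.db = configuredDatabase;
  }
  return params;
}
```

Either way, the query text itself can still address other databases, so real isolation has to come from InfluxDB-side permissions, as the note says.
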
diff --git a/public/app/plugins/datasource/influxdb/query_help.md b/public/app/plugins/datasource/influxdb/query_help.md index 0d4fd941ca5..4930ccbc83f 100644 --- a/public/app/plugins/datasource/influxdb/query_help.md +++ b/public/app/plugins/datasource/influxdb/query_help.md @@ -10,7 +10,7 @@ - When stacking is enabled it is important that points align - If there are missing points for one series it can cause gaps or missing bars - You must use fill(0), and select a group by time low limit -- Use the group by time option below your queries and specify for example >10s if your metrics are written every 10 seconds +- Use the group by time option below your queries and specify for example 10s if your metrics are written every 10 seconds - This will insert zeros for series that are missing measurements and will make stacking work properly #### Group by time @@ -18,8 +18,7 @@ - Leave the group by time field empty for each query and it will be calculated based on time range and pixel width of the graph - If you use fill(0) or fill(null) set a low limit for the auto group by time interval - The low limit can only be set in the group by time option below your queries -- You set a low limit by adding a greater sign before the interval -- Example: >60s if you write metrics to InfluxDB every 60 seconds +- Example: 60s if you write metrics to InfluxDB every 60 seconds #### Documentation links: diff --git a/public/app/plugins/datasource/influxdb/response_parser.ts b/public/app/plugins/datasource/influxdb/response_parser.ts index 78ce67e7a37..746a8c0e05a 100644 --- a/public/app/plugins/datasource/influxdb/response_parser.ts +++ b/public/app/plugins/datasource/influxdb/response_parser.ts @@ -11,14 +11,23 @@ export default class ResponseParser { return []; } - var influxdb11format = query.toLowerCase().indexOf('show tag values') >= 0; - var res = {}; _.each(influxResults.series, serie => { _.each(serie.values, value => { if (_.isArray(value)) { - if (influxdb11format) { - addUnique(res, value[1] || value[0]); + // In general, there are two possible shapes for the returned value. + // The first is a two-element array, + // where the first element is a metadata value: + // the tag name for SHOW TAG VALUES queries, + // the time field for SELECT queries, etc. + // The second shape is a one-element array + // containing an immediate value; + // SHOW FIELD KEYS queries, for example, return this shape. + // Note: pre-0.11 versions return + // the second shape for SHOW TAG VALUES queries + // (while newer versions return the first).
+ if (value[1] !== undefined) { + addUnique(res, value[1]); } else { addUnique(res, value[0]); } @@ -29,7 +38,7 @@ export default class ResponseParser { }); return _.map(res, value => { - return { text: value }; + return { text: value.toString() }; }); } } diff --git a/public/app/plugins/datasource/influxdb/specs/influx_series.jest.ts b/public/app/plugins/datasource/influxdb/specs/influx_series.jest.ts index 8ae4a335828..8c8fee9ab9f 100644 --- a/public/app/plugins/datasource/influxdb/specs/influx_series.jest.ts +++ b/public/app/plugins/datasource/influxdb/specs/influx_series.jest.ts @@ -195,10 +195,34 @@ describe('when generating timeseries from influxdb response', function() { expect(table.type).toBe('table'); expect(table.columns.length).toBe(5); + expect(table.columns[0].text).toEqual('Time'); expect(table.rows[0]).toEqual([1431946625000, 'Africa', 'server2', 23, 10]); }); }); + describe('given table response from SHOW CARDINALITY', function() { + var options = { + alias: '', + series: [ + { + name: 'cpu', + columns: ['count'], + values: [[37]], + }, + ], + }; + + it('should return table', function() { + var series = new InfluxSeries(options); + var table = series.getTable(); + + expect(table.type).toBe('table'); + expect(table.columns.length).toBe(1); + expect(table.columns[0].text).toEqual('count'); + expect(table.rows[0]).toEqual([37]); + }); + }); + describe('given annotation response', function() { describe('with empty tagsColumn', function() { var options = { diff --git a/public/app/plugins/datasource/influxdb/specs/query_builder.jest.ts b/public/app/plugins/datasource/influxdb/specs/query_builder.jest.ts index 439bf7b1fc5..eeae987b139 100644 --- a/public/app/plugins/datasource/influxdb/specs/query_builder.jest.ts +++ b/public/app/plugins/datasource/influxdb/specs/query_builder.jest.ts @@ -97,7 +97,7 @@ describe('InfluxQueryBuilder', function() { expect(query).toBe('SHOW TAG VALUES FROM "one_week"."cpu" WITH KEY = "app" WHERE "host" = \'server1\''); }); - it('should not includ policy when policy is default', function() { + it('should not include policy when policy is default', function() { var builder = new InfluxQueryBuilder({ measurement: 'cpu', policy: 'default', diff --git a/public/app/plugins/datasource/influxdb/specs/response_parser.jest.ts b/public/app/plugins/datasource/influxdb/specs/response_parser.jest.ts index 8ddc0fcdaf1..c0652f5fca3 100644 --- a/public/app/plugins/datasource/influxdb/specs/response_parser.jest.ts +++ b/public/app/plugins/datasource/influxdb/specs/response_parser.jest.ts @@ -85,6 +85,32 @@ describe('influxdb response parser', () => { }); }); + describe('SELECT response', () => { + var query = 'SELECT "usage_iowait" FROM "cpu" LIMIT 10'; + var response = { + results: [ + { + series: [ + { + name: 'cpu', + columns: ['time', 'usage_iowait'], + values: [[1488465190006040638, 0.0], [1488465190006040638, 15.0], [1488465190006040638, 20.2]], + }, + ], + }, + ], + }; + + var result = parser.parse(query, response); + + it('should return second column', () => { + expect(_.size(result)).toBe(3); + expect(result[0].text).toBe('0'); + expect(result[1].text).toBe('15'); + expect(result[2].text).toBe('20.2'); + }); + }); + describe('SHOW FIELD response', () => { var query = 'SHOW FIELD KEYS FROM "cpu"'; describe('response from 0.10.0', () => { diff --git a/public/app/plugins/datasource/mssql/partials/annotations.editor.html b/public/app/plugins/datasource/mssql/partials/annotations.editor.html index 75eaa3ed1d9..b2c0d7b97a6 100644 --- 
a/public/app/plugins/datasource/mssql/partials/annotations.editor.html +++ b/public/app/plugins/datasource/mssql/partials/annotations.editor.html @@ -18,7 +18,7 @@
    Annotation Query Format
    -An annotation is an event that is overlayed on top of graphs. The query can have up to three columns per row, the time column is mandatory. Annotation rendering is expensive so it is important to limit the number of rows returned. +An annotation is an event that is overlaid on top of graphs. The query can have up to three columns per row, the time column is mandatory. Annotation rendering is expensive so it is important to limit the number of rows returned. - column with alias: time for the annotation event time. Use epoch time or any native date data type. - column with alias: text for the annotation text. @@ -28,7 +28,7 @@ An annotation is an event that is overlayed on top of graphs. The query can have Macros: - $__time(column) -> column AS time - $__timeEpoch(column) -> DATEDIFF(second, '1970-01-01', column) AS time -- $__timeFilter(column) -> column >= DATEADD(s, 18446744066914186738, '1970-01-01') AND column &t;= DATEADD(s, 18446744066914187038, '1970-01-01') +- $__timeFilter(column) -> column >= DATEADD(s, 18446744066914186738, '1970-01-01') AND column <= DATEADD(s, 18446744066914187038, '1970-01-01') - $__unixEpochFilter(column) -> column >= 1492750877 AND column <= 1492750877 Or build your own conditionals using these macros which just return the values: diff --git a/public/app/plugins/datasource/mssql/partials/query.editor.html b/public/app/plugins/datasource/mssql/partials/query.editor.html index c7dc030be6e..f29dfa18db2 100644 --- a/public/app/plugins/datasource/mssql/partials/query.editor.html +++ b/public/app/plugins/datasource/mssql/partials/query.editor.html @@ -49,7 +49,7 @@ Table: Macros: - $__time(column) -> column AS time - $__timeEpoch(column) -> DATEDIFF(second, '1970-01-01', column) AS time -- $__timeFilter(column) -> column >= DATEADD(s, 18446744066914186738, '1970-01-01') AND column &t;= DATEADD(s, 18446744066914187038, '1970-01-01') +- $__timeFilter(column) -> column >= DATEADD(s, 18446744066914186738, '1970-01-01') AND column <= DATEADD(s, 18446744066914187038, '1970-01-01') - $__unixEpochFilter(column) -> column >= 1492750877 AND column <= 1492750877 - $__timeGroup(column, '5m'[, fillvalue]) -> CAST(ROUND(DATEDIFF(second, '1970-01-01', column)/300.0, 0) as bigint)*300. Providing a fillValue of NULL or floating value will automatically fill empty series in timerange with that value. diff --git a/public/app/plugins/datasource/mysql/partials/annotations.editor.html b/public/app/plugins/datasource/mysql/partials/annotations.editor.html index d142e091fed..23ec726a9f0 100644 --- a/public/app/plugins/datasource/mysql/partials/annotations.editor.html +++ b/public/app/plugins/datasource/mysql/partials/annotations.editor.html @@ -18,7 +18,7 @@
    Annotation Query Format
    -An annotation is an event that is overlayed on top of graphs. The query can have up to three columns per row, the time or time_sec column is mandatory. Annotation rendering is expensive so it is important to limit the number of rows returned. +An annotation is an event that is overlaid on top of graphs. The query can have up to three columns per row, the time or time_sec column is mandatory. Annotation rendering is expensive so it is important to limit the number of rows returned. - column with alias: time or time_sec for the annotation event time. Use epoch time or any native date data type. - column with alias: text for the annotation text diff --git a/public/app/plugins/datasource/postgres/img/postgresql_logo.svg b/public/app/plugins/datasource/postgres/img/postgresql_logo.svg index d98e3659c39..40a39970070 100644 --- a/public/app/plugins/datasource/postgres/img/postgresql_logo.svg +++ b/public/app/plugins/datasource/postgres/img/postgresql_logo.svg @@ -3,7 +3,7 @@ "http://www.w3.org/Graphics/SVG/1.1/DTD/svg11.dtd"> - + @@ -19,4 +19,4 @@ - \ No newline at end of file + diff --git a/public/app/plugins/datasource/postgres/module.ts b/public/app/plugins/datasource/postgres/module.ts index acd23318b6d..a24971fa1a1 100644 --- a/public/app/plugins/datasource/postgres/module.ts +++ b/public/app/plugins/datasource/postgres/module.ts @@ -8,7 +8,7 @@ class PostgresConfigCtrl { /** @ngInject **/ constructor($scope) { - this.current.jsonData.sslmode = this.current.jsonData.sslmode || 'require'; + this.current.jsonData.sslmode = this.current.jsonData.sslmode || 'verify-full'; } } diff --git a/public/app/plugins/datasource/postgres/partials/annotations.editor.html b/public/app/plugins/datasource/postgres/partials/annotations.editor.html index 09232d6f8ed..b83f5a14832 100644 --- a/public/app/plugins/datasource/postgres/partials/annotations.editor.html +++ b/public/app/plugins/datasource/postgres/partials/annotations.editor.html @@ -18,7 +18,7 @@
    Annotation Query Format
    -An annotation is an event that is overlayed on top of graphs. The query can have up to three columns per row, the time column is mandatory. Annotation rendering is expensive so it is important to limit the number of rows returned. +An annotation is an event that is overlaid on top of graphs. The query can have up to three columns per row, the time column is mandatory. Annotation rendering is expensive so it is important to limit the number of rows returned. - column with alias: time for the annotation event time. Use epoch time or any native date data type. - column with alias: text for the annotation text @@ -28,12 +28,12 @@ An annotation is an event that is overlayed on top of graphs. The query can have Macros: - $__time(column) -> column as "time" - $__timeEpoch -> extract(epoch from column) as "time" -- $__timeFilter(column) -> column ≥ to_timestamp(1492750877) AND column ≤ to_timestamp(1492750877) -- $__unixEpochFilter(column) -> column > 1492750877 AND column < 1492750877 +- $__timeFilter(column) -> column BETWEEN '2017-04-21T05:01:17Z' AND '2017-04-21T05:01:17Z' +- $__unixEpochFilter(column) -> column >= 1492750877 AND column <= 1492750877 Or build your own conditionals using these macros which just return the values: -- $__timeFrom() -> to_timestamp(1492750877) -- $__timeTo() -> to_timestamp(1492750877) +- $__timeFrom() -> '2017-04-21T05:01:17Z' +- $__timeTo() -> '2017-04-21T05:01:17Z' - $__unixEpochFrom() -> 1492750877 - $__unixEpochTo() -> 1492750877
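
Since the Postgres annotation help above leans on column aliases, a concrete query can make the contract clearer. The sketch below is hypothetical: the deploy_events table and its columns are invented, and only the time/text/tags aliases and the $__timeFilter macro come from the help text.

```ts
// Hypothetical Postgres annotation query; the deploy_events table and its
// column names are invented, only the aliases match the documented contract.
const annotationQuery = `
  SELECT
    created_at AS time,  -- epoch time or any native date data type
    message    AS text,  -- the annotation body
    labels     AS tags   -- optional comma-separated tag list
  FROM deploy_events
  WHERE $__timeFilter(created_at)
  ORDER BY created_at
  LIMIT 100`; // rendering is expensive, keep row counts low
```
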
    diff --git a/public/app/plugins/datasource/postgres/partials/query.editor.html b/public/app/plugins/datasource/postgres/partials/query.editor.html index 163970a9ad5..26392c17356 100644 --- a/public/app/plugins/datasource/postgres/partials/query.editor.html +++ b/public/app/plugins/datasource/postgres/partials/query.editor.html @@ -48,8 +48,8 @@ Table: Macros: - $__time(column) -> column as "time" - $__timeEpoch -> extract(epoch from column) as "time" -- $__timeFilter(column) -> extract(epoch from column) BETWEEN 1492750877 AND 1492750877 -- $__unixEpochFilter(column) -> column > 1492750877 AND column < 1492750877 +- $__timeFilter(column) -> column BETWEEN '2017-04-21T05:01:17Z' AND '2017-04-21T05:01:17Z' +- $__unixEpochFilter(column) -> column >= 1492750877 AND column <= 1492750877 - $__timeGroup(column,'5m') -> (extract(epoch from column)/300)::bigint*300 AS time Example of group by and order by with $__timeGroup: @@ -61,8 +61,8 @@ GROUP BY time ORDER BY time Or build your own conditionals using these macros which just return the values: -- $__timeFrom() -> to_timestamp(1492750877) -- $__timeTo() -> to_timestamp(1492750877) +- $__timeFrom() -> '2017-04-21T05:01:17Z' +- $__timeTo() -> '2017-04-21T05:01:17Z' - $__unixEpochFrom() -> 1492750877 - $__unixEpochTo() -> 1492750877
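
Both Postgres help texts now document $__timeFilter() and $__timeFrom()/$__timeTo() as expanding to ISO-8601 timestamp literals instead of to_timestamp() calls, and $__unixEpochFilter() as an inclusive >=/<= range. A rough illustration of the documented output follows, written as hypothetical helpers; the real expansion happens server-side in the macro engine, not in plugin code.

```ts
// Hypothetical helpers illustrating the documented macro output; the real
// expansion happens server-side, not in plugin code.
function timeFilter(column: string, from: Date, to: Date): string {
  const iso = (d: Date) => d.toISOString().replace(/\.\d{3}Z$/, 'Z');
  // -> "created_at BETWEEN '2017-04-21T05:01:17Z' AND '2017-04-21T06:01:17Z'"
  return `${column} BETWEEN '${iso(from)}' AND '${iso(to)}'`;
}

function unixEpochFilter(column: string, from: Date, to: Date): string {
  const toSec = (d: Date) => Math.floor(d.getTime() / 1000);
  // -> "created_at >= 1492750877 AND created_at <= 1492754477"
  return `${column} >= ${toSec(from)} AND ${column} <= ${toSec(to)}`;
}
```
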
    diff --git a/public/app/plugins/datasource/prometheus/datasource.ts b/public/app/plugins/datasource/prometheus/datasource.ts index 4c736f2c664..1820cb1306d 100644 --- a/public/app/plugins/datasource/prometheus/datasource.ts +++ b/public/app/plugins/datasource/prometheus/datasource.ts @@ -5,15 +5,21 @@ import kbn from 'app/core/utils/kbn'; import * as dateMath from 'app/core/utils/datemath'; import PrometheusMetricFindQuery from './metric_find_query'; import { ResultTransformer } from './result_transformer'; +import { BackendSrv } from 'app/core/services/backend_srv'; -function prometheusSpecialRegexEscape(value) { - return value.replace(/[\\^$*+?.()|[\]{}]/g, '\\\\$&'); +export function prometheusRegularEscape(value) { + return value.replace(/'/g, "\\\\'"); +} + +export function prometheusSpecialRegexEscape(value) { + return prometheusRegularEscape(value.replace(/\\/g, '\\\\\\\\').replace(/[$^*{}\[\]+?.()]/g, '\\\\$&')); } export class PrometheusDatasource { type: string; editorSrc: string; name: string; + supportsExplore: boolean; supportMetrics: boolean; url: string; directUrl: string; @@ -25,10 +31,11 @@ export class PrometheusDatasource { resultTransformer: ResultTransformer; /** @ngInject */ - constructor(instanceSettings, private $q, private backendSrv, private templateSrv, private timeSrv) { + constructor(instanceSettings, private $q, private backendSrv: BackendSrv, private templateSrv, private timeSrv) { this.type = 'prometheus'; this.editorSrc = 'app/features/prometheus/partials/query.editor.html'; this.name = instanceSettings.name; + this.supportsExplore = true; this.supportMetrics = true; this.url = instanceSettings.url; this.directUrl = instanceSettings.directUrl; @@ -39,13 +46,13 @@ export class PrometheusDatasource { this.resultTransformer = new ResultTransformer(templateSrv); } - _request(method, url, data?, requestId?) 
{ + _request(url, data?, options?: any) { var options: any = { url: this.url + url, - method: method, - requestId: requestId, + method: this.httpMethod, + ...options, }; - if (method === 'GET') { + if (options.method === 'GET') { if (!_.isEmpty(data)) { options.url = options.url + @@ -77,10 +84,15 @@ return this.backendSrv.datasourceRequest(options); } + // Use this for tab completion features, won't publish response to other components + metadataRequest(url) { + return this._request(url, null, { method: 'GET', silent: true }); + } + interpolateQueryExpr(value, variable, defaultFormatFn) { // if no multi or include all do not regexEscape if (!variable.multi && !variable.includeAll) { - return value; + return prometheusRegularEscape(value); } if (typeof value === 'string') { @@ -143,6 +155,7 @@ end: end, responseListLength: responseList.length, responseIndex: index, + refId: activeTargets[index].refId, }; this.resultTransformer.transform(result, response, transformerOptions); @@ -202,7 +215,7 @@ end: end, step: query.step, }; - return this._request(this.httpMethod, url, data, query.requestId); + return this._request(url, data, { requestId: query.requestId }); } performInstantQuery(query, time) { @@ -211,7 +224,7 @@ query: query.expr, time: time, }; - return this._request(this.httpMethod, url, data, query.requestId); + return this._request(url, data, { requestId: query.requestId }); } performSuggestQuery(query, cache = false) { @@ -225,7 +238,7 @@ ); } - return this._request('GET', url).then(result => { + return this.metadataRequest(url).then(result => { this.metricsNameCache = { data: result.data.data, expire: Date.now() + 60 * 1000, @@ -313,10 +326,29 @@ }); } + getExploreState(panel) { + let state = {}; + if (panel.targets) { + const queries = panel.targets.map(t => ({ + query: this.templateSrv.replace(t.expr, {}, this.interpolateQueryExpr), + format: t.format, + })); + state = { + ...state, + queries, + }; + } + return state; + } + getPrometheusTime(date, roundUp) { if (_.isString(date)) { date = dateMath.parse(date, roundUp); } return Math.ceil(date.valueOf() / 1000); } + + getOriginalMetricName(labelData) { + return this.resultTransformer.getOriginalMetricName(labelData); + } } diff --git a/public/app/plugins/datasource/prometheus/metric_find_query.ts b/public/app/plugins/datasource/prometheus/metric_find_query.ts index b27f1cd50af..13b6d7df8e3 100644 --- a/public/app/plugins/datasource/prometheus/metric_find_query.ts +++ b/public/app/plugins/datasource/prometheus/metric_find_query.ts @@ -46,7 +46,7 @@ // return label values globally url = '/api/v1/label/' + label + '/values'; - return this.datasource._request('GET', url).then(function(result) { + return this.datasource.metadataRequest(url).then(function(result) { return _.map(result.data.data, function(value) { return { text: value }; }); @@ -56,7 +56,7 @@ var end = this.datasource.getPrometheusTime(this.range.to, true); url = '/api/v1/series?match[]=' + encodeURIComponent(metric) + '&start=' + start + '&end=' + end; - return this.datasource._request('GET', url).then(function(result) { + return this.datasource.metadataRequest(url).then(function(result) { var _labels = _.map(result.data.data, function(metric) { return
metric[label] || ''; }).filter(function(label) { @@ -76,7 +76,7 @@ metricNameQuery(metricFilterPattern) { var url = '/api/v1/label/__name__/values'; - return this.datasource._request('GET', url).then(function(result) { + return this.datasource.metadataRequest(url).then(function(result) { return _.chain(result.data.data) .filter(function(metricName) { var r = new RegExp(metricFilterPattern); @@ -120,8 +120,8 @@ var url = '/api/v1/series?match[]=' + encodeURIComponent(query) + '&start=' + start + '&end=' + end; var self = this; - return this.datasource._request('GET', url).then(function(result) { - return _.map(result.data.data, function(metric) { + return this.datasource.metadataRequest(url).then(function(result) { + return _.map(result.data.data, metric => { return { text: self.datasource.getOriginalMetricName(metric), expandable: true, diff --git a/public/app/plugins/datasource/prometheus/partials/query.editor.html b/public/app/plugins/datasource/prometheus/partials/query.editor.html index 8d6e89c3406..68791d96c19 100644 --- a/public/app/plugins/datasource/prometheus/partials/query.editor.html +++ b/public/app/plugins/datasource/prometheus/partials/query.editor.html @@ -14,7 +14,7 @@ data-min-length=0 data-items=1000 ng-model-onblur ng-change="ctrl.refreshMetricData()"> - Controls the name of the time series, using name or pattern. For example {{hostname}} will be replaced with label value for + Controls the name of the time series, using name or pattern. For example {{hostname}} will be replaced with the label value for the label hostname.
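
Before moving on, note what the datasource refactor above buys: every label and series lookup in metric_find_query.ts now goes through metadataRequest(), which pins the HTTP method to GET regardless of the configured httpMethod and marks the request silent (the Jest specs further down assert exactly this). A condensed sketch follows; backendSrv is stubbed and the class is trimmed down from the actual PrometheusDatasource.

```ts
// Condensed sketch of the _request/metadataRequest split; backendSrv is
// stubbed and the class is trimmed down from the actual PrometheusDatasource.
type ReqOptions = { url: string; method: string; silent?: boolean; requestId?: string };

class MiniPrometheusDs {
  constructor(
    private url: string,
    private httpMethod: string,
    private backendSrv: { datasourceRequest(o: ReqOptions): any }
  ) {}

  _request(path: string, options?: Partial<ReqOptions>) {
    // Defaults to the configured method; callers can override per request.
    const req: ReqOptions = { url: this.url + path, method: this.httpMethod, ...options };
    return this.backendSrv.datasourceRequest(req);
  }

  // Tab-completion lookups are always GET (even when the datasource is set
  // to POST) and silent, so other components never see the response.
  metadataRequest(path: string) {
    return this._request(path, { method: 'GET', silent: true });
  }
}
```
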
    diff --git a/public/app/plugins/datasource/prometheus/result_transformer.ts b/public/app/plugins/datasource/prometheus/result_transformer.ts index 6d97b783983..d5feda7d28c 100644 --- a/public/app/plugins/datasource/prometheus/result_transformer.ts +++ b/public/app/plugins/datasource/prometheus/result_transformer.ts @@ -8,7 +8,7 @@ export class ResultTransformer { let prometheusResult = response.data.data.result; if (options.format === 'table') { - result.push(this.transformMetricDataToTable(prometheusResult, options.responseListLength, options.responseIndex)); + result.push(this.transformMetricDataToTable(prometheusResult, options.responseListLength, options.refId)); } else if (options.format === 'heatmap') { let seriesList = []; prometheusResult.sort(sortSeriesByLabel); @@ -58,7 +58,7 @@ export class ResultTransformer { return { target: metricLabel, datapoints: dps }; } - transformMetricDataToTable(md, resultCount: number, resultIndex: number) { + transformMetricDataToTable(md, resultCount: number, refId: string) { var table = new TableModel(); var i, j; var metricLabels = {}; @@ -83,7 +83,7 @@ export class ResultTransformer { metricLabels[label] = labelIndex + 1; table.columns.push({ text: label }); }); - let valueText = resultCount > 1 ? `Value #${String.fromCharCode(65 + resultIndex)}` : 'Value'; + let valueText = resultCount > 1 ? `Value #${refId}` : 'Value'; table.columns.push({ text: valueText }); // Populate rows, set value to empty string when label not present. diff --git a/public/app/plugins/datasource/prometheus/specs/datasource.jest.ts b/public/app/plugins/datasource/prometheus/specs/datasource.jest.ts index cca74e023e7..2ab2895d731 100644 --- a/public/app/plugins/datasource/prometheus/specs/datasource.jest.ts +++ b/public/app/plugins/datasource/prometheus/specs/datasource.jest.ts @@ -1,7 +1,7 @@ import _ from 'lodash'; import moment from 'moment'; import q from 'q'; -import { PrometheusDatasource } from '../datasource'; +import { PrometheusDatasource, prometheusSpecialRegexEscape, prometheusRegularEscape } from '../datasource'; describe('PrometheusDatasource', () => { let ctx: any = {}; @@ -14,6 +14,7 @@ describe('PrometheusDatasource', () => { }; ctx.backendSrvMock = {}; + ctx.templateSrvMock = { replace: a => a, }; @@ -23,6 +24,45 @@ describe('PrometheusDatasource', () => { ctx.ds = new PrometheusDatasource(instanceSettings, q, ctx.backendSrvMock, ctx.templateSrvMock, ctx.timeSrvMock); }); + describe('Datasource metadata requests', () => { + it('should perform a GET request with the default config', () => { + ctx.backendSrvMock.datasourceRequest = jest.fn(); + ctx.ds.metadataRequest('/foo'); + expect(ctx.backendSrvMock.datasourceRequest.mock.calls.length).toBe(1); + expect(ctx.backendSrvMock.datasourceRequest.mock.calls[0][0].method).toBe('GET'); + }); + + it('should still perform a GET request with the DS HTTP method set to POST', () => { + ctx.backendSrvMock.datasourceRequest = jest.fn(); + const postSettings = _.cloneDeep(instanceSettings); + postSettings.jsonData.httpMethod = 'POST'; + const ds = new PrometheusDatasource(postSettings, q, ctx.backendSrvMock, ctx.templateSrvMock, ctx.timeSrvMock); + ds.metadataRequest('/foo'); + expect(ctx.backendSrvMock.datasourceRequest.mock.calls.length).toBe(1); + expect(ctx.backendSrvMock.datasourceRequest.mock.calls[0][0].method).toBe('GET'); + }); + }); + + describe('When performing performSuggestQuery', () => { + it('should cache response', async () => { + ctx.backendSrvMock.datasourceRequest.mockReturnValue( + 
Promise.resolve({ + status: 'success', + data: { data: ['value1', 'value2', 'value3'] }, + }) + ); + + let results = await ctx.ds.performSuggestQuery('value', true); + + expect(results).toHaveLength(3); + + ctx.backendSrvMock.datasourceRequest.mockReset(); + results = await ctx.ds.performSuggestQuery('value', true); + + expect(results).toHaveLength(3); + }); + }); + describe('When converting prometheus histogram to heatmap format', () => { beforeEach(() => { ctx.query = { @@ -101,4 +141,41 @@ describe('PrometheusDatasource', () => { }); }); }); + + describe('Prometheus regular escaping', function() { + it('should not escape simple string', function() { + expect(prometheusRegularEscape('cryptodepression')).toEqual('cryptodepression'); + }); + it("should escape '", function() { + expect(prometheusRegularEscape("looking'glass")).toEqual("looking\\\\'glass"); + }); + it('should escape multiple characters', function() { + expect(prometheusRegularEscape("'looking'glass'")).toEqual("\\\\'looking\\\\'glass\\\\'"); + }); + }); + + describe('Prometheus regexes escaping', function() { + it('should not escape simple string', function() { + expect(prometheusSpecialRegexEscape('cryptodepression')).toEqual('cryptodepression'); + }); + it('should escape $^*+?.()\\', function() { + expect(prometheusSpecialRegexEscape("looking'glass")).toEqual("looking\\\\'glass"); + expect(prometheusSpecialRegexEscape('looking{glass')).toEqual('looking\\\\{glass'); + expect(prometheusSpecialRegexEscape('looking}glass')).toEqual('looking\\\\}glass'); + expect(prometheusSpecialRegexEscape('looking[glass')).toEqual('looking\\\\[glass'); + expect(prometheusSpecialRegexEscape('looking]glass')).toEqual('looking\\\\]glass'); + expect(prometheusSpecialRegexEscape('looking$glass')).toEqual('looking\\\\$glass'); + expect(prometheusSpecialRegexEscape('looking^glass')).toEqual('looking\\\\^glass'); + expect(prometheusSpecialRegexEscape('looking*glass')).toEqual('looking\\\\*glass'); + expect(prometheusSpecialRegexEscape('looking+glass')).toEqual('looking\\\\+glass'); + expect(prometheusSpecialRegexEscape('looking?glass')).toEqual('looking\\\\?glass'); + expect(prometheusSpecialRegexEscape('looking.glass')).toEqual('looking\\\\.glass'); + expect(prometheusSpecialRegexEscape('looking(glass')).toEqual('looking\\\\(glass'); + expect(prometheusSpecialRegexEscape('looking)glass')).toEqual('looking\\\\)glass'); + expect(prometheusSpecialRegexEscape('looking\\glass')).toEqual('looking\\\\\\\\glass'); + }); + it('should escape multiple special characters', function() { + expect(prometheusSpecialRegexEscape('+looking$glass?')).toEqual('\\\\+looking\\\\$glass\\\\?'); + }); + }); }); diff --git a/public/app/plugins/datasource/prometheus/specs/metric_find_query.jest.ts b/public/app/plugins/datasource/prometheus/specs/metric_find_query.jest.ts new file mode 100644 index 00000000000..88f6830cd31 --- /dev/null +++ b/public/app/plugins/datasource/prometheus/specs/metric_find_query.jest.ts @@ -0,0 +1,205 @@ +import moment from 'moment'; +import { PrometheusDatasource } from '../datasource'; +import PrometheusMetricFindQuery from '../metric_find_query'; +import q from 'q'; + +describe('PrometheusMetricFindQuery', function() { + let instanceSettings = { + url: 'proxied', + directUrl: 'direct', + user: 'test', + password: 'mupp', + jsonData: { httpMethod: 'GET' }, + }; + const raw = { + from: moment.utc('2018-04-25 10:00'), + to: moment.utc('2018-04-25 11:00'), + }; + let ctx: any = { + backendSrvMock: { + datasourceRequest: jest.fn(() => 
Promise.resolve({})), + }, + templateSrvMock: { + replace: a => a, + }, + timeSrvMock: { + timeRange: () => ({ + from: raw.from, + to: raw.to, + raw: raw, + }), + }, + }; + + ctx.setupMetricFindQuery = (data: any) => { + ctx.backendSrvMock.datasourceRequest.mockReturnValue(Promise.resolve({ status: 'success', data: data.response })); + return new PrometheusMetricFindQuery(ctx.ds, data.query, ctx.timeSrvMock); + }; + + beforeEach(() => { + ctx.backendSrvMock.datasourceRequest.mockReset(); + ctx.ds = new PrometheusDatasource(instanceSettings, q, ctx.backendSrvMock, ctx.templateSrvMock, ctx.timeSrvMock); + }); + + describe('When performing metricFindQuery', () => { + it('label_values(resource) should generate label search query', async () => { + const query = ctx.setupMetricFindQuery({ + query: 'label_values(resource)', + response: { + data: ['value1', 'value2', 'value3'], + }, + }); + const results = await query.process(); + + expect(results).toHaveLength(3); + expect(ctx.backendSrvMock.datasourceRequest).toHaveBeenCalledTimes(1); + expect(ctx.backendSrvMock.datasourceRequest).toHaveBeenCalledWith({ + method: 'GET', + url: 'proxied/api/v1/label/resource/values', + silent: true, + }); + }); + + it('label_values(metric, resource) should generate series query with correct time', async () => { + const query = ctx.setupMetricFindQuery({ + query: 'label_values(metric, resource)', + response: { + data: [ + { __name__: 'metric', resource: 'value1' }, + { __name__: 'metric', resource: 'value2' }, + { __name__: 'metric', resource: 'value3' }, + ], + }, + }); + const results = await query.process(); + + expect(results).toHaveLength(3); + expect(ctx.backendSrvMock.datasourceRequest).toHaveBeenCalledTimes(1); + expect(ctx.backendSrvMock.datasourceRequest).toHaveBeenCalledWith({ + method: 'GET', + url: `proxied/api/v1/series?match[]=metric&start=${raw.from.unix()}&end=${raw.to.unix()}`, + silent: true, + }); + }); + + it('label_values(metric{label1="foo", label2="bar", label3="baz"}, resource) should generate series query with correct time', async () => { + const query = ctx.setupMetricFindQuery({ + query: 'label_values(metric{label1="foo", label2="bar", label3="baz"}, resource)', + response: { + data: [ + { __name__: 'metric', resource: 'value1' }, + { __name__: 'metric', resource: 'value2' }, + { __name__: 'metric', resource: 'value3' }, + ], + }, + }); + const results = await query.process(); + + expect(results).toHaveLength(3); + expect(ctx.backendSrvMock.datasourceRequest).toHaveBeenCalledTimes(1); + expect(ctx.backendSrvMock.datasourceRequest).toHaveBeenCalledWith({ + method: 'GET', + url: `proxied/api/v1/series?match[]=${encodeURIComponent( + 'metric{label1="foo", label2="bar", label3="baz"}' + )}&start=${raw.from.unix()}&end=${raw.to.unix()}`, + silent: true, + }); + }); + + it('label_values(metric, resource) result should not contain empty string', async () => { + const query = ctx.setupMetricFindQuery({ + query: 'label_values(metric, resource)', + response: { + data: [ + { __name__: 'metric', resource: 'value1' }, + { __name__: 'metric', resource: 'value2' }, + { __name__: 'metric', resource: '' }, + ], + }, + }); + const results = await query.process(); + + expect(results).toHaveLength(2); + expect(results[0].text).toBe('value1'); + expect(results[1].text).toBe('value2'); + expect(ctx.backendSrvMock.datasourceRequest).toHaveBeenCalledTimes(1); + expect(ctx.backendSrvMock.datasourceRequest).toHaveBeenCalledWith({ + method: 'GET', + url: 
`proxied/api/v1/series?match[]=metric&start=${raw.from.unix()}&end=${raw.to.unix()}`, + silent: true, + }); + }); + + it('metrics(metric.*) should generate metric name query', async () => { + const query = ctx.setupMetricFindQuery({ + query: 'metrics(metric.*)', + response: { + data: ['metric1', 'metric2', 'metric3', 'nomatch'], + }, + }); + const results = await query.process(); + + expect(results).toHaveLength(3); + expect(ctx.backendSrvMock.datasourceRequest).toHaveBeenCalledTimes(1); + expect(ctx.backendSrvMock.datasourceRequest).toHaveBeenCalledWith({ + method: 'GET', + url: 'proxied/api/v1/label/__name__/values', + silent: true, + }); + }); + + it('query_result(metric) should generate metric name query', async () => { + const query = ctx.setupMetricFindQuery({ + query: 'query_result(metric)', + response: { + data: { + resultType: 'vector', + result: [ + { + metric: { __name__: 'metric', job: 'testjob' }, + value: [1443454528.0, '3846'], + }, + ], + }, + }, + }); + const results = await query.process(); + + expect(results).toHaveLength(1); + expect(results[0].text).toBe('metric{job="testjob"} 3846 1443454528000'); + expect(ctx.backendSrvMock.datasourceRequest).toHaveBeenCalledTimes(1); + expect(ctx.backendSrvMock.datasourceRequest).toHaveBeenCalledWith({ + method: 'GET', + url: `proxied/api/v1/query?query=metric&time=${raw.to.unix()}`, + requestId: undefined, + }); + }); + + it('up{job="job1"} should fall back to a generated series query', async () => { + const query = ctx.setupMetricFindQuery({ + query: 'up{job="job1"}', + response: { + data: [ + { __name__: 'up', instance: '127.0.0.1:1234', job: 'job1' }, + { __name__: 'up', instance: '127.0.0.1:5678', job: 'job1' }, + { __name__: 'up', instance: '127.0.0.1:9102', job: 'job1' }, + ], + }, + }); + const results = await query.process(); + + expect(results).toHaveLength(3); + expect(results[0].text).toBe('up{instance="127.0.0.1:1234",job="job1"}'); + expect(results[1].text).toBe('up{instance="127.0.0.1:5678",job="job1"}'); + expect(results[2].text).toBe('up{instance="127.0.0.1:9102",job="job1"}'); + expect(ctx.backendSrvMock.datasourceRequest).toHaveBeenCalledTimes(1); + expect(ctx.backendSrvMock.datasourceRequest).toHaveBeenCalledWith({ + method: 'GET', + url: `proxied/api/v1/series?match[]=${encodeURIComponent( + 'up{job="job1"}' + )}&start=${raw.from.unix()}&end=${raw.to.unix()}`, + silent: true, + }); + }); + }); +}); diff --git a/public/app/plugins/datasource/prometheus/specs/metric_find_query_specs.ts b/public/app/plugins/datasource/prometheus/specs/metric_find_query_specs.ts deleted file mode 100644 index e5d7aa81210..00000000000 --- a/public/app/plugins/datasource/prometheus/specs/metric_find_query_specs.ts +++ /dev/null @@ -1,181 +0,0 @@ -import { describe, beforeEach, it, expect, angularMocks } from 'test/lib/common'; - -import moment from 'moment'; -import helpers from 'test/specs/helpers'; -import { PrometheusDatasource } from '../datasource'; -import PrometheusMetricFindQuery from '../metric_find_query'; - -describe('PrometheusMetricFindQuery', function() { - var ctx = new helpers.ServiceTestContext(); - var instanceSettings = { - url: 'proxied', - directUrl: 'direct', - user: 'test', - password: 'mupp', - jsonData: { httpMethod: 'GET' }, - }; - - beforeEach(angularMocks.module('grafana.core')); - beforeEach(angularMocks.module('grafana.services')); - beforeEach( - angularMocks.inject(function($q, $rootScope, $httpBackend, $injector) { - ctx.$q = $q; - ctx.$httpBackend = $httpBackend; - ctx.$rootScope = $rootScope; - ctx.ds
= $injector.instantiate(PrometheusDatasource, { - instanceSettings: instanceSettings, - }); - $httpBackend.when('GET', /\.html$/).respond(''); - }) - ); - - describe('When performing metricFindQuery', function() { - var results; - var response; - it('label_values(resource) should generate label search query', function() { - response = { - status: 'success', - data: ['value1', 'value2', 'value3'], - }; - ctx.$httpBackend.expect('GET', 'proxied/api/v1/label/resource/values').respond(response); - var pm = new PrometheusMetricFindQuery(ctx.ds, 'label_values(resource)', ctx.timeSrv); - pm.process().then(function(data) { - results = data; - }); - ctx.$httpBackend.flush(); - ctx.$rootScope.$apply(); - expect(results.length).to.be(3); - }); - it('label_values(metric, resource) should generate series query', function() { - response = { - status: 'success', - data: [ - { __name__: 'metric', resource: 'value1' }, - { __name__: 'metric', resource: 'value2' }, - { __name__: 'metric', resource: 'value3' }, - ], - }; - ctx.$httpBackend.expect('GET', /proxied\/api\/v1\/series\?match\[\]=metric&start=.*&end=.*/).respond(response); - var pm = new PrometheusMetricFindQuery(ctx.ds, 'label_values(metric, resource)', ctx.timeSrv); - pm.process().then(function(data) { - results = data; - }); - ctx.$httpBackend.flush(); - ctx.$rootScope.$apply(); - expect(results.length).to.be(3); - }); - it('label_values(metric, resource) should pass correct time', function() { - ctx.timeSrv.setTime({ - from: moment.utc('2011-01-01'), - to: moment.utc('2015-01-01'), - }); - ctx.$httpBackend - .expect('GET', /proxied\/api\/v1\/series\?match\[\]=metric&start=1293840000&end=1420070400/) - .respond(response); - var pm = new PrometheusMetricFindQuery(ctx.ds, 'label_values(metric, resource)', ctx.timeSrv); - pm.process().then(function(data) { - results = data; - }); - ctx.$httpBackend.flush(); - ctx.$rootScope.$apply(); - }); - it('label_values(metric{label1="foo", label2="bar", label3="baz"}, resource) should generate series query', function() { - response = { - status: 'success', - data: [ - { __name__: 'metric', resource: 'value1' }, - { __name__: 'metric', resource: 'value2' }, - { __name__: 'metric', resource: 'value3' }, - ], - }; - ctx.$httpBackend.expect('GET', /proxied\/api\/v1\/series\?match\[\]=metric&start=.*&end=.*/).respond(response); - var pm = new PrometheusMetricFindQuery(ctx.ds, 'label_values(metric, resource)', ctx.timeSrv); - pm.process().then(function(data) { - results = data; - }); - ctx.$httpBackend.flush(); - ctx.$rootScope.$apply(); - expect(results.length).to.be(3); - }); - it('label_values(metric, resource) result should not contain empty string', function() { - response = { - status: 'success', - data: [ - { __name__: 'metric', resource: 'value1' }, - { __name__: 'metric', resource: 'value2' }, - { __name__: 'metric', resource: '' }, - ], - }; - ctx.$httpBackend.expect('GET', /proxied\/api\/v1\/series\?match\[\]=metric&start=.*&end=.*/).respond(response); - var pm = new PrometheusMetricFindQuery(ctx.ds, 'label_values(metric, resource)', ctx.timeSrv); - pm.process().then(function(data) { - results = data; - }); - ctx.$httpBackend.flush(); - ctx.$rootScope.$apply(); - expect(results.length).to.be(2); - expect(results[0].text).to.be('value1'); - expect(results[1].text).to.be('value2'); - }); - it('metrics(metric.*) should generate metric name query', function() { - response = { - status: 'success', - data: ['metric1', 'metric2', 'metric3', 'nomatch'], - }; - ctx.$httpBackend.expect('GET', 
'proxied/api/v1/label/__name__/values').respond(response); - var pm = new PrometheusMetricFindQuery(ctx.ds, 'metrics(metric.*)', ctx.timeSrv); - pm.process().then(function(data) { - results = data; - }); - ctx.$httpBackend.flush(); - ctx.$rootScope.$apply(); - expect(results.length).to.be(3); - }); - it('query_result(metric) should generate metric name query', function() { - response = { - status: 'success', - data: { - resultType: 'vector', - result: [ - { - metric: { __name__: 'metric', job: 'testjob' }, - value: [1443454528.0, '3846'], - }, - ], - }, - }; - ctx.$httpBackend.expect('GET', /proxied\/api\/v1\/query\?query=metric&time=.*/).respond(response); - var pm = new PrometheusMetricFindQuery(ctx.ds, 'query_result(metric)', ctx.timeSrv); - pm.process().then(function(data) { - results = data; - }); - ctx.$httpBackend.flush(); - ctx.$rootScope.$apply(); - expect(results.length).to.be(1); - expect(results[0].text).to.be('metric{job="testjob"} 3846 1443454528000'); - }); - }); - - describe('When performing performSuggestQuery', function() { - var results; - var response; - it('cache response', function() { - response = { - status: 'success', - data: ['value1', 'value2', 'value3'], - }; - ctx.$httpBackend.expect('GET', 'proxied/api/v1/label/__name__/values').respond(response); - ctx.ds.performSuggestQuery('value', true).then(function(data) { - results = data; - }); - ctx.$httpBackend.flush(); - ctx.$rootScope.$apply(); - expect(results.length).to.be(3); - ctx.ds.performSuggestQuery('value', true).then(function(data) { - // get from cache, no need to flush - results = data; - expect(results.length).to.be(3); - }); - }); - }); -}); diff --git a/public/app/plugins/datasource/prometheus/specs/result_transformer.jest.ts b/public/app/plugins/datasource/prometheus/specs/result_transformer.jest.ts index abcc46d7ea8..64b983fc8a7 100644 --- a/public/app/plugins/datasource/prometheus/specs/result_transformer.jest.ts +++ b/public/app/plugins/datasource/prometheus/specs/result_transformer.jest.ts @@ -47,6 +47,18 @@ describe('Prometheus Result Transformer', () => { { text: 'Value' }, ]); }); + + it('should column title include refId if response count is more than 2', () => { + var table = ctx.resultTransformer.transformMetricDataToTable(response.data.result, 2, "B"); + expect(table.type).toBe('table'); + expect(table.columns).toEqual([ + { text: 'Time', type: 'time' }, + { text: '__name__' }, + { text: 'instance' }, + { text: 'job' }, + { text: 'Value #B' }, + ]); + }); }); describe('When resultFormat is table and instant = true', () => { diff --git a/public/app/plugins/panel/dashlist/module.html b/public/app/plugins/panel/dashlist/module.html index 8fa3e7ef71f..fdba0c79f35 100644 --- a/public/app/plugins/panel/dashlist/module.html +++ b/public/app/plugins/panel/dashlist/module.html @@ -1,17 +1,19 @@ -
    -
    -
    - {{group.header}} -
    -
    - - - {{dash.title}} - - - - - +
    +
    +
    +
    + {{group.header}} +
    +
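The graph.ts hunk below expands the y-axis tick range so the highest histogram bucket is not clipped at the top of the axis. A minimal sketch of the idea in standalone TypeScript follows; the expandTicks helper is hypothetical (the real logic is inline in graphDirective) and is shown only to make the 1.01 scaling concrete:

function expandTicks(min: number, max: number, tickStep: number): number[] {
  // Round min down to a whole tick step, as graph.ts already did.
  const expandedMin = Math.floor(min / tickStep) * tickStep;
  // Scaling max by 1.01 (101%) before rounding up leaves headroom above the
  // last bar, so a bucket that ends exactly on a tick still gets drawn fully.
  const expandedMax = Math.ceil((max * 1.01) / tickStep) * tickStep;

  const ticks: number[] = [];
  for (let i = expandedMin; i <= expandedMax; i += tickStep) {
    ticks.push(i);
  }
  return ticks;
}

// Example: a bucket topping out exactly at 100 now gets a tick at 110 above it.
// expandTicks(0, 100, 10) -> [0, 10, 20, 30, 40, 50, 60, 70, 80, 90, 100, 110]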
    diff --git a/public/app/plugins/panel/graph/graph.ts b/public/app/plugins/panel/graph/graph.ts index 8a2aea8c4c2..2de53b6dce0 100755 --- a/public/app/plugins/panel/graph/graph.ts +++ b/public/app/plugins/panel/graph/graph.ts @@ -443,7 +443,8 @@ function graphDirective(timeSrv, popoverSrv, contextSrv) { // Expand ticks for pretty view min = Math.floor(min / tickStep) * tickStep; - max = Math.ceil(max / tickStep) * tickStep; + // 1.01 is 101% - ensure we have enough space for last bar + max = Math.ceil(max * 1.01 / tickStep) * tickStep; ticks = []; for (let i = min; i <= max; i += tickStep) { @@ -634,6 +635,9 @@ function graphDirective(timeSrv, popoverSrv, contextSrv) { function configureAxisMode(axis, format) { axis.tickFormatter = function(val, axis) { + if (!kbn.valueFormats[format]) { + throw new Error(`Unit '${format}' is not supported`); + } return kbn.valueFormats[format](val, axis.tickDecimals, axis.scaledDecimals); }; } diff --git a/public/app/plugins/panel/graph/graph_tooltip.js b/public/app/plugins/panel/graph/graph_tooltip.js deleted file mode 100644 index 89197717e42..00000000000 --- a/public/app/plugins/panel/graph/graph_tooltip.js +++ /dev/null @@ -1,292 +0,0 @@ -define([ - 'jquery', - 'app/core/core', -], -function ($, core) { - 'use strict'; - - var appEvents = core.appEvents; - - function GraphTooltip(elem, dashboard, scope, getSeriesFn) { - var self = this; - var ctrl = scope.ctrl; - var panel = ctrl.panel; - - var $tooltip = $('
    '); - - this.destroy = function() { - $tooltip.remove(); - }; - - this.findHoverIndexFromDataPoints = function(posX, series, last) { - var ps = series.datapoints.pointsize; - var initial = last*ps; - var len = series.datapoints.points.length; - for (var j = initial; j < len; j += ps) { - // Special case of a non stepped line, highlight the very last point just before a null point - if ((!series.lines.steps && series.datapoints.points[initial] != null && series.datapoints.points[j] == null) - //normal case - || series.datapoints.points[j] > posX) { - return Math.max(j - ps, 0)/ps; - } - } - return j/ps - 1; - }; - - this.findHoverIndexFromData = function(posX, series) { - var lower = 0; - var upper = series.data.length - 1; - var middle; - while (true) { - if (lower > upper) { - return Math.max(upper, 0); - } - middle = Math.floor((lower + upper) / 2); - if (series.data[middle][0] === posX) { - return middle; - } else if (series.data[middle][0] < posX) { - lower = middle + 1; - } else { - upper = middle - 1; - } - } - }; - - this.renderAndShow = function(absoluteTime, innerHtml, pos, xMode) { - if (xMode === 'time') { - innerHtml = '
    '+ absoluteTime + '
    ' + innerHtml; - } - $tooltip.html(innerHtml).place_tt(pos.pageX + 20, pos.pageY); - }; - - this.getMultiSeriesPlotHoverInfo = function(seriesList, pos) { - var value, i, series, hoverIndex, hoverDistance, pointTime, yaxis; - // 3 sub-arrays, 1st for hidden series, 2nd for left yaxis, 3rd for right yaxis. - var results = [[],[],[]]; - - //now we know the current X (j) position for X and Y values - var last_value = 0; //needed for stacked values - - var minDistance, minTime; - - for (i = 0; i < seriesList.length; i++) { - series = seriesList[i]; - - if (!series.data.length || (panel.legend.hideEmpty && series.allIsNull)) { - // Init value so that it does not brake series sorting - results[0].push({ hidden: true, value: 0 }); - continue; - } - - if (!series.data.length || (panel.legend.hideZero && series.allIsZero)) { - // Init value so that it does not brake series sorting - results[0].push({ hidden: true, value: 0 }); - continue; - } - - hoverIndex = this.findHoverIndexFromData(pos.x, series); - hoverDistance = pos.x - series.data[hoverIndex][0]; - pointTime = series.data[hoverIndex][0]; - - // Take the closest point before the cursor, or if it does not exist, the closest after - if (! minDistance - || (hoverDistance >=0 && (hoverDistance < minDistance || minDistance < 0)) - || (hoverDistance < 0 && hoverDistance > minDistance)) { - minDistance = hoverDistance; - minTime = pointTime; - } - - if (series.stack) { - if (panel.tooltip.value_type === 'individual') { - value = series.data[hoverIndex][1]; - } else if (!series.stack) { - value = series.data[hoverIndex][1]; - } else { - last_value += series.data[hoverIndex][1]; - value = last_value; - } - } else { - value = series.data[hoverIndex][1]; - } - - // Highlighting multiple Points depending on the plot type - if (series.lines.steps || series.stack) { - // stacked and steppedLine plots can have series with different length. - // Stacked series can increase its length on each new stacked serie if null points found, - // to speed the index search we begin always on the last found hoverIndex. - hoverIndex = this.findHoverIndexFromDataPoints(pos.x, series, hoverIndex); - } - - // Be sure we have a yaxis so that it does not brake series sorting - yaxis = 0; - if (series.yaxis) { - yaxis = series.yaxis.n; - } - - results[yaxis].push({ - value: value, - hoverIndex: hoverIndex, - color: series.color, - label: series.aliasEscaped, - time: pointTime, - distance: hoverDistance, - index: i - }); - } - - // Contat the 3 sub-arrays - results = results[0].concat(results[1],results[2]); - - // Time of the point closer to pointer - results.time = minTime; - - return results; - }; - - elem.mouseleave(function () { - if (panel.tooltip.shared) { - var plot = elem.data().plot; - if (plot) { - $tooltip.detach(); - plot.unhighlight(); - } - } - appEvents.emit('graph-hover-clear'); - }); - - elem.bind("plothover", function (event, pos, item) { - self.show(pos, item); - - // broadcast to other graph panels that we are hovering! 
- pos.panelRelY = (pos.pageY - elem.offset().top) / elem.height(); - appEvents.emit('graph-hover', {pos: pos, panel: panel}); - }); - - elem.bind("plotclick", function (event, pos, item) { - appEvents.emit('graph-click', {pos: pos, panel: panel, item: item}); - }); - - this.clear = function(plot) { - $tooltip.detach(); - plot.clearCrosshair(); - plot.unhighlight(); - }; - - this.show = function(pos, item) { - var plot = elem.data().plot; - var plotData = plot.getData(); - var xAxes = plot.getXAxes(); - var xMode = xAxes[0].options.mode; - var seriesList = getSeriesFn(); - var allSeriesMode = panel.tooltip.shared; - var group, value, absoluteTime, hoverInfo, i, series, seriesHtml, tooltipFormat; - - // if panelRelY is defined another panel wants us to show a tooltip - // get pageX from position on x axis and pageY from relative position in original panel - if (pos.panelRelY) { - var pointOffset = plot.pointOffset({x: pos.x}); - if (Number.isNaN(pointOffset.left) || pointOffset.left < 0 || pointOffset.left > elem.width()) { - self.clear(plot); - return; - } - pos.pageX = elem.offset().left + pointOffset.left; - pos.pageY = elem.offset().top + elem.height() * pos.panelRelY; - var isVisible = pos.pageY >= $(window).scrollTop() && pos.pageY <= $(window).innerHeight() + $(window).scrollTop(); - if (!isVisible) { - self.clear(plot); - return; - } - plot.setCrosshair(pos); - allSeriesMode = true; - - if (dashboard.sharedCrosshairModeOnly()) { - // if only crosshair mode we are done - return; - } - } - - if (seriesList.length === 0) { - return; - } - - if (seriesList[0].hasMsResolution) { - tooltipFormat = 'YYYY-MM-DD HH:mm:ss.SSS'; - } else { - tooltipFormat = 'YYYY-MM-DD HH:mm:ss'; - } - - if (allSeriesMode) { - plot.unhighlight(); - - var seriesHoverInfo = self.getMultiSeriesPlotHoverInfo(plotData, pos); - - seriesHtml = ''; - - absoluteTime = dashboard.formatDate(seriesHoverInfo.time, tooltipFormat); - - // Dynamically reorder the hovercard for the current time point if the - // option is enabled. - if (panel.tooltip.sort === 2) { - seriesHoverInfo.sort(function(a, b) { - return b.value - a.value; - }); - } else if (panel.tooltip.sort === 1) { - seriesHoverInfo.sort(function(a, b) { - return a.value - b.value; - }); - } - - for (i = 0; i < seriesHoverInfo.length; i++) { - hoverInfo = seriesHoverInfo[i]; - - if (hoverInfo.hidden) { - continue; - } - - var highlightClass = ''; - if (item && hoverInfo.index === item.seriesIndex) { - highlightClass = 'graph-tooltip-list-item--highlight'; - } - - series = seriesList[hoverInfo.index]; - - value = series.formatValue(hoverInfo.value); - - seriesHtml += '
    '; - seriesHtml += ' ' + hoverInfo.label + ':
    '; - seriesHtml += '
    ' + value + '
    '; - plot.highlight(hoverInfo.index, hoverInfo.hoverIndex); - } - - self.renderAndShow(absoluteTime, seriesHtml, pos, xMode); - } - // single series tooltip - else if (item) { - series = seriesList[item.seriesIndex]; - group = '
    '; - group += ' ' + series.aliasEscaped + ':
    '; - - if (panel.stack && panel.tooltip.value_type === 'individual') { - value = item.datapoint[1] - item.datapoint[2]; - } - else { - value = item.datapoint[1]; - } - - value = series.formatValue(value); - - absoluteTime = dashboard.formatDate(item.datapoint[0], tooltipFormat); - - group += '
    ' + value + '
    '; - - self.renderAndShow(absoluteTime, group, pos, xMode); - } - // no hit - else { - $tooltip.detach(); - } - }; - } - - return GraphTooltip; -}); diff --git a/public/app/plugins/panel/graph/graph_tooltip.ts b/public/app/plugins/panel/graph/graph_tooltip.ts new file mode 100644 index 00000000000..509d15b8a25 --- /dev/null +++ b/public/app/plugins/panel/graph/graph_tooltip.ts @@ -0,0 +1,289 @@ +import $ from 'jquery'; +import { appEvents } from 'app/core/core'; + +export default function GraphTooltip(elem, dashboard, scope, getSeriesFn) { + let self = this; + let ctrl = scope.ctrl; + let panel = ctrl.panel; + + let $tooltip = $('
    '); + + this.destroy = function() { + $tooltip.remove(); + }; + + this.findHoverIndexFromDataPoints = function(posX, series, last) { + let ps = series.datapoints.pointsize; + let initial = last * ps; + let len = series.datapoints.points.length; + let j; + for (j = initial; j < len; j += ps) { + // Special case of a non stepped line, highlight the very last point just before a null point + if ( + (!series.lines.steps && series.datapoints.points[initial] != null && series.datapoints.points[j] == null) || + //normal case + series.datapoints.points[j] > posX + ) { + return Math.max(j - ps, 0) / ps; + } + } + return j / ps - 1; + }; + + this.findHoverIndexFromData = function(posX, series) { + let lower = 0; + let upper = series.data.length - 1; + let middle; + while (true) { + if (lower > upper) { + return Math.max(upper, 0); + } + middle = Math.floor((lower + upper) / 2); + if (series.data[middle][0] === posX) { + return middle; + } else if (series.data[middle][0] < posX) { + lower = middle + 1; + } else { + upper = middle - 1; + } + } + }; + + this.renderAndShow = function(absoluteTime, innerHtml, pos, xMode) { + if (xMode === 'time') { + innerHtml = '
    ' + absoluteTime + '
' + innerHtml; + } + $tooltip.html(innerHtml).place_tt(pos.pageX + 20, pos.pageY); + }; + + this.getMultiSeriesPlotHoverInfo = function(seriesList, pos) { + let value, i, series, hoverIndex, hoverDistance, pointTime, yaxis; + // 3 sub-arrays, 1st for hidden series, 2nd for left yaxis, 3rd for right yaxis. + let results: any = [[], [], []]; + + //now we know the current X (j) position for X and Y values + let last_value = 0; //needed for stacked values + + let minDistance, minTime; + + for (i = 0; i < seriesList.length; i++) { + series = seriesList[i]; + + if (!series.data.length || (panel.legend.hideEmpty && series.allIsNull)) { + // Init value so that it does not break series sorting + results[0].push({ hidden: true, value: 0 }); + continue; + } + + if (!series.data.length || (panel.legend.hideZero && series.allIsZero)) { + // Init value so that it does not break series sorting + results[0].push({ hidden: true, value: 0 }); + continue; + } + + hoverIndex = this.findHoverIndexFromData(pos.x, series); + hoverDistance = pos.x - series.data[hoverIndex][0]; + pointTime = series.data[hoverIndex][0]; + + // Take the closest point before the cursor, or if it does not exist, the closest after + if ( + !minDistance || + (hoverDistance >= 0 && (hoverDistance < minDistance || minDistance < 0)) || + (hoverDistance < 0 && hoverDistance > minDistance) + ) { + minDistance = hoverDistance; + minTime = pointTime; + } + + if (series.stack) { + if (panel.tooltip.value_type === 'individual') { + value = series.data[hoverIndex][1]; + } else if (!series.stack) { + value = series.data[hoverIndex][1]; + } else { + last_value += series.data[hoverIndex][1]; + value = last_value; + } + } else { + value = series.data[hoverIndex][1]; + } + + // Highlighting multiple points depending on the plot type + if (series.lines.steps || series.stack) { + // stacked and steppedLine plots can have series with different lengths. + // Stacked series can increase their length with each new stacked series if null points are found, + // so to speed up the index search we always begin at the last found hoverIndex. + hoverIndex = this.findHoverIndexFromDataPoints(pos.x, series, hoverIndex); + } + + // Be sure we have a yaxis so that it does not break series sorting + yaxis = 0; + if (series.yaxis) { + yaxis = series.yaxis.n; + } + + results[yaxis].push({ + value: value, + hoverIndex: hoverIndex, + color: series.color, + label: series.aliasEscaped, + time: pointTime, + distance: hoverDistance, + index: i, + }); + } + + // Concat the 3 sub-arrays + results = results[0].concat(results[1], results[2]); + + // Time of the point closest to the pointer + results.time = minTime; + + return results; + }; + + elem.mouseleave(function() { + if (panel.tooltip.shared) { + let plot = elem.data().plot; + if (plot) { + $tooltip.detach(); + plot.unhighlight(); + } + } + appEvents.emit('graph-hover-clear'); + }); + + elem.bind('plothover', function(event, pos, item) { + self.show(pos, item); + + // broadcast to other graph panels that we are hovering!
+ pos.panelRelY = (pos.pageY - elem.offset().top) / elem.height(); + appEvents.emit('graph-hover', { pos: pos, panel: panel }); + }); + + elem.bind('plotclick', function(event, pos, item) { + appEvents.emit('graph-click', { pos: pos, panel: panel, item: item }); + }); + + this.clear = function(plot) { + $tooltip.detach(); + plot.clearCrosshair(); + plot.unhighlight(); + }; + + this.show = function(pos, item) { + let plot = elem.data().plot; + let plotData = plot.getData(); + let xAxes = plot.getXAxes(); + let xMode = xAxes[0].options.mode; + let seriesList = getSeriesFn(); + let allSeriesMode = panel.tooltip.shared; + let group, value, absoluteTime, hoverInfo, i, series, seriesHtml, tooltipFormat; + + // if panelRelY is defined another panel wants us to show a tooltip + // get pageX from position on x axis and pageY from relative position in original panel + if (pos.panelRelY) { + let pointOffset = plot.pointOffset({ x: pos.x }); + if (Number.isNaN(pointOffset.left) || pointOffset.left < 0 || pointOffset.left > elem.width()) { + self.clear(plot); + return; + } + pos.pageX = elem.offset().left + pointOffset.left; + pos.pageY = elem.offset().top + elem.height() * pos.panelRelY; + let isVisible = + pos.pageY >= $(window).scrollTop() && pos.pageY <= $(window).innerHeight() + $(window).scrollTop(); + if (!isVisible) { + self.clear(plot); + return; + } + plot.setCrosshair(pos); + allSeriesMode = true; + + if (dashboard.sharedCrosshairModeOnly()) { + // if only crosshair mode we are done + return; + } + } + + if (seriesList.length === 0) { + return; + } + + if (seriesList[0].hasMsResolution) { + tooltipFormat = 'YYYY-MM-DD HH:mm:ss.SSS'; + } else { + tooltipFormat = 'YYYY-MM-DD HH:mm:ss'; + } + + if (allSeriesMode) { + plot.unhighlight(); + + let seriesHoverInfo = self.getMultiSeriesPlotHoverInfo(plotData, pos); + + seriesHtml = ''; + + absoluteTime = dashboard.formatDate(seriesHoverInfo.time, tooltipFormat); + + // Dynamically reorder the hovercard for the current time point if the + // option is enabled. + if (panel.tooltip.sort === 2) { + seriesHoverInfo.sort(function(a, b) { + return b.value - a.value; + }); + } else if (panel.tooltip.sort === 1) { + seriesHoverInfo.sort(function(a, b) { + return a.value - b.value; + }); + } + + for (i = 0; i < seriesHoverInfo.length; i++) { + hoverInfo = seriesHoverInfo[i]; + + if (hoverInfo.hidden) { + continue; + } + + let highlightClass = ''; + if (item && hoverInfo.index === item.seriesIndex) { + highlightClass = 'graph-tooltip-list-item--highlight'; + } + + series = seriesList[hoverInfo.index]; + + value = series.formatValue(hoverInfo.value); + + seriesHtml += + '
    '; + seriesHtml += + ' ' + hoverInfo.label + ':
    '; + seriesHtml += '
    ' + value + '
    '; + plot.highlight(hoverInfo.index, hoverInfo.hoverIndex); + } + + self.renderAndShow(absoluteTime, seriesHtml, pos, xMode); + } else if (item) { + // single series tooltip + series = seriesList[item.seriesIndex]; + group = '
    '; + group += + ' ' + series.aliasEscaped + ':
    '; + + if (panel.stack && panel.tooltip.value_type === 'individual') { + value = item.datapoint[1] - item.datapoint[2]; + } else { + value = item.datapoint[1]; + } + + value = series.formatValue(value); + + absoluteTime = dashboard.formatDate(item.datapoint[0], tooltipFormat); + + group += '
    ' + value + '
'; + + self.renderAndShow(absoluteTime, group, pos, xMode); + } else { + // no hit + $tooltip.detach(); + } + }; +} diff --git a/public/app/plugins/panel/graph/jquery.flot.events.js index 1aa79c5056f..3ea3ca8f330 100644 --- a/public/app/plugins/panel/graph/jquery.flot.events.js +++ b/public/app/plugins/panel/graph/jquery.flot.events.js @@ -52,14 +52,14 @@ function ($, _, angular, Drop) { var eventManager = plot.getOptions().events.manager; if (eventManager.editorOpen) { // update marker element to attach to (needed in case of legend on the right - // when there is a double render pass and the inital marker element is removed) + // when there is a double render pass and the initial marker element is removed) markerElementToAttachTo = element; return; } // mark as openend eventManager.editorOpened(); - // set marker elment to attache to + // set marker element to attach to markerElementToAttachTo = element; // wait for element to be attached and positioned diff --git a/public/app/plugins/panel/graph/legend.ts index d1186ae0b1e..6b6c89444dc 100644 --- a/public/app/plugins/panel/graph/legend.ts +++ b/public/app/plugins/panel/graph/legend.ts @@ -1,7 +1,7 @@ import angular from 'angular'; import _ from 'lodash'; import $ from 'jquery'; -import PerfectScrollbar from 'perfect-scrollbar'; +import baron from 'baron'; var module = angular.module('grafana.directives'); @@ -16,11 +16,10 @@ module.directive('graphLegend', function(popoverSrv, $timeout) { var i; var legendScrollbar; const legendRightDefaultWidth = 10; + let legendElem = elem.parent(); scope.$on('$destroy', function() { - if (legendScrollbar) { - legendScrollbar.destroy(); - } + destroyScrollbar(); }); ctrl.events.on('render-legend', () => { @@ -112,7 +111,7 @@ module.directive('graphLegend', function(popoverSrv, $timeout) { } function render() { - let legendWidth = elem.width(); + let legendWidth = legendElem.width(); if (!ctrl.panel.legend.show) { elem.empty(); firstRender = true; @@ -130,9 +129,12 @@ module.directive('graphLegend', function(popoverSrv, $timeout) { elem.empty(); - // Set min-width if side style and there is a value, otherwise remove the CSS propery - var width = panel.legend.rightSide && panel.legend.sideWidth ? panel.legend.sideWidth + 'px' : ''; - elem.css('min-width', width); + // Set min-width if side style and there is a value, otherwise remove the CSS property + // Set width so it works with IE11 + var width: any = panel.legend.rightSide && panel.legend.sideWidth ? panel.legend.sideWidth + 'px' : ''; + var ieWidth: any = panel.legend.rightSide && panel.legend.sideWidth ? panel.legend.sideWidth - 1 + 'px' : ''; + legendElem.css('min-width', width); + legendElem.css('width', ieWidth); elem.toggleClass('graph-legend-table', panel.legend.alignAsTable === true); @@ -238,8 +240,10 @@ module.directive('graphLegend', function(popoverSrv, $timeout) { tbodyElem.append(tableHeaderElem); tbodyElem.append(seriesElements); elem.append(tbodyElem); + tbodyElem.wrap('
    '); } else { - elem.append(seriesElements); + elem.append('
    '); + elem.find('.graph-legend-scroll').append(seriesElements); } if (!panel.legend.rightSide || (panel.legend.rightSide && legendWidth !== legendRightDefaultWidth)) { @@ -250,23 +254,45 @@ module.directive('graphLegend', function(popoverSrv, $timeout) { } function addScrollbar() { - const scrollbarOptions = { - // Number of pixels the content height can surpass the container height without enabling the scroll bar. - scrollYMarginOffset: 2, - suppressScrollX: true, - wheelPropagation: true, + const scrollRootClass = 'baron baron__root'; + const scrollerClass = 'baron__scroller'; + const scrollBarHTML = ` +
    +
    +
    + `; + + let scrollRoot = elem; + let scroller = elem.find('.graph-legend-scroll'); + + // clear existing scroll bar track to prevent duplication + scrollRoot.find('.baron__track').remove(); + + scrollRoot.addClass(scrollRootClass); + $(scrollBarHTML).appendTo(scrollRoot); + scroller.addClass(scrollerClass); + + let scrollbarParams = { + root: scrollRoot[0], + scroller: scroller[0], + bar: '.baron__bar', + track: '.baron__track', + barOnCls: '_scrollbar', + scrollingCls: '_scrolling', }; if (!legendScrollbar) { - legendScrollbar = new PerfectScrollbar(elem[0], scrollbarOptions); + legendScrollbar = baron(scrollbarParams); } else { - legendScrollbar.update(); + destroyScrollbar(); + legendScrollbar = baron(scrollbarParams); } + legendScrollbar.scroll(); } function destroyScrollbar() { if (legendScrollbar) { - legendScrollbar.destroy(); + legendScrollbar.dispose(); legendScrollbar = undefined; } } diff --git a/public/app/plugins/panel/graph/module.ts b/public/app/plugins/panel/graph/module.ts index 6cebbe65ab8..ef82fb395a5 100644 --- a/public/app/plugins/panel/graph/module.ts +++ b/public/app/plugins/panel/graph/module.ts @@ -235,7 +235,7 @@ class GraphCtrl extends MetricsPanelCtrl { } changeSeriesColor(series, color) { - series.color = color; + series.setColor(color); this.panel.aliasColors[series.alias] = series.color; this.render(); } diff --git a/public/app/plugins/panel/graph/series_overrides_ctrl.ts b/public/app/plugins/panel/graph/series_overrides_ctrl.ts index 703c4648716..ecf79a8a4fb 100644 --- a/public/app/plugins/panel/graph/series_overrides_ctrl.ts +++ b/public/app/plugins/panel/graph/series_overrides_ctrl.ts @@ -31,7 +31,7 @@ export class SeriesOverridesCtrl { $scope.override[item.propertyName] = subItem.value; - // automatically disable lines for this series and the fill bellow to series + // automatically disable lines for this series and the fill below to series // can be removed by the user if they still want lines if (item.propertyName === 'fillBelowTo') { $scope.override['lines'] = false; diff --git a/public/app/plugins/panel/graph/specs/tooltip_specs.ts b/public/app/plugins/panel/graph/specs/tooltip_specs.ts index c12697eadac..7dd5ed9b8a9 100644 --- a/public/app/plugins/panel/graph/specs/tooltip_specs.ts +++ b/public/app/plugins/panel/graph/specs/tooltip_specs.ts @@ -11,6 +11,7 @@ var scope = { var elem = $('
    '); var dashboard = {}; +var getSeriesFn; function describeSharedTooltip(desc, fn) { var ctx: any = {}; @@ -30,7 +31,7 @@ function describeSharedTooltip(desc, fn) { describe(desc, function() { beforeEach(function() { ctx.setupFn(); - var tooltip = new GraphTooltip(elem, dashboard, scope); + var tooltip = new GraphTooltip(elem, dashboard, scope, getSeriesFn); ctx.results = tooltip.getMultiSeriesPlotHoverInfo(ctx.data, ctx.pos); }); @@ -39,7 +40,7 @@ function describeSharedTooltip(desc, fn) { } describe('findHoverIndexFromData', function() { - var tooltip = new GraphTooltip(elem, dashboard, scope); + var tooltip = new GraphTooltip(elem, dashboard, scope, getSeriesFn); var series = { data: [[100, 0], [101, 0], [102, 0], [103, 0], [104, 0], [105, 0], [106, 0], [107, 0]], }; diff --git a/public/app/plugins/panel/graph/template.ts b/public/app/plugins/panel/graph/template.ts index 0b9eb8227df..c897327fe1a 100644 --- a/public/app/plugins/panel/graph/template.ts +++ b/public/app/plugins/panel/graph/template.ts @@ -3,7 +3,9 @@ var template = `
    -
    +
    +
    +
    `; diff --git a/public/app/plugins/panel/pluginlist/module.ts b/public/app/plugins/panel/pluginlist/module.ts index e97b1a8fbf9..acfa69b171c 100644 --- a/public/app/plugins/panel/pluginlist/module.ts +++ b/public/app/plugins/panel/pluginlist/module.ts @@ -12,7 +12,7 @@ class PluginListCtrl extends PanelCtrl { panelDefaults = {}; /** @ngInject */ - constructor($scope, $injector, private backendSrv, private $location) { + constructor($scope, $injector, private backendSrv) { super($scope, $injector); _.defaults(this.panel, this.panelDefaults); diff --git a/public/app/plugins/panel/singlestat/module.ts b/public/app/plugins/panel/singlestat/module.ts index 776033536dd..b1996d8ffc9 100644 --- a/public/app/plugins/panel/singlestat/module.ts +++ b/public/app/plugins/panel/singlestat/module.ts @@ -77,7 +77,7 @@ class SingleStatCtrl extends MetricsPanelCtrl { }; /** @ngInject */ - constructor($scope, $injector, private $location, private linkSrv) { + constructor($scope, $injector, private linkSrv) { super($scope, $injector); _.defaults(this.panel, this.panelDefaults); @@ -159,8 +159,8 @@ class SingleStatCtrl extends MetricsPanelCtrl { } setTableColumnToSensibleDefault(tableData) { - if (this.tableColumnOptions.length === 1) { - this.panel.tableColumn = this.tableColumnOptions[0]; + if (tableData.columns.length === 1) { + this.panel.tableColumn = tableData.columns[0].text; } else { this.panel.tableColumn = _.find(tableData.columns, col => { return col.type !== 'time'; @@ -308,7 +308,7 @@ class SingleStatCtrl extends MetricsPanelCtrl { let formatFunc = kbn.valueFormats[this.panel.format]; data.value = lastPoint[1]; data.valueRounded = data.value; - data.valueFormatted = formatFunc(data.value, 0, 0); + data.valueFormatted = formatFunc(data.value, this.dashboard.isTimezoneUtc()); } else { data.value = this.series[0].stats[this.panel.valueName]; data.flotpairs = this.series[0].flotpairs; diff --git a/public/app/plugins/panel/singlestat/specs/singlestat_specs.ts b/public/app/plugins/panel/singlestat/specs/singlestat_specs.ts index f80465052f9..217ec5ee04c 100644 --- a/public/app/plugins/panel/singlestat/specs/singlestat_specs.ts +++ b/public/app/plugins/panel/singlestat/specs/singlestat_specs.ts @@ -82,6 +82,19 @@ describe('SingleStatCtrl', function() { }); }); + singleStatScenario('showing last iso time instead of value (in UTC)', function(ctx) { + ctx.setup(function() { + ctx.data = [{ target: 'test.cpu1', datapoints: [[10, 12], [20, 1505634997920]] }]; + ctx.ctrl.panel.valueName = 'last_time'; + ctx.ctrl.panel.format = 'dateTimeAsIso'; + ctx.setIsUtc(true); + }); + + it('should set formatted value', function() { + expect(ctx.data.valueFormatted).to.be(moment.utc(1505634997920).format('YYYY-MM-DD HH:mm:ss')); + }); + }); + singleStatScenario('showing last us time instead of value', function(ctx) { ctx.setup(function() { ctx.data = [{ target: 'test.cpu1', datapoints: [[10, 12], [20, 1505634997920]] }]; @@ -99,6 +112,19 @@ describe('SingleStatCtrl', function() { }); }); + singleStatScenario('showing last us time instead of value (in UTC)', function(ctx) { + ctx.setup(function() { + ctx.data = [{ target: 'test.cpu1', datapoints: [[10, 12], [20, 1505634997920]] }]; + ctx.ctrl.panel.valueName = 'last_time'; + ctx.ctrl.panel.format = 'dateTimeAsUS'; + ctx.setIsUtc(true); + }); + + it('should set formatted value', function() { + expect(ctx.data.valueFormatted).to.be(moment.utc(1505634997920).format('MM/DD/YYYY h:mm:ss a')); + }); + }); + singleStatScenario('showing last time from now instead of value', 
function(ctx) { beforeEach(() => { clock = sinon.useFakeTimers(epoch); @@ -124,6 +150,27 @@ describe('SingleStatCtrl', function() { }); }); + singleStatScenario('showing last time from now instead of value (in UTC)', function(ctx) { + beforeEach(() => { + clock = sinon.useFakeTimers(epoch); + }); + + ctx.setup(function() { + ctx.data = [{ target: 'test.cpu1', datapoints: [[10, 12], [20, 1505634997920]] }]; + ctx.ctrl.panel.valueName = 'last_time'; + ctx.ctrl.panel.format = 'dateTimeFromNow'; + ctx.setIsUtc(true); + }); + + it('should set formatted value', function() { + expect(ctx.data.valueFormatted).to.be('2 days ago'); + }); + + afterEach(() => { + clock.restore(); + }); + }); + singleStatScenario('MainValue should use same number for decimals as displayed when checking thresholds', function( ctx ) { diff --git a/public/app/plugins/panel/table/column_options.html b/public/app/plugins/panel/table/column_options.html index 9e8e4b404ae..4a4a6d0db9c 100644 --- a/public/app/plugins/panel/table/column_options.html +++ b/public/app/plugins/panel/table/column_options.html @@ -163,10 +163,10 @@ Use special variables to specify cell values:
    - $__cell refers to current cell value + ${__cell} refers to current cell value
- $__cell_n refers to Nth column value in current row. Column indexes are started from 0. For instance, - $__cell_1 refers to second column's value. + ${__cell_n} refers to Nth column value in current row. Column indexes start from 0. For instance, + ${__cell_1} refers to the second column's value.
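The renderer.ts hunk below starts passing encodeURIComponent as the format argument of templateSrv.replace when interpolating a column's linkUrl, so cell values are URL-escaped before they end up inside an href. A rough sketch of the effect, assuming a minimal stand-in for templateSrv.replace that only understands the ${__cell}-style syntax documented above (the real TemplateSrv API is far richer, so treat this as illustrative only):

type ScopedVars = { [name: string]: { value: any } };

// Simplified, hypothetical replace(): expands ${name} from scopedVars and,
// when a format function is given, runs each value through it first.
function replace(target: string, scopedVars: ScopedVars, format?: (v: string) => string): string {
  return target.replace(/\$\{(\w+)\}/g, (match, name) => {
    const variable = scopedVars[name];
    if (!variable) {
      return match; // leave unknown variables untouched
    }
    const value = String(variable.value);
    return format ? format(value) : value;
  });
}

const scopedVars: ScopedVars = { __cell: { value: 'a&b "c"' } };

// Unescaped, a raw cell value can break out of the URL or inject extra parameters:
replace('http://example.com/?q=${__cell}', scopedVars);
// -> 'http://example.com/?q=a&b "c"'

// With encodeURIComponent, as in the renderer.ts change below:
replace('http://example.com/?q=${__cell}', scopedVars, encodeURIComponent);
// -> 'http://example.com/?q=a%26b%20%22c%22'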
diff --git a/public/app/plugins/panel/table/module.ts index 27eab205f09..03d92f7e48f 100644 --- a/public/app/plugins/panel/table/module.ts +++ b/public/app/plugins/panel/table/module.ts @@ -154,6 +154,11 @@ class TablePanelCtrl extends MetricsPanelCtrl { this.render(); } + moveQuery(target, direction) { + super.moveQuery(target, direction); + super.refresh(); + } + exportCsv() { var scope = this.$scope.$new(true); scope.tableData = this.renderer.render_values(); @@ -218,13 +223,13 @@ } function renderPanel() { - var panelElem = elem.parents('.panel'); + var panelElem = elem.parents('.panel-content'); var rootElem = elem.find('.table-panel-scroll'); var tbodyElem = elem.find('tbody'); var footerElem = elem.find('.table-panel-footer'); elem.css({ 'font-size': panel.fontSize }); - panelElem.addClass('table-panel-wrapper'); + panelElem.addClass('table-panel-content'); appendTableRows(tbodyElem); appendPaginationControls(footerElem); diff --git a/public/app/plugins/panel/table/renderer.ts index 78f224d723f..f6950dada52 100644 --- a/public/app/plugins/panel/table/renderer.ts +++ b/public/app/plugins/panel/table/renderer.ts @@ -247,7 +247,7 @@ export class TableRenderer { var scopedVars = this.renderRowVariables(rowIndex); scopedVars['__cell'] = { value: value }; - var cellLink = this.templateSrv.replace(column.style.linkUrl, scopedVars); + var cellLink = this.templateSrv.replace(column.style.linkUrl, scopedVars, encodeURIComponent); var cellLinkTooltip = this.templateSrv.replace(column.style.linkTooltip, scopedVars); var cellTarget = column.style.linkTargetBlank ? '_blank' : ''; diff --git a/public/app/plugins/panel/table/specs/transformers.jest.ts index a59b3ae48ee..eefe3f9bdc0 100644 --- a/public/app/plugins/panel/table/specs/transformers.jest.ts +++ b/public/app/plugins/panel/table/specs/transformers.jest.ts @@ -221,7 +221,7 @@ describe('when transforming time series table', () => { expect(table.rows[0][2]).toBe(42); }); - it('should return 2 rows for a mulitple queries with same label values plus one extra row', () => { + it('should return 2 rows for multiple queries with same label values plus one extra row', () => { table = transformDataToTable(multipleQueriesDataSameLabels, panel); expect(table.rows.length).toBe(2); expect(table.rows[0][0]).toBe(time); @@ -238,7 +238,7 @@ expect(table.rows[1][5]).toBe(7); }); - it('should return 2 rows for mulitple queries with different label values', () => { + it('should return 2 rows for multiple queries with different label values', () => { table = transformDataToTable(multipleQueriesDataDifferentLabels, panel); expect(table.rows.length).toBe(2); expect(table.columns.length).toBe(6); diff --git a/public/app/plugins/panel/table/transformers.ts index 43088dc22ac..1659ba3e3aa 100644 --- a/public/app/plugins/panel/table/transformers.ts +++ b/public/app/plugins/panel/table/transformers.ts @@ -243,7 +243,7 @@ transformers['table'] = { row[columnIndex] = matchedRow[columnIndex]; } } - // Dont visit this row again + // Don't visit this row again mergedRows[match] = matchedRow; // Keep looking for more rows to merge offset = match + 1; diff --git a/public/app/routes/ReactContainer.tsx index b7613d9474d..db6938cc878
100644 --- a/public/app/routes/ReactContainer.tsx +++ b/public/app/routes/ReactContainer.tsx @@ -1,8 +1,11 @@ import React from 'react'; import ReactDOM from 'react-dom'; +import { Provider } from 'mobx-react'; + import coreModule from 'app/core/core_module'; import { store } from 'app/stores/store'; -import { Provider } from 'mobx-react'; +import { BackendSrv } from 'app/core/services/backend_srv'; +import { DatasourceSrv } from 'app/features/plugins/datasource_srv'; function WrapInProvider(store, Component, props) { return ( @@ -13,14 +16,20 @@ function WrapInProvider(store, Component, props) { } /** @ngInject */ -export function reactContainer($route, $location, backendSrv) { +export function reactContainer($route, $location, backendSrv: BackendSrv, datasourceSrv: DatasourceSrv) { return { restrict: 'E', template: '', link(scope, elem) { let component = $route.current.locals.component; - let props = { + // Dynamic imports return whole module, need to extract default export + if (component.default) { + component = component.default; + } + const props = { backendSrv: backendSrv, + datasourceSrv: datasourceSrv, + routeParams: $route.current.params, }; ReactDOM.render(WrapInProvider(store, component, props), elem[0]); diff --git a/public/app/routes/routes.ts b/public/app/routes/routes.ts index 487e9b27b22..6a61315f956 100644 --- a/public/app/routes/routes.ts +++ b/public/app/routes/routes.ts @@ -1,9 +1,11 @@ import './dashboard_loaders'; import './ReactContainer'; -import { ServerStats } from 'app/containers/ServerStats/ServerStats'; -import { AlertRuleList } from 'app/containers/AlertRuleList/AlertRuleList'; -import { FolderSettings } from 'app/containers/ManageDashboards/FolderSettings'; -import { FolderPermissions } from 'app/containers/ManageDashboards/FolderPermissions'; + +import ServerStats from 'app/containers/ServerStats/ServerStats'; +import AlertRuleList from 'app/containers/AlertRuleList/AlertRuleList'; +// import Explore from 'app/containers/Explore/Explore'; +import FolderSettings from 'app/containers/ManageDashboards/FolderSettings'; +import FolderPermissions from 'app/containers/ManageDashboards/FolderPermissions'; /** @ngInject **/ export function setupAngularRoutes($routeProvider, $locationProvider) { @@ -109,6 +111,12 @@ export function setupAngularRoutes($routeProvider, $locationProvider) { controller: 'FolderDashboardsCtrl', controllerAs: 'ctrl', }) + .when('/explore/:initial?', { + template: '', + resolve: { + component: () => import(/* webpackChunkName: "explore" */ 'app/containers/Explore/Explore'), + }, + }) .when('/org', { templateUrl: 'public/app/features/org/partials/orgDetails.html', controller: 'OrgDetailsCtrl', diff --git a/public/app/stores/PermissionsStore/PermissionsStore.jest.ts b/public/app/stores/PermissionsStore/PermissionsStore.jest.ts index c3bc6016e50..6d88401e0d6 100644 --- a/public/app/stores/PermissionsStore/PermissionsStore.jest.ts +++ b/public/app/stores/PermissionsStore/PermissionsStore.jest.ts @@ -15,7 +15,24 @@ describe('PermissionsStore', () => { permission: 1, permissionName: 'View', teamId: 1, - teamName: 'MyTestTeam', + team: 'MyTestTeam', + inherited: true, + }, + { + id: 5, + dashboardId: 1, + permission: 1, + permissionName: 'View', + userId: 1, + userLogin: 'MyTestUser', + }, + { + id: 6, + dashboardId: 1, + permission: 1, + permissionName: 'Edit', + teamId: 2, + team: 'MyTestTeam2', }, ]) ); @@ -48,15 +65,24 @@ describe('PermissionsStore', () => { }); it('should save removed permissions automatically', async () => { - 
expect(store.items.length).toBe(3); + expect(store.items.length).toBe(5); await store.removeStoreItem(2); - expect(store.items.length).toBe(2); + expect(store.items.length).toBe(4); expect(backendSrv.post.mock.calls.length).toBe(1); expect(backendSrv.post.mock.calls[0][0]).toBe('/api/dashboards/id/1/permissions'); }); + it('should be sorted by sort rank and alphabetically', async () => { + expect(store.items[0].name).toBe('MyTestTeam'); + expect(store.items[0].dashboardId).toBe(10); + expect(store.items[1].name).toBe('Editor'); + expect(store.items[2].name).toBe('Viewer'); + expect(store.items[3].name).toBe('MyTestTeam2'); + expect(store.items[4].name).toBe('MyTestUser'); + }); + describe('when one inherited and one not inherited team permission are added', () => { beforeEach(async () => { const overridingItemForChildDashboard = { @@ -73,7 +99,18 @@ describe('PermissionsStore', () => { }); it('should add new overriding permission', () => { - expect(store.items.length).toBe(4); + expect(store.items.length).toBe(6); + }); + + it('should be sorted by sort rank and alphabetically', async () => { + expect(store.items[0].name).toBe('MyTestTeam'); + expect(store.items[0].dashboardId).toBe(10); + expect(store.items[1].name).toBe('Editor'); + expect(store.items[2].name).toBe('Viewer'); + expect(store.items[3].name).toBe('MyTestTeam'); + expect(store.items[3].dashboardId).toBe(1); + expect(store.items[4].name).toBe('MyTestTeam2'); + expect(store.items[5].name).toBe('MyTestUser'); }); }); }); diff --git a/public/app/stores/PermissionsStore/PermissionsStore.ts b/public/app/stores/PermissionsStore/PermissionsStore.ts index 79df593f06e..95d63c8527a 100644 --- a/public/app/stores/PermissionsStore/PermissionsStore.ts +++ b/public/app/stores/PermissionsStore/PermissionsStore.ts @@ -30,6 +30,8 @@ export const NewPermissionsItem = types ), userId: types.maybe(types.number), userLogin: types.maybe(types.string), + userAvatarUrl: types.maybe(types.string), + teamAvatarUrl: types.maybe(types.string), teamId: types.maybe(types.number), team: types.maybe(types.string), permission: types.optional(types.number, 1), @@ -50,17 +52,19 @@ export const NewPermissionsItem = types }, })) .actions(self => ({ - setUser(userId: number, userLogin: string) { + setUser(userId: number, userLogin: string, userAvatarUrl: string) { self.userId = userId; self.userLogin = userLogin; + self.userAvatarUrl = userAvatarUrl; self.teamId = null; self.team = null; }, - setTeam(teamId: number, team: string) { + setTeam(teamId: number, team: string, teamAvatarUrl: string) { self.userId = null; self.userLogin = null; self.teamId = teamId; self.team = team; + self.teamAvatarUrl = teamAvatarUrl; }, setPermission(permission: number) { self.permission = permission; @@ -121,16 +125,20 @@ export const PermissionsStore = types teamId: undefined, userLogin: undefined, userId: undefined, + userAvatarUrl: undefined, + teamAvatarUrl: undefined, role: undefined, }; switch (self.newItem.type) { case aclTypeValues.GROUP.value: item.team = self.newItem.team; item.teamId = self.newItem.teamId; + item.teamAvatarUrl = self.newItem.teamAvatarUrl; break; case aclTypeValues.USER.value: item.userLogin = self.newItem.userLogin; item.userId = self.newItem.userId; + item.userAvatarUrl = self.newItem.userAvatarUrl; break; case aclTypeValues.VIEWER.value: case aclTypeValues.EDITOR.value: @@ -147,6 +155,8 @@ export const PermissionsStore = types try { yield updateItems(self, updatedItems); self.items.push(newItem); + let sortedItems = self.items.sort((a, b) => b.sortRank 
- a.sortRank || a.name.localeCompare(b.name)); + self.items = sortedItems; resetNewTypeInternal(); } catch {} yield Promise.resolve(); @@ -206,31 +216,26 @@ const updateItems = (self, items) => { }; const prepareServerResponse = (response, dashboardId: number, isFolder: boolean, isInRoot: boolean) => { - return response.map(item => { - return prepareItem(item, dashboardId, isFolder, isInRoot); - }); + return response + .map(item => { + return prepareItem(item, dashboardId, isFolder, isInRoot); + }) + .sort((a, b) => b.sortRank - a.sortRank || a.name.localeCompare(b.name)); }; const prepareItem = (item, dashboardId: number, isFolder: boolean, isInRoot: boolean) => { - item.inherited = !isFolder && !isInRoot && dashboardId !== item.dashboardId; - item.sortRank = 0; if (item.userId > 0) { - item.icon = 'fa fa-fw fa-user'; - item.nameHtml = item.userLogin; - item.sortName = item.userLogin; + item.name = item.userLogin; item.sortRank = 10; } else if (item.teamId > 0) { - item.icon = 'fa fa-fw fa-users'; - item.nameHtml = item.team; - item.sortName = item.team; + item.name = item.team; item.sortRank = 20; } else if (item.role) { item.icon = 'fa fa-fw fa-street-view'; - item.nameHtml = `Everyone with ${item.role} Role`; - item.sortName = item.role; + item.name = item.role; item.sortRank = 30; - if (item.role === 'Viewer') { + if (item.role === 'Editor') { item.sortRank += 1; } } diff --git a/public/app/stores/PermissionsStore/PermissionsStoreItem.ts b/public/app/stores/PermissionsStore/PermissionsStoreItem.ts index 92dca0220ca..c4873cb9c01 100644 --- a/public/app/stores/PermissionsStore/PermissionsStoreItem.ts +++ b/public/app/stores/PermissionsStore/PermissionsStoreItem.ts @@ -14,8 +14,9 @@ export const PermissionsStoreItem = types inherited: types.maybe(types.boolean), sortRank: types.maybe(types.number), icon: types.maybe(types.string), - nameHtml: types.maybe(types.string), - sortName: types.maybe(types.string), + name: types.maybe(types.string), + teamAvatarUrl: types.maybe(types.string), + userAvatarUrl: types.maybe(types.string), }) .actions(self => ({ updateRole: role => { diff --git a/public/app/stores/store.ts b/public/app/stores/store.ts index 8ad53607ac2..dfbd8141198 100644 --- a/public/app/stores/store.ts +++ b/public/app/stores/store.ts @@ -3,11 +3,11 @@ import config from 'app/core/config'; export let store: IRootStore; -export function createStore(backendSrv) { +export function createStore(services) { store = RootStore.create( {}, { - backendSrv: backendSrv, + ...services, navTree: config.bootData.navTree, } ); diff --git a/public/dashboards/scripted_templated.js b/public/dashboards/scripted_templated.js index 5a05aa55b5d..f1b0b115fa1 100644 --- a/public/dashboards/scripted_templated.js +++ b/public/dashboards/scripted_templated.js @@ -22,7 +22,7 @@ var dashboard; // All url parameters are available via the ARGS object var ARGS; -// Intialize a skeleton with nothing but a rows array and service object +// Initialize a skeleton with nothing but a rows array and service object dashboard = { rows : [], schemaVersion: 13, diff --git a/public/img/graph404.svg b/public/img/graph404.svg index c9d29fdd049..9e36eeeb788 100644 --- a/public/img/graph404.svg +++ b/public/img/graph404.svg @@ -1,4 +1,4 @@ - + @@ -58,7 +58,6 @@ } - @@ -69,5 +68,5 @@ - + diff --git a/public/img/icons_dark_theme/icon_editor.svg b/public/img/icons_dark_theme/icon_editor.svg new file mode 100644 index 00000000000..00c60902fbc --- /dev/null +++ b/public/img/icons_dark_theme/icon_editor.svg @@ -0,0 +1,19 @@ + + + + + 
+ + + + + + + + diff --git a/public/img/icons_dark_theme/icon_viewer.svg b/public/img/icons_dark_theme/icon_viewer.svg new file mode 100644 index 00000000000..aec3e6b7e5b --- /dev/null +++ b/public/img/icons_dark_theme/icon_viewer.svg @@ -0,0 +1,17 @@ + + + + + + + + + + diff --git a/public/img/icons_light_theme/icon_editor.svg b/public/img/icons_light_theme/icon_editor.svg new file mode 100644 index 00000000000..a6581072a17 --- /dev/null +++ b/public/img/icons_light_theme/icon_editor.svg @@ -0,0 +1,19 @@ + + + + + + + + + + + + + diff --git a/public/img/icons_light_theme/icon_viewer.svg b/public/img/icons_light_theme/icon_viewer.svg new file mode 100644 index 00000000000..85d9b7109f4 --- /dev/null +++ b/public/img/icons_light_theme/icon_viewer.svg @@ -0,0 +1,17 @@ + + + + + + + + + + diff --git a/public/sass/_grafana.scss b/public/sass/_grafana.scss index 36072fe8929..afc869f8b15 100644 --- a/public/sass/_grafana.scss +++ b/public/sass/_grafana.scss @@ -104,5 +104,6 @@ @import 'pages/signup'; @import 'pages/styleguide'; @import 'pages/errorpage'; +@import 'pages/explore'; @import 'old_responsive'; @import 'components/view_states.scss'; diff --git a/public/sass/_variables.light.scss b/public/sass/_variables.light.scss index a59350d2195..bb8f93dbe69 100644 --- a/public/sass/_variables.light.scss +++ b/public/sass/_variables.light.scss @@ -59,9 +59,8 @@ $critical: #ec2128; $body-bg: $gray-7; $page-bg: $gray-7; $body-color: $gray-1; -//$text-color: $dark-4; $text-color: $gray-1; -$text-color-strong: $white; +$text-color-strong: $dark-2; $text-color-weak: $gray-2; $text-color-faint: $gray-4; $text-color-emphasis: $dark-5; diff --git a/public/sass/base/_fonts.scss b/public/sass/base/_fonts.scss index 558381e169a..ca3dd6344e2 100644 --- a/public/sass/base/_fonts.scss +++ b/public/sass/base/_fonts.scss @@ -29,6 +29,7 @@ unicode-range: U+1f00-1fff; } /* greek */ +/* not available @font-face { font-family: 'Roboto'; font-style: normal; @@ -37,6 +38,7 @@ url(../fonts/roboto/u0TOpm082MNkS5K0Q4rhqvesZW2xOQ-xsNqO47m55DA.woff2) format('woff2'); unicode-range: U+0370-03ff; } +*/ /* vietnamese */ @font-face { font-family: 'Roboto'; diff --git a/public/sass/base/_icons.scss b/public/sass/base/_icons.scss index c701cc1249e..bf66d4dc68d 100644 --- a/public/sass/base/_icons.scss +++ b/public/sass/base/_icons.scss @@ -120,6 +120,10 @@ background-image: url('../img/icons_#{$theme-name}_theme/icon_data_sources.svg'); } +.gicon-editor { + background-image: url('../img/icons_#{$theme-name}_theme/icon_editor.svg'); +} + .gicon-folder-new { background-image: url('../img/icons_#{$theme-name}_theme/icon_add_folder.svg'); } @@ -180,6 +184,10 @@ background-image: url('../img/icons_#{$theme-name}_theme/icon_variable.svg'); } +.gicon-viewer { + background-image: url('../img/icons_#{$theme-name}_theme/icon_viewer.svg'); +} + .gicon-zoom-out { background-image: url('../img/icons_#{$theme-name}_theme/icon_zoom_out.svg'); } diff --git a/public/sass/components/_dashboard_settings.scss b/public/sass/components/_dashboard_settings.scss index 11d943eb13c..5e17e025196 100644 --- a/public/sass/components/_dashboard_settings.scss +++ b/public/sass/components/_dashboard_settings.scss @@ -53,6 +53,13 @@ margin-bottom: $spacer*2; } +.dashboard-settings__subheader { + color: $text-muted; + font-style: italic; + position: relative; + top: -1.5rem; +} + .dashboard-settings__nav-item { padding: 7px 12px; color: $text-color; @@ -64,8 +71,13 @@ background: $page-bg; } - i { - padding-right: 5px; + .gicon { + margin-bottom: 2px; + } + + .fa { 
+ font-size: 17px; + width: 16px; } } @@ -80,3 +92,7 @@ margin-bottom: 10px; } } + +.dashboard-settings__json-save-button { + margin-top: $spacer; +} diff --git a/public/sass/components/_dropdown.scss b/public/sass/components/_dropdown.scss index cc94a379e07..37dbdcd89ef 100644 --- a/public/sass/components/_dropdown.scss +++ b/public/sass/components/_dropdown.scss @@ -256,17 +256,15 @@ // Caret to indicate there is a submenu .dropdown-submenu > a::after { - display: block; - content: ' '; - float: right; - width: 0; - height: 0; - border-color: transparent; - border-style: solid; - border-width: 5px 0 5px 5px; - border-left-color: $text-color-weak; - margin-top: 5px; - margin-right: -4px; + position: absolute; + top: 35%; + right: $input-padding-x; + background-color: transparent; + color: $text-color-weak; + font: normal normal normal $font-size-sm/1 FontAwesome; + content: '\f0da'; + pointer-events: none; + font-size: 11px; } .dropdown-submenu:hover > a::after { border-left-color: $dropdownLinkColorHover; diff --git a/public/sass/components/_filter-table.scss b/public/sass/components/_filter-table.scss index 00f9b93dcfd..bfa9fbbbc5a 100644 --- a/public/sass/components/_filter-table.scss +++ b/public/sass/components/_filter-table.scss @@ -85,3 +85,7 @@ } } } +.filter-table__weak-italic { + font-style: italic; + color: $text-color-weak; +} diff --git a/public/sass/components/_form_select_box.scss b/public/sass/components/_form_select_box.scss index beee2db15ab..0401f06eba8 100644 --- a/public/sass/components/_form_select_box.scss +++ b/public/sass/components/_form_select_box.scss @@ -102,5 +102,6 @@ $select-option-selected-bg: $dropdownLinkBackgroundActive; .gf-form-input--form-dropdown-right { .Select-menu-outer { right: 0; + left: unset; } } diff --git a/public/sass/components/_gf-form.scss b/public/sass/components/_gf-form.scss index 6603cfa072b..7a3046444fe 100644 --- a/public/sass/components/_gf-form.scss +++ b/public/sass/components/_gf-form.scss @@ -341,19 +341,19 @@ $input-border: 1px solid $input-border-color; margin-right: $gf-form-margin; position: relative; background-color: $input-bg; - padding-right: $input-padding-x; border: $input-border; border-radius: $input-border-radius; &::after { position: absolute; top: 35%; - right: $input-padding-x/2; + right: $input-padding-x; background-color: transparent; color: $input-color; font: normal normal normal $font-size-sm/1 FontAwesome; content: '\f0d7'; pointer-events: none; + font-size: 11px; } .gf-form-input { diff --git a/public/sass/components/_panel_add_panel.scss b/public/sass/components/_panel_add_panel.scss index 51754a54d92..5bfff31a108 100644 --- a/public/sass/components/_panel_add_panel.scss +++ b/public/sass/components/_panel_add_panel.scss @@ -1,11 +1,22 @@ -.add-panel { +.add-panel-container { height: 100%; } +.add-panel { + height: 100%; + + .baron__root { + height: calc(100% - 43px); + } +} + .add-panel__header { - padding: 5px 15px; + padding: 0 15px; display: flex; align-items: center; + background: $page-header-bg; + box-shadow: $page-header-shadow; + border-bottom: 1px solid $page-header-border-color; .gicon { font-size: 30px; @@ -23,7 +34,7 @@ .add-panel__title { font-size: $font-size-md; - margin-right: $spacer/2; + margin-right: $spacer*2; } .add-panel__sub-title { @@ -39,7 +50,7 @@ flex-direction: row; flex-wrap: wrap; overflow: auto; - height: calc(100% - 43px); + height: 100%; align-content: flex-start; justify-content: space-around; position: relative; @@ -77,3 +88,16 @@ .add-panel__item-icon { padding: 
+
+.add-panel__searchbar {
+  width: 100%;
+  margin-bottom: 10px;
+  margin-top: 7px;
+}
+
+.add-panel__no-panels {
+  color: $text-color-weak;
+  font-style: italic;
+  width: 100%;
+  padding: 3px 8px;
+}
diff --git a/public/sass/components/_panel_graph.scss b/public/sass/components/_panel_graph.scss
index e15cd576367..72f3ca3dbbe 100644
--- a/public/sass/components/_panel_graph.scss
+++ b/public/sass/components/_panel_graph.scss
@@ -49,6 +49,7 @@
 }

 .graph-legend {
+  display: flex;
   flex: 0 1 auto;
   max-height: 30%;
   margin: 0;
@@ -56,11 +57,27 @@
   padding-top: 6px;
   position: relative;

+  // fix for Firefox (white stripe on the right of scrollbar)
+  width: calc(100% - 1px);
+
   .popover-content {
     padding: 0;
   }
 }

+.graph-legend-content {
+  position: relative;
+
+  // fix for Firefox (white stripe on the right of scrollbar)
+  width: calc(100% - 1px);
+}
+
+.graph-legend-scroll {
+  position: relative;
+  overflow: auto !important;
+  padding: 1px;
+}
+
 .graph-legend-icon {
   position: relative;
   padding-right: 4px;
@@ -115,8 +132,20 @@
 // fix for phantomjs
 .body--phantomjs {
   .graph-panel--legend-right {
+    .graph-legend {
+      display: inline-block;
+    }
+
+    .graph-panel__chart {
+      display: flex;
+    }
+
     .graph-legend-table {
       display: table;
+
+      .graph-legend-scroll {
+        display: table;
+      }
     }
   }
 }
@@ -124,9 +153,9 @@
 .graph-legend-table {
   tbody {
     display: block;
+    position: relative;
     overflow-y: auto;
     overflow-x: hidden;
-    height: 100%;
     padding-bottom: 1px;
     padding-right: 5px;
     padding-left: 5px;
diff --git a/public/sass/components/_panel_table.scss b/public/sass/components/_panel_table.scss
index f120fcc8b35..8e0ecf15896 100644
--- a/public/sass/components/_panel_table.scss
+++ b/public/sass/components/_panel_table.scss
@@ -1,7 +1,6 @@
-.table-panel-wrapper {
-  .panel-content {
-    padding: 0;
-  }
+.table-panel-content {
+  padding: 0;
+
   .panel-title-container {
     padding-bottom: 4px;
   }
diff --git a/public/sass/components/_scrollbar.scss b/public/sass/components/_scrollbar.scss
index 42818e786f6..78173b73f47 100644
--- a/public/sass/components/_scrollbar.scss
+++ b/public/sass/components/_scrollbar.scss
@@ -9,6 +9,11 @@
   -ms-touch-action: auto;
 }

+// ._scrollbar {
+//   overflow-x: hidden !important;
+//   overflow-y: auto;
+// }
+
 /*
  * Scrollbar rail styles
  */
@@ -101,7 +106,7 @@
   opacity: 0.9;
 }

-// Srollbars
+// Scrollbars
 //
 ::-webkit-scrollbar {
@@ -172,3 +177,120 @@
   border-top: 1px solid $scrollbarBorder;
   border-left: 1px solid $scrollbarBorder;
 }
+
+// Baron styles
+
+.baron {
+  // display: inline-block; // this breaks phantomjs rendering (width becomes 0)
+  overflow: hidden;
+}
+
+// Fix for side menu on mobile devices
+.main-view.baron {
+  width: unset;
+}
+
+.baron__clipper {
+  position: relative;
+  overflow: hidden;
+}
+
+.baron__scroller {
+  overflow-y: scroll;
+  -ms-overflow-style: none;
+  -moz-box-sizing: border-box;
+  box-sizing: border-box;
+  margin: 0;
+  border: 0;
+  padding: 0;
+  width: 100%;
+  height: 100%;
+  -webkit-overflow-scrolling: touch;
+  /* remove line to customize scrollbar in iOS */
+}
+
+.baron__scroller::-webkit-scrollbar {
+  width: 0;
+  height: 0;
+}
+
+.baron__track {
+  display: none;
+  position: absolute;
+  top: 0;
+  right: 0;
+  bottom: 0;
+}
+
+.baron._scrollbar .baron__track {
+  display: block;
+}
+
+.baron__free {
+  position: absolute;
+  top: 0;
+  bottom: 0;
+  right: 0;
+}
+
+.baron__bar {
+  display: none;
+  position: absolute;
+  right: 0;
+  z-index: 1;
+  // width: 10px;
+  background: #999;
+
+  // height: 15px;
+  width: 15px;
+  transition: background-color 0.2s linear, opacity 0.2s linear;
+  opacity: 0;
+}
+
+.baron._scrollbar .baron__bar {
+  display: block;
+
+  @include gradient-vertical($scrollbarBackground, $scrollbarBackground2);
+  border-radius: 6px;
+  width: 6px;
+  /* 'right' is required for ps__thumb-y */
+  right: 0px;
+  /* please don't change 'position' */
+  position: absolute;
+
+  // background-color: transparent;
+  // opacity: 0.6;
+
+  &:hover,
+  &:focus {
+    // background-color: transparent;
+    opacity: 0.9;
+  }
+}
+
+.panel-hover-highlight .baron__track .baron__bar {
+  opacity: 0.6;
+}
+
+.baron._scrolling > .baron__track .baron__bar {
+  opacity: 0.9;
+}
+
+// fix for phantomjs
+.body--phantomjs .baron__track .baron__bar {
+  opacity: 0 !important;
+}
+
+.baron__control {
+  display: none;
+}
+
+.baron.panel-content--scrollable {
+  // Width needs to be set to prevent content width issues
+  // Set to less than 100% to fix a Firefox issue (white stripe on the right of the scrollbar)
+  width: calc(100% - 2px);
+
+  .baron__scroller {
+    padding-top: 1px;
+  }
+}
diff --git a/public/sass/components/_search.scss b/public/sass/components/_search.scss
index 47d4a926968..8338a5d72ae 100644
--- a/public/sass/components/_search.scss
+++ b/public/sass/components/_search.scss
@@ -31,7 +31,6 @@
   //padding: 0.5rem 1.5rem 0.5rem 0;
   padding: 1rem 1rem 0.75rem 1rem;
   height: 51px;
-  line-height: 51px;
   box-sizing: border-box;
   outline: none;
   background: $side-menu-bg;
@@ -61,6 +60,10 @@
   display: flex;
   flex-direction: column;
   flex-grow: 1;
+
+  .search-item--indent {
+    margin-left: 14px;
+  }
 }

 .search-dropdown__col_2 {
@@ -99,14 +102,21 @@
   }
 }

+.search-results-scroller {
+  display: flex;
+  position: relative;
+}
+
 .search-results-container {
-  height: 100%;
   display: block;
   padding: $spacer;
   position: relative;
   flex-grow: 10;
   margin-bottom: 1rem;

+  // Fix for search scroller in mobile view
+  height: unset;
+
   .label-tag {
     margin-left: 6px;
     font-size: 11px;
diff --git a/public/sass/components/_sidemenu.scss b/public/sass/components/_sidemenu.scss
index 8a5c3779714..5fdb1a5e32e 100644
--- a/public/sass/components/_sidemenu.scss
+++ b/public/sass/components/_sidemenu.scss
@@ -123,6 +123,8 @@
   position: relative;
   opacity: 0.7;
   font-size: 130%;
+  height: 22px;
+  width: 22px;
 }

 .fa {
@@ -147,6 +149,15 @@
   color: #ebedf2;
 }

+.sidemenu-subtitle {
+  padding: 0.5rem 1rem 0.5rem;
+  font-size: $font-size-sm;
+  color: $text-color-weak;
+  border-bottom: 1px solid $dropdownDividerBottom;
+  margin-bottom: 0.25rem;
+  white-space: nowrap;
+}
+
 li.sidemenu-org-switcher {
   border-bottom: 1px solid $dropdownDividerBottom;
 }
@@ -178,6 +189,7 @@ li.sidemenu-org-switcher {
   padding: 0.4rem 1rem 0.4rem 0.65rem;
   min-height: $navbarHeight;
   position: relative;
+  height: $navbarHeight - 1px;

   &:hover {
     background: $navbarButtonBackgroundHighlight;
diff --git a/public/sass/components/_tabbed_view.scss b/public/sass/components/_tabbed_view.scss
index dfd760753fe..bf95d453504 100644
--- a/public/sass/components/_tabbed_view.scss
+++ b/public/sass/components/_tabbed_view.scss
@@ -43,7 +43,7 @@
     font-size: 120%;
   }
   &:hover {
-    color: $white;
+    color: $text-color-strong;
   }
 }
diff --git a/public/sass/components/_tabs.scss b/public/sass/components/_tabs.scss
index 197d5892652..eb3c8ce13f5 100644
--- a/public/sass/components/_tabs.scss
+++ b/public/sass/components/_tabs.scss
@@ -44,18 +44,16 @@

   &::before {
     display: block;
-    content: " ";
+    content: ' ';
     position: absolute;
     left: 0;
     right: 0;
     height: 2px;
     top: 0;
-    background-image: linear-gradient(
-      to right,
-      #ffd500 0%,
-      #ff4400 99%,
-      #ff4400 100%
-    );
+    background-image: linear-gradient(to right, #ffd500 0%, #ff4400 99%, #ff4400 100%);
   }
 }
+  &.active--panel {
+    background: $panel-bg !important;
+  }
 }
diff --git a/public/sass/components/_tags.scss b/public/sass/components/_tags.scss
index 9d015f952fe..014d9f0be1e 100644
--- a/public/sass/components/_tags.scss
+++ b/public/sass/components/_tags.scss
@@ -4,7 +4,7 @@
   display: inline-block;
   padding: 2px 4px;
   font-size: $font-size-base * 0.846;
-  font-weight: bold;
+  font-weight: $font-weight-semi-bold;
   line-height: 14px; // ensure proper line-height if floated
   color: $white;
   vertical-align: baseline;
diff --git a/public/sass/components/_timepicker.scss b/public/sass/components/_timepicker.scss
index 2d7a12c3d01..e4d8f4555e0 100644
--- a/public/sass/components/_timepicker.scss
+++ b/public/sass/components/_timepicker.scss
@@ -71,21 +71,29 @@
     td {
       padding: 1px;
     }
-    button.btn-sm {
+    button {
       @include buttonBackground($btn-inverse-bg, $btn-inverse-bg-hl);
-      font-size: $font-size-sm;
       background-image: none;
       border: none;
-      padding: 5px 11px;
       color: $text-color;

       &.active span {
-        color: $blue;
+        color: $query-blue;
         font-weight: bold;
       }
       .text-info {
         color: $orange;
         font-weight: bold;
       }
+      &.btn-sm {
+        font-size: $font-size-sm;
+        padding: 5px 11px;
+      }
+      &:hover {
+        color: $text-color-strong;
+      }
+      &[disabled] {
+        color: $text-color;
+      }
     }
   }

@@ -103,10 +111,10 @@
   }

   .fa-chevron-left::before {
-    content: "\f053";
+    content: '\f053';
   }

   .fa-chevron-right::before {
-    content: "\f054";
+    content: '\f054';
   }

   .glyphicon-chevron-right {
diff --git a/public/sass/layout/_page.scss b/public/sass/layout/_page.scss
index 03941a47408..faa5b94d4ad 100644
--- a/public/sass/layout/_page.scss
+++ b/public/sass/layout/_page.scss
@@ -23,17 +23,32 @@
   @include clearfix();
 }

+.page-full {
+  margin-left: $page-sidebar-margin;
+  padding-left: $spacer;
+  padding-right: $spacer;
+  @include clearfix();
+}
+
 .scroll-canvas {
   position: absolute;
   width: 100%;
   overflow: auto;
   height: 100%;
+  -webkit-overflow-scrolling: touch;

   &--dashboard {
     height: calc(100% - 56px);
   }
 }

+// fix for phantomjs
+.body--phantomjs {
+  .scroll-canvas {
+    overflow: hidden;
+  }
+}
+
 .page-body {
   padding-top: $spacer*2;
   min-height: 500px;
diff --git a/public/sass/pages/_alerting.scss b/public/sass/pages/_alerting.scss
index f44e26d5c20..fb6b6e78d1b 100644
--- a/public/sass/pages/_alerting.scss
+++ b/public/sass/pages/_alerting.scss
@@ -108,7 +108,8 @@
   justify-content: center;
   align-items: center;
   width: 40px;
-  padding: 0 28px 0 16px;
+  //margin-right: 8px;
+  padding: 0 4px 0 2px;

   .icon-gf,
   .fa {
     font-size: 200%;
diff --git a/public/sass/pages/_dashboard.scss b/public/sass/pages/_dashboard.scss
index 871db4dfc2d..471e90ed9cf 100644
--- a/public/sass/pages/_dashboard.scss
+++ b/public/sass/pages/_dashboard.scss
@@ -33,7 +33,6 @@ div.flot-text {
   border: $panel-border;
   position: relative;
   border-radius: 3px;
-  height: 100%;

   &.panel-transparent {
     background-color: transparent;
@@ -45,7 +44,8 @@
   padding: $panel-padding;
   height: calc(100% - 27px);
   position: relative;
-  overflow: hidden;
+  // Fixes scrolling on mobile devices
+  overflow: auto;
 }

 .panel-title-container {
diff --git a/public/sass/pages/_explore.scss b/public/sass/pages/_explore.scss
new file mode 100644
index 00000000000..855d11cb859
--- /dev/null
+++ b/public/sass/pages/_explore.scss
@@ -0,0 +1,323 @@
+.explore {
+  .graph-legend {
+    flex-wrap: wrap;
+  }
+}
+
+.query-row {
+  position: relative;
+
+  & + & {
+    margin-top: 0.5rem;
+  }
+}
+
+.query-row-tools {
+  position: absolute;
+  left: -4rem;
+  top: 0.33rem;
+  > * {
+    margin-right: 0.25rem;
+  }
+}
+
+.query-field {
+  font-size: 14px;
+  font-family: Consolas, Menlo, Courier, monospace;
+  height: auto;
+}
+
+.query-field-wrapper {
+  position: relative;
+  display: inline-block;
+  padding: 6px 7px 4px;
+  width: 100%;
+  cursor: text;
+  line-height: 1.5;
+  color: rgba(0, 0, 0, 0.65);
+  background-color: #fff;
+  background-image: none;
+  border: 1px solid lightgray;
+  border-radius: 3px;
+  transition: all 0.3s;
+}
+
+.explore {
+  .typeahead {
+    position: absolute;
+    z-index: auto;
+    top: -10000px;
+    left: -10000px;
+    opacity: 0;
+    border-radius: 4px;
+    transition: opacity 0.75s;
+    border: 1px solid #e4e4e4;
+    max-height: calc(66vh);
+    overflow-y: scroll;
+    max-width: calc(66%);
+    overflow-x: hidden;
+    outline: none;
+    list-style: none;
+    background: #fff;
+    color: rgba(0, 0, 0, 0.65);
+    transition: opacity 0.4s ease-out;
+  }
+
+  .typeahead-group__title {
+    color: rgba(0, 0, 0, 0.43);
+    font-size: 12px;
+    line-height: 1.5;
+    padding: 8px 16px;
+  }
+
+  .typeahead-item {
+    line-height: 200%;
+    height: auto;
+    font-family: Consolas, Menlo, Courier, monospace;
+    padding: 0 16px 0 28px;
+    font-size: 12px;
+    text-overflow: ellipsis;
+    overflow: hidden;
+    margin-left: -1px;
+    left: 1px;
+    position: relative;
+    z-index: 1;
+    display: block;
+    white-space: nowrap;
+    cursor: pointer;
+    transition: color 0.3s cubic-bezier(0.645, 0.045, 0.355, 1), border-color 0.3s cubic-bezier(0.645, 0.045, 0.355, 1),
+      background 0.3s cubic-bezier(0.645, 0.045, 0.355, 1), padding 0.15s cubic-bezier(0.645, 0.045, 0.355, 1);
+  }
+
+  .typeahead-item__selected {
+    background-color: #ecf6fd;
+    color: #108ee9;
+  }
+}
+
+/* SYNTAX */
+
+/**
+ * prism.js Coy theme for JavaScript, CoffeeScript, CSS and HTML
+ * Based on https://github.com/tshedor/workshop-wp-theme (Example: http://workshop.kansan.com/category/sessions/basics or http://workshop.timshedor.com/category/sessions/basics);
+ * @author Tim Shedor
+ */
+
+code[class*='language-'],
+pre[class*='language-'] {
+  color: black;
+  background: none;
+  font-family: Consolas, Monaco, 'Andale Mono', 'Ubuntu Mono', monospace;
+  text-align: left;
+  white-space: pre;
+  word-spacing: normal;
+  word-break: normal;
+  word-wrap: normal;
+  line-height: 1.5;
+
+  -moz-tab-size: 4;
+  -o-tab-size: 4;
+  tab-size: 4;
+
+  -webkit-hyphens: none;
+  -moz-hyphens: none;
+  -ms-hyphens: none;
+  hyphens: none;
+}
+
+/* Code blocks */
+pre[class*='language-'] {
+  position: relative;
+  margin: 0.5em 0;
+  overflow: visible;
+  padding: 0;
+}
+pre[class*='language-'] > code {
+  position: relative;
+  border-left: 10px solid #358ccb;
+  box-shadow: -1px 0px 0px 0px #358ccb, 0px 0px 0px 1px #dfdfdf;
+  background-color: #fdfdfd;
+  background-image: linear-gradient(transparent 50%, rgba(69, 142, 209, 0.04) 50%);
+  background-size: 3em 3em;
+  background-origin: content-box;
+  background-attachment: local;
+}
+
+code[class*='language'] {
+  max-height: inherit;
+  height: inherit;
+  padding: 0 1em;
+  display: block;
+  overflow: auto;
+}
+
+/* Margin bottom to accommodate shadow */
+:not(pre) > code[class*='language-'],
+pre[class*='language-'] {
+  background-color: #fdfdfd;
+  -webkit-box-sizing: border-box;
+  -moz-box-sizing: border-box;
+  box-sizing: border-box;
+  margin-bottom: 1em;
+}
+
+/* Inline code */
+:not(pre) > code[class*='language-'] {
+  position: relative;
+  padding: 0.2em;
+  border-radius: 0.3em;
+  color: #c92c2c;
+  border: 1px solid rgba(0, 0, 0, 0.1);
+  display: inline;
+  white-space: normal;
+}
+
+pre[class*='language-']:before,
+pre[class*='language-']:after {
+  content: '';
+  z-index: -2;
+  display: block;
+  position: absolute;
+  bottom: 0.75em;
+  left: 0.18em;
+  width: 40%;
+  height: 20%;
+  max-height: 13em;
+  box-shadow: 0px 13px 8px #979797;
+  -webkit-transform: rotate(-2deg);
+  -moz-transform: rotate(-2deg);
+  -ms-transform: rotate(-2deg);
+  -o-transform: rotate(-2deg);
+  transform: rotate(-2deg);
+}
+
+:not(pre) > code[class*='language-']:after,
+pre[class*='language-']:after {
+  right: 0.75em;
+  left: auto;
+  -webkit-transform: rotate(2deg);
+  -moz-transform: rotate(2deg);
+  -ms-transform: rotate(2deg);
+  -o-transform: rotate(2deg);
+  transform: rotate(2deg);
+}
+
+.token.comment,
+.token.block-comment,
+.token.prolog,
+.token.doctype,
+.token.cdata {
+  color: #7d8b99;
+}
+
+.token.punctuation {
+  color: #5f6364;
+}
+
+.token.property,
+.token.tag,
+.token.boolean,
+.token.number,
+.token.function-name,
+.token.constant,
+.token.symbol,
+.token.deleted {
+  color: #c92c2c;
+}
+
+.token.selector,
+.token.attr-name,
+.token.string,
+.token.char,
+.token.function,
+.token.builtin,
+.token.inserted {
+  color: #2f9c0a;
+}
+
+.token.operator,
+.token.entity,
+.token.url,
+.token.variable {
+  color: #a67f59;
+  background: rgba(255, 255, 255, 0.5);
+}
+
+.token.atrule,
+.token.attr-value,
+.token.keyword,
+.token.class-name {
+  color: #1990b8;
+}
+
+.token.regex,
+.token.important {
+  color: #e90;
+}
+
+.language-css .token.string,
+.style .token.string {
+  color: #a67f59;
+  background: rgba(255, 255, 255, 0.5);
+}
+
+.token.important {
+  font-weight: normal;
+}
+
+.token.bold {
+  font-weight: bold;
+}
+.token.italic {
+  font-style: italic;
+}
+
+.token.entity {
+  cursor: help;
+}
+
+.namespace {
+  opacity: 0.7;
+}
+
+@media screen and (max-width: 767px) {
+  pre[class*='language-']:before,
+  pre[class*='language-']:after {
+    bottom: 14px;
+    box-shadow: none;
+  }
+}
+
+/* Plugin styles */
+.token.tab:not(:empty):before,
+.token.cr:before,
+.token.lf:before {
+  color: #e0d7d1;
+}
+
+/* Plugin styles: Line Numbers */
+pre[class*='language-'].line-numbers {
+  padding-left: 0;
+}
+
+pre[class*='language-'].line-numbers code {
+  padding-left: 3.8em;
+}
+
+pre[class*='language-'].line-numbers .line-numbers-rows {
+  left: 0;
+}
+
+/* Plugin styles: Line Highlight */
+pre[class*='language-'][data-line] {
+  padding-top: 0;
+  padding-bottom: 0;
+  padding-left: 0;
+}
+pre[data-line] code {
+  position: relative;
+  padding-left: 4em;
+}
+pre .line-highlight {
+  margin-top: 0;
+}
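The `.token.*` rules above style output from the Prism highlighter that the new Explore query field builds on: Prism lexes the query text and wraps every token in a `<span class="token …">`, and these rules colorize those spans. A minimal TypeScript sketch of that flow, using Prism's stock `clike` grammar as a stand-in for whatever grammar the query language actually registers:

```ts
import Prism from 'prismjs';

// Prism.highlight() returns an HTML string in which each lexed token is wrapped
// as <span class="token keyword">…</span>, <span class="token string">…</span>, etc.
// Those class names are exactly what the .token.* rules above target.
// The clike grammar is illustrative; Explore would register its own query grammar.
const html = Prism.highlight('sum(rate(errors[5m]))', Prism.languages.clike, 'clike');

// Render into the field wrapper styled by _explore.scss.
const wrapper = document.querySelector('.query-field-wrapper');
if (wrapper) {
  wrapper.innerHTML = html;
}
```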
diff --git a/public/sass/pages/_login.scss b/public/sass/pages/_login.scss
index 8622eec4e99..de10808f122 100644
--- a/public/sass/pages/_login.scss
+++ b/public/sass/pages/_login.scss
@@ -3,6 +3,7 @@ $login-border: #8daac5;
 .login {
   background-position: center;
   min-height: 85vh;
+  height: 80vh;
   background-repeat: no-repeat;
   min-width: 100%;
   margin-left: 0;
@@ -290,9 +291,14 @@ select:-webkit-autofill:focus {
 }

 @include media-breakpoint-up(md) {
+  .login-content {
+    flex: 1 0 100%;
+  }
+
   .login-branding {
     width: 45%;
     padding: 2rem 4rem;
+    flex-grow: 1;

     .logo-icon {
       width: 130px;
@@ -371,7 +377,7 @@ select:-webkit-autofill:focus {
     left: 0;
     right: 0;
     height: 100%;
-    content: "";
+    content: '';
     display: block;
   }

diff --git a/public/test/specs/helpers.ts b/public/test/specs/helpers.ts
index 8e83915362f..276d9867ec4 100644
--- a/public/test/specs/helpers.ts
+++ b/public/test/specs/helpers.ts
@@ -1,14 +1,15 @@
 import _ from 'lodash';
 import config from 'app/core/config';
 import * as dateMath from 'app/core/utils/datemath';
-import {angularMocks, sinon} from '../lib/common';
-import {PanelModel} from 'app/features/dashboard/panel_model';
+import { angularMocks, sinon } from '../lib/common';
+import { PanelModel } from 'app/features/dashboard/panel_model';

 export function ControllerTestContext() {
   var self = this;

   this.datasource = {};
   this.$element = {};
+  this.$sanitize = {};
   this.annotationsSrv = {};
   this.timeSrv = new TimeSrvStub();
   this.templateSrv = new TemplateSrvStub();
@@ -22,6 +23,7 @@ export function ControllerTestContext() {
       };
     },
   };
+  this.isUtc = false;

   this.providePhase = function(mocks) {
     return angularMocks.module(function($provide) {
@@ -30,6 +32,7 @@ export function ControllerTestContext() {
       $provide.value('timeSrv', self.timeSrv);
       $provide.value('templateSrv', self.templateSrv);
       $provide.value('$element', self.$element);
+      $provide.value('$sanitize', self.$sanitize);
       _.each(mocks, function(value, key) {
         $provide.value(key, value);
       });
@@ -42,8 +45,12 @@
       self.$location = $location;
       self.$browser = $browser;
       self.$q = $q;
-      self.panel = new PanelModel({type: 'test'});
-      self.dashboard = {meta: {}};
+      self.panel = new PanelModel({ type: 'test' });
+      self.dashboard = { meta: {} };
+      self.isUtc = false;
+      self.dashboard.isTimezoneUtc = function() {
+        return self.isUtc;
+      };

       $rootScope.appEvent = sinon.spy();
       $rootScope.onAppEvent = sinon.spy();
@@ -53,14 +60,14 @@
         $rootScope.colors.push('#' + i);
       }

-      config.panels['test'] = {info: {}};
+      config.panels['test'] = { info: {} };
       self.ctrl = $controller(
         Ctrl,
-        {$scope: self.scope},
+        { $scope: self.scope },
         {
           panel: self.panel,
           dashboard: self.dashboard,
-        },
+        }
       );
     });
   };
@@ -72,7 +79,7 @@
       self.$browser = $browser;
       self.scope.contextSrv = {};
       self.scope.panel = {};
-      self.scope.dashboard = {meta: {}};
+      self.scope.dashboard = { meta: {} };
       self.scope.dashboardMeta = {};
       self.scope.dashboardViewState = new DashboardViewStateStub();
       self.scope.appEvent = sinon.spy();
@@ -91,6 +98,10 @@
       });
     });
   };
+
+  this.setIsUtc = function(isUtc = false) {
+    self.isUtc = isUtc;
+  };
 }

 export function ServiceTestContext() {
@@ -131,7 +142,7 @@ export function DashboardViewStateStub() {

 export function TimeSrvStub() {
   this.init = sinon.spy();
-  this.time = {from: 'now-1h', to: 'now'};
+  this.time = { from: 'now-1h', to: 'now' };
   this.timeRange = function(parse) {
     if (parse === false) {
       return this.time;
@@ -159,7 +170,7 @@ export function ContextSrvStub() {

 export function TemplateSrvStub() {
   this.variables = [];
-  this.templateSettings = {interpolate: /\[\[([\s\S]+?)\]\]/g};
+  this.templateSettings = { interpolate: /\[\[([\s\S]+?)\]\]/g };
   this.data = {};
   this.replace = function(text) {
     return _.template(text, this.templateSettings)(this.data);
@@ -188,7 +199,7 @@ var allDeps = {
   TimeSrvStub: TimeSrvStub,
   ControllerTestContext: ControllerTestContext,
   ServiceTestContext: ServiceTestContext,
-  DashboardViewStateStub: DashboardViewStateStub
+  DashboardViewStateStub: DashboardViewStateStub,
 };

 // for legacy
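The test context above now registers a `$sanitize` stub with `$provide` and exposes `setIsUtc()`, which drives the stubbed `dashboard.isTimezoneUtc()` that timezone-aware controllers query. A rough sketch of a spec consuming these helpers; the controller name and the `createControllerPhase` call are assumptions (the phase function's name falls outside the hunks above), and the assertion style depends on the test runner:

```ts
// Hypothetical spec; the import path follows Grafana's test module aliases.
import helpers from 'test/specs/helpers';

describe('panel timezone handling', () => {
  const ctx = new helpers.ControllerTestContext();

  // providePhase registers the stubs ($sanitize, timeSrv, templateSrv, …) with $provide.
  beforeEach(ctx.providePhase({}));
  // Assumed name: the phase that instantiates the controller and builds ctx.dashboard.
  beforeEach(ctx.createControllerPhase('TestPanelCtrl'));

  it('sees the timezone the test selects', () => {
    ctx.setIsUtc(true);
    // isTimezoneUtc() is the stub added in this diff; it simply reflects setIsUtc().
    expect(ctx.dashboard.isTimezoneUtc()).toBe(true);
  });
});
```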
diff --git a/public/views/index.template.html b/public/views/index.template.html
index 2d408f70f8c..79da1d7179c 100644
--- a/public/views/index.template.html
+++ b/public/views/index.template.html
@@ -16,7 +16,7 @@
- [changed markup lost in extraction]
+ [changed markup lost in extraction]
@@ -40,7 +40,7 @@
- [changed markup lost in extraction]
+ [changed markup lost in extraction]
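The `.baron__*` and `._scrollbar` classes introduced in `_scrollbar.scss` above are hooks for the baron custom-scrollbar library. A minimal sketch of how they are typically wired together, assuming baron's documented `root`/`scroller`/`bar`/`barOnCls` options and markup that matches the styled classes (the actual Grafana call site may differ):

```ts
import baron from 'baron'; // assumes the npm package's default export

// Expected markup, matching the classes styled in _scrollbar.scss:
// <div class="baron">
//   <div class="baron__scroller">…content…</div>
//   <div class="baron__track"><div class="baron__bar"></div></div>
// </div>
baron({
  root: '.baron', // clipping container; the styles give it overflow: hidden
  scroller: '.baron__scroller', // native scroller whose WebKit scrollbar is hidden (width/height: 0)
  bar: '.baron__bar', // custom bar, revealed by `.baron._scrollbar .baron__bar`
  barOnCls: '_scrollbar', // state class toggled on the root when content actually overflows
});
```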