diff --git a/.circleci/config.yml b/.circleci/config.yml index bad5a7c1cd0..44f34d42926 100644 --- a/.circleci/config.yml +++ b/.circleci/config.yml @@ -1,6 +1,59 @@ +aliases: + # Workflow filters + - &filter-only-release + branches: + ignore: /.*/ + tags: + only: /^v[0-9]+(\.[0-9]+){2}(-.+|[^-.]*)$/ + - &filter-not-release + tags: + ignore: /^v[0-9]+(\.[0-9]+){2}(-.+|[^-.]*)$/ + - &filter-only-master + branches: + only: master + version: 2 jobs: + mysql-integration-test: + docker: + - image: circleci/golang:1.10 + - image: circleci/mysql:5.6-ram + environment: + MYSQL_ROOT_PASSWORD: rootpass + MYSQL_DATABASE: grafana_tests + MYSQL_USER: grafana + MYSQL_PASSWORD: password + working_directory: /go/src/github.com/grafana/grafana + steps: + - checkout + - run: sudo apt update + - run: sudo apt install -y mysql-client + - run: dockerize -wait tcp://127.0.0.1:3306 -timeout 120s + - run: cat docker/blocks/mysql_tests/setup.sql | mysql -h 127.0.0.1 -P 3306 -u root -prootpass + - run: + name: mysql integration tests + command: 'GRAFANA_TEST_DB=mysql go test ./pkg/services/sqlstore/... ./pkg/tsdb/mysql/... ' + + postgres-integration-test: + docker: + - image: circleci/golang:1.10 + - image: circleci/postgres:9.3-ram + environment: + POSTGRES_USER: grafanatest + POSTGRES_PASSWORD: grafanatest + POSTGRES_DB: grafanatest + working_directory: /go/src/github.com/grafana/grafana + steps: + - checkout + - run: sudo apt update + - run: sudo apt install -y postgresql-client + - run: dockerize -wait tcp://127.0.0.1:5432 -timeout 120s + - run: 'PGPASSWORD=grafanatest psql -p 5432 -h 127.0.0.1 -U grafanatest -d grafanatest -f docker/blocks/postgres_tests/setup.sql' + - run: + name: postgres integration tests + command: 'GRAFANA_TEST_DB=postgres go test ./pkg/services/sqlstore/... ./pkg/tsdb/postgres/...' + codespell: docker: - image: circleci/python @@ -35,18 +88,17 @@ jobs: - run: name: run linters command: 'gometalinter.v2 --enable-gc --vendor --deadline 10m --disable-all --enable=deadcode --enable=ineffassign --enable=structcheck --enable=unconvert --enable=varcheck ./...' + - run: + name: run go vet + command: 'go vet ./pkg/...' test-frontend: docker: - - image: circleci/node:6.11.4 + - image: circleci/node:8 steps: - checkout - - run: - name: install yarn - command: 'sudo npm install -g yarn --quiet' - restore_cache: key: dependency-cache-{{ checksum "yarn.lock" }} - # Could we skip this step if the cache has been restored? 
`[ -d node_modules ] || yarn install ...` should be able to apply to build step as well - run: name: yarn install command: 'yarn install --pure-lockfile --no-progress' @@ -68,15 +120,27 @@ jobs: name: build backend and run go tests command: './scripts/circle-test-backend.sh' - build: + build-all: docker: - - image: grafana/build-container:v0.1 + - image: grafana/build-container:1.0.0 working_directory: /go/src/github.com/grafana/grafana steps: - checkout + - run: + name: prepare build tools + command: '/tmp/bootstrap.sh' + - restore_cache: + key: phantomjs-binaries-{{ checksum "scripts/build/download-phantomjs.sh" }} + - run: + name: download phantomjs binaries + command: './scripts/build/download-phantomjs.sh' + - save_cache: + key: phantomjs-binaries-{{ checksum "scripts/build/download-phantomjs.sh" }} + paths: + - /tmp/phantomjs - run: name: build and package grafana - command: './scripts/build/build.sh' + command: './scripts/build/build-all.sh' - run: name: sign packages command: './scripts/build/sign_packages.sh' @@ -92,6 +156,8 @@ - dist/grafana* - scripts/*.sh - scripts/publish + - store_artifacts: + path: dist build-enterprise: docker: @@ -100,7 +166,7 @@ steps: - checkout - run: - name: build and package grafana + name: build, test and package grafana enterprise command: './scripts/build/build_enterprise.sh' - run: name: sign packages @@ -108,6 +174,26 @@ - run: name: sha-sum packages command: 'go run build.go sha-dist' + - run: + name: move enterprise packages into their own folder + command: 'mv dist enterprise-dist' + - persist_to_workspace: + root: . + paths: + - enterprise-dist/grafana-enterprise* + + deploy-enterprise-master: + docker: + - image: circleci/python:2.7-stretch + steps: + - attach_workspace: + at: . + - run: + name: install awscli + command: 'sudo pip install awscli' + - run: + name: deploy to s3 + command: 'aws s3 sync ./enterprise-dist s3://$ENTERPRISE_BUCKET_NAME/master' deploy-master: docker: @@ -120,16 +206,21 @@ command: 'sudo pip install awscli' - run: name: deploy to s3 - command: 'aws s3 sync ./dist s3://$BUCKET_NAME/master' + command: | + # Also create a copy of the latest tarball named with the short commit sha + cp dist/grafana-latest.linux-x64.tar.gz dist/grafana-master-$(echo "${CIRCLE_SHA1}" | cut -b1-7).linux-x64.tar.gz + aws s3 sync ./dist s3://$BUCKET_NAME/master - run: name: Trigger Windows build command: './scripts/trigger_windows_build.sh ${APPVEYOR_TOKEN} ${CIRCLE_SHA1} master' - run: name: Trigger Docker build - command: './scripts/trigger_docker_build.sh ${TRIGGER_GRAFANA_PACKER_CIRCLECI_TOKEN}' + command: './scripts/trigger_docker_build.sh ${TRIGGER_GRAFANA_PACKER_CIRCLECI_TOKEN} master-$(echo "${CIRCLE_SHA1}" | cut -b1-7)' - run: name: Publish to Grafana.com - command: './scripts/publish -apiKey ${GRAFANA_COM_API_KEY}' + command: | + rm dist/grafana-master-$(echo "${CIRCLE_SHA1}" | cut -b1-7).linux-x64.tar.gz + ./scripts/publish -apiKey ${GRAFANA_COM_API_KEY} deploy-release: docker: @@ -154,45 +245,67 @@ workflows: version: 2 test-and-build: jobs: + - build-all: + filters: *filter-only-master + - build-enterprise: + filters: *filter-only-master - codespell: - filters: - tags: - only: /.*/ + filters: *filter-not-release - gometalinter: - filters: - tags: - only: /.*/ - - build: - filters: - tags: - only: /.*/ + filters: *filter-not-release - test-frontend: - filters: - tags: - only: /.*/ + filters: *filter-not-release - test-backend: - filters: - tags: - only: /.*/ + filters: *filter-not-release + - mysql-integration-test: + filters: *filter-not-release + - 
postgres-integration-test: + filters: *filter-not-release - deploy-master: requires: + - build-all - test-backend - test-frontend - - build - filters: - branches: - only: master + - codespell + - gometalinter + - mysql-integration-test + - postgres-integration-test + filters: *filter-only-master + - deploy-enterprise-master: + requires: + - build-all + - test-backend + - test-frontend + - codespell + - gometalinter + - mysql-integration-test + - postgres-integration-test + - build-enterprise + filters: *filter-only-master + + release: + jobs: + - build-all: + filters: *filter-only-release + - codespell: + filters: *filter-only-release + - gometalinter: + filters: *filter-only-release + - test-frontend: + filters: *filter-only-release + - test-backend: + filters: *filter-only-release + - mysql-integration-test: + filters: *filter-only-release + - postgres-integration-test: + filters: *filter-only-release - deploy-release: requires: + - build-all - test-backend - test-frontend - - build - filters: - branches: - ignore: /.*/ - tags: - only: /^v[0-9]+(\.[0-9]+){2}(-.+|[^-.]*)$/ - # - build-enterprise: - # filters: - # tags: - # only: /.*/ + - codespell + - gometalinter + - mysql-integration-test + - postgres-integration-test + filters: *filter-only-release diff --git a/.dockerignore b/.dockerignore index c79fe777899..e50dfd86aa3 100644 --- a/.dockerignore +++ b/.dockerignore @@ -11,8 +11,5 @@ dump.rdb node_modules /local /tmp -/vendor *.yml *.md -/vendor -/tmp diff --git a/.gitignore b/.gitignore index 953c98d04aa..11df66360d9 100644 --- a/.gitignore +++ b/.gitignore @@ -33,6 +33,7 @@ public/css/*.min.css *.tmp .DS_Store .vscode/ +.vs/ /data/* /bin/* @@ -42,6 +43,8 @@ fig.yml docker-compose.yml docker-compose.yaml /conf/provisioning/**/custom.yaml +/conf/provisioning/**/dev.yaml +/conf/ldap_dev.toml profile.cov /grafana /local @@ -65,4 +68,6 @@ debug.test /vendor/**/*_test.go /vendor/**/.editorconfig /vendor/**/appengine* -*.orig \ No newline at end of file +*.orig + +/devenv/dashboards/bulk-testing/*.json diff --git a/CHANGELOG.md b/CHANGELOG.md index 146520d13a1..e53b3a904a3 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,17 +1,139 @@ -# 5.2.0 (unreleased) +# 5.3.0 (unreleased) + +* **Dataproxy**: Pass configured/auth headers to a Datasource [#10971](https://github.com/grafana/grafana/issues/10971), thx [@mrsiano](https://github.com/mrsiano) +* **Cleanup**: Make temp file time-to-live configurable [#11607](https://github.com/grafana/grafana/issues/11607), thx [@xapon](https://github.com/xapon) +* **LDAP**: Define Grafana Admin permission in LDAP group mappings [#2496](https://github.com/grafana/grafana/issues/2496), PR [#12622](https://github.com/grafana/grafana/issues/12622) +* **Cloudwatch**: CloudWatch GetMetricData support [#11487](https://github.com/grafana/grafana/issues/11487), thx [@mtanda](https://github.com/mtanda) ### Minor +* **Api**: Deleting a nonexistent datasource now returns 404 [#12313](https://github.com/grafana/grafana/issues/12313), thx [@AustinWinstanley](https://github.com/AustinWinstanley) +* **Dashboard**: Fix: selecting the current dashboard from search no longer reloads the dashboard [#12248](https://github.com/grafana/grafana/issues/12248) +* **Singlestat**: Make colorization of prefix and postfix optional in singlestat [#11892](https://github.com/grafana/grafana/pull/11892), thx [@ApsOps](https://github.com/ApsOps) +* **Table**: Make table sorting stable when null values exist [#12362](https://github.com/grafana/grafana/pull/12362), thx [@bz2](https://github.com/bz2) +* 
**Prometheus**: Fix graph panel bar width issue in aligned prometheus queries [#12379](https://github.com/grafana/grafana/issues/12379) +* **Prometheus**: Heatmap - fix unhandled error when some points are missing [#12484](https://github.com/grafana/grafana/issues/12484) +* **Variables**: Skip unneeded extra query request when de-selecting variable values used for repeated panels [#8186](https://github.com/grafana/grafana/issues/8186), thx [@mtanda](https://github.com/mtanda) +* **Postgres/MySQL/MSSQL**: Use floor rounding in $__timeGroup macro function [#12460](https://github.com/grafana/grafana/issues/12460), thx [@svenklemm](https://github.com/svenklemm) +* **MySQL/MSSQL**: Use datetime format instead of epoch for $__timeFilter, $__timeFrom and $__timeTo macros [#11618](https://github.com/grafana/grafana/issues/11618) [#11619](https://github.com/grafana/grafana/issues/11619), thx [@AustinWinstanley](https://github.com/AustinWinstanley) +* **Github OAuth**: Allow changes of user info at GitHub to be synced to Grafana when signing in [#11818](https://github.com/grafana/grafana/issues/11818), thx [@rwaweber](https://github.com/rwaweber) +* **Alerting**: Fix diff and percent_diff reducers [#11563](https://github.com/grafana/grafana/issues/11563), thx [@jessetane](https://github.com/jessetane) + +# 5.2.2 (unreleased) + +### Minor + +* **Prometheus**: Fix graph panel bar width issue in aligned prometheus queries [#12379](https://github.com/grafana/grafana/issues/12379) +* **Dashboard**: Dashboard links not updated when changing variables [#12506](https://github.com/grafana/grafana/issues/12506) + +# 5.2.1 (2018-06-29) + +### Minor + +* **Auth Proxy**: Important security fix for whitelist of IP address feature [#12444](https://github.com/grafana/grafana/pull/12444) +* **UI**: Fix - Grafana footer overlapping page [#12430](https://github.com/grafana/grafana/issues/12430) +* **Logging**: Errors should be reported before crashing [#12438](https://github.com/grafana/grafana/issues/12438) + +# 5.2.0-stable (2018-06-27) + +### Minor + +* **Plugins**: Handle errors correctly when loading datasource plugin [#12383](https://github.com/grafana/grafana/pull/12383) thx [@rozetko](https://github.com/rozetko) +* **Render**: Enhance error message if phantomjs executable is not found [#11868](https://github.com/grafana/grafana/issues/11868) +* **Dashboard**: Set correct text in drop down when variable is present in URL [#11968](https://github.com/grafana/grafana/issues/11968) + +### 5.2.0-beta3 fixes + +* **LDAP**: Handle "dn" ldap attribute more gracefully [#12385](https://github.com/grafana/grafana/pull/12385), reverts [#10970](https://github.com/grafana/grafana/pull/10970) + +# 5.2.0-beta3 (2018-06-21) + +### Minor + +* **Build**: All rpm packages should be signed [#12359](https://github.com/grafana/grafana/issues/12359) + +# 5.2.0-beta2 (2018-06-20) + +### New Features + +* **Dashboard**: Import dashboard to folder [#10796](https://github.com/grafana/grafana/issues/10796) + +### Minor + +* **Permissions**: Important security fix for API keys with viewer role [#12343](https://github.com/grafana/grafana/issues/12343) +* **Dashboard**: Fix so panel titles don't wrap [#11074](https://github.com/grafana/grafana/issues/11074) +* **Dashboard**: Prevent double-click when saving dashboard [#11963](https://github.com/grafana/grafana/issues/11963) +* **Dashboard**: Auto-focus the add-panel search filter [#12189](https://github.com/grafana/grafana/pull/12189) thx [@ryantxu](https://github.com/ryantxu) +* **Units**: 
W/m2 (energy), l/h (flow) and kPa (pressure) [#11233](https://github.com/grafana/grafana/pull/11233), thx [@flopp999](https://github.com/flopp999) +* **Units**: Litre/min (flow) and milliLitre/min (flow) [#12282](https://github.com/grafana/grafana/pull/12282), thx [@flopp999](https://github.com/flopp999) +* **Alerting**: Fix mobile notifications for Microsoft Teams alert notifier [#11484](https://github.com/grafana/grafana/pull/11484), thx [@manacker](https://github.com/manacker) +* **Influxdb**: Add support for mode function [#12286](https://github.com/grafana/grafana/issues/12286) +* **Cloudwatch**: Fix panic caused by bad time range settings [#12199](https://github.com/grafana/grafana/issues/12199) +* **Auth Proxy**: Whitelist proxy IP address instead of client IP address [#10707](https://github.com/grafana/grafana/issues/10707) +* **User Management**: Make sure that a user always has a current org assigned [#11076](https://github.com/grafana/grafana/issues/11076) +* **Snapshots**: Fix annotations not being properly extracted, leading to incorrect rendering of annotations [#12278](https://github.com/grafana/grafana/issues/12278) +* **LDAP**: Allow use of DN in group_search_filter_user_attribute and member_of [#3132](https://github.com/grafana/grafana/issues/3132), thx [@mmolnar](https://github.com/mmolnar) +* **Graph**: Fix legend decimals precision calculation [#11792](https://github.com/grafana/grafana/issues/11792) +* **Dashboard**: Make sure to process panels in collapsed rows when exporting dashboard [#12256](https://github.com/grafana/grafana/issues/12256) + +### 5.2.0-beta1 fixes + +* **Dashboard**: Dashboard link doesn't work when "As dropdown" option is checked [#12315](https://github.com/grafana/grafana/issues/12315) +* **Dashboard**: Fix regressions after save modal changes, including adhoc template issues [#12240](https://github.com/grafana/grafana/issues/12240) +* **Docker**: Config keys ending with _FILE are not respected [#170](https://github.com/grafana/grafana-docker/issues/170) + +# 5.2.0-beta1 (2018-06-05) + +### New Features + +* **Elasticsearch**: Alerting support [#5893](https://github.com/grafana/grafana/issues/5893), thx [@WPH95](https://github.com/WPH95) +* **Build**: Cross-compile and package Grafana for ARM, Windows, Linux and Darwin [#11920](https://github.com/grafana/grafana/pull/11920), thx [@fg2it](https://github.com/fg2it) +* **Login**: Change admin password after first login [#11882](https://github.com/grafana/grafana/issues/11882) +* **Alert list panel**: Updated to support filtering alerts by name, dashboard title, folder, tags [#11500](https://github.com/grafana/grafana/issues/11500), [#8168](https://github.com/grafana/grafana/issues/8168), [#6541](https://github.com/grafana/grafana/issues/6541) + +### Minor + +* **Dashboard**: Modified time range and variables are now not saved by default [#10748](https://github.com/grafana/grafana/issues/10748), [#8805](https://github.com/grafana/grafana/issues/8805) * **Graph**: Show invisible highest value bucket in histogram [#11498](https://github.com/grafana/grafana/issues/11498) * **Dashboard**: Enable "Save As..." 
if the user has edit permission [#11625](https://github.com/grafana/grafana/issues/11625) +* **Prometheus**: Query dates are now step-aligned [#10434](https://github.com/grafana/grafana/pull/10434) * **Prometheus**: Table column order now changes when rearranging queries [#11690](https://github.com/grafana/grafana/issues/11690), thx [@mtanda](https://github.com/mtanda) * **Variables**: Fix variable interpolation when using multiple formatting types [#11800](https://github.com/grafana/grafana/issues/11800), thx [@svenklemm](https://github.com/svenklemm) * **Dashboard**: Fix date selector styling for dark/light theme in time picker control [#11616](https://github.com/grafana/grafana/issues/11616) * **Discord**: Alert notification channel type for Discord [#7964](https://github.com/grafana/grafana/issues/7964), thx [@jereksel](https://github.com/jereksel) * **InfluxDB**: Support SELECT queries in templating query [#5013](https://github.com/grafana/grafana/issues/5013) +* **InfluxDB**: Support count distinct aggregation [#11645](https://github.com/grafana/grafana/issues/11645), thx [@kichristensen](https://github.com/kichristensen) * **Dashboard**: JSON Model under dashboard settings can now be updated & changes saved [#1429](https://github.com/grafana/grafana/issues/1429), thx [@jereksel](https://github.com/jereksel) * **Security**: Fix XSS vulnerabilities in dashboard links [#11813](https://github.com/grafana/grafana/pull/11813) * **Singlestat**: Fix "time of last point" showing local time when dashboard timezone is set to UTC [#10338](https://github.com/grafana/grafana/issues/10338) +* **Prometheus**: Add support for passing timeout parameter to Prometheus [#11788](https://github.com/grafana/grafana/pull/11788), thx [@mtanda](https://github.com/mtanda) +* **Login**: Add optional sign-out URL for generic OAuth [#9847](https://github.com/grafana/grafana/issues/9847), thx [@roidelapluie](https://github.com/roidelapluie) +* **Login**: Use proxy server from environment variable if available [#9703](https://github.com/grafana/grafana/issues/9703), thx [@iyeonok](https://github.com/iyeonok) +* **Invite users**: Friendlier error message when SMTP is not configured [#12087](https://github.com/grafana/grafana/issues/12087), thx [@thurt](https://github.com/thurt) +* **Graphite**: Don't send distributed tracing headers when using direct/browser access mode [#11494](https://github.com/grafana/grafana/issues/11494) +* **Sidenav**: Show create dashboard link for viewers who are at least editor in one folder [#11858](https://github.com/grafana/grafana/issues/11858) +* **SQL**: Epochs in seconds are now correctly converted to ms. 
[#12085](https://github.com/grafana/grafana/pull/12085) +* **Singlestat**: Fix singlestat threshold tooltip [#11971](https://github.com/grafana/grafana/issues/11971) +* **Dashboard**: Hide grid controls in fullscreen/low-activity views [#11771](https://github.com/grafana/grafana/issues/11771) +* **Dashboard**: Validate uid when importing dashboards [#11515](https://github.com/grafana/grafana/issues/11515) +* **Docker**: Support for env variables ending with _FILE [grafana-docker #166](https://github.com/grafana/grafana-docker/pull/166), thx [@efrecon](https://github.com/efrecon) +* **Alert list panel**: Show alerts for user with viewer role [#11167](https://github.com/grafana/grafana/issues/11167) +* **Provisioning**: Verify checksum of dashboards before updating to reduce load on database [#11670](https://github.com/grafana/grafana/issues/11670) +* **Provisioning**: Support symlinked files in dashboard provisioning config files [#11958](https://github.com/grafana/grafana/issues/11958) +* **Dashboard list panel**: Search dashboards by folder [#11525](https://github.com/grafana/grafana/issues/11525) +* **Sidenav**: Always show server admin link in sidenav if grafana admin [#11657](https://github.com/grafana/grafana/issues/11657) + +# 5.1.5 (2018-06-27) + +* **Docker**: Config keys ending with _FILE are not respected [#170](https://github.com/grafana/grafana-docker/issues/170) + +# 5.1.4 (2018-06-19) + +* **Permissions**: Important security fix for API keys with viewer role [#12343](https://github.com/grafana/grafana/issues/12343) + +# 5.1.3 (2018-05-16) + +* **Scroll**: Graph panel / legend texts shift to the left each time the scrollbar is moved on Firefox [#11830](https://github.com/grafana/grafana/issues/11830) # 5.1.2 (2018-05-09) @@ -1232,7 +1354,7 @@ Grafana 2.x is fundamentally different from 1.x; it now ships with an integrated **New features** - [Issue #1623](https://github.com/grafana/grafana/issues/1623). Share Dashboard: Dashboard snapshot sharing (dash and data snapshot), save to local or save to public snapshot dashboard snapshots.raintank.io site - [Issue #1622](https://github.com/grafana/grafana/issues/1622). Share Panel: The share modal now has an embed option, gives you an iframe that you can use to embed a single graph on another web site -- [Issue #718](https://github.com/grafana/grafana/issues/718). Dashboard: When saving a dashboard and another user has made changes in between the user is promted with a warning if he really wants to overwrite the other's changes +- [Issue #718](https://github.com/grafana/grafana/issues/718). Dashboard: When saving a dashboard and another user has made changes in between the user is prompted with a warning if he really wants to overwrite the other's changes - [Issue #1331](https://github.com/grafana/grafana/issues/1331). Graph & Singlestat: New axis/unit format selector and more units (kbytes, Joule, Watt, eV), and new design for graph axis & grid tab and single stat options tab views - [Issue #1242](https://github.com/grafana/grafana/issues/1242). Timepicker: New option in timepicker (under dashboard settings), to change ``now`` to be for example ``now-1m``, useful when you want to ignore last minute because it contains incomplete data - [Issue #171](https://github.com/grafana/grafana/issues/171). 
Panel: Different time periods, panels can override dashboard relative time and/or add a time shift @@ -1690,3 +1812,4 @@ Thanks to everyone who contributed fixes and provided feedback :+1: # 1.0.0 (2014-01-19) First public release + diff --git a/Gopkg.lock b/Gopkg.lock index 41fc92313d1..6f08e208ecd 100644 --- a/Gopkg.lock +++ b/Gopkg.lock @@ -4,8 +4,8 @@ [[projects]] name = "cloud.google.com/go" packages = ["compute/metadata"] - revision = "767c40d6a2e058483c25fa193e963a22da17236d" - version = "v0.18.0" + revision = "056a55f54a6cc77b440b31a56a5e7c3982d32811" + version = "v0.22.0" [[projects]] name = "github.com/BurntSushi/toml" @@ -19,12 +19,6 @@ packages = ["."] revision = "7677a1d7c1137cd3dd5ba7a076d0c898a1ef4520" -[[projects]] - name = "github.com/apache/thrift" - packages = ["lib/go/thrift"] - revision = "b2a4d4ae21c789b689dd162deb819665567f481c" - version = "0.10.0" - [[projects]] name = "github.com/aws/aws-sdk-go" packages = [ @@ -38,15 +32,20 @@ "aws/credentials/ec2rolecreds", "aws/credentials/endpointcreds", "aws/credentials/stscreds", + "aws/csm", "aws/defaults", "aws/ec2metadata", "aws/endpoints", "aws/request", "aws/session", "aws/signer/v4", + "internal/sdkio", + "internal/sdkrand", "internal/shareddefaults", "private/protocol", "private/protocol/ec2query", + "private/protocol/eventstream", + "private/protocol/eventstream/eventstreamapi", "private/protocol/query", "private/protocol/query/queryutil", "private/protocol/rest", @@ -58,8 +57,8 @@ "service/s3", "service/sts" ] - revision = "decd990ddc5dcdf2f73309cbcab90d06b996ca28" - version = "v1.12.67" + revision = "fde4ded7becdeae4d26bf1212916aabba79349b4" + version = "v1.14.12" [[projects]] branch = "master" @@ -71,7 +70,7 @@ branch = "master" name = "github.com/beorn7/perks" packages = ["quantile"] - revision = "4c0e84591b9aa9e6dcfdf3e020114cd81f89d5f9" + revision = "3a771d992973f24aa725d07868b467d1ddfceafb" [[projects]] branch = "master" @@ -126,14 +125,14 @@ [[projects]] name = "github.com/fatih/color" packages = ["."] - revision = "570b54cabe6b8eb0bc2dfce68d964677d63b5260" - version = "v1.5.0" + revision = "5b77d2a35fb0ede96d138fc9a99f5c9b6aef11b4" + version = "v1.7.0" [[projects]] name = "github.com/go-ini/ini" packages = ["."] - revision = "32e4c1e6bc4e7d0d8451aa6b75200d19e37a536a" - version = "v1.32.0" + revision = "6529cf7c58879c08d927016dde4477f18a0634cb" + version = "v1.36.0" [[projects]] name = "github.com/go-ldap/ldap" @@ -182,10 +181,10 @@ version = "v1.7.0" [[projects]] - branch = "master" name = "github.com/go-xorm/builder" packages = ["."] - revision = "488224409dd8aa2ce7a5baf8d10d55764a913738" + revision = "bad0a612f0d6277b953910822ab5dfb30dd18237" + version = "v0.2.0" [[projects]] name = "github.com/go-xorm/core" @@ -209,13 +208,13 @@ "ptypes/duration", "ptypes/timestamp" ] - revision = "c65a0412e71e8b9b3bfd22925720d23c0f054237" + revision = "927b65914520a8b7d44f5c9057611cfec6b2e2d0" [[projects]] branch = "master" name = "github.com/gopherjs/gopherjs" packages = ["js"] - revision = "178c176a91fe05e3e6c58fa5c989bad19e6cdcb3" + revision = "8dffc02ea1cb8398bb73f30424697c60fcf8d4c5" [[projects]] name = "github.com/gorilla/websocket" @@ -231,32 +230,35 @@ [[projects]] branch = "master" - name = "github.com/grafana/grafana_plugin_model" - packages = ["go/datasource"] - revision = "dfe5dc0a6ce05825ba7fe2d0323d92e631bffa89" + name = "github.com/grafana/grafana-plugin-model" + packages = [ + "go/datasource", + "go/renderer" + ] + revision = "84176c64269d8060f99e750ee8aba6f062753336" [[projects]] branch = "master" name = 
"github.com/hashicorp/go-hclog" packages = ["."] - revision = "5bcb0f17e36442247290887cc914a6e507afa5c4" + revision = "69ff559dc25f3b435631604f573a5fa1efdb6433" [[projects]] name = "github.com/hashicorp/go-plugin" packages = ["."] - revision = "3e6d191694b5a3a2b99755f31b47fa209e4bcd09" + revision = "e8d22c780116115ae5624720c9af0c97afe4f551" [[projects]] branch = "master" name = "github.com/hashicorp/go-version" packages = ["."] - revision = "4fe82ae3040f80a03d04d2cccb5606a626b8e1ee" + revision = "23480c0665776210b5fbbac6eaaee40e3e6a96b7" [[projects]] branch = "master" name = "github.com/hashicorp/yamux" packages = ["."] - revision = "683f49123a33db61abfb241b7ac5e4af4dc54d55" + revision = "2658be15c5f05e76244154714161f17e3e77de2e" [[projects]] name = "github.com/inconshreveable/log15" @@ -297,16 +299,16 @@ version = "v1.1" [[projects]] - branch = "master" name = "github.com/kr/pretty" packages = ["."] - revision = "cfb55aafdaf3ec08f0db22699ab822c50091b1c4" + revision = "73f6ac0b30a98e433b289500d779f50c1a6f0712" + version = "v0.1.0" [[projects]] - branch = "master" name = "github.com/kr/text" packages = ["."] - revision = "7cafcd837844e784b526369c9bce262804aebc60" + revision = "e2ffdb16a802fe2bb95e2e35ff34f0e53aeef34f" + version = "v0.1.0" [[projects]] branch = "master" @@ -315,7 +317,7 @@ ".", "oid" ] - revision = "61fe37aa2ee24fabcdbe5c4ac1d4ac566f88f345" + revision = "d34b9ff171c21ad295489235aec8b6626023cd04" [[projects]] name = "github.com/mattn/go-colorable" @@ -332,8 +334,8 @@ [[projects]] name = "github.com/mattn/go-sqlite3" packages = ["."] - revision = "6c771bb9887719704b210e87e934f08be014bdb1" - version = "v1.6.0" + revision = "323a32be5a2421b8c7087225079c6c900ec397cd" + version = "v1.7.0" [[projects]] name = "github.com/matttproud/golang_protobuf_extensions" @@ -347,6 +349,12 @@ packages = ["."] revision = "a61a99592b77c9ba629d254a693acffaeb4b7e28" +[[projects]] + name = "github.com/oklog/run" + packages = ["."] + revision = "4dadeb3030eda0273a12382bb2348ffc7c9d1a39" + version = "v1.0.0" + [[projects]] name = "github.com/opentracing/opentracing-go" packages = [ @@ -394,7 +402,7 @@ "internal/bitbucket.org/ww/goautoneg", "model" ] - revision = "89604d197083d4781071d3c65855d24ecfb0a563" + revision = "d811d2e9bf898806ecfb6ef6296774b13ffc314c" [[projects]] branch = "master" @@ -402,10 +410,10 @@ packages = [ ".", "internal/util", - "nfsd", + "nfs", "xfs" ] - revision = "85fadb6e89903ef7cca6f6a804474cd5ea85b6e1" + revision = "8b1c2da0d56deffdbb9e48d4414b4e674bd8083e" [[projects]] branch = "master" @@ -414,10 +422,16 @@ revision = "cb7f23ec59bec0d61b19c56cd88cee3d0cc1870c" [[projects]] - branch = "master" name = "github.com/sergi/go-diff" packages = ["diffmatchpatch"] revision = "1744e2970ca51c86172c8190fadad617561ed6e7" + version = "v1.0.0" + +[[projects]] + branch = "master" + name = "github.com/shurcooL/sanitized_anchor_name" + packages = ["."] + revision = "86672fcb3f950f35f2e675df2240550f2a50762f" [[projects]] name = "github.com/smartystreets/assertions" @@ -426,8 +440,8 @@ "internal/go-render/render", "internal/oglematchers" ] - revision = "0b37b35ec7434b77e77a4bb29b79677cced992ea" - version = "1.8.1" + revision = "7678a5452ebea5b7090a6b163f844c133f523da2" + version = "1.8.3" [[projects]] name = "github.com/smartystreets/goconvey" @@ -453,8 +467,11 @@ "internal/baggage", "internal/baggage/remote", "internal/spanlog", + "internal/throttler", + "internal/throttler/remote", "log", "rpcmetrics", + "thrift", "thrift-gen/agent", "thrift-gen/baggage", "thrift-gen/jaeger", @@ -462,14 
+479,14 @@ "thrift-gen/zipkincore", "utils" ] - revision = "3ac96c6e679cb60a74589b0d0aa7c70a906183f7" - version = "v2.11.2" + revision = "b043381d944715b469fd6b37addfd30145ca1758" + version = "v2.14.0" [[projects]] name = "github.com/uber/jaeger-lib" packages = ["metrics"] - revision = "7f95f4f7e80028096410abddaae2556e4c61b59f" - version = "v1.3.1" + revision = "ed3a127ec5fef7ae9ea95b01b542c47fbd999ce5" + version = "v1.5.0" [[projects]] name = "github.com/yudai/gojsondiff" @@ -493,7 +510,7 @@ "md4", "pbkdf2" ] - revision = "3d37316aaa6bd9929127ac9a527abf408178ea7b" + revision = "1a580b3eff7814fc9b40602fd35256c63b50f491" [[projects]] branch = "master" @@ -501,14 +518,14 @@ packages = [ "context", "context/ctxhttp", + "http/httpguts", "http2", "http2/hpack", "idna", "internal/timeseries", - "lex/httplex", "trace" ] - revision = "5ccada7d0a7ba9aeb5d3aca8d3501b4c2a509fec" + revision = "2491c5de3490fced2f6cff376127c667efeed857" [[projects]] branch = "master" @@ -520,22 +537,21 @@ "jws", "jwt" ] - revision = "b28fcf2b08a19742b43084fb40ab78ac6c3d8067" + revision = "cdc340f7c179dbbfa4afd43b7614e8fcadde4269" [[projects]] branch = "master" name = "golang.org/x/sync" packages = ["errgroup"] - revision = "fd80eb99c8f653c847d294a001bdf2a3a6f768f5" + revision = "1d60e4601c6fd243af51cc01ddf169918a5407ca" [[projects]] branch = "master" name = "golang.org/x/sys" packages = ["unix"] - revision = "af50095a40f9041b3b38960738837185c26e9419" + revision = "7c87d13f8e835d2fb3a70a2912c811ed0c1d241b" [[projects]] - branch = "master" name = "golang.org/x/text" packages = [ "collate", @@ -553,7 +569,8 @@ "unicode/norm", "unicode/rangetable" ] - revision = "e19ae1496984b1c655b8044a65c0300a3c878dd3" + revision = "f21a4dfb5e38f5895301dc265a8def02365cc3d0" + version = "v0.3.0" [[projects]] name = "google.golang.org/appengine" @@ -577,7 +594,7 @@ branch = "master" name = "google.golang.org/genproto" packages = ["googleapis/rpc/status"] - revision = "a8101f21cf983e773d0c1133ebc5424792003214" + revision = "7bb2a897381c9c5ab2aeb8614f758d7766af68ff" [[projects]] name = "google.golang.org/grpc" @@ -590,6 +607,7 @@ "connectivity", "credentials", "encoding", + "encoding/proto", "grpclb/grpc_lb_v1/messages", "grpclog", "health", @@ -607,8 +625,8 @@ "tap", "transport" ] - revision = "6b51017f791ae1cfbec89c52efdf444b13b550ef" - version = "v1.9.2" + revision = "1e2570b1b19ade82d8dbb31bba4e65e9f9ef5b34" + version = "v1.11.1" [[projects]] branch = "v3" @@ -631,14 +649,14 @@ [[projects]] name = "gopkg.in/ini.v1" packages = ["."] - revision = "32e4c1e6bc4e7d0d8451aa6b75200d19e37a536a" - version = "v1.32.0" + revision = "6529cf7c58879c08d927016dde4477f18a0634cb" + version = "v1.36.0" [[projects]] name = "gopkg.in/macaron.v1" packages = ["."] - revision = "75f2e9b42e99652f0d82b28ccb73648f44615faa" - version = "v1.2.4" + revision = "c1be95e6d21e769e44e1ec33cec9da5837861c10" + version = "v1.3.1" [[projects]] branch = "v2" @@ -653,14 +671,14 @@ version = "v2.3.2" [[projects]] - branch = "v2" name = "gopkg.in/yaml.v2" packages = ["."] - revision = "d670f9405373e636a5a2765eea47fac0c9bc91a4" + revision = "5420a8b6744d3b0345ab293f6fcba19c978f1183" + version = "v2.2.1" [solve-meta] analyzer-name = "dep" analyzer-version = 1 - inputs-digest = "bd54a1a836599d90b36d4ac1af56d716ef9ca5be4865e217bddd49e3d32a1997" + inputs-digest = "cb8e7fd81f23ec987fc4d5dd9d31ae0f1164bc2f30cbea2fe86e0d97dd945beb" solver-name = "gps-cdcl" solver-version = 1 diff --git a/Gopkg.toml b/Gopkg.toml index a9f79c402df..6c91ec37221 100644 --- a/Gopkg.toml +++ b/Gopkg.toml @@ 
-36,7 +36,7 @@ ignored = [ [[constraint]] name = "github.com/aws/aws-sdk-go" - version = "1.12.65" + version = "1.13.56" [[constraint]] branch = "master" @@ -85,11 +85,11 @@ ignored = [ [[constraint]] name = "github.com/go-xorm/core" - version = "0.5.7" + version = "=0.5.7" [[constraint]] name = "github.com/go-xorm/xorm" - version = "0.6.4" + version = "=0.6.4" [[constraint]] name = "github.com/gorilla/websocket" @@ -101,12 +101,16 @@ ignored = [ [[constraint]] branch = "master" - name = "github.com/grafana/grafana_plugin_model" + name = "github.com/grafana/grafana-plugin-model" [[constraint]] branch = "master" name = "github.com/hashicorp/go-hclog" +[[constraint]] + name = "github.com/hashicorp/go-plugin" + revision = "e8d22c780116115ae5624720c9af0c97afe4f551" + [[constraint]] branch = "master" name = "github.com/hashicorp/go-version" @@ -125,7 +129,7 @@ ignored = [ [[constraint]] name = "github.com/mattn/go-sqlite3" - version = "1.6.0" + version = "1.7.0" [[constraint]] name = "github.com/opentracing/opentracing-go" diff --git a/Gruntfile.js b/Gruntfile.js index a0607ef49dc..23276e8a122 100644 --- a/Gruntfile.js +++ b/Gruntfile.js @@ -12,6 +12,10 @@ module.exports = function (grunt) { platform: process.platform.replace('win32', 'windows'), }; + if (grunt.option('platform')) { + config.platform = grunt.option('platform'); + } + if (grunt.option('arch')) { config.arch = grunt.option('arch'); } else { diff --git a/README.md b/README.md index 12b0c8cc74a..322523d703b 100644 --- a/README.md +++ b/README.md @@ -64,8 +64,6 @@ Run karma tests npm run karma ``` -Run - ### Recompile backend on source change To rebuild on source change. diff --git a/ROADMAP.md b/ROADMAP.md index 7b9c043fef1..6f8111fd2d4 100644 --- a/ROADMAP.md +++ b/ROADMAP.md @@ -1,28 +1,21 @@ -# Roadmap (2018-05-06) +# Roadmap (2018-06-26) This roadmap is a tentative plan for the core development team. Things change constantly as PRs come in and priorities change. But it will give you an idea of our current vision and plan. ### Short term (1-2 months) - - - Elasticsearch alerting - - Crossplatform builds - - Backend service refactorings - - Explore UI - - First login registration view - -### Mid term (2-4 months) - Multi-Stat panel + - Metrics & Log Explore UI + +### Mid term (2-4 months) - React Panels + - Change visualization (panel type) on the fly. - Templating Query Editor UI Plugin hook ### Long term (4 - 8 months) - Alerting improvements (silence, per series tracking, etc) - Progress on React migration -- Change visualization (panel type) on the fly. 
-- Multi stat panel (vertical version of singlestat with bars/graph mode with big number etc) -- Repeat panel by query results ### In a distant future far far away diff --git a/appveyor.yml b/appveyor.yml index a71eb9f81b4..5cdec1b8bf5 100644 --- a/appveyor.yml +++ b/appveyor.yml @@ -38,16 +38,3 @@ artifacts: - path: grafana-*windows-*.* name: binzip type: zip - -deploy: - - provider: Environment - name: GrafanaReleaseMaster - on: - buildType: master - - - provider: Environment - name: GrafanaReleaseRelease - on: - buildType: release - - diff --git a/build.go b/build.go index 7e7183b8b83..bcb9b2ddf7d 100644 --- a/build.go +++ b/build.go @@ -27,8 +27,7 @@ var ( goarch string goos string gocc string - gocxx string - cgo string + cgo bool pkgArch string version string = "v1" // deb & rpm does not support semver so have to handle their version a little differently @@ -53,8 +52,7 @@ func main() { flag.StringVar(&goarch, "goarch", runtime.GOARCH, "GOARCH") flag.StringVar(&goos, "goos", runtime.GOOS, "GOOS") flag.StringVar(&gocc, "cc", "", "CC") - flag.StringVar(&gocxx, "cxx", "", "CXX") - flag.StringVar(&cgo, "cgo-enabled", "", "CGO_ENABLED") + flag.BoolVar(&cgo, "cgo-enabled", cgo, "Enable cgo") flag.StringVar(&pkgArch, "pkg-arch", "", "PKG ARCH") flag.StringVar(&phjsToRelease, "phjs", "", "PhantomJS binary") flag.BoolVar(&race, "race", race, "Use race detector") @@ -93,20 +91,24 @@ func main() { build("grafana-server", "./pkg/cmd/grafana-server", []string{}) case "build": - clean() + //clean() for _, binary := range binaries { build(binary, "./pkg/cmd/"+binary, []string{}) } + case "build-frontend": + grunt(gruntBuildArg("build")...) + case "test": test("./pkg/...") grunt("test") case "package": - grunt(gruntBuildArg("release")...) - if runtime.GOOS != "windows" { - createLinuxPackages() - } + grunt(gruntBuildArg("build")...) + packageGrafana() + + case "package-only": + packageGrafana() case "pkg-rpm": grunt(gruntBuildArg("release")...) @@ -131,6 +133,22 @@ func main() { } } +func packageGrafana() { + platformArg := fmt.Sprintf("--platform=%v", goos) + previousPkgArch := pkgArch + if pkgArch == "" { + pkgArch = goarch + } + postProcessArgs := gruntBuildArg("package") + postProcessArgs = append(postProcessArgs, platformArg) + grunt(postProcessArgs...) 
+ pkgArch = previousPkgArch + + if goos == "linux" { + createLinuxPackages() + } +} + func makeLatestDistCopies() { files, err := ioutil.ReadDir("dist") if err != nil { @@ -138,9 +156,9 @@ } latestMapping := map[string]string{ - ".deb": "dist/grafana_latest_amd64.deb", - ".rpm": "dist/grafana-latest-1.x86_64.rpm", - ".tar.gz": "dist/grafana-latest.linux-x64.tar.gz", + "_amd64.deb": "dist/grafana_latest_amd64.deb", + ".x86_64.rpm": "dist/grafana-latest-1.x86_64.rpm", + ".linux-amd64.tar.gz": "dist/grafana-latest.linux-x64.tar.gz", } for _, file := range files { @@ -211,6 +229,10 @@ type linuxPackageOptions struct { } func createDebPackages() { + previousPkgArch := pkgArch + if pkgArch == "armv7" { + pkgArch = "armhf" + } createPackage(linuxPackageOptions{ packageType: "deb", homeDir: "/usr/share/grafana", @@ -228,9 +250,17 @@ depends: []string{"adduser", "libfontconfig"}, }) + pkgArch = previousPkgArch } func createRpmPackages() { + previousPkgArch := pkgArch + switch { + case pkgArch == "armv7": + pkgArch = "armhfp" + case pkgArch == "arm64": + pkgArch = "aarch64" + } createPackage(linuxPackageOptions{ packageType: "rpm", homeDir: "/usr/share/grafana", @@ -248,6 +278,7 @@ depends: []string{"/sbin/service", "fontconfig", "freetype", "urw-fonts"}, }) + pkgArch = previousPkgArch } func createLinuxPackages() { @@ -299,6 +330,7 @@ func createPackage(options linuxPackageOptions) { name := "grafana" if enterprise { name += "-enterprise" + args = append(args, "--replaces", "grafana") } args = append(args, "--name", name) @@ -386,7 +418,12 @@ func test(pkg string) { } func build(binaryName, pkg string, tags []string) { - binary := "./bin/" + binaryName + binary := fmt.Sprintf("./bin/%s-%s/%s", goos, goarch, binaryName) + if isDev { + // don't include os and arch in output path in dev environment + binary = fmt.Sprintf("./bin/%s", binaryName) + } + + if goos == "windows" { binary += ".exe" } @@ -408,6 +445,7 @@ if !isDev { setBuildEnv() runPrint("go", "version") + fmt.Printf("Targeting %s/%s\n", goos, goarch) } runPrint("go", args...) 
@@ -428,7 +466,6 @@ func ldflags() string { b.WriteString(fmt.Sprintf(" -X main.version=%s", version)) b.WriteString(fmt.Sprintf(" -X main.commit=%s", getGitSha())) b.WriteString(fmt.Sprintf(" -X main.buildstamp=%d", buildStamp())) - b.WriteString(fmt.Sprintf(" -X main.enterprise=%t", enterprise)) return b.String() } @@ -451,6 +488,14 @@ func clean() { func setBuildEnv() { os.Setenv("GOOS", goos) + if goos == "windows" { + // requires Windows >= 7 + os.Setenv("CGO_CFLAGS", "-D_WIN32_WINNT=0x0601") + } + if goarch != "amd64" || goos != "linux" { + // cgo is needed on all platforms except linux/amd64 + cgo = true + } if strings.HasPrefix(goarch, "armv") { os.Setenv("GOARCH", "arm") os.Setenv("GOARM", goarch[4:]) @@ -460,15 +505,12 @@ if goarch == "386" { os.Setenv("GO386", "387") } - if cgo != "" { - os.Setenv("CGO_ENABLED", cgo) + if cgo { + os.Setenv("CGO_ENABLED", "1") } if gocc != "" { os.Setenv("CC", gocc) } - if gocxx != "" { - os.Setenv("CXX", gocxx) - } } func getGitSha() string { diff --git a/conf/defaults.ini b/conf/defaults.ini index d45e270d65d..5faba3ea7bd 100644 --- a/conf/defaults.ini +++ b/conf/defaults.ini @@ -14,6 +14,9 @@ instance_name = ${HOSTNAME} # Path to where grafana can store temp files, sessions, and the sqlite3 db (if that is used) data = data +# Temporary files in `data` directory older than the given duration will be removed +temp_data_lifetime = 24h + # Directory where grafana can store logs logs = data/log @@ -237,6 +240,9 @@ disable_login_form = false # Set to true to disable the signout link in the side menu. useful if you use auth.proxy disable_signout_menu = false +# URL to redirect the user to after sign out +signout_redirect_url = + #################################### Anonymous Auth ###################### [auth.anonymous] # enable anonymous access diff --git a/conf/ldap.toml b/conf/ldap.toml index 166d85eabb1..a74b2b6cc2c 100644 --- a/conf/ldap.toml +++ b/conf/ldap.toml @@ -72,6 +72,8 @@ email = "email" [[servers.group_mappings]] group_dn = "cn=admins,dc=grafana,dc=org" org_role = "Admin" +# To make the user an instance admin (Grafana Admin), uncomment the line below +# grafana_admin = true # The Grafana organization database id, optional, if left out the default org (id 1) will be used # org_id = 1 diff --git a/conf/provisioning/datasources/sample.yaml b/conf/provisioning/datasources/sample.yaml index 877e229183d..37487dc4b3b 100644 --- a/conf/provisioning/datasources/sample.yaml +++ b/conf/provisioning/datasources/sample.yaml @@ -40,11 +40,14 @@ apiVersion: 1 # graphiteVersion: "1.1" # tlsAuth: true # tlsAuthWithCACert: true +# httpHeaderName1: "Authorization" # # json object of data that will be encrypted. # secureJsonData: # tlsCACert: "..." # tlsClientCert: "..." # tlsClientKey: "..." +# # +# httpHeaderValue1: "Bearer xf5yhfkpsnmgo" # version: 1 # # allow users to edit datasources from the UI. # editable: false diff --git a/conf/sample.ini b/conf/sample.ini index f12d917039d..87544a5ac39 100644 --- a/conf/sample.ini +++ b/conf/sample.ini @@ -14,6 +14,9 @@ # Path to where grafana can store temp files, sessions, and the sqlite3 db (if that is used) ;data = /var/lib/grafana +# Temporary files in `data` directory older than the given duration will be removed +;temp_data_lifetime = 24h + # Directory where grafana can store logs ;logs = /var/log/grafana @@ -217,6 +220,9 @@ log_queries = # Set to true to disable the signout link in the side menu. 
useful if you use auth.proxy, defaults to false ;disable_signout_menu = false +# URL to redirect the user to after sign out +;signout_redirect_url = + #################################### Anonymous Auth ########################## [auth.anonymous] # enable anonymous access diff --git a/devenv/README.md b/devenv/README.md new file mode 100644 index 00000000000..9abf3596776 --- /dev/null +++ b/devenv/README.md @@ -0,0 +1,16 @@ +This folder contains useful scripts and configuration for: + +* Configuring dev datasources in Grafana +* Configuring dev & test scenario dashboards. + +```bash +./setup.sh +``` + +After restarting the Grafana server there should be a number of datasources prefixed with `gdev-` provisioned, as well as a dashboard folder named `gdev dashboards`. This folder contains dashboards for testing dashboard & panel features. + +# Dev dashboards + +Please update these dashboards, or make new ones, as new panel & dashboard features are developed or new bugs are found. The dashboards are located in the `devenv/dev-dashboards` folder. + + diff --git a/devenv/bulk-dashboards/bulk-dashboards.yaml b/devenv/bulk-dashboards/bulk-dashboards.yaml new file mode 100644 index 00000000000..e0ba8a88e68 --- /dev/null +++ b/devenv/bulk-dashboards/bulk-dashboards.yaml @@ -0,0 +1,9 @@ +apiVersion: 1 + +providers: + - name: 'Bulk dashboards' + folder: 'Bulk dashboards' + type: file + options: + path: devenv/dashboards/bulk-testing + diff --git a/devenv/bulk-dashboards/bulkdash.jsonnet b/devenv/bulk-dashboards/bulkdash.jsonnet new file mode 100644 index 00000000000..4c82fd36f69 --- /dev/null +++ b/devenv/bulk-dashboards/bulkdash.jsonnet @@ -0,0 +1,1140 @@ +{ + "annotations": { + "enable": false, + "list": [ + { + "builtIn": 1, + "datasource": "-- Grafana --", + "enable": true, + "hide": true, + "iconColor": "rgba(0, 211, 255, 1)", + "name": "Annotations & Alerts", + "type": "dashboard" + } + ] + }, + "editable": true, + "gnetId": null, + "graphTooltip": 1, + "links": [], + "panels": [ + { + "aliasColors": { + "cpu": "#E24D42", + "memory": "#1f78c1", + "statsd.fakesite.counters.session_start.desktop.count": "#6ED0E0" + }, + "annotate": { + "enable": false + }, + "bars": false, + "dashLength": 10, + "dashes": false, + "datasource": null, + "editable": true, + "fill": 3, + "grid": { + "max": null, + "min": 0 + }, + "gridPos": { + "h": 7, + "w": 8, + "x": 0, + "y": 0 + }, + "id": 4, + "interactive": true, + "legend": { + "avg": false, + "current": true, + "max": false, + "min": true, + "show": true, + "total": false, + "values": false + }, + "legend_counts": true, + "lines": true, + "linewidth": 2, + "nullPointMode": "connected", + "options": false, + "percentage": false, + "pointradius": 5, + "points": false, + "renderer": "flot", + "resolution": 100, + "scale": 1, + "seriesOverrides": [ + { + "alias": "cpu", + "fill": 0, + "lines": true, + "yaxis": 2, + "zindex": 2 + }, + { + "alias": "memory", + "pointradius": 2, + "points": true + } + ], + "spaceLength": 10, + "spyable": true, + "stack": false, + "steppedLine": false, + "targets": [ + { + "hide": false, + "refId": "A", + "target": "alias(movingAverage(scaleToSeconds(apps.fakesite.web_server_01.counters.request_status.code_302.count, 10), 20), 'cpu')" + }, + { + "refId": "B", + "target": "alias(statsd.fakesite.counters.session_start.desktop.count, 'memory')" + } + ], + "thresholds": [], + "timeFrom": null, + "timeShift": null, + "timezone": "browser", + "title": "Memory / CPU", + "tooltip": { + "msResolution": false, + "query_as_alias": true, + "shared": false, + 
"sort": 0, + "value_type": "cumulative" + }, + "type": "graph", + "xaxis": { + "buckets": null, + "mode": "time", + "name": null, + "show": true, + "values": [] + }, + "yaxes": [ + { + "format": "bytes", + "logBase": 1, + "max": null, + "min": null, + "show": true + }, + { + "format": "percent", + "logBase": 1, + "max": null, + "min": 0, + "show": true + } + ], + "yaxis": { + "align": false, + "alignLevel": null + }, + "zerofill": true + }, + { + "aliasColors": { + "logins": "#5195ce", + "logins (-1 day)": "#447EBC", + "logins (-1 hour)": "#705da0" + }, + "annotate": { + "enable": false + }, + "bars": false, + "dashLength": 10, + "dashes": false, + "datasource": null, + "editable": true, + "fill": 1, + "grid": { + "max": null, + "min": 0 + }, + "gridPos": { + "h": 7, + "w": 8, + "x": 8, + "y": 0 + }, + "id": 3, + "interactive": true, + "legend": { + "alignAsTable": false, + "avg": false, + "current": true, + "max": true, + "min": true, + "rightSide": false, + "show": true, + "total": false, + "values": false + }, + "legend_counts": true, + "lines": true, + "linewidth": 1, + "nullPointMode": "connected", + "options": false, + "percentage": false, + "pointradius": 5, + "points": false, + "renderer": "flot", + "resolution": 100, + "scale": 1, + "seriesOverrides": [], + "spaceLength": 10, + "spyable": true, + "stack": true, + "steppedLine": false, + "targets": [ + { + "refId": "A", + "target": "alias(movingAverage(scaleToSeconds(apps.fakesite.web_server_01.counters.requests.count, 1), 2), 'logins')" + }, + { + "refId": "B", + "target": "alias(movingAverage(timeShift(scaleToSeconds(apps.fakesite.web_server_01.counters.requests.count, 1), '1h'), 2), 'logins (-1 hour)')" + } + ], + "thresholds": [], + "timeFrom": null, + "timeShift": "1h", + "timezone": "browser", + "title": "logins", + "tooltip": { + "msResolution": false, + "query_as_alias": true, + "shared": false, + "sort": 0, + "value_type": "cumulative" + }, + "type": "graph", + "xaxis": { + "buckets": null, + "mode": "time", + "name": null, + "show": true, + "values": [] + }, + "yaxes": [ + { + "format": "short", + "logBase": 1, + "max": null, + "min": null, + "show": true + }, + { + "format": "short", + "logBase": 1, + "max": null, + "min": null, + "show": true + } + ], + "yaxis": { + "align": false, + "alignLevel": null + }, + "zerofill": true + }, + { + "cacheTimeout": null, + "colorBackground": false, + "colorValue": true, + "colors": [ + "#629e51", + "rgba(237, 129, 40, 0.89)", + "rgba(245, 54, 54, 0.9)" + ], + "datasource": null, + "editable": true, + "error": false, + "format": "bytes", + "gauge": { + "maxValue": 300, + "minValue": 0, + "show": true, + "thresholdLabels": false, + "thresholdMarkers": true + }, + "gridPos": { + "h": 7, + "w": 4, + "x": 16, + "y": 0 + }, + "id": 22, + "interval": null, + "links": [], + "mappingType": 1, + "mappingTypes": [ + { + "name": "value to text", + "value": 1 + }, + { + "name": "range to text", + "value": 2 + } + ], + "maxDataPoints": 100, + "nullPointMode": "connected", + "nullText": null, + "postfix": "", + "postfixFontSize": "50%", + "prefix": "", + "prefixFontSize": "50%", + "rangeMaps": [ + { + "from": "null", + "text": "N/A", + "to": "null" + } + ], + "sparkline": { + "fillColor": "rgba(31, 118, 189, 0.18)", + "full": true, + "lineColor": "rgb(31, 120, 193)", + "show": false + }, + "tableColumn": "", + "targets": [ + { + "refId": "A", + "target": "scale(apps.backend.backend_01.counters.requests.count, 0.4)" + } + ], + "thresholds": "200,270", + "title": "Memory", + "type": "singlestat", + 
"valueFontSize": "100%", + "valueMaps": [ + { + "op": "=", + "text": "N/A", + "value": "null" + } + ], + "valueName": "avg" + }, + { + "cacheTimeout": null, + "colorBackground": false, + "colorValue": true, + "colors": [ + "rgba(245, 54, 54, 0.9)", + "rgba(237, 129, 40, 0.89)", + "rgba(50, 172, 45, 0.97)" + ], + "datasource": null, + "editable": true, + "error": false, + "format": "none", + "gauge": { + "maxValue": 100, + "minValue": 0, + "show": false, + "thresholdLabels": false, + "thresholdMarkers": true + }, + "gridPos": { + "h": 3, + "w": 4, + "x": 20, + "y": 0 + }, + "id": 16, + "interval": null, + "links": [], + "mappingType": 1, + "mappingTypes": [ + { + "name": "value to text", + "value": 1 + }, + { + "name": "range to text", + "value": 2 + } + ], + "maxDataPoints": 100, + "nullPointMode": "connected", + "nullText": null, + "postfix": "", + "postfixFontSize": "50%", + "prefix": "", + "prefixFontSize": "50%", + "rangeMaps": [ + { + "from": "null", + "text": "N/A", + "to": "null" + } + ], + "sparkline": { + "fillColor": "rgba(31, 118, 189, 0.18)", + "full": true, + "lineColor": "rgb(31, 120, 193)", + "show": true + }, + "tableColumn": "", + "targets": [ + { + "refId": "A", + "target": "apps.backend.backend_02.counters.requests.count" + } + ], + "thresholds": "100,270", + "title": "Sign ups", + "type": "singlestat", + "valueFontSize": "100%", + "valueMaps": [ + { + "op": "=", + "text": "N/A", + "value": "null" + } + ], + "valueName": "avg" + }, + { + "cacheTimeout": null, + "colorBackground": false, + "colorValue": true, + "colors": [ + "rgba(245, 54, 54, 0.9)", + "rgba(237, 129, 40, 0.89)", + "rgba(50, 172, 45, 0.97)" + ], + "datasource": null, + "editable": true, + "error": false, + "format": "none", + "gauge": { + "maxValue": 100, + "minValue": 0, + "show": false, + "thresholdLabels": false, + "thresholdMarkers": true + }, + "gridPos": { + "h": 3, + "w": 4, + "x": 20, + "y": 3 + }, + "id": 17, + "interval": null, + "links": [], + "mappingType": 1, + "mappingTypes": [ + { + "name": "value to text", + "value": 1 + }, + { + "name": "range to text", + "value": 2 + } + ], + "maxDataPoints": 100, + "nullPointMode": "connected", + "nullText": null, + "postfix": "", + "postfixFontSize": "50%", + "prefix": "", + "prefixFontSize": "50%", + "rangeMaps": [ + { + "from": "null", + "text": "N/A", + "to": "null" + } + ], + "sparkline": { + "fillColor": "rgba(31, 118, 189, 0.18)", + "full": true, + "lineColor": "rgb(31, 120, 193)", + "show": true + }, + "tableColumn": "", + "targets": [ + { + "refId": "A", + "target": "apps.backend.backend_04.counters.requests.count" + } + ], + "thresholds": "100,270", + "title": "Sign outs", + "type": "singlestat", + "valueFontSize": "100%", + "valueMaps": [ + { + "op": "=", + "text": "N/A", + "value": "null" + } + ], + "valueName": "avg" + }, + { + "cacheTimeout": null, + "colorBackground": false, + "colorValue": true, + "colors": [ + "rgba(245, 54, 54, 0.9)", + "rgba(237, 129, 40, 0.89)", + "rgba(50, 172, 45, 0.97)" + ], + "datasource": null, + "editable": true, + "error": false, + "format": "none", + "gauge": { + "maxValue": 100, + "minValue": 0, + "show": false, + "thresholdLabels": false, + "thresholdMarkers": true + }, + "gridPos": { + "h": 3, + "w": 4, + "x": 20, + "y": 6 + }, + "id": 15, + "interval": null, + "links": [], + "mappingType": 1, + "mappingTypes": [ + { + "name": "value to text", + "value": 1 + }, + { + "name": "range to text", + "value": 2 + } + ], + "maxDataPoints": 100, + "nullPointMode": "connected", + "nullText": null, + "postfix": "", + 
"postfixFontSize": "50%", + "prefix": "", + "prefixFontSize": "50%", + "rangeMaps": [ + { + "from": "null", + "text": "N/A", + "to": "null" + } + ], + "sparkline": { + "fillColor": "rgba(31, 118, 189, 0.18)", + "full": true, + "lineColor": "rgb(31, 120, 193)", + "show": true + }, + "tableColumn": "", + "targets": [ + { + "refId": "A", + "target": "scale(apps.backend.backend_01.counters.requests.count, 0.7)" + } + ], + "thresholds": "100,270", + "title": "Logins", + "type": "singlestat", + "valueFontSize": "100%", + "valueMaps": [ + { + "op": "=", + "text": "N/A", + "value": "null" + } + ], + "valueName": "avg" + }, + { + "aliasColors": { + "web_server_01": "#badff4", + "web_server_02": "#5195ce", + "web_server_03": "#1f78c1", + "web_server_04": "#0a437c" + }, + "annotate": { + "enable": false + }, + "bars": false, + "dashLength": 10, + "dashes": false, + "datasource": null, + "editable": true, + "fill": 6, + "grid": { + "max": null, + "min": 0 + }, + "gridPos": { + "h": 11, + "w": 16, + "x": 0, + "y": 7 + }, + "id": 2, + "interactive": true, + "legend": { + "alignAsTable": false, + "avg": false, + "current": false, + "max": false, + "min": false, + "rightSide": false, + "show": true, + "total": false, + "values": false + }, + "legend_counts": true, + "lines": true, + "linewidth": 1, + "nullPointMode": "connected", + "options": false, + "percentage": false, + "pointradius": 5, + "points": false, + "renderer": "flot", + "resolution": 100, + "scale": 1, + "seriesOverrides": [], + "spaceLength": 10, + "spyable": true, + "stack": true, + "steppedLine": false, + "targets": [ + { + "refId": "A", + "target": "aliasByNode(movingAverage(scaleToSeconds(apps.fakesite.*.counters.requests.count, 1), 2), 2)" + } + ], + "thresholds": [], + "timeFrom": null, + "timeShift": null, + "timezone": "browser", + "title": "server requests", + "tooltip": { + "msResolution": false, + "query_as_alias": true, + "shared": true, + "sort": 0, + "value_type": "cumulative" + }, + "type": "graph", + "xaxis": { + "buckets": null, + "mode": "time", + "name": null, + "show": true, + "values": [] + }, + "yaxes": [ + { + "format": "short", + "logBase": 1, + "max": null, + "min": null, + "show": true + }, + { + "format": "short", + "logBase": 1, + "max": null, + "min": null, + "show": true + } + ], + "yaxis": { + "align": false, + "alignLevel": null + }, + "zerofill": true + }, + { + "cacheTimeout": null, + "colorBackground": false, + "colorValue": true, + "colors": [ + "#629e51", + "rgba(237, 129, 40, 0.89)", + "rgba(245, 54, 54, 0.9)" + ], + "datasource": null, + "editable": true, + "error": false, + "format": "none", + "gauge": { + "maxValue": 300, + "minValue": 0, + "show": true, + "thresholdLabels": false, + "thresholdMarkers": true + }, + "gridPos": { + "h": 5, + "w": 4, + "x": 16, + "y": 7 + }, + "id": 21, + "interval": null, + "links": [], + "mappingType": 1, + "mappingTypes": [ + { + "name": "value to text", + "value": 1 + }, + { + "name": "range to text", + "value": 2 + } + ], + "maxDataPoints": 100, + "nullPointMode": "connected", + "nullText": null, + "postfix": "", + "postfixFontSize": "50%", + "prefix": "", + "prefixFontSize": "50%", + "rangeMaps": [ + { + "from": "null", + "text": "N/A", + "to": "null" + } + ], + "sparkline": { + "fillColor": "rgba(31, 118, 189, 0.18)", + "full": true, + "lineColor": "rgb(31, 120, 193)", + "show": false + }, + "tableColumn": "", + "targets": [ + { + "refId": "A", + "target": "scale(apps.backend.backend_01.counters.requests.count, 0.8)" + } + ], + "thresholds": "200,270", + "title": 
"Logouts", + "type": "singlestat", + "valueFontSize": "100%", + "valueMaps": [ + { + "op": "=", + "text": "N/A", + "value": "null" + } + ], + "valueName": "avg" + }, + { + "cacheTimeout": null, + "colorBackground": false, + "colorValue": true, + "colors": [ + "rgba(245, 54, 54, 0.9)", + "rgba(237, 129, 40, 0.89)", + "rgba(50, 172, 45, 0.97)" + ], + "datasource": null, + "editable": true, + "error": false, + "format": "none", + "gauge": { + "maxValue": 100, + "minValue": 0, + "show": false, + "thresholdLabels": false, + "thresholdMarkers": true + }, + "gridPos": { + "h": 3, + "w": 4, + "x": 20, + "y": 9 + }, + "id": 18, + "interval": null, + "links": [], + "mappingType": 1, + "mappingTypes": [ + { + "name": "value to text", + "value": 1 + }, + { + "name": "range to text", + "value": 2 + } + ], + "maxDataPoints": 100, + "nullPointMode": "connected", + "nullText": null, + "postfix": "", + "postfixFontSize": "50%", + "prefix": "", + "prefixFontSize": "50%", + "rangeMaps": [ + { + "from": "null", + "text": "N/A", + "to": "null" + } + ], + "sparkline": { + "fillColor": "rgba(31, 118, 189, 0.18)", + "full": true, + "lineColor": "rgb(31, 120, 193)", + "show": true + }, + "tableColumn": "", + "targets": [ + { + "refId": "A", + "target": "scale(apps.backend.backend_03.counters.requests.count, 0.3)" + } + ], + "thresholds": "100,270", + "title": "Support calls", + "type": "singlestat", + "valueFontSize": "100%", + "valueMaps": [ + { + "op": "=", + "text": "N/A", + "value": "null" + } + ], + "valueName": "avg" + }, + { + "cacheTimeout": null, + "colorBackground": false, + "colorValue": true, + "colors": [ + "#629e51", + "rgba(237, 129, 40, 0.89)", + "rgba(245, 54, 54, 0.9)" + ], + "datasource": null, + "editable": true, + "error": false, + "format": "none", + "gauge": { + "maxValue": 300, + "minValue": 0, + "show": true, + "thresholdLabels": false, + "thresholdMarkers": true + }, + "gridPos": { + "h": 6, + "w": 4, + "x": 16, + "y": 12 + }, + "id": 26, + "interval": null, + "links": [], + "mappingType": 1, + "mappingTypes": [ + { + "name": "value to text", + "value": 1 + }, + { + "name": "range to text", + "value": 2 + } + ], + "maxDataPoints": 100, + "nullPointMode": "connected", + "nullText": null, + "postfix": "", + "postfixFontSize": "50%", + "prefix": "", + "prefixFontSize": "50%", + "rangeMaps": [ + { + "from": "null", + "text": "N/A", + "to": "null" + } + ], + "sparkline": { + "fillColor": "rgba(31, 118, 189, 0.18)", + "full": true, + "lineColor": "rgb(31, 120, 193)", + "show": false + }, + "tableColumn": "", + "targets": [ + { + "refId": "A", + "target": "scale(apps.backend.backend_01.counters.requests.count, 0.2)" + } + ], + "thresholds": "200,270", + "title": "Google hits", + "type": "singlestat", + "valueFontSize": "100%", + "valueMaps": [ + { + "op": "=", + "text": "N/A", + "value": "null" + } + ], + "valueName": "avg" + }, + { + "cacheTimeout": null, + "colorBackground": false, + "colorValue": true, + "colors": [ + "#629e51", + "rgba(237, 129, 40, 0.89)", + "rgba(245, 54, 54, 0.9)" + ], + "datasource": null, + "editable": true, + "error": false, + "format": "none", + "gauge": { + "maxValue": 300, + "minValue": 0, + "show": true, + "thresholdLabels": false, + "thresholdMarkers": true + }, + "gridPos": { + "h": 6, + "w": 4, + "x": 20, + "y": 12 + }, + "id": 24, + "interval": null, + "links": [], + "mappingType": 1, + "mappingTypes": [ + { + "name": "value to text", + "value": 1 + }, + { + "name": "range to text", + "value": 2 + } + ], + "maxDataPoints": 100, + "nullPointMode": "connected", + 
"nullText": null, + "postfix": "", + "postfixFontSize": "50%", + "prefix": "", + "prefixFontSize": "50%", + "rangeMaps": [ + { + "from": "null", + "text": "N/A", + "to": "null" + } + ], + "sparkline": { + "fillColor": "rgba(31, 118, 189, 0.18)", + "full": true, + "lineColor": "rgb(31, 120, 193)", + "show": false + }, + "tableColumn": "", + "targets": [ + { + "refId": "A", + "target": "scale(apps.backend.backend_01.counters.requests.count, 0.2)" + } + ], + "thresholds": "200,270", + "title": "Google hits", + "type": "singlestat", + "valueFontSize": "100%", + "valueMaps": [ + { + "op": "=", + "text": "N/A", + "value": "null" + } + ], + "valueName": "avg" + }, + { + "aliasColors": { + "upper_25": "#F9E2D2", + "upper_50": "#F2C96D", + "upper_75": "#EAB839" + }, + "annotate": { + "enable": false + }, + "bars": true, + "dashLength": 10, + "dashes": false, + "datasource": null, + "editable": true, + "fill": 1, + "grid": { + "max": null, + "min": 0 + }, + "gridPos": { + "h": 11, + "w": 24, + "x": 0, + "y": 18 + }, + "id": 5, + "interactive": true, + "legend": { + "alignAsTable": true, + "avg": true, + "current": false, + "max": false, + "min": false, + "rightSide": true, + "show": true, + "total": false, + "values": true + }, + "legend_counts": true, + "lines": false, + "linewidth": 2, + "nullPointMode": "connected", + "options": false, + "percentage": false, + "pointradius": 5, + "points": false, + "renderer": "flot", + "resolution": 100, + "scale": 1, + "seriesOverrides": [], + "spaceLength": 10, + "spyable": true, + "stack": true, + "steppedLine": false, + "targets": [ + { + "refId": "A", + "target": "aliasByNode(summarize(statsd.fakesite.timers.ads_timer.*, '4min', 'avg'), 4)" + } + ], + "thresholds": [], + "timeFrom": null, + "timeShift": null, + "timezone": "browser", + "title": "client side full page load", + "tooltip": { + "msResolution": false, + "query_as_alias": true, + "shared": false, + "sort": 0, + "value_type": "cumulative" + }, + "type": "graph", + "xaxis": { + "buckets": null, + "mode": "time", + "name": null, + "show": true, + "values": [] + }, + "yaxes": [ + { + "format": "ms", + "logBase": 1, + "max": null, + "min": null, + "show": true + }, + { + "format": "short", + "logBase": 1, + "max": null, + "min": null, + "show": true + } + ], + "yaxis": { + "align": false, + "alignLevel": null + }, + "zerofill": true + } + ], + "refresh": false, + "schemaVersion": 16, + "style": "dark", + "tags": [ + "demo" + ], + "templating": { + "list": [] + }, + "time": { + "from": "now-1h", + "to": "now" + }, + "timepicker": { + "collapse": false, + "enable": true, + "notice": false, + "now": true, + "refresh_intervals": [ + "5s", + "10s", + "30s", + "1m", + "5m", + "15m", + "30m", + "1h", + "2h", + "1d" + ], + "status": "Stable", + "time_options": [ + "5m", + "15m", + "1h", + "2h", + " 6h", + "12h", + "24h", + "2d", + "7d", + "30d" + ], + "type": "timepicker" + }, + "timezone": "browser", + "title": "Big Dashboard", + "uid": "000000003", + "version": 16 +} diff --git a/devenv/dashboards.yaml b/devenv/dashboards.yaml new file mode 100644 index 00000000000..226c1a8b335 --- /dev/null +++ b/devenv/dashboards.yaml @@ -0,0 +1,9 @@ +apiVersion: 1 + +providers: + - name: 'gdev dashboards' + folder: 'gdev dashboards' + type: file + options: + path: devenv/dev-dashboards + diff --git a/devenv/datasources.yaml b/devenv/datasources.yaml new file mode 100644 index 00000000000..241381097b1 --- /dev/null +++ b/devenv/datasources.yaml @@ -0,0 +1,78 @@ +apiVersion: 1 + +datasources: + - name: gdev-graphite + 
type: graphite + access: proxy + url: http://localhost:8080 + jsonData: + graphiteVersion: "1.1" + + - name: gdev-prometheus + type: prometheus + access: proxy + isDefault: true + url: http://localhost:9090 + + - name: gdev-testdata + type: testdata + + - name: gdev-influxdb + type: influxdb + access: proxy + database: site + user: grafana + password: grafana + url: http://localhost:8086 + jsonData: + timeInterval: "15s" + + - name: gdev-opentsdb + type: opentsdb + access: proxy + url: http://localhost:4242 + jsonData: + tsdbResolution: 1 + tsdbVersion: 1 + + - name: gdev-elasticsearch-metrics + type: elasticsearch + access: proxy + database: "[metrics-]YYYY.MM.DD" + url: http://localhost:9200 + jsonData: + interval: Daily + timeField: "@timestamp" + + - name: gdev-mysql + type: mysql + url: localhost:3306 + database: grafana + user: grafana + password: password + + - name: gdev-mssql + type: mssql + url: localhost:1433 + database: grafana + user: grafana + password: "Password!" + + - name: gdev-postgres + type: postgres + url: localhost:5432 + database: grafana + user: grafana + secureJsonData: + password: password + jsonData: + sslmode: "disable" + + - name: gdev-cloudwatch + type: cloudwatch + editable: true + jsonData: + authType: credentials + defaultRegion: eu-west-2 + + diff --git a/devenv/dev-dashboards/panel_tests_graph.json b/devenv/dev-dashboards/panel_tests_graph.json new file mode 100644 index 00000000000..8a1770f0fa6 --- /dev/null +++ b/devenv/dev-dashboards/panel_tests_graph.json @@ -0,0 +1,1558 @@ +{ + "annotations": { + "list": [ + { + "builtIn": 1, + "datasource": "-- Grafana --", + "enable": true, + "hide": true, + "iconColor": "rgba(0, 211, 255, 1)", + "name": "Annotations & Alerts", + "type": "dashboard" + } + ] + }, + "editable": true, + "gnetId": null, + "graphTooltip": 0, + "links": [], + "panels": [ + { + "aliasColors": {}, + "bars": false, + "dashLength": 10, + "dashes": false, + "datasource": "gdev-testdata", + "editable": true, + "error": false, + "fill": 1, + "gridPos": { + "h": 7, + "w": 8, + "x": 0, + "y": 0 + }, + "id": 1, + "legend": { + "avg": false, + "current": false, + "max": false, + "min": false, + "show": true, + "total": false, + "values": false + }, + "lines": true, + "linewidth": 2, + "links": [], + "nullPointMode": "connected", + "percentage": false, + "pointradius": 5, + "points": false, + "renderer": "flot", + "seriesOverrides": [], + "spaceLength": 10, + "stack": false, + "steppedLine": false, + "targets": [ + { + "refId": "A", + "scenario": "random_walk", + "scenarioId": "no_data_points", + "target": "" + } + ], + "thresholds": [], + "timeFrom": null, + "timeShift": null, + "title": "No Data Points Warning", + "tooltip": { + "msResolution": false, + "shared": true, + "sort": 0, + "value_type": "cumulative" + }, + "type": "graph", + "xaxis": { + "buckets": null, + "mode": "time", + "name": null, + "show": true, + "values": [] + }, + "yaxes": [ + { + "format": "short", + "label": null, + "logBase": 1, + "max": null, + "min": null, + "show": true + }, + { + "format": "short", + "label": null, + "logBase": 1, + "max": null, + "min": null, + "show": true + } + ], + "yaxis": { + "align": false, + "alignLevel": null + } + }, + { + "aliasColors": {}, + "bars": false, + "dashLength": 10, + "dashes": false, + "datasource": "gdev-testdata", + "editable": true, + "error": false, + "fill": 1, + "gridPos": { + "h": 7, + "w": 8, + "x": 8, + "y": 0 + }, + "id": 2, + "legend": { + "avg": false, + "current": false, + "max": false, + "min": false, + "show": 
true, + "total": false, + "values": false + }, + "lines": true, + "linewidth": 2, + "links": [], + "nullPointMode": "connected", + "percentage": false, + "pointradius": 5, + "points": false, + "renderer": "flot", + "seriesOverrides": [], + "spaceLength": 10, + "stack": false, + "steppedLine": false, + "targets": [ + { + "refId": "A", + "scenario": "random_walk", + "scenarioId": "datapoints_outside_range", + "target": "" + } + ], + "thresholds": [], + "timeFrom": null, + "timeShift": null, + "title": "Datapoints Outside Range Warning", + "tooltip": { + "msResolution": false, + "shared": true, + "sort": 0, + "value_type": "cumulative" + }, + "type": "graph", + "xaxis": { + "buckets": null, + "mode": "time", + "name": null, + "show": true, + "values": [] + }, + "yaxes": [ + { + "format": "short", + "label": null, + "logBase": 1, + "max": null, + "min": null, + "show": true + }, + { + "format": "short", + "label": null, + "logBase": 1, + "max": null, + "min": null, + "show": true + } + ], + "yaxis": { + "align": false, + "alignLevel": null + } + }, + { + "aliasColors": {}, + "bars": false, + "dashLength": 10, + "dashes": false, + "datasource": "gdev-testdata", + "editable": true, + "error": false, + "fill": 1, + "gridPos": { + "h": 7, + "w": 8, + "x": 16, + "y": 0 + }, + "id": 3, + "legend": { + "avg": false, + "current": false, + "max": false, + "min": false, + "show": true, + "total": false, + "values": false + }, + "lines": true, + "linewidth": 2, + "links": [], + "nullPointMode": "connected", + "percentage": false, + "pointradius": 5, + "points": false, + "renderer": "flot", + "seriesOverrides": [], + "spaceLength": 10, + "stack": false, + "steppedLine": false, + "targets": [ + { + "refId": "A", + "scenario": "random_walk", + "scenarioId": "random_walk", + "target": "" + } + ], + "thresholds": [], + "timeFrom": null, + "timeShift": null, + "title": "Random walk series", + "tooltip": { + "msResolution": false, + "shared": true, + "sort": 0, + "value_type": "cumulative" + }, + "type": "graph", + "xaxis": { + "buckets": null, + "mode": "time", + "name": null, + "show": true, + "values": [] + }, + "yaxes": [ + { + "format": "short", + "label": null, + "logBase": 1, + "max": null, + "min": null, + "show": true + }, + { + "format": "short", + "label": null, + "logBase": 1, + "max": null, + "min": null, + "show": true + } + ], + "yaxis": { + "align": false, + "alignLevel": null + } + }, + { + "aliasColors": {}, + "bars": false, + "dashLength": 10, + "dashes": false, + "datasource": "gdev-testdata", + "editable": true, + "error": false, + "fill": 1, + "gridPos": { + "h": 7, + "w": 16, + "x": 0, + "y": 7 + }, + "id": 4, + "legend": { + "avg": false, + "current": false, + "max": false, + "min": false, + "show": true, + "total": false, + "values": false + }, + "lines": true, + "linewidth": 2, + "links": [], + "nullPointMode": "connected", + "percentage": false, + "pointradius": 5, + "points": false, + "renderer": "flot", + "seriesOverrides": [], + "spaceLength": 10, + "stack": false, + "steppedLine": false, + "targets": [ + { + "refId": "A", + "scenario": "random_walk", + "scenarioId": "random_walk", + "target": "" + } + ], + "thresholds": [], + "timeFrom": "2s", + "timeShift": null, + "title": "Millisecond res x-axis and tooltip", + "tooltip": { + "msResolution": false, + "shared": true, + "sort": 0, + "value_type": "cumulative" + }, + "type": "graph", + "xaxis": { + "buckets": null, + "mode": "time", + "name": null, + "show": true, + "values": [] + }, + "yaxes": [ + { + "format": "short", + 
"label": null, + "logBase": 1, + "max": null, + "min": null, + "show": true + }, + { + "format": "short", + "label": null, + "logBase": 1, + "max": null, + "min": null, + "show": true + } + ], + "yaxis": { + "align": false, + "alignLevel": null + } + }, + { + "content": "Just verify that the tooltip time has millisecond resolution ", + "editable": true, + "error": false, + "gridPos": { + "h": 7, + "w": 8, + "x": 16, + "y": 7 + }, + "id": 6, + "links": [], + "mode": "markdown", + "title": "", + "type": "text" + }, + { + "aliasColors": {}, + "bars": false, + "dashLength": 10, + "dashes": false, + "datasource": "gdev-testdata", + "editable": true, + "error": false, + "fill": 1, + "gridPos": { + "h": 9, + "w": 16, + "x": 0, + "y": 14 + }, + "id": 5, + "legend": { + "avg": false, + "current": false, + "max": false, + "min": false, + "show": true, + "total": false, + "values": false + }, + "lines": true, + "linewidth": 2, + "links": [], + "nullPointMode": "connected", + "percentage": false, + "pointradius": 5, + "points": false, + "renderer": "flot", + "seriesOverrides": [ + { + "alias": "B-series", + "yaxis": 2 + } + ], + "spaceLength": 10, + "stack": false, + "steppedLine": false, + "targets": [ + { + "refId": "A", + "scenarioId": "csv_metric_values", + "stringInput": "1,20,90,30,5,0", + "target": "" + }, + { + "refId": "B", + "scenarioId": "csv_metric_values", + "stringInput": "2000,3000,4000,1000,3000,10000", + "target": "" + } + ], + "thresholds": [], + "timeFrom": null, + "timeShift": null, + "title": "2 yaxis and axis labels", + "tooltip": { + "msResolution": false, + "shared": true, + "sort": 0, + "value_type": "cumulative" + }, + "type": "graph", + "xaxis": { + "buckets": null, + "mode": "time", + "name": null, + "show": true, + "values": [] + }, + "yaxes": [ + { + "format": "percent", + "label": "Perecent", + "logBase": 1, + "max": null, + "min": null, + "show": true + }, + { + "format": "short", + "label": "Pressure", + "logBase": 1, + "max": null, + "min": null, + "show": true + } + ], + "yaxis": { + "align": false, + "alignLevel": null + } + }, + { + "content": "Verify that axis labels look ok", + "editable": true, + "error": false, + "gridPos": { + "h": 9, + "w": 8, + "x": 16, + "y": 14 + }, + "id": 7, + "links": [], + "mode": "markdown", + "title": "", + "type": "text" + }, + { + "aliasColors": {}, + "bars": false, + "dashLength": 10, + "dashes": false, + "datasource": "gdev-testdata", + "editable": true, + "error": false, + "fill": 1, + "gridPos": { + "h": 7, + "w": 8, + "x": 0, + "y": 23 + }, + "id": 8, + "legend": { + "avg": false, + "current": false, + "max": false, + "min": false, + "show": true, + "total": false, + "values": false + }, + "lines": true, + "linewidth": 2, + "links": [], + "nullPointMode": "connected", + "percentage": false, + "pointradius": 5, + "points": false, + "renderer": "flot", + "seriesOverrides": [], + "spaceLength": 10, + "stack": false, + "steppedLine": false, + "targets": [ + { + "refId": "B", + "scenarioId": "csv_metric_values", + "stringInput": "1,20,null,null,null,null,null,null,100,10,10,20,30,40,10", + "target": "" + } + ], + "thresholds": [], + "timeFrom": null, + "timeShift": null, + "title": "null value connected", + "tooltip": { + "msResolution": false, + "shared": true, + "sort": 0, + "value_type": "cumulative" + }, + "type": "graph", + "xaxis": { + "buckets": null, + "mode": "time", + "name": null, + "show": true, + "values": [] + }, + "yaxes": [ + { + "format": "short", + "label": null, + "logBase": 1, + "max": null, + "min": null, + 
"show": true + }, + { + "format": "short", + "label": null, + "logBase": 1, + "max": null, + "min": null, + "show": true + } + ], + "yaxis": { + "align": false, + "alignLevel": null + } + }, + { + "aliasColors": {}, + "bars": false, + "dashLength": 10, + "dashes": false, + "datasource": "gdev-testdata", + "editable": true, + "error": false, + "fill": 1, + "gridPos": { + "h": 7, + "w": 8, + "x": 8, + "y": 23 + }, + "id": 10, + "legend": { + "avg": false, + "current": false, + "max": false, + "min": false, + "show": true, + "total": false, + "values": false + }, + "lines": true, + "linewidth": 2, + "links": [], + "nullPointMode": "null as zero", + "percentage": false, + "pointradius": 5, + "points": false, + "renderer": "flot", + "seriesOverrides": [], + "spaceLength": 10, + "stack": false, + "steppedLine": false, + "targets": [ + { + "refId": "B", + "scenarioId": "csv_metric_values", + "stringInput": "1,20,null,null,null,null,null,null,100,10,10,20,30,40,10", + "target": "" + } + ], + "thresholds": [], + "timeFrom": null, + "timeShift": null, + "title": "null value null as zero", + "tooltip": { + "msResolution": false, + "shared": true, + "sort": 0, + "value_type": "cumulative" + }, + "type": "graph", + "xaxis": { + "buckets": null, + "mode": "time", + "name": null, + "show": true, + "values": [] + }, + "yaxes": [ + { + "format": "short", + "label": null, + "logBase": 1, + "max": null, + "min": null, + "show": true + }, + { + "format": "short", + "label": null, + "logBase": 1, + "max": null, + "min": null, + "show": true + } + ], + "yaxis": { + "align": false, + "alignLevel": null + } + }, + { + "content": "Should be a long line connecting the null region in the `connected` mode, and in zero it should just be a line with zero value at the null points. 
", + "editable": true, + "error": false, + "gridPos": { + "h": 7, + "w": 8, + "x": 16, + "y": 23 + }, + "id": 13, + "links": [], + "mode": "markdown", + "title": "", + "type": "text" + }, + { + "aliasColors": {}, + "bars": false, + "dashLength": 10, + "dashes": false, + "datasource": "gdev-testdata", + "editable": true, + "error": false, + "fill": 1, + "gridPos": { + "h": 7, + "w": 16, + "x": 0, + "y": 30 + }, + "id": 9, + "legend": { + "avg": false, + "current": false, + "max": false, + "min": false, + "show": true, + "total": false, + "values": false + }, + "lines": true, + "linewidth": 2, + "links": [], + "nullPointMode": "null", + "percentage": false, + "pointradius": 5, + "points": false, + "renderer": "flot", + "seriesOverrides": [ + { + "alias": "B-series", + "zindex": -3 + } + ], + "spaceLength": 10, + "stack": true, + "steppedLine": false, + "targets": [ + { + "hide": false, + "refId": "B", + "scenarioId": "csv_metric_values", + "stringInput": "1,20,null,null,null,null,null,null,100,10,10,20,30,40,10", + "target": "" + }, + { + "alias": "", + "hide": false, + "refId": "A", + "scenarioId": "csv_metric_values", + "stringInput": "1,20,90,30,5,10,20,30,40,40,40,100,10,20,20", + "target": "" + }, + { + "alias": "", + "hide": false, + "refId": "C", + "scenarioId": "csv_metric_values", + "stringInput": "1,20,90,30,5,10,20,30,40,40,40,100,10,20,20", + "target": "" + } + ], + "thresholds": [], + "timeFrom": null, + "timeShift": null, + "title": "Stacking value ontop of nulls", + "tooltip": { + "msResolution": false, + "shared": true, + "sort": 0, + "value_type": "cumulative" + }, + "type": "graph", + "xaxis": { + "buckets": null, + "mode": "time", + "name": null, + "show": true, + "values": [] + }, + "yaxes": [ + { + "format": "short", + "label": null, + "logBase": 1, + "max": null, + "min": null, + "show": true + }, + { + "format": "short", + "label": null, + "logBase": 1, + "max": null, + "min": null, + "show": true + } + ], + "yaxis": { + "align": false, + "alignLevel": null + } + }, + { + "content": "Stacking values on top of nulls, should treat the null values as zero. 
", + "editable": true, + "error": false, + "gridPos": { + "h": 7, + "w": 8, + "x": 16, + "y": 30 + }, + "id": 14, + "links": [], + "mode": "markdown", + "title": "", + "type": "text" + }, + { + "aliasColors": {}, + "bars": false, + "dashLength": 10, + "dashes": false, + "datasource": "gdev-testdata", + "editable": true, + "error": false, + "fill": 1, + "gridPos": { + "h": 7, + "w": 16, + "x": 0, + "y": 37 + }, + "id": 12, + "legend": { + "avg": false, + "current": false, + "max": false, + "min": false, + "show": true, + "total": false, + "values": false + }, + "lines": true, + "linewidth": 2, + "links": [], + "nullPointMode": "null", + "percentage": false, + "pointradius": 5, + "points": false, + "renderer": "flot", + "seriesOverrides": [ + { + "alias": "B-series", + "zindex": -3 + } + ], + "spaceLength": 10, + "stack": true, + "steppedLine": false, + "targets": [ + { + "alias": "", + "hide": false, + "refId": "B", + "scenarioId": "csv_metric_values", + "stringInput": "1,20,40,null,null,null,null,null,null,100,10,10,20,30,40,10", + "target": "" + }, + { + "alias": "", + "hide": false, + "refId": "A", + "scenarioId": "csv_metric_values", + "stringInput": "1,20,40,null,null,null,null,null,null,100,10,10,20,30,40,10", + "target": "" + }, + { + "alias": "", + "hide": false, + "refId": "C", + "scenarioId": "csv_metric_values", + "stringInput": "1,20,40,null,null,null,null,null,null,100,10,10,20,30,40,10", + "target": "" + } + ], + "thresholds": [], + "timeFrom": null, + "timeShift": null, + "title": "Stacking all series null segment", + "tooltip": { + "msResolution": false, + "shared": true, + "sort": 0, + "value_type": "cumulative" + }, + "type": "graph", + "xaxis": { + "buckets": null, + "mode": "time", + "name": null, + "show": true, + "values": [] + }, + "yaxes": [ + { + "format": "short", + "label": null, + "logBase": 1, + "max": null, + "min": null, + "show": true + }, + { + "format": "short", + "label": null, + "logBase": 1, + "max": null, + "min": null, + "show": true + } + ], + "yaxis": { + "align": false, + "alignLevel": null + } + }, + { + "content": "Stacking when all values are null should leave a gap in the graph", + "editable": true, + "error": false, + "gridPos": { + "h": 7, + "w": 8, + "x": 16, + "y": 37 + }, + "id": 15, + "links": [], + "mode": "markdown", + "title": "", + "type": "text" + }, + { + "aliasColors": {}, + "bars": false, + "dashLength": 10, + "dashes": false, + "datasource": "gdev-testdata", + "decimals": 3, + "fill": 1, + "gridPos": { + "h": 7, + "w": 24, + "x": 0, + "y": 44 + }, + "id": 20, + "legend": { + "alignAsTable": true, + "avg": true, + "current": true, + "max": true, + "min": true, + "show": true, + "total": true, + "values": true + }, + "lines": true, + "linewidth": 1, + "links": [], + "nullPointMode": "null", + "percentage": false, + "pointradius": 5, + "points": false, + "renderer": "flot", + "seriesOverrides": [], + "spaceLength": 10, + "stack": false, + "steppedLine": false, + "targets": [ + { + "refId": "A", + "scenarioId": "csv_metric_values", + "stringInput": "1,20,90,30,5,0", + "target": "" + } + ], + "thresholds": [], + "timeFrom": null, + "timeShift": null, + "title": "Legend Table Single Series Should Take Minimum Height", + "tooltip": { + "shared": true, + "sort": 0, + "value_type": "individual" + }, + "type": "graph", + "xaxis": { + "buckets": null, + "mode": "time", + "name": null, + "show": true, + "values": [] + }, + "yaxes": [ + { + "format": "short", + "label": null, + "logBase": 1, + "max": null, + "min": null, + "show": true + }, + 
{ + "format": "short", + "label": null, + "logBase": 1, + "max": null, + "min": null, + "show": true + } + ], + "yaxis": { + "align": false, + "alignLevel": null + } + }, + { + "aliasColors": {}, + "bars": false, + "dashLength": 10, + "dashes": false, + "datasource": "gdev-testdata", + "decimals": 3, + "fill": 1, + "gridPos": { + "h": 7, + "w": 12, + "x": 0, + "y": 51 + }, + "id": 16, + "legend": { + "alignAsTable": true, + "avg": true, + "current": true, + "max": true, + "min": true, + "show": true, + "total": true, + "values": true + }, + "lines": true, + "linewidth": 1, + "links": [], + "nullPointMode": "null", + "percentage": false, + "pointradius": 5, + "points": false, + "renderer": "flot", + "seriesOverrides": [], + "spaceLength": 10, + "stack": false, + "steppedLine": false, + "targets": [ + { + "refId": "A", + "scenarioId": "csv_metric_values", + "stringInput": "1,20,90,30,5,0", + "target": "" + }, + { + "refId": "B", + "scenarioId": "csv_metric_values", + "stringInput": "1,20,90,30,5,0", + "target": "" + }, + { + "refId": "C", + "scenarioId": "csv_metric_values", + "stringInput": "1,20,90,30,5,0", + "target": "" + }, + { + "refId": "D", + "scenarioId": "csv_metric_values", + "stringInput": "1,20,90,30,5,0", + "target": "" + } + ], + "thresholds": [], + "timeFrom": null, + "timeShift": null, + "title": "Legend Table No Scroll Visible", + "tooltip": { + "shared": true, + "sort": 0, + "value_type": "individual" + }, + "type": "graph", + "xaxis": { + "buckets": null, + "mode": "time", + "name": null, + "show": true, + "values": [] + }, + "yaxes": [ + { + "format": "short", + "label": null, + "logBase": 1, + "max": null, + "min": null, + "show": true + }, + { + "format": "short", + "label": null, + "logBase": 1, + "max": null, + "min": null, + "show": true + } + ], + "yaxis": { + "align": false, + "alignLevel": null + } + }, + { + "aliasColors": {}, + "bars": false, + "dashLength": 10, + "dashes": false, + "datasource": "gdev-testdata", + "decimals": 3, + "fill": 1, + "gridPos": { + "h": 7, + "w": 12, + "x": 12, + "y": 51 + }, + "id": 17, + "legend": { + "alignAsTable": true, + "avg": true, + "current": true, + "max": true, + "min": true, + "show": true, + "total": true, + "values": true + }, + "lines": true, + "linewidth": 1, + "links": [], + "nullPointMode": "null", + "percentage": false, + "pointradius": 5, + "points": false, + "renderer": "flot", + "seriesOverrides": [], + "spaceLength": 10, + "stack": false, + "steppedLine": false, + "targets": [ + { + "refId": "A", + "scenarioId": "csv_metric_values", + "stringInput": "1,20,90,30,5,0", + "target": "" + }, + { + "refId": "B", + "scenarioId": "csv_metric_values", + "stringInput": "1,20,90,30,5,0", + "target": "" + }, + { + "refId": "C", + "scenarioId": "csv_metric_values", + "stringInput": "1,20,90,30,5,0", + "target": "" + }, + { + "refId": "D", + "scenarioId": "csv_metric_values", + "stringInput": "1,20,90,30,5,0", + "target": "" + }, + { + "refId": "E", + "scenarioId": "csv_metric_values", + "stringInput": "1,20,90,30,5,0", + "target": "" + }, + { + "refId": "F", + "scenarioId": "csv_metric_values", + "stringInput": "1,20,90,30,5,0", + "target": "" + }, + { + "refId": "G", + "scenarioId": "csv_metric_values", + "stringInput": "1,20,90,30,5,0", + "target": "" + }, + { + "refId": "H", + "scenarioId": "csv_metric_values", + "stringInput": "1,20,90,30,5,0", + "target": "" + }, + { + "refId": "I", + "scenarioId": "csv_metric_values", + "stringInput": "1,20,90,30,5,0", + "target": "" + }, + { + "refId": "J", + "scenarioId": 
"csv_metric_values", + "stringInput": "1,20,90,30,5,0", + "target": "" + } + ], + "thresholds": [], + "timeFrom": null, + "timeShift": null, + "title": "Legend Table Should Scroll", + "tooltip": { + "shared": true, + "sort": 0, + "value_type": "individual" + }, + "type": "graph", + "xaxis": { + "buckets": null, + "mode": "time", + "name": null, + "show": true, + "values": [] + }, + "yaxes": [ + { + "format": "short", + "label": null, + "logBase": 1, + "max": null, + "min": null, + "show": true + }, + { + "format": "short", + "label": null, + "logBase": 1, + "max": null, + "min": null, + "show": true + } + ], + "yaxis": { + "align": false, + "alignLevel": null + } + }, + { + "aliasColors": {}, + "bars": false, + "dashLength": 10, + "dashes": false, + "datasource": "gdev-testdata", + "decimals": 3, + "fill": 1, + "gridPos": { + "h": 7, + "w": 12, + "x": 0, + "y": 58 + }, + "id": 18, + "legend": { + "alignAsTable": true, + "avg": true, + "current": true, + "max": true, + "min": true, + "rightSide": true, + "show": true, + "total": true, + "values": true + }, + "lines": true, + "linewidth": 1, + "links": [], + "nullPointMode": "null", + "percentage": false, + "pointradius": 5, + "points": false, + "renderer": "flot", + "seriesOverrides": [], + "spaceLength": 10, + "stack": false, + "steppedLine": false, + "targets": [ + { + "refId": "A", + "scenarioId": "csv_metric_values", + "stringInput": "1,20,90,30,5,0", + "target": "" + }, + { + "refId": "B", + "scenarioId": "csv_metric_values", + "stringInput": "1,20,90,30,5,0", + "target": "" + }, + { + "refId": "C", + "scenarioId": "csv_metric_values", + "stringInput": "1,20,90,30,5,0", + "target": "" + }, + { + "refId": "D", + "scenarioId": "csv_metric_values", + "stringInput": "1,20,90,30,5,0", + "target": "" + } + ], + "thresholds": [], + "timeFrom": null, + "timeShift": null, + "title": "Legend Table No Scroll Visible", + "tooltip": { + "shared": true, + "sort": 0, + "value_type": "individual" + }, + "type": "graph", + "xaxis": { + "buckets": null, + "mode": "time", + "name": null, + "show": true, + "values": [] + }, + "yaxes": [ + { + "format": "short", + "label": null, + "logBase": 1, + "max": null, + "min": null, + "show": true + }, + { + "format": "short", + "label": null, + "logBase": 1, + "max": null, + "min": null, + "show": true + } + ], + "yaxis": { + "align": false, + "alignLevel": null + } + }, + { + "aliasColors": {}, + "bars": false, + "dashLength": 10, + "dashes": false, + "datasource": "gdev-testdata", + "decimals": 3, + "fill": 1, + "gridPos": { + "h": 7, + "w": 12, + "x": 12, + "y": 58 + }, + "id": 19, + "legend": { + "alignAsTable": true, + "avg": true, + "current": true, + "max": true, + "min": true, + "rightSide": true, + "show": true, + "total": true, + "values": true + }, + "lines": true, + "linewidth": 1, + "links": [], + "nullPointMode": "null", + "percentage": false, + "pointradius": 5, + "points": false, + "renderer": "flot", + "seriesOverrides": [], + "spaceLength": 10, + "stack": false, + "steppedLine": false, + "targets": [ + { + "refId": "A", + "scenarioId": "csv_metric_values", + "stringInput": "1,20,90,30,5,0", + "target": "" + }, + { + "refId": "B", + "scenarioId": "csv_metric_values", + "stringInput": "1,20,90,30,5,0", + "target": "" + }, + { + "refId": "C", + "scenarioId": "csv_metric_values", + "stringInput": "1,20,90,30,5,0", + "target": "" + }, + { + "refId": "D", + "scenarioId": "csv_metric_values", + "stringInput": "1,20,90,30,5,0", + "target": "" + }, + { + "refId": "E", + "scenarioId": "csv_metric_values", 
+ "stringInput": "1,20,90,30,5,0", + "target": "" + }, + { + "refId": "F", + "scenarioId": "csv_metric_values", + "stringInput": "1,20,90,30,5,0", + "target": "" + }, + { + "refId": "G", + "scenarioId": "csv_metric_values", + "stringInput": "1,20,90,30,5,0", + "target": "" + }, + { + "refId": "H", + "scenarioId": "csv_metric_values", + "stringInput": "1,20,90,30,5,0", + "target": "" + }, + { + "refId": "I", + "scenarioId": "csv_metric_values", + "stringInput": "1,20,90,30,5,0", + "target": "" + }, + { + "refId": "J", + "scenarioId": "csv_metric_values", + "stringInput": "1,20,90,30,5,0", + "target": "" + }, + { + "refId": "K", + "scenarioId": "csv_metric_values", + "stringInput": "1,20,90,30,5,0", + "target": "" + }, + { + "refId": "L", + "scenarioId": "csv_metric_values", + "stringInput": "1,20,90,30,5,0", + "target": "" + } + ], + "thresholds": [], + "timeFrom": null, + "timeShift": null, + "title": "Legend Table No Scroll Visible", + "tooltip": { + "shared": true, + "sort": 0, + "value_type": "individual" + }, + "type": "graph", + "xaxis": { + "buckets": null, + "mode": "time", + "name": null, + "show": true, + "values": [] + }, + "yaxes": [ + { + "format": "short", + "label": null, + "logBase": 1, + "max": null, + "min": null, + "show": true + }, + { + "format": "short", + "label": null, + "logBase": 1, + "max": null, + "min": null, + "show": true + } + ], + "yaxis": { + "align": false, + "alignLevel": null + } + } + ], + "refresh": false, + "revision": 8, + "schemaVersion": 16, + "style": "dark", + "tags": [ + "gdev", + "panel-tests" + ], + "templating": { + "list": [] + }, + "time": { + "from": "now-1h", + "to": "now" + }, + "timepicker": { + "refresh_intervals": [ + "5s", + "10s", + "30s", + "1m", + "5m", + "15m", + "30m", + "1h", + "2h", + "1d" + ], + "time_options": [ + "5m", + "15m", + "1h", + "6h", + "12h", + "24h", + "2d", + "7d", + "30d" + ] + }, + "timezone": "browser", + "title": "Panel Tests - Graph", + "uid": "5SdHCadmz", + "version": 3 +} diff --git a/devenv/dev-dashboards/panel_tests_singlestat.json b/devenv/dev-dashboards/panel_tests_singlestat.json new file mode 100644 index 00000000000..2d69f27bcb6 --- /dev/null +++ b/devenv/dev-dashboards/panel_tests_singlestat.json @@ -0,0 +1,574 @@ +{ + "annotations": { + "list": [ + { + "builtIn": 1, + "datasource": "-- Grafana --", + "enable": true, + "hide": true, + "iconColor": "rgba(0, 211, 255, 1)", + "name": "Annotations & Alerts", + "type": "dashboard" + } + ] + }, + "editable": true, + "gnetId": null, + "graphTooltip": 0, + "links": [], + "panels": [ + { + "cacheTimeout": null, + "colorBackground": false, + "colorValue": true, + "colors": [ + "#299c46", + "rgba(237, 129, 40, 0.89)", + "#d44a3a" + ], + "datasource": "gdev-testdata", + "decimals": null, + "description": "", + "format": "ms", + "gauge": { + "maxValue": 100, + "minValue": 0, + "show": false, + "thresholdLabels": false, + "thresholdMarkers": true + }, + "gridPos": { + "h": 7, + "w": 8, + "x": 0, + "y": 0 + }, + "id": 2, + "interval": null, + "links": [], + "mappingType": 1, + "mappingTypes": [ + { + "name": "value to text", + "value": 1 + }, + { + "name": "range to text", + "value": 2 + } + ], + "maxDataPoints": 100, + "nullPointMode": "connected", + "nullText": null, + "postfix": "postfix", + "postfixFontSize": "50%", + "prefix": "prefix", + "prefixFontSize": "50%", + "rangeMaps": [ + { + "from": "null", + "text": "N/A", + "to": "null" + } + ], + "sparkline": { + "fillColor": "rgba(31, 118, 189, 0.18)", + "full": false, + "lineColor": "rgb(31, 120, 193)", + 
"show": true + }, + "tableColumn": "", + "targets": [ + { + "expr": "", + "format": "time_series", + "intervalFactor": 1, + "refId": "A", + "scenarioId": "csv_metric_values", + "stringInput": "1,2,3,4,5" + } + ], + "thresholds": "5,10", + "title": "prefix 3 ms (green) postfixt + sparkline", + "type": "singlestat", + "valueFontSize": "80%", + "valueMaps": [ + { + "op": "=", + "text": "N/A", + "value": "null" + } + ], + "valueName": "avg" + }, + { + "cacheTimeout": null, + "colorBackground": false, + "colorPrefix": false, + "colorValue": true, + "colors": [ + "#d44a3a", + "rgba(237, 129, 40, 0.89)", + "#299c46" + ], + "datasource": "gdev-testdata", + "decimals": null, + "description": "", + "format": "ms", + "gauge": { + "maxValue": 100, + "minValue": 0, + "show": false, + "thresholdLabels": false, + "thresholdMarkers": true + }, + "gridPos": { + "h": 7, + "w": 8, + "x": 8, + "y": 0 + }, + "id": 3, + "interval": null, + "links": [], + "mappingType": 1, + "mappingTypes": [ + { + "name": "value to text", + "value": 1 + }, + { + "name": "range to text", + "value": 2 + } + ], + "maxDataPoints": 100, + "nullPointMode": "connected", + "nullText": null, + "postfix": "", + "postfixFontSize": "50%", + "prefix": "", + "prefixFontSize": "50%", + "rangeMaps": [ + { + "from": "null", + "text": "N/A", + "to": "null" + } + ], + "sparkline": { + "fillColor": "rgba(31, 118, 189, 0.18)", + "full": true, + "lineColor": "rgb(31, 120, 193)", + "show": true + }, + "tableColumn": "", + "targets": [ + { + "expr": "", + "format": "time_series", + "intervalFactor": 1, + "refId": "A", + "scenarioId": "csv_metric_values", + "stringInput": "1,2,3,4,5" + } + ], + "thresholds": "5,10", + "title": "3 ms (red) + full height sparkline", + "type": "singlestat", + "valueFontSize": "200%", + "valueMaps": [ + { + "op": "=", + "text": "N/A", + "value": "null" + } + ], + "valueName": "avg" + }, + { + "cacheTimeout": null, + "colorBackground": true, + "colorPrefix": false, + "colorValue": false, + "colors": [ + "#d44a3a", + "rgba(237, 129, 40, 0.89)", + "#299c46" + ], + "datasource": "gdev-testdata", + "decimals": null, + "description": "", + "format": "ms", + "gauge": { + "maxValue": 100, + "minValue": 0, + "show": false, + "thresholdLabels": false, + "thresholdMarkers": true + }, + "gridPos": { + "h": 7, + "w": 8, + "x": 16, + "y": 0 + }, + "id": 4, + "interval": null, + "links": [], + "mappingType": 1, + "mappingTypes": [ + { + "name": "value to text", + "value": 1 + }, + { + "name": "range to text", + "value": 2 + } + ], + "maxDataPoints": 100, + "nullPointMode": "connected", + "nullText": null, + "postfix": "", + "postfixFontSize": "50%", + "prefix": "", + "prefixFontSize": "50%", + "rangeMaps": [ + { + "from": "null", + "text": "N/A", + "to": "null" + } + ], + "sparkline": { + "fillColor": "rgba(31, 118, 189, 0.18)", + "full": true, + "lineColor": "rgb(31, 120, 193)", + "show": false + }, + "tableColumn": "", + "targets": [ + { + "expr": "", + "format": "time_series", + "intervalFactor": 1, + "refId": "A", + "scenarioId": "csv_metric_values", + "stringInput": "1,2,3,4,5" + } + ], + "thresholds": "5,10", + "title": "3 ms + red background", + "type": "singlestat", + "valueFontSize": "200%", + "valueMaps": [ + { + "op": "=", + "text": "N/A", + "value": "null" + } + ], + "valueName": "avg" + }, + { + "cacheTimeout": null, + "colorBackground": false, + "colorPrefix": false, + "colorValue": true, + "colors": [ + "#299c46", + "rgba(237, 129, 40, 0.89)", + "#d44a3a" + ], + "datasource": "gdev-testdata", + "decimals": null, + 
"description": "", + "format": "ms", + "gauge": { + "maxValue": 150, + "minValue": 0, + "show": true, + "thresholdLabels": true, + "thresholdMarkers": true + }, + "gridPos": { + "h": 7, + "w": 8, + "x": 0, + "y": 7 + }, + "id": 5, + "interval": null, + "links": [], + "mappingType": 1, + "mappingTypes": [ + { + "name": "value to text", + "value": 1 + }, + { + "name": "range to text", + "value": 2 + } + ], + "maxDataPoints": 100, + "nullPointMode": "connected", + "nullText": null, + "postfix": "", + "postfixFontSize": "50%", + "prefix": "", + "prefixFontSize": "50%", + "rangeMaps": [ + { + "from": "null", + "text": "N/A", + "to": "null" + } + ], + "sparkline": { + "fillColor": "rgba(31, 118, 189, 0.18)", + "full": true, + "lineColor": "rgb(31, 120, 193)", + "show": false + }, + "tableColumn": "", + "targets": [ + { + "expr": "", + "format": "time_series", + "intervalFactor": 1, + "refId": "A", + "scenarioId": "csv_metric_values", + "stringInput": "10,20,80" + } + ], + "thresholds": "81,90", + "title": "80 ms green gauge, thresholds 81, 90", + "type": "singlestat", + "valueFontSize": "80%", + "valueMaps": [ + { + "op": "=", + "text": "N/A", + "value": "null" + } + ], + "valueName": "current" + }, + { + "cacheTimeout": null, + "colorBackground": false, + "colorPrefix": false, + "colorValue": true, + "colors": [ + "#299c46", + "rgba(237, 129, 40, 0.89)", + "#d44a3a" + ], + "datasource": "gdev-testdata", + "decimals": null, + "description": "", + "format": "ms", + "gauge": { + "maxValue": 150, + "minValue": 0, + "show": true, + "thresholdLabels": false, + "thresholdMarkers": true + }, + "gridPos": { + "h": 7, + "w": 8, + "x": 8, + "y": 7 + }, + "id": 6, + "interval": null, + "links": [], + "mappingType": 1, + "mappingTypes": [ + { + "name": "value to text", + "value": 1 + }, + { + "name": "range to text", + "value": 2 + } + ], + "maxDataPoints": 100, + "nullPointMode": "connected", + "nullText": null, + "postfix": "", + "postfixFontSize": "50%", + "prefix": "", + "prefixFontSize": "50%", + "rangeMaps": [ + { + "from": "null", + "text": "N/A", + "to": "null" + } + ], + "sparkline": { + "fillColor": "rgba(31, 118, 189, 0.18)", + "full": true, + "lineColor": "rgb(31, 120, 193)", + "show": false + }, + "tableColumn": "", + "targets": [ + { + "expr": "", + "format": "time_series", + "intervalFactor": 1, + "refId": "A", + "scenarioId": "csv_metric_values", + "stringInput": "10,20,80" + } + ], + "thresholds": "81,90", + "title": "80 ms green gauge, thresholds 81, 90, no labels", + "type": "singlestat", + "valueFontSize": "80%", + "valueMaps": [ + { + "op": "=", + "text": "N/A", + "value": "null" + } + ], + "valueName": "current" + }, + { + "cacheTimeout": null, + "colorBackground": false, + "colorPrefix": false, + "colorValue": true, + "colors": [ + "#299c46", + "rgba(237, 129, 40, 0.89)", + "#d44a3a" + ], + "datasource": "gdev-testdata", + "decimals": null, + "description": "", + "format": "ms", + "gauge": { + "maxValue": 150, + "minValue": 0, + "show": true, + "thresholdLabels": false, + "thresholdMarkers": false + }, + "gridPos": { + "h": 7, + "w": 8, + "x": 16, + "y": 7 + }, + "id": 7, + "interval": null, + "links": [], + "mappingType": 1, + "mappingTypes": [ + { + "name": "value to text", + "value": 1 + }, + { + "name": "range to text", + "value": 2 + } + ], + "maxDataPoints": 100, + "nullPointMode": "connected", + "nullText": null, + "postfix": "", + "postfixFontSize": "50%", + "prefix": "", + "prefixFontSize": "50%", + "rangeMaps": [ + { + "from": "null", + "text": "N/A", + "to": "null" + } + ], 
+ "sparkline": { + "fillColor": "rgba(31, 118, 189, 0.18)", + "full": true, + "lineColor": "rgb(31, 120, 193)", + "show": false + }, + "tableColumn": "", + "targets": [ + { + "expr": "", + "format": "time_series", + "intervalFactor": 1, + "refId": "A", + "scenarioId": "csv_metric_values", + "stringInput": "10,20,80" + } + ], + "thresholds": "81,90", + "title": "80 ms green gauge, thresholds 81, 90, no markers or labels", + "type": "singlestat", + "valueFontSize": "80%", + "valueMaps": [ + { + "op": "=", + "text": "N/A", + "value": "null" + } + ], + "valueName": "current" + } + ], + "refresh": false, + "revision": 8, + "schemaVersion": 16, + "style": "dark", + "tags": [ + "gdev", + "panel-tests" + ], + "templating": { + "list": [] + }, + "time": { + "from": "now-1h", + "to": "now" + }, + "timepicker": { + "refresh_intervals": [ + "5s", + "10s", + "30s", + "1m", + "5m", + "15m", + "30m", + "1h", + "2h", + "1d" + ], + "time_options": [ + "5m", + "15m", + "1h", + "6h", + "12h", + "24h", + "2d", + "7d", + "30d" + ] + }, + "timezone": "browser", + "title": "Panel Tests - Singlestat", + "uid": "singlestat", + "version": 14 +} diff --git a/devenv/dev-dashboards/panel_tests_table.json b/devenv/dev-dashboards/panel_tests_table.json new file mode 100644 index 00000000000..8337e9cd746 --- /dev/null +++ b/devenv/dev-dashboards/panel_tests_table.json @@ -0,0 +1,453 @@ +{ + "annotations": { + "list": [ + { + "builtIn": 1, + "datasource": "-- Grafana --", + "enable": true, + "hide": true, + "iconColor": "rgba(0, 211, 255, 1)", + "name": "Annotations & Alerts", + "type": "dashboard" + } + ] + }, + "editable": true, + "gnetId": null, + "graphTooltip": 0, + "links": [], + "panels": [ + { + "columns": [], + "datasource": "gdev-testdata", + "fontSize": "100%", + "gridPos": { + "h": 11, + "w": 12, + "x": 0, + "y": 0 + }, + "id": 3, + "links": [], + "pageSize": 10, + "scroll": true, + "showHeader": true, + "sort": { + "col": 0, + "desc": true + }, + "styles": [ + { + "alias": "Time", + "dateFormat": "YYYY-MM-DD HH:mm:ss", + "pattern": "Time", + "type": "date" + }, + { + "alias": "", + "colorMode": "cell", + "colors": [ + "rgba(245, 54, 54, 0.9)", + "rgba(237, 129, 40, 0.89)", + "rgba(50, 172, 45, 0.97)" + ], + "dateFormat": "YYYY-MM-DD HH:mm:ss", + "decimals": 2, + "mappingType": 1, + "pattern": "ColorCell", + "thresholds": [ + "5", + "10" + ], + "type": "number", + "unit": "currencyUSD" + }, + { + "alias": "", + "colorMode": "value", + "colors": [ + "rgba(245, 54, 54, 0.9)", + "rgba(237, 129, 40, 0.89)", + "rgba(50, 172, 45, 0.97)" + ], + "dateFormat": "YYYY-MM-DD HH:mm:ss", + "decimals": 2, + "mappingType": 1, + "pattern": "ColorValue", + "thresholds": [ + "5", + "10" + ], + "type": "number", + "unit": "Bps" + }, + { + "alias": "", + "colorMode": null, + "colors": [ + "rgba(245, 54, 54, 0.9)", + "rgba(237, 129, 40, 0.89)", + "rgba(50, 172, 45, 0.97)" + ], + "decimals": 2, + "pattern": "/.*/", + "thresholds": [], + "type": "number", + "unit": "short" + } + ], + "targets": [ + { + "alias": "server1", + "expr": "", + "format": "table", + "intervalFactor": 1, + "refId": "A", + "scenarioId": "csv_metric_values", + "stringInput": "1,20,90,30,5,0,20,10" + }, + { + "alias": "server2", + "refId": "B", + "scenarioId": "csv_metric_values", + "stringInput": "1,20,90,30,5,0" + } + ], + "title": "Time series to rows (2 pages)", + "transform": "timeseries_to_rows", + "type": "table" + }, + { + "columns": [ + { + "text": "Avg", + "value": "avg" + }, + { + "text": "Max", + "value": "max" + }, + { + "text": "Current", + 
"value": "current" + } + ], + "datasource": "gdev-testdata", + "fontSize": "100%", + "gridPos": { + "h": 11, + "w": 12, + "x": 12, + "y": 0 + }, + "id": 4, + "links": [], + "pageSize": 10, + "scroll": true, + "showHeader": true, + "sort": { + "col": 0, + "desc": true + }, + "styles": [ + { + "alias": "Time", + "dateFormat": "YYYY-MM-DD HH:mm:ss", + "pattern": "Time", + "type": "date" + }, + { + "alias": "", + "colorMode": "cell", + "colors": [ + "rgba(245, 54, 54, 0.9)", + "rgba(237, 129, 40, 0.89)", + "rgba(50, 172, 45, 0.97)" + ], + "dateFormat": "YYYY-MM-DD HH:mm:ss", + "decimals": 2, + "mappingType": 1, + "pattern": "ColorCell", + "thresholds": [ + "5", + "10" + ], + "type": "number", + "unit": "currencyUSD" + }, + { + "alias": "", + "colorMode": "value", + "colors": [ + "rgba(245, 54, 54, 0.9)", + "rgba(237, 129, 40, 0.89)", + "rgba(50, 172, 45, 0.97)" + ], + "dateFormat": "YYYY-MM-DD HH:mm:ss", + "decimals": 2, + "mappingType": 1, + "pattern": "ColorValue", + "thresholds": [ + "5", + "10" + ], + "type": "number", + "unit": "Bps" + }, + { + "alias": "", + "colorMode": null, + "colors": [ + "rgba(245, 54, 54, 0.9)", + "rgba(237, 129, 40, 0.89)", + "rgba(50, 172, 45, 0.97)" + ], + "decimals": 2, + "pattern": "/.*/", + "thresholds": [], + "type": "number", + "unit": "short" + } + ], + "targets": [ + { + "alias": "server1", + "expr": "", + "format": "table", + "intervalFactor": 1, + "refId": "A", + "scenarioId": "csv_metric_values", + "stringInput": "1,20,90,30,5,0,20,10" + }, + { + "alias": "server2", + "refId": "B", + "scenarioId": "csv_metric_values", + "stringInput": "1,20,90,30,5,0" + } + ], + "title": "Time series aggregations", + "transform": "timeseries_aggregations", + "type": "table" + }, + { + "columns": [], + "datasource": "gdev-testdata", + "fontSize": "100%", + "gridPos": { + "h": 7, + "w": 24, + "x": 0, + "y": 11 + }, + "id": 5, + "links": [], + "pageSize": null, + "scroll": true, + "showHeader": true, + "sort": { + "col": 0, + "desc": true + }, + "styles": [ + { + "alias": "Time", + "dateFormat": "YYYY-MM-DD HH:mm:ss", + "pattern": "Time", + "type": "date" + }, + { + "alias": "", + "colorMode": "row", + "colors": [ + "rgba(245, 54, 54, 0.9)", + "rgba(237, 129, 40, 0.89)", + "rgba(50, 172, 45, 0.97)" + ], + "dateFormat": "YYYY-MM-DD HH:mm:ss", + "decimals": 2, + "mappingType": 1, + "pattern": "/Color/", + "thresholds": [ + "5", + "10" + ], + "type": "number", + "unit": "currencyUSD" + }, + { + "alias": "", + "colorMode": null, + "colors": [ + "rgba(245, 54, 54, 0.9)", + "rgba(237, 129, 40, 0.89)", + "rgba(50, 172, 45, 0.97)" + ], + "decimals": 2, + "pattern": "/.*/", + "thresholds": [], + "type": "number", + "unit": "short" + } + ], + "targets": [ + { + "alias": "ColorValue", + "expr": "", + "format": "table", + "intervalFactor": 1, + "refId": "A", + "scenarioId": "csv_metric_values", + "stringInput": "1,20,90,30,5,0,20,10" + } + ], + "title": "color row by threshold", + "transform": "timeseries_to_columns", + "type": "table" + }, + { + "columns": [], + "datasource": "gdev-testdata", + "fontSize": "100%", + "gridPos": { + "h": 8, + "w": 24, + "x": 0, + "y": 18 + }, + "id": 2, + "links": [], + "pageSize": null, + "scroll": true, + "showHeader": true, + "sort": { + "col": 0, + "desc": true + }, + "styles": [ + { + "alias": "Time", + "dateFormat": "YYYY-MM-DD HH:mm:ss", + "pattern": "Time", + "type": "date" + }, + { + "alias": "", + "colorMode": "cell", + "colors": [ + "rgba(245, 54, 54, 0.9)", + "rgba(237, 129, 40, 0.89)", + "rgba(50, 172, 45, 0.97)" + ], + "dateFormat": 
"YYYY-MM-DD HH:mm:ss", + "decimals": 2, + "mappingType": 1, + "pattern": "ColorCell", + "thresholds": [ + "5", + "10" + ], + "type": "number", + "unit": "currencyUSD" + }, + { + "alias": "", + "colorMode": "value", + "colors": [ + "rgba(245, 54, 54, 0.9)", + "rgba(237, 129, 40, 0.89)", + "rgba(50, 172, 45, 0.97)" + ], + "dateFormat": "YYYY-MM-DD HH:mm:ss", + "decimals": 2, + "mappingType": 1, + "pattern": "ColorValue", + "thresholds": [ + "5", + "10" + ], + "type": "number", + "unit": "Bps" + }, + { + "alias": "", + "colorMode": null, + "colors": [ + "rgba(245, 54, 54, 0.9)", + "rgba(237, 129, 40, 0.89)", + "rgba(50, 172, 45, 0.97)" + ], + "decimals": 2, + "pattern": "/.*/", + "thresholds": [], + "type": "number", + "unit": "short" + } + ], + "targets": [ + { + "alias": "ColorValue", + "expr": "", + "format": "table", + "intervalFactor": 1, + "refId": "A", + "scenarioId": "csv_metric_values", + "stringInput": "1,20,90,30,5,0,20,10" + }, + { + "alias": "ColorCell", + "refId": "B", + "scenarioId": "csv_metric_values", + "stringInput": "5,1,2,3,4,5,10,20" + } + ], + "title": "Column style thresholds & units", + "transform": "timeseries_to_columns", + "type": "table" + } + ], + "refresh": false, + "revision": 8, + "schemaVersion": 16, + "style": "dark", + "tags": [ + "gdev", + "panel-tests" + ], + "templating": { + "list": [] + }, + "time": { + "from": "now-1h", + "to": "now" + }, + "timepicker": { + "refresh_intervals": [ + "5s", + "10s", + "30s", + "1m", + "5m", + "15m", + "30m", + "1h", + "2h", + "1d" + ], + "time_options": [ + "5m", + "15m", + "1h", + "6h", + "12h", + "24h", + "2d", + "7d", + "30d" + ] + }, + "timezone": "browser", + "title": "Panel Tests - Table", + "uid": "pttable", + "version": 1 +} diff --git a/public/app/plugins/app/testdata/dashboards/alerts.json b/devenv/dev-dashboards/testdata_alerts.json similarity index 98% rename from public/app/plugins/app/testdata/dashboards/alerts.json rename to devenv/dev-dashboards/testdata_alerts.json index 159df0f458b..8c2edebf155 100644 --- a/public/app/plugins/app/testdata/dashboards/alerts.json +++ b/devenv/dev-dashboards/testdata_alerts.json @@ -1,6 +1,6 @@ { "revision": 2, - "title": "TestData - Alerts", + "title": "Alerting with TestData", "tags": [ "grafana-test" ], @@ -48,7 +48,7 @@ }, "aliasColors": {}, "bars": false, - "datasource": "Grafana TestData", + "datasource": "gdev-testdata", "editable": true, "error": false, "fill": 1, @@ -161,7 +161,7 @@ }, "aliasColors": {}, "bars": false, - "datasource": "Grafana TestData", + "datasource": "gdev-testdata", "editable": true, "error": false, "fill": 1, diff --git a/devenv/setup.sh b/devenv/setup.sh new file mode 100755 index 00000000000..6412bbc98ea --- /dev/null +++ b/devenv/setup.sh @@ -0,0 +1,64 @@ +#!/bin/bash + +bulkDashboard() { + + requiresJsonnet + + COUNTER=0 + MAX=400 + while [ $COUNTER -lt $MAX ]; do + jsonnet -o "dashboards/bulk-testing/dashboard${COUNTER}.json" -e "local bulkDash = import 'dashboards/bulk-testing/bulkdash.jsonnet'; bulkDash + { uid: 'uid-${COUNTER}', title: 'title-${COUNTER}' }" + let COUNTER=COUNTER+1 + done + + ln -s -f -r ./dashboards/bulk-testing/bulk-dashboards.yaml ../conf/provisioning/dashboards/custom.yaml +} + +requiresJsonnet() { + if ! 
type "jsonnet" > /dev/null; then + echo "you need you install jsonnet to run this script" + echo "follow the instructions on https://github.com/google/jsonnet" + exit 1 + fi +} + +devDashboards() { + echo -e "\xE2\x9C\x94 Setting up all dev dashboards using provisioning" + ln -s -f ../../../devenv/dashboards.yaml ../conf/provisioning/dashboards/dev.yaml +} + +devDatasources() { + echo -e "\xE2\x9C\x94 Setting up all dev datasources using provisioning" + + ln -s -f ../../../devenv/datasources.yaml ../conf/provisioning/datasources/dev.yaml +} + +usage() { + echo -e "\n" + echo "Usage:" + echo " bulk-dashboards - create and provisioning 400 dashboards" + echo " no args - provisiong core datasources and dev dashboards" +} + +main() { + echo -e "------------------------------------------------------------------" + echo -e "This script setups provisioning for dev datasources and dashboards" + echo -e "------------------------------------------------------------------" + echo -e "\n" + + local cmd=$1 + + if [[ $cmd == "bulk-dashboards" ]]; then + bulkDashboard + else + devDashboards + devDatasources + fi + + if [[ -z "$cmd" ]]; then + usage + fi + +} + +main "$@" diff --git a/docker/blocks/elastic6/docker-compose.yaml b/docker/blocks/elastic6/docker-compose.yaml new file mode 100644 index 00000000000..dd2439f88e4 --- /dev/null +++ b/docker/blocks/elastic6/docker-compose.yaml @@ -0,0 +1,15 @@ +# You need to run 'sysctl -w vm.max_map_count=262144' on the host machine + + elasticsearch6: + image: docker.elastic.co/elasticsearch/elasticsearch-oss:6.2.4 + command: elasticsearch + ports: + - "11200:9200" + - "11300:9300" + + fake-elastic6-data: + image: grafana/fake-data-gen + network_mode: bridge + environment: + FD_DATASOURCE: elasticsearch6 + FD_PORT: 11200 diff --git a/docker/blocks/elastic6/elasticsearch.yml b/docker/blocks/elastic6/elasticsearch.yml new file mode 100644 index 00000000000..c57b2c12908 --- /dev/null +++ b/docker/blocks/elastic6/elasticsearch.yml @@ -0,0 +1,2 @@ +script.inline: on +script.indexed: on diff --git a/docker/blocks/mysql/docker-compose.yaml b/docker/blocks/mysql/docker-compose.yaml index 53ff9da62a7..381b04a53c8 100644 --- a/docker/blocks/mysql/docker-compose.yaml +++ b/docker/blocks/mysql/docker-compose.yaml @@ -1,5 +1,5 @@ mysql: - image: mysql:latest + image: mysql:5.6 environment: MYSQL_ROOT_PASSWORD: rootpass MYSQL_DATABASE: grafana diff --git a/docker/blocks/mysql_tests/Dockerfile b/docker/blocks/mysql_tests/Dockerfile index fa91fa3c023..89e16bc2ed6 100644 --- a/docker/blocks/mysql_tests/Dockerfile +++ b/docker/blocks/mysql_tests/Dockerfile @@ -1,3 +1,3 @@ -FROM mysql:latest +FROM mysql:5.6 ADD setup.sql /docker-entrypoint-initdb.d -CMD ["mysqld"] \ No newline at end of file +CMD ["mysqld"] diff --git a/docker/blocks/nginx_proxy/Dockerfile b/docker/blocks/nginx_proxy/Dockerfile index 9ded20dfdda..04de507499d 100644 --- a/docker/blocks/nginx_proxy/Dockerfile +++ b/docker/blocks/nginx_proxy/Dockerfile @@ -1,3 +1,4 @@ FROM nginx:alpine -COPY nginx.conf /etc/nginx/nginx.conf \ No newline at end of file +COPY nginx.conf /etc/nginx/nginx.conf +COPY htpasswd /etc/nginx/htpasswd diff --git a/docker/blocks/nginx_proxy/htpasswd b/docker/blocks/nginx_proxy/htpasswd new file mode 100755 index 00000000000..e2c5eeeff7b --- /dev/null +++ b/docker/blocks/nginx_proxy/htpasswd @@ -0,0 +1,3 @@ +user1:$apr1$1odeeQb.$kwV8D/VAAGUDU7pnHuKoV0 +user2:$apr1$A2kf25r.$6S0kp3C7vIuixS5CL0XA9. 
+admin:$apr1$IWn4DoRR$E2ol7fS/dkI18eU4bXnBO1 diff --git a/docker/blocks/nginx_proxy/nginx.conf b/docker/blocks/nginx_proxy/nginx.conf index 18e27b3fb01..860d3d0b89f 100644 --- a/docker/blocks/nginx_proxy/nginx.conf +++ b/docker/blocks/nginx_proxy/nginx.conf @@ -13,7 +13,26 @@ http { listen 10080; location /grafana/ { + ################################################################ + # Enable these settings to test with basic auth and an auth proxy header + # the htpasswd file contains an admin user with password admin and + # user1: grafana and user2: grafana + ################################################################ + + # auth_basic "Restricted Content"; + # auth_basic_user_file /etc/nginx/htpasswd; + + ################################################################ + # To use the auth proxy header, set the following in custom.ini: + # [auth.proxy] + # enabled = true + # header_name = X-WEBAUTH-USER + # header_property = username + ################################################################ + + # proxy_set_header X-WEBAUTH-USER $remote_user; + proxy_pass http://localhost:3000/; } } -} \ No newline at end of file +} diff --git a/docker/blocks/openldap/Dockerfile b/docker/blocks/openldap/Dockerfile index 54e383a6a97..76172e133a4 100644 --- a/docker/blocks/openldap/Dockerfile +++ b/docker/blocks/openldap/Dockerfile @@ -1,3 +1,5 @@ +# Fork of https://github.com/dinkel/docker-openldap + FROM debian:jessie LABEL maintainer="Christian Luginbühl " @@ -6,7 +8,8 @@ ENV OPENLDAP_VERSION 2.4.40 RUN apt-get update && \ DEBIAN_FRONTEND=noninteractive apt-get install --no-install-recommends -y \ - slapd=${OPENLDAP_VERSION}* && \ + slapd=${OPENLDAP_VERSION}* \ + ldap-utils && \ apt-get clean && \ rm -rf /var/lib/apt/lists/* @@ -20,6 +23,7 @@ COPY modules/ /etc/ldap.dist/modules COPY prepopulate/ /etc/ldap.dist/prepopulate COPY entrypoint.sh /entrypoint.sh +COPY prepopulate.sh /prepopulate.sh ENTRYPOINT ["/entrypoint.sh"] diff --git a/docker/blocks/openldap/entrypoint.sh b/docker/blocks/openldap/entrypoint.sh index d560b78d388..d202ed14b31 100755 --- a/docker/blocks/openldap/entrypoint.sh +++ b/docker/blocks/openldap/entrypoint.sh @@ -76,13 +76,14 @@ EOF IFS=","; declare -a modules=($SLAPD_ADDITIONAL_MODULES); unset IFS for module in "${modules[@]}"; do - slapadd -n0 -F /etc/ldap/slapd.d -l "/etc/ldap/modules/${module}.ldif" >/dev/null 2>&1 + echo "Adding module ${module}" + slapadd -n0 -F /etc/ldap/slapd.d -l "/etc/ldap/modules/${module}.ldif" >/dev/null 2>&1 done fi - for file in `ls /etc/ldap/prepopulate/*.ldif`; do - slapadd -F /etc/ldap/slapd.d -l "$file" - done + # This needs to run in background + # Will prepopulate entries after ldap daemon has started + ./prepopulate.sh & chown -R openldap:openldap /etc/ldap/slapd.d/ /var/lib/ldap/ /var/run/slapd/ else diff --git a/docker/blocks/openldap/ldap_dev.toml b/docker/blocks/openldap/ldap_dev.toml new file mode 100644 index 00000000000..e79771b57de --- /dev/null +++ b/docker/blocks/openldap/ldap_dev.toml @@ -0,0 +1,85 @@ +# To troubleshoot and get more log info enable ldap debug logging in grafana.ini +# [log] +# filters = ldap:debug + +[[servers]] +# Ldap server host (specify multiple hosts space separated) +host = "127.0.0.1" +# Default port is 389 or 636 if use_ssl = true +port = 389 +# Set to true if ldap server supports TLS +use_ssl = false +# Set to true if connect ldap server with STARTTLS pattern (create connection in insecure, then upgrade to secure connection with TLS) +start_tls = false +# set to true if you want to skip ssl 
cert validation
+ssl_skip_verify = false
+# set to the path to your root CA certificate or leave unset to use system defaults
+# root_ca_cert = "/path/to/certificate.crt"
+
+# Search user bind dn
+bind_dn = "cn=admin,dc=grafana,dc=org"
+# Search user bind password
+# If the password contains # or ; you have to wrap it with triple quotes. Ex """#password;"""
+bind_password = 'grafana'
+
+# User search filter, for example "(cn=%s)" or "(sAMAccountName=%s)" or "(uid=%s)"
+search_filter = "(cn=%s)"
+
+# An array of base dns to search through
+search_base_dns = ["dc=grafana,dc=org"]
+
+# In POSIX LDAP schemas, without the memberOf attribute, a secondary query must be made for groups.
+# This is done by enabling group_search_filter below. You must also set member_of = "cn"
+# in [servers.attributes] below.
+
+# Users with nested/recursive group membership and an LDAP server that supports LDAP_MATCHING_RULE_IN_CHAIN
+# can set group_search_filter, group_search_filter_user_attribute, group_search_base_dns and member_of
+# below in such a way that the user's recursive group membership is considered.
+#
+# Nested Groups + Active Directory (AD) Example:
+#
+# AD groups store the Distinguished Names (DNs) of members, so your filter must
+# recursively search your groups for the authenticating user's DN. For example:
+#
+# group_search_filter = "(member:1.2.840.113556.1.4.1941:=%s)"
+# group_search_filter_user_attribute = "distinguishedName"
+# group_search_base_dns = ["ou=groups,dc=grafana,dc=org"]
+#
+# [servers.attributes]
+# ...
+# member_of = "distinguishedName"
+
+## Group search filter, to retrieve the groups of which the user is a member (only set if the memberOf attribute is not available)
+# group_search_filter = "(&(objectClass=posixGroup)(memberUid=%s))"
+## Group search filter user attribute defines what user attribute gets substituted for %s in group_search_filter.
+## Defaults to the value of username in [servers.attributes]
+## Valid options are any of your values in [servers.attributes]
+## If you are using nested groups you probably want to set this and member_of in
+## [servers.attributes] to "distinguishedName"
+# group_search_filter_user_attribute = "distinguishedName"
+## An array of the base DNs to search through for groups. Typically uses ou=groups
+# group_search_base_dns = ["ou=groups,dc=grafana,dc=org"]
+
+# Specify names of the ldap attributes your ldap server uses
+[servers.attributes]
+name = "givenName"
+surname = "sn"
+username = "cn"
+member_of = "memberOf"
+email = "email"
+
+# Map ldap groups to grafana org roles
+[[servers.group_mappings]]
+group_dn = "cn=admins,ou=groups,dc=grafana,dc=org"
+org_role = "Admin"
+# The Grafana organization database id, optional, if left out the default org (id 1) will be used
+# org_id = 1
+
+[[servers.group_mappings]]
+group_dn = "cn=editors,ou=groups,dc=grafana,dc=org"
+org_role = "Editor"
+
+[[servers.group_mappings]]
+# If you want to match all (or no) ldap groups then you can use a wildcard
+group_dn = "*"
+org_role = "Viewer"
diff --git a/docker/blocks/openldap/notes.md b/docker/blocks/openldap/notes.md
index 71813c2899a..65155423616 100644
--- a/docker/blocks/openldap/notes.md
+++ b/docker/blocks/openldap/notes.md
@@ -1,6 +1,6 @@
 # Notes on OpenLdap Docker Block
 
-Any ldif files added to the prepopulate subdirectory will be automatically imported into the OpenLdap database.
+Any ldif files added to the prepopulate subdirectory will be automatically imported into the OpenLdap database.
The ldif files add three users, `ldapviewer`, `ldapeditor` and `ldapadmin`. Two groups, `admins` and `users`, are added that correspond with the group mappings in the default conf/ldap.toml. `ldapadmin` is a member of `admins` and `ldapeditor` is a member of `users`.

@@ -11,3 +11,35 @@ After adding ldif files to `prepopulate`:

 1. Remove your current docker image: `docker rm docker_openldap_1`
 2. Build: `docker-compose build`
 3. `docker-compose up`
+
+## Enabling LDAP in Grafana
+
+Copy the ldap_dev.toml file in this folder into your `conf` folder (it is gitignored already). Then add the following to your .ini file to make Grafana use this block:
+
+```ini
+[auth.ldap]
+enabled = true
+config_file = conf/ldap_dev.toml
+; allow_sign_up = true
+```
+
+Test groups & users
+
+admins
+  ldap-admin
+  ldap-torkel
+  ldap-daniel
+backend
+  ldap-carl
+  ldap-torkel
+  ldap-leo
+frontend
+  ldap-torkel
+  ldap-tobias
+  ldap-daniel
+editors
+  ldap-editors
+
+
+no groups
+  ldap-viewer
diff --git a/docker/blocks/openldap/prepopulate.sh b/docker/blocks/openldap/prepopulate.sh
new file mode 100755
index 00000000000..aa11f8aba4f
--- /dev/null
+++ b/docker/blocks/openldap/prepopulate.sh
@@ -0,0 +1,14 @@
+#!/bin/bash
+
+echo "Pre-populating ldap entries, first waiting for ldap to start"
+
+sleep 3
+
+adminUserDn="cn=admin,dc=grafana,dc=org"
+adminPassword="grafana"
+
+for file in `ls /etc/ldap/prepopulate/*.ldif`; do
+  ldapadd -x -D "$adminUserDn" -w "$adminPassword" -f "$file"
+done
+
+
diff --git a/docker/blocks/openldap/prepopulate/1_units.ldif b/docker/blocks/openldap/prepopulate/1_units.ldif
new file mode 100644
index 00000000000..22e06303688
--- /dev/null
+++ b/docker/blocks/openldap/prepopulate/1_units.ldif
@@ -0,0 +1,9 @@
+dn: ou=groups,dc=grafana,dc=org
+ou: Groups
+objectclass: top
+objectclass: organizationalUnit
+
+dn: ou=users,dc=grafana,dc=org
+ou: Users
+objectclass: top
+objectclass: organizationalUnit
diff --git a/docker/blocks/openldap/prepopulate/2_users.ldif b/docker/blocks/openldap/prepopulate/2_users.ldif
new file mode 100644
index 00000000000..52e74b1e4b1
--- /dev/null
+++ b/docker/blocks/openldap/prepopulate/2_users.ldif
@@ -0,0 +1,80 @@
+# ldap-admin
+dn: cn=ldap-admin,ou=users,dc=grafana,dc=org
+mail: ldap-admin@grafana.com
+userPassword: grafana
+objectClass: person
+objectClass: top
+objectClass: inetOrgPerson
+objectClass: organizationalPerson
+sn: ldap-admin
+cn: ldap-admin
+
+dn: cn=ldap-editor,ou=users,dc=grafana,dc=org
+mail: ldap-editor@grafana.com
+userPassword: grafana
+objectClass: person
+objectClass: top
+objectClass: inetOrgPerson
+objectClass: organizationalPerson
+sn: ldap-editor
+cn: ldap-editor
+
+dn: cn=ldap-viewer,ou=users,dc=grafana,dc=org
+mail: ldap-viewer@grafana.com
+userPassword: grafana
+objectClass: person
+objectClass: top
+objectClass: inetOrgPerson
+objectClass: organizationalPerson
+sn: ldap-viewer
+cn: ldap-viewer
+
+dn: cn=ldap-carl,ou=users,dc=grafana,dc=org
+mail: ldap-carl@grafana.com
+userPassword: grafana
+objectClass: person
+objectClass: top
+objectClass: inetOrgPerson
+objectClass: organizationalPerson
+sn: ldap-carl
+cn: ldap-carl
+
+dn: cn=ldap-daniel,ou=users,dc=grafana,dc=org
+mail: ldap-daniel@grafana.com
+userPassword: grafana
+objectClass: person
+objectClass: top
+objectClass: inetOrgPerson
+objectClass: organizationalPerson
+sn: ldap-daniel
+cn: ldap-daniel
+
+dn: cn=ldap-leo,ou=users,dc=grafana,dc=org
+mail: ldap-leo@grafana.com
+userPassword: grafana
+objectClass: person
+objectClass: top
+objectClass: inetOrgPerson
+objectClass:
organizationalPerson +sn: ldap-leo +cn: ldap-leo + +dn: cn=ldap-tobias,ou=users,dc=grafana,dc=org +mail: ldap-tobias@grafana.com +userPassword: grafana +objectClass: person +objectClass: top +objectClass: inetOrgPerson +objectClass: organizationalPerson +sn: ldap-tobias +cn: ldap-tobias + +dn: cn=ldap-torkel,ou=users,dc=grafana,dc=org +mail: ldap-torkel@grafana.com +userPassword: grafana +objectClass: person +objectClass: top +objectClass: inetOrgPerson +objectClass: organizationalPerson +sn: ldap-torkel +cn: ldap-torkel diff --git a/docker/blocks/openldap/prepopulate/3_groups.ldif b/docker/blocks/openldap/prepopulate/3_groups.ldif new file mode 100644 index 00000000000..8638a089cc8 --- /dev/null +++ b/docker/blocks/openldap/prepopulate/3_groups.ldif @@ -0,0 +1,25 @@ +dn: cn=admins,ou=groups,dc=grafana,dc=org +cn: admins +objectClass: groupOfNames +objectClass: top +member: cn=ldap-admin,ou=users,dc=grafana,dc=org +member: cn=ldap-torkel,ou=users,dc=grafana,dc=org + +dn: cn=editors,ou=groups,dc=grafana,dc=org +cn: editors +objectClass: groupOfNames +member: cn=ldap-editor,ou=users,dc=grafana,dc=org + +dn: cn=backend,ou=groups,dc=grafana,dc=org +cn: backend +objectClass: groupOfNames +member: cn=ldap-carl,ou=users,dc=grafana,dc=org +member: cn=ldap-leo,ou=users,dc=grafana,dc=org +member: cn=ldap-torkel,ou=users,dc=grafana,dc=org + +dn: cn=frontend,ou=groups,dc=grafana,dc=org +cn: frontend +objectClass: groupOfNames +member: cn=ldap-torkel,ou=users,dc=grafana,dc=org +member: cn=ldap-daniel,ou=users,dc=grafana,dc=org +member: cn=ldap-leo,ou=users,dc=grafana,dc=org diff --git a/docker/blocks/openldap/prepopulate/admin.ldif b/docker/blocks/openldap/prepopulate/admin.ldif deleted file mode 100644 index 3f4406d5810..00000000000 --- a/docker/blocks/openldap/prepopulate/admin.ldif +++ /dev/null @@ -1,10 +0,0 @@ -dn: cn=ldapadmin,dc=grafana,dc=org -mail: ldapadmin@grafana.com -userPassword: grafana -objectClass: person -objectClass: top -objectClass: inetOrgPerson -objectClass: organizationalPerson -sn: ldapadmin -cn: ldapadmin -memberOf: cn=admins,dc=grafana,dc=org diff --git a/docker/blocks/openldap/prepopulate/adminsgroup.ldif b/docker/blocks/openldap/prepopulate/adminsgroup.ldif deleted file mode 100644 index d8dece4e458..00000000000 --- a/docker/blocks/openldap/prepopulate/adminsgroup.ldif +++ /dev/null @@ -1,5 +0,0 @@ -dn: cn=admins,dc=grafana,dc=org -cn: admins -member: cn=ldapadmin,dc=grafana,dc=org -objectClass: groupOfNames -objectClass: top diff --git a/docker/blocks/openldap/prepopulate/editor.ldif b/docker/blocks/openldap/prepopulate/editor.ldif deleted file mode 100644 index eba3adc4352..00000000000 --- a/docker/blocks/openldap/prepopulate/editor.ldif +++ /dev/null @@ -1,10 +0,0 @@ -dn: cn=ldapeditor,dc=grafana,dc=org -mail: ldapeditor@grafana.com -userPassword: grafana -objectClass: person -objectClass: top -objectClass: inetOrgPerson -objectClass: organizationalPerson -sn: ldapeditor -cn: ldapeditor -memberOf: cn=users,dc=grafana,dc=org diff --git a/docker/blocks/openldap/prepopulate/usersgroup.ldif b/docker/blocks/openldap/prepopulate/usersgroup.ldif deleted file mode 100644 index a1de3a50d38..00000000000 --- a/docker/blocks/openldap/prepopulate/usersgroup.ldif +++ /dev/null @@ -1,5 +0,0 @@ -dn: cn=users,dc=grafana,dc=org -cn: users -member: cn=ldapeditor,dc=grafana,dc=org -objectClass: groupOfNames -objectClass: top diff --git a/docker/blocks/openldap/prepopulate/viewer.ldif b/docker/blocks/openldap/prepopulate/viewer.ldif deleted file mode 100644 index f699a7df57b..00000000000 --- 
a/docker/blocks/openldap/prepopulate/viewer.ldif +++ /dev/null @@ -1,9 +0,0 @@ -dn: cn=ldapviewer,dc=grafana,dc=org -mail: ldapviewer@grafana.com -userPassword: grafana -objectClass: person -objectClass: top -objectClass: inetOrgPerson -objectClass: organizationalPerson -sn: ldapviewer -cn: ldapviewer diff --git a/docker/blocks/postgres/docker-compose.yaml b/docker/blocks/postgres/docker-compose.yaml index 566df7b8877..27736042f7b 100644 --- a/docker/blocks/postgres/docker-compose.yaml +++ b/docker/blocks/postgres/docker-compose.yaml @@ -1,5 +1,5 @@ postgrestest: - image: postgres:latest + image: postgres:9.3 environment: POSTGRES_USER: grafana POSTGRES_PASSWORD: password @@ -13,4 +13,4 @@ network_mode: bridge environment: FD_DATASOURCE: postgres - FD_PORT: 5432 \ No newline at end of file + FD_PORT: 5432 diff --git a/docker/blocks/postgres_tests/Dockerfile b/docker/blocks/postgres_tests/Dockerfile index afe4d199651..df188e1094d 100644 --- a/docker/blocks/postgres_tests/Dockerfile +++ b/docker/blocks/postgres_tests/Dockerfile @@ -1,3 +1,3 @@ -FROM postgres:latest +FROM postgres:9.3 ADD setup.sql /docker-entrypoint-initdb.d -CMD ["postgres"] \ No newline at end of file +CMD ["postgres"] diff --git a/docker/blocks/postgres_tests/setup.sql b/docker/blocks/postgres_tests/setup.sql index b182b7c292d..3b8a48f938d 100644 --- a/docker/blocks/postgres_tests/setup.sql +++ b/docker/blocks/postgres_tests/setup.sql @@ -1,3 +1,3 @@ CREATE DATABASE grafanadstest; REVOKE CONNECT ON DATABASE grafanadstest FROM PUBLIC; -GRANT CONNECT ON DATABASE grafanadstest TO grafanatest; \ No newline at end of file +GRANT CONNECT ON DATABASE grafanadstest TO grafanatest; diff --git a/docs/sources/administration/provisioning.md b/docs/sources/administration/provisioning.md index 42a0fffeda1..7fff41fb805 100644 --- a/docs/sources/administration/provisioning.md +++ b/docs/sources/administration/provisioning.md @@ -76,7 +76,7 @@ Saltstack | [https://github.com/salt-formulas/salt-formula-grafana](https://gith > This feature is available from v5.0 -It's possible to manage datasources in Grafana by adding one or more yaml config files in the [`provisioning/datasources`](/installation/configuration/#provisioning) directory. Each config file can contain a list of `datasources` that will be added or updated during start up. If the datasource already exists, Grafana will update it to match the configuration file. The config file can also contain a list of datasources that should be deleted. That list is called `delete_datasources`. Grafana will delete datasources listed in `delete_datasources` before inserting/updating those in the `datasource` list. +It's possible to manage datasources in Grafana by adding one or more yaml config files in the [`provisioning/datasources`](/installation/configuration/#provisioning) directory. Each config file can contain a list of `datasources` that will be added or updated during start up. If the datasource already exists, Grafana will update it to match the configuration file. The config file can also contain a list of datasources that should be deleted. That list is called `deleteDatasources`. Grafana will delete datasources listed in `deleteDatasources` before inserting/updating those in the `datasource` list. ### Running Multiple Grafana Instances @@ -94,7 +94,7 @@ deleteDatasources: orgId: 1 # list of datasources to insert/update depending -# whats available in the database +# what's available in the database datasources: # name of the datasource. 
Required
   - name: Graphite
@@ -154,7 +154,7 @@ Since not all datasources have the same configuration settings we only have the
 | tlsAuthWithCACert | boolean | *All* | Enable TLS authentication using CA cert |
 | tlsSkipVerify | boolean | *All* | Controls whether a client verifies the server's certificate chain and host name. |
 | graphiteVersion | string | Graphite | Graphite version |
-| timeInterval | string | Elastic, Influxdb & Prometheus | Lowest interval/step value that should be used for this data source |
+| timeInterval | string | Elastic, InfluxDB & Prometheus | Lowest interval/step value that should be used for this data source |
 | esVersion | string | Elastic | Elasticsearch version as a number (2/5/56) |
 | timeField | string | Elastic | Which field should be used as timestamp |
 | interval | string | Elastic | Index date time format |
@@ -162,9 +162,9 @@
 | assumeRoleArn | string | Cloudwatch | ARN of Assume Role |
 | defaultRegion | string | Cloudwatch | AWS region |
 | customMetricsNamespaces | string | Cloudwatch | Namespaces of Custom Metrics |
-| tsdbVersion | string | OpenTsdb | Version |
-| tsdbResolution | string | OpenTsdb | Resolution |
-| sslmode | string | Postgre | SSLmode. 'disable', 'require', 'verify-ca' or 'verify-full' |
+| tsdbVersion | string | OpenTSDB | Version |
+| tsdbResolution | string | OpenTSDB | Resolution |
+| sslmode | string | PostgreSQL | SSLmode. 'disable', 'require', 'verify-ca' or 'verify-full' |

 #### Secure Json Data

@@ -177,8 +177,8 @@ Secure json data is a map of settings that will be encrypted with [secret key](/
 | tlsCACert | string | *All* |CA cert for outgoing requests |
 | tlsClientCert | string | *All* |TLS Client cert for outgoing requests |
 | tlsClientKey | string | *All* |TLS Client key for outgoing requests |
-| password | string | Postgre | password |
-| user | string | Postgre | user |
+| password | string | PostgreSQL | password |
+| user | string | PostgreSQL | user |
 | accessKey | string | Cloudwatch | Access key for connecting to Cloudwatch |
 | secretKey | string | Cloudwatch | Secret key for connecting to Cloudwatch |

@@ -197,6 +197,7 @@ providers:
     folder: ''
     type: file
     disableDeletion: false
+    updateIntervalSeconds: 3 # how often Grafana will scan for changed dashboards
     options:
       path: /var/lib/grafana/dashboards
 ```
diff --git a/docs/sources/alerting/rules.md b/docs/sources/alerting/rules.md
index bcca3c6b2fb..fa7332e7145 100644
--- a/docs/sources/alerting/rules.md
+++ b/docs/sources/alerting/rules.md
@@ -27,7 +27,9 @@ and the conditions that need to be met for the alert to change state and trigger

 ## Execution

 The alert rules are evaluated in the Grafana backend in a scheduler and query execution engine that is part
-of core Grafana. Only some data sources are supported right now. They include `Graphite`, `Prometheus`, `InfluxDB`, `OpenTSDB`, `MySQL`, `Postgres` and `Cloudwatch`.
+of core Grafana. Only some data sources are supported right now. They include `Graphite`, `Prometheus`, `Elasticsearch`, `InfluxDB`, `OpenTSDB`, `MySQL`, `Postgres` and `Cloudwatch`.
+
+> Alerting support for Elasticsearch is only available in Grafana v5.2 and above.
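If you run multiple Grafana instances against the same database, it can be useful to let only one of them evaluate alert rules. A minimal sketch, assuming the `execute_alerts` option in the `[alerting]` config section and Grafana's `GF_<SECTION>_<KEY>` environment-variable override convention (verify both against your version):

```bash
# Sketch: start a secondary instance that serves dashboards but does not
# evaluate alert rules. GF_ALERTING_EXECUTE_ALERTS maps to the
# [alerting] execute_alerts setting via Grafana's env override convention.
GF_ALERTING_EXECUTE_ALERTS=false ./bin/grafana-server
```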
### Clustering @@ -152,6 +154,8 @@ filters = alerting.scheduler:debug \ tsdb.prometheus:debug \ tsdb.opentsdb:debug \ tsdb.influxdb:debug \ + tsdb.elasticsearch:debug \ + tsdb.elasticsearch.client:debug \ ``` If you want to log raw query sent to your TSDB and raw response in log you also have to set grafana.ini option `app_mode` to diff --git a/docs/sources/features/datasources/graphite.md b/docs/sources/features/datasources/graphite.md index da58a48225f..8c819726977 100644 --- a/docs/sources/features/datasources/graphite.md +++ b/docs/sources/features/datasources/graphite.md @@ -20,7 +20,7 @@ queries through the use of query references. ## Adding the data source 1. Open the side menu by clicking the Grafana icon in the top header. -2. In the side menu under the `Dashboards` link you should find a link named `Data Sources`. +2. In the side menu under the `Configuration` link you should find a link named `Data Sources`. 3. Click the `+ Add data source` button in the top header. 4. Select `Graphite` from the *Type* dropdown. diff --git a/docs/sources/features/datasources/influxdb.md b/docs/sources/features/datasources/influxdb.md index 1426f55e40b..bc96190e9b1 100644 --- a/docs/sources/features/datasources/influxdb.md +++ b/docs/sources/features/datasources/influxdb.md @@ -188,7 +188,7 @@ queries via the Dashboard menu / Annotations view. An example query: ```SQL -SELECT title, description from events WHERE $timeFilter order asc +SELECT title, description from events WHERE $timeFilter ORDER BY time ASC ``` For InfluxDB you need to enter a query like in the above example. You need to have the ```where $timeFilter``` diff --git a/docs/sources/features/datasources/mssql.md b/docs/sources/features/datasources/mssql.md index 1676cffa0a8..d4d5cc6d73e 100644 --- a/docs/sources/features/datasources/mssql.md +++ b/docs/sources/features/datasources/mssql.md @@ -77,9 +77,9 @@ Macro example | Description ------------ | ------------- *$__time(dateColumn)* | Will be replaced by an expression to rename the column to *time*. For example, *dateColumn as time* *$__timeEpoch(dateColumn)* | Will be replaced by an expression to convert a DATETIME column type to unix timestamp and rename it to *time*.
For example, *DATEDIFF(second, '1970-01-01', dateColumn) AS time* -*$__timeFilter(dateColumn)* | Will be replaced by a time range filter using the specified column name.
For example, *dateColumn >= DATEADD(s, 1494410783, '1970-01-01') AND dateColumn <= DATEADD(s, 1494410783, '1970-01-01')* -*$__timeFrom()* | Will be replaced by the start of the currently active time selection. For example, *DATEADD(second, 1494410783, '1970-01-01')* -*$__timeTo()* | Will be replaced by the end of the currently active time selection. For example, *DATEADD(second, 1494410783, '1970-01-01')* +*$__timeFilter(dateColumn)* | Will be replaced by a time range filter using the specified column name.
For example, *dateColumn BETWEEN '2017-04-21T05:01:17Z' AND '2017-04-21T05:06:17Z'* +*$__timeFrom()* | Will be replaced by the start of the currently active time selection. For example, *'2017-04-21T05:01:17Z'* +*$__timeTo()* | Will be replaced by the end of the currently active time selection. For example, *'2017-04-21T05:06:17Z'* *$__timeGroup(dateColumn,'5m'[, fillvalue])* | Will be replaced by an expression usable in GROUP BY clause. Providing a *fillValue* of *NULL* or *floating value* will automatically fill empty series in timerange with that value.
For example, *CAST(ROUND(DATEDIFF(second, '1970-01-01', time_column)/300.0, 0) as bigint)\*300*. *$__timeGroup(dateColumn,'5m', 0)* | Same as above but with a fill parameter so all null values will be converted to the fill value (all null values would be set to zero using this example). *$__unixEpochFilter(dateColumn)* | Will be replaced by a time range filter using the specified column name with times represented as unix timestamp. For example, *dateColumn > 1494410783 AND dateColumn < 1494497183* diff --git a/docs/sources/features/datasources/mysql.md b/docs/sources/features/datasources/mysql.md index f91417a43b7..ce50053c7ea 100644 --- a/docs/sources/features/datasources/mysql.md +++ b/docs/sources/features/datasources/mysql.md @@ -60,9 +60,9 @@ Macro example | Description ------------ | ------------- *$__time(dateColumn)* | Will be replaced by an expression to convert to a UNIX timestamp and rename the column to `time_sec`. For example, *UNIX_TIMESTAMP(dateColumn) as time_sec* *$__timeEpoch(dateColumn)* | Will be replaced by an expression to convert to a UNIX timestamp and rename the column to `time_sec`. For example, *UNIX_TIMESTAMP(dateColumn) as time_sec* -*$__timeFilter(dateColumn)* | Will be replaced by a time range filter using the specified column name. For example, *dateColumn > FROM_UNIXTIME(1494410783) AND dateColumn < FROM_UNIXTIME(1494497183)* -*$__timeFrom()* | Will be replaced by the start of the currently active time selection. For example, *FROM_UNIXTIME(1494410783)* -*$__timeTo()* | Will be replaced by the end of the currently active time selection. For example, *FROM_UNIXTIME(1494497183)* +*$__timeFilter(dateColumn)* | Will be replaced by a time range filter using the specified column name. For example, *dateColumn BETWEEN '2017-04-21T05:01:17Z' AND '2017-04-21T05:06:17Z'* +*$__timeFrom()* | Will be replaced by the start of the currently active time selection. For example, *'2017-04-21T05:01:17Z'* +*$__timeTo()* | Will be replaced by the end of the currently active time selection. For example, *'2017-04-21T05:06:17Z'* *$__timeGroup(dateColumn,'5m')* | Will be replaced by an expression usable in GROUP BY clause. For example, *cast(cast(UNIX_TIMESTAMP(dateColumn)/(300) as signed)*300 as signed),* *$__timeGroup(dateColumn,'5m',0)* | Same as above but with a fill parameter so all null values will be converted to the fill value (all null values would be set to zero using this example). *$__unixEpochFilter(dateColumn)* | Will be replaced by a time range filter using the specified column name with times represented as unix timestamp. For example, *dateColumn > 1494410783 AND dateColumn < 1494497183* diff --git a/docs/sources/features/panels/table_panel.md b/docs/sources/features/panels/table_panel.md index 32f7764e415..2cbb601820e 100644 --- a/docs/sources/features/panels/table_panel.md +++ b/docs/sources/features/panels/table_panel.md @@ -14,11 +14,53 @@ weight = 2 -The new table panel is very flexible, supporting both multiple modes for time series as well as for +The table panel is very flexible, supporting both multiple modes for time series as well as for table, annotation and raw JSON data. It also provides date formatting and value formatting and coloring options. To view table panels in action and test different configurations with sample data, check out the [Table Panel Showcase in the Grafana Playground](http://play.grafana.org/dashboard/db/table-panel-showcase). +## Querying Data + +The table panel displays the results of a query specified in the **Metrics** tab. 
+The result displayed depends on the datasource and the query, but generally there is one row per datapoint, with extra columns for associated keys and values, as well as one column for the numeric value of the datapoint.
+You can change this behavior in the section **Data to Table** below.
+
+### Merge Multiple Queries per Table
+
+> Only available in Grafana v5.0+.
+
+Sometimes it is useful to display the results of multiple queries in the same table on corresponding rows, e.g., when comparing capacity and actual usage of resources.
+In this example, usage and capacity are metrics that will have corresponding datapoints, while their associated keys and values can be used to match them.
+(This matching is only available with the **Table Transform** set to **Table**.)
+
+In the simplest case, both queries return time-series data with a numeric value and a timestamp.
+If the timestamps are the same, datapoints will be matched and rendered on the same row.
+Some datasources return keys and values (labels, tags) associated with the datapoint.
+These are matched as well if they are present in both results and have the same value.
+The following datapoints will end up on the same row with one time column, two label columns ("host" and "job") and two value columns:
+
+```
+Datapoint for query A: {time: 1, host: "node-2", job: "job-8", value: 3}
+Datapoint for query B: {time: 1, host: "node-2", value: 4}
+```
+
+The following two results cannot be matched and will be rendered on separate rows:
+
+```
+Different time
+Datapoint for query A: {time: 1, host: "node-2", job: "job-8", value: 3}
+Datapoint for query B: {time: 2, host: "node-2", value: 4}
+
+Different label "host"
+Datapoint for query A: {time: 1, host: "node-2", job: "job-8", value: 3}
+Datapoint for query B: {time: 1, host: "node-9", value: 4}
+```
+
+You can still merge both of the above cases by changing the conflicting column's **Type** to **hidden** in the **Column Styles**.
+
+Note that if each datapoint of your query results has multiple value fields like max, min, mean, etc., they will likely have different values and therefore will not match, rendering on separate rows.
+If you intend for rows to be merged but see them rendered on separate rows, check the query results in the **Query Inspector** for field values being identical across datapoints that should be merged into a row.
+
 ## Options overview

 The table panel has many ways to manipulate your data for optimal presentation.
diff --git a/docs/sources/guides/whats-new-in-v4-1.md b/docs/sources/guides/whats-new-in-v4-1.md
index 217b21b545e..0cecff68cf5 100644
--- a/docs/sources/guides/whats-new-in-v4-1.md
+++ b/docs/sources/guides/whats-new-in-v4-1.md
@@ -11,7 +11,7 @@ weight = 3
 +++

-## Whats new in Grafana v4.1
+## What's new in Grafana v4.1

 - **Graph**: Support for shared tooltip on all graphs as you hover over one graph.
[#1578](https://github.com/grafana/grafana/pull/1578), [#6274](https://github.com/grafana/grafana/pull/6274)
 - **Victorops**: Add VictorOps notification integration [#6411](https://github.com/grafana/grafana/issues/6411), thx [@ichekrygin](https://github.com/ichekrygin)
 - **Opsgenie**: Add OpsGenie notification integration [#6687](https://github.com/grafana/grafana/issues/6687), thx [@kylemcc](https://github.com/kylemcc)
@@ -24,7 +24,7 @@ weight = 3

 {{< imgbox max-width="60%" img="/img/docs/v41/shared_tooltip.gif" caption="Shared tooltip" >}}

-Showing the tooltip on all panels at the same time has been a long standing request in Grafana and we are really happy to finally be able to release it.
+Showing the tooltip on all panels at the same time has been a long standing request in Grafana and we are really happy to finally be able to release it.
@@ -50,7 +50,7 @@ Panels with a help text available have a little indicator in the top left corner In Grafana 4.1.0 you can configure your Cloudwatch data source with `access key` and `secret key` directly in the data source configuration page. This enables people to use the Cloudwatch data source without having access to the filesystem where Grafana is running. -Once the `access key` and `secret key` have been saved the user will no longer be able to view them. +Once the `access key` and `secret key` have been saved the user will no longer be able to view them.
## Upgrade & Breaking changes
diff --git a/docs/sources/guides/whats-new-in-v4-2.md b/docs/sources/guides/whats-new-in-v4-2.md
index 4b140a9027e..e976ed24700 100644
--- a/docs/sources/guides/whats-new-in-v4-2.md
+++ b/docs/sources/guides/whats-new-in-v4-2.md
@@ -10,7 +10,7 @@ parent = "whatsnew"
 weight = -1
 +++

-## Whats new in Grafana v4.2
+## What's new in Grafana v4.2

 Grafana v4.2 Beta is now [available for download](https://grafana.com/grafana/download/4.2.0).
 Just like the last release this one contains lots of bug fixes and minor improvements.
diff --git a/docs/sources/guides/whats-new-in-v4-6.md b/docs/sources/guides/whats-new-in-v4-6.md
index 09955fa58cc..ee0c4ea7a04 100644
--- a/docs/sources/guides/whats-new-in-v4-6.md
+++ b/docs/sources/guides/whats-new-in-v4-6.md
@@ -64,7 +64,7 @@ This makes exploring and filtering Prometheus data much easier.
 * **Dataproxy**: Allow grafana to renegotiate tls connection [#9250](https://github.com/grafana/grafana/issues/9250)
 * **HTTP**: set net.Dialer.DualStack to true for all http clients [#9367](https://github.com/grafana/grafana/pull/9367)
 * **Alerting**: Add diff and percent diff as series reducers [#9386](https://github.com/grafana/grafana/pull/9386), thx [@shanhuhai5739](https://github.com/shanhuhai5739)
-* **Slack**: Allow images to be uploaded to slack when Token is precent [#7175](https://github.com/grafana/grafana/issues/7175), thx [@xginn8](https://github.com/xginn8)
+* **Slack**: Allow images to be uploaded to slack when Token is present [#7175](https://github.com/grafana/grafana/issues/7175), thx [@xginn8](https://github.com/xginn8)
 * **Opsgenie**: Use their latest API instead of old version [#9399](https://github.com/grafana/grafana/pull/9399), thx [@cglrkn](https://github.com/cglrkn)
 * **Table**: Add support for displaying the timestamp with milliseconds [#9429](https://github.com/grafana/grafana/pull/9429), thx [@s1061123](https://github.com/s1061123)
 * **Hipchat**: Add metrics, message and image to hipchat notifications [#9110](https://github.com/grafana/grafana/issues/9110), thx [@eloo](https://github.com/eloo)
diff --git a/docs/sources/guides/whats-new-in-v5-1.md b/docs/sources/guides/whats-new-in-v5-1.md
index d992fd9062a..1f2be3bfedf 100644
--- a/docs/sources/guides/whats-new-in-v5-1.md
+++ b/docs/sources/guides/whats-new-in-v5-1.md
@@ -115,7 +115,7 @@ Grafana v5.1 brings an improved workflow for provisioned dashboards:
 Available options in the dialog will let you `Copy JSON to Clipboard` and/or `Save JSON to file`
 which can help you synchronize your dashboard changes back to the provisioning source.

-More information in the [Provisioning documentation](/features/datasources/prometheus/).
+More information in the [Provisioning documentation](/administration/provisioning/).
diff --git a/docs/sources/guides/whats-new-in-v5-2.md b/docs/sources/guides/whats-new-in-v5-2.md new file mode 100644 index 00000000000..e084f8618e4 --- /dev/null +++ b/docs/sources/guides/whats-new-in-v5-2.md @@ -0,0 +1,101 @@ ++++ +title = "What's New in Grafana v5.2" +description = "Feature & improvement highlights for Grafana v5.2" +keywords = ["grafana", "new", "documentation", "5.2"] +type = "docs" +[menu.docs] +name = "Version 5.2" +identifier = "v5.2" +parent = "whatsnew" +weight = -8 ++++ + +# What's New in Grafana v5.2 + +Grafana v5.2 brings new features, many enhancements and bug fixes. This article will detail the major new features and enhancements. + +- [Elasticsearch alerting]({{< relref "#elasticsearch-alerting" >}}) it's finally here! +- [Native builds for ARM]({{< relref "#native-builds-for-arm" >}}) native builds of Grafana for many more platforms! +- [Improved Docker image]({{< relref "#improved-docker-image" >}}) with support for docker secrets +- [Security]({{< relref "#security" >}}) make your Grafana instance more secure +- [Prometheus]({{< relref "#prometheus" >}}) with alignment enhancements +- [InfluxDB]({{< relref "#influxdb" >}}) now supports the `mode` function +- [Alerting]({{< relref "#alerting" >}}) with alert notification channel type for Discord +- [Dashboards & Panels]({{< relref "#dashboards-panels" >}}) with save & import enhancements + +## Elasticsearch alerting + +{{< docs-imagebox img="/img/docs/v52/elasticsearch_alerting.png" max-width="800px" class="docs-image--right" >}} + +Grafana v5.2 ships with an updated Elasticsearch datasource with support for alerting. Alerting support for Elasticsearch has been one of +the most requested features by our community and now it's finally here. Please try it out and let us know what you think. + +
+ +## Native builds for ARM + +Grafana v5.2 brings an improved build pipeline with cross-platform support. This enables native builds of Grafana for ARMv7 (x32) and ARM64 (x64). +We've been longing for native ARM build support for ages. With the help from our amazing community this is now finally available. +Please try it out and let us know what you think. + +Another great addition with the improved build pipeline is that binaries for MacOS/Darwin (x64) and Windows (x64) are now automatically built and +published for both stable and nightly builds. + +## Improved Docker image + +The Grafana docker image adds support for Docker secrets which enables you to supply Grafana with configuration through files. More +information in the [Installing using Docker documentation](/installation/docker/#reading-secrets-from-files-support-for-docker-secrets). + +## Security + +{{< docs-imagebox img="/img/docs/v52/login_change_password.png" max-width="800px" class="docs-image--right" >}} + +Starting from Grafana v5.2, when you login with the administrator account using the default password you'll be presented with a form to change the password. +We hope this encourages users to follow Grafana's best practices and change the default administrator password. + +
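The same change can also be scripted. A hedged sketch using the HTTP API's change-password endpoint (assuming `PUT /api/user/password` and the default credentials; adjust both to your setup):

```bash
# Sketch: rotate the default admin password right after first start-up.
curl -X PUT http://admin:admin@localhost:3000/api/user/password \
  -H "Content-Type: application/json" \
  -d '{
        "oldPassword": "admin",
        "newPassword": "a-much-stronger-password",
        "confirmNew": "a-much-stronger-password"
      }'
```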
+
+## Prometheus
+
+The Prometheus datasource now aligns the start/end of the query sent to Prometheus with the step, which ensures PromQL expressions with *rate*
+functions get consistent results, and thus avoids graphs jumping around on reload.
+
+## InfluxDB
+
+The InfluxDB datasource now includes support for the *mode* function which returns the most frequent value in a list of field values.
+
+## Alerting
+
+By popular demand Grafana now includes support for an alert notification channel type for [Discord](https://discordapp.com/).
+
+## Dashboards & Panels
+
+### Modified time range and variables are no longer saved by default
+
+{{< docs-imagebox img="/img/docs/v52/dashboard_save_modal.png" max-width="800px" class="docs-image--right" >}}
+
+Starting from Grafana v5.2, a modified time range or variable is no longer saved by default. To save a modified
+time range or variable, you'll need to actively select that when saving a dashboard, see screenshot.
+This should hopefully make it easier to have sane defaults for time and variables in dashboards and make it more explicit
+when you actually want to overwrite those settings.
+
+
+### Import dashboard enhancements
+
+{{< docs-imagebox img="/img/docs/v52/dashboard_import.png" max-width="800px" class="docs-image--right" >}}
+
+Grafana v5.2 adds support for specifying an existing folder or creating a new one when importing a dashboard - a long-awaited feature since
+Grafana v5.0 introduced support for dashboard folders and permissions. The import dashboard page has also received some general improvements
+and should now make it clearer whether a possible import will overwrite an existing dashboard.
+
+This release also adds some improvements for users who only have editor or admin permissions in certain folders. The links to
+*Create Dashboard* and *Import Dashboard* are now available in the side navigation, in dashboard search and on the manage dashboards/folder page for
+users with the editor role in an organization or the edit permission in at least one folder.
+
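For scripted imports, a folder can also be targeted directly when saving a dashboard through the HTTP API. A minimal sketch, assuming `POST /api/dashboards/db` accepts the `folderId` field introduced with folders in v5.0 (the folder id, API key and dashboard JSON below are placeholders):

```bash
# Sketch: save/import a dashboard straight into folder 23.
curl -X POST http://localhost:3000/api/dashboards/db \
  -H "Authorization: Bearer $API_KEY" \
  -H "Content-Type: application/json" \
  -d '{
        "dashboard": { "id": null, "uid": null, "title": "Imported dashboard", "schemaVersion": 16 },
        "folderId": 23,
        "overwrite": false
      }'
```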
+
+## Changelog
+
+Check out the [CHANGELOG.md](https://github.com/grafana/grafana/blob/master/CHANGELOG.md) file for a complete list
+of new features, changes, and bug fixes.
diff --git a/docs/sources/http_api/admin.md b/docs/sources/http_api/admin.md
index 0194c69caac..2d4be21bb78 100644
--- a/docs/sources/http_api/admin.md
+++ b/docs/sources/http_api/admin.md
@@ -36,11 +36,10 @@ HTTP/1.1 200
 Content-Type: application/json

 {
-"DEFAULT":
-{
-    "app_mode":"production"},
-  "analytics":
-  {
+  "DEFAULT": {
+    "app_mode":"production"
+  },
+  "analytics": {
     "google_analytics_ua_id":"",
     "reporting_enabled":"false"
   },
@@ -195,15 +194,16 @@ HTTP/1.1 200
 Content-Type: application/json

 {
-  "user_count":2,
-  "org_count":1,
-  "dashboard_count":4,
-  "db_snapshot_count":2,
-  "db_tag_count":6,
-  "data_source_count":1,
-  "playlist_count":1,
-  "starred_db_count":2,
-  "grafana_admin_count":2
+  "users":2,
+  "orgs":1,
+  "dashboards":4,
+  "snapshots":2,
+  "tags":6,
+  "datasources":1,
+  "playlists":1,
+  "stars":2,
+  "alerts":2,
+  "activeUsers":1
 }
 ```

@@ -340,4 +340,4 @@ HTTP/1.1 200
 Content-Type: application/json

 {state: "new state", message: "alerts pause/un paused", "alertsAffected": 100}
-```
\ No newline at end of file
+```
diff --git a/docs/sources/http_api/alerting.md b/docs/sources/http_api/alerting.md
index 3860ae490b1..e4fe0dad3ff 100644
--- a/docs/sources/http_api/alerting.md
+++ b/docs/sources/http_api/alerting.md
@@ -35,10 +35,15 @@ Authorization: Bearer eyJrIjoiT0tTcG1pUlY2RnVKZTFVaDFsNFZXdE9ZWmNrMkZYbk

 `/api/alerts?dashboardId=1`

-  - **dashboardId** – Return alerts for a specified dashboard.
-  - **panelId** – Return alerts for a specified panel on a dashboard.
-  - **limit** - Limit response to x number of alerts.
+  - **dashboardId** – Limit response to alerts in specified dashboard(s). You can specify multiple dashboards, e.g. dashboardId=23&dashboardId=35.
+  - **panelId** – Limit response to alerts for a specified panel on a dashboard.
+  - **query** - Limit response to alerts having a name like this value.
   - **state** - Return alerts with one or more of the following alert states: `ALL`,`no_data`, `paused`, `alerting`, `ok`, `pending`. To specify multiple states use the following format: `?state=paused&state=alerting`
+  - **limit** - Limit response to *X* number of alerts.
+  - **folderId** – Limit response to alerts of dashboards in specified folder(s). You can specify multiple folders, e.g. folderId=23&folderId=35.
+  - **dashboardQuery** - Limit response to alerts having a dashboard name like this value.
+  - **dashboardTag** - Limit response to alerts of dashboards with specified tags. To do an "AND" filtering with multiple tags, specify the tags parameter multiple times, e.g. dashboardTag=tag1&dashboardTag=tag2. These filters can be combined, as shown in the sketch below.
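For instance, a hedged curl sketch combining several of the filters above (the ids, tag and API key are placeholders):

```bash
curl -H "Authorization: Bearer $API_KEY" \
  "http://localhost:3000/api/alerts?dashboardId=23&dashboardId=35&state=alerting&dashboardTag=prod&limit=10"
```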
+
+**Example Response**:

@@ -49,18 +54,15 @@ Content-Type: application/json
 {
   "id": 1,
   "dashboardId": 1,
+  "dashboardUId": "ABcdEFghij",
+  "dashboardSlug": "sensors",
   "panelId": 1,
   "name": "fire place sensor",
-  "message": "Someone is trying to break in through the fire place",
   "state": "alerting",
+  "message": "Someone is trying to break in through the fire place",
+  "newStateDate": "2018-05-14T05:55:20+02:00",
   "evalDate": "0001-01-01T00:00:00Z",
-  "evalData": [
-    {
-      "metric": "fire",
-      "tags": null,
-      "value": 5.349999999999999
-    }
-  "newStateDate": "2016-12-25",
+  "evalData": null,
   "executionError": "",
   "url": "http://grafana.com/dashboard/db/sensors"
 }
@@ -88,16 +90,35 @@ Content-Type: application/json
 {
   "id": 1,
   "dashboardId": 1,
+  "dashboardUId": "ABcdEFghij",
+  "dashboardSlug": "sensors",
   "panelId": 1,
   "name": "fire place sensor",
-  "message": "Someone is trying to break in through the fire place",
   "state": "alerting",
-  "newStateDate": "2016-12-25",
+  "message": "Someone is trying to break in through the fire place",
+  "newStateDate": "2018-05-14T05:55:20+02:00",
+  "evalDate": "0001-01-01T00:00:00Z",
+  "evalData": {
+    "evalMatches": [
+      {
+        "metric": "movement",
+        "tags": {
+          "name": "fireplace_chimney"
+        },
+        "value": 98.765
+      }
+    ]
+  },
   "executionError": "",
   "url": "http://grafana.com/dashboard/db/sensors"
 }
 ```

+**Important Note**:
+"evalMatches" data is cached in the db if and only if the state of the alert changes
+(e.g. transitioning from "ok" to "alerting" state).
+
+If data from one server triggers the alert first and, before that server is seen leaving alerting state,
+a second server also enters a state that would trigger the alert, the second server will not be visible in "evalMatches" data.
+
 ## Pause alert

 `POST /api/alerts/:id/pause`
diff --git a/docs/sources/http_api/auth.md b/docs/sources/http_api/auth.md
index 166a5a4fdb9..8ff40b5ef04 100644
--- a/docs/sources/http_api/auth.md
+++ b/docs/sources/http_api/auth.md
@@ -44,6 +44,14 @@ Authorization: Bearer eyJrIjoiT0tTcG1pUlY2RnVKZTFVaDFsNFZXdE9ZWmNrMkZYbk

 The `Authorization` header value should be `Bearer `.

+The API Token can also be passed as a Basic authorization password with the special username `api_key`:
+
+curl example:
+```bash
+curl http://api_key:eyJrIjoiT0tTcG1pUlY2RnVKZTFVaDFsNFZXdE9ZWmNrMkZYbk@localhost:3000/api/org
+{"id":1,"name":"Main Org."}
+```
+
 # Auth HTTP resources / actions

 ## Api Keys
diff --git a/docs/sources/http_api/folder.md b/docs/sources/http_api/folder.md
index 7ee1f737799..fb318ecf58e 100644
--- a/docs/sources/http_api/folder.md
+++ b/docs/sources/http_api/folder.md
@@ -19,6 +19,10 @@ The unique identifier (uid) of a folder can be used for uniquely identify folder

 The uid can have a maximum length of 40 characters.

+## A note about the General folder
+
+The General folder (id=0) is special and is not part of the Folder API, which means
+that you cannot use this API for retrieving information about the General folder.

 ## Get all folders

@@ -273,14 +277,14 @@

 ## Get folder by id

-`GET /api/folders/:id`
+`GET /api/folders/id/:id`

 Will return the folder identified by id.
**Example Request**:

```http
-GET /api/folders/1 HTTP/1.1
+GET /api/folders/id/1 HTTP/1.1
 Accept: application/json
 Content-Type: application/json
 Authorization: Bearer eyJrIjoiT0tTcG1pUlY2RnVKZTFVaDFsNFZXdE9ZWmNrMkZYbk
diff --git a/docs/sources/http_api/org.md b/docs/sources/http_api/org.md
index 16721b1c89d..c55107d42f8 100644
--- a/docs/sources/http_api/org.md
+++ b/docs/sources/http_api/org.md
@@ -12,7 +12,13 @@ parent = "http_api"

 # Organisation API

-## Get current Organisation
+The Organisation HTTP API is divided into two resources, `/api/org` (current organisation)
+and `/api/orgs` (admin organisations). One big difference between these is that
+the admin organisations API only works with basic authentication, see [Admin Organisations API](#admin-organisations-api) for more information.
+
+## Current Organisation API
+
+### Get current Organisation

 `GET /api/org/`

@@ -37,135 +43,7 @@ Content-Type: application/json
 }
 ```

-## Get Organisation by Id
-
-`GET /api/orgs/:orgId`
-
-**Example Request**:
-
-```http
-GET /api/orgs/1 HTTP/1.1
-Accept: application/json
-Content-Type: application/json
-Authorization: Bearer eyJrIjoiT0tTcG1pUlY2RnVKZTFVaDFsNFZXdE9ZWmNrMkZYbk
-```
-Note: The api will only work when you pass the admin name and password
-to the request http url, like http://admin:admin@localhost:3000/api/orgs/1
-
-**Example Response**:
-
-```http
-HTTP/1.1 200
-Content-Type: application/json
-
-{
-  "id":1,
-  "name":"Main Org.",
-  "address":{
-    "address1":"",
-    "address2":"",
-    "city":"",
-    "zipCode":"",
-    "state":"",
-    "country":""
-  }
-}
-```
-## Get Organisation by Name
-
-`GET /api/orgs/name/:orgName`
-
-**Example Request**:
-
-```http
-GET /api/orgs/name/Main%20Org%2E HTTP/1.1
-Accept: application/json
-Content-Type: application/json
-Authorization: Bearer eyJrIjoiT0tTcG1pUlY2RnVKZTFVaDFsNFZXdE9ZWmNrMkZYbk
-```
-Note: The api will only work when you pass the admin name and password
-to the request http url, like http://admin:admin@localhost:3000/api/orgs/name/Main%20Org%2E
-
-**Example Response**:
-
-```http
-HTTP/1.1 200
-Content-Type: application/json
-
-{
-  "id":1,
-  "name":"Main Org.",
-  "address":{
-    "address1":"",
-    "address2":"",
-    "city":"",
-    "zipCode":"",
-    "state":"",
-    "country":""
-  }
-}
-```
-
-## Create Organisation
-
-`POST /api/orgs`
-
-**Example Request**:
-
-```http
-POST /api/orgs HTTP/1.1
-Accept: application/json
-Content-Type: application/json
-Authorization: Bearer eyJrIjoiT0tTcG1pUlY2RnVKZTFVaDFsNFZXdE9ZWmNrMkZYbk
-
-{
-  "name":"New Org."
-}
-```
-Note: The api will work in the following two ways
-1) Need to set GF_USERS_ALLOW_ORG_CREATE=true
-2) Set the config users.allow_org_create to true in ini file
-
-**Example Response**:
-
-```http
-HTTP/1.1 200
-Content-Type: application/json
-
-{
-  "orgId":"1",
-  "message":"Organization created"
-}
-```
-
-
-## Update current Organisation
-
-`PUT /api/org`
-
-**Example Request**:
-
-```http
-PUT /api/org HTTP/1.1
-Accept: application/json
-Content-Type: application/json
-Authorization: Bearer eyJrIjoiT0tTcG1pUlY2RnVKZTFVaDFsNFZXdE9ZWmNrMkZYbk
-
-{
-  "name":"Main Org."
-}
-```
-
-**Example Response**:
-
-```http
-HTTP/1.1 200
-Content-Type: application/json
-
-{"message":"Organization updated"}
-```
-
-## Get all users within the actual organisation
+### Get all users within the current organisation

 `GET /api/org/users`

@@ -195,36 +73,7 @@ Content-Type: application/json
 ]
 ```

-## Add a new user to the actual organisation
-
-`POST /api/org/users`
-
-Adds a global user to the actual organisation.
- -**Example Request**: - -```http -POST /api/org/users HTTP/1.1 -Accept: application/json -Content-Type: application/json -Authorization: Bearer eyJrIjoiT0tTcG1pUlY2RnVKZTFVaDFsNFZXdE9ZWmNrMkZYbk - -{ - "role": "Admin", - "loginOrEmail": "admin" -} -``` - -**Example Response**: - -```http -HTTP/1.1 200 -Content-Type: application/json - -{"message":"User added to organization"} -``` - -## Updates the given user +### Updates the given user `PATCH /api/org/users/:userId` @@ -250,7 +99,7 @@ Content-Type: application/json {"message":"Organization user updated"} ``` -## Delete user in actual organisation +### Delete user in current organisation `DELETE /api/org/users/:userId` @@ -272,19 +121,181 @@ Content-Type: application/json {"message":"User removed from organization"} ``` -# Organisations +### Update current Organisation -## Search all Organisations +`PUT /api/org` + +**Example Request**: + +```http +PUT /api/org HTTP/1.1 +Accept: application/json +Content-Type: application/json +Authorization: Bearer eyJrIjoiT0tTcG1pUlY2RnVKZTFVaDFsNFZXdE9ZWmNrMkZYbk + +{ + "name":"Main Org." +} +``` + +**Example Response**: + +```http +HTTP/1.1 200 +Content-Type: application/json + +{"message":"Organization updated"} +``` + +### Add a new user to the current organisation + +`POST /api/org/users` + +Adds a global user to the current organisation. + +**Example Request**: + +```http +POST /api/org/users HTTP/1.1 +Accept: application/json +Content-Type: application/json +Authorization: Bearer eyJrIjoiT0tTcG1pUlY2RnVKZTFVaDFsNFZXdE9ZWmNrMkZYbk + +{ + "role": "Admin", + "loginOrEmail": "admin" +} +``` + +**Example Response**: + +```http +HTTP/1.1 200 +Content-Type: application/json + +{"message":"User added to organization"} +``` + +## Admin Organisations API + +The Admin Organisations HTTP API does not currently work with an API Token. API Tokens are currently +only linked to an organization and an organization role. They cannot be given the permission of server +admin, only users can be given that permission. So in order to use these API calls you will have to +use Basic Auth and the Grafana user must have the Grafana Admin permission (The default admin user +is called `admin` and has permission to use this API). + +### Get Organisation by Id + +`GET /api/orgs/:orgId` + +Only works with Basic Authentication (username and password), see [introduction](#admin-organisations-api). + +**Example Request**: + +```http +GET /api/orgs/1 HTTP/1.1 +Accept: application/json +Content-Type: application/json +``` + +**Example Response**: + +```http +HTTP/1.1 200 +Content-Type: application/json + +{ + "id":1, + "name":"Main Org.", + "address":{ + "address1":"", + "address2":"", + "city":"", + "zipCode":"", + "state":"", + "country":"" + } +} +``` +### Get Organisation by Name + +`GET /api/orgs/name/:orgName` + +Only works with Basic Authentication (username and password), see [introduction](#admin-organisations-api). + +**Example Request**: + +```http +GET /api/orgs/name/Main%20Org%2E HTTP/1.1 +Accept: application/json +Content-Type: application/json +``` + +**Example Response**: + +```http +HTTP/1.1 200 +Content-Type: application/json + +{ + "id":1, + "name":"Main Org.", + "address":{ + "address1":"", + "address2":"", + "city":"", + "zipCode":"", + "state":"", + "country":"" + } +} +``` + +### Create Organisation + +`POST /api/orgs` + +Only works with Basic Authentication (username and password), see [introduction](#admin-organisations-api). 
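With curl, that means passing the admin credentials directly instead of a token; a sketch using the default `admin:admin` (adjust to your setup):

```bash
curl -u admin:admin -X POST http://localhost:3000/api/orgs \
  -H "Content-Type: application/json" \
  -d '{ "name": "New Org." }'
```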
+ +**Example Request**: + +```http +POST /api/orgs HTTP/1.1 +Accept: application/json +Content-Type: application/json + +{ + "name":"New Org." +} +``` +Note: The api will work in the following two ways +1) Need to set GF_USERS_ALLOW_ORG_CREATE=true +2) Set the config users.allow_org_create to true in ini file + +**Example Response**: + +```http +HTTP/1.1 200 +Content-Type: application/json + +{ + "orgId":"1", + "message":"Organization created" +} +``` + +### Search all Organisations `GET /api/orgs` +Only works with Basic Authentication (username and password), see [introduction](#admin-organisations-api). + **Example Request**: ```http GET /api/orgs HTTP/1.1 Accept: application/json Content-Type: application/json -Authorization: Bearer eyJrIjoiT0tTcG1pUlY2RnVKZTFVaDFsNFZXdE9ZWmNrMkZYbk ``` Note: The api will only work when you pass the admin name and password to the request http url, like http://admin:admin@localhost:3000/api/orgs @@ -303,11 +314,12 @@ Content-Type: application/json ] ``` -## Update Organisation +### Update Organisation `PUT /api/orgs/:orgId` Update Organisation, fields *Address 1*, *Address 2*, *City* are not implemented yet. +Only works with Basic Authentication (username and password), see [introduction](#admin-organisations-api). **Example Request**: @@ -315,7 +327,6 @@ Update Organisation, fields *Address 1*, *Address 2*, *City* are not implemented PUT /api/orgs/1 HTTP/1.1 Accept: application/json Content-Type: application/json -Authorization: Bearer eyJrIjoiT0tTcG1pUlY2RnVKZTFVaDFsNFZXdE9ZWmNrMkZYbk { "name":"Main Org 2." @@ -331,17 +342,40 @@ Content-Type: application/json {"message":"Organization updated"} ``` -## Get Users in Organisation +### Delete Organisation + +`DELETE /api/orgs/:orgId` + +Only works with Basic Authentication (username and password), see [introduction](#admin-organisations-api). + +**Example Request**: + +```http +DELETE /api/orgs/1 HTTP/1.1 +Accept: application/json +``` + +**Example Response**: + +```http +HTTP/1.1 200 +Content-Type: application/json + +{"message":"Organization deleted"} +``` + +### Get Users in Organisation `GET /api/orgs/:orgId/users` +Only works with Basic Authentication (username and password), see [introduction](#admin-organisations-api). + **Example Request**: ```http GET /api/orgs/1/users HTTP/1.1 Accept: application/json Content-Type: application/json -Authorization: Bearer eyJrIjoiT0tTcG1pUlY2RnVKZTFVaDFsNFZXdE9ZWmNrMkZYbk ``` Note: The api will only work when you pass the admin name and password to the request http url, like http://admin:admin@localhost:3000/api/orgs/1/users @@ -363,25 +397,24 @@ Content-Type: application/json ] ``` -## Add User in Organisation +### Add User in Organisation `POST /api/orgs/:orgId/users` +Only works with Basic Authentication (username and password), see [introduction](#admin-organisations-api). 
+
+**Example Request**:
+
+```http
+POST /api/orgs/1/users HTTP/1.1
+Accept: application/json
+Content-Type: application/json
+
+{
+  "loginOrEmail":"user",
+  "role":"Viewer"
+}
+```
-Note: The api will only work when you pass the admin name and password
-to the request http url, like http://admin:admin@localhost:3000/api/orgs/1/users

**Example Response**:

```http
HTTP/1.1 200
Content-Type: application/json

{"message":"User added to organization"}
```

-## Update Users in Organisation
+### Update Users in Organisation

 `PATCH /api/orgs/:orgId/users/:userId`

+Only works with Basic Authentication (username and password), see [introduction](#admin-organisations-api).
+
 **Example Request**:

 ```http
 PATCH /api/orgs/1/users/2 HTTP/1.1
 Accept: application/json
 Content-Type: application/json
-Authorization: Bearer eyJrIjoiT0tTcG1pUlY2RnVKZTFVaDFsNFZXdE9ZWmNrMkZYbk

 {
   "role":"Admin"
@@ -418,17 +452,18 @@ Content-Type: application/json

 {"message":"Organization user updated"}
 ```

-## Delete User in Organisation
+### Delete User in Organisation

 `DELETE /api/orgs/:orgId/users/:userId`

+Only works with Basic Authentication (username and password), see [introduction](#admin-organisations-api).
+
 **Example Request**:

 ```http
 DELETE /api/orgs/1/users/2 HTTP/1.1
 Accept: application/json
 Content-Type: application/json
-Authorization: Bearer eyJrIjoiT0tTcG1pUlY2RnVKZTFVaDFsNFZXdE9ZWmNrMkZYbk
 ```

 **Example Response**:
diff --git a/docs/sources/http_api/playlist.md b/docs/sources/http_api/playlist.md
new file mode 100644
index 00000000000..7c33900969b
--- /dev/null
+++ b/docs/sources/http_api/playlist.md
@@ -0,0 +1,286 @@
++++
title = "Playlist HTTP API "
description = "Playlist Admin HTTP API"
keywords = ["grafana", "http", "documentation", "api", "playlist"]
aliases = ["/http_api/playlist/"]
type = "docs"
[menu.docs]
name = "Playlist"
parent = "http_api"
++++
+
+# Playlist API
+
+## Search Playlist
+
+`GET /api/playlists`
+
+Get all existing playlists for the current organization using pagination
+
+**Example Request**:
+
+```bash
+GET /api/playlists HTTP/1.1
+Accept: application/json
+Authorization: Bearer eyJrIjoiT0tTcG1pUlY2RnVKZTFVaDFsNFZXdE9ZWmNrMkZYbk
+```
+
+  Querystring Parameters:
+
+  These parameters are used as querystring parameters.
+
+  - **query** - Limit response to playlists having a name like this value.
+  - **limit** - Limit response to *X* number of playlists.
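A hedged curl sketch combining both parameters (the API key is a placeholder):

```bash
curl -H "Authorization: Bearer $API_KEY" \
  "http://localhost:3000/api/playlists?query=my&limit=10"
```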
+
+**Example Response**:
+
+```json
+HTTP/1.1 200
+Content-Type: application/json
+[
+  {
+    "id": 1,
+    "name": "my playlist",
+    "interval": "5m"
+  }
+]
+```
+
+## Get one playlist
+
+`GET /api/playlists/:id`
+
+**Example Request**:
+
+```bash
+GET /api/playlists/1 HTTP/1.1
+Accept: application/json
+Authorization: Bearer eyJrIjoiT0tTcG1pUlY2RnVKZTFVaDFsNFZXdE9ZWmNrMkZYbk
+```
+
+**Example Response**:
+
+```json
+HTTP/1.1 200
+Content-Type: application/json
+{
+  "id" : 1,
+  "name": "my playlist",
+  "interval": "5m",
+  "orgId": "my org",
+  "items": [
+    {
+      "id": 1,
+      "playlistId": 1,
+      "type": "dashboard_by_id",
+      "value": "3",
+      "order": 1,
+      "title":"my third dashboard"
+    },
+    {
+      "id": 2,
+      "playlistId": 1,
+      "type": "dashboard_by_tag",
+      "value": "myTag",
+      "order": 2,
+      "title":"my other dashboard"
+    }
+  ]
+}
+```
+
+## Get Playlist items
+
+`GET /api/playlists/:id/items`
+
+**Example Request**:
+
+```bash
+GET /api/playlists/1/items HTTP/1.1
+Accept: application/json
+Authorization: Bearer eyJrIjoiT0tTcG1pUlY2RnVKZTFVaDFsNFZXdE9ZWmNrMkZYbk
+```
+
+**Example Response**:
+
+```json
+HTTP/1.1 200
+Content-Type: application/json
+[
+  {
+    "id": 1,
+    "playlistId": 1,
+    "type": "dashboard_by_id",
+    "value": "3",
+    "order": 1,
+    "title":"my third dashboard"
+  },
+  {
+    "id": 2,
+    "playlistId": 1,
+    "type": "dashboard_by_tag",
+    "value": "myTag",
+    "order": 2,
+    "title":"my other dashboard"
+  }
+]
+```
+
+## Get Playlist dashboards
+
+`GET /api/playlists/:id/dashboards`
+
+**Example Request**:
+
+```bash
+GET /api/playlists/1/dashboards HTTP/1.1
+Accept: application/json
+Authorization: Bearer eyJrIjoiT0tTcG1pUlY2RnVKZTFVaDFsNFZXdE9ZWmNrMkZYbk
+```
+
+**Example Response**:
+
+```json
+HTTP/1.1 200
+Content-Type: application/json
+[
+  {
+    "id": 3,
+    "title": "my third dashboard",
+    "order": 1
+  },
+  {
+    "id": 5,
+    "title": "my other dashboard",
+    "order": 2
+  }
+]
+```
+
+## Create a playlist
+
+`POST /api/playlists/`
+
+**Example Request**:
+
+```bash
+POST /api/playlists HTTP/1.1
+Accept: application/json
+Content-Type: application/json
+Authorization: Bearer eyJrIjoiT0tTcG1pUlY2RnVKZTFVaDFsNFZXdE9ZWmNrMkZYbk
+  {
+    "name": "my playlist",
+    "interval": "5m",
+    "items": [
+      {
+        "type": "dashboard_by_id",
+        "value": "3",
+        "order": 1,
+        "title":"my third dashboard"
+      },
+      {
+        "type": "dashboard_by_tag",
+        "value": "myTag",
+        "order": 2,
+        "title":"my other dashboard"
+      }
+    ]
+  }
+```
+
+**Example Response**:
+
+```json
+HTTP/1.1 200
+Content-Type: application/json
+  {
+    "id": 1,
+    "name": "my playlist",
+    "interval": "5m"
+  }
+```
+
+## Update a playlist
+
+`PUT /api/playlists/:id`
+
+**Example Request**:
+
+```bash
+PUT /api/playlists/1 HTTP/1.1
+Accept: application/json
+Content-Type: application/json
+Authorization: Bearer eyJrIjoiT0tTcG1pUlY2RnVKZTFVaDFsNFZXdE9ZWmNrMkZYbk
+  {
+    "name": "my playlist",
+    "interval": "5m",
+    "items": [
+      {
+        "playlistId": 1,
+        "type": "dashboard_by_id",
+        "value": "3",
+        "order": 1,
+        "title":"my third dashboard"
+      },
+      {
+        "playlistId": 1,
+        "type": "dashboard_by_tag",
+        "value": "myTag",
+        "order": 2,
+        "title":"my other dashboard"
+      }
+    ]
+  }
+```
+
+**Example Response**:
+
+```json
+HTTP/1.1 200
+Content-Type: application/json
+{
+  "id" : 1,
+  "name": "my playlist",
+  "interval": "5m",
+  "orgId": "my org",
+  "items": [
+    {
+      "id": 1,
+      "playlistId": 1,
+      "type": "dashboard_by_id",
+      "value": "3",
+      "order": 1,
+      "title":"my third dashboard"
+    },
+    {
+      "id": 2,
+      "playlistId": 1,
+      "type": "dashboard_by_tag",
+      "value": "myTag",
+      "order": 2,
+      "title":"my other
dasboard" + } + ] +} +``` + +## Delete a playlist + +`DELETE /api/playlists/:id` + +**Example Request**: + +```bash +DELETE /api/playlists/1 HTTP/1.1 +Accept: application/json +Authorization: Bearer eyJrIjoiT0tTcG1pUlY2RnVKZTFVaDFsNFZXdE9ZWmNrMkZYbk +``` + +**Example Response**: + +```json +HTTP/1.1 200 +Content-Type: application/json +{} +``` diff --git a/docs/sources/http_api/snapshot.md b/docs/sources/http_api/snapshot.md index dce3b0a9160..5a76d0118b3 100644 --- a/docs/sources/http_api/snapshot.md +++ b/docs/sources/http_api/snapshot.md @@ -70,7 +70,7 @@ JSON Body schema: Content-Type: application/json { "deleteKey":"XXXXXXX", - "deleteUrl":"myurl/dashboard/snapshot/XXXXXXX", + "deleteUrl":"myurl/api/snapshots-delete/XXXXXXX", "key":"YYYYYYY", "url":"myurl/dashboard/snapshot/YYYYYYY" } @@ -81,7 +81,46 @@ Keys: - **deleteKey** – Key generated to delete the snapshot - **key** – Key generated to share the dashboard -## Get Snapshot by Id +## Get list of Snapshots + +`GET /api/dashboard/snapshots` + +Query parameters: + +- **query** – Search Query +- **limit** – Limit the number of returned results + +**Example Request**: + +```http +GET /api/dashboard/snapshots HTTP/1.1 +Accept: application/json +Authorization: Bearer eyJrIjoiT0tTcG1pUlY2RnVKZTFVaDFsNFZXdE9ZWmNrMkZYbk +``` + +**Example Response**: + +```http +HTTP/1.1 200 +Content-Type: application/json + +[ + { + "id":8, + "name":"Home", + "key":"YYYYYYY", + "orgId":1, + "userId":1, + "external":false, + "externalUrl":"", + "expires":"2200-13-32T25:23:23+02:00", + "created":"2200-13-32T28:24:23+02:00", + "updated":"2200-13-32T28:24:23+02:00" + } +] +``` + +## Get Snapshot by Key `GET /api/snapshots/:key` @@ -90,7 +129,6 @@ Keys: ```http GET /api/snapshots/YYYYYYY HTTP/1.1 Accept: application/json -Content-Type: application/json Authorization: Bearer eyJrIjoiT0tTcG1pUlY2RnVKZTFVaDFsNFZXdE9ZWmNrMkZYbk ``` @@ -140,16 +178,15 @@ Content-Type: application/json } ``` -## Delete Snapshot by deleteKey +## Delete Snapshot by Key -`GET /api/snapshots-delete/:deleteKey` +`DELETE /api/snapshots/:key` **Example Request**: ```http -GET /api/snapshots/YYYYYYY HTTP/1.1 +DELETE /api/snapshots/YYYYYYY HTTP/1.1 Accept: application/json -Content-Type: application/json Authorization: Bearer eyJrIjoiT0tTcG1pUlY2RnVKZTFVaDFsNFZXdE9ZWmNrMkZYbk ``` @@ -159,5 +196,27 @@ Authorization: Bearer eyJrIjoiT0tTcG1pUlY2RnVKZTFVaDFsNFZXdE9ZWmNrMkZYbk HTTP/1.1 200 Content-Type: application/json -{"message":"Snapshot deleted. It might take an hour before it's cleared from a CDN cache."} +{"message":"Snapshot deleted. It might take an hour before it's cleared from any CDN caches."} +``` + +## Delete Snapshot by deleteKey + +This API call can be used without authentication by using the secret delete key for the snapshot. + +`GET /api/snapshots-delete/:deleteKey` + +**Example Request**: + +```http +GET /api/snapshots-delete/XXXXXXX HTTP/1.1 +Accept: application/json +``` + +**Example Response**: + +```http +HTTP/1.1 200 +Content-Type: application/json + +{"message":"Snapshot deleted. It might take an hour before it's cleared from any CDN caches."} ``` \ No newline at end of file diff --git a/docs/sources/index.md b/docs/sources/index.md index 3c59b9baba0..da977b73e0c 100644 --- a/docs/sources/index.md +++ b/docs/sources/index.md @@ -60,9 +60,9 @@ aliases = ["v1.1", "guides/reference/admin"]

 Provisioning

 A guide to help you automate your Grafana setup & configuration.

-<a href="{{< relref "..." >}}" class="nav-cards__item nav-cards__item--guide">
-  What's new in v5.0
-  Article on all the new cool features and enhancements in v5.0
+<a href="{{< relref "..." >}}" class="nav-cards__item nav-cards__item--guide">
+  What's new in v5.2
+  Article on all the new cool features and enhancements in v5.2
 <a href="{{< relref "..." >}}" class="nav-cards__item nav-cards__item--guide">
 Screencasts

diff --git a/docs/sources/installation/behind_proxy.md b/docs/sources/installation/behind_proxy.md
index 89711aecb46..6e3884456ac 100644
--- a/docs/sources/installation/behind_proxy.md
+++ b/docs/sources/installation/behind_proxy.md
@@ -26,7 +26,7 @@ Otherwise Grafana will not behave correctly. See example below.
## Examples
Here are some example configurations for running Grafana behind a reverse proxy.

-### Grafana configuration (ex http://foo.bar.com)
+### Grafana configuration (ex http://foo.bar)

```bash
[server]
}
```

-### Examples with **sub path** (ex http://foo.bar.com/grafana)
+### Examples with **sub path** (ex http://foo.bar/grafana)

#### Grafana configuration with sub path
```bash
diff --git a/docs/sources/installation/configuration.md b/docs/sources/installation/configuration.md
index 2c2a359c7c5..e3db7a1d60b 100644
--- a/docs/sources/installation/configuration.md
+++ b/docs/sources/installation/configuration.md
@@ -80,6 +80,11 @@ Path to where Grafana stores the sqlite3 database (if used), file based
sessions (if used), and other data. This path is usually specified via
command line in the init.d script or the systemd service file.

+### temp_data_lifetime
+
+How long temporary images in the `data` directory should be kept. Defaults to: `24h`. Supported modifiers: `h` (hours),
+`m` (minutes), for example: `168h`, `30m`, `10h30m`. Use `0` to never clean up temporary files.
+
### logs

Path to where Grafana will store logs. This path is usually specified via
@@ -291,6 +296,12 @@ Set to `true` to automatically add new users to the main organization
(id 1). When set to `false`, new users will automatically cause a new
organization to be created for that new user.

+### auto_assign_org_id
+
+Set this value to automatically add new users to the provided org.
+This requires `auto_assign_org` to be set to `true`. Please make sure
+that this organization already exists.
+
### auto_assign_org_role

The role new users will be assigned for the main organization (if the
@@ -419,25 +430,33 @@ allowed_organizations = github google

## [auth.google]

-You need to create a Google project. You can do this in the [Google
-Developer Console](https://console.developers.google.com/project). When
-you create the project you will need to specify a callback URL. Specify
-this as callback:
+First, you need to create a Google OAuth Client:

-```bash
-http://:/login/google
-```
+1. Go to https://console.developers.google.com/apis/credentials

-This callback URL must match the full HTTP address that you use in your
-browser to access Grafana, but with the prefix path of `/login/google`.
-When the Google project is created you will get a Client ID and a Client
-Secret. Specify these in the Grafana configuration file. For example:
+2. Click the 'Create Credentials' button, then click 'OAuth Client ID' in the
+menu that drops down
+
+3. Enter the following:
+
+   - Application Type: Web Application
+   - Name: Grafana
+   - Authorized Javascript Origins: https://grafana.mycompany.com
+   - Authorized Redirect URLs: https://grafana.mycompany.com/login/google
+
+   Replace https://grafana.mycompany.com with the URL of your Grafana instance.
+
+4. Click Create
+
+5. Copy the Client ID and Client Secret from the 'OAuth Client' modal
+
+Specify the Client ID and Secret in the Grafana configuration file.
For example:

```bash
[auth.google]
enabled = true
-client_id = YOUR_GOOGLE_APP_CLIENT_ID
-client_secret = YOUR_GOOGLE_APP_CLIENT_SECRET
+client_id = CLIENT_ID
+client_secret = CLIENT_SECRET
scopes = https://www.googleapis.com/auth/userinfo.profile https://www.googleapis.com/auth/userinfo.email
auth_url = https://accounts.google.com/o/oauth2/auth
token_url = https://accounts.google.com/o/oauth2/token
@@ -844,7 +863,7 @@ Secret key. e.g. AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
Url to where Grafana will send PUT request with images

### public_url
-Optional parameter. Url to send to users in notifications, directly appended with the resulting uploaded file name.
+Optional parameter. Url to send to users in notifications. If the string contains the sequence ${file}, it will be replaced with the uploaded filename. Otherwise, the file name will be appended to the path part of the url, leaving any query string unchanged.

### username
basic auth username
diff --git a/docs/sources/installation/debian.md b/docs/sources/installation/debian.md
index f0ad89a8e88..4bb245a586e 100644
--- a/docs/sources/installation/debian.md
+++ b/docs/sources/installation/debian.md
@@ -15,10 +15,9 @@ weight = 1

Description | Download
------------ | -------------
-Stable for Debian-based Linux | [grafana_5.1.2_amd64.deb](https://s3-us-west-2.amazonaws.com/grafana-releases/release/grafana_5.1.2_amd64.deb)
-
+Stable for Debian-based Linux | [x86-64](https://grafana.com/grafana/download?platform=linux)
+Stable for Debian-based Linux | [ARM64](https://grafana.com/grafana/download?platform=arm)
+Stable for Debian-based Linux | [ARMv7](https://grafana.com/grafana/download?platform=arm)

Read [Upgrading Grafana]({{< relref "installation/upgrading.md" >}}) for tips and guidance on updating an existing
installation.

@@ -27,17 +26,18 @@ installation.

```bash
-wget https://s3-us-west-2.amazonaws.com/grafana-releases/release/grafana_5.1.2_amd64.deb
+wget 
sudo apt-get install -y adduser libfontconfig
-sudo dpkg -i grafana_5.1.2_amd64.deb
+sudo dpkg -i grafana_5.1.4_amd64.deb
```
-
+sudo dpkg -i grafana_5.1.4_amd64.deb
+```

## APT Repository
diff --git a/docs/sources/installation/docker.md b/docs/sources/installation/docker.md
index e78796845c4..1f755625699 100644
--- a/docs/sources/installation/docker.md
+++ b/docs/sources/installation/docker.md
@@ -49,6 +49,11 @@ $ docker run \
  grafana/grafana:5.1.0
```

+## Running the master branch
+
+For every successful commit we publish a Grafana container to [`grafana/grafana`](https://hub.docker.com/r/grafana/grafana/tags/) and [`grafana/grafana-dev`](https://hub.docker.com/r/grafana/grafana-dev/tags/). In the `grafana/grafana` container we will always overwrite the `master` tag with the latest version. In `grafana/grafana-dev` we will include
+the git commit in the tag. If you run Grafana master in production we **strongly** recommend that you use the latter, since different machines might run different versions of Grafana if they pull the `master` tag at different times.
+
## Installing Plugins for Grafana

Pass the plugins you want installed to docker with the `GF_INSTALL_PLUGINS` environment variable as a comma separated list. This will pass each plugin name to `grafana-cli plugins install ${plugin}` and install them when Grafana starts.
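+
+For example, a run that installs two plugins this way (the plugin ids here are just illustrative):
+
+```bash
+docker run -d -p 3000:3000 \
+  -e "GF_INSTALL_PLUGINS=grafana-clock-panel,grafana-simple-json-datasource" \
+  grafana/grafana
+```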
@@ -130,6 +135,20 @@ ID=$(id -u) # saves your user id in the ID variable
docker run -d --user $ID --volume "$PWD/data:/var/lib/grafana" -p 3000:3000 grafana/grafana:5.1.0
```

+## Reading secrets from files (support for Docker Secrets)
+
+> Only available in Grafana v5.2+.
+
+It's possible to supply Grafana with configuration through files. This works well with [Docker Secrets](https://docs.docker.com/engine/swarm/secrets/) as the secrets by default get mapped into `/run/secrets/` of the container.
+
+You can do this with any of the configuration options in conf/grafana.ini by setting `GF_<SectionName>_<KeyName>__FILE` to the path of the file holding the secret.
+
+Let's say you want to set the admin password this way.
+
+- Admin password secret: `/run/secrets/admin_password`
+- Environment variable: `GF_SECURITY_ADMIN_PASSWORD__FILE=/run/secrets/admin_password`
+
+
## Migration from a previous version of the docker container to 5.1 or later

The docker container for Grafana has seen a major rewrite for 5.1.
@@ -147,7 +166,7 @@ The docker container for Grafana has seen a major rewrite for 5.1.

Previously `/var/lib/grafana`, `/etc/grafana` and `/var/log/grafana` were defined as volumes in the `Dockerfile`. This led to the creation of three volumes each time a new instance of the Grafana container started, whether you wanted it or not.

-You should always be careful to define your own named volume for storage, but if you depended on these volumes you should be aware that an upgraded container will no longer have them. 
+You should always be careful to define your own named volume for storage, but if you depended on these volumes you should be aware that an upgraded container will no longer have them.

**Warning**: when migrating from an earlier version to 5.1 or later using docker compose and implicit volumes you need to use `docker inspect` to find out which volumes your container is mapped to so that you can map them to the upgraded container as well. You will also have to change file ownership (or user) as documented below.

@@ -182,7 +201,7 @@ services:

#### Modifying permissions

-The commands below will run bash inside the Grafana container with your volume mapped in. This makes it possible to modify the file ownership to match the new container. Always be careful when modifying permissions. 
+The commands below will run bash inside the Grafana container with your volume mapped in. This makes it possible to modify the file ownership to match the new container. Always be careful when modifying permissions.

```bash
$ docker run -ti --user root --volume "" --entrypoint bash grafana/grafana:5.1.0
diff --git a/docs/sources/installation/ldap.md b/docs/sources/installation/ldap.md
index 85501e51d85..9a381b9e467 100644
--- a/docs/sources/installation/ldap.md
+++ b/docs/sources/installation/ldap.md
@@ -23,8 +23,9 @@ specific configuration file (default: `/etc/grafana/ldap.toml`).

### Example config

```toml
-# Set to true to log user information returned from LDAP
-verbose_logging = false
+# To troubleshoot and get more log info enable ldap debug logging in grafana.ini
+# [log]
+# filters = ldap:debug

[[servers]]
# Ldap server host (specify multiple hosts space separated)
@@ -73,6 +74,8 @@ email = "email"
[[servers.group_mappings]]
group_dn = "cn=admins,dc=grafana,dc=org"
org_role = "Admin"
+# To make user an instance admin (Grafana Admin) uncomment line below
+# grafana_admin = true
# The Grafana organization database id, optional, if left out the default org (id 1) will be used.
Setting this allows for multiple group_dn's to be assigned to the same org_role provided the org_id differs
# org_id = 1
@@ -132,6 +135,10 @@ Users page, this change will be reset the next time the user logs in. If you
change the LDAP groups of a user, the change will take effect the next
time the user logs in.

+### Grafana Admin
+With a `servers.group_mappings` section you can set `grafana_admin = true` or `false` to sync the Grafana Admin permission. A Grafana server admin has admin access over all orgs &
+users.
+
### Priority
The first group mapping that an LDAP user is matched to will be used for the sync. If you have LDAP users that fit multiple mappings, the topmost mapping in the TOML config will be used.
diff --git a/docs/sources/installation/mac.md b/docs/sources/installation/mac.md
index b1d4f18f699..12ff4adaab9 100644
--- a/docs/sources/installation/mac.md
+++ b/docs/sources/installation/mac.md
@@ -11,6 +11,8 @@ weight = 4

# Installing on Mac

+## Install using homebrew
+
Installation can be done using [homebrew](http://brew.sh/)

Install latest stable:
@@ -75,3 +77,18 @@ If you want to manually install a plugin place it here: `/usr/local/var/lib/graf

The default sqlite database is located at `/usr/local/var/lib/grafana`

+## Installing from binary tar file
+
+Download [the latest `.tar.gz` file](https://grafana.com/get) and
+extract it. This will extract into a folder named after the version you
+downloaded. This folder contains all files required to run Grafana. There are
+no init scripts or install scripts in this package.
+
+To configure Grafana add a configuration file named `custom.ini` to the
+`conf` folder and override any of the settings defined in
+`conf/defaults.ini`.
+
+Start Grafana by executing `./bin/grafana-server web`. The `grafana-server`
+binary needs the working directory to be the root install directory (where the
+binary and the `public` folder is located).
+
diff --git a/docs/sources/installation/rpm.md b/docs/sources/installation/rpm.md
index 2d36bf7e331..13597b9d921 100644
--- a/docs/sources/installation/rpm.md
+++ b/docs/sources/installation/rpm.md
@@ -15,42 +15,49 @@ weight = 2

Description | Download
------------ | -------------
-Stable for CentOS / Fedora / OpenSuse / Redhat Linux | [5.1.2 (x86-64 rpm)](https://s3-us-west-2.amazonaws.com/grafana-releases/release/grafana-5.1.2-1.x86_64.rpm)
-
+Stable for CentOS / Fedora / OpenSuse / Redhat Linux | [x86-64](https://grafana.com/grafana/download?platform=linux)
+Stable for CentOS / Fedora / OpenSuse / Redhat Linux | [ARM64](https://grafana.com/grafana/download?platform=arm)
+Stable for CentOS / Fedora / OpenSuse / Redhat Linux | [ARMv7](https://grafana.com/grafana/download?platform=arm)

-Read [Upgrading Grafana]({{< relref "installation/upgrading.md" >}}) for tips and guidance on updating an existing
-installation.
+Read [Upgrading Grafana]({{< relref "installation/upgrading.md" >}}) for tips and guidance on updating an existing installation.

## Install Stable

You can install Grafana using Yum directly.

```bash
-$ sudo yum install https://s3-us-west-2.amazonaws.com/grafana-releases/release/grafana-5.1.2-1.x86_64.rpm
+$ sudo yum install 
```
-
+$ sudo yum install https://s3-us-west-2.amazonaws.com/grafana-releases/release/grafana-5.1.4-1.x86_64.rpm
+```

-Or install manually using `rpm`.
-
-#### On CentOS / Fedora / Redhat:
+Or install manually using `rpm`.
First execute
+
+```bash
+$ wget 
+```
+
+Example:
+
+```bash
+$ wget https://s3-us-west-2.amazonaws.com/grafana-releases/release/grafana-5.1.4-1.x86_64.rpm
+```
+
+### On CentOS / Fedora / Redhat:

```bash
-$ wget https://s3-us-west-2.amazonaws.com/grafana-releases/release/grafana-5.1.2-1.x86_64.rpm
$ sudo yum install initscripts fontconfig
-$ sudo rpm -Uvh grafana-5.1.2-1.x86_64.rpm
+$ sudo rpm -Uvh 
```

-#### On OpenSuse:
+### On OpenSuse:

```bash
-$ sudo rpm -i --nodeps grafana-5.1.2-1.x86_64.rpm
+$ sudo rpm -i --nodeps 
```

## Install via YUM Repository
diff --git a/docs/sources/installation/troubleshooting.md b/docs/sources/installation/troubleshooting.md
index 12104c6e826..4b777f3248d 100644
--- a/docs/sources/installation/troubleshooting.md
+++ b/docs/sources/installation/troubleshooting.md
@@ -21,7 +21,7 @@ the data source response.
To check this you should use Query Inspector (new in Grafana v4.5). The query
Inspector shows query requests and responses.

-For more on the query insector read [this guide here](https://community.grafana.com/t/using-grafanas-query-inspector-to-troubleshoot-issues/2630). For
+For more on the query inspector read [this guide here](https://community.grafana.com/t/using-grafanas-query-inspector-to-troubleshoot-issues/2630). For
older versions of Grafana read the [how troubleshoot metric query issue](https://community.grafana.com/t/how-to-troubleshoot-metric-query-issues/50/2) article.

## Logging
diff --git a/docs/sources/installation/windows.md b/docs/sources/installation/windows.md
index 0c6eeef203e..5dc87984512 100644
--- a/docs/sources/installation/windows.md
+++ b/docs/sources/installation/windows.md
@@ -12,17 +12,15 @@ weight = 3

Description | Download
------------ | -------------
-Latest stable package for Windows | [grafana-5.1.2.windows-x64.zip](https://s3-us-west-2.amazonaws.com/grafana-releases/release/grafana-5.1.2.windows-x64.zip)
-
-
+Latest stable package for Windows | [x64](https://grafana.com/grafana/download?platform=windows)

Read [Upgrading Grafana]({{< relref "installation/upgrading.md" >}}) for tips and guidance on updating an existing
installation.

## Configure

+**Important:** After you've downloaded the zip file and before extracting it, make sure to open properties for that file (right-click Properties) and check the `unblock` checkbox, then click `Ok`.
+
The zip file contains a folder with the current Grafana version. Extract
this folder to anywhere you want Grafana to run from. Go into the
`conf` directory and copy `sample.ini` to `custom.ini`. You should edit
diff --git a/docs/sources/plugins/developing/auth-for-datasources.md b/docs/sources/plugins/developing/auth-for-datasources.md
new file mode 100644
index 00000000000..c03793e745f
--- /dev/null
+++ b/docs/sources/plugins/developing/auth-for-datasources.md
@@ -0,0 +1,99 @@
++++
+title = "Authentication for Datasource Plugins"
+type = "docs"
+[menu.docs]
+name = "Authentication for Datasource Plugins"
+parent = "developing"
+weight = 3
++++
+
+# Authentication for Datasource Plugins
+
+Grafana has a proxy feature that proxies all data requests through the Grafana backend. This is very useful when your datasource plugin calls an external/third-party API. The Grafana proxy adds CORS headers and can authenticate against the external API. This means that a datasource plugin that proxies all requests via Grafana can enable token authentication and the token will be renewed automatically for the user when it expires.
+
+The plugin config page should save the API key/password to be encrypted (using the `secureJsonData` feature) and then when a request from the datasource is made, the Grafana Proxy will:
+
+ 1. decrypt the API key/password on the backend.
+ 2. carry out authentication and generate an OAuth token that will be added as an `Authorization` HTTP header to all requests (or it will add an HTTP header with the API key).
+ 3. renew the token if it expires.
+
+This means that users that access the datasource config page cannot access the API key or password after it is saved the first time and that no secret keys are sent in plain text through the browser where they can be spied on.
+
+For backend authentication to work, the external/third-party API must either have an OAuth endpoint or accept an API key as an HTTP header for authentication.
+
+## Plugin Routes
+
+You can specify routes in the `plugin.json` file for your datasource plugin. [Here is an example](https://github.com/grafana/azure-monitor-datasource/blob/d74c82145c0a4af07a7e96cc8dde231bfd449bd9/src/plugin.json#L30-L95) with lots of routes (though most plugins will just have one route).
+
+When you build your url to the third-party API in your datasource class, the url should start with the text specified in the path field for a route. The proxy will strip out the path text and replace it with the value in the url field.
+
+For example, if my code makes a call to url `azuremonitor/foo/bar` with this code:
+
+```js
+this.backendSrv.datasourceRequest({
+  url: url,
+  method: 'GET',
+})
+```
+
+and this route:
+
+```json
+"routes": [{
+  "path": "azuremonitor",
+  "method": "GET",
+  "url": "https://management.azure.com",
+  ...
+}]
+```
+
+then the Grafana proxy will transform it into "https://management.azure.com/foo/bar" and add CORS headers.
+
+The `method` parameter is optional. It can be set to any HTTP verb to provide more fine-grained control.
+
+## Encrypting Sensitive Data
+
+When a user saves a password or secret with your datasource plugin's Config page, you can save data to a column in the datasource table called `secureJsonData` that is an encrypted blob. Any data saved in the blob is encrypted by Grafana and can only be decrypted by the Grafana server on the backend. This means once a password is saved, no sensitive data is sent to the browser. If the password is saved in the `jsonData` blob or the `password` field then it is unencrypted and anyone with Admin access (with the help of Chrome Developer Tools) can read it.
+
+This is an example of using the `secureJsonData` blob to save a property called `password`:
+
+```html
+<!-- reconstructed sketch; the original markup was lost. The important part
+     is that ng-model stores the value under secureJsonData. -->
+<input type="password" class="gf-form-input"
+       ng-model="ctrl.current.secureJsonData.password"
+       placeholder="password">
+```
+
+## API Key/HTTP Header Authentication
+
+Some third-party APIs accept an HTTP header for authentication. The [example](https://github.com/grafana/azure-monitor-datasource/blob/d74c82145c0a4af07a7e96cc8dde231bfd449bd9/src/plugin.json#L91-L93) below has a `headers` section that defines the name of the HTTP header that the API expects and it uses the `secureJsonData` blob to fetch an encrypted API key. The Grafana server proxy will decrypt the key, add the `X-API-Key` header to the request and forward it to the third-party API.
+
+```json
+{
+  "path": "appinsights",
+  "method": "GET",
+  "url": "https://api.applicationinsights.io",
+  "headers": [
+    {"name": "X-API-Key", "content": "{{.SecureJsonData.appInsightsApiKey}}"}
+  ]
+}
+```
+
+## How Token Authentication Works
+
+The token auth section in the `plugin.json` file looks like this:
+
+```json
+"tokenAuth": {
+  "url": "https://login.microsoftonline.com/{{.JsonData.tenantId}}/oauth2/token",
+  "params": {
+    "grant_type": "client_credentials",
+    "client_id": "{{.JsonData.clientId}}",
+    "client_secret": "{{.SecureJsonData.clientSecret}}",
+    "resource": "https://management.azure.com/"
+  }
+}
+```
+
+This interpolates data from both `jsonData` and `secureJsonData` to generate the token request to the third-party API. It is common for tokens to have a short expiry period (30 minutes). The proxy in the Grafana server will automatically renew the token if it has expired.
+
+## Always Restart the Grafana Server After Route Changes
+
+The plugin.json files are only loaded when the Grafana server starts, so when a route is added or changed the Grafana server has to be restarted for the changes to take effect.
diff --git a/docs/sources/plugins/developing/datasources.md b/docs/sources/plugins/developing/datasources.md
index 064f3a850ae..f8792441bbd 100644
--- a/docs/sources/plugins/developing/datasources.md
+++ b/docs/sources/plugins/developing/datasources.md
@@ -25,7 +25,6 @@ To interact with the rest of grafana the plugins module file can export 5 differ
- Datasource (Required)
- QueryCtrl (Required)
- ConfigCtrl (Required)
-- QueryOptionsCtrl
- AnnotationsQueryCtrl

## Plugin json
@@ -182,12 +181,6 @@ A JavaScript class that will be instantiated and treated as an Angular controlle

Requires a static template or templateUrl variable which will be rendered as the view for this controller.

-## QueryOptionsCtrl
-
-A JavaScript class that will be instantiated and treated as an Angular controller when the user edits metrics in a panel. This controller is responsible for handling panel wide settings for the datasource, such as interval, rate and aggregations if needed.
-
-Requires a static template or templateUrl variable which will be rendered as the view for this controller.
-
## AnnotationsQueryCtrl

A JavaScript class that will be instantiated and treated as an Angular controller when the user choose this type of datasource in the templating menu in the dashboard.
diff --git a/docs/sources/plugins/developing/panels.md b/docs/sources/plugins/developing/panels.md
index d679288e2d2..8670c15e093 100644
--- a/docs/sources/plugins/developing/panels.md
+++ b/docs/sources/plugins/developing/panels.md
@@ -25,7 +25,7 @@ export class MyPanelCtrl extends PanelCtrl {
...
```

-In this case, make sure the template has a single `
...
` root. The plugin loader will modifiy that element adding a scrollbar. +In this case, make sure the template has a single `
...
` root. The plugin loader will modify that element adding a scrollbar.
diff --git a/docs/sources/plugins/developing/plugin-review-guidelines.md b/docs/sources/plugins/developing/plugin-review-guidelines.md
new file mode 100644
index 00000000000..8efb023cf64
--- /dev/null
+++ b/docs/sources/plugins/developing/plugin-review-guidelines.md
@@ -0,0 +1,175 @@
++++
+title = "Plugin Review Guidelines"
+type = "docs"
+[menu.docs]
+name = "Plugin Review Guidelines"
+parent = "developing"
+weight = 2
++++
+
+# Plugin Review Guidelines
+
+The Grafana team reviews all plugins that are published on Grafana.com. There are two areas we review: the metadata for the plugin and the plugin functionality.
+
+## Metadata
+
+The plugin metadata consists of a `plugin.json` file and the README.md file. The `plugin.json` file is used by Grafana to load the plugin, and the README.md file is shown in the plugins section of Grafana and the plugins section of Grafana.com.
+
+### README.md
+
+The README.md file is shown on the plugins page in Grafana and the plugin page on Grafana.com. There are some differences between the GitHub markdown and the markdown allowed in Grafana/Grafana.com:
+
+- Cannot contain inline HTML.
+- Any image links should be absolute links. For example: https://raw.githubusercontent.com/grafana/azure-monitor-datasource/master/dist/img/grafana_cloud_install.png
+
+The README should:
+
+- describe the purpose of the plugin.
+- contain steps on how to get started.
+
+### Plugin.json
+
+The `plugin.json` file is the same concept as the `package.json` file for an npm package. When the Grafana server starts it will scan the plugin folders (all folders in the data/plugins subfolder) and load every folder that contains a `plugin.json` file unless the folder contains a subfolder named `dist`. In that case, the Grafana server will load the `dist` folder instead.
+
+A minimal `plugin.json` file:
+
+```json
+{
+  "type": "panel",
+  "name": "Clock",
+  "id": "yourorg-clock-panel",
+
+  "info": {
+    "description": "Clock panel for grafana",
+    "author": {
+      "name": "Author Name",
+      "url": "http://yourwebsite.com"
+    },
+    "keywords": ["clock", "panel"],
+    "version": "1.0.0",
+    "updated": "2018-03-24"
+  },
+
+  "dependencies": {
+    "grafanaVersion": "3.x.x",
+    "plugins": [ ]
+  }
+}
+```
+
+- The convention for the plugin id is [github username/org]-[plugin name]-[datasource|app|panel] and it has to be unique. Although if org and plugin name are the same then [plugin name]-[datasource|app|panel] is also valid. The org **cannot** be `grafana` unless it is a plugin created by the Grafana core team.
+
+  Examples:
+
+  - raintank-worldping-app
+  - ryantxu-ajax-panel
+  - alexanderzobnin-zabbix-app
+  - hawkular-datasource
+
+- The `type` field should be either `datasource`, `app` or `panel`.
+- The `version` field should be in the form: x.x.x e.g. `1.0.0` or `0.4.1`.
+
+The full file format for the `plugin.json` file is described [here](http://docs.grafana.org/plugins/developing/plugin.json/).
+
+## Plugin Language
+
+JavaScript, TypeScript, ES6 (or any other language) are all fine as long as the contents of the `dist` subdirectory are transpiled to JavaScript (ES5).
+
+## File and Directory Structure Conventions
+
+Here is a typical directory structure for a plugin.
+ +```bash +johnnyb-awesome-datasource +|-- dist +|-- src +| |-- img +| | |-- logo.svg +| |-- partials +| | |-- annotations.editor.html +| | |-- config.html +| | |-- query.editor.html +| |-- datasource.js +| |-- module.js +| |-- plugin.json +| |-- query_ctrl.js +|-- Gruntfile.js +|-- LICENSE +|-- package.json +|-- README.md +``` + +Most JavaScript projects have a build step. The generated JavaScript should be placed in the `dist` directory and the source code in the `src` directory. We recommend that the plugin.json file be placed in the src directory and then copied over to the dist directory when building. The `README.md` can be placed in the root or in the dist directory. + +Directories: + +- `src/` contains plugin source files. +- `src/partials` contains html templates. +- `src/img` contains plugin logos and other images. +- `dist/` contains built content. + +## HTML and CSS + +For the HTML on editor tabs, we recommend using the inbuilt Grafana styles rather than defining your own. This makes plugins feel like a more natural part of Grafana. If done correctly, the html will also be responsive and adapt to smaller screens. The `gf-form` css classes should be used for labels and inputs. + +Below is a minimal example of an editor row with one form group and two fields, a dropdown and a text input: + +```html +
+<!-- reconstructed sketch: the original example markup was lost in extraction.
+     The gf-form class names follow the conventions described above; the
+     ng-model paths are illustrative. -->
+<div class="gf-form-group">
+  <div class="gf-form-inline">
+    <div class="gf-form">
+      <label class="gf-form-label width-10">My Plugin Options</label>
+      <div class="gf-form-select-wrapper max-width-12">
+        <select class="gf-form-input" ng-model="ctrl.panel.myOption" ng-options="t for t in ctrl.myOptions"></select>
+      </div>
+    </div>
+    <div class="gf-form">
+      <label class="gf-form-label width-10">Alias</label>
+      <input type="text" class="gf-form-input max-width-12" ng-model="ctrl.panel.alias" placeholder="alias">
+    </div>
+  </div>
+</div>
+```
+
+Use the `width-x` and `max-width-x` classes to control the width of your labels and input fields. Try to get labels and input fields to line up neatly by having the same width for all the labels in a group and the same width for all inputs in a group if possible.
+
+## Data Sources
+
+A basic guide for data sources can be found [here](http://docs.grafana.org/plugins/developing/datasources/).
+
+### Config Page Guidelines
+
+- It should be as easy as possible for a user to configure a url. If the data source is using the `datasource-http-settings` component, it should use the `suggest-url` attribute to suggest the default url or a url that is similar to what it should be (especially important if the url refers to a REST endpoint that is not common knowledge for most users e.g. `https://yourserver:4000/api/custom-endpoint`).
+
+  ```html
+  <!-- reconstructed sketch; the original markup was lost in extraction -->
+  <datasource-http-settings current="ctrl.current"
+                            suggest-url="https://yourserver:4000/api/custom-endpoint">
+  </datasource-http-settings>
+  ```
+
+- The `testDatasource` function should make a query to the data source that will also test that the authentication details are correct. This is so the data source is correctly configured when the user tries to write a query in a new dashboard.
+
+#### Password Security
+
+If possible, any passwords or secrets should be saved in the `secureJsonData` blob. To encrypt sensitive data, the Grafana server's proxy feature must be used. The Grafana server has support for token authentication (OAuth) and HTTP Header authentication. If the calls have to be sent directly from the browser to a third-party API then this will not be possible and sensitive data will not be encrypted.
+
+Read more here about how [Authentication for Datasources]({{< relref "auth-for-datasources.md" >}}) works.
+
+If using the proxy feature then the Config page should use the `secureJsonData` blob like this (bindings reconstructed from the section above):
+
+  - good: `ng-model="ctrl.current.secureJsonData.password"`
+  - bad: `ng-model="ctrl.current.password"`
+
+### Query Editor
+
+Each query editor is unique and can have a unique style. It should be adapted to what the users of the data source are used to.
+
+- Should use the Grafana CSS `gf-form` classes.
+- Should be neat and tidy. Labels and fields in columns should be aligned and should be the same width if possible.
+- The datasource should handle the case where a user toggles a query off (by clicking on the eye icon) and should not execute that query. This is done by checking the `hide` property - an [example](https://github.com/grafana/grafana/blob/master/public/app/plugins/datasource/postgres/datasource.ts#L35-L38).
+- Should not execute queries if fields in the Query Editor are empty and the query will throw an exception (defensive programming).
+- Should handle errors. There are two main ways to do this:
+  - use the notification system in Grafana to show a toaster popup with the error message. Example [here](https://github.com/alexanderzobnin/grafana-zabbix/blob/fdbbba2fb03f5f2a4b3b0715415e09d5a4cf6cde/src/panel-triggers/triggers_panel_ctrl.js#L467-L471).
  - provide an error notification in the query editor like the MySQL/Postgres data sources do. Example code in the `query_ctrl` [here](https://github.com/grafana/azure-monitor-datasource/blob/b184d077f082a69f962120ef0d1f8296a0d46f03/src/query_ctrl.ts#L36-L51) and in the [html](https://github.com/grafana/azure-monitor-datasource/blob/b184d077f082a69f962120ef0d1f8296a0d46f03/src/partials/query.editor.html#L190-L193).
diff --git a/docs/sources/project/building_from_source.md b/docs/sources/project/building_from_source.md
index 13d71e8dcf4..a0b553594ce 100644
--- a/docs/sources/project/building_from_source.md
+++ b/docs/sources/project/building_from_source.md
@@ -13,7 +13,7 @@ dev environment.
Grafana ships with its own required backend server; also comple
## Dependencies

-- [Go 1.9.2](https://golang.org/dl/)
+- [Go 1.10](https://golang.org/dl/)
- [Git](https://git-scm.com/downloads)
- [NodeJS LTS](https://nodejs.org/download/)
- node-gyp is the Node.js native addon build tool and it requires extra dependencies: python 2.7, make and GCC. These are already installed for most Linux distros and MacOS. See the Building On Windows section or the [node-gyp installation instructions](https://github.com/nodejs/node-gyp#installation) for more details.
@@ -66,13 +66,13 @@ You can run a local instance of Grafana by running:
```bash
./bin/grafana-server
```
-If you built the binary with `go run build.go build`, run `./bin/grafana-server`
+Or, if you built the binary with `go run build.go build`, run `./bin/<os>-<arch>/grafana-server`

If you built it with `go build .`, run `./grafana`

Open grafana in your browser (default [http://localhost:3000](http://localhost:3000)) and login with admin user (default user/pass = admin/admin).

-## Developing Grafana
+# Developing Grafana

To add features, customize your config, etc, you'll need to rebuild the backend when you change the source code. We use a tool named `bra` that does this.
@@ -124,7 +124,7 @@ Learn more about Grafana config options in the [Configuration section](/installa
## Create a pull requests
Please contribute to the Grafana project and submit a pull request! Build new features, write or update documentation, fix bugs and generally make Grafana even more awesome.

-## Troubleshooting
+# Troubleshooting

**Problem**: PhantomJS or node-sass errors when running grunt
diff --git a/docs/sources/reference/scripting.md b/docs/sources/reference/scripting.md
index 551805b567a..7f218765d39 100644
--- a/docs/sources/reference/scripting.md
+++ b/docs/sources/reference/scripting.md
@@ -21,42 +21,32 @@ If you open scripted.js you can see how it reads url parameters from ARGS variab
## Example

```javascript
-var rows = 1;
var seriesName = 'argName';

-if(!_.isUndefined(ARGS.rows)) {
-  rows = parseInt(ARGS.rows, 10);
-}
-
if(!_.isUndefined(ARGS.name)) {
  seriesName = ARGS.name;
}

-for (var i = 0; i < rows; i++) {
-
-  dashboard.rows.push({
-    title: 'Scripted Graph ' + i,
-    height: '300px',
-    panels: [
-      {
-        title: 'Events',
-        type: 'graph',
-        span: 12,
-        fill: 1,
-        linewidth: 2,
-        targets: [
-          {
-            'target': "randomWalk('" + seriesName + "')"
-          },
-          {
-            'target': "randomWalk('random walk2')"
-          }
-        ],
-      }
-    ]
-  });
-
-}
+dashboard.panels.push({
+  title: 'Events',
+  type: 'graph',
+  fill: 1,
+  linewidth: 2,
+  gridPos: {
+    h: 10,
+    w: 24,
+    x: 0,
+    y: 10,
+  },
+  targets: [
+    {
+      'target': "randomWalk('" + seriesName + "')"
+    },
+    {
+      'target': "randomWalk('random walk2')"
+    }
+  ]
+});

return dashboard;
```
diff --git a/docs/sources/reference/templating.md b/docs/sources/reference/templating.md
index 8341b9770bd..efe9db61e3d 100644
--- a/docs/sources/reference/templating.md
+++ b/docs/sources/reference/templating.md
@@ -11,7 +11,7 @@ weight = 1
# Variables

Variables allows for more interactive and dynamic dashboards. Instead of hard-coding things like server, application
-and sensor name in you metric queries you can use variables in their place. Variables are shown as dropdown select boxes at the top of
+and sensor name in your metric queries you can use variables in their place. Variables are shown as dropdown select boxes at the top of
the dashboard. These dropdowns make it easy to change the data being displayed in your dashboard.
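
For example, instead of hard-coding a server name in a hypothetical Graphite query, you could reference a `$server` variable (the metric path here is illustrative):

```bash
apps.backend.$server.counters.requests.count
```

When the user picks a different server from the dropdown, the query is re-run with the selected value substituted in.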
{{< docs-imagebox img="/img/docs/v50/variables_dashboard.png" >}} diff --git a/docs/sources/tutorials/screencasts.md b/docs/sources/tutorials/screencasts.md index e92a07c51a7..882544e7318 100644 --- a/docs/sources/tutorials/screencasts.md +++ b/docs/sources/tutorials/screencasts.md @@ -94,7 +94,7 @@ weight = 10
- #3 Whats New In Grafana 2.0 + #3 What's New In Grafana 2.0
diff --git a/docs/versions.json b/docs/versions.json index 61e471938f2..caefbe198d6 100644 --- a/docs/versions.json +++ b/docs/versions.json @@ -1,5 +1,6 @@ [ - { "version": "v5.1", "path": "/", "archived": false, "current": true }, + { "version": "v5.2", "path": "/", "archived": false, "current": true }, + { "version": "v5.1", "path": "/v5.1", "archived": true }, { "version": "v5.0", "path": "/v5.0", "archived": true }, { "version": "v4.6", "path": "/v4.6", "archived": true }, { "version": "v4.5", "path": "/v4.5", "archived": true }, diff --git a/karma.conf.js b/karma.conf.js index 3f006af08b6..352e8e4e027 100644 --- a/karma.conf.js +++ b/karma.conf.js @@ -19,8 +19,8 @@ module.exports = function(config) { }, webpack: webpackTestConfig, - webpackServer: { - noInfo: true, // please don't spam the console when running in karma! + webpackMiddleware: { + stats: 'minimal', }, // list of files to exclude diff --git a/latest.json b/latest.json index 5a68ca428b4..8e26289c856 100644 --- a/latest.json +++ b/latest.json @@ -1,4 +1,4 @@ { - "stable": "5.0.4", - "testing": "5.0.4" + "stable": "5.2.0", + "testing": "5.2.0" } diff --git a/package.json b/package.json index 06883a6c7ec..c26438230cc 100644 --- a/package.json +++ b/package.json @@ -4,7 +4,7 @@ "company": "Grafana Labs" }, "name": "grafana", - "version": "5.2.0-pre1", + "version": "5.3.0-pre1", "repository": { "type": "git", "url": "http://github.com/grafana/grafana.git" @@ -16,12 +16,11 @@ "@types/node": "^8.0.31", "@types/react": "^16.0.25", "@types/react-dom": "^16.0.3", - "angular-mocks": "^1.6.6", + "angular-mocks": "1.6.6", "autoprefixer": "^6.4.0", - "awesome-typescript-loader": "^3.2.3", "axios": "^0.17.1", "babel-core": "^6.26.0", - "babel-loader": "^7.1.2", + "babel-loader": "^7.1.4", "babel-plugin-syntax-dynamic-import": "^6.18.0", "babel-preset-es2015": "^6.24.1", "clean-webpack-plugin": "^0.1.19", @@ -33,8 +32,9 @@ "es6-shim": "^0.35.3", "expect.js": "~0.2.0", "expose-loader": "^0.7.3", - "extract-text-webpack-plugin": "^3.0.0", - "file-loader": "^0.11.2", + "extract-text-webpack-plugin": "^4.0.0-beta.0", + "file-loader": "^1.1.11", + "fork-ts-checker-webpack-plugin": "^0.4.1", "gaze": "^1.1.2", "glob": "~7.0.0", "grunt": "1.0.1", @@ -57,11 +57,10 @@ "grunt-webpack": "^3.0.2", "html-loader": "^0.5.1", "html-webpack-harddisk-plugin": "^0.2.0", - "html-webpack-plugin": "^2.30.1", + "html-webpack-plugin": "^3.2.0", "husky": "^0.14.3", "jest": "^22.0.4", "jshint-stylish": "~2.2.1", - "json-loader": "^0.5.7", "karma": "1.7.0", "karma-chrome-launcher": "~2.2.0", "karma-expect": "~1.1.3", @@ -69,7 +68,7 @@ "karma-phantomjs-launcher": "1.0.4", "karma-sinon": "^1.0.5", "karma-sourcemap-loader": "^0.3.7", - "karma-webpack": "^2.0.4", + "karma-webpack": "^3.0.0", "lint-staged": "^6.0.0", "load-grunt-tasks": "3.5.2", "mobx-react-devtools": "^4.2.15", @@ -83,30 +82,32 @@ "postcss-loader": "^2.0.6", "postcss-reporter": "^5.0.0", "prettier": "1.9.2", - "react-hot-loader": "^4.0.1", + "react-hot-loader": "^4.2.0", "react-test-renderer": "^16.0.0", "sass-lint": "^1.10.2", - "sass-loader": "^6.0.6", + "sass-loader": "^7.0.1", "sinon": "1.17.6", - "style-loader": "^0.20.3", + "style-loader": "^0.21.0", "systemjs": "0.20.19", "systemjs-plugin-css": "^0.1.36", - "ts-jest": "^22.0.0", - "ts-loader": "^3.2.0", + "ts-loader": "^4.3.0", + "ts-jest": "^22.4.6", "tslint": "^5.8.0", "tslint-loader": "^3.5.3", "typescript": "^2.6.2", - "webpack": "^3.10.0", + "webpack": "^4.8.0", "webpack-bundle-analyzer": "^2.9.0", "webpack-cleanup-plugin": "^0.5.1", - 
"webpack-dev-server": "2.11.1", + "fork-ts-checker-webpack-plugin": "^0.4.2", + "webpack-cli": "^2.1.4", + "webpack-dev-server": "^3.1.0", "webpack-merge": "^4.1.0", "zone.js": "^0.7.2" }, "scripts": { - "dev": "webpack --progress --colors --config scripts/webpack/webpack.dev.js", - "start": "webpack-dev-server --progress --colors --config scripts/webpack/webpack.dev.js", - "watch": "webpack --progress --colors --watch --config scripts/webpack/webpack.dev.js", + "dev": "webpack --progress --colors --mode development --config scripts/webpack/webpack.dev.js", + "start": "webpack-dev-server --progress --colors --mode development --config scripts/webpack/webpack.hot.js", + "watch": "webpack --progress --colors --watch --mode development --config scripts/webpack/webpack.dev.js", "build": "grunt build", "test": "grunt test", "test:coverage": "grunt test --coverage=true", @@ -137,34 +138,36 @@ }, "license": "Apache-2.0", "dependencies": { - "angular": "^1.6.6", - "angular-bindonce": "^0.3.1", - "angular-native-dragdrop": "^1.2.2", - "angular-route": "^1.6.6", - "angular-sanitize": "^1.6.6", + "angular": "1.6.6", + "angular-bindonce": "0.3.1", + "angular-native-dragdrop": "1.2.2", + "angular-route": "1.6.6", + "angular-sanitize": "1.6.6", "babel-polyfill": "^6.26.0", "baron": "^3.0.3", "brace": "^0.10.0", "classnames": "^2.2.5", "clipboard": "^1.7.1", "d3": "^4.11.0", - "d3-scale-chromatic": "^1.1.1", + "d3-scale-chromatic": "^1.3.0", "eventemitter3": "^2.0.3", "file-saver": "^1.3.3", "immutable": "^3.8.2", "jquery": "^3.2.1", - "lodash": "^4.17.4", + "lodash": "^4.17.10", + "mini-css-extract-plugin": "^0.4.0", "mobx": "^3.4.1", "mobx-react": "^4.3.5", "mobx-state-tree": "^1.3.1", - "moment": "^2.18.1", + "moment": "^2.22.2", "mousetrap": "^1.6.0", "mousetrap-global-bind": "^1.1.0", + "optimize-css-assets-webpack-plugin": "^4.0.2", "prismjs": "^1.6.0", "prop-types": "^15.6.0", "react": "^16.2.0", "react-dom": "^16.2.0", - "react-grid-layout-grafana": "0.16.0", + "react-grid-layout": "0.16.6", "react-highlight-words": "^0.10.0", "react-popper": "^0.7.5", "react-select": "^1.1.0", @@ -178,7 +181,8 @@ "slate-react": "^0.12.4", "tether": "^1.4.0", "tether-drop": "https://github.com/torkelo/drop/tarball/master", - "tinycolor2": "^1.4.1" + "tinycolor2": "^1.4.1", + "uglifyjs-webpack-plugin": "^1.2.7" }, "resolutions": { "caniuse-db": "1.0.30000772" diff --git a/pkg/api/alerting.go b/pkg/api/alerting.go index a9a3773ceb1..60013fe2b10 100644 --- a/pkg/api/alerting.go +++ b/pkg/api/alerting.go @@ -2,12 +2,14 @@ package api import ( "fmt" + "strconv" "github.com/grafana/grafana/pkg/api/dtos" "github.com/grafana/grafana/pkg/bus" m "github.com/grafana/grafana/pkg/models" "github.com/grafana/grafana/pkg/services/alerting" "github.com/grafana/grafana/pkg/services/guardian" + "github.com/grafana/grafana/pkg/services/search" ) func ValidateOrgAlert(c *m.ReqContext) { @@ -46,12 +48,64 @@ func GetAlertStatesForDashboard(c *m.ReqContext) Response { // GET /api/alerts func GetAlerts(c *m.ReqContext) Response { + dashboardQuery := c.Query("dashboardQuery") + dashboardTags := c.QueryStrings("dashboardTag") + stringDashboardIDs := c.QueryStrings("dashboardId") + stringFolderIDs := c.QueryStrings("folderId") + + dashboardIDs := make([]int64, 0) + for _, id := range stringDashboardIDs { + dashboardID, err := strconv.ParseInt(id, 10, 64) + if err == nil { + dashboardIDs = append(dashboardIDs, dashboardID) + } + } + + if dashboardQuery != "" || len(dashboardTags) > 0 || len(stringFolderIDs) > 0 { + folderIDs := 
make([]int64, 0) + for _, id := range stringFolderIDs { + folderID, err := strconv.ParseInt(id, 10, 64) + if err == nil { + folderIDs = append(folderIDs, folderID) + } + } + + searchQuery := search.Query{ + Title: dashboardQuery, + Tags: dashboardTags, + SignedInUser: c.SignedInUser, + Limit: 1000, + OrgId: c.OrgId, + DashboardIds: dashboardIDs, + Type: string(search.DashHitDB), + FolderIds: folderIDs, + Permission: m.PERMISSION_VIEW, + } + + err := bus.Dispatch(&searchQuery) + if err != nil { + return Error(500, "List alerts failed", err) + } + + for _, d := range searchQuery.Result { + if d.Type == search.DashHitDB && d.Id > 0 { + dashboardIDs = append(dashboardIDs, d.Id) + } + } + + // if we didn't find any dashboards, return empty result + if len(dashboardIDs) == 0 { + return JSON(200, []*m.AlertListItemDTO{}) + } + } + query := m.GetAlertsQuery{ - OrgId: c.OrgId, - DashboardId: c.QueryInt64("dashboardId"), - PanelId: c.QueryInt64("panelId"), - Limit: c.QueryInt64("limit"), - User: c.SignedInUser, + OrgId: c.OrgId, + DashboardIDs: dashboardIDs, + PanelId: c.QueryInt64("panelId"), + Limit: c.QueryInt64("limit"), + User: c.SignedInUser, + Query: c.Query("query"), } states := c.QueryStrings("state") diff --git a/pkg/api/alerting_test.go b/pkg/api/alerting_test.go index 9302ef7beca..331beeef5e4 100644 --- a/pkg/api/alerting_test.go +++ b/pkg/api/alerting_test.go @@ -6,6 +6,7 @@ import ( "github.com/grafana/grafana/pkg/api/dtos" "github.com/grafana/grafana/pkg/bus" m "github.com/grafana/grafana/pkg/models" + "github.com/grafana/grafana/pkg/services/search" . "github.com/smartystreets/goconvey/convey" ) @@ -30,7 +31,7 @@ func TestAlertingApiEndpoint(t *testing.T) { }) bus.AddHandler("test", func(query *m.GetTeamsByUserQuery) error { - query.Result = []*m.Team{} + query.Result = []*m.TeamDTO{} return nil }) @@ -64,6 +65,60 @@ func TestAlertingApiEndpoint(t *testing.T) { }) }) }) + + loggedInUserScenarioWithRole("When calling GET on", "GET", "/api/alerts?dashboardId=1", "/api/alerts", m.ROLE_EDITOR, func(sc *scenarioContext) { + var searchQuery *search.Query + bus.AddHandler("test", func(query *search.Query) error { + searchQuery = query + return nil + }) + + var getAlertsQuery *m.GetAlertsQuery + bus.AddHandler("test", func(query *m.GetAlertsQuery) error { + getAlertsQuery = query + return nil + }) + + sc.handlerFunc = GetAlerts + sc.fakeReqWithParams("GET", sc.url, map[string]string{}).exec() + + So(searchQuery, ShouldBeNil) + So(getAlertsQuery, ShouldNotBeNil) + }) + + loggedInUserScenarioWithRole("When calling GET on", "GET", "/api/alerts?dashboardId=1&dashboardId=2&folderId=3&dashboardTag=abc&dashboardQuery=dbQuery&limit=5&query=alertQuery", "/api/alerts", m.ROLE_EDITOR, func(sc *scenarioContext) { + var searchQuery *search.Query + bus.AddHandler("test", func(query *search.Query) error { + searchQuery = query + query.Result = search.HitList{ + &search.Hit{Id: 1}, + &search.Hit{Id: 2}, + } + return nil + }) + + var getAlertsQuery *m.GetAlertsQuery + bus.AddHandler("test", func(query *m.GetAlertsQuery) error { + getAlertsQuery = query + return nil + }) + + sc.handlerFunc = GetAlerts + sc.fakeReqWithParams("GET", sc.url, map[string]string{}).exec() + + So(searchQuery, ShouldNotBeNil) + So(searchQuery.DashboardIds[0], ShouldEqual, 1) + So(searchQuery.DashboardIds[1], ShouldEqual, 2) + So(searchQuery.FolderIds[0], ShouldEqual, 3) + So(searchQuery.Tags[0], ShouldEqual, "abc") + So(searchQuery.Title, ShouldEqual, "dbQuery") + + So(getAlertsQuery, ShouldNotBeNil) + 
So(getAlertsQuery.DashboardIDs[0], ShouldEqual, 1) + So(getAlertsQuery.DashboardIDs[1], ShouldEqual, 2) + So(getAlertsQuery.Limit, ShouldEqual, 5) + So(getAlertsQuery.Query, ShouldEqual, "alertQuery") + }) }) } @@ -80,7 +135,7 @@ func postAlertScenario(desc string, url string, routePattern string, role m.Role defer bus.ClearBusHandlers() sc := setupScenarioContext(url) - sc.defaultHandler = wrap(func(c *m.ReqContext) Response { + sc.defaultHandler = Wrap(func(c *m.ReqContext) Response { sc.context = c sc.context.UserId = TestUserID sc.context.OrgId = TestOrgID diff --git a/pkg/api/annotations.go b/pkg/api/annotations.go index 52eeb57dbb9..55c9c954940 100644 --- a/pkg/api/annotations.go +++ b/pkg/api/annotations.go @@ -37,7 +37,6 @@ func GetAnnotations(c *m.ReqContext) Response { if item.Email != "" { item.AvatarUrl = dtos.GetGravatarUrl(item.Email) } - item.Time = item.Time } return JSON(200, items) @@ -214,7 +213,9 @@ func DeleteAnnotations(c *m.ReqContext, cmd dtos.DeleteAnnotationsCmd) Response repo := annotations.GetRepository() err := repo.Delete(&annotations.DeleteParams{ - AlertId: cmd.PanelId, + OrgId: c.OrgId, + Id: cmd.AnnotationId, + RegionId: cmd.RegionId, DashboardId: cmd.DashboardId, PanelId: cmd.PanelId, }) @@ -235,7 +236,8 @@ func DeleteAnnotationByID(c *m.ReqContext) Response { } err := repo.Delete(&annotations.DeleteParams{ - Id: annotationID, + OrgId: c.OrgId, + Id: annotationID, }) if err != nil { @@ -254,6 +256,7 @@ func DeleteAnnotationRegion(c *m.ReqContext) Response { } err := repo.Delete(&annotations.DeleteParams{ + OrgId: c.OrgId, RegionId: regionID, }) @@ -269,9 +272,9 @@ func canSaveByDashboardID(c *m.ReqContext, dashboardID int64) (bool, error) { return false, nil } - if dashboardID > 0 { - guardian := guardian.New(dashboardID, c.OrgId, c.SignedInUser) - if canEdit, err := guardian.CanEdit(); err != nil || !canEdit { + if dashboardID != 0 { + guard := guardian.New(dashboardID, c.OrgId, c.SignedInUser) + if canEdit, err := guard.CanEdit(); err != nil || !canEdit { return false, err } } diff --git a/pkg/api/annotations_test.go b/pkg/api/annotations_test.go index 9fe96245b9b..08f3018c694 100644 --- a/pkg/api/annotations_test.go +++ b/pkg/api/annotations_test.go @@ -100,6 +100,11 @@ func TestAnnotationsApiEndpoint(t *testing.T) { Id: 1, } + deleteCmd := dtos.DeleteAnnotationsCmd{ + DashboardId: 1, + PanelId: 1, + } + viewerRole := m.ROLE_VIEWER editorRole := m.ROLE_EDITOR @@ -114,7 +119,7 @@ func TestAnnotationsApiEndpoint(t *testing.T) { }) bus.AddHandler("test", func(query *m.GetTeamsByUserQuery) error { - query.Result = []*m.Team{} + query.Result = []*m.TeamDTO{} return nil }) @@ -171,6 +176,25 @@ func TestAnnotationsApiEndpoint(t *testing.T) { }) }) }) + + Convey("When user is an Admin", func() { + role := m.ROLE_ADMIN + Convey("Should be able to do anything", func() { + postAnnotationScenario("When calling POST on", "/api/annotations", "/api/annotations", role, cmd, func(sc *scenarioContext) { + sc.fakeReqWithParams("POST", sc.url, map[string]string{}).exec() + So(sc.resp.Code, ShouldEqual, 200) + }) + + putAnnotationScenario("When calling PUT on", "/api/annotations/1", "/api/annotations/:annotationId", role, updateCmd, func(sc *scenarioContext) { + sc.fakeReqWithParams("PUT", sc.url, map[string]string{}).exec() + So(sc.resp.Code, ShouldEqual, 200) + }) + deleteAnnotationsScenario("When calling POST on", "/api/annotations/mass-delete", "/api/annotations/mass-delete", role, deleteCmd, func(sc *scenarioContext) { + sc.fakeReqWithParams("POST", sc.url, 
map[string]string{}).exec() + So(sc.resp.Code, ShouldEqual, 200) + }) + }) + }) }) } @@ -199,7 +223,7 @@ func postAnnotationScenario(desc string, url string, routePattern string, role m defer bus.ClearBusHandlers() sc := setupScenarioContext(url) - sc.defaultHandler = wrap(func(c *m.ReqContext) Response { + sc.defaultHandler = Wrap(func(c *m.ReqContext) Response { sc.context = c sc.context.UserId = TestUserID sc.context.OrgId = TestOrgID @@ -222,7 +246,7 @@ func putAnnotationScenario(desc string, url string, routePattern string, role m. defer bus.ClearBusHandlers() sc := setupScenarioContext(url) - sc.defaultHandler = wrap(func(c *m.ReqContext) Response { + sc.defaultHandler = Wrap(func(c *m.ReqContext) Response { sc.context = c sc.context.UserId = TestUserID sc.context.OrgId = TestOrgID @@ -239,3 +263,26 @@ func putAnnotationScenario(desc string, url string, routePattern string, role m. fn(sc) }) } + +func deleteAnnotationsScenario(desc string, url string, routePattern string, role m.RoleType, cmd dtos.DeleteAnnotationsCmd, fn scenarioFunc) { + Convey(desc+" "+url, func() { + defer bus.ClearBusHandlers() + + sc := setupScenarioContext(url) + sc.defaultHandler = Wrap(func(c *m.ReqContext) Response { + sc.context = c + sc.context.UserId = TestUserID + sc.context.OrgId = TestOrgID + sc.context.OrgRole = role + + return DeleteAnnotations(c, cmd) + }) + + fakeAnnoRepo = &fakeAnnotationsRepo{} + annotations.SetRepository(fakeAnnoRepo) + + sc.m.Post(routePattern, sc.defaultHandler) + + fn(sc) + }) +} diff --git a/pkg/api/api.go b/pkg/api/api.go index 493f9eb9d01..84425fdae3d 100644 --- a/pkg/api/api.go +++ b/pkg/api/api.go @@ -4,13 +4,12 @@ import ( "github.com/go-macaron/binding" "github.com/grafana/grafana/pkg/api/avatar" "github.com/grafana/grafana/pkg/api/dtos" + "github.com/grafana/grafana/pkg/api/routing" "github.com/grafana/grafana/pkg/middleware" m "github.com/grafana/grafana/pkg/models" ) -// Register adds http routes func (hs *HTTPServer) registerRoutes() { - macaronR := hs.macaron reqSignedIn := middleware.Auth(&middleware.AuthOptions{ReqSignedIn: true}) reqGrafanaAdmin := middleware.Auth(&middleware.AuthOptions{ReqSignedIn: true, ReqGrafanaAdmin: true}) reqEditorRole := middleware.RoleAuth(m.ROLE_EDITOR, m.ROLE_ADMIN) @@ -20,15 +19,12 @@ func (hs *HTTPServer) registerRoutes() { quota := middleware.Quota bind := binding.Bind - // automatically set HEAD for every GET - macaronR.SetAutoHead(true) - r := hs.RouteRegister // not logged in views r.Get("/", reqSignedIn, Index) r.Get("/logout", Logout) - r.Post("/login", quota("session"), bind(dtos.LoginCommand{}), wrap(LoginPost)) + r.Post("/login", quota("session"), bind(dtos.LoginCommand{}), Wrap(LoginPost)) r.Get("/login/:name", quota("session"), OAuthLogin) r.Get("/login", LoginView) r.Get("/invite/:code", Index) @@ -77,6 +73,8 @@ func (hs *HTTPServer) registerRoutes() { r.Get("/dashboards/", reqSignedIn, Index) r.Get("/dashboards/*", reqSignedIn, Index) + r.Get("/explore", reqEditorRole, Index) + r.Get("/playlists/", reqSignedIn, Index) r.Get("/playlists/*", reqSignedIn, Index) r.Get("/alerting/", reqSignedIn, Index) @@ -84,20 +82,20 @@ func (hs *HTTPServer) registerRoutes() { // sign up r.Get("/signup", Index) - r.Get("/api/user/signup/options", wrap(GetSignUpOptions)) - r.Post("/api/user/signup", quota("user"), bind(dtos.SignUpForm{}), wrap(SignUp)) - r.Post("/api/user/signup/step2", bind(dtos.SignUpStep2Form{}), wrap(SignUpStep2)) + r.Get("/api/user/signup/options", Wrap(GetSignUpOptions)) + r.Post("/api/user/signup", quota("user"), 
bind(dtos.SignUpForm{}), Wrap(SignUp)) + r.Post("/api/user/signup/step2", bind(dtos.SignUpStep2Form{}), Wrap(SignUpStep2)) // invited - r.Get("/api/user/invite/:code", wrap(GetInviteInfoByCode)) - r.Post("/api/user/invite/complete", bind(dtos.CompleteInviteForm{}), wrap(CompleteInvite)) + r.Get("/api/user/invite/:code", Wrap(GetInviteInfoByCode)) + r.Post("/api/user/invite/complete", bind(dtos.CompleteInviteForm{}), Wrap(CompleteInvite)) // reset password r.Get("/user/password/send-reset-email", Index) r.Get("/user/password/reset", Index) - r.Post("/api/user/password/send-reset-email", bind(dtos.SendResetPasswordEmailForm{}), wrap(SendResetPasswordEmail)) - r.Post("/api/user/password/reset", bind(dtos.ResetUserPasswordForm{}), wrap(ResetPassword)) + r.Post("/api/user/password/send-reset-email", bind(dtos.SendResetPasswordEmailForm{}), Wrap(SendResetPasswordEmail)) + r.Post("/api/user/password/reset", bind(dtos.ResetUserPasswordForm{}), Wrap(ResetPassword)) // dashboard snapshots r.Get("/dashboard/snapshot/*", Index) @@ -107,148 +105,149 @@ func (hs *HTTPServer) registerRoutes() { r.Post("/api/snapshots/", bind(m.CreateDashboardSnapshotCommand{}), CreateDashboardSnapshot) r.Get("/api/snapshot/shared-options/", GetSharingOptions) r.Get("/api/snapshots/:key", GetDashboardSnapshot) - r.Get("/api/snapshots-delete/:key", reqEditorRole, wrap(DeleteDashboardSnapshot)) + r.Get("/api/snapshots-delete/:deleteKey", Wrap(DeleteDashboardSnapshotByDeleteKey)) + r.Delete("/api/snapshots/:key", reqEditorRole, Wrap(DeleteDashboardSnapshot)) // api renew session based on remember cookie r.Get("/api/login/ping", quota("session"), LoginAPIPing) // authed api - r.Group("/api", func(apiRoute RouteRegister) { + r.Group("/api", func(apiRoute routing.RouteRegister) { // user (signed in) - apiRoute.Group("/user", func(userRoute RouteRegister) { - userRoute.Get("/", wrap(GetSignedInUser)) - userRoute.Put("/", bind(m.UpdateUserCommand{}), wrap(UpdateSignedInUser)) - userRoute.Post("/using/:id", wrap(UserSetUsingOrg)) - userRoute.Get("/orgs", wrap(GetSignedInUserOrgList)) + apiRoute.Group("/user", func(userRoute routing.RouteRegister) { + userRoute.Get("/", Wrap(GetSignedInUser)) + userRoute.Put("/", bind(m.UpdateUserCommand{}), Wrap(UpdateSignedInUser)) + userRoute.Post("/using/:id", Wrap(UserSetUsingOrg)) + userRoute.Get("/orgs", Wrap(GetSignedInUserOrgList)) - userRoute.Post("/stars/dashboard/:id", wrap(StarDashboard)) - userRoute.Delete("/stars/dashboard/:id", wrap(UnstarDashboard)) + userRoute.Post("/stars/dashboard/:id", Wrap(StarDashboard)) + userRoute.Delete("/stars/dashboard/:id", Wrap(UnstarDashboard)) - userRoute.Put("/password", bind(m.ChangeUserPasswordCommand{}), wrap(ChangeUserPassword)) - userRoute.Get("/quotas", wrap(GetUserQuotas)) - userRoute.Put("/helpflags/:id", wrap(SetHelpFlag)) + userRoute.Put("/password", bind(m.ChangeUserPasswordCommand{}), Wrap(ChangeUserPassword)) + userRoute.Get("/quotas", Wrap(GetUserQuotas)) + userRoute.Put("/helpflags/:id", Wrap(SetHelpFlag)) // For dev purpose - userRoute.Get("/helpflags/clear", wrap(ClearHelpFlags)) + userRoute.Get("/helpflags/clear", Wrap(ClearHelpFlags)) - userRoute.Get("/preferences", wrap(GetUserPreferences)) - userRoute.Put("/preferences", bind(dtos.UpdatePrefsCmd{}), wrap(UpdateUserPreferences)) + userRoute.Get("/preferences", Wrap(GetUserPreferences)) + userRoute.Put("/preferences", bind(dtos.UpdatePrefsCmd{}), Wrap(UpdateUserPreferences)) }) // users (admin permission required) - apiRoute.Group("/users", func(usersRoute RouteRegister) { - 
usersRoute.Get("/", wrap(SearchUsers)) - usersRoute.Get("/search", wrap(SearchUsersWithPaging)) - usersRoute.Get("/:id", wrap(GetUserByID)) - usersRoute.Get("/:id/orgs", wrap(GetUserOrgList)) + apiRoute.Group("/users", func(usersRoute routing.RouteRegister) { + usersRoute.Get("/", Wrap(SearchUsers)) + usersRoute.Get("/search", Wrap(SearchUsersWithPaging)) + usersRoute.Get("/:id", Wrap(GetUserByID)) + usersRoute.Get("/:id/orgs", Wrap(GetUserOrgList)) // query parameters /users/lookup?loginOrEmail=admin@example.com - usersRoute.Get("/lookup", wrap(GetUserByLoginOrEmail)) - usersRoute.Put("/:id", bind(m.UpdateUserCommand{}), wrap(UpdateUser)) - usersRoute.Post("/:id/using/:orgId", wrap(UpdateUserActiveOrg)) + usersRoute.Get("/lookup", Wrap(GetUserByLoginOrEmail)) + usersRoute.Put("/:id", bind(m.UpdateUserCommand{}), Wrap(UpdateUser)) + usersRoute.Post("/:id/using/:orgId", Wrap(UpdateUserActiveOrg)) }, reqGrafanaAdmin) // team (admin permission required) - apiRoute.Group("/teams", func(teamsRoute RouteRegister) { - teamsRoute.Post("/", bind(m.CreateTeamCommand{}), wrap(CreateTeam)) - teamsRoute.Put("/:teamId", bind(m.UpdateTeamCommand{}), wrap(UpdateTeam)) - teamsRoute.Delete("/:teamId", wrap(DeleteTeamByID)) - teamsRoute.Get("/:teamId/members", wrap(GetTeamMembers)) - teamsRoute.Post("/:teamId/members", bind(m.AddTeamMemberCommand{}), wrap(AddTeamMember)) - teamsRoute.Delete("/:teamId/members/:userId", wrap(RemoveTeamMember)) + apiRoute.Group("/teams", func(teamsRoute routing.RouteRegister) { + teamsRoute.Post("/", bind(m.CreateTeamCommand{}), Wrap(CreateTeam)) + teamsRoute.Put("/:teamId", bind(m.UpdateTeamCommand{}), Wrap(UpdateTeam)) + teamsRoute.Delete("/:teamId", Wrap(DeleteTeamByID)) + teamsRoute.Get("/:teamId/members", Wrap(GetTeamMembers)) + teamsRoute.Post("/:teamId/members", bind(m.AddTeamMemberCommand{}), Wrap(AddTeamMember)) + teamsRoute.Delete("/:teamId/members/:userId", Wrap(RemoveTeamMember)) }, reqOrgAdmin) // team without requirement of user to be org admin - apiRoute.Group("/teams", func(teamsRoute RouteRegister) { - teamsRoute.Get("/:teamId", wrap(GetTeamByID)) - teamsRoute.Get("/search", wrap(SearchTeams)) + apiRoute.Group("/teams", func(teamsRoute routing.RouteRegister) { + teamsRoute.Get("/:teamId", Wrap(GetTeamByID)) + teamsRoute.Get("/search", Wrap(SearchTeams)) }) // org information available to all users. 
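
Most of the churn in api.go is the rename of the package-private `wrap` helper to the exported `Wrap` (its definition in `pkg/api/common.go` appears further down in this diff); it adapts handlers that return a `Response` value into `macaron.Handler`s. A minimal sketch of the convention, using a hypothetical `GetThing` handler that is not part of this change:

```go
package api

import (
	m "github.com/grafana/grafana/pkg/models"
	"github.com/grafana/grafana/pkg/util"
)

// GetThing is a hypothetical handler, shown only to illustrate the
// pattern: instead of writing to the ResponseWriter itself, the handler
// returns a Response, and Wrap converts that into a macaron.Handler
// which serializes and writes the result.
func GetThing(c *m.ReqContext) Response {
	id := c.ParamsInt64(":id")
	if id == 0 {
		return Error(400, "id is required", nil)
	}

	return JSON(200, util.DynMap{"id": id})
}
```

Registration then reads `apiRoute.Get("/things/:id", Wrap(GetThing))`, mirroring the routes above; exporting the helper presumably lets code outside `pkg/api` reuse the same adapter.
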
- apiRoute.Group("/org", func(orgRoute RouteRegister) { - orgRoute.Get("/", wrap(GetOrgCurrent)) - orgRoute.Get("/quotas", wrap(GetOrgQuotas)) + apiRoute.Group("/org", func(orgRoute routing.RouteRegister) { + orgRoute.Get("/", Wrap(GetOrgCurrent)) + orgRoute.Get("/quotas", Wrap(GetOrgQuotas)) }) // current org - apiRoute.Group("/org", func(orgRoute RouteRegister) { - orgRoute.Put("/", bind(dtos.UpdateOrgForm{}), wrap(UpdateOrgCurrent)) - orgRoute.Put("/address", bind(dtos.UpdateOrgAddressForm{}), wrap(UpdateOrgAddressCurrent)) - orgRoute.Post("/users", quota("user"), bind(m.AddOrgUserCommand{}), wrap(AddOrgUserToCurrentOrg)) - orgRoute.Patch("/users/:userId", bind(m.UpdateOrgUserCommand{}), wrap(UpdateOrgUserForCurrentOrg)) - orgRoute.Delete("/users/:userId", wrap(RemoveOrgUserForCurrentOrg)) + apiRoute.Group("/org", func(orgRoute routing.RouteRegister) { + orgRoute.Put("/", bind(dtos.UpdateOrgForm{}), Wrap(UpdateOrgCurrent)) + orgRoute.Put("/address", bind(dtos.UpdateOrgAddressForm{}), Wrap(UpdateOrgAddressCurrent)) + orgRoute.Post("/users", quota("user"), bind(m.AddOrgUserCommand{}), Wrap(AddOrgUserToCurrentOrg)) + orgRoute.Patch("/users/:userId", bind(m.UpdateOrgUserCommand{}), Wrap(UpdateOrgUserForCurrentOrg)) + orgRoute.Delete("/users/:userId", Wrap(RemoveOrgUserForCurrentOrg)) // invites - orgRoute.Get("/invites", wrap(GetPendingOrgInvites)) - orgRoute.Post("/invites", quota("user"), bind(dtos.AddInviteForm{}), wrap(AddOrgInvite)) - orgRoute.Patch("/invites/:code/revoke", wrap(RevokeInvite)) + orgRoute.Get("/invites", Wrap(GetPendingOrgInvites)) + orgRoute.Post("/invites", quota("user"), bind(dtos.AddInviteForm{}), Wrap(AddOrgInvite)) + orgRoute.Patch("/invites/:code/revoke", Wrap(RevokeInvite)) // prefs - orgRoute.Get("/preferences", wrap(GetOrgPreferences)) - orgRoute.Put("/preferences", bind(dtos.UpdatePrefsCmd{}), wrap(UpdateOrgPreferences)) + orgRoute.Get("/preferences", Wrap(GetOrgPreferences)) + orgRoute.Put("/preferences", bind(dtos.UpdatePrefsCmd{}), Wrap(UpdateOrgPreferences)) }, reqOrgAdmin) // current org without requirement of user to be org admin - apiRoute.Group("/org", func(orgRoute RouteRegister) { - orgRoute.Get("/users", wrap(GetOrgUsersForCurrentOrg)) + apiRoute.Group("/org", func(orgRoute routing.RouteRegister) { + orgRoute.Get("/users", Wrap(GetOrgUsersForCurrentOrg)) }) // create new org - apiRoute.Post("/orgs", quota("org"), bind(m.CreateOrgCommand{}), wrap(CreateOrg)) + apiRoute.Post("/orgs", quota("org"), bind(m.CreateOrgCommand{}), Wrap(CreateOrg)) // search all orgs - apiRoute.Get("/orgs", reqGrafanaAdmin, wrap(SearchOrgs)) + apiRoute.Get("/orgs", reqGrafanaAdmin, Wrap(SearchOrgs)) // orgs (admin routes) - apiRoute.Group("/orgs/:orgId", func(orgsRoute RouteRegister) { - orgsRoute.Get("/", wrap(GetOrgByID)) - orgsRoute.Put("/", bind(dtos.UpdateOrgForm{}), wrap(UpdateOrg)) - orgsRoute.Put("/address", bind(dtos.UpdateOrgAddressForm{}), wrap(UpdateOrgAddress)) - orgsRoute.Delete("/", wrap(DeleteOrgByID)) - orgsRoute.Get("/users", wrap(GetOrgUsers)) - orgsRoute.Post("/users", bind(m.AddOrgUserCommand{}), wrap(AddOrgUser)) - orgsRoute.Patch("/users/:userId", bind(m.UpdateOrgUserCommand{}), wrap(UpdateOrgUser)) - orgsRoute.Delete("/users/:userId", wrap(RemoveOrgUser)) - orgsRoute.Get("/quotas", wrap(GetOrgQuotas)) - orgsRoute.Put("/quotas/:target", bind(m.UpdateOrgQuotaCmd{}), wrap(UpdateOrgQuota)) + apiRoute.Group("/orgs/:orgId", func(orgsRoute routing.RouteRegister) { + orgsRoute.Get("/", Wrap(GetOrgByID)) + orgsRoute.Put("/", bind(dtos.UpdateOrgForm{}), 
Wrap(UpdateOrg)) + orgsRoute.Put("/address", bind(dtos.UpdateOrgAddressForm{}), Wrap(UpdateOrgAddress)) + orgsRoute.Delete("/", Wrap(DeleteOrgByID)) + orgsRoute.Get("/users", Wrap(GetOrgUsers)) + orgsRoute.Post("/users", bind(m.AddOrgUserCommand{}), Wrap(AddOrgUser)) + orgsRoute.Patch("/users/:userId", bind(m.UpdateOrgUserCommand{}), Wrap(UpdateOrgUser)) + orgsRoute.Delete("/users/:userId", Wrap(RemoveOrgUser)) + orgsRoute.Get("/quotas", Wrap(GetOrgQuotas)) + orgsRoute.Put("/quotas/:target", bind(m.UpdateOrgQuotaCmd{}), Wrap(UpdateOrgQuota)) }, reqGrafanaAdmin) // orgs (admin routes) - apiRoute.Group("/orgs/name/:name", func(orgsRoute RouteRegister) { - orgsRoute.Get("/", wrap(GetOrgByName)) + apiRoute.Group("/orgs/name/:name", func(orgsRoute routing.RouteRegister) { + orgsRoute.Get("/", Wrap(GetOrgByName)) }, reqGrafanaAdmin) // auth api keys - apiRoute.Group("/auth/keys", func(keysRoute RouteRegister) { - keysRoute.Get("/", wrap(GetAPIKeys)) - keysRoute.Post("/", quota("api_key"), bind(m.AddApiKeyCommand{}), wrap(AddAPIKey)) - keysRoute.Delete("/:id", wrap(DeleteAPIKey)) + apiRoute.Group("/auth/keys", func(keysRoute routing.RouteRegister) { + keysRoute.Get("/", Wrap(GetAPIKeys)) + keysRoute.Post("/", quota("api_key"), bind(m.AddApiKeyCommand{}), Wrap(AddAPIKey)) + keysRoute.Delete("/:id", Wrap(DeleteAPIKey)) }, reqOrgAdmin) // Preferences - apiRoute.Group("/preferences", func(prefRoute RouteRegister) { - prefRoute.Post("/set-home-dash", bind(m.SavePreferencesCommand{}), wrap(SetHomeDashboard)) + apiRoute.Group("/preferences", func(prefRoute routing.RouteRegister) { + prefRoute.Post("/set-home-dash", bind(m.SavePreferencesCommand{}), Wrap(SetHomeDashboard)) }) // Data sources - apiRoute.Group("/datasources", func(datasourceRoute RouteRegister) { - datasourceRoute.Get("/", wrap(GetDataSources)) - datasourceRoute.Post("/", quota("data_source"), bind(m.AddDataSourceCommand{}), wrap(AddDataSource)) - datasourceRoute.Put("/:id", bind(m.UpdateDataSourceCommand{}), wrap(UpdateDataSource)) - datasourceRoute.Delete("/:id", wrap(DeleteDataSourceByID)) - datasourceRoute.Delete("/name/:name", wrap(DeleteDataSourceByName)) - datasourceRoute.Get("/:id", wrap(GetDataSourceByID)) - datasourceRoute.Get("/name/:name", wrap(GetDataSourceByName)) + apiRoute.Group("/datasources", func(datasourceRoute routing.RouteRegister) { + datasourceRoute.Get("/", Wrap(GetDataSources)) + datasourceRoute.Post("/", quota("data_source"), bind(m.AddDataSourceCommand{}), Wrap(AddDataSource)) + datasourceRoute.Put("/:id", bind(m.UpdateDataSourceCommand{}), Wrap(UpdateDataSource)) + datasourceRoute.Delete("/:id", Wrap(DeleteDataSourceByID)) + datasourceRoute.Delete("/name/:name", Wrap(DeleteDataSourceByName)) + datasourceRoute.Get("/:id", Wrap(GetDataSourceByID)) + datasourceRoute.Get("/name/:name", Wrap(GetDataSourceByName)) }, reqOrgAdmin) - apiRoute.Get("/datasources/id/:name", wrap(GetDataSourceIDByName), reqSignedIn) + apiRoute.Get("/datasources/id/:name", Wrap(GetDataSourceIDByName), reqSignedIn) - apiRoute.Get("/plugins", wrap(GetPluginList)) - apiRoute.Get("/plugins/:pluginId/settings", wrap(GetPluginSettingByID)) - apiRoute.Get("/plugins/:pluginId/markdown/:name", wrap(GetPluginMarkdown)) + apiRoute.Get("/plugins", Wrap(GetPluginList)) + apiRoute.Get("/plugins/:pluginId/settings", Wrap(GetPluginSettingByID)) + apiRoute.Get("/plugins/:pluginId/markdown/:name", Wrap(GetPluginMarkdown)) - apiRoute.Group("/plugins", func(pluginRoute RouteRegister) { - pluginRoute.Get("/:pluginId/dashboards/", wrap(GetPluginDashboards)) - 
pluginRoute.Post("/:pluginId/settings", bind(m.UpdatePluginSettingCmd{}), wrap(UpdatePluginSetting)) + apiRoute.Group("/plugins", func(pluginRoute routing.RouteRegister) { + pluginRoute.Get("/:pluginId/dashboards/", Wrap(GetPluginDashboards)) + pluginRoute.Post("/:pluginId/settings", bind(m.UpdatePluginSettingCmd{}), Wrap(UpdatePluginSetting)) }, reqOrgAdmin) apiRoute.Get("/frontend/settings/", GetFrontendSettings) @@ -256,125 +255,125 @@ func (hs *HTTPServer) registerRoutes() { apiRoute.Any("/datasources/proxy/:id", reqSignedIn, hs.ProxyDataSourceRequest) // Folders - apiRoute.Group("/folders", func(folderRoute RouteRegister) { - folderRoute.Get("/", wrap(GetFolders)) - folderRoute.Get("/id/:id", wrap(GetFolderByID)) - folderRoute.Post("/", bind(m.CreateFolderCommand{}), wrap(CreateFolder)) + apiRoute.Group("/folders", func(folderRoute routing.RouteRegister) { + folderRoute.Get("/", Wrap(GetFolders)) + folderRoute.Get("/id/:id", Wrap(GetFolderByID)) + folderRoute.Post("/", bind(m.CreateFolderCommand{}), Wrap(CreateFolder)) - folderRoute.Group("/:uid", func(folderUidRoute RouteRegister) { - folderUidRoute.Get("/", wrap(GetFolderByUID)) - folderUidRoute.Put("/", bind(m.UpdateFolderCommand{}), wrap(UpdateFolder)) - folderUidRoute.Delete("/", wrap(DeleteFolder)) + folderRoute.Group("/:uid", func(folderUidRoute routing.RouteRegister) { + folderUidRoute.Get("/", Wrap(GetFolderByUID)) + folderUidRoute.Put("/", bind(m.UpdateFolderCommand{}), Wrap(UpdateFolder)) + folderUidRoute.Delete("/", Wrap(DeleteFolder)) - folderUidRoute.Group("/permissions", func(folderPermissionRoute RouteRegister) { - folderPermissionRoute.Get("/", wrap(GetFolderPermissionList)) - folderPermissionRoute.Post("/", bind(dtos.UpdateDashboardAclCommand{}), wrap(UpdateFolderPermissions)) + folderUidRoute.Group("/permissions", func(folderPermissionRoute routing.RouteRegister) { + folderPermissionRoute.Get("/", Wrap(GetFolderPermissionList)) + folderPermissionRoute.Post("/", bind(dtos.UpdateDashboardAclCommand{}), Wrap(UpdateFolderPermissions)) }) }) }) // Dashboard - apiRoute.Group("/dashboards", func(dashboardRoute RouteRegister) { - dashboardRoute.Get("/uid/:uid", wrap(GetDashboard)) - dashboardRoute.Delete("/uid/:uid", wrap(DeleteDashboardByUID)) + apiRoute.Group("/dashboards", func(dashboardRoute routing.RouteRegister) { + dashboardRoute.Get("/uid/:uid", Wrap(GetDashboard)) + dashboardRoute.Delete("/uid/:uid", Wrap(DeleteDashboardByUID)) - dashboardRoute.Get("/db/:slug", wrap(GetDashboard)) - dashboardRoute.Delete("/db/:slug", wrap(DeleteDashboard)) + dashboardRoute.Get("/db/:slug", Wrap(GetDashboard)) + dashboardRoute.Delete("/db/:slug", Wrap(DeleteDashboard)) - dashboardRoute.Post("/calculate-diff", bind(dtos.CalculateDiffOptions{}), wrap(CalculateDashboardDiff)) + dashboardRoute.Post("/calculate-diff", bind(dtos.CalculateDiffOptions{}), Wrap(CalculateDashboardDiff)) - dashboardRoute.Post("/db", bind(m.SaveDashboardCommand{}), wrap(PostDashboard)) - dashboardRoute.Get("/home", wrap(GetHomeDashboard)) + dashboardRoute.Post("/db", bind(m.SaveDashboardCommand{}), Wrap(PostDashboard)) + dashboardRoute.Get("/home", Wrap(GetHomeDashboard)) dashboardRoute.Get("/tags", GetDashboardTags) - dashboardRoute.Post("/import", bind(dtos.ImportDashboardCommand{}), wrap(ImportDashboard)) + dashboardRoute.Post("/import", bind(dtos.ImportDashboardCommand{}), Wrap(ImportDashboard)) - dashboardRoute.Group("/id/:dashboardId", func(dashIdRoute RouteRegister) { - dashIdRoute.Get("/versions", wrap(GetDashboardVersions)) - 
dashIdRoute.Get("/versions/:id", wrap(GetDashboardVersion)) - dashIdRoute.Post("/restore", bind(dtos.RestoreDashboardVersionCommand{}), wrap(RestoreDashboardVersion)) + dashboardRoute.Group("/id/:dashboardId", func(dashIdRoute routing.RouteRegister) { + dashIdRoute.Get("/versions", Wrap(GetDashboardVersions)) + dashIdRoute.Get("/versions/:id", Wrap(GetDashboardVersion)) + dashIdRoute.Post("/restore", bind(dtos.RestoreDashboardVersionCommand{}), Wrap(RestoreDashboardVersion)) - dashIdRoute.Group("/permissions", func(dashboardPermissionRoute RouteRegister) { - dashboardPermissionRoute.Get("/", wrap(GetDashboardPermissionList)) - dashboardPermissionRoute.Post("/", bind(dtos.UpdateDashboardAclCommand{}), wrap(UpdateDashboardPermissions)) + dashIdRoute.Group("/permissions", func(dashboardPermissionRoute routing.RouteRegister) { + dashboardPermissionRoute.Get("/", Wrap(GetDashboardPermissionList)) + dashboardPermissionRoute.Post("/", bind(dtos.UpdateDashboardAclCommand{}), Wrap(UpdateDashboardPermissions)) }) }) }) // Dashboard snapshots - apiRoute.Group("/dashboard/snapshots", func(dashboardRoute RouteRegister) { - dashboardRoute.Get("/", wrap(SearchDashboardSnapshots)) + apiRoute.Group("/dashboard/snapshots", func(dashboardRoute routing.RouteRegister) { + dashboardRoute.Get("/", Wrap(SearchDashboardSnapshots)) }) // Playlist - apiRoute.Group("/playlists", func(playlistRoute RouteRegister) { - playlistRoute.Get("/", wrap(SearchPlaylists)) - playlistRoute.Get("/:id", ValidateOrgPlaylist, wrap(GetPlaylist)) - playlistRoute.Get("/:id/items", ValidateOrgPlaylist, wrap(GetPlaylistItems)) - playlistRoute.Get("/:id/dashboards", ValidateOrgPlaylist, wrap(GetPlaylistDashboards)) - playlistRoute.Delete("/:id", reqEditorRole, ValidateOrgPlaylist, wrap(DeletePlaylist)) - playlistRoute.Put("/:id", reqEditorRole, bind(m.UpdatePlaylistCommand{}), ValidateOrgPlaylist, wrap(UpdatePlaylist)) - playlistRoute.Post("/", reqEditorRole, bind(m.CreatePlaylistCommand{}), wrap(CreatePlaylist)) + apiRoute.Group("/playlists", func(playlistRoute routing.RouteRegister) { + playlistRoute.Get("/", Wrap(SearchPlaylists)) + playlistRoute.Get("/:id", ValidateOrgPlaylist, Wrap(GetPlaylist)) + playlistRoute.Get("/:id/items", ValidateOrgPlaylist, Wrap(GetPlaylistItems)) + playlistRoute.Get("/:id/dashboards", ValidateOrgPlaylist, Wrap(GetPlaylistDashboards)) + playlistRoute.Delete("/:id", reqEditorRole, ValidateOrgPlaylist, Wrap(DeletePlaylist)) + playlistRoute.Put("/:id", reqEditorRole, bind(m.UpdatePlaylistCommand{}), ValidateOrgPlaylist, Wrap(UpdatePlaylist)) + playlistRoute.Post("/", reqEditorRole, bind(m.CreatePlaylistCommand{}), Wrap(CreatePlaylist)) }) // Search apiRoute.Get("/search/", Search) // metrics - apiRoute.Post("/tsdb/query", bind(dtos.MetricRequest{}), wrap(QueryMetrics)) - apiRoute.Get("/tsdb/testdata/scenarios", wrap(GetTestDataScenarios)) - apiRoute.Get("/tsdb/testdata/gensql", reqGrafanaAdmin, wrap(GenerateSQLTestData)) - apiRoute.Get("/tsdb/testdata/random-walk", wrap(GetTestDataRandomWalk)) + apiRoute.Post("/tsdb/query", bind(dtos.MetricRequest{}), Wrap(QueryMetrics)) + apiRoute.Get("/tsdb/testdata/scenarios", Wrap(GetTestDataScenarios)) + apiRoute.Get("/tsdb/testdata/gensql", reqGrafanaAdmin, Wrap(GenerateSQLTestData)) + apiRoute.Get("/tsdb/testdata/random-walk", Wrap(GetTestDataRandomWalk)) - apiRoute.Group("/alerts", func(alertsRoute RouteRegister) { - alertsRoute.Post("/test", bind(dtos.AlertTestCommand{}), wrap(AlertTest)) - alertsRoute.Post("/:alertId/pause", reqEditorRole, 
bind(dtos.PauseAlertCommand{}), wrap(PauseAlert)) - alertsRoute.Get("/:alertId", ValidateOrgAlert, wrap(GetAlert)) - alertsRoute.Get("/", wrap(GetAlerts)) - alertsRoute.Get("/states-for-dashboard", wrap(GetAlertStatesForDashboard)) + apiRoute.Group("/alerts", func(alertsRoute routing.RouteRegister) { + alertsRoute.Post("/test", bind(dtos.AlertTestCommand{}), Wrap(AlertTest)) + alertsRoute.Post("/:alertId/pause", reqEditorRole, bind(dtos.PauseAlertCommand{}), Wrap(PauseAlert)) + alertsRoute.Get("/:alertId", ValidateOrgAlert, Wrap(GetAlert)) + alertsRoute.Get("/", Wrap(GetAlerts)) + alertsRoute.Get("/states-for-dashboard", Wrap(GetAlertStatesForDashboard)) }) - apiRoute.Get("/alert-notifications", wrap(GetAlertNotifications)) - apiRoute.Get("/alert-notifiers", wrap(GetAlertNotifiers)) + apiRoute.Get("/alert-notifications", Wrap(GetAlertNotifications)) + apiRoute.Get("/alert-notifiers", Wrap(GetAlertNotifiers)) - apiRoute.Group("/alert-notifications", func(alertNotifications RouteRegister) { - alertNotifications.Post("/test", bind(dtos.NotificationTestCommand{}), wrap(NotificationTest)) - alertNotifications.Post("/", bind(m.CreateAlertNotificationCommand{}), wrap(CreateAlertNotification)) - alertNotifications.Put("/:notificationId", bind(m.UpdateAlertNotificationCommand{}), wrap(UpdateAlertNotification)) - alertNotifications.Get("/:notificationId", wrap(GetAlertNotificationByID)) - alertNotifications.Delete("/:notificationId", wrap(DeleteAlertNotification)) + apiRoute.Group("/alert-notifications", func(alertNotifications routing.RouteRegister) { + alertNotifications.Post("/test", bind(dtos.NotificationTestCommand{}), Wrap(NotificationTest)) + alertNotifications.Post("/", bind(m.CreateAlertNotificationCommand{}), Wrap(CreateAlertNotification)) + alertNotifications.Put("/:notificationId", bind(m.UpdateAlertNotificationCommand{}), Wrap(UpdateAlertNotification)) + alertNotifications.Get("/:notificationId", Wrap(GetAlertNotificationByID)) + alertNotifications.Delete("/:notificationId", Wrap(DeleteAlertNotification)) }, reqEditorRole) - apiRoute.Get("/annotations", wrap(GetAnnotations)) - apiRoute.Post("/annotations/mass-delete", reqOrgAdmin, bind(dtos.DeleteAnnotationsCmd{}), wrap(DeleteAnnotations)) + apiRoute.Get("/annotations", Wrap(GetAnnotations)) + apiRoute.Post("/annotations/mass-delete", reqOrgAdmin, bind(dtos.DeleteAnnotationsCmd{}), Wrap(DeleteAnnotations)) - apiRoute.Group("/annotations", func(annotationsRoute RouteRegister) { - annotationsRoute.Post("/", bind(dtos.PostAnnotationsCmd{}), wrap(PostAnnotation)) - annotationsRoute.Delete("/:annotationId", wrap(DeleteAnnotationByID)) - annotationsRoute.Put("/:annotationId", bind(dtos.UpdateAnnotationsCmd{}), wrap(UpdateAnnotation)) - annotationsRoute.Delete("/region/:regionId", wrap(DeleteAnnotationRegion)) - annotationsRoute.Post("/graphite", reqEditorRole, bind(dtos.PostGraphiteAnnotationsCmd{}), wrap(PostGraphiteAnnotation)) + apiRoute.Group("/annotations", func(annotationsRoute routing.RouteRegister) { + annotationsRoute.Post("/", bind(dtos.PostAnnotationsCmd{}), Wrap(PostAnnotation)) + annotationsRoute.Delete("/:annotationId", Wrap(DeleteAnnotationByID)) + annotationsRoute.Put("/:annotationId", bind(dtos.UpdateAnnotationsCmd{}), Wrap(UpdateAnnotation)) + annotationsRoute.Delete("/region/:regionId", Wrap(DeleteAnnotationRegion)) + annotationsRoute.Post("/graphite", reqEditorRole, bind(dtos.PostGraphiteAnnotationsCmd{}), Wrap(PostGraphiteAnnotation)) }) // error test - r.Get("/metrics/error", wrap(GenerateError)) + 
r.Get("/metrics/error", Wrap(GenerateError)) }, reqSignedIn) // admin api - r.Group("/api/admin", func(adminRoute RouteRegister) { + r.Group("/api/admin", func(adminRoute routing.RouteRegister) { adminRoute.Get("/settings", AdminGetSettings) adminRoute.Post("/users", bind(dtos.AdminCreateUserForm{}), AdminCreateUser) adminRoute.Put("/users/:id/password", bind(dtos.AdminUpdateUserPasswordForm{}), AdminUpdateUserPassword) adminRoute.Put("/users/:id/permissions", bind(dtos.AdminUpdateUserPermissionsForm{}), AdminUpdateUserPermissions) adminRoute.Delete("/users/:id", AdminDeleteUser) - adminRoute.Get("/users/:id/quotas", wrap(GetUserQuotas)) - adminRoute.Put("/users/:id/quotas/:target", bind(m.UpdateUserQuotaCmd{}), wrap(UpdateUserQuota)) + adminRoute.Get("/users/:id/quotas", Wrap(GetUserQuotas)) + adminRoute.Put("/users/:id/quotas/:target", bind(m.UpdateUserQuotaCmd{}), Wrap(UpdateUserQuota)) adminRoute.Get("/stats", AdminGetStats) - adminRoute.Post("/pause-all-alerts", bind(dtos.PauseAllAlertsCommand{}), wrap(PauseAllAlerts)) + adminRoute.Post("/pause-all-alerts", bind(dtos.PauseAllAlertsCommand{}), Wrap(PauseAllAlerts)) }, reqGrafanaAdmin) // rendering - r.Get("/render/*", reqSignedIn, RenderToPng) + r.Get("/render/*", reqSignedIn, hs.RenderToPng) // grafana.net proxy r.Any("/api/gnet/*", reqSignedIn, ProxyGnetRequest) @@ -388,10 +387,4 @@ func (hs *HTTPServer) registerRoutes() { // streams //r.Post("/api/streams/push", reqSignedIn, bind(dtos.StreamMessage{}), liveConn.PushToStream) - - r.Register(macaronR) - - InitAppPluginRoutes(macaronR) - - macaronR.NotFound(NotFoundHandler) } diff --git a/pkg/api/app_routes.go b/pkg/api/app_routes.go index 0b7dcd32ce3..a2137089fc6 100644 --- a/pkg/api/app_routes.go +++ b/pkg/api/app_routes.go @@ -18,7 +18,7 @@ import ( var pluginProxyTransport *http.Transport -func InitAppPluginRoutes(r *macaron.Macaron) { +func (hs *HTTPServer) initAppPluginRoutes(r *macaron.Macaron) { pluginProxyTransport = &http.Transport{ TLSClientConfig: &tls.Config{ InsecureSkipVerify: setting.PluginAppsSkipVerifyTLS, diff --git a/pkg/api/common.go b/pkg/api/common.go index 97f41ff7c72..7973c72c8fa 100644 --- a/pkg/api/common.go +++ b/pkg/api/common.go @@ -30,7 +30,7 @@ type NormalResponse struct { err error } -func wrap(action interface{}) macaron.Handler { +func Wrap(action interface{}) macaron.Handler { return func(c *m.ReqContext) { var res Response diff --git a/pkg/api/common_test.go b/pkg/api/common_test.go index a4a547d8bbf..8b66a7a468b 100644 --- a/pkg/api/common_test.go +++ b/pkg/api/common_test.go @@ -23,7 +23,7 @@ func loggedInUserScenarioWithRole(desc string, method string, url string, routeP defer bus.ClearBusHandlers() sc := setupScenarioContext(url) - sc.defaultHandler = wrap(func(c *m.ReqContext) Response { + sc.defaultHandler = Wrap(func(c *m.ReqContext) Response { sc.context = c sc.context.UserId = TestUserID sc.context.OrgId = TestOrgID @@ -46,6 +46,31 @@ func loggedInUserScenarioWithRole(desc string, method string, url string, routeP }) } +func anonymousUserScenario(desc string, method string, url string, routePattern string, fn scenarioFunc) { + Convey(desc+" "+url, func() { + defer bus.ClearBusHandlers() + + sc := setupScenarioContext(url) + sc.defaultHandler = Wrap(func(c *m.ReqContext) Response { + sc.context = c + if sc.handlerFunc != nil { + return sc.handlerFunc(sc.context) + } + + return nil + }) + + switch method { + case "GET": + sc.m.Get(routePattern, sc.defaultHandler) + case "DELETE": + sc.m.Delete(routePattern, sc.defaultHandler) + } + + fn(sc) + 
}) +} + func (sc *scenarioContext) fakeReq(method, url string) *scenarioContext { sc.resp = httptest.NewRecorder() req, err := http.NewRequest(method, url, nil) diff --git a/pkg/api/dashboard_permission_test.go b/pkg/api/dashboard_permission_test.go index 24f0bdca365..f65c5f1f5fa 100644 --- a/pkg/api/dashboard_permission_test.go +++ b/pkg/api/dashboard_permission_test.go @@ -194,7 +194,7 @@ func updateDashboardPermissionScenario(desc string, url string, routePattern str sc := setupScenarioContext(url) - sc.defaultHandler = wrap(func(c *m.ReqContext) Response { + sc.defaultHandler = Wrap(func(c *m.ReqContext) Response { sc.context = c sc.context.OrgId = TestOrgID sc.context.UserId = TestUserID diff --git a/pkg/api/dashboard_snapshot.go b/pkg/api/dashboard_snapshot.go index f474d357df5..e4e9c9d040f 100644 --- a/pkg/api/dashboard_snapshot.go +++ b/pkg/api/dashboard_snapshot.go @@ -91,11 +91,31 @@ func GetDashboardSnapshot(c *m.ReqContext) { c.JSON(200, dto) } -// GET /api/snapshots-delete/:key +// GET /api/snapshots-delete/:deleteKey +func DeleteDashboardSnapshotByDeleteKey(c *m.ReqContext) Response { + key := c.Params(":deleteKey") + + query := &m.GetDashboardSnapshotQuery{DeleteKey: key} + + err := bus.Dispatch(query) + if err != nil { + return Error(500, "Failed to get dashboard snapshot", err) + } + + cmd := &m.DeleteDashboardSnapshotCommand{DeleteKey: query.Result.DeleteKey} + + if err := bus.Dispatch(cmd); err != nil { + return Error(500, "Failed to delete dashboard snapshot", err) + } + + return JSON(200, util.DynMap{"message": "Snapshot deleted. It might take an hour before it's cleared from any CDN caches."}) +} + +// DELETE /api/snapshots/:key func DeleteDashboardSnapshot(c *m.ReqContext) Response { key := c.Params(":key") - query := &m.GetDashboardSnapshotQuery{DeleteKey: key} + query := &m.GetDashboardSnapshotQuery{Key: key} err := bus.Dispatch(query) if err != nil { @@ -118,13 +138,13 @@ func DeleteDashboardSnapshot(c *m.ReqContext) Response { return Error(403, "Access denied to this snapshot", nil) } - cmd := &m.DeleteDashboardSnapshotCommand{DeleteKey: key} + cmd := &m.DeleteDashboardSnapshotCommand{DeleteKey: query.Result.DeleteKey} if err := bus.Dispatch(cmd); err != nil { return Error(500, "Failed to delete dashboard snapshot", err) } - return JSON(200, util.DynMap{"message": "Snapshot deleted. It might take an hour before it's cleared from a CDN cache."}) + return JSON(200, util.DynMap{"message": "Snapshot deleted. 
It might take an hour before it's cleared from any CDN caches."}) } // GET /api/dashboard/snapshots @@ -154,7 +174,6 @@ func SearchDashboardSnapshots(c *m.ReqContext) Response { Id: snapshot.Id, Name: snapshot.Name, Key: snapshot.Key, - DeleteKey: snapshot.DeleteKey, OrgId: snapshot.OrgId, UserId: snapshot.UserId, External: snapshot.External, diff --git a/pkg/api/dashboard_snapshot_test.go b/pkg/api/dashboard_snapshot_test.go index 87c2b9e99d4..e58f2c4712d 100644 --- a/pkg/api/dashboard_snapshot_test.go +++ b/pkg/api/dashboard_snapshot_test.go @@ -39,7 +39,7 @@ func TestDashboardSnapshotApiEndpoint(t *testing.T) { return nil }) - teamResp := []*m.Team{} + teamResp := []*m.TeamDTO{} bus.AddHandler("test", func(query *m.GetTeamsByUserQuery) error { query.Result = teamResp return nil @@ -47,15 +47,30 @@ func TestDashboardSnapshotApiEndpoint(t *testing.T) { Convey("When user has editor role and is not in the ACL", func() { Convey("Should not be able to delete snapshot", func() { - loggedInUserScenarioWithRole("When calling GET on", "GET", "/api/snapshots-delete/12345", "/api/snapshots-delete/:key", m.ROLE_EDITOR, func(sc *scenarioContext) { + loggedInUserScenarioWithRole("When calling DELETE on", "DELETE", "/api/snapshots/12345", "/api/snapshots/:key", m.ROLE_EDITOR, func(sc *scenarioContext) { sc.handlerFunc = DeleteDashboardSnapshot - sc.fakeReqWithParams("GET", sc.url, map[string]string{"key": "12345"}).exec() + sc.fakeReqWithParams("DELETE", sc.url, map[string]string{"key": "12345"}).exec() So(sc.resp.Code, ShouldEqual, 403) }) }) }) + Convey("When user is anonymous", func() { + Convey("Should be able to delete snapshot by deleteKey", func() { + anonymousUserScenario("When calling GET on", "GET", "/api/snapshots-delete/12345", "/api/snapshots-delete/:deleteKey", func(sc *scenarioContext) { + sc.handlerFunc = DeleteDashboardSnapshotByDeleteKey + sc.fakeReqWithParams("GET", sc.url, map[string]string{"deleteKey": "12345"}).exec() + + So(sc.resp.Code, ShouldEqual, 200) + respJSON, err := simplejson.NewJson(sc.resp.Body.Bytes()) + So(err, ShouldBeNil) + + So(respJSON.Get("message").MustString(), ShouldStartWith, "Snapshot deleted") + }) + }) + }) + Convey("When user is editor and dashboard has default ACL", func() { aclMockResp = []*m.DashboardAclInfoDTO{ {Role: &viewerRole, Permission: m.PERMISSION_VIEW}, @@ -63,9 +78,9 @@ func TestDashboardSnapshotApiEndpoint(t *testing.T) { } Convey("Should be able to delete a snapshot", func() { - loggedInUserScenarioWithRole("When calling GET on", "GET", "/api/snapshots-delete/12345", "/api/snapshots-delete/:key", m.ROLE_EDITOR, func(sc *scenarioContext) { + loggedInUserScenarioWithRole("When calling DELETE on", "DELETE", "/api/snapshots/12345", "/api/snapshots/:key", m.ROLE_EDITOR, func(sc *scenarioContext) { sc.handlerFunc = DeleteDashboardSnapshot - sc.fakeReqWithParams("GET", sc.url, map[string]string{"key": "12345"}).exec() + sc.fakeReqWithParams("DELETE", sc.url, map[string]string{"key": "12345"}).exec() So(sc.resp.Code, ShouldEqual, 200) respJSON, err := simplejson.NewJson(sc.resp.Body.Bytes()) @@ -81,9 +96,9 @@ func TestDashboardSnapshotApiEndpoint(t *testing.T) { mockSnapshotResult.UserId = TestUserID Convey("Should be able to delete a snapshot", func() { - loggedInUserScenarioWithRole("When calling GET on", "GET", "/api/snapshots-delete/12345", "/api/snapshots-delete/:key", m.ROLE_EDITOR, func(sc *scenarioContext) { + loggedInUserScenarioWithRole("When calling DELETE on", "DELETE", "/api/snapshots/12345", "/api/snapshots/:key", m.ROLE_EDITOR, 
func(sc *scenarioContext) {
 					sc.handlerFunc = DeleteDashboardSnapshot
-					sc.fakeReqWithParams("GET", sc.url, map[string]string{"key": "12345"}).exec()
+					sc.fakeReqWithParams("DELETE", sc.url, map[string]string{"key": "12345"}).exec()
 
 					So(sc.resp.Code, ShouldEqual, 200)
 					respJSON, err := simplejson.NewJson(sc.resp.Body.Bytes())
diff --git a/pkg/api/dashboard_test.go b/pkg/api/dashboard_test.go
index ccde2382787..283a9b5f12c 100644
--- a/pkg/api/dashboard_test.go
+++ b/pkg/api/dashboard_test.go
@@ -61,7 +61,7 @@ func TestDashboardApiEndpoint(t *testing.T) {
 		})
 
 		bus.AddHandler("test", func(query *m.GetTeamsByUserQuery) error {
-			query.Result = []*m.Team{}
+			query.Result = []*m.TeamDTO{}
 			return nil
 		})
 
@@ -230,7 +230,7 @@ func TestDashboardApiEndpoint(t *testing.T) {
 		})
 
 		bus.AddHandler("test", func(query *m.GetTeamsByUserQuery) error {
-			query.Result = []*m.Team{}
+			query.Result = []*m.TeamDTO{}
 			return nil
 		})
 
@@ -882,7 +882,7 @@ func postDashboardScenario(desc string, url string, routePattern string, mock *d
 		defer bus.ClearBusHandlers()
 
 		sc := setupScenarioContext(url)
-		sc.defaultHandler = wrap(func(c *m.ReqContext) Response {
+		sc.defaultHandler = Wrap(func(c *m.ReqContext) Response {
 			sc.context = c
 			sc.context.SignedInUser = &m.SignedInUser{OrgId: cmd.OrgId, UserId: cmd.UserId}
 
@@ -907,7 +907,7 @@ func postDiffScenario(desc string, url string, routePattern string, cmd dtos.Cal
 		defer bus.ClearBusHandlers()
 
 		sc := setupScenarioContext(url)
-		sc.defaultHandler = wrap(func(c *m.ReqContext) Response {
+		sc.defaultHandler = Wrap(func(c *m.ReqContext) Response {
 			sc.context = c
 			sc.context.SignedInUser = &m.SignedInUser{
 				OrgId:  TestOrgID,
diff --git a/pkg/api/datasources.go b/pkg/api/datasources.go
index 99677a93ee6..6ffefea991a 100644
--- a/pkg/api/datasources.go
+++ b/pkg/api/datasources.go
@@ -103,6 +103,9 @@ func DeleteDataSourceByName(c *m.ReqContext) Response {
 	getCmd := &m.GetDataSourceByNameQuery{Name: name, OrgId: c.OrgId}
 	if err := bus.Dispatch(getCmd); err != nil {
+		if err == m.ErrDataSourceNotFound {
+			return Error(404, "Data source not found", nil)
+		}
 		return Error(500, "Failed to delete datasource", err)
 	}
 
diff --git a/pkg/api/datasources_test.go b/pkg/api/datasources_test.go
index 490393727d6..6e52a27758b 100644
--- a/pkg/api/datasources_test.go
+++ b/pkg/api/datasources_test.go
@@ -46,5 +46,13 @@ func TestDataSourcesProxy(t *testing.T) {
 			So(respJSON[3]["name"], ShouldEqual, "ZZZ")
 		})
 	})
+
+	Convey("Should return 404 when deleting a non-existing data source", func() {
+		loggedInUserScenario("When calling DELETE on non-existing", "/api/datasources/name/12345", func(sc *scenarioContext) {
+			sc.handlerFunc = DeleteDataSourceByName
+			sc.fakeReqWithParams("DELETE", sc.url, map[string]string{}).exec()
+			So(sc.resp.Code, ShouldEqual, 404)
+		})
+	})
 })
}
diff --git a/pkg/api/dtos/index.go b/pkg/api/dtos/index.go
index 8c7f505277d..77004899fc3 100644
--- a/pkg/api/dtos/index.go
+++ b/pkg/api/dtos/index.go
@@ -13,6 +13,7 @@ type IndexViewData struct {
 	Theme                   string
 	NewGrafanaVersionExists bool
 	NewGrafanaVersion       string
+	AppName                 string
 }
 
 type PluginCss struct {
diff --git a/pkg/api/dtos/models.go b/pkg/api/dtos/models.go
index aead67cd04c..6a130e62158 100644
--- a/pkg/api/dtos/models.go
+++ b/pkg/api/dtos/models.go
@@ -52,7 +52,7 @@ type UserStars struct {
 
 func GetGravatarUrl(text string) string {
 	if setting.DisableGravatar {
-		return "/public/img/user_profile.png"
+		return setting.AppSubUrl + "/public/img/user_profile.png"
 	}
 
 	if text == "" {
diff --git a/pkg/api/dtos/plugins.go b/pkg/api/dtos/plugins.go
index
f4281f877b3..78a611c5eeb 100644 --- a/pkg/api/dtos/plugins.go +++ b/pkg/api/dtos/plugins.go @@ -57,4 +57,5 @@ type ImportDashboardCommand struct { Overwrite bool `json:"overwrite"` Dashboard *simplejson.Json `json:"dashboard"` Inputs []plugins.ImportDashboardInput `json:"inputs"` + FolderId int64 `json:"folderId"` } diff --git a/pkg/api/folder_permission_test.go b/pkg/api/folder_permission_test.go index f7458af6dce..64a746ca937 100644 --- a/pkg/api/folder_permission_test.go +++ b/pkg/api/folder_permission_test.go @@ -226,7 +226,7 @@ func updateFolderPermissionScenario(desc string, url string, routePattern string sc := setupScenarioContext(url) - sc.defaultHandler = wrap(func(c *m.ReqContext) Response { + sc.defaultHandler = Wrap(func(c *m.ReqContext) Response { sc.context = c sc.context.OrgId = TestOrgID sc.context.UserId = TestUserID diff --git a/pkg/api/folder_test.go b/pkg/api/folder_test.go index 0d9b9495686..6e24e432535 100644 --- a/pkg/api/folder_test.go +++ b/pkg/api/folder_test.go @@ -152,7 +152,7 @@ func createFolderScenario(desc string, url string, routePattern string, mock *fa defer bus.ClearBusHandlers() sc := setupScenarioContext(url) - sc.defaultHandler = wrap(func(c *m.ReqContext) Response { + sc.defaultHandler = Wrap(func(c *m.ReqContext) Response { sc.context = c sc.context.SignedInUser = &m.SignedInUser{OrgId: TestOrgID, UserId: TestUserID} @@ -181,7 +181,7 @@ func updateFolderScenario(desc string, url string, routePattern string, mock *fa defer bus.ClearBusHandlers() sc := setupScenarioContext(url) - sc.defaultHandler = wrap(func(c *m.ReqContext) Response { + sc.defaultHandler = Wrap(func(c *m.ReqContext) Response { sc.context = c sc.context.SignedInUser = &m.SignedInUser{OrgId: TestOrgID, UserId: TestUserID} diff --git a/pkg/api/frontendsettings.go b/pkg/api/frontendsettings.go index 5cd52122c3f..da3c88566c1 100644 --- a/pkg/api/frontendsettings.go +++ b/pkg/api/frontendsettings.go @@ -140,6 +140,7 @@ func getFrontendSettingsMap(c *m.ReqContext) (map[string]interface{}, error) { "authProxyEnabled": setting.AuthProxyEnabled, "ldapEnabled": setting.LdapEnabled, "alertingEnabled": setting.AlertingEnabled, + "exploreEnabled": setting.ExploreEnabled, "googleAnalyticsId": setting.GoogleAnalyticsId, "disableLoginForm": setting.DisableLoginForm, "externalUserMngInfo": setting.ExternalUserMngInfo, @@ -152,6 +153,7 @@ func getFrontendSettingsMap(c *m.ReqContext) (map[string]interface{}, error) { "latestVersion": plugins.GrafanaLatestVersion, "hasUpdate": plugins.GrafanaHasUpdate, "env": setting.Env, + "isEnterprise": setting.IsEnterprise, }, } diff --git a/pkg/api/http_server.go b/pkg/api/http_server.go index 2afccb8f0d7..0de63ce5e08 100644 --- a/pkg/api/http_server.go +++ b/pkg/api/http_server.go @@ -11,6 +11,7 @@ import ( "path" "time" + "github.com/grafana/grafana/pkg/api/routing" "github.com/prometheus/client_golang/prometheus" "github.com/prometheus/client_golang/prometheus/promhttp" @@ -27,11 +28,16 @@ import ( "github.com/grafana/grafana/pkg/models" "github.com/grafana/grafana/pkg/plugins" "github.com/grafana/grafana/pkg/registry" + "github.com/grafana/grafana/pkg/services/rendering" "github.com/grafana/grafana/pkg/setting" ) func init() { - registry.RegisterService(&HTTPServer{}) + registry.Register(®istry.Descriptor{ + Name: "HTTPServer", + Instance: &HTTPServer{}, + InitPriority: registry.High, + }) } type HTTPServer struct { @@ -42,14 +48,20 @@ type HTTPServer struct { cache *gocache.Cache httpSrv *http.Server - RouteRegister RouteRegister `inject:""` - Bus bus.Bus 
`inject:""` + RouteRegister routing.RouteRegister `inject:""` + Bus bus.Bus `inject:""` + RenderService rendering.Service `inject:""` + Cfg *setting.Cfg `inject:""` } func (hs *HTTPServer) Init() error { hs.log = log.New("http.server") hs.cache = gocache.New(5*time.Minute, 10*time.Minute) + hs.streamManager = live.NewStreamManager() + hs.macaron = hs.newMacaron() + hs.registerRoutes() + return nil } @@ -57,10 +69,8 @@ func (hs *HTTPServer) Run(ctx context.Context) error { var err error hs.context = ctx - hs.streamManager = live.NewStreamManager() - hs.macaron = hs.newMacaron() - hs.registerRoutes() + hs.applyRoutes() hs.streamManager.Run(ctx) listenAddr := fmt.Sprintf("%s:%s", setting.HttpAddr, setting.HttpPort) @@ -160,6 +170,26 @@ func (hs *HTTPServer) newMacaron() *macaron.Macaron { macaron.Env = setting.Env m := macaron.New() + // automatically set HEAD for every GET + m.SetAutoHead(true) + + return m +} + +func (hs *HTTPServer) applyRoutes() { + // start with middlewares & static routes + hs.addMiddlewaresAndStaticRoutes() + // then add view routes & api routes + hs.RouteRegister.Register(hs.macaron) + // then custom app proxy routes + hs.initAppPluginRoutes(hs.macaron) + // lastly not found route + hs.macaron.NotFound(NotFoundHandler) +} + +func (hs *HTTPServer) addMiddlewaresAndStaticRoutes() { + m := hs.macaron + m.Use(middleware.Logger()) if setting.EnableGzip { @@ -171,7 +201,7 @@ func (hs *HTTPServer) newMacaron() *macaron.Macaron { for _, route := range plugins.StaticRoutes { pluginRoute := path.Join("/public/plugins/", route.PluginId) hs.log.Debug("Plugins: Adding route", "route", pluginRoute, "dir", route.Directory) - hs.mapStatic(m, route.Directory, "", pluginRoute) + hs.mapStatic(hs.macaron, route.Directory, "", pluginRoute) } hs.mapStatic(m, setting.StaticRootPath, "build", "public/build") @@ -179,7 +209,7 @@ func (hs *HTTPServer) newMacaron() *macaron.Macaron { hs.mapStatic(m, setting.StaticRootPath, "robots.txt", "robots.txt") if setting.ImageUploadProvider == "local" { - hs.mapStatic(m, setting.ImagesDir, "", "/public/img/attachments") + hs.mapStatic(m, hs.Cfg.ImagesDir, "", "/public/img/attachments") } m.Use(macaron.Renderer(macaron.RenderOptions{ @@ -200,8 +230,6 @@ func (hs *HTTPServer) newMacaron() *macaron.Macaron { } m.Use(middleware.AddDefaultResponseHeaders()) - - return m } func (hs *HTTPServer) metricsEndpoint(ctx *macaron.Context) { diff --git a/pkg/api/index.go b/pkg/api/index.go index 2a905b474ce..ea10940d3ba 100644 --- a/pkg/api/index.go +++ b/pkg/api/index.go @@ -76,6 +76,7 @@ func setIndexViewData(c *m.ReqContext) (*dtos.IndexViewData, error) { BuildCommit: setting.BuildCommit, NewGrafanaVersion: plugins.GrafanaLatestVersion, NewGrafanaVersionExists: plugins.GrafanaHasUpdate, + AppName: setting.ApplicationName, } if setting.DisableGravatar { @@ -92,17 +93,23 @@ func setIndexViewData(c *m.ReqContext) (*dtos.IndexViewData, error) { data.Theme = "light" } - if c.OrgRole == m.ROLE_ADMIN || c.OrgRole == m.ROLE_EDITOR { + if hasEditPermissionInFoldersQuery.Result { + children := []*dtos.NavLink{ + {Text: "Dashboard", Icon: "gicon gicon-dashboard-new", Url: setting.AppSubUrl + "/dashboard/new"}, + } + + if c.OrgRole == m.ROLE_ADMIN || c.OrgRole == m.ROLE_EDITOR { + children = append(children, &dtos.NavLink{Text: "Folder", SubTitle: "Create a new folder to organize your dashboards", Id: "folder", Icon: "gicon gicon-folder-new", Url: setting.AppSubUrl + "/dashboards/folder/new"}) + } + + children = append(children, &dtos.NavLink{Text: "Import", SubTitle: "Import 
dashboard from file or Grafana.com", Id: "import", Icon: "gicon gicon-dashboard-import", Url: setting.AppSubUrl + "/dashboard/import"}) + data.NavTree = append(data.NavTree, &dtos.NavLink{ - Text: "Create", - Id: "create", - Icon: "fa fa-fw fa-plus", - Url: setting.AppSubUrl + "/dashboard/new", - Children: []*dtos.NavLink{ - {Text: "Dashboard", Icon: "gicon gicon-dashboard-new", Url: setting.AppSubUrl + "/dashboard/new"}, - {Text: "Folder", SubTitle: "Create a new folder to organize your dashboards", Id: "folder", Icon: "gicon gicon-folder-new", Url: setting.AppSubUrl + "/dashboards/folder/new"}, - {Text: "Import", SubTitle: "Import dashboard from file or Grafana.com", Id: "import", Icon: "gicon gicon-dashboard-import", Url: setting.AppSubUrl + "/dashboard/import"}, - }, + Text: "Create", + Id: "create", + Icon: "fa fa-fw fa-plus", + Url: setting.AppSubUrl + "/dashboard/new", + Children: children, }) } @@ -123,7 +130,7 @@ func setIndexViewData(c *m.ReqContext) (*dtos.IndexViewData, error) { Children: dashboardChildNavs, }) - if setting.ExploreEnabled { + if setting.ExploreEnabled && (c.OrgRole == m.ROLE_ADMIN || c.OrgRole == m.ROLE_EDITOR) { data.NavTree = append(data.NavTree, &dtos.NavLink{ Text: "Explore", Id: "explore", @@ -228,7 +235,7 @@ func setIndexViewData(c *m.ReqContext) (*dtos.IndexViewData, error) { } } - if c.OrgRole == m.ROLE_ADMIN { + if c.IsGrafanaAdmin || c.OrgRole == m.ROLE_ADMIN { cfgNode := &dtos.NavLink{ Id: "cfg", Text: "Configuration", @@ -282,10 +289,24 @@ func setIndexViewData(c *m.ReqContext) (*dtos.IndexViewData, error) { }, } - if c.IsGrafanaAdmin { + if c.OrgRole != m.ROLE_ADMIN { + cfgNode = &dtos.NavLink{ + Id: "cfg", + Text: "Configuration", + SubTitle: "Organization: " + c.OrgName, + Icon: "gicon gicon-cog", + Url: setting.AppSubUrl + "/admin/users", + Children: make([]*dtos.NavLink, 0), + } + } + + if c.OrgRole == m.ROLE_ADMIN && c.IsGrafanaAdmin { cfgNode.Children = append(cfgNode.Children, &dtos.NavLink{ Divider: true, HideFromTabs: true, Id: "admin-divider", Text: "Text", }) + } + + if c.IsGrafanaAdmin { cfgNode.Children = append(cfgNode.Children, &dtos.NavLink{ Text: "Server Admin", HideFromTabs: true, diff --git a/pkg/api/login.go b/pkg/api/login.go index 9d0fa31946f..01fa71a6e44 100644 --- a/pkg/api/login.go +++ b/pkg/api/login.go @@ -155,5 +155,9 @@ func Logout(c *m.ReqContext) { c.SetCookie(setting.CookieUserName, "", -1, setting.AppSubUrl+"/") c.SetCookie(setting.CookieRememberName, "", -1, setting.AppSubUrl+"/") c.Session.Destory(c.Context) - c.Redirect(setting.AppSubUrl + "/login") + if setting.SignoutRedirectUrl != "" { + c.Redirect(setting.SignoutRedirectUrl) + } else { + c.Redirect(setting.AppSubUrl + "/login") + } } diff --git a/pkg/api/login_oauth.go b/pkg/api/login_oauth.go index c4a5f8fdacf..fe4fa93b621 100644 --- a/pkg/api/login_oauth.go +++ b/pkg/api/login_oauth.go @@ -78,6 +78,7 @@ func OAuthLogin(ctx *m.ReqContext) { // handle call back tr := &http.Transport{ + Proxy: http.ProxyFromEnvironment, TLSClientConfig: &tls.Config{ InsecureSkipVerify: setting.OAuthService.OAuthInfos[name].TlsSkipVerify, }, diff --git a/pkg/api/org_invite.go b/pkg/api/org_invite.go index d6ab1c9d372..dfb2cf045ed 100644 --- a/pkg/api/org_invite.go +++ b/pkg/api/org_invite.go @@ -74,6 +74,9 @@ func AddOrgInvite(c *m.ReqContext, inviteDto dtos.AddInviteForm) Response { } if err := bus.Dispatch(&emailCmd); err != nil { + if err == m.ErrSmtpNotEnabled { + return Error(412, err.Error(), err) + } return Error(500, "Failed to send email invite", err) } diff --git 
a/pkg/api/playlist.go b/pkg/api/playlist.go index a90b6425cb6..0963df7d4c4 100644 --- a/pkg/api/playlist.go +++ b/pkg/api/playlist.go @@ -160,6 +160,7 @@ func CreatePlaylist(c *m.ReqContext, cmd m.CreatePlaylistCommand) Response { func UpdatePlaylist(c *m.ReqContext, cmd m.UpdatePlaylistCommand) Response { cmd.OrgId = c.OrgId + cmd.Id = c.ParamsInt64(":id") if err := bus.Dispatch(&cmd); err != nil { return Error(500, "Failed to save playlist", err) diff --git a/pkg/api/pluginproxy/ds_proxy.go b/pkg/api/pluginproxy/ds_proxy.go index f4eb1140aa0..b420398f9a9 100644 --- a/pkg/api/pluginproxy/ds_proxy.go +++ b/pkg/api/pluginproxy/ds_proxy.go @@ -25,12 +25,9 @@ import ( ) var ( - logger = log.New("data-proxy-log") - client = &http.Client{ - Timeout: time.Second * 30, - Transport: &http.Transport{Proxy: http.ProxyFromEnvironment}, - } - tokenCache = map[int64]*jwtToken{} + logger = log.New("data-proxy-log") + tokenCache = map[string]*jwtToken{} + client = newHTTPClient() ) type jwtToken struct { @@ -48,6 +45,10 @@ type DataSourceProxy struct { plugin *plugins.DataSourcePlugin } +type httpClient interface { + Do(req *http.Request) (*http.Response, error) +} + func NewDataSourceProxy(ds *m.DataSource, plugin *plugins.DataSourcePlugin, ctx *m.ReqContext, proxyPath string) *DataSourceProxy { targetURL, _ := url.Parse(ds.Url) @@ -60,6 +61,13 @@ func NewDataSourceProxy(ds *m.DataSource, plugin *plugins.DataSourcePlugin, ctx } } +func newHTTPClient() httpClient { + return &http.Client{ + Timeout: time.Second * 30, + Transport: &http.Transport{Proxy: http.ProxyFromEnvironment}, + } +} + func (proxy *DataSourceProxy) HandleRequest() { if err := proxy.validateRequest(); err != nil { proxy.ctx.JsonApiErr(403, err.Error(), nil) @@ -109,6 +117,28 @@ func (proxy *DataSourceProxy) addTraceFromHeaderValue(span opentracing.Span, hea } } +func (proxy *DataSourceProxy) useCustomHeaders(req *http.Request) { + decryptSdj := proxy.ds.SecureJsonData.Decrypt() + index := 1 + for { + headerNameSuffix := fmt.Sprintf("httpHeaderName%d", index) + headerValueSuffix := fmt.Sprintf("httpHeaderValue%d", index) + if key := proxy.ds.JsonData.Get(headerNameSuffix).MustString(); key != "" { + if val, ok := decryptSdj[headerValueSuffix]; ok { + // remove if exists + if req.Header.Get(key) != "" { + req.Header.Del(key) + } + req.Header.Add(key, val) + logger.Debug("Using custom header ", "CustomHeaders", key) + } + } else { + break + } + index += 1 + } +} + func (proxy *DataSourceProxy) getDirector() func(req *http.Request) { return func(req *http.Request) { req.URL.Scheme = proxy.targetUrl.Scheme @@ -138,6 +168,11 @@ func (proxy *DataSourceProxy) getDirector() func(req *http.Request) { req.Header.Add("Authorization", util.GetBasicAuthHeader(proxy.ds.BasicAuthUser, proxy.ds.BasicAuthPassword)) } + // Lookup and use custom headers + if proxy.ds.SecureJsonData != nil { + proxy.useCustomHeaders(req) + } + dsAuth := req.Header.Get("X-DS-Authorization") if len(dsAuth) > 0 { req.Header.Del("X-DS-Authorization") @@ -311,7 +346,7 @@ func (proxy *DataSourceProxy) applyRoute(req *http.Request) { } func (proxy *DataSourceProxy) getAccessToken(data templateData) (string, error) { - if cachedToken, found := tokenCache[proxy.ds.Id]; found { + if cachedToken, found := tokenCache[proxy.getAccessTokenCacheKey()]; found { if cachedToken.ExpiresOn.After(time.Now().Add(time.Second * 10)) { logger.Info("Using token from cache") return cachedToken.AccessToken, nil @@ -350,12 +385,16 @@ func (proxy *DataSourceProxy) getAccessToken(data templateData) 
(string, error) expiresOnEpoch, _ := strconv.ParseInt(token.ExpiresOnString, 10, 64) token.ExpiresOn = time.Unix(expiresOnEpoch, 0) - tokenCache[proxy.ds.Id] = &token + tokenCache[proxy.getAccessTokenCacheKey()] = &token logger.Info("Got new access token", "ExpiresOn", token.ExpiresOn) return token.AccessToken, nil } +func (proxy *DataSourceProxy) getAccessTokenCacheKey() string { + return fmt.Sprintf("%v_%v_%v", proxy.ds.Id, proxy.route.Path, proxy.route.Method) +} + func interpolateString(text string, data templateData) (string, error) { t, err := template.New("content").Parse(text) if err != nil { diff --git a/pkg/api/pluginproxy/ds_proxy_test.go b/pkg/api/pluginproxy/ds_proxy_test.go index 3fc1392a851..bb553b4d075 100644 --- a/pkg/api/pluginproxy/ds_proxy_test.go +++ b/pkg/api/pluginproxy/ds_proxy_test.go @@ -1,13 +1,18 @@ package pluginproxy import ( + "bytes" + "fmt" + "io/ioutil" "net/http" "net/url" "testing" + "time" macaron "gopkg.in/macaron.v1" "github.com/grafana/grafana/pkg/components/simplejson" + "github.com/grafana/grafana/pkg/log" m "github.com/grafana/grafana/pkg/models" "github.com/grafana/grafana/pkg/plugins" "github.com/grafana/grafana/pkg/setting" @@ -100,6 +105,112 @@ func TestDSRouteRule(t *testing.T) { }) }) + Convey("Plugin with multiple routes for token auth", func() { + plugin := &plugins.DataSourcePlugin{ + Routes: []*plugins.AppPluginRoute{ + { + Path: "pathwithtoken1", + Url: "https://api.nr1.io/some/path", + TokenAuth: &plugins.JwtTokenAuth{ + Url: "https://login.server.com/{{.JsonData.tenantId}}/oauth2/token", + Params: map[string]string{ + "grant_type": "client_credentials", + "client_id": "{{.JsonData.clientId}}", + "client_secret": "{{.SecureJsonData.clientSecret}}", + "resource": "https://api.nr1.io", + }, + }, + }, + { + Path: "pathwithtoken2", + Url: "https://api.nr2.io/some/path", + TokenAuth: &plugins.JwtTokenAuth{ + Url: "https://login.server.com/{{.JsonData.tenantId}}/oauth2/token", + Params: map[string]string{ + "grant_type": "client_credentials", + "client_id": "{{.JsonData.clientId}}", + "client_secret": "{{.SecureJsonData.clientSecret}}", + "resource": "https://api.nr2.io", + }, + }, + }, + }, + } + + setting.SecretKey = "password" + key, _ := util.Encrypt([]byte("123"), "password") + + ds := &m.DataSource{ + JsonData: simplejson.NewFromAny(map[string]interface{}{ + "clientId": "asd", + "tenantId": "mytenantId", + }), + SecureJsonData: map[string][]byte{ + "clientSecret": key, + }, + } + + req, _ := http.NewRequest("GET", "http://localhost/asd", nil) + ctx := &m.ReqContext{ + Context: &macaron.Context{ + Req: macaron.Request{Request: req}, + }, + SignedInUser: &m.SignedInUser{OrgRole: m.ROLE_EDITOR}, + } + + Convey("When creating and caching access tokens", func() { + var authorizationHeaderCall1 string + var authorizationHeaderCall2 string + + Convey("first call should add authorization header with access token", func() { + json, err := ioutil.ReadFile("./test-data/access-token-1.json") + So(err, ShouldBeNil) + + client = newFakeHTTPClient(json) + proxy1 := NewDataSourceProxy(ds, plugin, ctx, "pathwithtoken1") + proxy1.route = plugin.Routes[0] + proxy1.applyRoute(req) + + authorizationHeaderCall1 = req.Header.Get("Authorization") + So(req.URL.String(), ShouldEqual, "https://api.nr1.io/some/path") + So(authorizationHeaderCall1, ShouldStartWith, "Bearer eyJ0e") + + Convey("second call to another route should add a different access token", func() { + json2, err := ioutil.ReadFile("./test-data/access-token-2.json") + So(err, ShouldBeNil) + + req, 
_ := http.NewRequest("GET", "http://localhost/asd", nil) + client = newFakeHTTPClient(json2) + proxy2 := NewDataSourceProxy(ds, plugin, ctx, "pathwithtoken2") + proxy2.route = plugin.Routes[1] + proxy2.applyRoute(req) + + authorizationHeaderCall2 = req.Header.Get("Authorization") + + So(req.URL.String(), ShouldEqual, "https://api.nr2.io/some/path") + So(authorizationHeaderCall1, ShouldStartWith, "Bearer eyJ0e") + So(authorizationHeaderCall2, ShouldStartWith, "Bearer eyJ0e") + So(authorizationHeaderCall2, ShouldNotEqual, authorizationHeaderCall1) + + Convey("third call to first route should add cached access token", func() { + req, _ := http.NewRequest("GET", "http://localhost/asd", nil) + + client = newFakeHTTPClient([]byte{}) + proxy3 := NewDataSourceProxy(ds, plugin, ctx, "pathwithtoken1") + proxy3.route = plugin.Routes[0] + proxy3.applyRoute(req) + + authorizationHeaderCall3 := req.Header.Get("Authorization") + So(req.URL.String(), ShouldEqual, "https://api.nr1.io/some/path") + So(authorizationHeaderCall1, ShouldStartWith, "Bearer eyJ0e") + So(authorizationHeaderCall3, ShouldStartWith, "Bearer eyJ0e") + So(authorizationHeaderCall3, ShouldEqual, authorizationHeaderCall1) + }) + }) + }) + }) + }) + Convey("When proxying graphite", func() { plugin := &plugins.DataSourcePlugin{} ds := &m.DataSource{Url: "htttp://graphite:8080", Type: m.DS_GRAPHITE} @@ -212,5 +323,60 @@ func TestDSRouteRule(t *testing.T) { So(interpolated, ShouldEqual, "0asd+asd") }) + Convey("When proxying a data source with custom headers specified", func() { + plugin := &plugins.DataSourcePlugin{} + + encryptedData, err := util.Encrypt([]byte(`Bearer xf5yhfkpsnmgo`), setting.SecretKey) + ds := &m.DataSource{ + Type: m.DS_PROMETHEUS, + Url: "http://prometheus:9090", + JsonData: simplejson.NewFromAny(map[string]interface{}{ + "httpHeaderName1": "Authorization", + }), + SecureJsonData: map[string][]byte{ + "httpHeaderValue1": encryptedData, + }, + } + + ctx := &m.ReqContext{} + proxy := NewDataSourceProxy(ds, plugin, ctx, "") + + requestURL, _ := url.Parse("http://grafana.com/sub") + req := http.Request{URL: requestURL, Header: make(http.Header)} + proxy.getDirector()(&req) + + if err != nil { + log.Fatal(4, err.Error()) + } + + Convey("Match header value after decryption", func() { + So(req.Header.Get("Authorization"), ShouldEqual, "Bearer xf5yhfkpsnmgo") + }) + }) + }) } + +type httpClientStub struct { + fakeBody []byte +} + +func (c *httpClientStub) Do(req *http.Request) (*http.Response, error) { + bodyJSON, _ := simplejson.NewJson(c.fakeBody) + _, passedTokenCacheTest := bodyJSON.CheckGet("expires_on") + So(passedTokenCacheTest, ShouldBeTrue) + + bodyJSON.Set("expires_on", fmt.Sprint(time.Now().Add(time.Second*60).Unix())) + body, _ := bodyJSON.MarshalJSON() + resp := &http.Response{ + Body: ioutil.NopCloser(bytes.NewReader(body)), + } + + return resp, nil +} + +func newFakeHTTPClient(fakeBody []byte) httpClient { + return &httpClientStub{ + fakeBody: fakeBody, + } +} diff --git a/pkg/api/pluginproxy/test-data/access-token-1.json b/pkg/api/pluginproxy/test-data/access-token-1.json new file mode 100644 index 00000000000..b91d63fc659 --- /dev/null +++ b/pkg/api/pluginproxy/test-data/access-token-1.json @@ -0,0 +1,9 @@ +{ + "token_type": "Bearer", + "expires_in": "3599", + "ext_expires_in": "0", + "expires_on": "1528740417", + "not_before": "1528736517", + "resource": "https://api.nr1.io", + "access_token": 
"eyJ0eXAiOiJKV1QiLCJhbGciOiJSUzI1NiIsIng1dCI6ImlCakwxUmNxemhpeTRmcHhJeGRacW9oTTJZayIsImtpZCI6ImlCakwxUmNxemhpeTRmcHhJeGRacW9oTTJZayJ9.eyJhdWQiOiJodHRwczovL2FwaS5sb2dhbmFseXRpY3MuaW8iLCJpc3MiOiJodHRwczovL3N0cy53aW5kb3dzLm5ldC9lN2YzZjY2MS1hOTMzLTRiM2YtODE3Ni01MWM0Zjk4MmVjNDgvIiwiaWF0IjoxNTI4NzM2NTE3LCJuYmYiOjE1Mjg3MzY1MTcsImV4cCI6MTUyODc0MDQxNywiYWlvIjoiWTJkZ1lBaStzaWRsT3NmQ2JicGhLMSsremttN0NBQT0iLCJhcHBpZCI6IjdmMzJkYjdjLTZmNmYtNGU4OC05M2Q5LTlhZTEyNmMwYTU1ZiIsImFwcGlkYWNyIjoiMSIsImlkcCI6Imh0dHBzOi8vc3RzLndpbmRvd3MubmV0L2U3ZjNmNjYxLWE5MzMtNGIzZi04MTc2LTUxYzRmOTgyZWM0OC8iLCJvaWQiOiI1NDQ5ZmJjOS1mYWJhLTRkNjItODE2Yy05ZmMwMzZkMWViN2UiLCJzdWIiOiI1NDQ5ZmJjOS1mYWJhLTRkNjItODE2Yy05ZmMwMzZkMWViN2UiLCJ0aWQiOiJlN2YzZjY2MS1hOTMzLTRiM2YtODE3Ni01MWM0Zjk4MmVjNDgiLCJ1dGkiOiJZQTlQa2lxUy1VV1hMQjhIRnU0U0FBIiwidmVyIjoiMS4wIn0.ga5qudt4LDMKTStAxUmzjyZH8UFBAaFirJqpTdmYny4NtkH6JT2EILvjTjYxlKeTQisvwx9gof0PyicZIab9d6wlMa2xiLzr2nmaOonYClY8fqBaRTgc1xVjrKFw5SCgpx3FnEyJhIWvVPIfaWaogSHcQbIpe4kdk4tz-ccmrx0D1jsziSI4BZcJcX04aJuHZGz9k4mQZ_AA5sQSeQaNuojIng6rYoIifAXFYBZPTbeeeqmiGq8v0IOLeNKbC0POeQCJC_KKBG6Z_MV2KgPxFEzQuX2ZFmRD_wGPteV5TUBxh1kARdqexA3e0zAKSawR9kmrAiZ21lPr4tX2Br_HDg" +} diff --git a/pkg/api/pluginproxy/test-data/access-token-2.json b/pkg/api/pluginproxy/test-data/access-token-2.json new file mode 100644 index 00000000000..2a2a617ad80 --- /dev/null +++ b/pkg/api/pluginproxy/test-data/access-token-2.json @@ -0,0 +1,9 @@ +{ + "token_type": "Bearer", + "expires_in": "3599", + "ext_expires_in": "0", + "expires_on": "1528662059", + "not_before": "1528658159", + "resource": "https://api.nr2.io", + "access_token": "eyJ0eXAiOiJKV1QiLCJhbGciOiJSUzI1NiIsIng1dCI6ImlCakwxUmNxemhpeTRmcHhJeGRacW9oTTJZayIsImtpZCI6ImlCakwxUmNxemhpeTRmcHhJeGRacW9oTTJZayJ9.eyJhdWQiOiJodHRwczovL21hbmFnZW1lbnQuYXp1cmUuY29tLyIsImlzcyI6Imh0dHBzOi8vc3RzLndpbmRvd3MubmV0L2U3ZjNmNjYxLWE5MzMtNGIzZi04MTc2LTUxYzRmOTgyZWM0OC8iLCJpYXQiOjE1Mjg2NTgxNTksIm5iZiI6MTUyODY1ODE1OSwiZXhwIjoxNTI4NjYyMDU5LCJhaW8iOiJZMmRnWUFpK3NpZGxPc2ZDYmJwaEsxKyt6a203Q0FBPSIsImFwcGlkIjoiODg5YjdlZDgtMWFlZC00ODZlLTk3ODktODE5NzcwYmJiNjFhIiwiYXBwaWRhY3IiOiIxIiwiaWRwIjoiaHR0cHM6Ly9zdHMud2luZG93cy5uZXQvZTdmM2Y2NjEtYTkzMy00YjNmLTgxNzYtNTFjNGY5ODJlYzQ4LyIsIm9pZCI6IjY0YzQxNjMyLTliOWUtNDczNy05MTYwLTBlNjAzZTg3NjljYyIsInN1YiI6IjY0YzQxNjMyLTliOWUtNDczNy05MTYwLTBlNjAzZTg3NjljYyIsInRpZCI6ImU3ZjNmNjYxLWE5MzMtNGIzZi04MTc2LTUxYzRmOTgyZWM0OCIsInV0aSI6IkQ1ODZHSGUySDBPd0ptOU0xeVlKQUEiLCJ2ZXIiOiIxLjAifQ.Pw8c8gpoZptw3lGreQoHQaMVOozSaTE5D38Vm2aCHRB3DvD3N-Qcm1x0ZCakUEV2sJd7jvx4XtPFuW7063T0V1deExL4rzzvIo0ZfMmURf9tCTiKFKYibqf8_PtfPSz0t9eNDEUGmWDh1Wgssb4W_H-wPqgl9VPMT7T6ynkfIm0-ODPZTBzgSHiY8C_L1-DkhsK7XiqbUlSDgx9FpfChZS3ah8QhA8geqnb_HVuSktg7WhpxmogSpK5QdrwSE3jsbItpzOfLJ4iBd2ExzS2C0y8H_Coluk3Y1YA07tAxJ6Y7oBv-XwGqNfZhveOCQOzX-U3dFod3fXXysjB0UB89WQ" +} diff --git a/pkg/api/plugins.go b/pkg/api/plugins.go index f757f2b9adc..4b44009ab8c 100644 --- a/pkg/api/plugins.go +++ b/pkg/api/plugins.go @@ -174,6 +174,7 @@ func ImportDashboard(c *m.ReqContext, apiCmd dtos.ImportDashboardCommand) Respon Path: apiCmd.Path, Inputs: apiCmd.Inputs, Overwrite: apiCmd.Overwrite, + FolderId: apiCmd.FolderId, Dashboard: apiCmd.Dashboard, } diff --git a/pkg/api/render.go b/pkg/api/render.go index 6e948ed294c..b8ef6cc5cb6 100644 --- a/pkg/api/render.go +++ b/pkg/api/render.go @@ -3,36 +3,66 @@ package api import ( "fmt" "net/http" + "runtime" + "strconv" + "strings" + "time" - "github.com/grafana/grafana/pkg/components/renderer" m "github.com/grafana/grafana/pkg/models" + "github.com/grafana/grafana/pkg/services/rendering" 
"github.com/grafana/grafana/pkg/util" ) -func RenderToPng(c *m.ReqContext) { +func (hs *HTTPServer) RenderToPng(c *m.ReqContext) { queryReader, err := util.NewUrlQueryReader(c.Req.URL) if err != nil { c.Handle(400, "Render parameters error", err) return } + queryParams := fmt.Sprintf("?%s", c.Req.URL.RawQuery) - renderOpts := &renderer.RenderOpts{ - Path: c.Params("*") + queryParams, - Width: queryReader.Get("width", "800"), - Height: queryReader.Get("height", "400"), - Timeout: queryReader.Get("timeout", "60"), + width, err := strconv.Atoi(queryReader.Get("width", "800")) + if err != nil { + c.Handle(400, "Render parameters error", fmt.Errorf("Cannot parse width as int: %s", err)) + return + } + + height, err := strconv.Atoi(queryReader.Get("height", "400")) + if err != nil { + c.Handle(400, "Render parameters error", fmt.Errorf("Cannot parse height as int: %s", err)) + return + } + + timeout, err := strconv.Atoi(queryReader.Get("timeout", "60")) + if err != nil { + c.Handle(400, "Render parameters error", fmt.Errorf("Cannot parse timeout as int: %s", err)) + return + } + + result, err := hs.RenderService.Render(c.Req.Context(), rendering.Opts{ + Width: width, + Height: height, + Timeout: time.Duration(timeout) * time.Second, OrgId: c.OrgId, UserId: c.UserId, OrgRole: c.OrgRole, + Path: c.Params("*") + queryParams, Timezone: queryReader.Get("tz", ""), Encoding: queryReader.Get("encoding", ""), + }) + + if err != nil && err == rendering.ErrTimeout { + c.Handle(500, err.Error(), err) + return } - pngPath, err := renderer.RenderToPng(renderOpts) - - if err != nil && err == renderer.ErrTimeout { - c.Handle(500, err.Error(), err) + if err != nil && err == rendering.ErrPhantomJSNotInstalled { + if strings.HasPrefix(runtime.GOARCH, "arm") { + c.Handle(500, "Rendering failed - PhantomJS isn't included in arm build per default", err) + } else { + c.Handle(500, "Rendering failed - PhantomJS isn't installed correctly", err) + } return } @@ -42,5 +72,5 @@ func RenderToPng(c *m.ReqContext) { } c.Resp.Header().Set("Content-Type", "image/png") - http.ServeFile(c.Resp, c.Req.Request, pngPath) + http.ServeFile(c.Resp, c.Req.Request, result.FilePath) } diff --git a/pkg/api/route_register.go b/pkg/api/routing/route_register.go similarity index 68% rename from pkg/api/route_register.go rename to pkg/api/routing/route_register.go index 926de13c546..7a054ad0a24 100644 --- a/pkg/api/route_register.go +++ b/pkg/api/routing/route_register.go @@ -1,9 +1,10 @@ -package api +package routing import ( "net/http" + "strings" - macaron "gopkg.in/macaron.v1" + "gopkg.in/macaron.v1" ) type Router interface { @@ -14,16 +15,34 @@ type Router interface { // RouteRegister allows you to add routes and macaron.Handlers // that the web server should serve. type RouteRegister interface { + // Get adds a list of handlers to a given route with a GET HTTP verb Get(string, ...macaron.Handler) + + // Post adds a list of handlers to a given route with a POST HTTP verb Post(string, ...macaron.Handler) + + // Delete adds a list of handlers to a given route with a DELETE HTTP verb Delete(string, ...macaron.Handler) + + // Put adds a list of handlers to a given route with a PUT HTTP verb Put(string, ...macaron.Handler) + + // Patch adds a list of handlers to a given route with a PATCH HTTP verb Patch(string, ...macaron.Handler) + + // Any adds a list of handlers to a given route with any HTTP verb Any(string, ...macaron.Handler) + // Group allows you to pass a function that can add multiple routes + // with a shared prefix route. 
diff --git a/pkg/api/route_register.go b/pkg/api/routing/route_register.go
similarity index 68%
rename from pkg/api/route_register.go
rename to pkg/api/routing/route_register.go
index 926de13c546..7a054ad0a24 100644
--- a/pkg/api/route_register.go
+++ b/pkg/api/routing/route_register.go
@@ -1,9 +1,10 @@
-package api
+package routing
 
 import (
 	"net/http"
+	"strings"
 
-	macaron "gopkg.in/macaron.v1"
+	"gopkg.in/macaron.v1"
 )
 
 type Router interface {
@@ -14,16 +15,34 @@ type Router interface {
 // RouteRegister allows you to add routes and macaron.Handlers
 // that the web server should serve.
 type RouteRegister interface {
+	// Get adds a list of handlers to a given route with a GET HTTP verb
 	Get(string, ...macaron.Handler)
+
+	// Post adds a list of handlers to a given route with a POST HTTP verb
 	Post(string, ...macaron.Handler)
+
+	// Delete adds a list of handlers to a given route with a DELETE HTTP verb
 	Delete(string, ...macaron.Handler)
+
+	// Put adds a list of handlers to a given route with a PUT HTTP verb
 	Put(string, ...macaron.Handler)
+
+	// Patch adds a list of handlers to a given route with a PATCH HTTP verb
 	Patch(string, ...macaron.Handler)
+
+	// Any adds a list of handlers to a given route with any HTTP verb
 	Any(string, ...macaron.Handler)
 
+	// Group allows you to pass a function that can add multiple routes
+	// with a shared prefix route.
 	Group(string, func(RouteRegister), ...macaron.Handler)
 
-	Register(Router) *macaron.Router
+	// Insert adds more routes to an existing Group.
+	Insert(string, func(RouteRegister), ...macaron.Handler)
+
+	// Register iterates over all routes added to the RouteRegister
+	// and adds them to the `Router` passed as a parameter.
+	Register(Router)
 }
 
 type RegisterNamedMiddleware func(name string) macaron.Handler
@@ -52,6 +71,24 @@ type routeRegister struct {
 	groups []*routeRegister
 }
 
+func (rr *routeRegister) Insert(pattern string, fn func(RouteRegister), handlers ...macaron.Handler) {
+
+	// loop over all groups at the current level
+	for _, g := range rr.groups {
+
+		// apply routes if the prefix matches the pattern
+		if g.prefix == pattern {
+			g.Group("", fn)
+			break
+		}
+
+		// go down one level if the prefix can be found in the pattern
+		if strings.HasPrefix(pattern, g.prefix) {
+			g.Insert(pattern, fn)
+		}
+	}
+}
+
 func (rr *routeRegister) Group(pattern string, fn func(rr RouteRegister), handlers ...macaron.Handler) {
 	group := &routeRegister{
 		prefix: rr.prefix + pattern,
@@ -64,7 +101,7 @@ func (rr *routeRegister) Group(pattern string, fn func(rr RouteRegister), handle
 	rr.groups = append(rr.groups, group)
 }
 
-func (rr *routeRegister) Register(router Router) *macaron.Router {
+func (rr *routeRegister) Register(router Router) {
 	for _, r := range rr.routes {
 		// GET requests have to be added to macaron routing using Get()
 		// Otherwise HEAD requests will not be allowed.
@@ -79,8 +116,6 @@ func (rr *routeRegister) Register(router Router) *macaron.Router {
 	for _, g := range rr.groups {
 		g.Register(router)
 	}
-
-	return &macaron.Router{}
 }
 
 func (rr *routeRegister) route(pattern, method string, handlers ...macaron.Handler) {
@@ -92,6 +127,12 @@ func (rr *routeRegister) route(pattern, method string, handlers ...macaron.Handl
 	h = append(h, rr.subfixHandlers...)
 	h = append(h, handlers...)
 
+	for _, r := range rr.routes {
+		if r.pattern == rr.prefix+pattern && r.method == method {
+			panic("cannot add duplicate route")
+		}
+	}
+
 	rr.routes = append(rr.routes, route{
 		method:  method,
 		pattern: rr.prefix + pattern,
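The Group/Insert pair above is easiest to see with a usage sketch. Handlers and the concrete router below are placeholders; note that Insert only recurses into groups whose prefix matches the pattern, so inserting under an unknown prefix silently adds nothing:

// sketch: composing routes with Group, then grafting more in with Insert
rr := routing.NewRouteRegister()

rr.Group("/api", func(api routing.RouteRegister) {
	api.Get("/health", healthHandler) // healthHandler etc. are placeholders

	api.Group("/teams", func(teams routing.RouteRegister) {
		teams.Post("/", createTeamHandler)
	})
})

// later, e.g. from an enterprise extension: add routes to the existing /api group
rr.Insert("/api", func(api routing.RouteRegister) {
	api.Delete("/cache", clearCacheHandler)
})

rr.Register(router) // router implements routing.Router, e.g. the macaron instance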
diff --git a/pkg/api/route_register_test.go b/pkg/api/routing/route_register_test.go
similarity index 72%
rename from pkg/api/route_register_test.go
rename to pkg/api/routing/route_register_test.go
index 3b5d79599a8..62e8989ff92 100644
--- a/pkg/api/route_register_test.go
+++ b/pkg/api/routing/route_register_test.go
@@ -1,11 +1,11 @@
-package api
+package routing
 
 import (
 	"net/http"
 	"strconv"
 	"testing"
 
-	macaron "gopkg.in/macaron.v1"
+	"gopkg.in/macaron.v1"
 )
 
 type fakeRouter struct {
@@ -33,7 +33,7 @@ func (fr *fakeRouter) Get(pattern string, handlers ...macaron.Handler) *macaron.
 }
 
 func emptyHandlers(n int) []macaron.Handler {
-	res := []macaron.Handler{}
+	var res []macaron.Handler
 	for i := 1; n >= i; i++ {
 		res = append(res, emptyHandler(strconv.Itoa(i)))
 	}
@@ -138,7 +138,78 @@ func TestRouteGroupedRegister(t *testing.T) {
 		}
 	}
 }
+func TestRouteGroupInserting(t *testing.T) {
+	testTable := []route{
+		{method: http.MethodGet, pattern: "/api/", handlers: emptyHandlers(1)},
+		{method: http.MethodPost, pattern: "/api/group/endpoint", handlers: emptyHandlers(1)},
+		{method: http.MethodGet, pattern: "/api/group/inserted", handlers: emptyHandlers(1)},
+		{method: http.MethodDelete, pattern: "/api/inserted-endpoint", handlers: emptyHandlers(1)},
+	}
+
+	// Setup
+	rr := NewRouteRegister()
+
+	rr.Group("/api", func(api RouteRegister) {
+		api.Get("/", emptyHandler("1"))
+
+		api.Group("/group", func(group RouteRegister) {
+			group.Post("/endpoint", emptyHandler("1"))
+		})
+	})
+
+	rr.Insert("/api", func(api RouteRegister) {
+		api.Delete("/inserted-endpoint", emptyHandler("1"))
+	})
+
+	rr.Insert("/api/group", func(group RouteRegister) {
+		group.Get("/inserted", emptyHandler("1"))
+	})
+
+	fr := &fakeRouter{}
+	rr.Register(fr)
+
+	// Validation
+	if len(fr.route) != len(testTable) {
+		t.Fatalf("want %v routes, got %v", len(testTable), len(fr.route))
+	}
+
+	for i := range testTable {
+		if testTable[i].method != fr.route[i].method {
+			t.Errorf("want %s got %v", testTable[i].method, fr.route[i].method)
+		}
+
+		if testTable[i].pattern != fr.route[i].pattern {
+			t.Errorf("want %s got %v", testTable[i].pattern, fr.route[i].pattern)
+		}
+
+		if len(testTable[i].handlers) != len(fr.route[i].handlers) {
+			t.Errorf("want %d handlers got %d handlers \ntestcase: %v\nroute: %v\n",
+				len(testTable[i].handlers),
+				len(fr.route[i].handlers),
+				testTable[i],
+				fr.route[i])
+		}
+	}
+}
+
+func TestDuplicateRouteShouldPanic(t *testing.T) {
+	defer func() {
+		if recover() != "cannot add duplicate route" {
+			t.Errorf("should panic if duplicate routes are added")
+		}
+	}()
+
+	rr := NewRouteRegister(func(name string) macaron.Handler {
+		return emptyHandler(name)
+	})
+
+	rr.Get("/api", emptyHandler("1"))
+	rr.Get("/api", emptyHandler("1"))
+
+	fr := &fakeRouter{}
+	rr.Register(fr)
+}
 func TestNamedMiddlewareRouteRegister(t *testing.T) {
 	testTable := []route{
 		{method: "DELETE", pattern: "/admin", handlers: emptyHandlers(2)},
diff --git a/pkg/api/team.go b/pkg/api/team.go
index 9919305881b..ebb426c4c82 100644
--- a/pkg/api/team.go
+++ b/pkg/api/team.go
@@ -93,5 +93,6 @@ func GetTeamByID(c *m.ReqContext) Response {
 		return Error(500, "Failed to get Team", err)
 	}
 
+	query.Result.AvatarUrl = dtos.GetGravatarUrlWithDefault(query.Result.Email, query.Result.Name)
 	return JSON(200, &query.Result)
 }
diff --git a/pkg/api/team_test.go b/pkg/api/team_test.go
index 0bf06d723c8..a1984288870 100644
--- a/pkg/api/team_test.go
+++ b/pkg/api/team_test.go
@@ -13,7 +13,7 @@ import (
 func TestTeamApiEndpoint(t *testing.T) {
 	Convey("Given two teams", t, func() {
 		mockResult := models.SearchTeamQueryResult{
-			Teams: []*models.SearchTeamDto{
+			Teams: []*models.TeamDTO{
 				{Name: "team1"},
 				{Name: "team2"},
 			},
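GetTeamByID above now decorates the result with an avatar URL. The implementation of dtos.GetGravatarUrlWithDefault is not part of this patch; the sketch below assumes the conventional Gravatar scheme with the team name as fallback input:

// assumes imports: crypto/md5, encoding/hex, fmt, strings
func gravatarURL(email, name string) string {
	if email == "" {
		email = name // stable fallback so every team gets an avatar
	}
	sum := md5.Sum([]byte(strings.ToLower(strings.TrimSpace(email))))
	return fmt.Sprintf("https://secure.gravatar.com/avatar/%s", hex.EncodeToString(sum[:]))
}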
diff --git a/pkg/bus/bus.go b/pkg/bus/bus.go
index 32a591b6672..9cf930aeb82 100644
--- a/pkg/bus/bus.go
+++ b/pkg/bus/bus.go
@@ -12,21 +12,42 @@ type Msg interface{}
 
 var ErrHandlerNotFound = errors.New("handler not found")
 
+type TransactionManager interface {
+	InTransaction(ctx context.Context, fn func(ctx context.Context) error) error
+}
+
 type Bus interface {
 	Dispatch(msg Msg) error
 	DispatchCtx(ctx context.Context, msg Msg) error
 	Publish(msg Msg) error
 
+	// InTransaction starts a transaction and stores it in the context.
+	// The caller can then pass a function with multiple DispatchCtx calls that
+	// will all be executed in the same transaction. InTransaction will roll back if the
+	// callback returns an error.
+	InTransaction(ctx context.Context, fn func(ctx context.Context) error) error
+
 	AddHandler(handler HandlerFunc)
-	AddCtxHandler(handler HandlerFunc)
+	AddHandlerCtx(handler HandlerFunc)
 	AddEventListener(handler HandlerFunc)
 	AddWildcardListener(handler HandlerFunc)
+
+	// SetTransactionManager allows the user to replace the internal
+	// noop TransactionManager that is responsible for managing
+	// transactions in `InTransaction`
+	SetTransactionManager(tm TransactionManager)
+}
+
+func (b *InProcBus) InTransaction(ctx context.Context, fn func(ctx context.Context) error) error {
+	return b.txMng.InTransaction(ctx, fn)
 }
 
 type InProcBus struct {
 	handlers          map[string]HandlerFunc
+	handlersWithCtx   map[string]HandlerFunc
 	listeners         map[string][]HandlerFunc
 	wildcardListeners []HandlerFunc
+	txMng             TransactionManager
 }
 
 // temp stuff, not sure how to handle bus instance, and init yet
@@ -35,8 +56,11 @@ var globalBus = New()
 
 func New() Bus {
 	bus := &InProcBus{}
 	bus.handlers = make(map[string]HandlerFunc)
+	bus.handlersWithCtx = make(map[string]HandlerFunc)
 	bus.listeners = make(map[string][]HandlerFunc)
 	bus.wildcardListeners = make([]HandlerFunc, 0)
+	bus.txMng = &noopTransactionManager{}
+
 	return bus
 }
 
@@ -45,17 +69,21 @@ func GetBus() Bus {
 	return globalBus
 }
 
+func (b *InProcBus) SetTransactionManager(tm TransactionManager) {
+	b.txMng = tm
+}
+
 func (b *InProcBus) DispatchCtx(ctx context.Context, msg Msg) error {
 	var msgName = reflect.TypeOf(msg).Elem().Name()
 
-	var handler = b.handlers[msgName]
+	var handler = b.handlersWithCtx[msgName]
 	if handler == nil {
 		return ErrHandlerNotFound
 	}
 
-	var params = make([]reflect.Value, 2)
-	params[0] = reflect.ValueOf(ctx)
-	params[1] = reflect.ValueOf(msg)
+	var params = []reflect.Value{}
+	params = append(params, reflect.ValueOf(ctx))
+	params = append(params, reflect.ValueOf(msg))
 
 	ret := reflect.ValueOf(handler).Call(params)
 	err := ret[0].Interface()
@@ -68,13 +96,23 @@ func (b *InProcBus) DispatchCtx(ctx context.Context, msg Msg) error {
 
 func (b *InProcBus) Dispatch(msg Msg) error {
 	var msgName = reflect.TypeOf(msg).Elem().Name()
 
-	var handler = b.handlers[msgName]
+	var handler = b.handlersWithCtx[msgName]
+	withCtx := true
+
+	if handler == nil {
+		withCtx = false
+		handler = b.handlers[msgName]
+	}
+
 	if handler == nil {
 		return ErrHandlerNotFound
 	}
 
-	var params = make([]reflect.Value, 1)
-	params[0] = reflect.ValueOf(msg)
+	var params = []reflect.Value{}
+	if withCtx {
+		params = append(params, reflect.ValueOf(context.Background()))
+	}
+	params = append(params, reflect.ValueOf(msg))
 
 	ret := reflect.ValueOf(handler).Call(params)
 	err := ret[0].Interface()
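With the handlersWithCtx table consulted first, a single ctx-aware handler now serves both dispatch paths; legacy Dispatch callers get context.Background() injected for them. A sketch (pingQuery is a placeholder message type):

func ExampleCtxFallback() {
	type pingQuery struct{ Resp string }

	b := bus.New()
	b.AddHandlerCtx(func(ctx context.Context, q *pingQuery) error {
		q.Resp = "pong" // reached from both dispatch paths
		return nil
	})

	_ = b.Dispatch(&pingQuery{})                          // legacy path: bus supplies context.Background()
	_ = b.DispatchCtx(context.Background(), &pingQuery{}) // ctx-aware path
}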
@@ -120,10 +158,10 @@ func (b *InProcBus) AddHandler(handler HandlerFunc) {
 	b.handlers[queryTypeName] = handler
 }
 
-func (b *InProcBus) AddCtxHandler(handler HandlerFunc) {
+func (b *InProcBus) AddHandlerCtx(handler HandlerFunc) {
 	handlerType := reflect.TypeOf(handler)
 	queryTypeName := handlerType.In(1).Elem().Name()
-	b.handlers[queryTypeName] = handler
+	b.handlersWithCtx[queryTypeName] = handler
 }
 
 func (b *InProcBus) AddEventListener(handler HandlerFunc) {
@@ -142,8 +180,8 @@ func AddHandler(implName string, handler HandlerFunc) {
 }
 
 // Package level functions
-func AddCtxHandler(implName string, handler HandlerFunc) {
-	globalBus.AddCtxHandler(handler)
+func AddHandlerCtx(implName string, handler HandlerFunc) {
+	globalBus.AddHandlerCtx(handler)
 }
 
 // Package level functions
@@ -167,6 +205,20 @@ func Publish(msg Msg) error {
 	return globalBus.Publish(msg)
 }
 
+// InTransaction starts a transaction and stores it in the context.
+// The caller can then pass a function with multiple DispatchCtx calls that
+// will all be executed in the same transaction. InTransaction will roll back if the
+// callback returns an error.
+func InTransaction(ctx context.Context, fn func(ctx context.Context) error) error {
+	return globalBus.InTransaction(ctx, fn)
+}
+
 func ClearBusHandlers() {
 	globalBus = New()
 }
+
+type noopTransactionManager struct{}
+
+func (*noopTransactionManager) InTransaction(ctx context.Context, fn func(ctx context.Context) error) error {
+	return fn(ctx)
+}
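The default transaction manager is a no-op, so InTransaction degrades to plain sequential dispatches until a real TransactionManager is registered on the bus via SetTransactionManager (presumably by the SQL store). A sketch with placeholder command values:

// createDashCmd and createAlertCmd are hypothetical command messages
func saveBoth(ctx context.Context) error {
	return bus.InTransaction(ctx, func(ctx context.Context) error {
		if err := bus.DispatchCtx(ctx, &createDashCmd); err != nil {
			return err // a real TransactionManager rolls back here
		}
		return bus.DispatchCtx(ctx, &createAlertCmd)
	})
}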
was called %d", handlerWithCtxCallCount) + } + }) + }) + +} + func TestQueryHandlerReturnsError(t *testing.T) { bus := New() - bus.AddHandler(func(query *TestQuery) error { + bus.AddHandler(func(query *testQuery) error { return errors.New("handler error") }) - err := bus.Dispatch(&TestQuery{}) + err := bus.Dispatch(&testQuery{}) if err == nil { t.Fatal("Send query failed " + err.Error()) @@ -30,12 +73,12 @@ func TestQueryHandlerReturnsError(t *testing.T) { func TestQueryHandlerReturn(t *testing.T) { bus := New() - bus.AddHandler(func(q *TestQuery) error { + bus.AddHandler(func(q *testQuery) error { q.Resp = "hello from handler" return nil }) - query := &TestQuery{} + query := &testQuery{} err := bus.Dispatch(query) if err != nil { @@ -49,17 +92,17 @@ func TestEventListeners(t *testing.T) { bus := New() count := 0 - bus.AddEventListener(func(query *TestQuery) error { + bus.AddEventListener(func(query *testQuery) error { count += 1 return nil }) - bus.AddEventListener(func(query *TestQuery) error { + bus.AddEventListener(func(query *testQuery) error { count += 10 return nil }) - err := bus.Publish(&TestQuery{}) + err := bus.Publish(&testQuery{}) if err != nil { t.Fatal("Publish event failed " + err.Error()) diff --git a/pkg/cmd/grafana-cli/commands/commands.go b/pkg/cmd/grafana-cli/commands/commands.go index 43484749670..5e69559b9fa 100644 --- a/pkg/cmd/grafana-cli/commands/commands.go +++ b/pkg/cmd/grafana-cli/commands/commands.go @@ -22,7 +22,9 @@ func runDbCommand(command func(commandLine CommandLine) error) func(context *cli Args: flag.Args(), }) - sqlstore.NewEngine() + engine := &sqlstore.SqlStore{} + engine.Cfg = cfg + engine.Init() if err := command(cmd); err != nil { logger.Errorf("\n%s: ", color.RedString("Error")) diff --git a/pkg/cmd/grafana-server/main.go b/pkg/cmd/grafana-server/main.go index c7ea6bb432b..f00e6bba0fd 100644 --- a/pkg/cmd/grafana-server/main.go +++ b/pkg/cmd/grafana-server/main.go @@ -18,10 +18,11 @@ import ( "github.com/grafana/grafana/pkg/metrics" "github.com/grafana/grafana/pkg/setting" - _ "github.com/grafana/grafana/pkg/extensions" + extensions "github.com/grafana/grafana/pkg/extensions" _ "github.com/grafana/grafana/pkg/services/alerting/conditions" _ "github.com/grafana/grafana/pkg/services/alerting/notifiers" _ "github.com/grafana/grafana/pkg/tsdb/cloudwatch" + _ "github.com/grafana/grafana/pkg/tsdb/elasticsearch" _ "github.com/grafana/grafana/pkg/tsdb/graphite" _ "github.com/grafana/grafana/pkg/tsdb/influxdb" _ "github.com/grafana/grafana/pkg/tsdb/mysql" @@ -34,7 +35,6 @@ import ( var version = "5.0.0" var commit = "NA" var buildstamp string -var enterprise string var configFile = flag.String("config", "", "path to config file") var homePath = flag.String("homepath", "", "path to grafana install/home path, defaults to working directory") @@ -77,7 +77,7 @@ func main() { setting.BuildVersion = version setting.BuildCommit = commit setting.BuildStamp = buildstampInt64 - setting.Enterprise, _ = strconv.ParseBool(enterprise) + setting.IsEnterprise = extensions.IsEnterprise metrics.M_Grafana_Version.WithLabelValues(version).Set(1) @@ -87,10 +87,11 @@ func main() { err := server.Run() + code := server.Exit(err) trace.Stop() log.Close() - server.Exit(err) + os.Exit(code) } func listenToSystemSignals(server *GrafanaServerImpl) { diff --git a/pkg/cmd/grafana-server/server.go b/pkg/cmd/grafana-server/server.go index 9209722527c..8794d7d8338 100644 --- a/pkg/cmd/grafana-server/server.go +++ b/pkg/cmd/grafana-server/server.go @@ -8,22 +8,20 @@ import ( "net" "os" 
"path/filepath" - "reflect" "strconv" "time" "github.com/facebookgo/inject" + "github.com/grafana/grafana/pkg/api/routing" "github.com/grafana/grafana/pkg/bus" "github.com/grafana/grafana/pkg/middleware" "github.com/grafana/grafana/pkg/registry" - "github.com/grafana/grafana/pkg/services/dashboards" "golang.org/x/sync/errgroup" "github.com/grafana/grafana/pkg/api" "github.com/grafana/grafana/pkg/log" "github.com/grafana/grafana/pkg/login" - "github.com/grafana/grafana/pkg/services/sqlstore" "github.com/grafana/grafana/pkg/setting" "github.com/grafana/grafana/pkg/social" @@ -36,7 +34,9 @@ import ( _ "github.com/grafana/grafana/pkg/services/cleanup" _ "github.com/grafana/grafana/pkg/services/notifications" _ "github.com/grafana/grafana/pkg/services/provisioning" + _ "github.com/grafana/grafana/pkg/services/rendering" _ "github.com/grafana/grafana/pkg/services/search" + _ "github.com/grafana/grafana/pkg/services/sqlstore" _ "github.com/grafana/grafana/pkg/tracing" ) @@ -62,33 +62,28 @@ type GrafanaServerImpl struct { shutdownReason string shutdownInProgress bool - RouteRegister api.RouteRegister `inject:""` - HttpServer *api.HTTPServer `inject:""` + RouteRegister routing.RouteRegister `inject:""` + HttpServer *api.HTTPServer `inject:""` } func (g *GrafanaServerImpl) Run() error { g.loadConfiguration() g.writePIDFile() - // initSql - sqlstore.NewEngine() // TODO: this should return an error - sqlstore.EnsureAdminUser() - login.Init() social.NewOAuthService() serviceGraph := inject.Graph{} serviceGraph.Provide(&inject.Object{Value: bus.GetBus()}) serviceGraph.Provide(&inject.Object{Value: g.cfg}) - serviceGraph.Provide(&inject.Object{Value: dashboards.NewProvisioningService()}) - serviceGraph.Provide(&inject.Object{Value: api.NewRouteRegister(middleware.RequestMetrics, middleware.RequestTracing)}) + serviceGraph.Provide(&inject.Object{Value: routing.NewRouteRegister(middleware.RequestMetrics, middleware.RequestTracing)}) // self registered services services := registry.GetServices() // Add all services to dependency graph for _, service := range services { - serviceGraph.Provide(&inject.Object{Value: service}) + serviceGraph.Provide(&inject.Object{Value: service.Instance}) } serviceGraph.Provide(&inject.Object{Value: g}) @@ -100,25 +95,27 @@ func (g *GrafanaServerImpl) Run() error { // Init & start services for _, service := range services { - if registry.IsDisabled(service) { + if registry.IsDisabled(service.Instance) { continue } - g.log.Info("Initializing " + reflect.TypeOf(service).Elem().Name()) + g.log.Info("Initializing " + service.Name) - if err := service.Init(); err != nil { + if err := service.Instance.Init(); err != nil { return fmt.Errorf("Service init failed: %v", err) } } // Start background services - for index := range services { - service, ok := services[index].(registry.BackgroundService) + for _, srv := range services { + // variable needed for accessing loop variable in function callback + descriptor := srv + service, ok := srv.Instance.(registry.BackgroundService) if !ok { continue } - if registry.IsDisabled(services[index]) { + if registry.IsDisabled(descriptor.Instance) { continue } @@ -133,9 +130,9 @@ func (g *GrafanaServerImpl) Run() error { // If error is not canceled then the service crashed if err != context.Canceled && err != nil { - g.log.Error("Stopped "+reflect.TypeOf(service).Elem().Name(), "reason", err) + g.log.Error("Stopped "+descriptor.Name, "reason", err) } else { - g.log.Info("Stopped "+reflect.TypeOf(service).Elem().Name(), "reason", err) + 
g.log.Info("Stopped "+descriptor.Name, "reason", err) } // Mark that we are in shutdown mode @@ -178,7 +175,7 @@ func (g *GrafanaServerImpl) Shutdown(reason string) { g.childRoutines.Wait() } -func (g *GrafanaServerImpl) Exit(reason error) { +func (g *GrafanaServerImpl) Exit(reason error) int { // default exit code is 1 code := 1 @@ -188,7 +185,7 @@ func (g *GrafanaServerImpl) Exit(reason error) { } g.log.Error("Server shutdown", "reason", reason) - os.Exit(code) + return code } func (g *GrafanaServerImpl) writePIDFile() { diff --git a/pkg/components/imguploader/webdavuploader.go b/pkg/components/imguploader/webdavuploader.go index f5478ea8a2f..ed6b14725c0 100644 --- a/pkg/components/imguploader/webdavuploader.go +++ b/pkg/components/imguploader/webdavuploader.go @@ -9,6 +9,7 @@ import ( "net/http" "net/url" "path" + "strings" "time" "github.com/grafana/grafana/pkg/util" @@ -35,6 +36,16 @@ var netClient = &http.Client{ Transport: netTransport, } +func (u *WebdavUploader) PublicURL(filename string) string { + if strings.Contains(u.public_url, "${file}") { + return strings.Replace(u.public_url, "${file}", filename, -1) + } else { + publicURL, _ := url.Parse(u.public_url) + publicURL.Path = path.Join(publicURL.Path, filename) + return publicURL.String() + } +} + func (u *WebdavUploader) Upload(ctx context.Context, pa string) (string, error) { url, _ := url.Parse(u.url) filename := util.GetRandomString(20) + ".png" @@ -65,9 +76,7 @@ func (u *WebdavUploader) Upload(ctx context.Context, pa string) (string, error) } if u.public_url != "" { - publicURL, _ := url.Parse(u.public_url) - publicURL.Path = path.Join(publicURL.Path, filename) - return publicURL.String(), nil + return u.PublicURL(filename), nil } return url.String(), nil diff --git a/pkg/components/imguploader/webdavuploader_test.go b/pkg/components/imguploader/webdavuploader_test.go index 5a8abd0542d..0178c9cda6c 100644 --- a/pkg/components/imguploader/webdavuploader_test.go +++ b/pkg/components/imguploader/webdavuploader_test.go @@ -2,6 +2,7 @@ package imguploader import ( "context" + "net/url" "testing" . 
"github.com/smartystreets/goconvey/convey" @@ -26,3 +27,15 @@ func TestUploadToWebdav(t *testing.T) { So(path, ShouldStartWith, "http://publicurl:8888/webdav/") }) } + +func TestPublicURL(t *testing.T) { + Convey("Given a public URL with parameters, and no template", t, func() { + webdavUploader, _ := NewWebdavImageUploader("http://localhost:8888/webdav/", "test", "test", "http://cloudycloud.me/s/DOIFDOMV/download?files=") + parsed, _ := url.Parse(webdavUploader.PublicURL("fileyfile.png")) + So(parsed.Path, ShouldEndWith, "fileyfile.png") + }) + Convey("Given a public URL with parameters, and a template", t, func() { + webdavUploader, _ := NewWebdavImageUploader("http://localhost:8888/webdav/", "test", "test", "http://cloudycloud.me/s/DOIFDOMV/download?files=${file}") + So(webdavUploader.PublicURL("fileyfile.png"), ShouldEndWith, "fileyfile.png") + }) +} diff --git a/pkg/components/renderer/renderer.go b/pkg/components/renderer/renderer.go deleted file mode 100644 index 26751ddd5c7..00000000000 --- a/pkg/components/renderer/renderer.go +++ /dev/null @@ -1,161 +0,0 @@ -package renderer - -import ( - "errors" - "fmt" - "io" - "os" - "os/exec" - "path/filepath" - "runtime" - "time" - - "strconv" - - "strings" - - "github.com/grafana/grafana/pkg/log" - "github.com/grafana/grafana/pkg/middleware" - "github.com/grafana/grafana/pkg/models" - "github.com/grafana/grafana/pkg/setting" - "github.com/grafana/grafana/pkg/util" -) - -type RenderOpts struct { - Path string - Width string - Height string - Timeout string - OrgId int64 - UserId int64 - OrgRole models.RoleType - Timezone string - IsAlertContext bool - Encoding string -} - -var ErrTimeout = errors.New("Timeout error. You can set timeout in seconds with &timeout url parameter") -var rendererLog log.Logger = log.New("png-renderer") - -func isoTimeOffsetToPosixTz(isoOffset string) string { - // invert offset - if strings.HasPrefix(isoOffset, "UTC+") { - return strings.Replace(isoOffset, "UTC+", "UTC-", 1) - } - if strings.HasPrefix(isoOffset, "UTC-") { - return strings.Replace(isoOffset, "UTC-", "UTC+", 1) - } - return isoOffset -} - -func appendEnviron(baseEnviron []string, name string, value string) []string { - results := make([]string, 0) - prefix := fmt.Sprintf("%s=", name) - for _, v := range baseEnviron { - if !strings.HasPrefix(v, prefix) { - results = append(results, v) - } - } - return append(results, fmt.Sprintf("%s=%s", name, value)) -} - -func RenderToPng(params *RenderOpts) (string, error) { - rendererLog.Info("Rendering", "path", params.Path) - - var executable = "phantomjs" - if runtime.GOOS == "windows" { - executable = executable + ".exe" - } - - localDomain := "localhost" - if setting.HttpAddr != setting.DEFAULT_HTTP_ADDR { - localDomain = setting.HttpAddr - } - - // &render=1 signals to the legacy redirect layer to - // avoid redirect these requests. 
- url := fmt.Sprintf("%s://%s:%s/%s&render=1", setting.Protocol, localDomain, setting.HttpPort, params.Path) - - binPath, _ := filepath.Abs(filepath.Join(setting.PhantomDir, executable)) - scriptPath, _ := filepath.Abs(filepath.Join(setting.PhantomDir, "render.js")) - pngPath, _ := filepath.Abs(filepath.Join(setting.ImagesDir, util.GetRandomString(20))) - pngPath = pngPath + ".png" - - orgRole := params.OrgRole - if params.IsAlertContext { - orgRole = models.ROLE_ADMIN - } - renderKey := middleware.AddRenderAuthKey(params.OrgId, params.UserId, orgRole) - defer middleware.RemoveRenderAuthKey(renderKey) - - timeout, err := strconv.Atoi(params.Timeout) - if err != nil { - timeout = 15 - } - - phantomDebugArg := "--debug=false" - if log.GetLogLevelFor("png-renderer") >= log.LvlDebug { - phantomDebugArg = "--debug=true" - } - - cmdArgs := []string{ - "--ignore-ssl-errors=true", - "--web-security=false", - phantomDebugArg, - scriptPath, - "url=" + url, - "width=" + params.Width, - "height=" + params.Height, - "png=" + pngPath, - "domain=" + localDomain, - "timeout=" + strconv.Itoa(timeout), - "renderKey=" + renderKey, - } - - if params.Encoding != "" { - cmdArgs = append([]string{fmt.Sprintf("--output-encoding=%s", params.Encoding)}, cmdArgs...) - } - - cmd := exec.Command(binPath, cmdArgs...) - output, err := cmd.StdoutPipe() - - if err != nil { - rendererLog.Error("Could not acquire stdout pipe", err) - return "", err - } - cmd.Stderr = cmd.Stdout - - if params.Timezone != "" { - baseEnviron := os.Environ() - cmd.Env = appendEnviron(baseEnviron, "TZ", isoTimeOffsetToPosixTz(params.Timezone)) - } - - err = cmd.Start() - if err != nil { - rendererLog.Error("Could not start command", err) - return "", err - } - - logWriter := log.NewLogWriter(rendererLog, log.LvlDebug, "[phantom] ") - go io.Copy(logWriter, output) - - done := make(chan error) - go func() { - if err := cmd.Wait(); err != nil { - rendererLog.Error("failed to render an image", "error", err) - } - close(done) - }() - - select { - case <-time.After(time.Duration(timeout) * time.Second): - if err := cmd.Process.Kill(); err != nil { - rendererLog.Error("failed to kill", "error", err) - } - return "", ErrTimeout - case <-done: - } - - rendererLog.Debug("Image rendered", "path", pngPath) - return pngPath, nil -} diff --git a/pkg/components/renderer/renderer_test.go b/pkg/components/renderer/renderer_test.go deleted file mode 100644 index 5ee42b784be..00000000000 --- a/pkg/components/renderer/renderer_test.go +++ /dev/null @@ -1,35 +0,0 @@ -package renderer - -// -// import ( -// "io/ioutil" -// "os" -// "testing" -// -// . 
"github.com/smartystreets/goconvey/convey" -// ) -// -// func TestPhantomRender(t *testing.T) { -// -// Convey("Can render url", t, func() { -// tempDir, _ := ioutil.TempDir("", "img") -// ipng, err := RenderToPng("http://www.google.com") -// So(err, ShouldBeNil) -// So(exists(png), ShouldEqual, true) -// -// //_, err = os.Stat(store.getFilePathForDashboard("hello")) -// //So(err, ShouldBeNil) -// }) -// -// } -// -// func exists(path string) bool { -// _, err := os.Stat(path) -// if err == nil { -// return true -// } -// if os.IsNotExist(err) { -// return false -// } -// return false -// } diff --git a/pkg/extensions/main.go b/pkg/extensions/main.go index 34ac9da7e86..6e3461da8a8 100644 --- a/pkg/extensions/main.go +++ b/pkg/extensions/main.go @@ -1,3 +1,3 @@ package extensions -import _ "github.com/pkg/errors" +var IsEnterprise bool = false diff --git a/pkg/login/ext_user.go b/pkg/login/ext_user.go index e1d5e3e3b48..a421e3ebe0a 100644 --- a/pkg/login/ext_user.go +++ b/pkg/login/ext_user.go @@ -21,6 +21,7 @@ func UpsertUser(cmd *m.UpsertUserCommand) error { Email: extUser.Email, Login: extUser.Login, } + err := bus.Dispatch(userQuery) if err != m.ErrUserNotFound && err != nil { return err @@ -66,7 +67,28 @@ func UpsertUser(cmd *m.UpsertUserCommand) error { } } - return syncOrgRoles(cmd.Result, extUser) + err = syncOrgRoles(cmd.Result, extUser) + if err != nil { + return err + } + + // Sync isGrafanaAdmin permission + if extUser.IsGrafanaAdmin != nil && *extUser.IsGrafanaAdmin != cmd.Result.IsAdmin { + if err := bus.Dispatch(&m.UpdateUserPermissionsCommand{UserId: cmd.Result.Id, IsGrafanaAdmin: *extUser.IsGrafanaAdmin}); err != nil { + return err + } + } + + err = bus.Dispatch(&m.SyncTeamsCommand{ + User: cmd.Result, + ExternalUser: extUser, + }) + + if err == bus.ErrHandlerNotFound { + return nil + } + + return err } func createUser(extUser *m.ExternalUserInfo) (*m.User, error) { @@ -76,6 +98,7 @@ func createUser(extUser *m.ExternalUserInfo) (*m.User, error) { Name: extUser.Name, SkipOrgSetup: len(extUser.OrgRoles) > 0, } + if err := bus.Dispatch(cmd); err != nil { return nil, err } diff --git a/pkg/login/ldap.go b/pkg/login/ldap.go index 3c5001df3a4..9e4918f0290 100644 --- a/pkg/login/ldap.go +++ b/pkg/login/ldap.go @@ -163,6 +163,7 @@ func (a *ldapAuther) GetGrafanaUserFor(ctx *m.ReqContext, ldapUser *LdapUserInfo Name: fmt.Sprintf("%s %s", ldapUser.FirstName, ldapUser.LastName), Login: ldapUser.Username, Email: ldapUser.Email, + Groups: ldapUser.MemberOf, OrgRoles: map[int64]m.RoleType{}, } @@ -174,6 +175,7 @@ func (a *ldapAuther) GetGrafanaUserFor(ctx *m.ReqContext, ldapUser *LdapUserInfo if ldapUser.isMemberOf(group.GroupDN) { extUser.OrgRoles[group.OrgId] = group.OrgRole + extUser.IsGrafanaAdmin = group.IsGrafanaAdmin } } @@ -189,17 +191,18 @@ func (a *ldapAuther) GetGrafanaUserFor(ctx *m.ReqContext, ldapUser *LdapUserInfo } // add/update user in grafana - userQuery := &m.UpsertUserCommand{ + upsertUserCmd := &m.UpsertUserCommand{ ReqContext: ctx, ExternalUser: extUser, SignupAllowed: setting.LdapAllowSignup, } - err := bus.Dispatch(userQuery) + + err := bus.Dispatch(upsertUserCmd) if err != nil { return nil, err } - return userQuery.Result, nil + return upsertUserCmd.Result, nil } func (a *ldapAuther) serverBind() error { @@ -308,6 +311,7 @@ func (a *ldapAuther) searchForUser(username string) (*LdapUserInfo, error) { } else { filter_replace = getLdapAttr(a.server.GroupSearchFilterUserAttribute, searchResult) } + filter := strings.Replace(a.server.GroupSearchFilter, "%s", 
diff --git a/pkg/login/ldap.go b/pkg/login/ldap.go
index 3c5001df3a4..9e4918f0290 100644
--- a/pkg/login/ldap.go
+++ b/pkg/login/ldap.go
@@ -163,6 +163,7 @@ func (a *ldapAuther) GetGrafanaUserFor(ctx *m.ReqContext, ldapUser *LdapUserInfo
 		Name:     fmt.Sprintf("%s %s", ldapUser.FirstName, ldapUser.LastName),
 		Login:    ldapUser.Username,
 		Email:    ldapUser.Email,
+		Groups:   ldapUser.MemberOf,
 		OrgRoles: map[int64]m.RoleType{},
 	}
 
@@ -174,6 +175,7 @@ func (a *ldapAuther) GetGrafanaUserFor(ctx *m.ReqContext, ldapUser *LdapUserInfo
 
 		if ldapUser.isMemberOf(group.GroupDN) {
 			extUser.OrgRoles[group.OrgId] = group.OrgRole
+			extUser.IsGrafanaAdmin = group.IsGrafanaAdmin
 		}
 	}
 
@@ -189,17 +191,18 @@ func (a *ldapAuther) GetGrafanaUserFor(ctx *m.ReqContext, ldapUser *LdapUserInfo
 	}
 
 	// add/update user in grafana
-	userQuery := &m.UpsertUserCommand{
+	upsertUserCmd := &m.UpsertUserCommand{
 		ReqContext:    ctx,
 		ExternalUser:  extUser,
 		SignupAllowed: setting.LdapAllowSignup,
 	}
-	err := bus.Dispatch(userQuery)
+
+	err := bus.Dispatch(upsertUserCmd)
 	if err != nil {
 		return nil, err
 	}
 
-	return userQuery.Result, nil
+	return upsertUserCmd.Result, nil
 }
 
 func (a *ldapAuther) serverBind() error {
@@ -308,6 +311,7 @@ func (a *ldapAuther) searchForUser(username string) (*LdapUserInfo, error) {
 	} else {
 		filter_replace = getLdapAttr(a.server.GroupSearchFilterUserAttribute, searchResult)
 	}
+
 	filter := strings.Replace(a.server.GroupSearchFilter, "%s", ldap.EscapeFilter(filter_replace), -1)
 
 	a.log.Info("Searching for user's groups", "filter", filter)
@@ -348,7 +352,7 @@ func (a *ldapAuther) searchForUser(username string) (*LdapUserInfo, error) {
 }
 
 func getLdapAttrN(name string, result *ldap.SearchResult, n int) string {
-	if name == "DN" {
+	if strings.ToLower(name) == "dn" {
 		return result.Entries[n].DN
 	}
 	for _, attr := range result.Entries[n].Attributes {
diff --git a/pkg/login/ldap_settings.go b/pkg/login/ldap_settings.go
index 497d8725e29..c4f5982b237 100644
--- a/pkg/login/ldap_settings.go
+++ b/pkg/login/ldap_settings.go
@@ -44,9 +44,10 @@ type LdapAttributeMap struct {
 }
 
 type LdapGroupToOrgRole struct {
-	GroupDN string     `toml:"group_dn"`
-	OrgId   int64      `toml:"org_id"`
-	OrgRole m.RoleType `toml:"org_role"`
+	GroupDN        string     `toml:"group_dn"`
+	OrgId          int64      `toml:"org_id"`
+	IsGrafanaAdmin *bool      `toml:"grafana_admin"` // This is a pointer to know if it was set or not (for backwards compatibility)
+	OrgRole        m.RoleType `toml:"org_role"`
 }
 
 var LdapCfg LdapConfig
diff --git a/pkg/login/ldap_test.go b/pkg/login/ldap_test.go
index 34932926406..1cf98bd1e14 100644
--- a/pkg/login/ldap_test.go
+++ b/pkg/login/ldap_test.go
@@ -1,6 +1,7 @@
 package login
 
 import (
+	"context"
 	"crypto/tls"
 	"testing"
 
@@ -14,6 +15,14 @@ func TestLdapAuther(t *testing.T) {
 
 	Convey("When translating ldap user to grafana user", t, func() {
 
+		var user1 = &m.User{}
+
+		bus.AddHandlerCtx("test", func(ctx context.Context, cmd *m.UpsertUserCommand) error {
+			cmd.Result = user1
+			cmd.Result.Login = "torkelo"
+			return nil
+		})
+
 		Convey("Given no ldap group map match", func() {
 			ldapAuther := NewLdapAuthenticator(&LdapServerConf{
 				LdapGroups: []*LdapGroupToOrgRole{{}},
@@ -23,8 +32,6 @@
 			So(err, ShouldEqual, ErrInvalidCredentials)
 		})
 
-		var user1 = &m.User{}
-
 		ldapAutherScenario("Given wildcard group match", func(sc *scenarioContext) {
 			ldapAuther := NewLdapAuthenticator(&LdapServerConf{
 				LdapGroups: []*LdapGroupToOrgRole{
@@ -91,12 +98,15 @@ func TestLdapAuther(t *testing.T) {
 				So(result.Login, ShouldEqual, "torkelo")
 			})
 
+			Convey("Should set isGrafanaAdmin to false by default", func() {
+				So(result.IsAdmin, ShouldBeFalse)
+			})
+
 		})
 	})
 
 	Convey("When syncing ldap groups to grafana org roles", t, func() {
-
 		ldapAutherScenario("given no current user orgs", func(sc *scenarioContext) {
 			ldapAuther := NewLdapAuthenticator(&LdapServerConf{
 				LdapGroups: []*LdapGroupToOrgRole{
@@ -217,8 +227,32 @@ func TestLdapAuther(t *testing.T) {
 				So(sc.addOrgUserCmd.Role, ShouldEqual, m.ROLE_ADMIN)
 				So(sc.setUsingOrgCmd.OrgId, ShouldEqual, 1)
 			})
+
+			Convey("Should not update permissions unless specified", func() {
+				So(err, ShouldBeNil)
+				So(sc.updateUserPermissionsCmd, ShouldBeNil)
+			})
 		})
 
+		ldapAutherScenario("given ldap groups with grafana_admin=true", func(sc *scenarioContext) {
+			trueVal := true
+
+			ldapAuther := NewLdapAuthenticator(&LdapServerConf{
+				LdapGroups: []*LdapGroupToOrgRole{
+					{GroupDN: "cn=admins", OrgId: 1, OrgRole: "Admin", IsGrafanaAdmin: &trueVal},
+				},
+			})
+
+			sc.userOrgsQueryReturns([]*m.UserOrgDTO{})
+			_, err := ldapAuther.GetGrafanaUserFor(nil, &LdapUserInfo{
+				MemberOf: []string{"cn=admins"},
+			})
+
+			Convey("Should create user with admin set to true", func() {
+				So(err, ShouldBeNil)
+				So(sc.updateUserPermissionsCmd.IsGrafanaAdmin, ShouldBeTrue)
+			})
+		})
 	})
 
 	Convey("When calling SyncUser", t, func() {
@@ -322,6 +356,15 @@ func ldapAutherScenario(desc string, fn scenarioFunc) {
 
 		bus.AddHandler("test", UpsertUser)
 
+		bus.AddHandlerCtx("test", func(ctx
context.Context, cmd *m.SyncTeamsCommand) error { + return nil + }) + + bus.AddHandlerCtx("test", func(ctx context.Context, cmd *m.UpdateUserPermissionsCommand) error { + sc.updateUserPermissionsCmd = cmd + return nil + }) + bus.AddHandler("test", func(cmd *m.GetUserByAuthInfoQuery) error { sc.getUserByAuthInfoQuery = cmd sc.getUserByAuthInfoQuery.Result = &m.User{Login: cmd.Login} @@ -369,14 +412,15 @@ func ldapAutherScenario(desc string, fn scenarioFunc) { } type scenarioContext struct { - getUserByAuthInfoQuery *m.GetUserByAuthInfoQuery - getUserOrgListQuery *m.GetUserOrgListQuery - createUserCmd *m.CreateUserCommand - addOrgUserCmd *m.AddOrgUserCommand - updateOrgUserCmd *m.UpdateOrgUserCommand - removeOrgUserCmd *m.RemoveOrgUserCommand - updateUserCmd *m.UpdateUserCommand - setUsingOrgCmd *m.SetUsingOrgCommand + getUserByAuthInfoQuery *m.GetUserByAuthInfoQuery + getUserOrgListQuery *m.GetUserOrgListQuery + createUserCmd *m.CreateUserCommand + addOrgUserCmd *m.AddOrgUserCommand + updateOrgUserCmd *m.UpdateOrgUserCommand + removeOrgUserCmd *m.RemoveOrgUserCommand + updateUserCmd *m.UpdateUserCommand + setUsingOrgCmd *m.SetUsingOrgCommand + updateUserPermissionsCmd *m.UpdateUserPermissionsCommand } func (sc *scenarioContext) userQueryReturns(user *m.User) { diff --git a/pkg/metrics/metrics.go b/pkg/metrics/metrics.go index 83505826910..a8d9f7308fa 100644 --- a/pkg/metrics/metrics.go +++ b/pkg/metrics/metrics.go @@ -44,6 +44,7 @@ var ( M_Alerting_Notification_Sent *prometheus.CounterVec M_Aws_CloudWatch_GetMetricStatistics prometheus.Counter M_Aws_CloudWatch_ListMetrics prometheus.Counter + M_Aws_CloudWatch_GetMetricData prometheus.Counter M_DB_DataSource_QueryById prometheus.Counter // Timers @@ -218,6 +219,12 @@ func init() { Namespace: exporterName, }) + M_Aws_CloudWatch_GetMetricData = prometheus.NewCounter(prometheus.CounterOpts{ + Name: "aws_cloudwatch_get_metric_data_total", + Help: "counter for getting metric data time series from aws", + Namespace: exporterName, + }) + M_DB_DataSource_QueryById = prometheus.NewCounter(prometheus.CounterOpts{ Name: "db_datasource_query_by_id_total", Help: "counter for getting datasource by id", @@ -307,6 +314,7 @@ func initMetricVars() { M_Alerting_Notification_Sent, M_Aws_CloudWatch_GetMetricStatistics, M_Aws_CloudWatch_ListMetrics, + M_Aws_CloudWatch_GetMetricData, M_DB_DataSource_QueryById, M_Alerting_Active_Alerts, M_StatTotal_Dashboards, @@ -332,6 +340,16 @@ func updateTotalStats() { M_StatTotal_Orgs.Set(float64(statsQuery.Result.Orgs)) } +var usageStatsURL = "https://stats.grafana.org/grafana-usage-report" + +func getEdition() string { + if setting.IsEnterprise { + return "enterprise" + } else { + return "oss" + } +} + func sendUsageStats() { if !setting.ReportingEnabled { return @@ -347,6 +365,7 @@ func sendUsageStats() { "metrics": metrics, "os": runtime.GOOS, "arch": runtime.GOARCH, + "edition": getEdition(), } statsQuery := models.GetSystemStatsQuery{} @@ -366,6 +385,12 @@ func sendUsageStats() { metrics["stats.active_users.count"] = statsQuery.Result.ActiveUsers metrics["stats.datasources.count"] = statsQuery.Result.Datasources metrics["stats.stars.count"] = statsQuery.Result.Stars + metrics["stats.folders.count"] = statsQuery.Result.Folders + metrics["stats.dashboard_permissions.count"] = statsQuery.Result.DashboardPermissions + metrics["stats.folder_permissions.count"] = statsQuery.Result.FolderPermissions + metrics["stats.provisioned_dashboards.count"] = statsQuery.Result.ProvisionedDashboards + metrics["stats.snapshots.count"] = 
statsQuery.Result.Snapshots
+	metrics["stats.teams.count"] = statsQuery.Result.Teams
 
 	dsStats := models.GetDataSourceStatsQuery{}
 	if err := bus.Dispatch(&dsStats); err != nil {
@@ -386,9 +411,38 @@
 	}
 	metrics["stats.ds.other.count"] = dsOtherCount
 
+	dsAccessStats := models.GetDataSourceAccessStatsQuery{}
+	if err := bus.Dispatch(&dsAccessStats); err != nil {
+		metricsLogger.Error("Failed to get datasource access stats", "error", err)
+		return
+	}
+
+	// send access counters for each data source,
+	// but ignore any custom data sources, since sending
+	// their names could leak sensitive information
+	dsAccessOtherCount := make(map[string]int64)
+	for _, dsAccessStat := range dsAccessStats.Result {
+		if dsAccessStat.Access == "" {
+			continue
+		}
+
+		access := strings.ToLower(dsAccessStat.Access)
+
+		if models.IsKnownDataSourcePlugin(dsAccessStat.Type) {
+			metrics["stats.ds_access."+dsAccessStat.Type+"."+access+".count"] = dsAccessStat.Count
+		} else {
+			old := dsAccessOtherCount[access]
+			dsAccessOtherCount[access] = old + dsAccessStat.Count
+		}
+	}
+
+	for access, count := range dsAccessOtherCount {
+		metrics["stats.ds_access.other."+access+".count"] = count
+	}
+
 	out, _ := json.MarshalIndent(report, "", " ")
 	data := bytes.NewBuffer(out)
 
 	client := http.Client{Timeout: 5 * time.Second}
-	go client.Post("https://stats.grafana.org/grafana-usage-report", "application/json", data)
+	go client.Post(usageStatsURL, "application/json", data)
 }
diff --git a/pkg/metrics/metrics_test.go b/pkg/metrics/metrics_test.go
new file mode 100644
index 00000000000..8d88e03d106
--- /dev/null
+++ b/pkg/metrics/metrics_test.go
@@ -0,0 +1,222 @@
+package metrics
+
+import (
+	"bytes"
+	"io/ioutil"
+	"runtime"
+	"sync"
+	"testing"
+	"time"
+
+	"net/http"
+	"net/http/httptest"
+
+	"github.com/grafana/grafana/pkg/bus"
+	"github.com/grafana/grafana/pkg/components/simplejson"
+	"github.com/grafana/grafana/pkg/models"
+	"github.com/grafana/grafana/pkg/plugins"
+	"github.com/grafana/grafana/pkg/setting"
+	. 
"github.com/smartystreets/goconvey/convey" +) + +func TestMetrics(t *testing.T) { + Convey("Test send usage stats", t, func() { + var getSystemStatsQuery *models.GetSystemStatsQuery + bus.AddHandler("test", func(query *models.GetSystemStatsQuery) error { + query.Result = &models.SystemStats{ + Dashboards: 1, + Datasources: 2, + Users: 3, + ActiveUsers: 4, + Orgs: 5, + Playlists: 6, + Alerts: 7, + Stars: 8, + Folders: 9, + DashboardPermissions: 10, + FolderPermissions: 11, + ProvisionedDashboards: 12, + Snapshots: 13, + Teams: 14, + } + getSystemStatsQuery = query + return nil + }) + + var getDataSourceStatsQuery *models.GetDataSourceStatsQuery + bus.AddHandler("test", func(query *models.GetDataSourceStatsQuery) error { + query.Result = []*models.DataSourceStats{ + { + Type: models.DS_ES, + Count: 9, + }, + { + Type: models.DS_PROMETHEUS, + Count: 10, + }, + { + Type: "unknown_ds", + Count: 11, + }, + { + Type: "unknown_ds2", + Count: 12, + }, + } + getDataSourceStatsQuery = query + return nil + }) + + var getDataSourceAccessStatsQuery *models.GetDataSourceAccessStatsQuery + bus.AddHandler("test", func(query *models.GetDataSourceAccessStatsQuery) error { + query.Result = []*models.DataSourceAccessStats{ + { + Type: models.DS_ES, + Access: "direct", + Count: 1, + }, + { + Type: models.DS_ES, + Access: "proxy", + Count: 2, + }, + { + Type: models.DS_PROMETHEUS, + Access: "proxy", + Count: 3, + }, + { + Type: "unknown_ds", + Access: "proxy", + Count: 4, + }, + { + Type: "unknown_ds2", + Access: "", + Count: 5, + }, + { + Type: "unknown_ds3", + Access: "direct", + Count: 6, + }, + { + Type: "unknown_ds4", + Access: "direct", + Count: 7, + }, + { + Type: "unknown_ds5", + Access: "proxy", + Count: 8, + }, + } + getDataSourceAccessStatsQuery = query + return nil + }) + + var wg sync.WaitGroup + var responseBuffer *bytes.Buffer + var req *http.Request + ts := httptest.NewServer(http.HandlerFunc(func(rw http.ResponseWriter, r *http.Request) { + req = r + buf, err := ioutil.ReadAll(r.Body) + if err != nil { + t.Fatalf("Failed to read response body, err=%v", err) + } + responseBuffer = bytes.NewBuffer(buf) + wg.Done() + })) + usageStatsURL = ts.URL + + sendUsageStats() + + Convey("Given reporting not enabled and sending usage stats", func() { + setting.ReportingEnabled = false + sendUsageStats() + + Convey("Should not gather stats or call http endpoint", func() { + So(getSystemStatsQuery, ShouldBeNil) + So(getDataSourceStatsQuery, ShouldBeNil) + So(getDataSourceAccessStatsQuery, ShouldBeNil) + So(req, ShouldBeNil) + }) + }) + + Convey("Given reporting enabled and sending usage stats", func() { + setting.ReportingEnabled = true + setting.BuildVersion = "5.0.0" + wg.Add(1) + sendUsageStats() + + Convey("Should gather stats and call http endpoint", func() { + if waitTimeout(&wg, 2*time.Second) { + t.Fatalf("Timed out waiting for http request") + } + + So(getSystemStatsQuery, ShouldNotBeNil) + So(getDataSourceStatsQuery, ShouldNotBeNil) + So(getDataSourceAccessStatsQuery, ShouldNotBeNil) + So(req, ShouldNotBeNil) + So(req.Method, ShouldEqual, http.MethodPost) + So(req.Header.Get("Content-Type"), ShouldEqual, "application/json") + + So(responseBuffer, ShouldNotBeNil) + + j, err := simplejson.NewFromReader(responseBuffer) + So(err, ShouldBeNil) + + So(j.Get("version").MustString(), ShouldEqual, "5_0_0") + So(j.Get("os").MustString(), ShouldEqual, runtime.GOOS) + So(j.Get("arch").MustString(), ShouldEqual, runtime.GOARCH) + + metrics := j.Get("metrics") + So(metrics.Get("stats.dashboards.count").MustInt(), 
ShouldEqual, getSystemStatsQuery.Result.Dashboards) + So(metrics.Get("stats.users.count").MustInt(), ShouldEqual, getSystemStatsQuery.Result.Users) + So(metrics.Get("stats.orgs.count").MustInt(), ShouldEqual, getSystemStatsQuery.Result.Orgs) + So(metrics.Get("stats.playlist.count").MustInt(), ShouldEqual, getSystemStatsQuery.Result.Playlists) + So(metrics.Get("stats.plugins.apps.count").MustInt(), ShouldEqual, len(plugins.Apps)) + So(metrics.Get("stats.plugins.panels.count").MustInt(), ShouldEqual, len(plugins.Panels)) + So(metrics.Get("stats.plugins.datasources.count").MustInt(), ShouldEqual, len(plugins.DataSources)) + So(metrics.Get("stats.alerts.count").MustInt(), ShouldEqual, getSystemStatsQuery.Result.Alerts) + So(metrics.Get("stats.active_users.count").MustInt(), ShouldEqual, getSystemStatsQuery.Result.ActiveUsers) + So(metrics.Get("stats.datasources.count").MustInt(), ShouldEqual, getSystemStatsQuery.Result.Datasources) + So(metrics.Get("stats.stars.count").MustInt(), ShouldEqual, getSystemStatsQuery.Result.Stars) + So(metrics.Get("stats.folders.count").MustInt(), ShouldEqual, getSystemStatsQuery.Result.Folders) + So(metrics.Get("stats.dashboard_permissions.count").MustInt(), ShouldEqual, getSystemStatsQuery.Result.DashboardPermissions) + So(metrics.Get("stats.folder_permissions.count").MustInt(), ShouldEqual, getSystemStatsQuery.Result.FolderPermissions) + So(metrics.Get("stats.provisioned_dashboards.count").MustInt(), ShouldEqual, getSystemStatsQuery.Result.ProvisionedDashboards) + So(metrics.Get("stats.snapshots.count").MustInt(), ShouldEqual, getSystemStatsQuery.Result.Snapshots) + So(metrics.Get("stats.teams.count").MustInt(), ShouldEqual, getSystemStatsQuery.Result.Teams) + + So(metrics.Get("stats.ds."+models.DS_ES+".count").MustInt(), ShouldEqual, 9) + So(metrics.Get("stats.ds."+models.DS_PROMETHEUS+".count").MustInt(), ShouldEqual, 10) + So(metrics.Get("stats.ds.other.count").MustInt(), ShouldEqual, 11+12) + + So(metrics.Get("stats.ds_access."+models.DS_ES+".direct.count").MustInt(), ShouldEqual, 1) + So(metrics.Get("stats.ds_access."+models.DS_ES+".proxy.count").MustInt(), ShouldEqual, 2) + So(metrics.Get("stats.ds_access."+models.DS_PROMETHEUS+".proxy.count").MustInt(), ShouldEqual, 3) + So(metrics.Get("stats.ds_access.other.direct.count").MustInt(), ShouldEqual, 6+7) + So(metrics.Get("stats.ds_access.other.proxy.count").MustInt(), ShouldEqual, 4+8) + }) + }) + + Reset(func() { + ts.Close() + }) + }) +} + +func waitTimeout(wg *sync.WaitGroup, timeout time.Duration) bool { + c := make(chan struct{}) + go func() { + defer close(c) + wg.Wait() + }() + select { + case <-c: + return false // completed normally + case <-time.After(timeout): + return true // timed out + } +} diff --git a/pkg/middleware/auth.go b/pkg/middleware/auth.go index 37e79c01071..5faee1e3fa7 100644 --- a/pkg/middleware/auth.go +++ b/pkg/middleware/auth.go @@ -9,6 +9,7 @@ import ( m "github.com/grafana/grafana/pkg/models" "github.com/grafana/grafana/pkg/services/session" "github.com/grafana/grafana/pkg/setting" + "github.com/grafana/grafana/pkg/util" ) type AuthOptions struct { @@ -34,6 +35,11 @@ func getApiKey(c *m.ReqContext) string { return key } + username, password, err := util.DecodeBasicAuthHeader(header) + if err == nil && username == "api_key" { + return password + } + return "" } diff --git a/pkg/middleware/middleware.go b/pkg/middleware/middleware.go index 93db49ed880..475dce089b1 100644 --- a/pkg/middleware/middleware.go +++ b/pkg/middleware/middleware.go @@ -49,7 +49,6 @@ func 
GetContextHandler() macaron.Handler { c.Map(ctx) - // update last seen at // update last seen every 5min if ctx.ShouldUpdateLastSeenAt() { ctx.Logger.Debug("Updating last user_seen_at", "user_id", ctx.UserId) diff --git a/pkg/middleware/middleware_test.go b/pkg/middleware/middleware_test.go index b827751b1a5..87c23a7b49a 100644 --- a/pkg/middleware/middleware_test.go +++ b/pkg/middleware/middleware_test.go @@ -82,7 +82,7 @@ func TestMiddlewareContext(t *testing.T) { setting.BasicAuthEnabled = true authHeader := util.GetBasicAuthHeader("myUser", "myPass") - sc.fakeReq("GET", "/").withAuthoriziationHeader(authHeader).exec() + sc.fakeReq("GET", "/").withAuthorizationHeader(authHeader).exec() Convey("Should init middleware context with user", func() { So(sc.context.IsSignedIn, ShouldEqual, true) @@ -128,6 +128,28 @@ func TestMiddlewareContext(t *testing.T) { }) }) + middlewareScenario("Valid api key via Basic auth", func(sc *scenarioContext) { + keyhash := util.EncodePassword("v5nAwpMafFP6znaS4urhdWDLS5511M42", "asd") + + bus.AddHandler("test", func(query *m.GetApiKeyByNameQuery) error { + query.Result = &m.ApiKey{OrgId: 12, Role: m.ROLE_EDITOR, Key: keyhash} + return nil + }) + + authHeader := util.GetBasicAuthHeader("api_key", "eyJrIjoidjVuQXdwTWFmRlA2em5hUzR1cmhkV0RMUzU1MTFNNDIiLCJuIjoiYXNkIiwiaWQiOjF9") + sc.fakeReq("GET", "/").withAuthorizationHeader(authHeader).exec() + + Convey("Should return 200", func() { + So(sc.resp.Code, ShouldEqual, 200) + }) + + Convey("Should init middleware context", func() { + So(sc.context.IsSignedIn, ShouldEqual, true) + So(sc.context.OrgId, ShouldEqual, 12) + So(sc.context.OrgRole, ShouldEqual, m.ROLE_EDITOR) + }) + }) + middlewareScenario("UserId in session", func(sc *scenarioContext) { sc.fakeReq("GET", "/").handler(func(c *m.ReqContext) { @@ -418,7 +440,7 @@ func (sc *scenarioContext) withInvalidApiKey() *scenarioContext { return sc } -func (sc *scenarioContext) withAuthoriziationHeader(authHeader string) *scenarioContext { +func (sc *scenarioContext) withAuthorizationHeader(authHeader string) *scenarioContext { sc.authHeader = authHeader return sc } diff --git a/pkg/middleware/render_auth.go b/pkg/middleware/render_auth.go index c382eb8e707..e30cfe67924 100644 --- a/pkg/middleware/render_auth.go +++ b/pkg/middleware/render_auth.go @@ -2,6 +2,7 @@ package middleware import ( "sync" + "time" m "github.com/grafana/grafana/pkg/models" "github.com/grafana/grafana/pkg/util" @@ -28,6 +29,7 @@ func initContextWithRenderAuth(ctx *m.ReqContext) bool { ctx.IsSignedIn = true ctx.SignedInUser = renderUser ctx.IsRenderCall = true + ctx.LastSeenAt = time.Now() return true } diff --git a/pkg/models/alert.go b/pkg/models/alert.go index b72d87e94b2..fba2aa63df9 100644 --- a/pkg/models/alert.go +++ b/pkg/models/alert.go @@ -161,12 +161,13 @@ type SetAlertStateCommand struct { //Queries type GetAlertsQuery struct { - OrgId int64 - State []string - DashboardId int64 - PanelId int64 - Limit int64 - User *SignedInUser + OrgId int64 + State []string + DashboardIDs []int64 + PanelId int64 + Limit int64 + Query string + User *SignedInUser Result []*AlertListItemDTO } diff --git a/pkg/models/dashboard_snapshot.go b/pkg/models/dashboard_snapshot.go index ec8b19f3c18..3024ba94122 100644 --- a/pkg/models/dashboard_snapshot.go +++ b/pkg/models/dashboard_snapshot.go @@ -29,7 +29,6 @@ type DashboardSnapshotDTO struct { Id int64 `json:"id"` Name string `json:"name"` Key string `json:"key"` - DeleteKey string `json:"deleteKey"` OrgId int64 `json:"orgId"` UserId int64 `json:"userId"` 
External bool `json:"external"` diff --git a/pkg/models/dashboards.go b/pkg/models/dashboards.go index eb44c1bc582..4b84d840113 100644 --- a/pkg/models/dashboards.go +++ b/pkg/models/dashboards.go @@ -254,6 +254,7 @@ type DashboardProvisioning struct { DashboardId int64 Name string ExternalId string + CheckSum string Updated int64 } diff --git a/pkg/models/playlist.go b/pkg/models/playlist.go index 5c49bb9256c..c52da202293 100644 --- a/pkg/models/playlist.go +++ b/pkg/models/playlist.go @@ -63,7 +63,7 @@ type PlaylistDashboards []*PlaylistDashboard type UpdatePlaylistCommand struct { OrgId int64 `json:"-"` - Id int64 `json:"id" binding:"Required"` + Id int64 `json:"id"` Name string `json:"name" binding:"Required"` Interval string `json:"interval"` Items []PlaylistItemDTO `json:"items"` diff --git a/pkg/models/stats.go b/pkg/models/stats.go index e132d88c030..4cd50d37463 100644 --- a/pkg/models/stats.go +++ b/pkg/models/stats.go @@ -1,14 +1,20 @@ package models type SystemStats struct { - Dashboards int64 - Datasources int64 - Users int64 - ActiveUsers int64 - Orgs int64 - Playlists int64 - Alerts int64 - Stars int64 + Dashboards int64 + Datasources int64 + Users int64 + ActiveUsers int64 + Orgs int64 + Playlists int64 + Alerts int64 + Stars int64 + Snapshots int64 + Teams int64 + DashboardPermissions int64 + FolderPermissions int64 + Folders int64 + ProvisionedDashboards int64 } type DataSourceStats struct { @@ -24,6 +30,16 @@ type GetDataSourceStatsQuery struct { Result []*DataSourceStats } +type DataSourceAccessStats struct { + Type string + Access string + Count int64 +} + +type GetDataSourceAccessStatsQuery struct { + Result []*DataSourceAccessStats +} + type AdminStats struct { Users int `json:"users"` Orgs int `json:"orgs"` @@ -40,3 +56,11 @@ type AdminStats struct { type GetAdminStatsQuery struct { Result *AdminStats } + +type SystemUserCountStats struct { + Count int64 +} + +type GetSystemUserCountStatsQuery struct { + Result *SystemUserCountStats +} diff --git a/pkg/models/team.go b/pkg/models/team.go index 9c679a13394..61285db3a5f 100644 --- a/pkg/models/team.go +++ b/pkg/models/team.go @@ -49,13 +49,13 @@ type DeleteTeamCommand struct { type GetTeamByIdQuery struct { OrgId int64 Id int64 - Result *Team + Result *TeamDTO } type GetTeamsByUserQuery struct { OrgId int64 - UserId int64 `json:"userId"` - Result []*Team `json:"teams"` + UserId int64 `json:"userId"` + Result []*TeamDTO `json:"teams"` } type SearchTeamsQuery struct { @@ -68,7 +68,7 @@ type SearchTeamsQuery struct { Result SearchTeamQueryResult } -type SearchTeamDto struct { +type TeamDTO struct { Id int64 `json:"id"` OrgId int64 `json:"orgId"` Name string `json:"name"` @@ -78,8 +78,8 @@ type SearchTeamDto struct { } type SearchTeamQueryResult struct { - TotalCount int64 `json:"totalCount"` - Teams []*SearchTeamDto `json:"teams"` - Page int `json:"page"` - PerPage int `json:"perPage"` + TotalCount int64 `json:"totalCount"` + Teams []*TeamDTO `json:"teams"` + Page int `json:"page"` + PerPage int `json:"perPage"` } diff --git a/pkg/models/team_member.go b/pkg/models/team_member.go index 19cf657292d..9434dad8ecd 100644 --- a/pkg/models/team_member.go +++ b/pkg/models/team_member.go @@ -42,6 +42,7 @@ type RemoveTeamMemberCommand struct { type GetTeamMembersQuery struct { OrgId int64 TeamId int64 + UserId int64 Result []*TeamMemberDTO } diff --git a/pkg/models/user_auth.go b/pkg/models/user_auth.go index 0ecd144d52c..28189005737 100644 --- a/pkg/models/user_auth.go +++ b/pkg/models/user_auth.go @@ -13,13 +13,15 @@ type 
UserAuth struct { } type ExternalUserInfo struct { - AuthModule string - AuthId string - UserId int64 - Email string - Login string - Name string - OrgRoles map[int64]RoleType + AuthModule string + AuthId string + UserId int64 + Email string + Login string + Name string + Groups []string + OrgRoles map[int64]RoleType + IsGrafanaAdmin *bool // This is a pointer to know if we should sync this or not (nil = ignore sync) } // --------------------- @@ -70,3 +72,8 @@ type GetAuthInfoQuery struct { Result *UserAuth } + +type SyncTeamsCommand struct { + ExternalUser *ExternalUserInfo + User *User +} diff --git a/pkg/plugins/backend_utils.go b/pkg/plugins/backend_utils.go new file mode 100644 index 00000000000..d3ee32f0545 --- /dev/null +++ b/pkg/plugins/backend_utils.go @@ -0,0 +1,19 @@ +package plugins + +import ( + "fmt" + "runtime" + "strings" +) + +func ComposePluginStartCommmand(executable string) string { + os := strings.ToLower(runtime.GOOS) + arch := runtime.GOARCH + extension := "" + + if os == "windows" { + extension = ".exe" + } + + return fmt.Sprintf("%s_%s_%s%s", executable, os, strings.ToLower(arch), extension) +} diff --git a/pkg/plugins/dashboard_importer.go b/pkg/plugins/dashboard_importer.go index 1364fded987..9b319358780 100644 --- a/pkg/plugins/dashboard_importer.go +++ b/pkg/plugins/dashboard_importer.go @@ -16,6 +16,7 @@ type ImportDashboardCommand struct { Path string Inputs []ImportDashboardInput Overwrite bool + FolderId int64 OrgId int64 User *m.SignedInUser @@ -70,7 +71,7 @@ func ImportDashboard(cmd *ImportDashboardCommand) error { UserId: cmd.User.UserId, Overwrite: cmd.Overwrite, PluginId: cmd.PluginId, - FolderId: dashboard.FolderId, + FolderId: cmd.FolderId, } dto := &dashboards.SaveDashboardDTO{ @@ -91,6 +92,7 @@ func ImportDashboard(cmd *ImportDashboardCommand) error { Title: savedDash.Title, Path: cmd.Path, Revision: savedDash.Data.Get("revision").MustInt64(1), + FolderId: savedDash.FolderId, ImportedUri: "db/" + savedDash.Slug, ImportedUrl: savedDash.GetUrl(), ImportedRevision: dashboard.Data.Get("revision").MustInt64(1), diff --git a/pkg/plugins/dashboards.go b/pkg/plugins/dashboards.go index d15bcdd6db5..500d97e38ca 100644 --- a/pkg/plugins/dashboards.go +++ b/pkg/plugins/dashboards.go @@ -17,6 +17,7 @@ type PluginDashboardInfoDTO struct { ImportedUrl string `json:"importedUrl"` Slug string `json:"slug"` DashboardId int64 `json:"dashboardId"` + FolderId int64 `json:"folderId"` ImportedRevision int64 `json:"importedRevision"` Revision int64 `json:"revision"` Description string `json:"description"` diff --git a/pkg/plugins/datasource/wrapper/datasource_plugin_wrapper.go b/pkg/plugins/datasource/wrapper/datasource_plugin_wrapper.go index d2cd9f63cde..0af727e14df 100644 --- a/pkg/plugins/datasource/wrapper/datasource_plugin_wrapper.go +++ b/pkg/plugins/datasource/wrapper/datasource_plugin_wrapper.go @@ -5,12 +5,12 @@ import ( "errors" "fmt" + "github.com/grafana/grafana-plugin-model/go/datasource" "github.com/grafana/grafana/pkg/components/null" "github.com/grafana/grafana/pkg/components/simplejson" "github.com/grafana/grafana/pkg/log" "github.com/grafana/grafana/pkg/models" "github.com/grafana/grafana/pkg/tsdb" - "github.com/grafana/grafana_plugin_model/go/datasource" ) func NewDatasourcePluginWrapper(log log.Logger, plugin datasource.DatasourcePlugin) *DatasourcePluginWrapper { diff --git a/pkg/plugins/datasource/wrapper/datasource_plugin_wrapper_test.go b/pkg/plugins/datasource/wrapper/datasource_plugin_wrapper_test.go index 7ada6fb6b03..e312913fc56 100644 --- 
a/pkg/plugins/datasource/wrapper/datasource_plugin_wrapper_test.go +++ b/pkg/plugins/datasource/wrapper/datasource_plugin_wrapper_test.go @@ -3,9 +3,9 @@ package wrapper import ( "testing" + "github.com/grafana/grafana-plugin-model/go/datasource" "github.com/grafana/grafana/pkg/log" "github.com/grafana/grafana/pkg/tsdb" - "github.com/grafana/grafana_plugin_model/go/datasource" ) func TestMapTables(t *testing.T) { diff --git a/pkg/plugins/datasource_plugin.go b/pkg/plugins/datasource_plugin.go index 114b71deefc..cef35a2e7d9 100644 --- a/pkg/plugins/datasource_plugin.go +++ b/pkg/plugins/datasource_plugin.go @@ -3,20 +3,17 @@ package plugins import ( "context" "encoding/json" - "fmt" "os" "os/exec" "path" "path/filepath" - "runtime" - "strings" "time" + "github.com/grafana/grafana-plugin-model/go/datasource" "github.com/grafana/grafana/pkg/log" "github.com/grafana/grafana/pkg/models" "github.com/grafana/grafana/pkg/plugins/datasource/wrapper" "github.com/grafana/grafana/pkg/tsdb" - "github.com/grafana/grafana_plugin_model/go/datasource" plugin "github.com/hashicorp/go-plugin" ) @@ -25,6 +22,7 @@ type DataSourcePlugin struct { Annotations bool `json:"annotations"` Metrics bool `json:"metrics"` Alerting bool `json:"alerting"` + Explore bool `json:"explore"` QueryOptions map[string]bool `json:"queryOptions,omitempty"` BuiltIn bool `json:"builtIn,omitempty"` Mixed bool `json:"mixed,omitempty"` @@ -66,16 +64,6 @@ var handshakeConfig = plugin.HandshakeConfig{ MagicCookieValue: "datasource", } -func composeBinaryName(executable, os, arch string) string { - var extension string - os = strings.ToLower(os) - if os == "windows" { - extension = ".exe" - } - - return fmt.Sprintf("%s_%s_%s%s", executable, os, strings.ToLower(arch), extension) -} - func (p *DataSourcePlugin) startBackendPlugin(ctx context.Context, log log.Logger) error { p.log = log.New("plugin-id", p.Id) @@ -88,7 +76,7 @@ func (p *DataSourcePlugin) startBackendPlugin(ctx context.Context, log log.Logge } func (p *DataSourcePlugin) spawnSubProcess() error { - cmd := composeBinaryName(p.Executable, runtime.GOOS, runtime.GOARCH) + cmd := ComposePluginStartCommmand(p.Executable) fullpath := path.Join(p.PluginDir, cmd) p.client = plugin.NewClient(&plugin.ClientConfig{ diff --git a/pkg/plugins/datasource_plugin_test.go b/pkg/plugins/datasource_plugin_test.go deleted file mode 100644 index 147f0310f5c..00000000000 --- a/pkg/plugins/datasource_plugin_test.go +++ /dev/null @@ -1,35 +0,0 @@ -package plugins - -import ( - "testing" -) - -func TestComposeBinaryName(t *testing.T) { - tests := []struct { - name string - os string - arch string - - expectedPath string - }{ - { - name: "simple-json", - os: "linux", - arch: "amd64", - expectedPath: `simple-json_linux_amd64`, - }, - { - name: "simple-json", - os: "windows", - arch: "amd64", - expectedPath: `simple-json_windows_amd64.exe`, - }, - } - - for _, v := range tests { - have := composeBinaryName(v.name, v.os, v.arch) - if have != v.expectedPath { - t.Errorf("expected %s got %s", v.expectedPath, have) - } - } -} diff --git a/pkg/plugins/plugins.go b/pkg/plugins/plugins.go index 5096bf5cebc..67eb0f51d70 100644 --- a/pkg/plugins/plugins.go +++ b/pkg/plugins/plugins.go @@ -26,6 +26,7 @@ var ( Apps map[string]*AppPlugin Plugins map[string]*PluginBase PluginTypes map[string]interface{} + Renderer *RendererPlugin GrafanaLatestVersion string GrafanaHasUpdate bool @@ -58,6 +59,7 @@ func (pm *PluginManager) Init() error { "panel": PanelPlugin{}, "datasource": DataSourcePlugin{}, "app": AppPlugin{}, + 
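// An illustrative plugin.json manifest for the new "renderer" plugin type
+ // (a hypothetical sketch, not part of this change) could look like:
+ //	{"type": "renderer", "id": "example-renderer", "executable": "plugin_start"}
+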
"renderer": RendererPlugin{}, } pm.log.Info("Starting plugin search") diff --git a/pkg/plugins/renderer_plugin.go b/pkg/plugins/renderer_plugin.go new file mode 100644 index 00000000000..286c670eb41 --- /dev/null +++ b/pkg/plugins/renderer_plugin.go @@ -0,0 +1,22 @@ +package plugins + +import "encoding/json" + +type RendererPlugin struct { + PluginBase + + Executable string `json:"executable,omitempty"` +} + +func (r *RendererPlugin) Load(decoder *json.Decoder, pluginDir string) error { + if err := decoder.Decode(&r); err != nil { + return err + } + + if err := r.registerPlugin(pluginDir); err != nil { + return err + } + + Renderer = r + return nil +} diff --git a/pkg/registry/registry.go b/pkg/registry/registry.go index ba3229d6df6..87fca27f6c1 100644 --- a/pkg/registry/registry.go +++ b/pkg/registry/registry.go @@ -2,32 +2,87 @@ package registry import ( "context" + "reflect" + "sort" + + "github.com/grafana/grafana/pkg/services/sqlstore/migrator" ) -var services = []Service{} - -func RegisterService(srv Service) { - services = append(services, srv) +type Descriptor struct { + Name string + Instance Service + InitPriority Priority } -func GetServices() []Service { +var services []*Descriptor + +func RegisterService(instance Service) { + services = append(services, &Descriptor{ + Name: reflect.TypeOf(instance).Elem().Name(), + Instance: instance, + InitPriority: Low, + }) +} + +func Register(descriptor *Descriptor) { + services = append(services, descriptor) +} + +func GetServices() []*Descriptor { + sort.Slice(services, func(i, j int) bool { + return services[i].InitPriority > services[j].InitPriority + }) + return services } +// Service interface is the lowest common shape that services +// are expected to forfill to be started within Grafana. type Service interface { + + // Init is called by Grafana main process which gives the service + // the possibility do some initial work before its started. Things + // like adding routes, bus handlers should be done in the Init function Init() error } -// Useful for alerting service +// CanBeDisabled allows the services to decide if it should +// be started or not by itself. This is useful for services +// that might not always be started, ex alerting. +// This will be called after `Init()`. type CanBeDisabled interface { + + // IsDisabled should return a bool saying if it can be started or not. IsDisabled() bool } +// BackgroundService should be implemented for services that have +// long running tasks in the background. type BackgroundService interface { + // Run starts the background process of the service after `Init` have been called + // on all services. The `context.Context` passed into the function should be used + // to subscribe to ctx.Done() so the service can be notified when Grafana shuts down. Run(ctx context.Context) error } +// DatabaseMigrator allows the caller to add migrations to +// the migrator passed as argument +type DatabaseMigrator interface { + + // AddMigrations allows the service to add migrations to + // the database migrator. 
+ AddMigration(mg *migrator.Migrator) } + +// IsDisabled takes a service and returns true if it is disabled func IsDisabled(srv Service) bool { canBeDisabled, ok := srv.(CanBeDisabled) return ok && canBeDisabled.IsDisabled() } + +type Priority int + +const ( + High Priority = 100 + Low Priority = 0 ) diff --git a/pkg/services/alerting/conditions/reducer.go b/pkg/services/alerting/conditions/reducer.go index 0a61c13fa12..1e8ae792746 100644 --- a/pkg/services/alerting/conditions/reducer.go +++ b/pkg/services/alerting/conditions/reducer.go @@ -108,9 +108,9 @@ func (s *SimpleReducer) Reduce(series *tsdb.TimeSeries) null.Float { break } } - // get other points + // get the oldest point points = points[0:i] - for i := len(points) - 1; i >= 0; i-- { + for i := 0; i < len(points); i++ { if points[i][0].Valid { allNull = false value = first - points[i][0].Float64 @@ -131,9 +131,9 @@ func (s *SimpleReducer) Reduce(series *tsdb.TimeSeries) null.Float { break } } - // get other points + // get the oldest point points = points[0:i] - for i := len(points) - 1; i >= 0; i-- { + for i := 0; i < len(points); i++ { if points[i][0].Valid { allNull = false val := (first - points[i][0].Float64) / points[i][0].Float64 * 100 diff --git a/pkg/services/alerting/conditions/reducer_test.go b/pkg/services/alerting/conditions/reducer_test.go index 866b574f59f..9d4e1462690 100644 --- a/pkg/services/alerting/conditions/reducer_test.go +++ b/pkg/services/alerting/conditions/reducer_test.go @@ -110,16 +110,35 @@ func TestSimpleReducer(t *testing.T) { So(reducer.Reduce(series).Float64, ShouldEqual, float64(3)) }) - Convey("diff", func() { + Convey("diff one point", func() { + result := testReducer("diff", 30) + So(result, ShouldEqual, float64(0)) + }) + + Convey("diff two points", func() { result := testReducer("diff", 30, 40) So(result, ShouldEqual, float64(10)) }) - Convey("percent_diff", func() { + Convey("diff three points", func() { + result := testReducer("diff", 30, 40, 40) + So(result, ShouldEqual, float64(10)) + }) + + Convey("percent_diff one point", func() { + result := testReducer("percent_diff", 40) + So(result, ShouldEqual, float64(0)) + }) + + Convey("percent_diff two points", func() { result := testReducer("percent_diff", 30, 40) So(result, ShouldEqual, float64(33.33333333333333)) }) + Convey("percent_diff three points", func() { + result := testReducer("percent_diff", 30, 40, 40) + So(result, ShouldEqual, float64(33.33333333333333)) + }) }) } diff --git a/pkg/services/alerting/engine.go b/pkg/services/alerting/engine.go index c23cf56ae47..0f8e24bcef5 100644 --- a/pkg/services/alerting/engine.go +++ b/pkg/services/alerting/engine.go @@ -12,11 +12,14 @@ import ( "github.com/benbjohnson/clock" "github.com/grafana/grafana/pkg/log" "github.com/grafana/grafana/pkg/registry" + "github.com/grafana/grafana/pkg/services/rendering" "github.com/grafana/grafana/pkg/setting" "golang.org/x/sync/errgroup" ) type AlertingService struct { + RenderService rendering.Service `inject:""` + execQueue chan *Job //clock clock.Clock ticker *Ticker @@ -48,7 +51,7 @@ func (e *AlertingService) Init() error { e.evalHandler = NewEvalHandler() e.ruleReader = NewRuleReader() e.log = log.New("alerting.engine") - e.resultHandler = NewResultHandler() + e.resultHandler = NewResultHandler(e.RenderService) return nil } diff --git a/pkg/services/alerting/eval_context_test.go b/pkg/services/alerting/eval_context_test.go index 709eeee4e5e..750fa959683 100644 --- a/pkg/services/alerting/eval_context_test.go +++ 
b/pkg/services/alerting/eval_context_test.go @@ -9,91 +9,93 @@ import ( . "github.com/smartystreets/goconvey/convey" ) +func TestStateIsUpdatedWhenNeeded(t *testing.T) { + ctx := NewEvalContext(context.TODO(), &Rule{Conditions: []Condition{&conditionStub{firing: true}}}) + + t.Run("ok -> alerting", func(t *testing.T) { + ctx.PrevAlertState = models.AlertStateOK + ctx.Rule.State = models.AlertStateAlerting + + if !ctx.ShouldUpdateAlertState() { + t.Fatalf("expected should updated to be true") + } + }) + + t.Run("ok -> ok", func(t *testing.T) { + ctx.PrevAlertState = models.AlertStateOK + ctx.Rule.State = models.AlertStateOK + + if ctx.ShouldUpdateAlertState() { + t.Fatalf("expected should updated to be false") + } + }) +} + func TestAlertingEvalContext(t *testing.T) { - Convey("Eval context", t, func() { + Convey("Should compute and replace properly new rule state", t, func() { ctx := NewEvalContext(context.TODO(), &Rule{Conditions: []Condition{&conditionStub{firing: true}}}) + dummieError := fmt.Errorf("dummie error") - Convey("Should update alert state when needed", func() { + Convey("ok -> alerting", func() { + ctx.PrevAlertState = models.AlertStateOK + ctx.Firing = true - Convey("ok -> alerting", func() { - ctx.PrevAlertState = models.AlertStateOK - ctx.Rule.State = models.AlertStateAlerting - - So(ctx.ShouldUpdateAlertState(), ShouldBeTrue) - }) - - Convey("ok -> ok", func() { - ctx.PrevAlertState = models.AlertStateOK - ctx.Rule.State = models.AlertStateOK - - So(ctx.ShouldUpdateAlertState(), ShouldBeFalse) - }) + ctx.Rule.State = ctx.GetNewState() + So(ctx.Rule.State, ShouldEqual, models.AlertStateAlerting) }) - Convey("Should compute and replace properly new rule state", func() { - dummieError := fmt.Errorf("dummie error") + Convey("ok -> error(alerting)", func() { + ctx.PrevAlertState = models.AlertStateOK + ctx.Error = dummieError + ctx.Rule.ExecutionErrorState = models.ExecutionErrorSetAlerting - Convey("ok -> alerting", func() { - ctx.PrevAlertState = models.AlertStateOK - ctx.Firing = true + ctx.Rule.State = ctx.GetNewState() + So(ctx.Rule.State, ShouldEqual, models.AlertStateAlerting) + }) - ctx.Rule.State = ctx.GetNewState() - So(ctx.Rule.State, ShouldEqual, models.AlertStateAlerting) - }) + Convey("ok -> error(keep_last)", func() { + ctx.PrevAlertState = models.AlertStateOK + ctx.Error = dummieError + ctx.Rule.ExecutionErrorState = models.ExecutionErrorKeepState - Convey("ok -> error(alerting)", func() { - ctx.PrevAlertState = models.AlertStateOK - ctx.Error = dummieError - ctx.Rule.ExecutionErrorState = models.ExecutionErrorSetAlerting + ctx.Rule.State = ctx.GetNewState() + So(ctx.Rule.State, ShouldEqual, models.AlertStateOK) + }) - ctx.Rule.State = ctx.GetNewState() - So(ctx.Rule.State, ShouldEqual, models.AlertStateAlerting) - }) + Convey("pending -> error(keep_last)", func() { + ctx.PrevAlertState = models.AlertStatePending + ctx.Error = dummieError + ctx.Rule.ExecutionErrorState = models.ExecutionErrorKeepState - Convey("ok -> error(keep_last)", func() { - ctx.PrevAlertState = models.AlertStateOK - ctx.Error = dummieError - ctx.Rule.ExecutionErrorState = models.ExecutionErrorKeepState + ctx.Rule.State = ctx.GetNewState() + So(ctx.Rule.State, ShouldEqual, models.AlertStatePending) + }) - ctx.Rule.State = ctx.GetNewState() - So(ctx.Rule.State, ShouldEqual, models.AlertStateOK) - }) + Convey("ok -> no_data(alerting)", func() { + ctx.PrevAlertState = models.AlertStateOK + ctx.Rule.NoDataState = models.NoDataSetAlerting + ctx.NoDataFound = true - Convey("pending -> 
error(keep_last)", func() { - ctx.PrevAlertState = models.AlertStatePending - ctx.Error = dummieError - ctx.Rule.ExecutionErrorState = models.ExecutionErrorKeepState + ctx.Rule.State = ctx.GetNewState() + So(ctx.Rule.State, ShouldEqual, models.AlertStateAlerting) + }) - ctx.Rule.State = ctx.GetNewState() - So(ctx.Rule.State, ShouldEqual, models.AlertStatePending) - }) + Convey("ok -> no_data(keep_last)", func() { + ctx.PrevAlertState = models.AlertStateOK + ctx.Rule.NoDataState = models.NoDataKeepState + ctx.NoDataFound = true - Convey("ok -> no_data(alerting)", func() { - ctx.PrevAlertState = models.AlertStateOK - ctx.Rule.NoDataState = models.NoDataSetAlerting - ctx.NoDataFound = true + ctx.Rule.State = ctx.GetNewState() + So(ctx.Rule.State, ShouldEqual, models.AlertStateOK) + }) - ctx.Rule.State = ctx.GetNewState() - So(ctx.Rule.State, ShouldEqual, models.AlertStateAlerting) - }) + Convey("pending -> no_data(keep_last)", func() { + ctx.PrevAlertState = models.AlertStatePending + ctx.Rule.NoDataState = models.NoDataKeepState + ctx.NoDataFound = true - Convey("ok -> no_data(keep_last)", func() { - ctx.PrevAlertState = models.AlertStateOK - ctx.Rule.NoDataState = models.NoDataKeepState - ctx.NoDataFound = true - - ctx.Rule.State = ctx.GetNewState() - So(ctx.Rule.State, ShouldEqual, models.AlertStateOK) - }) - - Convey("pending -> no_data(keep_last)", func() { - ctx.PrevAlertState = models.AlertStatePending - ctx.Rule.NoDataState = models.NoDataKeepState - ctx.NoDataFound = true - - ctx.Rule.State = ctx.GetNewState() - So(ctx.Rule.State, ShouldEqual, models.AlertStatePending) - }) + ctx.Rule.State = ctx.GetNewState() + So(ctx.Rule.State, ShouldEqual, models.AlertStatePending) }) }) } diff --git a/pkg/services/alerting/extractor_test.go b/pkg/services/alerting/extractor_test.go index 861e9b9cbfc..c7212e48174 100644 --- a/pkg/services/alerting/extractor_test.go +++ b/pkg/services/alerting/extractor_test.go @@ -50,7 +50,7 @@ func TestAlertRuleExtraction(t *testing.T) { So(err, ShouldBeNil) Convey("Extractor should not modify the original json", func() { - dashJson, err := simplejson.NewJson([]byte(json)) + dashJson, err := simplejson.NewJson(json) So(err, ShouldBeNil) dash := m.NewDashboardFromJson(dashJson) @@ -79,7 +79,7 @@ func TestAlertRuleExtraction(t *testing.T) { Convey("Parsing and validating dashboard containing graphite alerts", func() { - dashJson, err := simplejson.NewJson([]byte(json)) + dashJson, err := simplejson.NewJson(json) So(err, ShouldBeNil) dash := m.NewDashboardFromJson(dashJson) @@ -143,7 +143,7 @@ func TestAlertRuleExtraction(t *testing.T) { panelWithoutId, err := ioutil.ReadFile("./test-data/panels-missing-id.json") So(err, ShouldBeNil) - dashJson, err := simplejson.NewJson([]byte(panelWithoutId)) + dashJson, err := simplejson.NewJson(panelWithoutId) So(err, ShouldBeNil) dash := m.NewDashboardFromJson(dashJson) extractor := NewDashAlertExtractor(dash, 1) @@ -159,7 +159,7 @@ func TestAlertRuleExtraction(t *testing.T) { panelWithIdZero, err := ioutil.ReadFile("./test-data/panel-with-id-0.json") So(err, ShouldBeNil) - dashJson, err := simplejson.NewJson([]byte(panelWithIdZero)) + dashJson, err := simplejson.NewJson(panelWithIdZero) So(err, ShouldBeNil) dash := m.NewDashboardFromJson(dashJson) extractor := NewDashAlertExtractor(dash, 1) diff --git a/pkg/services/alerting/notifier.go b/pkg/services/alerting/notifier.go index 1d5affbd3ec..07212746f7e 100644 --- a/pkg/services/alerting/notifier.go +++ b/pkg/services/alerting/notifier.go @@ -3,14 +3,15 @@ package alerting 
import ( "errors" "fmt" + "time" "golang.org/x/sync/errgroup" "github.com/grafana/grafana/pkg/bus" "github.com/grafana/grafana/pkg/components/imguploader" - "github.com/grafana/grafana/pkg/components/renderer" "github.com/grafana/grafana/pkg/log" "github.com/grafana/grafana/pkg/metrics" + "github.com/grafana/grafana/pkg/services/rendering" m "github.com/grafana/grafana/pkg/models" ) @@ -27,18 +28,16 @@ type NotificationService interface { SendIfNeeded(context *EvalContext) error } -func NewNotificationService() NotificationService { - return newNotificationService() +func NewNotificationService(renderService rendering.Service) NotificationService { + return ¬ificationService{ + log: log.New("alerting.notifier"), + renderService: renderService, + } } type notificationService struct { - log log.Logger -} - -func newNotificationService() *notificationService { - return ¬ificationService{ - log: log.New("alerting.notifier"), - } + log log.Logger + renderService rendering.Service } func (n *notificationService) SendIfNeeded(context *EvalContext) error { @@ -79,32 +78,36 @@ func (n *notificationService) uploadImage(context *EvalContext) (err error) { return err } - renderOpts := &renderer.RenderOpts{ - Width: "800", - Height: "400", - Timeout: "30", - OrgId: context.Rule.OrgId, - IsAlertContext: true, + renderOpts := rendering.Opts{ + Width: 1000, + Height: 500, + Timeout: time.Second * 30, + OrgId: context.Rule.OrgId, + OrgRole: m.ROLE_ADMIN, } ref, err := context.GetDashboardUID() if err != nil { return err } + renderOpts.Path = fmt.Sprintf("d-solo/%s/%s?panelId=%d", ref.Uid, ref.Slug, context.Rule.PanelId) - imagePath, err := renderer.RenderToPng(renderOpts) + result, err := n.renderService.Render(context.Ctx, renderOpts) if err != nil { return err } - context.ImageOnDiskPath = imagePath + context.ImageOnDiskPath = result.FilePath context.ImagePublicUrl, err = uploader.Upload(context.Ctx, context.ImageOnDiskPath) if err != nil { return err } - n.log.Info("uploaded", "url", context.ImagePublicUrl) + if context.ImagePublicUrl != "" { + n.log.Info("uploaded screenshot of alert to external image store", "url", context.ImagePublicUrl) + } + return nil } diff --git a/pkg/services/alerting/notifiers/base.go b/pkg/services/alerting/notifiers/base.go index 51676efdfd5..868db3aec79 100644 --- a/pkg/services/alerting/notifiers/base.go +++ b/pkg/services/alerting/notifiers/base.go @@ -3,6 +3,7 @@ package notifiers import ( "github.com/grafana/grafana/pkg/components/simplejson" m "github.com/grafana/grafana/pkg/models" + "github.com/grafana/grafana/pkg/services/alerting" ) diff --git a/pkg/services/alerting/notifiers/teams.go b/pkg/services/alerting/notifiers/teams.go index 7f62340d0e1..4e34e16ab51 100644 --- a/pkg/services/alerting/notifiers/teams.go +++ b/pkg/services/alerting/notifiers/teams.go @@ -41,10 +41,8 @@ func NewTeamsNotifier(model *m.AlertNotification) (alerting.Notifier, error) { type TeamsNotifier struct { NotifierBase - Url string - Recipient string - Mention string - log log.Logger + Url string + log log.Logger } func (this *TeamsNotifier) Notify(evalContext *alerting.EvalContext) error { @@ -75,17 +73,17 @@ func (this *TeamsNotifier) Notify(evalContext *alerting.EvalContext) error { }) } - message := this.Mention - if evalContext.Rule.State != m.AlertStateOK { //don't add message when going back to alert state ok. 
- message += " " + evalContext.Rule.Message - } else { - message += " " // summary must not be empty + message := "" + if evalContext.Rule.State != m.AlertStateOK { //dont add message when going back to alert state ok. + message = evalContext.Rule.Message } body := map[string]interface{}{ - "@type": "MessageCard", - "@context": "http://schema.org/extensions", - "summary": message, + "@type": "MessageCard", + "@context": "http://schema.org/extensions", + // summary MUST not be empty or the webhook request fails + // summary SHOULD contain some meaningful information, since it is used for mobile notifications + "summary": evalContext.GetNotificationTitle(), "title": evalContext.GetNotificationTitle(), "themeColor": evalContext.GetStateModel().Color, "sections": []map[string]interface{}{ diff --git a/pkg/services/alerting/result_handler.go b/pkg/services/alerting/result_handler.go index 0c92fc32110..c57b28c7c3e 100644 --- a/pkg/services/alerting/result_handler.go +++ b/pkg/services/alerting/result_handler.go @@ -9,6 +9,7 @@ import ( "github.com/grafana/grafana/pkg/metrics" m "github.com/grafana/grafana/pkg/models" "github.com/grafana/grafana/pkg/services/annotations" + "github.com/grafana/grafana/pkg/services/rendering" ) type ResultHandler interface { @@ -20,10 +21,10 @@ type DefaultResultHandler struct { log log.Logger } -func NewResultHandler() *DefaultResultHandler { +func NewResultHandler(renderService rendering.Service) *DefaultResultHandler { return &DefaultResultHandler{ log: log.New("alerting.resultHandler"), - notifier: NewNotificationService(), + notifier: NewNotificationService(renderService), } } diff --git a/pkg/services/alerting/test_notification.go b/pkg/services/alerting/test_notification.go index 7dc9a150d92..8421360b5ed 100644 --- a/pkg/services/alerting/test_notification.go +++ b/pkg/services/alerting/test_notification.go @@ -24,7 +24,7 @@ func init() { } func handleNotificationTestCommand(cmd *NotificationTestCommand) error { - notifier := newNotificationService() + notifier := NewNotificationService(nil).(*notificationService) model := &m.AlertNotification{ Name: cmd.Name, diff --git a/pkg/services/annotations/annotations.go b/pkg/services/annotations/annotations.go index 5cebb3d2df9..9b490169d3b 100644 --- a/pkg/services/annotations/annotations.go +++ b/pkg/services/annotations/annotations.go @@ -35,11 +35,12 @@ type PostParams struct { } type DeleteParams struct { - Id int64 `json:"id"` - AlertId int64 `json:"alertId"` - DashboardId int64 `json:"dashboardId"` - PanelId int64 `json:"panelId"` - RegionId int64 `json:"regionId"` + OrgId int64 + Id int64 + AlertId int64 + DashboardId int64 + PanelId int64 + RegionId int64 } var repositoryInstance Repository diff --git a/pkg/services/cleanup/cleanup.go b/pkg/services/cleanup/cleanup.go index 69bc7695dea..521601a358b 100644 --- a/pkg/services/cleanup/cleanup.go +++ b/pkg/services/cleanup/cleanup.go @@ -57,8 +57,10 @@ func (srv *CleanUpService) cleanUpTmpFiles() { } var toDelete []os.FileInfo + var now = time.Now() + for _, file := range files { - if file.ModTime().AddDate(0, 0, 1).Before(time.Now()) { + if srv.shouldCleanupTempFile(file.ModTime(), now) { toDelete = append(toDelete, file) } } @@ -74,6 +76,14 @@ func (srv *CleanUpService) cleanUpTmpFiles() { srv.log.Debug("Found old rendered image to delete", "deleted", len(toDelete), "keept", len(files)) } +func (srv *CleanUpService) shouldCleanupTempFile(filemtime time.Time, now time.Time) bool { + if srv.Cfg.TempDataLifetime == 0 { + return false + } + + return 
filemtime.Add(srv.Cfg.TempDataLifetime).Before(now) +} + func (srv *CleanUpService) deleteExpiredSnapshots() { cmd := m.DeleteExpiredSnapshotsCommand{} if err := bus.Dispatch(&cmd); err != nil { diff --git a/pkg/services/cleanup/cleanup_test.go b/pkg/services/cleanup/cleanup_test.go new file mode 100644 index 00000000000..54d29e32bf1 --- /dev/null +++ b/pkg/services/cleanup/cleanup_test.go @@ -0,0 +1,41 @@ +package cleanup + +import ( + "github.com/grafana/grafana/pkg/setting" + . "github.com/smartystreets/goconvey/convey" + "testing" + "time" +) + +func TestCleanUpTmpFiles(t *testing.T) { + Convey("Cleanup service tests", t, func() { + cfg := setting.Cfg{} + cfg.TempDataLifetime, _ = time.ParseDuration("24h") + service := CleanUpService{ + Cfg: &cfg, + } + now := time.Now() + secondAgo := now.Add(-time.Second) + twoDaysAgo := now.Add(-time.Second * 3600 * 24 * 2) + weekAgo := now.Add(-time.Second * 3600 * 24 * 7) + + Convey("Should not cleanup recent files", func() { + So(service.shouldCleanupTempFile(secondAgo, now), ShouldBeFalse) + }) + + Convey("Should cleanup older files", func() { + So(service.shouldCleanupTempFile(twoDaysAgo, now), ShouldBeTrue) + }) + + Convey("After increasing temporary files lifetime, older files should be kept", func() { + cfg.TempDataLifetime, _ = time.ParseDuration("1000h") + So(service.shouldCleanupTempFile(weekAgo, now), ShouldBeFalse) + }) + + Convey("If lifetime is 0, files should never be cleaned up", func() { + cfg.TempDataLifetime = 0 + So(service.shouldCleanupTempFile(weekAgo, now), ShouldBeFalse) + }) + }) + +} diff --git a/pkg/services/guardian/guardian.go b/pkg/services/guardian/guardian.go index bf455adc7ca..7506338c5f0 100644 --- a/pkg/services/guardian/guardian.go +++ b/pkg/services/guardian/guardian.go @@ -30,7 +30,7 @@ type dashboardGuardianImpl struct { dashId int64 orgId int64 acl []*m.DashboardAclInfoDTO - groups []*m.Team + teams []*m.TeamDTO log log.Logger } @@ -83,7 +83,7 @@ func (g *dashboardGuardianImpl) checkAcl(permission m.PermissionType, acl []*m.D for _, p := range acl { // user match - if !g.user.IsAnonymous { + if !g.user.IsAnonymous && p.UserId > 0 { if p.UserId == g.user.UserId && p.Permission >= permission { return true, nil } @@ -186,15 +186,15 @@ func (g *dashboardGuardianImpl) GetAcl() ([]*m.DashboardAclInfoDTO, error) { return g.acl, nil } -func (g *dashboardGuardianImpl) getTeams() ([]*m.Team, error) { - if g.groups != nil { - return g.groups, nil +func (g *dashboardGuardianImpl) getTeams() ([]*m.TeamDTO, error) { + if g.teams != nil { + return g.teams, nil } query := m.GetTeamsByUserQuery{OrgId: g.orgId, UserId: g.user.UserId} err := bus.Dispatch(&query) - g.groups = query.Result + g.teams = query.Result return query.Result, err } diff --git a/pkg/services/guardian/guardian_test.go b/pkg/services/guardian/guardian_test.go index 5e56b1d88c3..4704519b38d 100644 --- a/pkg/services/guardian/guardian_test.go +++ b/pkg/services/guardian/guardian_test.go @@ -28,7 +28,7 @@ func TestGuardianAdmin(t *testing.T) { Convey("Guardian admin org role tests", t, func() { orgRoleScenario("Given user has admin org role", t, m.ROLE_ADMIN, func(sc *scenarioContext) { // dashboard has default permissions - sc.defaultPermissionScenario(USER, m.PERMISSION_ADMIN, FULL_ACCESS) + sc.defaultPermissionScenario(USER, FULL_ACCESS) // dashboard has user with permission sc.dashboardPermissionScenario(USER, m.PERMISSION_ADMIN, FULL_ACCESS) @@ -76,6 +76,9 @@ func TestGuardianAdmin(t *testing.T) { func TestGuardianEditor(t *testing.T) { Convey("Guardian 
editor org role tests", t, func() { orgRoleScenario("Given user has editor org role", t, m.ROLE_EDITOR, func(sc *scenarioContext) { + // dashboard has default permissions + sc.defaultPermissionScenario(USER, EDITOR_ACCESS) + // dashboard has user with permission sc.dashboardPermissionScenario(USER, m.PERMISSION_ADMIN, FULL_ACCESS) sc.dashboardPermissionScenario(USER, m.PERMISSION_EDIT, EDITOR_ACCESS) @@ -122,6 +125,9 @@ func TestGuardianEditor(t *testing.T) { func TestGuardianViewer(t *testing.T) { Convey("Guardian viewer org role tests", t, func() { orgRoleScenario("Given user has viewer org role", t, m.ROLE_VIEWER, func(sc *scenarioContext) { + // dashboard has default permissions + sc.defaultPermissionScenario(USER, VIEWER_ACCESS) + // dashboard has user with permission sc.dashboardPermissionScenario(USER, m.PERMISSION_ADMIN, FULL_ACCESS) sc.dashboardPermissionScenario(USER, m.PERMISSION_EDIT, EDITOR_ACCESS) @@ -162,10 +168,15 @@ func TestGuardianViewer(t *testing.T) { sc.parentFolderPermissionScenario(VIEWER, m.PERMISSION_EDIT, EDITOR_ACCESS) sc.parentFolderPermissionScenario(VIEWER, m.PERMISSION_VIEW, VIEWER_ACCESS) }) + + apiKeyScenario("Given api key with viewer role", t, m.ROLE_VIEWER, func(sc *scenarioContext) { + // dashboard has default permissions + sc.defaultPermissionScenario(VIEWER, VIEWER_ACCESS) + }) }) } -func (sc *scenarioContext) defaultPermissionScenario(pt permissionType, permission m.PermissionType, flag permissionFlags) { +func (sc *scenarioContext) defaultPermissionScenario(pt permissionType, flag permissionFlags) { _, callerFile, callerLine, _ := runtime.Caller(1) sc.callerFile = callerFile sc.callerLine = callerLine @@ -267,7 +278,7 @@ func (sc *scenarioContext) verifyExpectedPermissionsFlags() { actualFlag = NO_ACCESS } - if sc.expectedFlags&actualFlag != sc.expectedFlags { + if actualFlag&sc.expectedFlags != actualFlag { sc.reportFailure(tc, sc.expectedFlags.String(), actualFlag.String()) } diff --git a/pkg/services/guardian/guardian_util_test.go b/pkg/services/guardian/guardian_util_test.go index b065c4194ad..d85548ecb8c 100644 --- a/pkg/services/guardian/guardian_util_test.go +++ b/pkg/services/guardian/guardian_util_test.go @@ -19,7 +19,7 @@ type scenarioContext struct { givenUser *m.SignedInUser givenDashboardID int64 givenPermissions []*m.DashboardAclInfoDTO - givenTeams []*m.Team + givenTeams []*m.TeamDTO updatePermissions []*m.DashboardAcl expectedFlags permissionFlags callerFile string @@ -48,6 +48,27 @@ func orgRoleScenario(desc string, t *testing.T, role m.RoleType, fn scenarioFunc }) } +func apiKeyScenario(desc string, t *testing.T, role m.RoleType, fn scenarioFunc) { + user := &m.SignedInUser{ + UserId: 0, + OrgId: orgID, + OrgRole: role, + ApiKeyId: 10, + } + guard := New(dashboardID, orgID, user) + sc := &scenarioContext{ + t: t, + orgRoleScenario: desc, + givenUser: user, + givenDashboardID: dashboardID, + g: guard, + } + + Convey(desc, func() { + fn(sc) + }) +} + func permissionScenario(desc string, dashboardID int64, sc *scenarioContext, permissions []*m.DashboardAclInfoDTO, fn scenarioFunc) { bus.ClearBusHandlers() @@ -63,11 +84,11 @@ func permissionScenario(desc string, dashboardID int64, sc *scenarioContext, per return nil }) - teams := []*m.Team{} + teams := []*m.TeamDTO{} for _, p := range permissions { if p.TeamId > 0 { - teams = append(teams, &m.Team{Id: p.TeamId}) + teams = append(teams, &m.TeamDTO{Id: p.TeamId}) } } diff --git a/pkg/services/notifications/notifications.go b/pkg/services/notifications/notifications.go index 
14d362c5e1e..769fdd06fd0 100644 --- a/pkg/services/notifications/notifications.go +++ b/pkg/services/notifications/notifications.go @@ -45,8 +45,8 @@ func (ns *NotificationService) Init() error { ns.Bus.AddHandler(ns.validateResetPasswordCode) ns.Bus.AddHandler(ns.sendEmailCommandHandler) - ns.Bus.AddCtxHandler(ns.sendEmailCommandHandlerSync) - ns.Bus.AddCtxHandler(ns.SendWebhookSync) + ns.Bus.AddHandlerCtx(ns.sendEmailCommandHandlerSync) + ns.Bus.AddHandlerCtx(ns.SendWebhookSync) ns.Bus.AddEventListener(ns.signUpStartedHandler) ns.Bus.AddEventListener(ns.signUpCompletedHandler) @@ -98,8 +98,6 @@ func (ns *NotificationService) Run(ctx context.Context) error { return ctx.Err() } } - - return nil } func (ns *NotificationService) SendWebhookSync(ctx context.Context, cmd *m.SendWebhookSync) error { diff --git a/pkg/services/provisioning/dashboards/config_reader.go b/pkg/services/provisioning/dashboards/config_reader.go index 4f9577f82db..7508550838f 100644 --- a/pkg/services/provisioning/dashboards/config_reader.go +++ b/pkg/services/provisioning/dashboards/config_reader.go @@ -81,6 +81,10 @@ func (cr *configReader) readConfig() ([]*DashboardsAsConfig, error) { if dashboards[i].OrgId == 0 { dashboards[i].OrgId = 1 } + + if dashboards[i].UpdateIntervalSeconds == 0 { + dashboards[i].UpdateIntervalSeconds = 3 + } } return dashboards, nil diff --git a/pkg/services/provisioning/dashboards/config_reader_test.go b/pkg/services/provisioning/dashboards/config_reader_test.go index 72664c37990..df0d2ae038e 100644 --- a/pkg/services/provisioning/dashboards/config_reader_test.go +++ b/pkg/services/provisioning/dashboards/config_reader_test.go @@ -8,9 +8,9 @@ import ( ) var ( - simpleDashboardConfig = "./test-configs/dashboards-from-disk" - oldVersion = "./test-configs/version-0" - brokenConfigs = "./test-configs/broken-configs" + simpleDashboardConfig = "./testdata/test-configs/dashboards-from-disk" + oldVersion = "./testdata/test-configs/version-0" + brokenConfigs = "./testdata/test-configs/broken-configs" ) func TestDashboardsAsConfig(t *testing.T) { @@ -22,7 +22,7 @@ func TestDashboardsAsConfig(t *testing.T) { cfg, err := cfgProvider.readConfig() So(err, ShouldBeNil) - validateDashboardAsConfig(cfg) + validateDashboardAsConfig(t, cfg) }) Convey("Can read config file in version 0 format", func() { @@ -30,7 +30,7 @@ func TestDashboardsAsConfig(t *testing.T) { cfg, err := cfgProvider.readConfig() So(err, ShouldBeNil) - validateDashboardAsConfig(cfg) + validateDashboardAsConfig(t, cfg) }) Convey("Should skip invalid path", func() { @@ -56,7 +56,9 @@ func TestDashboardsAsConfig(t *testing.T) { }) }) } -func validateDashboardAsConfig(cfg []*DashboardsAsConfig) { +func validateDashboardAsConfig(t *testing.T, cfg []*DashboardsAsConfig) { + t.Helper() + So(len(cfg), ShouldEqual, 2) ds := cfg[0] @@ -68,6 +70,7 @@ func validateDashboardAsConfig(cfg []*DashboardsAsConfig) { So(len(ds.Options), ShouldEqual, 1) So(ds.Options["path"], ShouldEqual, "/var/lib/grafana/dashboards") So(ds.DisableDeletion, ShouldBeTrue) + So(ds.UpdateIntervalSeconds, ShouldEqual, 10) ds2 := cfg[1] So(ds2.Name, ShouldEqual, "default") @@ -78,4 +81,5 @@ func validateDashboardAsConfig(cfg []*DashboardsAsConfig) { So(len(ds2.Options), ShouldEqual, 1) So(ds2.Options["path"], ShouldEqual, "/var/lib/grafana/dashboards") So(ds2.DisableDeletion, ShouldBeFalse) + So(ds2.UpdateIntervalSeconds, ShouldEqual, 3) } diff --git a/pkg/services/provisioning/dashboards/file_reader.go b/pkg/services/provisioning/dashboards/file_reader.go index 
e5186e12f06..ef27ba97235 100644 --- a/pkg/services/provisioning/dashboards/file_reader.go +++ b/pkg/services/provisioning/dashboards/file_reader.go @@ -4,12 +4,14 @@ import ( "context" "errors" "fmt" + "io/ioutil" "os" "path/filepath" "strings" "time" "github.com/grafana/grafana/pkg/services/dashboards" + "github.com/grafana/grafana/pkg/util" "github.com/grafana/grafana/pkg/bus" @@ -19,8 +21,6 @@ import ( ) var ( - checkDiskForChangesInterval = time.Second * 3 - ErrFolderNameMissing = errors.New("Folder name missing") ) @@ -47,6 +47,22 @@ func NewDashboardFileReader(cfg *DashboardsAsConfig, log log.Logger) (*fileReade log.Error("Cannot read directory", "error", err) } + copy := path + path, err := filepath.Abs(path) + if err != nil { + log.Error("Could not create absolute path ", "path", path) + } + + path, err = filepath.EvalSymlinks(path) + if err != nil { + log.Error("Failed to read content of symlinked path: %s", path) + } + + if path == "" { + path = copy + log.Info("falling back to original path due to EvalSymlink/Abs failure") + } + return &fileReader{ Cfg: cfg, Path: path, @@ -60,7 +76,7 @@ func (fr *fileReader) ReadAndListen(ctx context.Context) error { fr.log.Error("failed to search for dashboards", "error", err) } - ticker := time.NewTicker(checkDiskForChangesInterval) + ticker := time.NewTicker(time.Duration(int64(time.Second) * fr.Cfg.UpdateIntervalSeconds)) running := false @@ -153,15 +169,20 @@ func (fr *fileReader) saveDashboard(path string, folderId int64, fileInfo os.Fil } provisionedData, alreadyProvisioned := provisionedDashboardRefs[path] - upToDate := alreadyProvisioned && provisionedData.Updated == resolvedFileInfo.ModTime().Unix() + upToDate := alreadyProvisioned && provisionedData.Updated >= resolvedFileInfo.ModTime().Unix() - dash, err := fr.readDashboardFromFile(path, resolvedFileInfo.ModTime(), folderId) + jsonFile, err := fr.readDashboardFromFile(path, resolvedFileInfo.ModTime(), folderId) if err != nil { fr.log.Error("failed to load dashboard from ", "file", path, "error", err) return provisioningMetadata, nil } + if provisionedData != nil && jsonFile.checkSum == provisionedData.CheckSum { + upToDate = true + } + // keeps track of what uid's and title's we have already provisioned + dash := jsonFile.dashboard provisioningMetadata.uid = dash.Dashboard.Uid provisioningMetadata.title = dash.Dashboard.Title @@ -179,7 +200,13 @@ func (fr *fileReader) saveDashboard(path string, folderId int64, fileInfo os.Fil } fr.log.Debug("saving new dashboard", "file", path) - dp := &models.DashboardProvisioning{ExternalId: path, Name: fr.Cfg.Name, Updated: resolvedFileInfo.ModTime().Unix()} + dp := &models.DashboardProvisioning{ + ExternalId: path, + Name: fr.Cfg.Name, + Updated: resolvedFileInfo.ModTime().Unix(), + CheckSum: jsonFile.checkSum, + } + _, err = fr.dashboardService.SaveProvisionedDashboard(dash, dp) return provisioningMetadata, err } @@ -277,14 +304,30 @@ func validateWalkablePath(fileInfo os.FileInfo) (bool, error) { return true, nil } -func (fr *fileReader) readDashboardFromFile(path string, lastModified time.Time, folderId int64) (*dashboards.SaveDashboardDTO, error) { +type dashboardJsonFile struct { + dashboard *dashboards.SaveDashboardDTO + checkSum string + lastModified time.Time +} + +func (fr *fileReader) readDashboardFromFile(path string, lastModified time.Time, folderId int64) (*dashboardJsonFile, error) { reader, err := os.Open(path) if err != nil { return nil, err } defer reader.Close() - data, err := simplejson.NewFromReader(reader) + all, err := 
ioutil.ReadAll(reader) + if err != nil { + return nil, err + } + + checkSum, err := util.Md5SumString(string(all)) + if err != nil { + return nil, err + } + + data, err := simplejson.NewJson(all) if err != nil { return nil, err } @@ -294,7 +337,11 @@ func (fr *fileReader) readDashboardFromFile(path string, lastModified time.Time, return nil, err } - return dash, nil + return &dashboardJsonFile{ + dashboard: dash, + checkSum: checkSum, + lastModified: lastModified, + }, nil } type provisioningMetadata struct { @@ -322,7 +369,6 @@ func (checker provisioningSanityChecker) track(pm provisioningMetadata) { if len(pm.title) > 0 { checker.titleUsage[pm.title] += 1 } - } func (checker provisioningSanityChecker) logWarnings(log log.Logger) { @@ -337,5 +383,4 @@ func (checker provisioningSanityChecker) logWarnings(log log.Logger) { log.Error("the same 'title' is used more than once", "title", title, "provider", checker.provisioningProvider) } } - } diff --git a/pkg/services/provisioning/dashboards/file_reader_linux_test.go b/pkg/services/provisioning/dashboards/file_reader_linux_test.go new file mode 100644 index 00000000000..9d4cdae8609 --- /dev/null +++ b/pkg/services/provisioning/dashboards/file_reader_linux_test.go @@ -0,0 +1,39 @@ +// +build linux + +package dashboards + +import ( + "path/filepath" + "testing" + + "github.com/grafana/grafana/pkg/log" +) + +var ( + symlinkedFolder = "testdata/test-dashboards/symlink" +) + +func TestProvisionedSymlinkedFolder(t *testing.T) { + cfg := &DashboardsAsConfig{ + Name: "Default", + Type: "file", + OrgId: 1, + Folder: "", + Options: map[string]interface{}{"path": symlinkedFolder}, + } + + reader, err := NewDashboardFileReader(cfg, log.New("test-logger")) + if err != nil { + t.Error("expected err to be nil") + } + + want, err := filepath.Abs(containingId) + + if err != nil { + t.Errorf("expected err to be nil") + } + + if reader.Path != want { + t.Errorf("got %s want %s", reader.Path, want) + } +} diff --git a/pkg/services/provisioning/dashboards/file_reader_test.go b/pkg/services/provisioning/dashboards/file_reader_test.go index 8a301987ea6..bdc1e95aafe 100644 --- a/pkg/services/provisioning/dashboards/file_reader_test.go +++ b/pkg/services/provisioning/dashboards/file_reader_test.go @@ -3,6 +3,7 @@ package dashboards import ( "os" "path/filepath" + "runtime" "testing" "time" @@ -15,14 +16,62 @@ import ( ) var ( - defaultDashboards = "./test-dashboards/folder-one" - brokenDashboards = "./test-dashboards/broken-dashboards" - oneDashboard = "./test-dashboards/one-dashboard" - containingId = "./test-dashboards/containing-id" + defaultDashboards = "testdata/test-dashboards/folder-one" + brokenDashboards = "testdata/test-dashboards/broken-dashboards" + oneDashboard = "testdata/test-dashboards/one-dashboard" + containingId = "testdata/test-dashboards/containing-id" fakeService *fakeDashboardProvisioningService ) +func TestCreatingNewDashboardFileReader(t *testing.T) { + Convey("creating new dashboard file reader", t, func() { + cfg := &DashboardsAsConfig{ + Name: "Default", + Type: "file", + OrgId: 1, + Folder: "", + Options: map[string]interface{}{}, + } + + Convey("using path parameter", func() { + cfg.Options["path"] = defaultDashboards + reader, err := NewDashboardFileReader(cfg, log.New("test-logger")) + So(err, ShouldBeNil) + So(reader.Path, ShouldNotEqual, "") + }) + + Convey("using folder as options", func() { + cfg.Options["folder"] = defaultDashboards + reader, err := NewDashboardFileReader(cfg, log.New("test-logger")) + So(err, ShouldBeNil) + 
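// reader.Path is expected to be non-empty and absolute here: NewDashboardFileReader
+ // now resolves the configured folder through filepath.Abs and filepath.EvalSymlinks,
+ // falling back to the raw option value only if that resolution fails.
+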
So(reader.Path, ShouldNotEqual, "") + }) + + Convey("using full path", func() { + fullPath := "/var/lib/grafana/dashboards" + if runtime.GOOS == "windows" { + fullPath = `c:\var\lib\grafana` + } + + cfg.Options["folder"] = fullPath + reader, err := NewDashboardFileReader(cfg, log.New("test-logger")) + So(err, ShouldBeNil) + + So(reader.Path, ShouldEqual, fullPath) + So(filepath.IsAbs(reader.Path), ShouldBeTrue) + }) + + Convey("using relative path", func() { + cfg.Options["folder"] = defaultDashboards + reader, err := NewDashboardFileReader(cfg, log.New("test-logger")) + So(err, ShouldBeNil) + + So(filepath.IsAbs(reader.Path), ShouldBeTrue) + }) + }) +} + func TestDashboardFileReader(t *testing.T) { Convey("Dashboard file reader", t, func() { bus.ClearBusHandlers() @@ -170,30 +219,6 @@ func TestDashboardFileReader(t *testing.T) { }) }) - Convey("Can use bpth path and folder as dashboard path", func() { - cfg := &DashboardsAsConfig{ - Name: "Default", - Type: "file", - OrgId: 1, - Folder: "", - Options: map[string]interface{}{}, - } - - Convey("using path parameter", func() { - cfg.Options["path"] = defaultDashboards - reader, err := NewDashboardFileReader(cfg, log.New("test-logger")) - So(err, ShouldBeNil) - So(reader.Path, ShouldEqual, defaultDashboards) - }) - - Convey("using folder as options", func() { - cfg.Options["folder"] = defaultDashboards - reader, err := NewDashboardFileReader(cfg, log.New("test-logger")) - So(err, ShouldBeNil) - So(reader.Path, ShouldEqual, defaultDashboards) - }) - }) - Reset(func() { dashboards.NewProvisioningService = origNewDashboardProvisioningService }) diff --git a/pkg/services/provisioning/dashboards/test-configs/broken-configs/commented.yaml b/pkg/services/provisioning/dashboards/testdata/test-configs/broken-configs/commented.yaml similarity index 100% rename from pkg/services/provisioning/dashboards/test-configs/broken-configs/commented.yaml rename to pkg/services/provisioning/dashboards/testdata/test-configs/broken-configs/commented.yaml diff --git a/pkg/services/provisioning/dashboards/test-configs/dashboards-from-disk/dev-dashboards.yaml b/pkg/services/provisioning/dashboards/testdata/test-configs/dashboards-from-disk/dev-dashboards.yaml similarity index 90% rename from pkg/services/provisioning/dashboards/test-configs/dashboards-from-disk/dev-dashboards.yaml rename to pkg/services/provisioning/dashboards/testdata/test-configs/dashboards-from-disk/dev-dashboards.yaml index e9776d69010..e26c329f87c 100644 --- a/pkg/services/provisioning/dashboards/test-configs/dashboards-from-disk/dev-dashboards.yaml +++ b/pkg/services/provisioning/dashboards/testdata/test-configs/dashboards-from-disk/dev-dashboards.yaml @@ -6,6 +6,7 @@ providers: folder: 'developers' editable: true disableDeletion: true + updateIntervalSeconds: 10 type: file options: path: /var/lib/grafana/dashboards diff --git a/pkg/services/provisioning/dashboards/test-configs/dashboards-from-disk/sample.yaml b/pkg/services/provisioning/dashboards/testdata/test-configs/dashboards-from-disk/sample.yaml similarity index 100% rename from pkg/services/provisioning/dashboards/test-configs/dashboards-from-disk/sample.yaml rename to pkg/services/provisioning/dashboards/testdata/test-configs/dashboards-from-disk/sample.yaml diff --git a/pkg/services/provisioning/dashboards/test-configs/version-0/version-0.yaml b/pkg/services/provisioning/dashboards/testdata/test-configs/version-0/version-0.yaml similarity index 89% rename from pkg/services/provisioning/dashboards/test-configs/version-0/version-0.yaml 
rename to pkg/services/provisioning/dashboards/testdata/test-configs/version-0/version-0.yaml index 979e762d4d4..69a317fb396 100644 --- a/pkg/services/provisioning/dashboards/test-configs/version-0/version-0.yaml +++ b/pkg/services/provisioning/dashboards/testdata/test-configs/version-0/version-0.yaml @@ -3,6 +3,7 @@ folder: 'developers' editable: true disableDeletion: true + updateIntervalSeconds: 10 type: file options: path: /var/lib/grafana/dashboards diff --git a/pkg/services/provisioning/dashboards/test-dashboards/broken-dashboards/empty-json.json b/pkg/services/provisioning/dashboards/testdata/test-dashboards/broken-dashboards/empty-json.json similarity index 100% rename from pkg/services/provisioning/dashboards/test-dashboards/broken-dashboards/empty-json.json rename to pkg/services/provisioning/dashboards/testdata/test-dashboards/broken-dashboards/empty-json.json diff --git a/pkg/services/provisioning/dashboards/test-dashboards/broken-dashboards/invalid.json b/pkg/services/provisioning/dashboards/testdata/test-dashboards/broken-dashboards/invalid.json similarity index 100% rename from pkg/services/provisioning/dashboards/test-dashboards/broken-dashboards/invalid.json rename to pkg/services/provisioning/dashboards/testdata/test-dashboards/broken-dashboards/invalid.json diff --git a/pkg/services/provisioning/dashboards/test-dashboards/containing-id/dashboard1.json b/pkg/services/provisioning/dashboards/testdata/test-dashboards/containing-id/dashboard1.json similarity index 100% rename from pkg/services/provisioning/dashboards/test-dashboards/containing-id/dashboard1.json rename to pkg/services/provisioning/dashboards/testdata/test-dashboards/containing-id/dashboard1.json diff --git a/pkg/services/provisioning/dashboards/test-dashboards/folder-one/dashboard1.json b/pkg/services/provisioning/dashboards/testdata/test-dashboards/folder-one/dashboard1.json similarity index 100% rename from pkg/services/provisioning/dashboards/test-dashboards/folder-one/dashboard1.json rename to pkg/services/provisioning/dashboards/testdata/test-dashboards/folder-one/dashboard1.json diff --git a/pkg/services/provisioning/dashboards/test-dashboards/folder-one/dashboard2.json b/pkg/services/provisioning/dashboards/testdata/test-dashboards/folder-one/dashboard2.json similarity index 100% rename from pkg/services/provisioning/dashboards/test-dashboards/folder-one/dashboard2.json rename to pkg/services/provisioning/dashboards/testdata/test-dashboards/folder-one/dashboard2.json diff --git a/pkg/services/provisioning/dashboards/test-dashboards/one-dashboard/dashboard1.json b/pkg/services/provisioning/dashboards/testdata/test-dashboards/one-dashboard/dashboard1.json similarity index 100% rename from pkg/services/provisioning/dashboards/test-dashboards/one-dashboard/dashboard1.json rename to pkg/services/provisioning/dashboards/testdata/test-dashboards/one-dashboard/dashboard1.json diff --git a/pkg/services/provisioning/dashboards/testdata/test-dashboards/symlink b/pkg/services/provisioning/dashboards/testdata/test-dashboards/symlink new file mode 120000 index 00000000000..42e166e6959 --- /dev/null +++ b/pkg/services/provisioning/dashboards/testdata/test-dashboards/symlink @@ -0,0 +1 @@ +containing-id/ \ No newline at end of file diff --git a/pkg/services/provisioning/dashboards/types.go b/pkg/services/provisioning/dashboards/types.go index 4a55351d3e4..a658b816c7d 100644 --- a/pkg/services/provisioning/dashboards/types.go +++ b/pkg/services/provisioning/dashboards/types.go @@ -10,23 +10,25 @@ import ( ) type 
DashboardsAsConfig struct { - Name string - Type string - OrgId int64 - Folder string - Editable bool - Options map[string]interface{} - DisableDeletion bool + Name string + Type string + OrgId int64 + Folder string + Editable bool + Options map[string]interface{} + DisableDeletion bool + UpdateIntervalSeconds int64 } type DashboardsAsConfigV0 struct { - Name string `json:"name" yaml:"name"` - Type string `json:"type" yaml:"type"` - OrgId int64 `json:"org_id" yaml:"org_id"` - Folder string `json:"folder" yaml:"folder"` - Editable bool `json:"editable" yaml:"editable"` - Options map[string]interface{} `json:"options" yaml:"options"` - DisableDeletion bool `json:"disableDeletion" yaml:"disableDeletion"` + Name string `json:"name" yaml:"name"` + Type string `json:"type" yaml:"type"` + OrgId int64 `json:"org_id" yaml:"org_id"` + Folder string `json:"folder" yaml:"folder"` + Editable bool `json:"editable" yaml:"editable"` + Options map[string]interface{} `json:"options" yaml:"options"` + DisableDeletion bool `json:"disableDeletion" yaml:"disableDeletion"` + UpdateIntervalSeconds int64 `json:"updateIntervalSeconds" yaml:"updateIntervalSeconds"` } type ConfigVersion struct { @@ -38,13 +40,14 @@ type DashboardAsConfigV1 struct { } type DashboardProviderConfigs struct { - Name string `json:"name" yaml:"name"` - Type string `json:"type" yaml:"type"` - OrgId int64 `json:"orgId" yaml:"orgId"` - Folder string `json:"folder" yaml:"folder"` - Editable bool `json:"editable" yaml:"editable"` - Options map[string]interface{} `json:"options" yaml:"options"` - DisableDeletion bool `json:"disableDeletion" yaml:"disableDeletion"` + Name string `json:"name" yaml:"name"` + Type string `json:"type" yaml:"type"` + OrgId int64 `json:"orgId" yaml:"orgId"` + Folder string `json:"folder" yaml:"folder"` + Editable bool `json:"editable" yaml:"editable"` + Options map[string]interface{} `json:"options" yaml:"options"` + DisableDeletion bool `json:"disableDeletion" yaml:"disableDeletion"` + UpdateIntervalSeconds int64 `json:"updateIntervalSeconds" yaml:"updateIntervalSeconds"` } func createDashboardJson(data *simplejson.Json, lastModified time.Time, cfg *DashboardsAsConfig, folderId int64) (*dashboards.SaveDashboardDTO, error) { @@ -68,13 +71,14 @@ func mapV0ToDashboardAsConfig(v0 []*DashboardsAsConfigV0) []*DashboardsAsConfig for _, v := range v0 { r = append(r, &DashboardsAsConfig{ - Name: v.Name, - Type: v.Type, - OrgId: v.OrgId, - Folder: v.Folder, - Editable: v.Editable, - Options: v.Options, - DisableDeletion: v.DisableDeletion, + Name: v.Name, + Type: v.Type, + OrgId: v.OrgId, + Folder: v.Folder, + Editable: v.Editable, + Options: v.Options, + DisableDeletion: v.DisableDeletion, + UpdateIntervalSeconds: v.UpdateIntervalSeconds, }) } @@ -86,13 +90,14 @@ func (dc *DashboardAsConfigV1) mapToDashboardAsConfig() []*DashboardsAsConfig { for _, v := range dc.Providers { r = append(r, &DashboardsAsConfig{ - Name: v.Name, - Type: v.Type, - OrgId: v.OrgId, - Folder: v.Folder, - Editable: v.Editable, - Options: v.Options, - DisableDeletion: v.DisableDeletion, + Name: v.Name, + Type: v.Type, + OrgId: v.OrgId, + Folder: v.Folder, + Editable: v.Editable, + Options: v.Options, + DisableDeletion: v.DisableDeletion, + UpdateIntervalSeconds: v.UpdateIntervalSeconds, }) } diff --git a/pkg/services/provisioning/datasources/config_reader_test.go b/pkg/services/provisioning/datasources/config_reader_test.go index 89ecc5a0b68..2e407dbe4de 100644 --- a/pkg/services/provisioning/datasources/config_reader_test.go +++ 
b/pkg/services/provisioning/datasources/config_reader_test.go @@ -13,12 +13,12 @@ import ( var ( logger log.Logger = log.New("fake.log") - twoDatasourcesConfig = "./test-configs/two-datasources" - twoDatasourcesConfigPurgeOthers = "./test-configs/insert-two-delete-two" - doubleDatasourcesConfig = "./test-configs/double-default" - allProperties = "./test-configs/all-properties" - versionZero = "./test-configs/version-0" - brokenYaml = "./test-configs/broken-yaml" + twoDatasourcesConfig = "testdata/two-datasources" + twoDatasourcesConfigPurgeOthers = "testdata/insert-two-delete-two" + doubleDatasourcesConfig = "testdata/double-default" + allProperties = "testdata/all-properties" + versionZero = "testdata/version-0" + brokenYaml = "testdata/broken-yaml" fakeRepo *fakeRepository ) diff --git a/pkg/services/provisioning/datasources/test-configs/all-properties/all-properties.yaml b/pkg/services/provisioning/datasources/testdata/all-properties/all-properties.yaml similarity index 100% rename from pkg/services/provisioning/datasources/test-configs/all-properties/all-properties.yaml rename to pkg/services/provisioning/datasources/testdata/all-properties/all-properties.yaml diff --git a/pkg/services/provisioning/datasources/test-configs/all-properties/not.yaml.txt b/pkg/services/provisioning/datasources/testdata/all-properties/not.yaml.txt similarity index 100% rename from pkg/services/provisioning/datasources/test-configs/all-properties/not.yaml.txt rename to pkg/services/provisioning/datasources/testdata/all-properties/not.yaml.txt diff --git a/pkg/services/provisioning/datasources/test-configs/all-properties/sample.yaml b/pkg/services/provisioning/datasources/testdata/all-properties/sample.yaml similarity index 100% rename from pkg/services/provisioning/datasources/test-configs/all-properties/sample.yaml rename to pkg/services/provisioning/datasources/testdata/all-properties/sample.yaml diff --git a/pkg/services/provisioning/datasources/test-configs/all-properties/second.yaml b/pkg/services/provisioning/datasources/testdata/all-properties/second.yaml similarity index 100% rename from pkg/services/provisioning/datasources/test-configs/all-properties/second.yaml rename to pkg/services/provisioning/datasources/testdata/all-properties/second.yaml diff --git a/pkg/services/provisioning/datasources/test-configs/broken-yaml/broken.yaml b/pkg/services/provisioning/datasources/testdata/broken-yaml/broken.yaml similarity index 100% rename from pkg/services/provisioning/datasources/test-configs/broken-yaml/broken.yaml rename to pkg/services/provisioning/datasources/testdata/broken-yaml/broken.yaml diff --git a/pkg/services/provisioning/datasources/test-configs/broken-yaml/commented.yaml b/pkg/services/provisioning/datasources/testdata/broken-yaml/commented.yaml similarity index 100% rename from pkg/services/provisioning/datasources/test-configs/broken-yaml/commented.yaml rename to pkg/services/provisioning/datasources/testdata/broken-yaml/commented.yaml diff --git a/pkg/services/provisioning/datasources/test-configs/double-default/default-1.yaml b/pkg/services/provisioning/datasources/testdata/double-default/default-1.yaml similarity index 100% rename from pkg/services/provisioning/datasources/test-configs/double-default/default-1.yaml rename to pkg/services/provisioning/datasources/testdata/double-default/default-1.yaml diff --git a/pkg/services/provisioning/datasources/test-configs/double-default/default-2.yaml b/pkg/services/provisioning/datasources/testdata/double-default/default-2.yaml similarity index 
100% rename from pkg/services/provisioning/datasources/test-configs/double-default/default-2.yaml
rename to pkg/services/provisioning/datasources/testdata/double-default/default-2.yaml
diff --git a/pkg/services/provisioning/datasources/test-configs/insert-two-delete-two/one-datasources.yaml b/pkg/services/provisioning/datasources/testdata/insert-two-delete-two/one-datasources.yaml
similarity index 100%
rename from pkg/services/provisioning/datasources/test-configs/insert-two-delete-two/one-datasources.yaml
rename to pkg/services/provisioning/datasources/testdata/insert-two-delete-two/one-datasources.yaml
diff --git a/pkg/services/provisioning/datasources/test-configs/insert-two-delete-two/two-datasources.yml b/pkg/services/provisioning/datasources/testdata/insert-two-delete-two/two-datasources.yml
similarity index 100%
rename from pkg/services/provisioning/datasources/test-configs/insert-two-delete-two/two-datasources.yml
rename to pkg/services/provisioning/datasources/testdata/insert-two-delete-two/two-datasources.yml
diff --git a/pkg/services/provisioning/datasources/test-configs/two-datasources/two-datasources.yaml b/pkg/services/provisioning/datasources/testdata/two-datasources/two-datasources.yaml
similarity index 100%
rename from pkg/services/provisioning/datasources/test-configs/two-datasources/two-datasources.yaml
rename to pkg/services/provisioning/datasources/testdata/two-datasources/two-datasources.yaml
diff --git a/pkg/services/provisioning/datasources/test-configs/version-0/version-0.yaml b/pkg/services/provisioning/datasources/testdata/version-0/version-0.yaml
similarity index 100%
rename from pkg/services/provisioning/datasources/test-configs/version-0/version-0.yaml
rename to pkg/services/provisioning/datasources/testdata/version-0/version-0.yaml
diff --git a/pkg/services/provisioning/datasources/test-configs/zero-datasources/placeholder-for-git b/pkg/services/provisioning/datasources/testdata/zero-datasources/placeholder-for-git
similarity index 100%
rename from pkg/services/provisioning/datasources/test-configs/zero-datasources/placeholder-for-git
rename to pkg/services/provisioning/datasources/testdata/zero-datasources/placeholder-for-git
diff --git a/pkg/services/rendering/http_mode.go b/pkg/services/rendering/http_mode.go
new file mode 100644
index 00000000000..9084ca27353
--- /dev/null
+++ b/pkg/services/rendering/http_mode.go
@@ -0,0 +1,68 @@
+package rendering
+
+import (
+	"context"
+	"io"
+	"net"
+	"net/http"
+	"net/url"
+	"os"
+	"strconv"
+	"time"
+)
+
+var netTransport = &http.Transport{
+	Proxy: http.ProxyFromEnvironment,
+	Dial: (&net.Dialer{
+		Timeout:   30 * time.Second,
+		DualStack: true,
+	}).Dial,
+	TLSHandshakeTimeout: 5 * time.Second,
+}
+
+func (rs *RenderingService) renderViaHttp(ctx context.Context, opts Opts) (*RenderResult, error) {
+	filePath := rs.getFilePathForNewImage()
+
+	var netClient = &http.Client{
+		Timeout:   opts.Timeout,
+		Transport: netTransport,
+	}
+
+	rendererUrl, err := url.Parse(rs.Cfg.RendererUrl)
+	if err != nil {
+		return nil, err
+	}
+
+	queryParams := rendererUrl.Query()
+	queryParams.Add("url", rs.getURL(opts.Path))
+	queryParams.Add("renderKey", rs.getRenderKey(opts.OrgId, opts.UserId, opts.OrgRole))
+	queryParams.Add("width", strconv.Itoa(opts.Width))
+	queryParams.Add("height", strconv.Itoa(opts.Height))
+	queryParams.Add("domain", rs.getLocalDomain())
+	queryParams.Add("timezone", isoTimeOffsetToPosixTz(opts.Timezone))
+	queryParams.Add("encoding", opts.Encoding)
+	queryParams.Add("timeout", strconv.Itoa(int(opts.Timeout.Seconds())))
+	rendererUrl.RawQuery = queryParams.Encode()
+
+	req, err := http.NewRequest("GET", rendererUrl.String(), nil)
+	if err != nil {
+		return nil, err
+	}
+
+	// make request to renderer server
+	resp, err := netClient.Do(req)
+	if err != nil {
+		return nil, err
+	}
+
+	// save response to file
+	defer resp.Body.Close()
+	out, err := os.Create(filePath)
+	if err != nil {
+		return nil, err
+	}
+	defer out.Close()
+	if _, err := io.Copy(out, resp.Body); err != nil {
+		return nil, err
+	}
+
+	return &RenderResult{FilePath: filePath}, nil
+}
diff --git a/pkg/services/rendering/interface.go b/pkg/services/rendering/interface.go
new file mode 100644
index 00000000000..85c139cfc04
--- /dev/null
+++ b/pkg/services/rendering/interface.go
@@ -0,0 +1,35 @@
+package rendering
+
+import (
+	"context"
+	"errors"
+	"time"
+
+	"github.com/grafana/grafana/pkg/models"
+)
+
+var ErrTimeout = errors.New("Timeout error. You can set timeout in seconds with &timeout url parameter")
+var ErrNoRenderer = errors.New("No renderer plugin found nor is an external render server configured")
+var ErrPhantomJSNotInstalled = errors.New("PhantomJS executable not found")
+
+type Opts struct {
+	Width    int
+	Height   int
+	Timeout  time.Duration
+	OrgId    int64
+	UserId   int64
+	OrgRole  models.RoleType
+	Path     string
+	Encoding string
+	Timezone string
+}
+
+type RenderResult struct {
+	FilePath string
+}
+
+type renderFunc func(ctx context.Context, options Opts) (*RenderResult, error)
+
+type Service interface {
+	Render(ctx context.Context, opts Opts) (*RenderResult, error)
+}
diff --git a/pkg/services/rendering/phantomjs.go b/pkg/services/rendering/phantomjs.go
new file mode 100644
index 00000000000..87ccaf6b5d2
--- /dev/null
+++ b/pkg/services/rendering/phantomjs.go
@@ -0,0 +1,111 @@
+package rendering
+
+import (
+	"context"
+	"fmt"
+	"os"
+	"os/exec"
+	"path/filepath"
+	"runtime"
+	"strings"
+	"time"
+
+	"github.com/grafana/grafana/pkg/log"
+	"github.com/grafana/grafana/pkg/middleware"
+)
+
+func (rs *RenderingService) renderViaPhantomJS(ctx context.Context, opts Opts) (*RenderResult, error) {
+	rs.log.Info("Rendering", "path", opts.Path)
+
+	var executable = "phantomjs"
+	if runtime.GOOS == "windows" {
+		executable = executable + ".exe"
+	}
+
+	url := rs.getURL(opts.Path)
+	binPath, _ := filepath.Abs(filepath.Join(rs.Cfg.PhantomDir, executable))
+	if _, err := os.Stat(binPath); os.IsNotExist(err) {
+		rs.log.Error("executable not found", "executable", binPath)
+		return nil, ErrPhantomJSNotInstalled
+	}
+
+	scriptPath, _ := filepath.Abs(filepath.Join(rs.Cfg.PhantomDir, "render.js"))
+	pngPath := rs.getFilePathForNewImage()
+
+	renderKey := middleware.AddRenderAuthKey(opts.OrgId, opts.UserId, opts.OrgRole)
+	defer middleware.RemoveRenderAuthKey(renderKey)
+
+	phantomDebugArg := "--debug=false"
+	if log.GetLogLevelFor("renderer") >= log.LvlDebug {
+		phantomDebugArg = "--debug=true"
+	}
+
+	cmdArgs := []string{
+		"--ignore-ssl-errors=true",
+		"--web-security=false",
+		phantomDebugArg,
+		scriptPath,
+		fmt.Sprintf("url=%v", url),
+		fmt.Sprintf("width=%v", opts.Width),
+		fmt.Sprintf("height=%v", opts.Height),
+		fmt.Sprintf("png=%v", pngPath),
+		fmt.Sprintf("domain=%v", rs.getLocalDomain()),
+		fmt.Sprintf("timeout=%v", opts.Timeout.Seconds()),
+		fmt.Sprintf("renderKey=%v", renderKey),
+	}
+
+	if opts.Encoding != "" {
+		cmdArgs = append([]string{fmt.Sprintf("--output-encoding=%s", opts.Encoding)}, cmdArgs...)
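+		// unlike the key=value arguments above, which are parsed by render.js,
+		// --output-encoding is a flag for the phantomjs binary itself and must
+		// come before the script path, which is why it is prepended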
+	}
+
+	commandCtx, cancel := context.WithTimeout(ctx, opts.Timeout+time.Second*2)
+	defer cancel()
+
+	cmd := exec.CommandContext(commandCtx, binPath, cmdArgs...)
+	cmd.Stderr = cmd.Stdout
+
+	if opts.Timezone != "" {
+		baseEnviron := os.Environ()
+		cmd.Env = appendEnviron(baseEnviron, "TZ", isoTimeOffsetToPosixTz(opts.Timezone))
+	}
+
+	out, err := cmd.Output()
+
+	// check for timeout first
+	if commandCtx.Err() == context.DeadlineExceeded {
+		rs.log.Info("Rendering timed out")
+		return nil, ErrTimeout
+	}
+
+	if err != nil {
+		rs.log.Error("Phantomjs exited with non-zero exit code", "error", err)
+		return nil, err
+	}
+
+	rs.log.Debug("Phantomjs output", "out", string(out))
+
+	rs.log.Debug("Image rendered", "path", pngPath)
+	return &RenderResult{FilePath: pngPath}, nil
+}
+
+func isoTimeOffsetToPosixTz(isoOffset string) string {
+	// invert offset
+	if strings.HasPrefix(isoOffset, "UTC+") {
+		return strings.Replace(isoOffset, "UTC+", "UTC-", 1)
+	}
+	if strings.HasPrefix(isoOffset, "UTC-") {
+		return strings.Replace(isoOffset, "UTC-", "UTC+", 1)
+	}
+	return isoOffset
+}
+
+func appendEnviron(baseEnviron []string, name string, value string) []string {
+	results := make([]string, 0)
+	prefix := fmt.Sprintf("%s=", name)
+	for _, v := range baseEnviron {
+		if !strings.HasPrefix(v, prefix) {
+			results = append(results, v)
+		}
+	}
+	return append(results, fmt.Sprintf("%s=%s", name, value))
+}
diff --git a/pkg/services/rendering/plugin_mode.go b/pkg/services/rendering/plugin_mode.go
new file mode 100644
index 00000000000..550779ad7c3
--- /dev/null
+++ b/pkg/services/rendering/plugin_mode.go
@@ -0,0 +1,95 @@
+package rendering
+
+import (
+	"context"
+	"fmt"
+	"os/exec"
+	"path"
+	"time"
+
+	pluginModel "github.com/grafana/grafana-plugin-model/go/renderer"
+	"github.com/grafana/grafana/pkg/plugins"
+	plugin "github.com/hashicorp/go-plugin"
+)
+
+func (rs *RenderingService) startPlugin(ctx context.Context) error {
+	cmd := plugins.ComposePluginStartCommmand("plugin_start")
+	fullpath := path.Join(rs.pluginInfo.PluginDir, cmd)
+
+	var handshakeConfig = plugin.HandshakeConfig{
+		ProtocolVersion:  1,
+		MagicCookieKey:   "grafana_plugin_type",
+		MagicCookieValue: "renderer",
+	}
+
+	rs.log.Info("Renderer plugin found, starting", "cmd", cmd)
+
+	rs.pluginClient = plugin.NewClient(&plugin.ClientConfig{
+		HandshakeConfig: handshakeConfig,
+		Plugins: map[string]plugin.Plugin{
+			plugins.Renderer.Id: &pluginModel.RendererPluginImpl{},
+		},
+		Cmd:              exec.Command(fullpath),
+		AllowedProtocols: []plugin.Protocol{plugin.ProtocolGRPC},
+		Logger:           plugins.LogWrapper{Logger: rs.log},
+	})
+
+	rpcClient, err := rs.pluginClient.Client()
+	if err != nil {
+		return err
+	}
+
+	raw, err := rpcClient.Dispense(rs.pluginInfo.Id)
+	if err != nil {
+		return err
+	}
+
+	rs.grpcPlugin = raw.(pluginModel.RendererPlugin)
+
+	return nil
+}
+
+func (rs *RenderingService) watchAndRestartPlugin(ctx context.Context) error {
+	ticker := time.NewTicker(time.Second * 1)
+	defer ticker.Stop()
+
+	for {
+		select {
+		case <-ctx.Done():
+			return ctx.Err()
+		case <-ticker.C:
+			if rs.pluginClient.Exited() {
+				rs.log.Debug("Render plugin exited, restarting...")
+				err := rs.startPlugin(ctx)
+				if err != nil {
+					rs.log.Error("Failed to start render plugin", "error", err)
+				}
+			}
+		}
+	}
+}
+
+func (rs *RenderingService) renderViaPlugin(ctx context.Context, opts Opts) (*RenderResult, error) {
+	pngPath := rs.getFilePathForNewImage()
+
+	rsp, err := rs.grpcPlugin.Render(ctx, &pluginModel.RenderRequest{
+		Url:       rs.getURL(opts.Path),
+		Width:     int32(opts.Width),
+		Height:    int32(opts.Height),
+		FilePath:  pngPath,
+		Timeout:   int32(opts.Timeout.Seconds()),
+		RenderKey: rs.getRenderKey(opts.OrgId, opts.UserId, opts.OrgRole),
+		Encoding:  opts.Encoding,
+		Timezone:  isoTimeOffsetToPosixTz(opts.Timezone),
+		Domain:    rs.getLocalDomain(),
+	})
+
+	if err != nil {
+		return nil, err
+	}
+
+	if rsp.Error != "" {
+		return nil, fmt.Errorf("Rendering failed: %v", rsp.Error)
+	}
+
+	return &RenderResult{FilePath: pngPath}, nil
+}
diff --git a/pkg/services/rendering/rendering.go b/pkg/services/rendering/rendering.go
new file mode 100644
index 00000000000..799aecc3e88
--- /dev/null
+++ b/pkg/services/rendering/rendering.go
@@ -0,0 +1,99 @@
+package rendering
+
+import (
+	"context"
+	"fmt"
+	"path/filepath"
+
+	plugin "github.com/hashicorp/go-plugin"
+
+	pluginModel "github.com/grafana/grafana-plugin-model/go/renderer"
+	"github.com/grafana/grafana/pkg/log"
+	"github.com/grafana/grafana/pkg/middleware"
+	"github.com/grafana/grafana/pkg/models"
+	"github.com/grafana/grafana/pkg/plugins"
+	"github.com/grafana/grafana/pkg/registry"
+	"github.com/grafana/grafana/pkg/setting"
+	"github.com/grafana/grafana/pkg/util"
+)
+
+func init() {
+	registry.RegisterService(&RenderingService{})
+}
+
+type RenderingService struct {
+	log          log.Logger
+	pluginClient *plugin.Client
+	grpcPlugin   pluginModel.RendererPlugin
+	pluginInfo   *plugins.RendererPlugin
+	renderAction renderFunc
+
+	Cfg *setting.Cfg `inject:""`
+}
+
+func (rs *RenderingService) Init() error {
+	rs.log = log.New("rendering")
+	return nil
+}
+
+func (rs *RenderingService) Run(ctx context.Context) error {
+	if rs.Cfg.RendererUrl != "" {
+		rs.log.Info("Backend rendering via external http server")
+		rs.renderAction = rs.renderViaHttp
+		<-ctx.Done()
+		return nil
+	}
+
+	if plugins.Renderer == nil {
+		rs.renderAction = rs.renderViaPhantomJS
+		<-ctx.Done()
+		return nil
+	}
+
+	rs.pluginInfo = plugins.Renderer
+
+	if err := rs.startPlugin(ctx); err != nil {
+		return err
+	}
+
+	rs.renderAction = rs.renderViaPlugin
+
+	err := rs.watchAndRestartPlugin(ctx)
+
+	if rs.pluginClient != nil {
+		rs.log.Debug("Killing renderer plugin process")
+		rs.pluginClient.Kill()
+	}
+
+	return err
+}
+
+func (rs *RenderingService) Render(ctx context.Context, opts Opts) (*RenderResult, error) {
+	if rs.renderAction != nil {
+		return rs.renderAction(ctx, opts)
+	}
+
+	return nil, fmt.Errorf("No renderer found")
+}
+
+func (rs *RenderingService) getFilePathForNewImage() string {
+	pngPath, _ := filepath.Abs(filepath.Join(rs.Cfg.ImagesDir, util.GetRandomString(20)))
+	return pngPath + ".png"
+}
+
+func (rs *RenderingService) getURL(path string) string {
+	// &render=1 signals to the legacy redirect layer to
+	return fmt.Sprintf("%s://%s:%s/%s&render=1", setting.Protocol, rs.getLocalDomain(), setting.HttpPort, path)
+}
+
+func (rs *RenderingService) getLocalDomain() string {
+	if setting.HttpAddr != setting.DEFAULT_HTTP_ADDR {
+		return setting.HttpAddr
+	}
+
+	return "localhost"
+}
+
+func (rs *RenderingService) getRenderKey(orgId, userId int64, orgRole models.RoleType) string {
+	return middleware.AddRenderAuthKey(orgId, userId, orgRole)
+}
diff --git a/pkg/services/sqlstore/alert.go b/pkg/services/sqlstore/alert.go
index b0ca50eb67d..af911dc22e6 100644
--- a/pkg/services/sqlstore/alert.go
+++ b/pkg/services/sqlstore/alert.go
@@ -73,6 +73,7 @@ func HandleAlertsQuery(query *m.GetAlertsQuery) error {
 		alert.name,
 		alert.state,
 		alert.new_state_date,
+		alert.eval_data,
 		alert.eval_date,
 		alert.execution_error,
 		dashboard.uid as dashboard_uid,
@@ -82,8 +83,16 @@ func HandleAlertsQuery(query *m.GetAlertsQuery) error {
 	builder.Write(`WHERE alert.org_id = ?`, query.OrgId)
 
-	if query.DashboardId != 0 {
-		builder.Write(` AND alert.dashboard_id = ?`, query.DashboardId)
+	if len(strings.TrimSpace(query.Query)) > 0 {
+		builder.Write(" AND alert.name "+dialect.LikeStr()+" ?", "%"+query.Query+"%")
+	}
+
+	if len(query.DashboardIDs) > 0 {
+		builder.sql.WriteString(` AND alert.dashboard_id IN (?` + strings.Repeat(",?", len(query.DashboardIDs)-1) + `) `)
+
+		for _, dbID := range query.DashboardIDs {
+			builder.AddParams(dbID)
+		}
 	}
 
 	if query.PanelId != 0 {
@@ -108,7 +117,7 @@ func HandleAlertsQuery(query *m.GetAlertsQuery) error {
 	}
 
 	if query.User.OrgRole != m.ROLE_ADMIN {
-		builder.writeDashboardPermissionFilter(query.User, m.PERMISSION_EDIT)
+		builder.writeDashboardPermissionFilter(query.User, m.PERMISSION_VIEW)
 	}
 
 	builder.Write(" ORDER BY name ASC")
diff --git a/pkg/services/sqlstore/alert_test.go b/pkg/services/sqlstore/alert_test.go
index 296d16c2f45..d97deb45f0e 100644
--- a/pkg/services/sqlstore/alert_test.go
+++ b/pkg/services/sqlstore/alert_test.go
@@ -2,18 +2,18 @@ package sqlstore
 
 import (
 	"testing"
+	"time"
 
 	"github.com/grafana/grafana/pkg/components/simplejson"
 	m "github.com/grafana/grafana/pkg/models"
 	. "github.com/smartystreets/goconvey/convey"
-	"time"
 )
 
 func mockTimeNow() {
 	var timeSeed int64
 	timeNow = func() time.Time {
 		fakeNow := time.Unix(timeSeed, 0)
-		timeSeed += 1
+		timeSeed++
 		return fakeNow
 	}
 }
@@ -30,7 +30,7 @@ func TestAlertingDataAccess(t *testing.T) {
 		InitTestDB(t)
 
 		testDash := insertTestDashboard("dashboard with alerts", 1, 0, false, "alert")
-
+		evalData, _ := simplejson.NewJson([]byte(`{"test": "test"}`))
 		items := []*m.Alert{
 			{
 				PanelId:   1,
@@ -40,6 +40,7 @@ func TestAlertingDataAccess(t *testing.T) {
 				Message:   "Alerting message",
 				Settings:  simplejson.New(),
 				Frequency: 1,
+				EvalData:  evalData,
 			},
 		}
 
@@ -99,21 +100,32 @@ func TestAlertingDataAccess(t *testing.T) {
 		})
 
 		Convey("Can read properties", func() {
-			alertQuery := m.GetAlertsQuery{DashboardId: testDash.Id, PanelId: 1, OrgId: 1, User: &m.SignedInUser{OrgRole: m.ROLE_ADMIN}}
+			alertQuery := m.GetAlertsQuery{DashboardIDs: []int64{testDash.Id}, PanelId: 1, OrgId: 1, User: &m.SignedInUser{OrgRole: m.ROLE_ADMIN}}
 			err2 := HandleAlertsQuery(&alertQuery)
 
 			alert := alertQuery.Result[0]
 			So(err2, ShouldBeNil)
+			So(alert.Id, ShouldBeGreaterThan, 0)
+			So(alert.DashboardId, ShouldEqual, testDash.Id)
+			So(alert.PanelId, ShouldEqual, 1)
 			So(alert.Name, ShouldEqual, "Alerting title")
 			So(alert.State, ShouldEqual, "pending")
+			So(alert.NewStateDate, ShouldNotBeNil)
+			So(alert.EvalData, ShouldNotBeNil)
+			So(alert.EvalData.Get("test").MustString(), ShouldEqual, "test")
+			So(alert.EvalDate, ShouldNotBeNil)
+			So(alert.ExecutionError, ShouldEqual, "")
+			So(alert.DashboardUid, ShouldNotBeNil)
+			So(alert.DashboardSlug, ShouldEqual, "dashboard-with-alerts")
 		})
 
-		Convey("Viewer cannot read alerts", func() {
-			alertQuery := m.GetAlertsQuery{DashboardId: testDash.Id, PanelId: 1, OrgId: 1, User: &m.SignedInUser{OrgRole: m.ROLE_VIEWER}}
+		Convey("Viewer can read alerts", func() {
+			viewerUser := &m.SignedInUser{OrgRole: m.ROLE_VIEWER, OrgId: 1}
+			alertQuery := m.GetAlertsQuery{DashboardIDs: []int64{testDash.Id}, PanelId: 1, OrgId: 1, User: viewerUser}
 			err2 := HandleAlertsQuery(&alertQuery)
 
 			So(err2, ShouldBeNil)
-			So(alertQuery.Result, ShouldHaveLength, 0)
+			So(alertQuery.Result, ShouldHaveLength, 1)
 		})
 
 		Convey("Alerts with same dashboard id and panel id should update", func() {
@@ -134,7 +146,7 @@ func TestAlertingDataAccess(t *testing.T) {
 		})
 
 		Convey("Alerts
should be updated", func() { - query := m.GetAlertsQuery{DashboardId: testDash.Id, OrgId: 1, User: &m.SignedInUser{OrgRole: m.ROLE_ADMIN}} + query := m.GetAlertsQuery{DashboardIDs: []int64{testDash.Id}, OrgId: 1, User: &m.SignedInUser{OrgRole: m.ROLE_ADMIN}} err2 := HandleAlertsQuery(&query) So(err2, ShouldBeNil) @@ -183,7 +195,7 @@ func TestAlertingDataAccess(t *testing.T) { Convey("Should save 3 dashboards", func() { So(err, ShouldBeNil) - queryForDashboard := m.GetAlertsQuery{DashboardId: testDash.Id, OrgId: 1, User: &m.SignedInUser{OrgRole: m.ROLE_ADMIN}} + queryForDashboard := m.GetAlertsQuery{DashboardIDs: []int64{testDash.Id}, OrgId: 1, User: &m.SignedInUser{OrgRole: m.ROLE_ADMIN}} err2 := HandleAlertsQuery(&queryForDashboard) So(err2, ShouldBeNil) @@ -197,7 +209,7 @@ func TestAlertingDataAccess(t *testing.T) { err = SaveAlerts(&cmd) Convey("should delete the missing alert", func() { - query := m.GetAlertsQuery{DashboardId: testDash.Id, OrgId: 1, User: &m.SignedInUser{OrgRole: m.ROLE_ADMIN}} + query := m.GetAlertsQuery{DashboardIDs: []int64{testDash.Id}, OrgId: 1, User: &m.SignedInUser{OrgRole: m.ROLE_ADMIN}} err2 := HandleAlertsQuery(&query) So(err2, ShouldBeNil) So(len(query.Result), ShouldEqual, 2) @@ -232,7 +244,7 @@ func TestAlertingDataAccess(t *testing.T) { So(err, ShouldBeNil) Convey("Alerts should be removed", func() { - query := m.GetAlertsQuery{DashboardId: testDash.Id, OrgId: 1, User: &m.SignedInUser{OrgRole: m.ROLE_ADMIN}} + query := m.GetAlertsQuery{DashboardIDs: []int64{testDash.Id}, OrgId: 1, User: &m.SignedInUser{OrgRole: m.ROLE_ADMIN}} err2 := HandleAlertsQuery(&query) So(testDash.Id, ShouldEqual, 1) diff --git a/pkg/services/sqlstore/annotation.go b/pkg/services/sqlstore/annotation.go index 52da7a99516..a65bc136554 100644 --- a/pkg/services/sqlstore/annotation.go +++ b/pkg/services/sqlstore/annotation.go @@ -238,18 +238,19 @@ func (r *SqlAnnotationRepo) Delete(params *annotations.DeleteParams) error { queryParams []interface{} ) + sqlog.Info("delete", "orgId", params.OrgId) if params.RegionId != 0 { - annoTagSql = "DELETE FROM annotation_tag WHERE annotation_id IN (SELECT id FROM annotation WHERE region_id = ?)" - sql = "DELETE FROM annotation WHERE region_id = ?" - queryParams = []interface{}{params.RegionId} + annoTagSql = "DELETE FROM annotation_tag WHERE annotation_id IN (SELECT id FROM annotation WHERE region_id = ? AND org_id = ?)" + sql = "DELETE FROM annotation WHERE region_id = ? AND org_id = ?" + queryParams = []interface{}{params.RegionId, params.OrgId} } else if params.Id != 0 { - annoTagSql = "DELETE FROM annotation_tag WHERE annotation_id IN (SELECT id FROM annotation WHERE id = ?)" - sql = "DELETE FROM annotation WHERE id = ?" - queryParams = []interface{}{params.Id} + annoTagSql = "DELETE FROM annotation_tag WHERE annotation_id IN (SELECT id FROM annotation WHERE id = ? AND org_id = ?)" + sql = "DELETE FROM annotation WHERE id = ? AND org_id = ?" + queryParams = []interface{}{params.Id, params.OrgId} } else { - annoTagSql = "DELETE FROM annotation_tag WHERE annotation_id IN (SELECT id FROM annotation WHERE dashboard_id = ? AND panel_id = ?)" - sql = "DELETE FROM annotation WHERE dashboard_id = ? AND panel_id = ?" - queryParams = []interface{}{params.DashboardId, params.PanelId} + annoTagSql = "DELETE FROM annotation_tag WHERE annotation_id IN (SELECT id FROM annotation WHERE dashboard_id = ? AND panel_id = ? AND org_id = ?)" + sql = "DELETE FROM annotation WHERE dashboard_id = ? AND panel_id = ? AND org_id = ?" 
+ queryParams = []interface{}{params.DashboardId, params.PanelId, params.OrgId} } if _, err := sess.Exec(annoTagSql, queryParams...); err != nil { diff --git a/pkg/services/sqlstore/annotation_test.go b/pkg/services/sqlstore/annotation_test.go index 01a95c7db7b..c0d267f2578 100644 --- a/pkg/services/sqlstore/annotation_test.go +++ b/pkg/services/sqlstore/annotation_test.go @@ -268,7 +268,7 @@ func TestAnnotations(t *testing.T) { annotationId := items[0].Id - err = repo.Delete(&annotations.DeleteParams{Id: annotationId}) + err = repo.Delete(&annotations.DeleteParams{Id: annotationId, OrgId: 1}) So(err, ShouldBeNil) items, err = repo.Find(query) diff --git a/pkg/services/sqlstore/apikey.go b/pkg/services/sqlstore/apikey.go index 9d41b5c809e..775d4cf6447 100644 --- a/pkg/services/sqlstore/apikey.go +++ b/pkg/services/sqlstore/apikey.go @@ -1,6 +1,7 @@ package sqlstore import ( + "context" "time" "github.com/grafana/grafana/pkg/bus" @@ -11,7 +12,7 @@ func init() { bus.AddHandler("sql", GetApiKeys) bus.AddHandler("sql", GetApiKeyById) bus.AddHandler("sql", GetApiKeyByName) - bus.AddHandler("sql", DeleteApiKey) + bus.AddHandlerCtx("sql", DeleteApiKeyCtx) bus.AddHandler("sql", AddApiKey) } @@ -22,8 +23,8 @@ func GetApiKeys(query *m.GetApiKeysQuery) error { return sess.Find(&query.Result) } -func DeleteApiKey(cmd *m.DeleteApiKeyCommand) error { - return inTransaction(func(sess *DBSession) error { +func DeleteApiKeyCtx(ctx context.Context, cmd *m.DeleteApiKeyCommand) error { + return withDbSession(ctx, func(sess *DBSession) error { var rawSql = "DELETE FROM api_key WHERE id=? and org_id=?" _, err := sess.Exec(rawSql, cmd.Id, cmd.OrgId) return err diff --git a/pkg/services/sqlstore/dashboard_snapshot_test.go b/pkg/services/sqlstore/dashboard_snapshot_test.go index d1f70082a38..09b3fb7607f 100644 --- a/pkg/services/sqlstore/dashboard_snapshot_test.go +++ b/pkg/services/sqlstore/dashboard_snapshot_test.go @@ -4,7 +4,6 @@ import ( "testing" "time" - "github.com/go-xorm/xorm" . "github.com/smartystreets/goconvey/convey" "github.com/grafana/grafana/pkg/components/simplejson" @@ -110,14 +109,14 @@ func TestDashboardSnapshotDBAccess(t *testing.T) { } func TestDeleteExpiredSnapshots(t *testing.T) { - x := InitTestDB(t) + sqlstore := InitTestDB(t) Convey("Testing dashboard snapshots clean up", t, func() { setting.SnapShotRemoveExpired = true - notExpiredsnapshot := createTestSnapshot(x, "key1", 1200) - createTestSnapshot(x, "key2", -1200) - createTestSnapshot(x, "key3", -1200) + notExpiredsnapshot := createTestSnapshot(sqlstore, "key1", 48000) + createTestSnapshot(sqlstore, "key2", -1200) + createTestSnapshot(sqlstore, "key3", -1200) err := DeleteExpiredSnapshots(&m.DeleteExpiredSnapshotsCommand{}) So(err, ShouldBeNil) @@ -146,7 +145,7 @@ func TestDeleteExpiredSnapshots(t *testing.T) { }) } -func createTestSnapshot(x *xorm.Engine, key string, expires int64) *m.DashboardSnapshot { +func createTestSnapshot(sqlstore *SqlStore, key string, expires int64) *m.DashboardSnapshot { cmd := m.CreateDashboardSnapshotCommand{ Key: key, DeleteKey: "delete" + key, @@ -163,7 +162,7 @@ func createTestSnapshot(x *xorm.Engine, key string, expires int64) *m.DashboardS // Set expiry date manually - to be able to create expired snapshots if expires < 0 { expireDate := time.Now().Add(time.Second * time.Duration(expires)) - _, err = x.Exec("UPDATE dashboard_snapshot SET expires = ? WHERE id = ?", expireDate, cmd.Result.Id) + _, err = sqlstore.engine.Exec("UPDATE dashboard_snapshot SET expires = ? 
WHERE id = ?", expireDate, cmd.Result.Id) So(err, ShouldBeNil) } diff --git a/pkg/services/sqlstore/dashboard_test.go b/pkg/services/sqlstore/dashboard_test.go index 6d7c7a93e47..0ca1c5d67e4 100644 --- a/pkg/services/sqlstore/dashboard_test.go +++ b/pkg/services/sqlstore/dashboard_test.go @@ -1,6 +1,7 @@ package sqlstore import ( + "context" "fmt" "testing" "time" @@ -386,10 +387,11 @@ func insertTestDashboardForPlugin(title string, orgId int64, folderId int64, isF func createUser(name string, role string, isAdmin bool) m.User { setting.AutoAssignOrg = true + setting.AutoAssignOrgId = 1 setting.AutoAssignOrgRole = role currentUserCmd := m.CreateUserCommand{Login: name, Email: name + "@test.com", Name: "a " + name, IsAdmin: isAdmin} - err := CreateUser(¤tUserCmd) + err := CreateUser(context.Background(), ¤tUserCmd) So(err, ShouldBeNil) q1 := m.GetUserOrgListQuery{UserId: currentUserCmd.Result.Id} diff --git a/pkg/services/sqlstore/migrations/dashboard_mig.go b/pkg/services/sqlstore/migrations/dashboard_mig.go index 170498c4bd9..b770afb1b4e 100644 --- a/pkg/services/sqlstore/migrations/dashboard_mig.go +++ b/pkg/services/sqlstore/migrations/dashboard_mig.go @@ -211,4 +211,8 @@ func addDashboardMigration(mg *Migrator) { "name": "name", "external_id": "external_id", }) + + mg.AddMigration("Add check_sum column", NewAddColumnMigration(dashboardExtrasTableV2, &Column{ + Name: "check_sum", Type: DB_NVarchar, Length: 32, Nullable: true, + })) } diff --git a/pkg/services/sqlstore/migrations/migrations_test.go b/pkg/services/sqlstore/migrations/migrations_test.go index c36eee55da9..ec4cb5fbce1 100644 --- a/pkg/services/sqlstore/migrations/migrations_test.go +++ b/pkg/services/sqlstore/migrations/migrations_test.go @@ -39,7 +39,7 @@ func TestMigrations(t *testing.T) { has, err := x.SQL(sql).Get(&r) So(err, ShouldBeNil) So(has, ShouldBeTrue) - expectedMigrations := mg.MigrationsCount() - 2 //we currently skip to migrations. We should rewrite skipped migrations to write in the log as well. until then we have to keep this + expectedMigrations := mg.MigrationsCount() //we currently skip to migrations. We should rewrite skipped migrations to write in the log as well. 
until then we have to keep this So(r.Count, ShouldEqual, expectedMigrations) mg = NewMigrator(x) diff --git a/pkg/services/sqlstore/migrations/org_mig.go b/pkg/services/sqlstore/migrations/org_mig.go index f74b78cf1b3..4e9f4295017 100644 --- a/pkg/services/sqlstore/migrations/org_mig.go +++ b/pkg/services/sqlstore/migrations/org_mig.go @@ -48,27 +48,6 @@ func addOrgMigrations(mg *Migrator) { mg.AddMigration("create org_user table v1", NewAddTableMigration(orgUserV1)) addTableIndicesMigrations(mg, "v1", orgUserV1) - //------- copy data from old table------------------- - mg.AddMigration("copy data account to org", NewCopyTableDataMigration("org", "account", map[string]string{ - "id": "id", - "version": "version", - "name": "name", - "created": "created", - "updated": "updated", - }).IfTableExists("account")) - - mg.AddMigration("copy data account_user to org_user", NewCopyTableDataMigration("org_user", "account_user", map[string]string{ - "id": "id", - "org_id": "account_id", - "user_id": "user_id", - "role": "role", - "created": "created", - "updated": "updated", - }).IfTableExists("account_user")) - - mg.AddMigration("Drop old table account", NewDropTableMigration("account")) - mg.AddMigration("Drop old table account_user", NewDropTableMigration("account_user")) - mg.AddMigration("Update org table charset", NewTableCharsetMigration("org", []*Column{ {Name: "name", Type: DB_NVarchar, Length: 190, Nullable: false}, {Name: "address1", Type: DB_NVarchar, Length: 255, Nullable: true}, diff --git a/pkg/services/sqlstore/migrations/team_mig.go b/pkg/services/sqlstore/migrations/team_mig.go index eb0641fbc32..9800d27f8ab 100644 --- a/pkg/services/sqlstore/migrations/team_mig.go +++ b/pkg/services/sqlstore/migrations/team_mig.go @@ -50,4 +50,5 @@ func addTeamMigrations(mg *Migrator) { mg.AddMigration("Add column email to team table", NewAddColumnMigration(teamV1, &Column{ Name: "email", Type: DB_NVarchar, Nullable: true, Length: 190, })) + } diff --git a/pkg/services/sqlstore/migrator/migrator.go b/pkg/services/sqlstore/migrator/migrator.go index a55163bd780..9bdaaf7cc14 100644 --- a/pkg/services/sqlstore/migrator/migrator.go +++ b/pkg/services/sqlstore/migrator/migrator.go @@ -125,7 +125,7 @@ func (mg *Migrator) exec(m Migration, sess *xorm.Session) error { sql, args := condition.Sql(mg.dialect) results, err := sess.SQL(sql).Query(args...) 
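 		// an error or an empty result set both count as "condition not
 		// fulfilled"; that is the expected case on every startup after the
 		// first, so the skipped migration below is only logged at debug level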
if err != nil || len(results) == 0 { - mg.Logger.Info("Skipping migration condition not fulfilled", "id", m.Id()) + mg.Logger.Debug("Skipping migration condition not fulfilled", "id", m.Id()) return sess.Rollback() } } diff --git a/pkg/services/sqlstore/org_test.go b/pkg/services/sqlstore/org_test.go index 63b20aa6e86..af8500707d5 100644 --- a/pkg/services/sqlstore/org_test.go +++ b/pkg/services/sqlstore/org_test.go @@ -1,6 +1,7 @@ package sqlstore import ( + "context" "testing" "time" @@ -16,15 +17,16 @@ func TestAccountDataAccess(t *testing.T) { Convey("Given single org mode", func() { setting.AutoAssignOrg = true + setting.AutoAssignOrgId = 1 setting.AutoAssignOrgRole = "Viewer" Convey("Users should be added to default organization", func() { ac1cmd := m.CreateUserCommand{Login: "ac1", Email: "ac1@test.com", Name: "ac1 name"} ac2cmd := m.CreateUserCommand{Login: "ac2", Email: "ac2@test.com", Name: "ac2 name"} - err := CreateUser(&ac1cmd) + err := CreateUser(context.Background(), &ac1cmd) So(err, ShouldBeNil) - err = CreateUser(&ac2cmd) + err = CreateUser(context.Background(), &ac2cmd) So(err, ShouldBeNil) q1 := m.GetUserOrgListQuery{UserId: ac1cmd.Result.Id} @@ -43,8 +45,8 @@ func TestAccountDataAccess(t *testing.T) { ac1cmd := m.CreateUserCommand{Login: "ac1", Email: "ac1@test.com", Name: "ac1 name"} ac2cmd := m.CreateUserCommand{Login: "ac2", Email: "ac2@test.com", Name: "ac2 name", IsAdmin: true} - err := CreateUser(&ac1cmd) - err = CreateUser(&ac2cmd) + err := CreateUser(context.Background(), &ac1cmd) + err = CreateUser(context.Background(), &ac2cmd) So(err, ShouldBeNil) ac1 := ac1cmd.Result @@ -150,7 +152,7 @@ func TestAccountDataAccess(t *testing.T) { }) Convey("Can set using org", func() { - cmd := m.SetUsingOrgCommand{UserId: ac2.Id, OrgId: ac1.Id} + cmd := m.SetUsingOrgCommand{UserId: ac2.Id, OrgId: ac1.OrgId} err := SetUsingOrg(&cmd) So(err, ShouldBeNil) @@ -159,13 +161,25 @@ func TestAccountDataAccess(t *testing.T) { err := GetSignedInUser(&query) So(err, ShouldBeNil) - So(query.Result.OrgId, ShouldEqual, ac1.Id) + So(query.Result.OrgId, ShouldEqual, ac1.OrgId) So(query.Result.Email, ShouldEqual, "ac2@test.com") So(query.Result.Name, ShouldEqual, "ac2 name") So(query.Result.Login, ShouldEqual, "ac2") So(query.Result.OrgName, ShouldEqual, "ac1@test.com") So(query.Result.OrgRole, ShouldEqual, "Viewer") }) + + Convey("Should set last org as current when removing user from current", func() { + remCmd := m.RemoveOrgUserCommand{OrgId: ac1.OrgId, UserId: ac2.Id} + err := RemoveOrgUser(&remCmd) + So(err, ShouldBeNil) + + query := m.GetSignedInUserQuery{UserId: ac2.Id} + err = GetSignedInUser(&query) + + So(err, ShouldBeNil) + So(query.Result.OrgId, ShouldEqual, ac2.OrgId) + }) }) Convey("Cannot delete last admin org user", func() { @@ -182,7 +196,7 @@ func TestAccountDataAccess(t *testing.T) { Convey("Given an org user with dashboard permissions", func() { ac3cmd := m.CreateUserCommand{Login: "ac3", Email: "ac3@test.com", Name: "ac3 name", IsAdmin: false} - err := CreateUser(&ac3cmd) + err := CreateUser(context.Background(), &ac3cmd) So(err, ShouldBeNil) ac3 := ac3cmd.Result diff --git a/pkg/services/sqlstore/org_users.go b/pkg/services/sqlstore/org_users.go index 0b991c73c55..aad72cdacb4 100644 --- a/pkg/services/sqlstore/org_users.go +++ b/pkg/services/sqlstore/org_users.go @@ -20,7 +20,14 @@ func init() { func AddOrgUser(cmd *m.AddOrgUserCommand) error { return inTransaction(func(sess *DBSession) error { // check if user exists - if res, err := sess.Query("SELECT 1 from org_user 
WHERE org_id=? and user_id=?", cmd.OrgId, cmd.UserId); err != nil { + var user m.User + if exists, err := sess.Id(cmd.UserId).Get(&user); err != nil { + return err + } else if !exists { + return m.ErrUserNotFound + } + + if res, err := sess.Query("SELECT 1 from org_user WHERE org_id=? and user_id=?", cmd.OrgId, user.Id); err != nil { return err } else if len(res) == 1 { return m.ErrOrgUserAlreadyAdded @@ -41,7 +48,26 @@ func AddOrgUser(cmd *m.AddOrgUserCommand) error { } _, err := sess.Insert(&entity) - return err + if err != nil { + return err + } + + var userOrgs []*m.UserOrgDTO + sess.Table("org_user") + sess.Join("INNER", "org", "org_user.org_id=org.id") + sess.Where("org_user.user_id=? AND org_user.org_id=?", user.Id, user.OrgId) + sess.Cols("org.name", "org_user.role", "org_user.org_id") + err = sess.Find(&userOrgs) + + if err != nil { + return err + } + + if len(userOrgs) == 0 { + return setUsingOrgInTransaction(sess, user.Id, cmd.OrgId) + } + + return nil }) } @@ -110,6 +136,14 @@ func GetOrgUsers(query *m.GetOrgUsersQuery) error { func RemoveOrgUser(cmd *m.RemoveOrgUserCommand) error { return inTransaction(func(sess *DBSession) error { + // check if user exists + var user m.User + if exists, err := sess.Id(cmd.UserId).Get(&user); err != nil { + return err + } else if !exists { + return m.ErrUserNotFound + } + deletes := []string{ "DELETE FROM org_user WHERE org_id=? and user_id=?", "DELETE FROM dashboard_acl WHERE org_id=? and user_id = ?", @@ -123,6 +157,32 @@ func RemoveOrgUser(cmd *m.RemoveOrgUserCommand) error { } } + var userOrgs []*m.UserOrgDTO + sess.Table("org_user") + sess.Join("INNER", "org", "org_user.org_id=org.id") + sess.Where("org_user.user_id=?", user.Id) + sess.Cols("org.name", "org_user.role", "org_user.org_id") + err := sess.Find(&userOrgs) + + if err != nil { + return err + } + + hasCurrentOrgSet := false + for _, userOrg := range userOrgs { + if user.OrgId == userOrg.OrgId { + hasCurrentOrgSet = true + break + } + } + + if !hasCurrentOrgSet && len(userOrgs) > 0 { + err = setUsingOrgInTransaction(sess, user.Id, userOrgs[0].OrgId) + if err != nil { + return err + } + } + return validateOneAdminLeftInOrg(cmd.OrgId, sess) }) } diff --git a/pkg/services/sqlstore/quota_test.go b/pkg/services/sqlstore/quota_test.go index ed6565b1c3f..49e028e9cd3 100644 --- a/pkg/services/sqlstore/quota_test.go +++ b/pkg/services/sqlstore/quota_test.go @@ -43,6 +43,7 @@ func TestQuotaCommandsAndQueries(t *testing.T) { Name: "TestOrg", UserId: 1, } + err := CreateOrg(&userCmd) So(err, ShouldBeNil) orgId = userCmd.Result.Id diff --git a/pkg/services/sqlstore/session.go b/pkg/services/sqlstore/session.go new file mode 100644 index 00000000000..29d7392678f --- /dev/null +++ b/pkg/services/sqlstore/session.go @@ -0,0 +1,71 @@ +package sqlstore + +import ( + "context" + "reflect" + + "github.com/go-xorm/xorm" +) + +type DBSession struct { + *xorm.Session + events []interface{} +} + +type dbTransactionFunc func(sess *DBSession) error + +func (sess *DBSession) publishAfterCommit(msg interface{}) { + sess.events = append(sess.events, msg) +} + +func newSession() *DBSession { + return &DBSession{Session: x.NewSession()} +} + +func startSession(ctx context.Context, engine *xorm.Engine, beginTran bool) (*DBSession, error) { + value := ctx.Value(ContextSessionName) + var sess *DBSession + sess, ok := value.(*DBSession) + + if ok { + return sess, nil + } + + newSess := &DBSession{Session: engine.NewSession()} + if beginTran { + err := newSess.Begin() + if err != nil { + return nil, err + } + } + 
return newSess, nil +} + +func withDbSession(ctx context.Context, callback dbTransactionFunc) error { + sess, err := startSession(ctx, x, false) + if err != nil { + return err + } + + return callback(sess) +} + +func (sess *DBSession) InsertId(bean interface{}) (int64, error) { + table := sess.DB().Mapper.Obj2Table(getTypeName(bean)) + + dialect.PreInsertId(table, sess.Session) + + id, err := sess.Session.InsertOne(bean) + + dialect.PostInsertId(table, sess.Session) + + return id, err +} + +func getTypeName(bean interface{}) (res string) { + t := reflect.TypeOf(bean) + for t.Kind() == reflect.Ptr { + t = t.Elem() + } + return t.Name() +} diff --git a/pkg/services/sqlstore/shared.go b/pkg/services/sqlstore/shared.go deleted file mode 100644 index 9a24a513aad..00000000000 --- a/pkg/services/sqlstore/shared.go +++ /dev/null @@ -1,90 +0,0 @@ -package sqlstore - -import ( - "reflect" - "time" - - "github.com/go-xorm/xorm" - "github.com/grafana/grafana/pkg/bus" - "github.com/grafana/grafana/pkg/log" - sqlite3 "github.com/mattn/go-sqlite3" -) - -type DBSession struct { - *xorm.Session - events []interface{} -} - -type dbTransactionFunc func(sess *DBSession) error - -func (sess *DBSession) publishAfterCommit(msg interface{}) { - sess.events = append(sess.events, msg) -} - -func newSession() *DBSession { - return &DBSession{Session: x.NewSession()} -} - -func inTransaction(callback dbTransactionFunc) error { - return inTransactionWithRetry(callback, 0) -} - -func inTransactionWithRetry(callback dbTransactionFunc, retry int) error { - var err error - - sess := newSession() - defer sess.Close() - - if err = sess.Begin(); err != nil { - return err - } - - err = callback(sess) - - // special handling of database locked errors for sqlite, then we can retry 3 times - if sqlError, ok := err.(sqlite3.Error); ok && retry < 5 { - if sqlError.Code == sqlite3.ErrLocked { - sess.Rollback() - time.Sleep(time.Millisecond * time.Duration(10)) - sqlog.Info("Database table locked, sleeping then retrying", "retry", retry) - return inTransactionWithRetry(callback, retry+1) - } - } - - if err != nil { - sess.Rollback() - return err - } else if err = sess.Commit(); err != nil { - return err - } - - if len(sess.events) > 0 { - for _, e := range sess.events { - if err = bus.Publish(e); err != nil { - log.Error(3, "Failed to publish event after commit", err) - } - } - } - - return nil -} - -func (sess *DBSession) InsertId(bean interface{}) (int64, error) { - table := sess.DB().Mapper.Obj2Table(getTypeName(bean)) - - dialect.PreInsertId(table, sess.Session) - - id, err := sess.Session.InsertOne(bean) - - dialect.PostInsertId(table, sess.Session) - - return id, err -} - -func getTypeName(bean interface{}) (res string) { - t := reflect.TypeOf(bean) - for t.Kind() == reflect.Ptr { - t = t.Elem() - } - return t.Name() -} diff --git a/pkg/services/sqlstore/sqlstore.go b/pkg/services/sqlstore/sqlstore.go index b1b4b89d716..13d706b6198 100644 --- a/pkg/services/sqlstore/sqlstore.go +++ b/pkg/services/sqlstore/sqlstore.go @@ -1,6 +1,7 @@ package sqlstore import ( + "context" "fmt" "net/url" "os" @@ -13,6 +14,7 @@ import ( "github.com/grafana/grafana/pkg/bus" "github.com/grafana/grafana/pkg/log" m "github.com/grafana/grafana/pkg/models" + "github.com/grafana/grafana/pkg/registry" "github.com/grafana/grafana/pkg/services/annotations" "github.com/grafana/grafana/pkg/services/sqlstore/migrations" "github.com/grafana/grafana/pkg/services/sqlstore/migrator" @@ -21,157 +23,246 @@ import ( "github.com/go-sql-driver/mysql" 
"github.com/go-xorm/xorm" - _ "github.com/lib/pq" - _ "github.com/mattn/go-sqlite3" _ "github.com/grafana/grafana/pkg/tsdb/mssql" + _ "github.com/lib/pq" + sqlite3 "github.com/mattn/go-sqlite3" ) -type DatabaseConfig struct { - Type, Host, Name, User, Pwd, Path, SslMode string - CaCertPath string - ClientKeyPath string - ClientCertPath string - ServerCertName string - MaxOpenConn int - MaxIdleConn int - ConnMaxLifetime int -} - var ( x *xorm.Engine dialect migrator.Dialect - HasEngine bool - - DbCfg DatabaseConfig - - UseSQLite3 bool - sqlog log.Logger = log.New("sqlstore") + sqlog log.Logger = log.New("sqlstore") ) -func EnsureAdminUser() { - statsQuery := m.GetSystemStatsQuery{} +const ContextSessionName = "db-session" - if err := bus.Dispatch(&statsQuery); err != nil { - log.Fatal(3, "Could not determine if admin user exists: %v", err) - return - } - - if statsQuery.Result.Users > 0 { - return - } - - cmd := m.CreateUserCommand{} - cmd.Login = setting.AdminUser - cmd.Email = setting.AdminUser + "@localhost" - cmd.Password = setting.AdminPassword - cmd.IsAdmin = true - - if err := bus.Dispatch(&cmd); err != nil { - log.Error(3, "Failed to create default admin user", err) - return - } - - log.Info("Created default admin user: %v", setting.AdminUser) +func init() { + registry.Register(®istry.Descriptor{ + Name: "SqlStore", + Instance: &SqlStore{}, + InitPriority: registry.High, + }) } -func NewEngine() *xorm.Engine { - x, err := getEngine() +type SqlStore struct { + Cfg *setting.Cfg `inject:""` + Bus bus.Bus `inject:""` - if err != nil { - sqlog.Crit("Fail to connect to database", "error", err) - os.Exit(1) - } - - err = SetEngine(x) - - if err != nil { - sqlog.Error("Fail to initialize orm engine", "error", err) - os.Exit(1) - } - - return x + dbCfg DatabaseConfig + engine *xorm.Engine + log log.Logger + skipEnsureAdmin bool } -func SetEngine(engine *xorm.Engine) (err error) { +// NewSession returns a new DBSession +func (ss *SqlStore) NewSession() *DBSession { + return &DBSession{Session: ss.engine.NewSession()} +} + +// WithDbSession calls the callback with an session attached to the context. 
+func (ss *SqlStore) WithDbSession(ctx context.Context, callback dbTransactionFunc) error {
+	sess, err := startSession(ctx, ss.engine, false)
+	if err != nil {
+		return err
+	}
+
+	return callback(sess)
+}
+
+// WithTransactionalDbSession calls the callback with a session within a transaction
+func (ss *SqlStore) WithTransactionalDbSession(ctx context.Context, callback dbTransactionFunc) error {
+	return ss.inTransactionWithRetryCtx(ctx, callback, 0)
+}
+
+func (ss *SqlStore) inTransactionWithRetryCtx(ctx context.Context, callback dbTransactionFunc, retry int) error {
+	sess, err := startSession(ctx, ss.engine, true)
+	if err != nil {
+		return err
+	}
+
+	defer sess.Close()
+
+	err = callback(sess)
+
+	// special handling of database locked errors for sqlite; we retry up to five times
+	if sqlError, ok := err.(sqlite3.Error); ok && retry < 5 {
+		if sqlError.Code == sqlite3.ErrLocked {
+			sess.Rollback()
+			time.Sleep(time.Millisecond * time.Duration(10))
+			sqlog.Info("Database table locked, sleeping then retrying", "retry", retry)
+			return ss.inTransactionWithRetryCtx(ctx, callback, retry+1)
+		}
+	}
+
+	if err != nil {
+		sess.Rollback()
+		return err
+	} else if err = sess.Commit(); err != nil {
+		return err
+	}
+
+	if len(sess.events) > 0 {
+		for _, e := range sess.events {
+			if err = bus.Publish(e); err != nil {
+				log.Error(3, "Failed to publish event after commit", err)
+			}
+		}
+	}
+
+	return nil
+}
+
+func (ss *SqlStore) Init() error {
+	ss.log = log.New("sqlstore")
+	ss.readConfig()
+
+	engine, err := ss.getEngine()
+
+	if err != nil {
+		return fmt.Errorf("Failed to connect to database: %v", err)
+	}
+
+	ss.engine = engine
+
+	// temporarily still set global var
 	x = engine
 	dialect = migrator.NewDialect(x)
+
 	migrator := migrator.NewMigrator(x)
 	migrations.AddMigrations(migrator)
 
+	for _, descriptor := range registry.GetServices() {
+		sc, ok := descriptor.Instance.(registry.DatabaseMigrator)
+		if ok {
+			sc.AddMigration(migrator)
+		}
+	}
+
 	if err := migrator.Start(); err != nil {
-		return fmt.Errorf("Sqlstore::Migration failed err: %v\n", err)
+		return fmt.Errorf("Migration failed err: %v", err)
 	}
 
 	// Init repo instances
 	annotations.SetRepository(&SqlAnnotationRepo{})
-	return nil
+
+	ss.Bus.SetTransactionManager(ss)
+
+	// ensure admin user
+	if ss.skipEnsureAdmin {
+		return nil
+	}
+
+	return ss.ensureAdminUser()
 }
 
-func getEngine() (*xorm.Engine, error) {
-	LoadConfig()
+func (ss *SqlStore) ensureAdminUser() error {
+	systemUserCountQuery := m.GetSystemUserCountStatsQuery{}
 
-	cnnstr := ""
-	switch DbCfg.Type {
+	err := ss.InTransaction(context.Background(), func(ctx context.Context) error {
+
+		err := bus.DispatchCtx(ctx, &systemUserCountQuery)
+		if err != nil {
+			return fmt.Errorf("Could not determine if admin user exists: %v", err)
+		}
+
+		if systemUserCountQuery.Result.Count > 0 {
+			return nil
+		}
+
+		cmd := m.CreateUserCommand{}
+		cmd.Login = setting.AdminUser
+		cmd.Email = setting.AdminUser + "@localhost"
+		cmd.Password = setting.AdminPassword
+		cmd.IsAdmin = true
+
+		if err := bus.DispatchCtx(ctx, &cmd); err != nil {
+			return fmt.Errorf("Failed to create admin user: %v", err)
+		}
+
+		ss.log.Info("Created default admin", "user", setting.AdminUser)
+
+		return nil
+	})
+
+	return err
+}
+
+func (ss *SqlStore) buildConnectionString() (string, error) {
+	cnnstr := ss.dbCfg.ConnectionString
+
+	// special case used by integration tests
+	if cnnstr != "" {
+		return cnnstr, nil
+	}
+
+	switch ss.dbCfg.Type {
 	case migrator.MYSQL:
 		protocol := "tcp"
-
if strings.HasPrefix(ss.dbCfg.Host, "/") { protocol = "unix" } cnnstr = fmt.Sprintf("%s:%s@%s(%s)/%s?collation=utf8mb4_unicode_ci&allowNativePasswords=true", - url.QueryEscape(DbCfg.User), url.QueryEscape(DbCfg.Pwd), protocol, DbCfg.Host, url.PathEscape(DbCfg.Name)) + ss.dbCfg.User, ss.dbCfg.Pwd, protocol, ss.dbCfg.Host, ss.dbCfg.Name) - if DbCfg.SslMode == "true" || DbCfg.SslMode == "skip-verify" { - tlsCert, err := makeCert("custom", DbCfg) + if ss.dbCfg.SslMode == "true" || ss.dbCfg.SslMode == "skip-verify" { + tlsCert, err := makeCert("custom", ss.dbCfg) if err != nil { - return nil, err + return "", err } mysql.RegisterTLSConfig("custom", tlsCert) cnnstr += "&tls=custom" } case migrator.POSTGRES: var host, port = "127.0.0.1", "5432" - fields := strings.Split(DbCfg.Host, ":") + fields := strings.Split(ss.dbCfg.Host, ":") if len(fields) > 0 && len(strings.TrimSpace(fields[0])) > 0 { host = fields[0] } if len(fields) > 1 && len(strings.TrimSpace(fields[1])) > 0 { port = fields[1] } - cnnstr = fmt.Sprintf("user='%s' password='%s' host='%s' port='%s' dbname='%s' sslmode='%s' sslcert='%s' sslkey='%s' sslrootcert='%s'", - strings.Replace(DbCfg.User, `'`, `\'`, -1), - strings.Replace(DbCfg.Pwd, `'`, `\'`, -1), - strings.Replace(host, `'`, `\'`, -1), - strings.Replace(port, `'`, `\'`, -1), - strings.Replace(DbCfg.Name, `'`, `\'`, -1), - strings.Replace(DbCfg.SslMode, `'`, `\'`, -1), - strings.Replace(DbCfg.ClientCertPath, `'`, `\'`, -1), - strings.Replace(DbCfg.ClientKeyPath, `'`, `\'`, -1), - strings.Replace(DbCfg.CaCertPath, `'`, `\'`, -1), - ) - case migrator.SQLITE: - if !filepath.IsAbs(DbCfg.Path) { - DbCfg.Path = filepath.Join(setting.DataPath, DbCfg.Path) + if ss.dbCfg.Pwd == "" { + ss.dbCfg.Pwd = "''" } - os.MkdirAll(path.Dir(DbCfg.Path), os.ModePerm) - cnnstr = "file:" + DbCfg.Path + "?cache=shared&mode=rwc" + if ss.dbCfg.User == "" { + ss.dbCfg.User = "''" + } + cnnstr = fmt.Sprintf("user=%s password=%s host=%s port=%s dbname=%s sslmode=%s sslcert=%s sslkey=%s sslrootcert=%s", ss.dbCfg.User, ss.dbCfg.Pwd, host, port, ss.dbCfg.Name, ss.dbCfg.SslMode, ss.dbCfg.ClientCertPath, ss.dbCfg.ClientKeyPath, ss.dbCfg.CaCertPath) + case migrator.SQLITE: + // special case for tests + if !filepath.IsAbs(ss.dbCfg.Path) { + ss.dbCfg.Path = filepath.Join(setting.DataPath, ss.dbCfg.Path) + } + os.MkdirAll(path.Dir(ss.dbCfg.Path), os.ModePerm) + cnnstr = "file:" + ss.dbCfg.Path + "?cache=shared&mode=rwc" default: - return nil, fmt.Errorf("Unknown database type: %s", DbCfg.Type) + return "", fmt.Errorf("Unknown database type: %s", ss.dbCfg.Type) } - sqlog.Info("Initializing DB", "dbtype", DbCfg.Type) - engine, err := xorm.NewEngine(DbCfg.Type, cnnstr) + return cnnstr, nil +} + +func (ss *SqlStore) getEngine() (*xorm.Engine, error) { + connectionString, err := ss.buildConnectionString() + if err != nil { return nil, err } - engine.SetMaxOpenConns(DbCfg.MaxOpenConn) - engine.SetMaxIdleConns(DbCfg.MaxIdleConn) - engine.SetConnMaxLifetime(time.Second * time.Duration(DbCfg.ConnMaxLifetime)) - debugSql := setting.Raw.Section("database").Key("log_queries").MustBool(false) + sqlog.Info("Connecting to DB", "dbtype", ss.dbCfg.Type) + engine, err := xorm.NewEngine(ss.dbCfg.Type, connectionString) + if err != nil { + return nil, err + } + + engine.SetMaxOpenConns(ss.dbCfg.MaxOpenConn) + engine.SetMaxIdleConns(ss.dbCfg.MaxIdleConn) + engine.SetConnMaxLifetime(time.Second * time.Duration(ss.dbCfg.ConnMaxLifetime)) + + // configure sql logging + debugSql := 
ss.Cfg.Raw.Section("database").Key("log_queries").MustBool(false) if !debugSql { engine.SetLogger(&xorm.DiscardLogger{}) } else { @@ -183,95 +274,92 @@ func getEngine() (*xorm.Engine, error) { return engine, nil } -func LoadConfig() { - sec := setting.Raw.Section("database") +func (ss *SqlStore) readConfig() { + sec := ss.Cfg.Raw.Section("database") cfgURL := sec.Key("url").String() if len(cfgURL) != 0 { dbURL, _ := url.Parse(cfgURL) - DbCfg.Type = dbURL.Scheme - DbCfg.Host = dbURL.Host + ss.dbCfg.Type = dbURL.Scheme + ss.dbCfg.Host = dbURL.Host pathSplit := strings.Split(dbURL.Path, "/") if len(pathSplit) > 1 { - DbCfg.Name = pathSplit[1] + ss.dbCfg.Name = pathSplit[1] } userInfo := dbURL.User if userInfo != nil { - DbCfg.User = userInfo.Username() - DbCfg.Pwd, _ = userInfo.Password() + ss.dbCfg.User = userInfo.Username() + ss.dbCfg.Pwd, _ = userInfo.Password() } } else { - DbCfg.Type = sec.Key("type").String() - DbCfg.Host = sec.Key("host").String() - DbCfg.Name = sec.Key("name").String() - DbCfg.User = sec.Key("user").String() - if len(DbCfg.Pwd) == 0 { - DbCfg.Pwd = sec.Key("password").String() - } + ss.dbCfg.Type = sec.Key("type").String() + ss.dbCfg.Host = sec.Key("host").String() + ss.dbCfg.Name = sec.Key("name").String() + ss.dbCfg.User = sec.Key("user").String() + ss.dbCfg.ConnectionString = sec.Key("connection_string").String() + ss.dbCfg.Pwd = sec.Key("password").String() } - DbCfg.MaxOpenConn = sec.Key("max_open_conn").MustInt(0) - DbCfg.MaxIdleConn = sec.Key("max_idle_conn").MustInt(0) - DbCfg.ConnMaxLifetime = sec.Key("conn_max_lifetime").MustInt(14400) - if DbCfg.Type == "sqlite3" { - UseSQLite3 = true - // only allow one connection as sqlite3 has multi threading issues that cause table locks - // DbCfg.MaxIdleConn = 1 - // DbCfg.MaxOpenConn = 1 - } - DbCfg.SslMode = sec.Key("ssl_mode").String() - DbCfg.CaCertPath = sec.Key("ca_cert_path").String() - DbCfg.ClientKeyPath = sec.Key("client_key_path").String() - DbCfg.ClientCertPath = sec.Key("client_cert_path").String() - DbCfg.ServerCertName = sec.Key("server_cert_name").String() - DbCfg.Path = sec.Key("path").MustString("data/grafana.db") + ss.dbCfg.MaxOpenConn = sec.Key("max_open_conn").MustInt(0) + ss.dbCfg.MaxIdleConn = sec.Key("max_idle_conn").MustInt(2) + ss.dbCfg.ConnMaxLifetime = sec.Key("conn_max_lifetime").MustInt(14400) + + ss.dbCfg.SslMode = sec.Key("ssl_mode").String() + ss.dbCfg.CaCertPath = sec.Key("ca_cert_path").String() + ss.dbCfg.ClientKeyPath = sec.Key("client_key_path").String() + ss.dbCfg.ClientCertPath = sec.Key("client_cert_path").String() + ss.dbCfg.ServerCertName = sec.Key("server_cert_name").String() + ss.dbCfg.Path = sec.Key("path").MustString("data/grafana.db") } -func InitTestDB(t *testing.T) *xorm.Engine { - selectedDb := migrator.SQLITE - // selectedDb := migrator.MYSQL - // selectedDb := migrator.POSTGRES +func InitTestDB(t *testing.T) *SqlStore { + t.Helper() + sqlstore := &SqlStore{} + sqlstore.skipEnsureAdmin = true + sqlstore.Bus = bus.New() - var x *xorm.Engine - var err error + dbType := migrator.SQLITE // environment variable present for test db? 
if db, present := os.LookupEnv("GRAFANA_TEST_DB"); present { - selectedDb = db + dbType = db } - switch strings.ToLower(selectedDb) { - case migrator.MYSQL: - x, err = xorm.NewEngine(sqlutil.TestDB_Mysql.DriverName, sqlutil.TestDB_Mysql.ConnStr) - case migrator.POSTGRES: - x, err = xorm.NewEngine(sqlutil.TestDB_Postgres.DriverName, sqlutil.TestDB_Postgres.ConnStr) + // set test db config + sqlstore.Cfg = setting.NewCfg() + sec, _ := sqlstore.Cfg.Raw.NewSection("database") + sec.NewKey("type", dbType) + + switch dbType { + case "mysql": + sec.NewKey("connection_string", sqlutil.TestDB_Mysql.ConnStr) + case "postgres": + sec.NewKey("connection_string", sqlutil.TestDB_Postgres.ConnStr) default: - x, err = xorm.NewEngine(sqlutil.TestDB_Sqlite3.DriverName, sqlutil.TestDB_Sqlite3.ConnStr) + sec.NewKey("connection_string", sqlutil.TestDB_Sqlite3.ConnStr) } - x.DatabaseTZ = time.UTC - x.TZLocation = time.UTC - + // need to get engine to clean db before we init + engine, err := xorm.NewEngine(dbType, sec.Key("connection_string").String()) if err != nil { t.Fatalf("Failed to init test database: %v", err) } - dialect = migrator.NewDialect(x) - - err = dialect.CleanDB() - if err != nil { + dialect = migrator.NewDialect(engine) + if err := dialect.CleanDB(); err != nil { t.Fatalf("Failed to clean test db %v", err) } - if err := SetEngine(x); err != nil { - t.Fatal(err) + if err := sqlstore.Init(); err != nil { + t.Fatalf("Failed to init test database: %v", err) } - // x.ShowSQL() + sqlstore.engine.DatabaseTZ = time.UTC + sqlstore.engine.TZLocation = time.UTC - return x + return sqlstore } func IsTestDbMySql() bool { @@ -289,3 +377,15 @@ func IsTestDbPostgres() bool { return false } + +type DatabaseConfig struct { + Type, Host, Name, User, Pwd, Path, SslMode string + CaCertPath string + ClientKeyPath string + ClientCertPath string + ServerCertName string + ConnectionString string + MaxOpenConn int + MaxIdleConn int + ConnMaxLifetime int +} diff --git a/pkg/services/sqlstore/stats.go b/pkg/services/sqlstore/stats.go index 173a1e56634..6db481bf06b 100644 --- a/pkg/services/sqlstore/stats.go +++ b/pkg/services/sqlstore/stats.go @@ -1,6 +1,7 @@ package sqlstore import ( + "context" "time" "github.com/grafana/grafana/pkg/bus" @@ -10,7 +11,9 @@ import ( func init() { bus.AddHandler("sql", GetSystemStats) bus.AddHandler("sql", GetDataSourceStats) + bus.AddHandler("sql", GetDataSourceAccessStats) bus.AddHandler("sql", GetAdminStats) + bus.AddHandlerCtx("sql", GetSystemUserCountStats) } var activeUserTimeLimit = time.Hour * 24 * 30 @@ -22,43 +25,51 @@ func GetDataSourceStats(query *m.GetDataSourceStatsQuery) error { return err } +func GetDataSourceAccessStats(query *m.GetDataSourceAccessStatsQuery) error { + var rawSql = `SELECT COUNT(*) as count, type, access FROM data_source GROUP BY type, access` + query.Result = make([]*m.DataSourceAccessStats, 0) + err := x.SQL(rawSql).Find(&query.Result) + return err +} + func GetSystemStats(query *m.GetSystemStatsQuery) error { - var rawSql = `SELECT - ( - SELECT COUNT(*) - FROM ` + dialect.Quote("user") + ` - ) AS users, - ( - SELECT COUNT(*) - FROM ` + dialect.Quote("org") + ` - ) AS orgs, - ( - SELECT COUNT(*) - FROM ` + dialect.Quote("dashboard") + ` - ) AS dashboards, - ( - SELECT COUNT(*) - FROM ` + dialect.Quote("data_source") + ` - ) AS datasources, - ( - SELECT COUNT(*) FROM ` + dialect.Quote("star") + ` - ) AS stars, - ( - SELECT COUNT(*) - FROM ` + dialect.Quote("playlist") + ` - ) AS playlists, - ( - SELECT COUNT(*) - FROM ` + dialect.Quote("alert") + ` - ) AS 
alerts, - ( - SELECT COUNT(*) FROM ` + dialect.Quote("user") + ` where last_seen_at > ? - ) as active_users - ` + sb := &SqlBuilder{} + sb.Write("SELECT ") + sb.Write(`(SELECT COUNT(*) FROM ` + dialect.Quote("user") + `) AS users,`) + sb.Write(`(SELECT COUNT(*) FROM ` + dialect.Quote("org") + `) AS orgs,`) + sb.Write(`(SELECT COUNT(*) FROM ` + dialect.Quote("dashboard") + `) AS dashboards,`) + sb.Write(`(SELECT COUNT(*) FROM ` + dialect.Quote("data_source") + `) AS datasources,`) + sb.Write(`(SELECT COUNT(*) FROM ` + dialect.Quote("star") + `) AS stars,`) + sb.Write(`(SELECT COUNT(*) FROM ` + dialect.Quote("playlist") + `) AS playlists,`) + sb.Write(`(SELECT COUNT(*) FROM ` + dialect.Quote("alert") + `) AS alerts,`) activeUserDeadlineDate := time.Now().Add(-activeUserTimeLimit) + sb.Write(`(SELECT COUNT(*) FROM `+dialect.Quote("user")+` where last_seen_at > ?) AS active_users,`, activeUserDeadlineDate) + + sb.Write(`(SELECT COUNT(id) FROM `+dialect.Quote("dashboard")+` where is_folder = ?) AS folders,`, dialect.BooleanStr(true)) + + sb.Write(`( + SELECT COUNT(acl.id) + FROM `+dialect.Quote("dashboard_acl")+` as acl + inner join `+dialect.Quote("dashboard")+` as d + on d.id = acl.dashboard_id + WHERE d.is_folder = ? + ) AS dashboard_permissions,`, dialect.BooleanStr(false)) + + sb.Write(`( + SELECT COUNT(acl.id) + FROM `+dialect.Quote("dashboard_acl")+` as acl + inner join `+dialect.Quote("dashboard")+` as d + on d.id = acl.dashboard_id + WHERE d.is_folder = ? + ) AS folder_permissions,`, dialect.BooleanStr(true)) + + sb.Write(`(SELECT COUNT(id) FROM ` + dialect.Quote("dashboard_provisioning") + `) AS provisioned_dashboards,`) + sb.Write(`(SELECT COUNT(id) FROM ` + dialect.Quote("dashboard_snapshot") + `) AS snapshots,`) + sb.Write(`(SELECT COUNT(id) FROM ` + dialect.Quote("team") + `) AS teams`) + var stats m.SystemStats - _, err := x.SQL(rawSql, activeUserDeadlineDate).Get(&stats) + _, err := x.SQL(sb.GetSqlString(), sb.params...).Get(&stats) if err != nil { return err } @@ -122,3 +133,19 @@ func GetAdminStats(query *m.GetAdminStatsQuery) error { query.Result = &stats return err } + +func GetSystemUserCountStats(ctx context.Context, query *m.GetSystemUserCountStatsQuery) error { + return withDbSession(ctx, func(sess *DBSession) error { + + var rawSql = `SELECT COUNT(id) AS Count FROM ` + dialect.Quote("user") + var stats m.SystemUserCountStats + _, err := sess.SQL(rawSql).Get(&stats) + if err != nil { + return err + } + + query.Result = &stats + + return err + }) +} diff --git a/pkg/services/sqlstore/stats_test.go b/pkg/services/sqlstore/stats_test.go new file mode 100644 index 00000000000..dae24952d17 --- /dev/null +++ b/pkg/services/sqlstore/stats_test.go @@ -0,0 +1,40 @@ +package sqlstore + +import ( + "context" + "testing" + + m "github.com/grafana/grafana/pkg/models" + . 
"github.com/smartystreets/goconvey/convey" +) + +func TestStatsDataAccess(t *testing.T) { + + Convey("Testing Stats Data Access", t, func() { + InitTestDB(t) + + Convey("Get system stats should not results in error", func() { + query := m.GetSystemStatsQuery{} + err := GetSystemStats(&query) + So(err, ShouldBeNil) + }) + + Convey("Get system user count stats should not results in error", func() { + query := m.GetSystemUserCountStatsQuery{} + err := GetSystemUserCountStats(context.Background(), &query) + So(err, ShouldBeNil) + }) + + Convey("Get datasource stats should not results in error", func() { + query := m.GetDataSourceStatsQuery{} + err := GetDataSourceStats(&query) + So(err, ShouldBeNil) + }) + + Convey("Get datasource access stats should not results in error", func() { + query := m.GetDataSourceAccessStatsQuery{} + err := GetDataSourceAccessStats(&query) + So(err, ShouldBeNil) + }) + }) +} diff --git a/pkg/services/sqlstore/team.go b/pkg/services/sqlstore/team.go index 7d53d114235..72955df9a6a 100644 --- a/pkg/services/sqlstore/team.go +++ b/pkg/services/sqlstore/team.go @@ -22,6 +22,16 @@ func init() { bus.AddHandler("sql", GetTeamMembers) } +func getTeamSelectSqlBase() string { + return `SELECT + team.id as id, + team.org_id, + team.name as name, + team.email as email, + (SELECT COUNT(*) from team_member where team_member.team_id = team.id) as member_count + FROM team as team ` +} + func CreateTeam(cmd *m.CreateTeamCommand) error { return inTransaction(func(sess *DBSession) error { @@ -130,21 +140,15 @@ func isTeamNameTaken(orgId int64, name string, existingId int64, sess *DBSession func SearchTeams(query *m.SearchTeamsQuery) error { query.Result = m.SearchTeamQueryResult{ - Teams: make([]*m.SearchTeamDto, 0), + Teams: make([]*m.TeamDTO, 0), } queryWithWildcards := "%" + query.Query + "%" var sql bytes.Buffer params := make([]interface{}, 0) - sql.WriteString(`select - team.id as id, - team.org_id, - team.name as name, - team.email as email, - (select count(*) from team_member where team_member.team_id = team.id) as member_count - from team as team - where team.org_id = ?`) + sql.WriteString(getTeamSelectSqlBase()) + sql.WriteString(` WHERE team.org_id = ?`) params = append(params, query.OrgId) @@ -186,8 +190,14 @@ func SearchTeams(query *m.SearchTeamsQuery) error { } func GetTeamById(query *m.GetTeamByIdQuery) error { - var team m.Team - exists, err := x.Where("org_id=? and id=?", query.OrgId, query.Id).Get(&team) + var sql bytes.Buffer + + sql.WriteString(getTeamSelectSqlBase()) + sql.WriteString(` WHERE team.org_id = ? and team.id = ?`) + + var team m.TeamDTO + exists, err := x.Sql(sql.String(), query.OrgId, query.Id).Get(&team) + if err != nil { return err } @@ -202,13 +212,15 @@ func GetTeamById(query *m.GetTeamByIdQuery) error { // GetTeamsByUser is used by the Guardian when checking a users' permissions func GetTeamsByUser(query *m.GetTeamsByUserQuery) error { - query.Result = make([]*m.Team, 0) + query.Result = make([]*m.TeamDTO, 0) - sess := x.Table("team") - sess.Join("INNER", "team_member", "team.id=team_member.team_id") - sess.Where("team.org_id=? and team_member.user_id=?", query.OrgId, query.UserId) + var sql bytes.Buffer - err := sess.Find(&query.Result) + sql.WriteString(getTeamSelectSqlBase()) + sql.WriteString(` INNER JOIN team_member on team.id = team_member.team_id`) + sql.WriteString(` WHERE team.org_id = ? 
+func getTeamSelectSqlBase() string {
+	return `SELECT
+		team.id as id,
+		team.org_id,
+		team.name as name,
+		team.email as email,
+		(SELECT COUNT(*) from team_member where team_member.team_id = team.id) as member_count
+		FROM team as team `
+}
+
 func CreateTeam(cmd *m.CreateTeamCommand) error {
 	return inTransaction(func(sess *DBSession) error {
 
@@ -130,21 +140,15 @@ func isTeamNameTaken(orgId int64, name string, existingId int64, sess *DBSession
 func SearchTeams(query *m.SearchTeamsQuery) error {
 	query.Result = m.SearchTeamQueryResult{
-		Teams: make([]*m.SearchTeamDto, 0),
+		Teams: make([]*m.TeamDTO, 0),
 	}
 	queryWithWildcards := "%" + query.Query + "%"
 
 	var sql bytes.Buffer
 	params := make([]interface{}, 0)
 
-	sql.WriteString(`select
-		team.id as id,
-		team.org_id,
-		team.name as name,
-		team.email as email,
-		(select count(*) from team_member where team_member.team_id = team.id) as member_count
-		from team as team
-		where team.org_id = ?`)
+	sql.WriteString(getTeamSelectSqlBase())
+	sql.WriteString(` WHERE team.org_id = ?`)
 
 	params = append(params, query.OrgId)
 
@@ -186,8 +190,14 @@ func SearchTeams(query *m.SearchTeamsQuery) error {
 }
 
 func GetTeamById(query *m.GetTeamByIdQuery) error {
-	var team m.Team
-	exists, err := x.Where("org_id=? and id=?", query.OrgId, query.Id).Get(&team)
+	var sql bytes.Buffer
+
+	sql.WriteString(getTeamSelectSqlBase())
+	sql.WriteString(` WHERE team.org_id = ? and team.id = ?`)
+
+	var team m.TeamDTO
+	exists, err := x.Sql(sql.String(), query.OrgId, query.Id).Get(&team)
+
 	if err != nil {
 		return err
 	}
@@ -202,13 +212,15 @@ func GetTeamById(query *m.GetTeamByIdQuery) error {
 
 // GetTeamsByUser is used by the Guardian when checking a user's permissions
 func GetTeamsByUser(query *m.GetTeamsByUserQuery) error {
-	query.Result = make([]*m.Team, 0)
+	query.Result = make([]*m.TeamDTO, 0)
 
-	sess := x.Table("team")
-	sess.Join("INNER", "team_member", "team.id=team_member.team_id")
-	sess.Where("team.org_id=? and team_member.user_id=?", query.OrgId, query.UserId)
+	var sql bytes.Buffer
 
-	err := sess.Find(&query.Result)
+	sql.WriteString(getTeamSelectSqlBase())
+	sql.WriteString(` INNER JOIN team_member on team.id = team_member.team_id`)
+	sql.WriteString(` WHERE team.org_id = ? and team_member.user_id = ?`)
+
+	err := x.Sql(sql.String(), query.OrgId, query.UserId).Find(&query.Result)
 	return err
 }
 
@@ -268,7 +280,15 @@ func GetTeamMembers(query *m.GetTeamMembersQuery) error {
 	query.Result = make([]*m.TeamMemberDTO, 0)
 	sess := x.Table("team_member")
 	sess.Join("INNER", "user", fmt.Sprintf("team_member.user_id=%s.id", x.Dialect().Quote("user")))
-	sess.Where("team_member.org_id=? and team_member.team_id=?", query.OrgId, query.TeamId)
+	if query.OrgId != 0 {
+		sess.Where("team_member.org_id=?", query.OrgId)
+	}
+	if query.TeamId != 0 {
+		sess.Where("team_member.team_id=?", query.TeamId)
+	}
+	if query.UserId != 0 {
+		sess.Where("team_member.user_id=?", query.UserId)
+	}
 	sess.Cols("user.org_id", "team_member.team_id", "team_member.user_id", "user.email", "user.login")
 	sess.Asc("user.login", "user.email")
diff --git a/pkg/services/sqlstore/team_test.go b/pkg/services/sqlstore/team_test.go
index f4b022906da..abaa973957d 100644
--- a/pkg/services/sqlstore/team_test.go
+++ b/pkg/services/sqlstore/team_test.go
@@ -1,6 +1,7 @@
 package sqlstore
 
 import (
+	"context"
 	"fmt"
 	"testing"
 
@@ -22,7 +23,7 @@ func TestTeamCommandsAndQueries(t *testing.T) {
 				Name:  fmt.Sprint("user", i),
 				Login: fmt.Sprint("loginuser", i),
 			}
-			err := CreateUser(userCmd)
+			err := CreateUser(context.Background(), userCmd)
 			So(err, ShouldBeNil)
 			userIds = append(userIds, userCmd.Result.Id)
 		}
diff --git a/pkg/services/sqlstore/transactions.go b/pkg/services/sqlstore/transactions.go
new file mode 100644
index 00000000000..eccd37f9a43
--- /dev/null
+++ b/pkg/services/sqlstore/transactions.go
@@ -0,0 +1,106 @@
+package sqlstore
+
+import (
+	"context"
+	"time"
+
+	"github.com/grafana/grafana/pkg/bus"
+	"github.com/grafana/grafana/pkg/log"
+	sqlite3 "github.com/mattn/go-sqlite3"
+)
+
+func (ss *SqlStore) InTransaction(ctx context.Context, fn func(ctx context.Context) error) error {
+	return ss.inTransactionWithRetry(ctx, fn, 0)
+}
+
+func (ss *SqlStore) inTransactionWithRetry(ctx context.Context, fn func(ctx context.Context) error, retry int) error {
+	sess, err := startSession(ctx, ss.engine, true)
+	if err != nil {
+		return err
+	}
+
+	defer sess.Close()
+
+	withValue := context.WithValue(ctx, ContextSessionName, sess)
+
+	err = fn(withValue)
+
+	// special handling of database locked errors for sqlite; retry up to five times
+	if sqlError, ok := err.(sqlite3.Error); ok && retry < 5 {
+		if sqlError.Code == sqlite3.ErrLocked {
+			sess.Rollback()
+			time.Sleep(10 * time.Millisecond)
+			ss.log.Info("Database table locked, sleeping then retrying", "retry", retry)
+			return ss.inTransactionWithRetry(ctx, fn, retry+1)
+		}
+	}
+
+	if err != nil {
+		sess.Rollback()
+		return err
+	}
+
+	if err = sess.Commit(); err != nil {
+		return err
+	}
+
+	if len(sess.events) > 0 {
+		for _, e := range sess.events {
+			if err = bus.Publish(e); err != nil {
+				ss.log.Error("Failed to publish event after commit", err)
+			}
+		}
+	}
+
+	return nil
+}
+
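+// inTransactionWithRetry is the package-level variant of the method above,
+// bound to the global xorm engine. A rough usage sketch (the inserted value
+// is hypothetical):
+//
+//	err := inTransaction(func(sess *DBSession) error {
+//		_, err := sess.Insert(&item)
+//		return err // non-nil rolls back; nil commits and publishes session events
+//	})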
+func inTransactionWithRetry(callback dbTransactionFunc, retry int) error {
+	return inTransactionWithRetryCtx(context.Background(), callback, retry)
+}
+
+func inTransactionWithRetryCtx(ctx context.Context, callback dbTransactionFunc, retry int) error {
+	sess, err := startSession(ctx, x, true)
+	if err != nil {
+		return err
+	}
+
+	defer sess.Close()
+
+	err = callback(sess)
+
+	// special handling of database locked errors for sqlite; retry up to five times
+	if sqlError, ok := err.(sqlite3.Error); ok && retry < 5 {
+		if sqlError.Code == sqlite3.ErrLocked {
+			sess.Rollback()
+			time.Sleep(10 * time.Millisecond)
+			sqlog.Info("Database table locked, sleeping then retrying", "retry", retry)
+			return inTransactionWithRetry(callback, retry+1)
+		}
+	}
+
+	if err != nil {
+		sess.Rollback()
+		return err
+	} else if err = sess.Commit(); err != nil {
+		return err
+	}
+
+	if len(sess.events) > 0 {
+		for _, e := range sess.events {
+			if err = bus.Publish(e); err != nil {
+				log.Error(3, "Failed to publish event after commit", err)
+			}
+		}
+	}
+
+	return nil
+}
+
+func inTransaction(callback dbTransactionFunc) error {
+	return inTransactionWithRetry(callback, 0)
+}
+
+func inTransactionCtx(ctx context.Context, callback dbTransactionFunc) error {
+	return inTransactionWithRetryCtx(ctx, callback, 0)
+}
diff --git a/pkg/services/sqlstore/transactions_test.go b/pkg/services/sqlstore/transactions_test.go
new file mode 100644
index 00000000000..937649921ba
--- /dev/null
+++ b/pkg/services/sqlstore/transactions_test.go
@@ -0,0 +1,60 @@
+package sqlstore
+
+import (
+	"context"
+	"errors"
+	"testing"
+
+	"github.com/grafana/grafana/pkg/models"
+
+	. "github.com/smartystreets/goconvey/convey"
+)
+
+type testQuery struct {
+	result bool
+}
+
+var errProvoked = errors.New("provoked error")
+
+func TestTransaction(t *testing.T) {
+	ss := InitTestDB(t)
+
+	Convey("InTransaction", t, func() {
+		cmd := &models.AddApiKeyCommand{Key: "secret-key", Name: "key", OrgId: 1}
+
+		err := AddApiKey(cmd)
+		So(err, ShouldBeNil)
+
+		deleteApiKeyCmd := &models.DeleteApiKeyCommand{Id: cmd.Result.Id, OrgId: 1}
+
+		Convey("can delete api key", func() {
+			err := ss.InTransaction(context.Background(), func(ctx context.Context) error {
+				return DeleteApiKeyCtx(ctx, deleteApiKeyCmd)
+			})
+
+			So(err, ShouldBeNil)
+
+			query := &models.GetApiKeyByIdQuery{ApiKeyId: cmd.Result.Id}
+			err = GetApiKeyById(query)
+			So(err, ShouldEqual, models.ErrInvalidApiKey)
+		})
+
+		Convey("won't delete api key if a handler fails", func() {
+			err := ss.InTransaction(context.Background(), func(ctx context.Context) error {
+				err := DeleteApiKeyCtx(ctx, deleteApiKeyCmd)
+				if err != nil {
+					return err
+				}
+
+				return errProvoked
+			})
+
+			So(err, ShouldEqual, errProvoked)
+
+			query := &models.GetApiKeyByIdQuery{ApiKeyId: cmd.Result.Id}
+			err = GetApiKeyById(query)
+			So(err, ShouldBeNil)
+			So(query.Result.Id, ShouldEqual, cmd.Result.Id)
+		})
+	})
+}
diff --git a/pkg/services/sqlstore/user.go b/pkg/services/sqlstore/user.go
index e7aa8da837a..0ec1a947870 100644
--- a/pkg/services/sqlstore/user.go
+++ b/pkg/services/sqlstore/user.go
@@ -1,6 +1,8 @@
 package sqlstore
 
 import (
+	"context"
+	"fmt"
 	"strconv"
 	"strings"
 	"time"
@@ -15,7 +16,7 @@ import (
 )
 
 func init() {
-	bus.AddHandler("sql", CreateUser)
+	//bus.AddHandler("sql", CreateUser)
 	bus.AddHandler("sql", GetUserById)
 	bus.AddHandler("sql", UpdateUser)
 	bus.AddHandler("sql", ChangeUserPassword)
@@ -30,6 +31,7 @@ func init() {
 	bus.AddHandler("sql", DeleteUser)
 	bus.AddHandler("sql", UpdateUserPermissions)
 	bus.AddHandler("sql", SetUserHelpFlag)
+	bus.AddHandlerCtx("sql", CreateUser)
 }
 
 func getOrgIdForNewUser(cmd *m.CreateUserCommand, sess *DBSession) (int64, error) {
@@ -40,16 +42,23 @@ func getOrgIdForNewUser(cmd *m.CreateUserCommand, sess *DBSession) (int64, error
 	var org m.Org
 
 	if setting.AutoAssignOrg {
-		// right now auto assign to org with id 1
-		has, err := sess.Where("id=?", 1).Get(&org)
+		has, err := sess.Where("id=?", setting.AutoAssignOrgId).Get(&org)
 		if err != nil {
 			return 0, err
 		}
 		if has {
 			return org.Id, nil
+		} else {
+			if setting.AutoAssignOrgId == 1 {
+				org.Name = "Main Org."
+				org.Id = int64(setting.AutoAssignOrgId)
+			} else {
+				sqlog.Info("Could not create user: organization does not exist", "orgId", setting.AutoAssignOrgId)
+				return 0, fmt.Errorf("could not create user: organization id %v does not exist", setting.AutoAssignOrgId)
+			}
 		}
-
-		org.Name = "Main Org."
-		org.Id = 1
 	} else {
 		org.Name = cmd.OrgName
 		if len(org.Name) == 0 {
@@ -79,8 +88,8 @@ func getOrgIdForNewUser(cmd *m.CreateUserCommand, sess *DBSession) (int64, error
 	return org.Id, nil
 }
 
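+// CreateUser now takes a context so it can participate in a caller-provided
+// transaction via inTransactionCtx; it is registered on the bus with
+// AddHandlerCtx above. A minimal caller sketch, mirroring the tests:
+//
+//	cmd := &m.CreateUserCommand{Login: "loginuser0"}
+//	err := CreateUser(context.Background(), cmd)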
Org." + org.Id = int64(setting.AutoAssignOrgId) + } else { + sqlog.Info("Could not create user: organization id %v does not exist", + setting.AutoAssignOrgId) + return 0, fmt.Errorf("Could not create user: organization id %v does not exist", + setting.AutoAssignOrgId) + } } - org.Name = "Main Org." - org.Id = 1 } else { org.Name = cmd.OrgName if len(org.Name) == 0 { @@ -79,8 +88,8 @@ func getOrgIdForNewUser(cmd *m.CreateUserCommand, sess *DBSession) (int64, error return org.Id, nil } -func CreateUser(cmd *m.CreateUserCommand) error { - return inTransaction(func(sess *DBSession) error { +func CreateUser(ctx context.Context, cmd *m.CreateUserCommand) error { + return inTransactionCtx(ctx, func(sess *DBSession) error { orgId, err := getOrgIdForNewUser(cmd, sess) if err != nil { return err @@ -290,16 +299,20 @@ func SetUsingOrg(cmd *m.SetUsingOrgCommand) error { } return inTransaction(func(sess *DBSession) error { - user := m.User{ - Id: cmd.UserId, - OrgId: cmd.OrgId, - } - - _, err := sess.Id(cmd.UserId).Update(&user) - return err + return setUsingOrgInTransaction(sess, cmd.UserId, cmd.OrgId) }) } +func setUsingOrgInTransaction(sess *DBSession, userID int64, orgID int64) error { + user := m.User{ + Id: userID, + OrgId: orgID, + } + + _, err := sess.Id(userID).Update(&user) + return err +} + func GetUserProfile(query *m.GetUserProfileQuery) error { var user m.User has, err := x.Id(query.UserId).Get(&user) diff --git a/pkg/services/sqlstore/user_auth_test.go b/pkg/services/sqlstore/user_auth_test.go index 882e0c7afa5..5ad93dc7a3b 100644 --- a/pkg/services/sqlstore/user_auth_test.go +++ b/pkg/services/sqlstore/user_auth_test.go @@ -1,6 +1,7 @@ package sqlstore import ( + "context" "fmt" "testing" @@ -22,7 +23,7 @@ func TestUserAuth(t *testing.T) { Name: fmt.Sprint("user", i), Login: fmt.Sprint("loginuser", i), } - err = CreateUser(cmd) + err = CreateUser(context.Background(), cmd) So(err, ShouldBeNil) users = append(users, cmd.Result) } diff --git a/pkg/services/sqlstore/user_test.go b/pkg/services/sqlstore/user_test.go index 2830733c96a..a76ae860b7d 100644 --- a/pkg/services/sqlstore/user_test.go +++ b/pkg/services/sqlstore/user_test.go @@ -1,6 +1,7 @@ package sqlstore import ( + "context" "fmt" "testing" @@ -24,7 +25,7 @@ func TestUserDataAccess(t *testing.T) { Name: fmt.Sprint("user", i), Login: fmt.Sprint("loginuser", i), } - err = CreateUser(cmd) + err = CreateUser(context.Background(), cmd) So(err, ShouldBeNil) users = append(users, cmd.Result) } @@ -96,33 +97,33 @@ func TestUserDataAccess(t *testing.T) { }) Convey("when a user is an org member and has been assigned permissions", func() { - err = AddOrgUser(&m.AddOrgUserCommand{LoginOrEmail: users[0].Login, Role: m.ROLE_VIEWER, OrgId: users[0].OrgId}) + err = AddOrgUser(&m.AddOrgUserCommand{LoginOrEmail: users[1].Login, Role: m.ROLE_VIEWER, OrgId: users[0].OrgId, UserId: users[1].Id}) So(err, ShouldBeNil) - testHelperUpdateDashboardAcl(1, m.DashboardAcl{DashboardId: 1, OrgId: users[0].OrgId, UserId: users[0].Id, Permission: m.PERMISSION_EDIT}) + testHelperUpdateDashboardAcl(1, m.DashboardAcl{DashboardId: 1, OrgId: users[0].OrgId, UserId: users[1].Id, Permission: m.PERMISSION_EDIT}) So(err, ShouldBeNil) - err = SavePreferences(&m.SavePreferencesCommand{UserId: users[0].Id, OrgId: users[0].OrgId, HomeDashboardId: 1, Theme: "dark"}) + err = SavePreferences(&m.SavePreferencesCommand{UserId: users[1].Id, OrgId: users[0].OrgId, HomeDashboardId: 1, Theme: "dark"}) So(err, ShouldBeNil) Convey("when the user is deleted", func() { - err = 
DeleteUser(&m.DeleteUserCommand{UserId: users[0].Id}) + err = DeleteUser(&m.DeleteUserCommand{UserId: users[1].Id}) So(err, ShouldBeNil) Convey("Should delete connected org users and permissions", func() { - query := &m.GetOrgUsersQuery{OrgId: 1} + query := &m.GetOrgUsersQuery{OrgId: users[0].OrgId} err = GetOrgUsersForTest(query) So(err, ShouldBeNil) So(len(query.Result), ShouldEqual, 1) - permQuery := &m.GetDashboardAclInfoListQuery{DashboardId: 1, OrgId: 1} + permQuery := &m.GetDashboardAclInfoListQuery{DashboardId: 1, OrgId: users[0].OrgId} err = GetDashboardAclInfoList(permQuery) So(err, ShouldBeNil) So(len(permQuery.Result), ShouldEqual, 0) - prefsQuery := &m.GetPreferencesQuery{OrgId: users[0].OrgId, UserId: users[0].Id} + prefsQuery := &m.GetPreferencesQuery{OrgId: users[0].OrgId, UserId: users[1].Id} err = GetPreferences(prefsQuery) So(err, ShouldBeNil) diff --git a/pkg/setting/setting.go b/pkg/setting/setting.go index f53b1f852d5..eb61568261d 100644 --- a/pkg/setting/setting.go +++ b/pkg/setting/setting.go @@ -18,6 +18,8 @@ import ( "github.com/go-macaron/session" + "time" + "github.com/grafana/grafana/pkg/log" "github.com/grafana/grafana/pkg/util" ) @@ -48,7 +50,7 @@ var ( BuildVersion string BuildCommit string BuildStamp int64 - Enterprise bool + IsEnterprise bool ApplicationName string // Paths @@ -98,12 +100,14 @@ var ( AllowUserSignUp bool AllowUserOrgCreate bool AutoAssignOrg bool + AutoAssignOrgId int AutoAssignOrgRole string VerifyEmailEnabled bool LoginHint string DefaultTheme string DisableLoginForm bool DisableSignoutMenu bool + SignoutRedirectUrl string ExternalUserMngLinkUrl string ExternalUserMngLinkName string ExternalUserMngInfo string @@ -141,10 +145,6 @@ var ( ConfRootPath string IsWindows bool - // PhantomJs Rendering - ImagesDir string - PhantomDir string - // for logging purposes configFiles []string appliedCommandLineProperties []string @@ -193,8 +193,13 @@ type Cfg struct { // SMTP email settings Smtp SmtpSettings + // Rendering ImagesDir string + PhantomDir string + RendererUrl string DisableBruteForceLoginProtection bool + + TempDataLifetime time.Duration } type CommandLineArgs struct { @@ -495,7 +500,9 @@ func validateStaticRootPath() error { } func NewCfg() *Cfg { - return &Cfg{} + return &Cfg{ + Raw: ini.Empty(), + } } func (cfg *Cfg) Load(args *CommandLineArgs) error { @@ -512,7 +519,7 @@ func (cfg *Cfg) Load(args *CommandLineArgs) error { Raw = cfg.Raw ApplicationName = "Grafana" - if Enterprise { + if IsEnterprise { ApplicationName += " Enterprise" } @@ -586,6 +593,7 @@ func (cfg *Cfg) Load(args *CommandLineArgs) error { AllowUserSignUp = users.Key("allow_sign_up").MustBool(true) AllowUserOrgCreate = users.Key("allow_org_create").MustBool(true) AutoAssignOrg = users.Key("auto_assign_org").MustBool(true) + AutoAssignOrgId = users.Key("auto_assign_org_id").MustInt(1) AutoAssignOrgRole = users.Key("auto_assign_org_role").In("Editor", []string{"Editor", "Admin", "Viewer"}) VerifyEmailEnabled = users.Key("verify_email_enabled").MustBool(false) LoginHint = users.Key("login_hint").String() @@ -599,6 +607,7 @@ func (cfg *Cfg) Load(args *CommandLineArgs) error { auth := iniFile.Section("auth") DisableLoginForm = auth.Key("disable_login_form").MustBool(false) DisableSignoutMenu = auth.Key("disable_signout_menu").MustBool(false) + SignoutRedirectUrl = auth.Key("signout_redirect_url").String() // anonymous access AnonymousEnabled = iniFile.Section("auth.anonymous").Key("enabled").MustBool(false) @@ -629,10 +638,12 @@ func (cfg *Cfg) Load(args *CommandLineArgs) 
error { // global plugin settings PluginAppsSkipVerifyTLS = iniFile.Section("plugins").Key("app_tls_skip_verify_insecure").MustBool(false) - // PhantomJS rendering + // Rendering + renderSec := iniFile.Section("rendering") + cfg.RendererUrl = renderSec.Key("server_url").String() cfg.ImagesDir = filepath.Join(DataPath, "png") - ImagesDir = cfg.ImagesDir - PhantomDir = filepath.Join(HomePath, "tools/phantomjs") + cfg.PhantomDir = filepath.Join(HomePath, "tools/phantomjs") + cfg.TempDataLifetime = iniFile.Section("paths").Key("temp_data_lifetime").MustDuration(time.Second * 3600 * 24) analytics := iniFile.Section("analytics") ReportingEnabled = analytics.Key("reporting_enabled").MustBool(true) diff --git a/pkg/social/github_oauth.go b/pkg/social/github_oauth.go index 815c684cf03..b07f112b8d3 100644 --- a/pkg/social/github_oauth.go +++ b/pkg/social/github_oauth.go @@ -213,6 +213,7 @@ func (s *SocialGithub) UserInfo(client *http.Client, token *oauth2.Token) (*Basi userInfo := &BasicUserInfo{ Name: data.Login, Login: data.Login, + Id: fmt.Sprintf("%d", data.Id), Email: data.Email, } diff --git a/pkg/tsdb/cloudwatch/cloudwatch.go b/pkg/tsdb/cloudwatch/cloudwatch.go index 499a3ed6e03..38fbac3aa29 100644 --- a/pkg/tsdb/cloudwatch/cloudwatch.go +++ b/pkg/tsdb/cloudwatch/cloudwatch.go @@ -3,6 +3,7 @@ package cloudwatch import ( "context" "errors" + "fmt" "regexp" "sort" "strconv" @@ -13,6 +14,7 @@ import ( "github.com/grafana/grafana/pkg/models" "github.com/grafana/grafana/pkg/setting" "github.com/grafana/grafana/pkg/tsdb" + "golang.org/x/sync/errgroup" "github.com/aws/aws-sdk-go/aws" "github.com/aws/aws-sdk-go/aws/request" @@ -87,48 +89,67 @@ func (e *CloudWatchExecutor) executeTimeSeriesQuery(ctx context.Context, queryCo Results: make(map[string]*tsdb.QueryResult), } - errCh := make(chan error, 1) - resCh := make(chan *tsdb.QueryResult, 1) + eg, ectx := errgroup.WithContext(ctx) - currentlyExecuting := 0 + getMetricDataQueries := make(map[string]map[string]*CloudWatchQuery) for i, model := range queryContext.Queries { queryType := model.Model.Get("type").MustString() if queryType != "timeSeriesQuery" && queryType != "" { continue } - currentlyExecuting++ - go func(refId string, index int) { - queryRes, err := e.executeQuery(ctx, queryContext.Queries[index].Model, queryContext) - currentlyExecuting-- - if err != nil { - errCh <- err - } else { - queryRes.RefId = refId - resCh <- queryRes + + query, err := parseQuery(queryContext.Queries[i].Model) + if err != nil { + return nil, err + } + query.RefId = queryContext.Queries[i].RefId + + if query.Id != "" { + if _, ok := getMetricDataQueries[query.Region]; !ok { + getMetricDataQueries[query.Region] = make(map[string]*CloudWatchQuery) } - }(model.RefId, i) + getMetricDataQueries[query.Region][query.Id] = query + continue + } + + if query.Id == "" && query.Expression != "" { + return nil, fmt.Errorf("Invalid query: id should be set if using expression") + } + + eg.Go(func() error { + queryRes, err := e.executeQuery(ectx, query, queryContext) + if err != nil { + return err + } + result.Results[queryRes.RefId] = queryRes + return nil + }) } - for currentlyExecuting != 0 { - select { - case res := <-resCh: - result.Results[res.RefId] = res - case err := <-errCh: - return result, err - case <-ctx.Done(): - return result, ctx.Err() + if len(getMetricDataQueries) > 0 { + for region, getMetricDataQuery := range getMetricDataQueries { + q := getMetricDataQuery + eg.Go(func() error { + queryResponses, err := e.executeGetMetricDataQuery(ectx, region, q, 
queryContext)
+				if err != nil {
+					return err
+				}
+				for _, queryRes := range queryResponses {
+					result.Results[queryRes.RefId] = queryRes
+				}
+				return nil
+			})
+		}
+	}
+
+	if err := eg.Wait(); err != nil {
+		return nil, err
+	}
+
 	return result, nil
 }
 
-func (e *CloudWatchExecutor) executeQuery(ctx context.Context, parameters *simplejson.Json, queryContext *tsdb.TsdbQuery) (*tsdb.QueryResult, error) {
-	query, err := parseQuery(parameters)
-	if err != nil {
-		return nil, err
-	}
-
+func (e *CloudWatchExecutor) executeQuery(ctx context.Context, query *CloudWatchQuery, queryContext *tsdb.TsdbQuery) (*tsdb.QueryResult, error) {
 	client, err := e.getClient(query.Region)
 	if err != nil {
 		return nil, err
@@ -144,6 +165,10 @@ func (e *CloudWatchExecutor) executeQuery(ctx context.Context, parameters *simpl
 		return nil, err
 	}
 
+	if endTime.Before(startTime) {
+		return nil, fmt.Errorf("Invalid time range: End time can't be before start time")
+	}
+
 	params := &cloudwatch.GetMetricStatisticsInput{
 		Namespace:  aws.String(query.Namespace),
 		MetricName: aws.String(query.MetricName),
@@ -196,6 +221,139 @@ func (e *CloudWatchExecutor) executeQuery(ctx context.Context, parameters *simpl
 	return queryRes, nil
 }
 
+func (e *CloudWatchExecutor) executeGetMetricDataQuery(ctx context.Context, region string, queries map[string]*CloudWatchQuery, queryContext *tsdb.TsdbQuery) ([]*tsdb.QueryResult, error) {
+	queryResponses := make([]*tsdb.QueryResult, 0)
+
+	// validate query
+	for _, query := range queries {
+		if !(len(query.Statistics) == 1 && len(query.ExtendedStatistics) == 0) &&
+			!(len(query.Statistics) == 0 && len(query.ExtendedStatistics) == 1) {
+			return queryResponses, errors.New("Statistics count should be 1")
+		}
+	}
+
+	client, err := e.getClient(region)
+	if err != nil {
+		return queryResponses, err
+	}
+
+	startTime, err := queryContext.TimeRange.ParseFrom()
+	if err != nil {
+		return queryResponses, err
+	}
+
+	endTime, err := queryContext.TimeRange.ParseTo()
+	if err != nil {
+		return queryResponses, err
+	}
+
+	params := &cloudwatch.GetMetricDataInput{
+		StartTime: aws.Time(startTime),
+		EndTime:   aws.Time(endTime),
+		ScanBy:    aws.String("TimestampAscending"),
+	}
+	for _, query := range queries {
+		// metrics with 1-minute resolution are stored for 15 days, so the
+		// longest usable range is 15 * 24 * 60 = 21600 periods
+		if query.HighResolution && (((endTime.Unix() - startTime.Unix()) / int64(query.Period)) > 21600) {
+			return nil, errors.New("query period is too long")
+		}
+
+		mdq := &cloudwatch.MetricDataQuery{
+			Id:         aws.String(query.Id),
+			ReturnData: aws.Bool(query.ReturnData),
+		}
+		if query.Expression != "" {
+			mdq.Expression = aws.String(query.Expression)
+		} else {
+			mdq.MetricStat = &cloudwatch.MetricStat{
+				Metric: &cloudwatch.Metric{
+					Namespace:  aws.String(query.Namespace),
+					MetricName: aws.String(query.MetricName),
+				},
+				Period: aws.Int64(int64(query.Period)),
+			}
+			for _, d := range query.Dimensions {
+				mdq.MetricStat.Metric.Dimensions = append(mdq.MetricStat.Metric.Dimensions,
+					&cloudwatch.Dimension{
+						Name:  d.Name,
+						Value: d.Value,
+					})
+			}
+			if len(query.Statistics) == 1 {
+				mdq.MetricStat.Stat = query.Statistics[0]
+			} else {
+				mdq.MetricStat.Stat = query.ExtendedStatistics[0]
+			}
+		}
+		params.MetricDataQueries = append(params.MetricDataQueries, mdq)
+	}
+
+	nextToken := ""
+	mdr := make(map[string]*cloudwatch.MetricDataResult)
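+	// GetMetricData responses are paged: keep requesting with the returned
+	// NextToken until it is empty, merging partial MetricDataResults that
+	// share an id, since a single query can span several pages.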
+	for {
+		if nextToken != "" {
+			params.NextToken = aws.String(nextToken)
+		}
+		resp, err := client.GetMetricDataWithContext(ctx, params)
+		if err != nil {
+			return queryResponses, err
+		}
+		metrics.M_Aws_CloudWatch_GetMetricData.Add(float64(len(params.MetricDataQueries)))
+
+		for _, r := range resp.MetricDataResults {
+			if _, ok := mdr[*r.Id]; !ok {
+				mdr[*r.Id] = r
+			} else {
+				mdr[*r.Id].Timestamps = append(mdr[*r.Id].Timestamps, r.Timestamps...)
+				mdr[*r.Id].Values = append(mdr[*r.Id].Values, r.Values...)
+			}
+		}
+
+		if resp.NextToken == nil || *resp.NextToken == "" {
+			break
+		}
+		nextToken = *resp.NextToken
+	}
+
+	for i, r := range mdr {
+		if *r.StatusCode != "Complete" {
+			return queryResponses, fmt.Errorf("Part of the query failed: %s", *r.StatusCode)
+		}
+
+		queryRes := tsdb.NewQueryResult()
+		queryRes.RefId = queries[i].RefId
+		query := queries[*r.Id]
+
+		series := tsdb.TimeSeries{
+			Tags:   map[string]string{},
+			Points: make([]tsdb.TimePoint, 0),
+		}
+		for _, d := range query.Dimensions {
+			series.Tags[*d.Name] = *d.Value
+		}
+		s := ""
+		if len(query.Statistics) == 1 {
+			s = *query.Statistics[0]
+		} else {
+			s = *query.ExtendedStatistics[0]
+		}
+		series.Name = formatAlias(query, s, series.Tags)
+
+		for j, t := range r.Timestamps {
+			if j > 0 {
+				// insert a null point when a period is missing so gaps render as gaps
+				expectedTimestamp := r.Timestamps[j-1].Add(time.Duration(query.Period) * time.Second)
+				if expectedTimestamp.Before(*t) {
+					series.Points = append(series.Points, tsdb.NewTimePoint(null.FloatFromPtr(nil), float64(expectedTimestamp.Unix()*1000)))
+				}
+			}
+			series.Points = append(series.Points, tsdb.NewTimePoint(null.FloatFrom(*r.Values[j]), float64((*t).Unix())*1000))
+		}
+
+		queryRes.Series = append(queryRes.Series, &series)
+		queryResponses = append(queryResponses, queryRes)
+	}
+
+	return queryResponses, nil
+}
+
 func parseDimensions(model *simplejson.Json) ([]*cloudwatch.Dimension, error) {
 	var result []*cloudwatch.Dimension
 
@@ -252,6 +410,9 @@ func parseQuery(model *simplejson.Json) (*CloudWatchQuery, error) {
 		return nil, err
 	}
 
+	id := model.Get("id").MustString("")
+	expression := model.Get("expression").MustString("")
+
 	dimensions, err := parseDimensions(model)
 	if err != nil {
 		return nil, err
@@ -290,6 +451,7 @@ func parseQuery(model *simplejson.Json) (*CloudWatchQuery, error) {
 		alias = "{{metric}}_{{stat}}"
 	}
 
+	returnData := model.Get("returnData").MustBool(false)
 	highResolution := model.Get("highResolution").MustBool(false)
 
 	return &CloudWatchQuery{
@@ -301,11 +463,18 @@ func parseQuery(model *simplejson.Json) (*CloudWatchQuery, error) {
 		ExtendedStatistics: aws.StringSlice(extendedStatistics),
 		Period:             period,
 		Alias:              alias,
+		Id:                 id,
+		Expression:         expression,
+		ReturnData:         returnData,
 		HighResolution:     highResolution,
 	}, nil
 }
 
 func formatAlias(query *CloudWatchQuery, stat string, dimensions map[string]string) string {
+	if len(query.Id) > 0 && len(query.Expression) > 0 {
+		return query.Id
+	}
+
 	data := map[string]string{}
 	data["region"] = query.Region
 	data["namespace"] = query.Namespace
@@ -333,6 +502,7 @@ func formatAlias(query *CloudWatchQuery, stat string, dimensions map[string]stri
 func parseResponse(resp *cloudwatch.GetMetricStatisticsOutput, query *CloudWatchQuery) (*tsdb.QueryResult, error) {
 	queryRes := tsdb.NewQueryResult()
 
+	queryRes.RefId = query.RefId
 	var value float64
 	for _, s := range append(query.Statistics, query.ExtendedStatistics...) 
{ series := tsdb.TimeSeries{ diff --git a/pkg/tsdb/cloudwatch/metric_find_query.go b/pkg/tsdb/cloudwatch/metric_find_query.go index a7d33645b9b..136ee241c2e 100644 --- a/pkg/tsdb/cloudwatch/metric_find_query.go +++ b/pkg/tsdb/cloudwatch/metric_find_query.go @@ -230,8 +230,8 @@ func parseMultiSelectValue(input string) []string { // Please update the region list in public/app/plugins/datasource/cloudwatch/partials/config.html func (e *CloudWatchExecutor) handleGetRegions(ctx context.Context, parameters *simplejson.Json, queryContext *tsdb.TsdbQuery) ([]suggestData, error) { regions := []string{ - "ap-northeast-1", "ap-northeast-2", "ap-southeast-1", "ap-southeast-2", "ap-south-1", "ca-central-1", "cn-north-1", - "eu-central-1", "eu-west-1", "eu-west-2", "sa-east-1", "us-east-1", "us-east-2", "us-gov-west-1", "us-west-1", "us-west-2", + "ap-northeast-1", "ap-northeast-2", "ap-southeast-1", "ap-southeast-2", "ap-south-1", "ca-central-1", "cn-north-1", "cn-northwest-1", + "eu-central-1", "eu-west-1", "eu-west-2", "eu-west-3", "sa-east-1", "us-east-1", "us-east-2", "us-gov-west-1", "us-west-1", "us-west-2", } result := make([]suggestData, 0) diff --git a/pkg/tsdb/cloudwatch/types.go b/pkg/tsdb/cloudwatch/types.go index 0737b64686d..1225fb9b31b 100644 --- a/pkg/tsdb/cloudwatch/types.go +++ b/pkg/tsdb/cloudwatch/types.go @@ -5,6 +5,7 @@ import ( ) type CloudWatchQuery struct { + RefId string Region string Namespace string MetricName string @@ -13,5 +14,8 @@ type CloudWatchQuery struct { ExtendedStatistics []*string Period int Alias string + Id string + Expression string + ReturnData bool HighResolution bool } diff --git a/pkg/tsdb/elasticsearch/client/client.go b/pkg/tsdb/elasticsearch/client/client.go new file mode 100644 index 00000000000..dff626a79eb --- /dev/null +++ b/pkg/tsdb/elasticsearch/client/client.go @@ -0,0 +1,257 @@ +package es + +import ( + "bytes" + "context" + "encoding/json" + "fmt" + "net/http" + "net/url" + "path" + "strconv" + "strings" + "time" + + "github.com/grafana/grafana/pkg/components/simplejson" + "github.com/grafana/grafana/pkg/log" + "github.com/grafana/grafana/pkg/tsdb" + + "github.com/grafana/grafana/pkg/models" + "golang.org/x/net/context/ctxhttp" +) + +const loggerName = "tsdb.elasticsearch.client" + +var ( + clientLog = log.New(loggerName) +) + +var newDatasourceHttpClient = func(ds *models.DataSource) (*http.Client, error) { + return ds.GetHttpClient() +} + +// Client represents a client which can interact with elasticsearch api +type Client interface { + GetVersion() int + GetTimeField() string + GetMinInterval(queryInterval string) (time.Duration, error) + ExecuteMultisearch(r *MultiSearchRequest) (*MultiSearchResponse, error) + MultiSearch() *MultiSearchRequestBuilder +} + +// NewClient creates a new elasticsearch client +var NewClient = func(ctx context.Context, ds *models.DataSource, timeRange *tsdb.TimeRange) (Client, error) { + version, err := ds.JsonData.Get("esVersion").Int() + if err != nil { + return nil, fmt.Errorf("elasticsearch version is required, err=%v", err) + } + + timeField, err := ds.JsonData.Get("timeField").String() + if err != nil { + return nil, fmt.Errorf("elasticsearch time field name is required, err=%v", err) + } + + indexInterval := ds.JsonData.Get("interval").MustString() + ip, err := newIndexPattern(indexInterval, ds.Database) + if err != nil { + return nil, err + } + + indices, err := ip.GetIndices(timeRange) + if err != nil { + return nil, err + } + + clientLog.Debug("Creating new client", "version", version, "timeField", 
timeField, "indices", strings.Join(indices, ", ")) + + switch version { + case 2, 5, 56: + return &baseClientImpl{ + ctx: ctx, + ds: ds, + version: version, + timeField: timeField, + indices: indices, + timeRange: timeRange, + }, nil + } + + return nil, fmt.Errorf("elasticsearch version=%d is not supported", version) +} + +type baseClientImpl struct { + ctx context.Context + ds *models.DataSource + version int + timeField string + indices []string + timeRange *tsdb.TimeRange +} + +func (c *baseClientImpl) GetVersion() int { + return c.version +} + +func (c *baseClientImpl) GetTimeField() string { + return c.timeField +} + +func (c *baseClientImpl) GetMinInterval(queryInterval string) (time.Duration, error) { + return tsdb.GetIntervalFrom(c.ds, simplejson.NewFromAny(map[string]interface{}{ + "interval": queryInterval, + }), 5*time.Second) +} + +func (c *baseClientImpl) getSettings() *simplejson.Json { + return c.ds.JsonData +} + +type multiRequest struct { + header map[string]interface{} + body interface{} + interval tsdb.Interval +} + +func (c *baseClientImpl) executeBatchRequest(uriPath string, requests []*multiRequest) (*http.Response, error) { + bytes, err := c.encodeBatchRequests(requests) + if err != nil { + return nil, err + } + return c.executeRequest(http.MethodPost, uriPath, bytes) +} + +func (c *baseClientImpl) encodeBatchRequests(requests []*multiRequest) ([]byte, error) { + clientLog.Debug("Encoding batch requests to json", "batch requests", len(requests)) + start := time.Now() + + payload := bytes.Buffer{} + for _, r := range requests { + reqHeader, err := json.Marshal(r.header) + if err != nil { + return nil, err + } + payload.WriteString(string(reqHeader) + "\n") + + reqBody, err := json.Marshal(r.body) + if err != nil { + return nil, err + } + + body := string(reqBody) + body = strings.Replace(body, "$__interval_ms", strconv.FormatInt(r.interval.Value.Nanoseconds()/int64(time.Millisecond), 10), -1) + body = strings.Replace(body, "$__interval", r.interval.Text, -1) + + payload.WriteString(body + "\n") + } + + elapsed := time.Now().Sub(start) + clientLog.Debug("Encoded batch requests to json", "took", elapsed) + + return payload.Bytes(), nil +} + +func (c *baseClientImpl) executeRequest(method, uriPath string, body []byte) (*http.Response, error) { + u, _ := url.Parse(c.ds.Url) + u.Path = path.Join(u.Path, uriPath) + + var req *http.Request + var err error + if method == http.MethodPost { + req, err = http.NewRequest(http.MethodPost, u.String(), bytes.NewBuffer(body)) + } else { + req, err = http.NewRequest(http.MethodGet, u.String(), nil) + } + if err != nil { + return nil, err + } + + clientLog.Debug("Executing request", "url", req.URL.String(), "method", method) + + req.Header.Set("User-Agent", "Grafana") + req.Header.Set("Content-Type", "application/json") + + if c.ds.BasicAuth { + clientLog.Debug("Request configured to use basic authentication") + req.SetBasicAuth(c.ds.BasicAuthUser, c.ds.BasicAuthPassword) + } + + if !c.ds.BasicAuth && c.ds.User != "" { + clientLog.Debug("Request configured to use basic authentication") + req.SetBasicAuth(c.ds.User, c.ds.Password) + } + + httpClient, err := newDatasourceHttpClient(c.ds) + if err != nil { + return nil, err + } + + start := time.Now() + defer func() { + elapsed := time.Now().Sub(start) + clientLog.Debug("Executed request", "took", elapsed) + }() + return ctxhttp.Do(c.ctx, httpClient, req) +} + +func (c *baseClientImpl) ExecuteMultisearch(r *MultiSearchRequest) (*MultiSearchResponse, error) { + clientLog.Debug("Executing 
multisearch", "search requests", len(r.Requests)) + + multiRequests := c.createMultiSearchRequests(r.Requests) + res, err := c.executeBatchRequest("_msearch", multiRequests) + if err != nil { + return nil, err + } + + clientLog.Debug("Received multisearch response", "code", res.StatusCode, "status", res.Status, "content-length", res.ContentLength) + + start := time.Now() + clientLog.Debug("Decoding multisearch json response") + + var msr MultiSearchResponse + defer res.Body.Close() + dec := json.NewDecoder(res.Body) + err = dec.Decode(&msr) + if err != nil { + return nil, err + } + + elapsed := time.Now().Sub(start) + clientLog.Debug("Decoded multisearch json response", "took", elapsed) + + msr.Status = res.StatusCode + + return &msr, nil +} + +func (c *baseClientImpl) createMultiSearchRequests(searchRequests []*SearchRequest) []*multiRequest { + multiRequests := []*multiRequest{} + + for _, searchReq := range searchRequests { + mr := multiRequest{ + header: map[string]interface{}{ + "search_type": "query_then_fetch", + "ignore_unavailable": true, + "index": strings.Join(c.indices, ","), + }, + body: searchReq, + interval: searchReq.Interval, + } + + if c.version == 2 { + mr.header["search_type"] = "count" + } + + if c.version >= 56 { + maxConcurrentShardRequests := c.getSettings().Get("maxConcurrentShardRequests").MustInt(256) + mr.header["max_concurrent_shard_requests"] = maxConcurrentShardRequests + } + + multiRequests = append(multiRequests, &mr) + } + + return multiRequests +} + +func (c *baseClientImpl) MultiSearch() *MultiSearchRequestBuilder { + return NewMultiSearchRequestBuilder(c.GetVersion()) +} diff --git a/pkg/tsdb/elasticsearch/client/client_test.go b/pkg/tsdb/elasticsearch/client/client_test.go new file mode 100644 index 00000000000..11d1cdb1d71 --- /dev/null +++ b/pkg/tsdb/elasticsearch/client/client_test.go @@ -0,0 +1,304 @@ +package es + +import ( + "bytes" + "context" + "fmt" + "io/ioutil" + "net/http" + "net/http/httptest" + "testing" + "time" + + "github.com/grafana/grafana/pkg/components/simplejson" + "github.com/grafana/grafana/pkg/tsdb" + + "github.com/grafana/grafana/pkg/models" + . 
"github.com/smartystreets/goconvey/convey" +) + +func TestClient(t *testing.T) { + Convey("Test elasticsearch client", t, func() { + Convey("NewClient", func() { + Convey("When no version set should return error", func() { + ds := &models.DataSource{ + JsonData: simplejson.NewFromAny(make(map[string]interface{})), + } + + _, err := NewClient(nil, ds, nil) + So(err, ShouldNotBeNil) + }) + + Convey("When no time field name set should return error", func() { + ds := &models.DataSource{ + JsonData: simplejson.NewFromAny(map[string]interface{}{ + "esVersion": 5, + }), + } + + _, err := NewClient(nil, ds, nil) + So(err, ShouldNotBeNil) + }) + + Convey("When unspported version set should return error", func() { + ds := &models.DataSource{ + JsonData: simplejson.NewFromAny(map[string]interface{}{ + "esVersion": 6, + "timeField": "@timestamp", + }), + } + + _, err := NewClient(nil, ds, nil) + So(err, ShouldNotBeNil) + }) + + Convey("When version 2 should return v2 client", func() { + ds := &models.DataSource{ + JsonData: simplejson.NewFromAny(map[string]interface{}{ + "esVersion": 2, + "timeField": "@timestamp", + }), + } + + c, err := NewClient(nil, ds, nil) + So(err, ShouldBeNil) + So(c.GetVersion(), ShouldEqual, 2) + }) + + Convey("When version 5 should return v5 client", func() { + ds := &models.DataSource{ + JsonData: simplejson.NewFromAny(map[string]interface{}{ + "esVersion": 5, + "timeField": "@timestamp", + }), + } + + c, err := NewClient(nil, ds, nil) + So(err, ShouldBeNil) + So(c.GetVersion(), ShouldEqual, 5) + }) + + Convey("When version 56 should return v5.6 client", func() { + ds := &models.DataSource{ + JsonData: simplejson.NewFromAny(map[string]interface{}{ + "esVersion": 56, + "timeField": "@timestamp", + }), + } + + c, err := NewClient(nil, ds, nil) + So(err, ShouldBeNil) + So(c.GetVersion(), ShouldEqual, 56) + }) + }) + + Convey("Given a fake http client", func() { + var responseBuffer *bytes.Buffer + var req *http.Request + ts := httptest.NewServer(http.HandlerFunc(func(rw http.ResponseWriter, r *http.Request) { + req = r + buf, err := ioutil.ReadAll(r.Body) + if err != nil { + t.Fatalf("Failed to read response body, err=%v", err) + } + responseBuffer = bytes.NewBuffer(buf) + })) + + currentNewDatasourceHttpClient := newDatasourceHttpClient + + newDatasourceHttpClient = func(ds *models.DataSource) (*http.Client, error) { + return ts.Client(), nil + } + + from := time.Date(2018, 5, 15, 17, 50, 0, 0, time.UTC) + to := time.Date(2018, 5, 15, 17, 55, 0, 0, time.UTC) + fromStr := fmt.Sprintf("%d", from.UnixNano()/int64(time.Millisecond)) + toStr := fmt.Sprintf("%d", to.UnixNano()/int64(time.Millisecond)) + timeRange := tsdb.NewTimeRange(fromStr, toStr) + + Convey("and a v2.x client", func() { + ds := models.DataSource{ + Database: "[metrics-]YYYY.MM.DD", + Url: ts.URL, + JsonData: simplejson.NewFromAny(map[string]interface{}{ + "esVersion": 2, + "timeField": "@timestamp", + "interval": "Daily", + }), + } + + c, err := NewClient(context.Background(), &ds, timeRange) + So(err, ShouldBeNil) + So(c, ShouldNotBeNil) + + Convey("When executing multi search", func() { + ms, err := createMultisearchForTest(c) + So(err, ShouldBeNil) + c.ExecuteMultisearch(ms) + + Convey("Should send correct request and payload", func() { + So(req, ShouldNotBeNil) + So(req.Method, ShouldEqual, http.MethodPost) + So(req.URL.Path, ShouldEqual, "/_msearch") + + So(responseBuffer, ShouldNotBeNil) + + headerBytes, err := responseBuffer.ReadBytes('\n') + So(err, ShouldBeNil) + bodyBytes := responseBuffer.Bytes() + + 
jHeader, err := simplejson.NewJson(headerBytes) + So(err, ShouldBeNil) + + jBody, err := simplejson.NewJson(bodyBytes) + So(err, ShouldBeNil) + + fmt.Println("body", string(headerBytes)) + + So(jHeader.Get("index").MustString(), ShouldEqual, "metrics-2018.05.15") + So(jHeader.Get("ignore_unavailable").MustBool(false), ShouldEqual, true) + So(jHeader.Get("search_type").MustString(), ShouldEqual, "count") + So(jHeader.Get("max_concurrent_shard_requests").MustInt(10), ShouldEqual, 10) + + Convey("and replace $__interval variable", func() { + So(jBody.GetPath("aggs", "2", "aggs", "1", "avg", "script").MustString(), ShouldEqual, "15000*@hostname") + }) + + Convey("and replace $__interval_ms variable", func() { + So(jBody.GetPath("aggs", "2", "date_histogram", "interval").MustString(), ShouldEqual, "15s") + }) + }) + }) + }) + + Convey("and a v5.x client", func() { + ds := models.DataSource{ + Database: "[metrics-]YYYY.MM.DD", + Url: ts.URL, + JsonData: simplejson.NewFromAny(map[string]interface{}{ + "esVersion": 5, + "maxConcurrentShardRequests": 100, + "timeField": "@timestamp", + "interval": "Daily", + }), + } + + c, err := NewClient(context.Background(), &ds, timeRange) + So(err, ShouldBeNil) + So(c, ShouldNotBeNil) + + Convey("When executing multi search", func() { + ms, err := createMultisearchForTest(c) + So(err, ShouldBeNil) + c.ExecuteMultisearch(ms) + + Convey("Should send correct request and payload", func() { + So(req, ShouldNotBeNil) + So(req.Method, ShouldEqual, http.MethodPost) + So(req.URL.Path, ShouldEqual, "/_msearch") + + So(responseBuffer, ShouldNotBeNil) + + headerBytes, err := responseBuffer.ReadBytes('\n') + So(err, ShouldBeNil) + bodyBytes := responseBuffer.Bytes() + + jHeader, err := simplejson.NewJson(headerBytes) + So(err, ShouldBeNil) + + jBody, err := simplejson.NewJson(bodyBytes) + So(err, ShouldBeNil) + + fmt.Println("body", string(headerBytes)) + + So(jHeader.Get("index").MustString(), ShouldEqual, "metrics-2018.05.15") + So(jHeader.Get("ignore_unavailable").MustBool(false), ShouldEqual, true) + So(jHeader.Get("search_type").MustString(), ShouldEqual, "query_then_fetch") + So(jHeader.Get("max_concurrent_shard_requests").MustInt(10), ShouldEqual, 10) + + Convey("and replace $__interval variable", func() { + So(jBody.GetPath("aggs", "2", "aggs", "1", "avg", "script").MustString(), ShouldEqual, "15000*@hostname") + }) + + Convey("and replace $__interval_ms variable", func() { + So(jBody.GetPath("aggs", "2", "date_histogram", "interval").MustString(), ShouldEqual, "15s") + }) + }) + }) + }) + + Convey("and a v5.6 client", func() { + ds := models.DataSource{ + Database: "[metrics-]YYYY.MM.DD", + Url: ts.URL, + JsonData: simplejson.NewFromAny(map[string]interface{}{ + "esVersion": 56, + "maxConcurrentShardRequests": 100, + "timeField": "@timestamp", + "interval": "Daily", + }), + } + + c, err := NewClient(context.Background(), &ds, timeRange) + So(err, ShouldBeNil) + So(c, ShouldNotBeNil) + + Convey("When executing multi search", func() { + ms, err := createMultisearchForTest(c) + So(err, ShouldBeNil) + c.ExecuteMultisearch(ms) + + Convey("Should send correct request and payload", func() { + So(req, ShouldNotBeNil) + So(req.Method, ShouldEqual, http.MethodPost) + So(req.URL.Path, ShouldEqual, "/_msearch") + + So(responseBuffer, ShouldNotBeNil) + + headerBytes, err := responseBuffer.ReadBytes('\n') + So(err, ShouldBeNil) + bodyBytes := responseBuffer.Bytes() + + jHeader, err := simplejson.NewJson(headerBytes) + So(err, ShouldBeNil) + + jBody, err := 
simplejson.NewJson(bodyBytes) + So(err, ShouldBeNil) + + fmt.Println("body", string(headerBytes)) + + So(jHeader.Get("index").MustString(), ShouldEqual, "metrics-2018.05.15") + So(jHeader.Get("ignore_unavailable").MustBool(false), ShouldEqual, true) + So(jHeader.Get("search_type").MustString(), ShouldEqual, "query_then_fetch") + So(jHeader.Get("max_concurrent_shard_requests").MustInt(), ShouldEqual, 100) + + Convey("and replace $__interval variable", func() { + So(jBody.GetPath("aggs", "2", "aggs", "1", "avg", "script").MustString(), ShouldEqual, "15000*@hostname") + }) + + Convey("and replace $__interval_ms variable", func() { + So(jBody.GetPath("aggs", "2", "date_histogram", "interval").MustString(), ShouldEqual, "15s") + }) + }) + }) + }) + + Reset(func() { + newDatasourceHttpClient = currentNewDatasourceHttpClient + }) + }) + }) +} + +func createMultisearchForTest(c Client) (*MultiSearchRequest, error) { + msb := c.MultiSearch() + s := msb.Search(tsdb.Interval{Value: 15 * time.Second, Text: "15s"}) + s.Agg().DateHistogram("2", "@timestamp", func(a *DateHistogramAgg, ab AggBuilder) { + a.Interval = "$__interval" + + ab.Metric("1", "avg", "@hostname", func(a *MetricAggregation) { + a.Settings["script"] = "$__interval_ms*@hostname" + }) + }) + return msb.Build() +} diff --git a/pkg/tsdb/elasticsearch/client/index_pattern.go b/pkg/tsdb/elasticsearch/client/index_pattern.go new file mode 100644 index 00000000000..8391e902ea4 --- /dev/null +++ b/pkg/tsdb/elasticsearch/client/index_pattern.go @@ -0,0 +1,312 @@ +package es + +import ( + "fmt" + "regexp" + "strings" + "time" + + "github.com/grafana/grafana/pkg/tsdb" +) + +const ( + noInterval = "" + intervalHourly = "hourly" + intervalDaily = "daily" + intervalWeekly = "weekly" + intervalMonthly = "monthly" + intervalYearly = "yearly" +) + +type indexPattern interface { + GetIndices(timeRange *tsdb.TimeRange) ([]string, error) +} + +var newIndexPattern = func(interval string, pattern string) (indexPattern, error) { + if interval == noInterval { + return &staticIndexPattern{indexName: pattern}, nil + } + + return newDynamicIndexPattern(interval, pattern) +} + +type staticIndexPattern struct { + indexName string +} + +func (ip *staticIndexPattern) GetIndices(timeRange *tsdb.TimeRange) ([]string, error) { + return []string{ip.indexName}, nil +} + +type intervalGenerator interface { + Generate(from, to time.Time) []time.Time +} + +type dynamicIndexPattern struct { + interval string + pattern string + intervalGenerator intervalGenerator +} + +func newDynamicIndexPattern(interval, pattern string) (*dynamicIndexPattern, error) { + var generator intervalGenerator + + switch strings.ToLower(interval) { + case intervalHourly: + generator = &hourlyInterval{} + case intervalDaily: + generator = &dailyInterval{} + case intervalWeekly: + generator = &weeklyInterval{} + case intervalMonthly: + generator = &monthlyInterval{} + case intervalYearly: + generator = &yearlyInterval{} + default: + return nil, fmt.Errorf("unsupported interval '%s'", interval) + } + + return &dynamicIndexPattern{ + interval: interval, + pattern: pattern, + intervalGenerator: generator, + }, nil +} + +func (ip *dynamicIndexPattern) GetIndices(timeRange *tsdb.TimeRange) ([]string, error) { + from := timeRange.GetFromAsTimeUTC() + to := timeRange.GetToAsTimeUTC() + intervals := ip.intervalGenerator.Generate(from, to) + indices := make([]string, 0) + + for _, t := range intervals { + indices = append(indices, formatDate(t, ip.pattern)) + } + + return indices, nil +} + +type hourlyInterval 
struct{}
+
+func (i *hourlyInterval) Generate(from, to time.Time) []time.Time {
+	intervals := []time.Time{}
+	start := time.Date(from.Year(), from.Month(), from.Day(), from.Hour(), 0, 0, 0, time.UTC)
+	end := time.Date(to.Year(), to.Month(), to.Day(), to.Hour(), 0, 0, 0, time.UTC)
+
+	intervals = append(intervals, start)
+
+	for start.Before(end) {
+		start = start.Add(time.Hour)
+		intervals = append(intervals, start)
+	}
+
+	return intervals
+}
+
+type dailyInterval struct{}
+
+func (i *dailyInterval) Generate(from, to time.Time) []time.Time {
+	intervals := []time.Time{}
+	start := time.Date(from.Year(), from.Month(), from.Day(), 0, 0, 0, 0, time.UTC)
+	end := time.Date(to.Year(), to.Month(), to.Day(), 0, 0, 0, 0, time.UTC)
+
+	intervals = append(intervals, start)
+
+	for start.Before(end) {
+		start = start.Add(24 * time.Hour)
+		intervals = append(intervals, start)
+	}
+
+	return intervals
+}
+
+type weeklyInterval struct{}
+
+func (i *weeklyInterval) Generate(from, to time.Time) []time.Time {
+	intervals := []time.Time{}
+	start := time.Date(from.Year(), from.Month(), from.Day(), 0, 0, 0, 0, time.UTC)
+	end := time.Date(to.Year(), to.Month(), to.Day(), 0, 0, 0, 0, time.UTC)
+
+	for start.Weekday() != time.Monday {
+		start = start.Add(-24 * time.Hour)
+	}
+
+	for end.Weekday() != time.Monday {
+		end = end.Add(-24 * time.Hour)
+	}
+
+	year, week := start.ISOWeek()
+	intervals = append(intervals, start)
+
+	for start.Before(end) {
+		start = start.Add(24 * time.Hour)
+		nextYear, nextWeek := start.ISOWeek()
+		if nextYear != year || nextWeek != week {
+			intervals = append(intervals, start)
+		}
+		year = nextYear
+		week = nextWeek
+	}
+
+	return intervals
+}
+
+type monthlyInterval struct{}
+
+func (i *monthlyInterval) Generate(from, to time.Time) []time.Time {
+	intervals := []time.Time{}
+	start := time.Date(from.Year(), from.Month(), 1, 0, 0, 0, 0, time.UTC)
+	end := time.Date(to.Year(), to.Month(), 1, 0, 0, 0, 0, time.UTC)
+
+	month := start.Month()
+	intervals = append(intervals, start)
+
+	for start.Before(end) {
+		start = start.Add(24 * time.Hour)
+		nextMonth := start.Month()
+		if nextMonth != month {
+			intervals = append(intervals, start)
+		}
+		month = nextMonth
+	}
+
+	return intervals
+}
+
+type yearlyInterval struct{}
+
+func (i *yearlyInterval) Generate(from, to time.Time) []time.Time {
+	intervals := []time.Time{}
+	start := time.Date(from.Year(), 1, 1, 0, 0, 0, 0, time.UTC)
+	end := time.Date(to.Year(), 1, 1, 0, 0, 0, 0, time.UTC)
+
+	year := start.Year()
+	intervals = append(intervals, start)
+
+	for start.Before(end) {
+		start = start.Add(24 * time.Hour)
+		nextYear := start.Year()
+		if nextYear != year {
+			intervals = append(intervals, start)
+		}
+		year = nextYear
+	}
+
+	return intervals
+}
+
+var datePatternRegex = regexp.MustCompile("(LT|LL?L?L?|l{1,4}|Mo|MM?M?M?|Do|DDDo|DD?D?D?|ddd?d?|do?|w[o|w]?|W[o|W]?|YYYYY|YYYY|YY|gg(ggg?)?|GG(GGG?)?|e|E|a|A|hh?|HH?|mm?|ss?|SS?S?|X|zz?|ZZ?|Q)")
+
+var datePatternReplacements = map[string]string{
+	"M":    "1",                       // stdNumMonth 1 2 ... 11 12
+	"MM":   "01",                      // stdZeroMonth 01 02 ... 11 12
+	"MMM":  "Jan",                     // stdMonth Jan Feb ... Nov Dec
+	"MMMM": "January",                 // stdLongMonth January February ... November December
+	"D":    "2",                       // stdDay 1 2 ... 30 31
+	"DD":   "02",                      // stdZeroDay 01 02 ... 30 31
+	"DDD":  "<stdDayOfYear>",          // Day of the year 1 2 ... 364 365
+	"DDDD": "",                        // Day of the year 001 002 ... 364 365 @todo
+	"d":    "<stdDayOfWeek>",          // Numeric representation of day of the week 0 1 ... 5 6
+	"dd":   "Mon",                     // ***Su Mo ... Fr Sa @todo
+	"ddd":  "Mon",                     // Sun Mon ... Fri Sat
+	"dddd": "Monday",                  // stdLongWeekDay Sunday Monday ... Friday Saturday
+	"e":    "<stdDayOfWeek>",          // Numeric representation of day of the week 0 1 ... 5 6 @todo
+	"E":    "<stdDayOfWeekISO>",       // ISO-8601 numeric representation of the day of the week 1 2 ... 6 7 @todo
+	"w":    "<stdWeekOfYear>",         // 1 2 ... 52 53
+	"ww":   "<stdWeekOfYear>",         // ***01 02 ... 52 53 @todo
+	"W":    "<stdWeekOfYear>",         // 1 2 ... 52 53
+	"WW":   "<stdWeekOfYear>",         // ***01 02 ... 52 53 @todo
+	"YY":   "06",                      // stdYear 70 71 ... 29 30
+	"YYYY": "2006",                    // stdLongYear 1970 1971 ... 2029 2030
+	"gg":   "<stdIsoYearShort>",       // ISO-8601 year number 70 71 ... 29 30
+	"gggg": "<stdIsoYear>",            // ***1970 1971 ... 2029 2030
+	"GG":   "<stdIsoYearShort>",       // 70 71 ... 29 30
+	"GGGG": "<stdIsoYear>",            // ***1970 1971 ... 2029 2030
+	"Q":    "<stdQuarter>",            // 1, 2, 3, 4
+	"A":    "PM",                      // stdPM AM PM
+	"a":    "pm",                      // stdpm am pm
+	"H":    "<stdHourNoZero>",         // stdHour 0 1 ... 22 23
+	"HH":   "15",                      // 00 01 ... 22 23
+	"h":    "3",                       // stdHour12 1 2 ... 11 12
+	"hh":   "03",                      // stdZeroHour12 01 02 ... 11 12
+	"m":    "4",                       // stdMinute 0 1 ... 58 59
+	"mm":   "04",                      // stdZeroMinute 00 01 ... 58 59
+	"s":    "5",                       // stdSecond 0 1 ... 58 59
+	"ss":   "05",                      // stdZeroSecond 00 01 ... 58 59
+	"z":    "MST",                     // EST CST ... MST PST
+	"zz":   "MST",                     // EST CST ... MST PST
+	"Z":    "Z07:00",                  // stdNumColonTZ -07:00 -06:00 ... +06:00 +07:00
+	"ZZ":   "-0700",                   // stdNumTZ -0700 -0600 ... +0600 +0700
+	"X":    "<stdUnix>",               // Seconds since unix epoch 1360013296
+	"LT":   "3:04 PM",                 // 8:30 PM
+	"L":    "01/02/2006",              // 09/04/1986
+	"l":    "1/2/2006",                // 9/4/1986
+	"ll":   "Jan 2 2006",              // Sep 4 1986
+	"lll":  "Jan 2 2006 3:04 PM",      // Sep 4 1986 8:30 PM
+	"llll": "Mon, Jan 2 2006 3:04 PM", // Thu, Sep 4 1986 8:30 PM
+}
+
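+// formatDate renders one index name from a Moment.js-style pattern, e.g.
+// formatDate(t, "[data-]YYYY.MM.DD") yields "data-2018.05.15" for a t on
+// that day. Tokens Go's reference layout cannot express (ISO week/year,
+// quarter, unix epoch, ...) are emitted by patternToLayout as <std*>
+// sentinels and substituted below.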
+func formatDate(t time.Time, pattern string) string {
+	var datePattern string
+	parts := strings.Split(strings.TrimLeft(pattern, "["), "]")
+	base := parts[0]
+	if len(parts) == 2 {
+		datePattern = parts[1]
+	} else {
+		datePattern = base
+		base = ""
+	}
+
+	formatted := t.Format(patternToLayout(datePattern))
+
+	if strings.Contains(formatted, "<std") {
+		isoYear, isoWeek := t.ISOWeek()
+		isoYearShort := fmt.Sprintf("%02d", isoYear%100)
+		formatted = strings.Replace(formatted, "<stdIsoYear>", fmt.Sprintf("%d", isoYear), -1)
+		formatted = strings.Replace(formatted, "<stdIsoYearShort>", isoYearShort, -1)
+		formatted = strings.Replace(formatted, "<stdWeekOfYear>", fmt.Sprintf("%d", isoWeek), -1)
+
+		formatted = strings.Replace(formatted, "<stdUnix>", fmt.Sprintf("%d", t.Unix()), -1)
+
+		day := t.Weekday()
+		dayOfWeekIso := int(day)
+		if day == time.Sunday {
+			dayOfWeekIso = 7
+		}
+
+		formatted = strings.Replace(formatted, "<stdDayOfWeek>", fmt.Sprintf("%d", day), -1)
+		formatted = strings.Replace(formatted, "<stdDayOfWeekISO>", fmt.Sprintf("%d", dayOfWeekIso), -1)
+		formatted = strings.Replace(formatted, "<stdDayOfYear>", fmt.Sprintf("%d", t.YearDay()), -1)
+
+		quarter := 4
+
+		switch t.Month() {
+		case time.January, time.February, time.March:
+			quarter = 1
+		case time.April, time.May, time.June:
+			quarter = 2
+		case time.July, time.August, time.September:
+			quarter = 3
+		}
+
+		formatted = strings.Replace(formatted, "<stdQuarter>", fmt.Sprintf("%d", quarter), -1)
+		formatted = strings.Replace(formatted, "<stdHourNoZero>", fmt.Sprintf("%d", t.Hour()), -1)
+	}
+
+	return base + formatted
+}
+
+func patternToLayout(pattern string) string {
+	var match [][]string
+	if match = datePatternRegex.FindAllStringSubmatch(pattern, -1); match == nil {
+		return pattern
+	}
+
+	for i := range match {
+		if replace, ok := datePatternReplacements[match[i][0]]; ok {
+			pattern = strings.Replace(pattern, match[i][0], replace, 1)
+		}
+	}
+
+	return pattern
+}
diff --git a/pkg/tsdb/elasticsearch/client/index_pattern_test.go b/pkg/tsdb/elasticsearch/client/index_pattern_test.go
new file mode 100644
index 00000000000..3bd823d8c87
--- /dev/null
+++ b/pkg/tsdb/elasticsearch/client/index_pattern_test.go
@@ -0,0 +1,244 @@
+package es + +import ( + "fmt" + "testing" + "time" + + "github.com/grafana/grafana/pkg/tsdb" + + . "github.com/smartystreets/goconvey/convey" +) + +func TestIndexPattern(t *testing.T) { + Convey("Static index patterns", t, func() { + indexPatternScenario(noInterval, "data-*", nil, func(indices []string) { + So(indices, ShouldHaveLength, 1) + So(indices[0], ShouldEqual, "data-*") + }) + + indexPatternScenario(noInterval, "es-index-name", nil, func(indices []string) { + So(indices, ShouldHaveLength, 1) + So(indices[0], ShouldEqual, "es-index-name") + }) + }) + + Convey("Dynamic index patterns", t, func() { + from := fmt.Sprintf("%d", time.Date(2018, 5, 15, 17, 50, 0, 0, time.UTC).UnixNano()/int64(time.Millisecond)) + to := fmt.Sprintf("%d", time.Date(2018, 5, 15, 17, 55, 0, 0, time.UTC).UnixNano()/int64(time.Millisecond)) + + indexPatternScenario(intervalHourly, "[data-]YYYY.MM.DD.HH", tsdb.NewTimeRange(from, to), func(indices []string) { + //So(indices, ShouldHaveLength, 1) + So(indices[0], ShouldEqual, "data-2018.05.15.17") + }) + + indexPatternScenario(intervalDaily, "[data-]YYYY.MM.DD", tsdb.NewTimeRange(from, to), func(indices []string) { + So(indices, ShouldHaveLength, 1) + So(indices[0], ShouldEqual, "data-2018.05.15") + }) + + indexPatternScenario(intervalWeekly, "[data-]GGGG.WW", tsdb.NewTimeRange(from, to), func(indices []string) { + So(indices, ShouldHaveLength, 1) + So(indices[0], ShouldEqual, "data-2018.20") + }) + + indexPatternScenario(intervalMonthly, "[data-]YYYY.MM", tsdb.NewTimeRange(from, to), func(indices []string) { + So(indices, ShouldHaveLength, 1) + So(indices[0], ShouldEqual, "data-2018.05") + }) + + indexPatternScenario(intervalYearly, "[data-]YYYY", tsdb.NewTimeRange(from, to), func(indices []string) { + So(indices, ShouldHaveLength, 1) + So(indices[0], ShouldEqual, "data-2018") + }) + }) + + Convey("Hourly interval", t, func() { + Convey("Should return 1 interval", func() { + from := time.Date(2018, 1, 1, 23, 1, 1, 0, time.UTC) + to := time.Date(2018, 1, 1, 23, 6, 0, 0, time.UTC) + intervals := (&hourlyInterval{}).Generate(from, to) + So(intervals, ShouldHaveLength, 1) + So(intervals[0], ShouldEqual, time.Date(2018, 1, 1, 23, 0, 0, 0, time.UTC)) + }) + + Convey("Should return 2 intervals", func() { + from := time.Date(2018, 1, 1, 23, 1, 1, 0, time.UTC) + to := time.Date(2018, 1, 2, 0, 6, 0, 0, time.UTC) + intervals := (&hourlyInterval{}).Generate(from, to) + So(intervals, ShouldHaveLength, 2) + So(intervals[0], ShouldEqual, time.Date(2018, 1, 1, 23, 0, 0, 0, time.UTC)) + So(intervals[1], ShouldEqual, time.Date(2018, 1, 2, 0, 0, 0, 0, time.UTC)) + }) + + Convey("Should return 10 intervals", func() { + from := time.Date(2018, 1, 1, 23, 1, 1, 0, time.UTC) + to := time.Date(2018, 1, 2, 8, 6, 0, 0, time.UTC) + intervals := (&hourlyInterval{}).Generate(from, to) + So(intervals, ShouldHaveLength, 10) + So(intervals[0], ShouldEqual, time.Date(2018, 1, 1, 23, 0, 0, 0, time.UTC)) + So(intervals[4], ShouldEqual, time.Date(2018, 1, 2, 3, 0, 0, 0, time.UTC)) + So(intervals[9], ShouldEqual, time.Date(2018, 1, 2, 8, 0, 0, 0, time.UTC)) + }) + }) + + Convey("Daily interval", t, func() { + Convey("Should return 1 day", func() { + from := time.Date(2018, 1, 1, 23, 1, 1, 0, time.UTC) + to := time.Date(2018, 1, 1, 23, 6, 0, 0, time.UTC) + intervals := (&dailyInterval{}).Generate(from, to) + So(intervals, ShouldHaveLength, 1) + So(intervals[0], ShouldEqual, time.Date(2018, 1, 1, 0, 0, 0, 0, time.UTC)) + }) + + Convey("Should return 2 days", func() { + from := time.Date(2018, 1, 1, 
23, 1, 1, 0, time.UTC) + to := time.Date(2018, 1, 2, 0, 6, 0, 0, time.UTC) + intervals := (&dailyInterval{}).Generate(from, to) + So(intervals, ShouldHaveLength, 2) + So(intervals[0], ShouldEqual, time.Date(2018, 1, 1, 0, 0, 0, 0, time.UTC)) + So(intervals[1], ShouldEqual, time.Date(2018, 1, 2, 0, 0, 0, 0, time.UTC)) + }) + + Convey("Should return 32 days", func() { + from := time.Date(2018, 1, 1, 23, 1, 1, 0, time.UTC) + to := time.Date(2018, 2, 1, 8, 6, 0, 0, time.UTC) + intervals := (&dailyInterval{}).Generate(from, to) + So(intervals, ShouldHaveLength, 32) + So(intervals[0], ShouldEqual, time.Date(2018, 1, 1, 0, 0, 0, 0, time.UTC)) + So(intervals[30], ShouldEqual, time.Date(2018, 1, 31, 0, 0, 0, 0, time.UTC)) + So(intervals[31], ShouldEqual, time.Date(2018, 2, 1, 0, 0, 0, 0, time.UTC)) + }) + }) + + Convey("Weekly interval", t, func() { + Convey("Should return 1 week (1)", func() { + from := time.Date(2018, 1, 1, 23, 1, 1, 0, time.UTC) + to := time.Date(2018, 1, 1, 23, 6, 0, 0, time.UTC) + intervals := (&weeklyInterval{}).Generate(from, to) + So(intervals, ShouldHaveLength, 1) + So(intervals[0], ShouldEqual, time.Date(2018, 1, 1, 0, 0, 0, 0, time.UTC)) + }) + + Convey("Should return 1 week (2)", func() { + from := time.Date(2017, 1, 1, 23, 1, 1, 0, time.UTC) + to := time.Date(2017, 1, 1, 23, 6, 0, 0, time.UTC) + intervals := (&weeklyInterval{}).Generate(from, to) + So(intervals, ShouldHaveLength, 1) + So(intervals[0], ShouldEqual, time.Date(2016, 12, 26, 0, 0, 0, 0, time.UTC)) + }) + + Convey("Should return 2 weeks (1)", func() { + from := time.Date(2018, 1, 1, 23, 1, 1, 0, time.UTC) + to := time.Date(2018, 1, 10, 23, 6, 0, 0, time.UTC) + intervals := (&weeklyInterval{}).Generate(from, to) + So(intervals, ShouldHaveLength, 2) + So(intervals[0], ShouldEqual, time.Date(2018, 1, 1, 0, 0, 0, 0, time.UTC)) + So(intervals[1], ShouldEqual, time.Date(2018, 1, 8, 0, 0, 0, 0, time.UTC)) + }) + + Convey("Should return 2 weeks (2)", func() { + from := time.Date(2017, 1, 1, 23, 1, 1, 0, time.UTC) + to := time.Date(2017, 1, 8, 23, 6, 0, 0, time.UTC) + intervals := (&weeklyInterval{}).Generate(from, to) + So(intervals, ShouldHaveLength, 2) + So(intervals[0], ShouldEqual, time.Date(2016, 12, 26, 0, 0, 0, 0, time.UTC)) + So(intervals[1], ShouldEqual, time.Date(2017, 1, 2, 0, 0, 0, 0, time.UTC)) + }) + + Convey("Should return 3 weeks (1)", func() { + from := time.Date(2018, 1, 1, 23, 1, 1, 0, time.UTC) + to := time.Date(2018, 1, 21, 23, 6, 0, 0, time.UTC) + intervals := (&weeklyInterval{}).Generate(from, to) + So(intervals, ShouldHaveLength, 3) + So(intervals[0], ShouldEqual, time.Date(2018, 1, 1, 0, 0, 0, 0, time.UTC)) + So(intervals[1], ShouldEqual, time.Date(2018, 1, 8, 0, 0, 0, 0, time.UTC)) + So(intervals[2], ShouldEqual, time.Date(2018, 1, 15, 0, 0, 0, 0, time.UTC)) + }) + + Convey("Should return 3 weeks (2)", func() { + from := time.Date(2017, 1, 1, 23, 1, 1, 0, time.UTC) + to := time.Date(2017, 1, 9, 23, 6, 0, 0, time.UTC) + intervals := (&weeklyInterval{}).Generate(from, to) + So(intervals, ShouldHaveLength, 3) + So(intervals[0], ShouldEqual, time.Date(2016, 12, 26, 0, 0, 0, 0, time.UTC)) + So(intervals[1], ShouldEqual, time.Date(2017, 1, 2, 0, 0, 0, 0, time.UTC)) + So(intervals[2], ShouldEqual, time.Date(2017, 1, 9, 0, 0, 0, 0, time.UTC)) + }) + }) + + Convey("Monthly interval", t, func() { + Convey("Should return 1 month", func() { + from := time.Date(2018, 1, 1, 23, 1, 1, 0, time.UTC) + to := time.Date(2018, 1, 1, 23, 6, 0, 0, time.UTC) + intervals := (&monthlyInterval{}).Generate(from, to) 
+			So(intervals, ShouldHaveLength, 1)
+			So(intervals[0], ShouldEqual, time.Date(2018, 1, 1, 0, 0, 0, 0, time.UTC))
+		})
+
+		Convey("Should return 2 months", func() {
+			from := time.Date(2018, 1, 1, 23, 1, 1, 0, time.UTC)
+			to := time.Date(2018, 2, 2, 0, 6, 0, 0, time.UTC)
+			intervals := (&monthlyInterval{}).Generate(from, to)
+			So(intervals, ShouldHaveLength, 2)
+			So(intervals[0], ShouldEqual, time.Date(2018, 1, 1, 0, 0, 0, 0, time.UTC))
+			So(intervals[1], ShouldEqual, time.Date(2018, 2, 1, 0, 0, 0, 0, time.UTC))
+		})
+
+		Convey("Should return 14 months", func() {
+			from := time.Date(2017, 1, 1, 23, 1, 1, 0, time.UTC)
+			to := time.Date(2018, 2, 1, 8, 6, 0, 0, time.UTC)
+			intervals := (&monthlyInterval{}).Generate(from, to)
+			So(intervals, ShouldHaveLength, 14)
+			So(intervals[0], ShouldEqual, time.Date(2017, 1, 1, 0, 0, 0, 0, time.UTC))
+			So(intervals[13], ShouldEqual, time.Date(2018, 2, 1, 0, 0, 0, 0, time.UTC))
+		})
+	})
+
+	Convey("Yearly interval", t, func() {
+		Convey("Should return 1 year (hour diff)", func() {
+			from := time.Date(2018, 2, 1, 23, 1, 1, 0, time.UTC)
+			to := time.Date(2018, 2, 1, 23, 6, 0, 0, time.UTC)
+			intervals := (&yearlyInterval{}).Generate(from, to)
+			So(intervals, ShouldHaveLength, 1)
+			So(intervals[0], ShouldEqual, time.Date(2018, 1, 1, 0, 0, 0, 0, time.UTC))
+		})
+
+		Convey("Should return 1 year (month diff)", func() {
+			from := time.Date(2018, 2, 1, 23, 1, 1, 0, time.UTC)
+			to := time.Date(2018, 12, 31, 23, 59, 59, 0, time.UTC)
+			intervals := (&yearlyInterval{}).Generate(from, to)
+			So(intervals, ShouldHaveLength, 1)
+			So(intervals[0], ShouldEqual, time.Date(2018, 1, 1, 0, 0, 0, 0, time.UTC))
+		})
+
+		Convey("Should return 2 years", func() {
+			from := time.Date(2018, 2, 1, 23, 1, 1, 0, time.UTC)
+			to := time.Date(2019, 1, 1, 23, 59, 59, 0, time.UTC)
+			intervals := (&yearlyInterval{}).Generate(from, to)
+			So(intervals, ShouldHaveLength, 2)
+			So(intervals[0], ShouldEqual, time.Date(2018, 1, 1, 0, 0, 0, 0, time.UTC))
+			So(intervals[1], ShouldEqual, time.Date(2019, 1, 1, 0, 0, 0, 0, time.UTC))
+		})
+
+		Convey("Should return 5 years", func() {
+			from := time.Date(2014, 1, 1, 23, 1, 1, 0, time.UTC)
+			to := time.Date(2018, 11, 1, 23, 59, 59, 0, time.UTC)
+			intervals := (&yearlyInterval{}).Generate(from, to)
+			So(intervals, ShouldHaveLength, 5)
+			So(intervals[0], ShouldEqual, time.Date(2014, 1, 1, 0, 0, 0, 0, time.UTC))
+			So(intervals[4], ShouldEqual, time.Date(2018, 1, 1, 0, 0, 0, 0, time.UTC))
+		})
+	})
+}
+
+func indexPatternScenario(interval string, pattern string, timeRange *tsdb.TimeRange, fn func(indices []string)) {
+	Convey(fmt.Sprintf("Index pattern (interval=%s, index=%s)", interval, pattern), func() {
+		ip, err := newIndexPattern(interval, pattern)
+		So(err, ShouldBeNil)
+		So(ip, ShouldNotBeNil)
+		indices, err := ip.GetIndices(timeRange)
+		So(err, ShouldBeNil)
+		fn(indices)
+	})
+}
diff --git a/pkg/tsdb/elasticsearch/client/models.go b/pkg/tsdb/elasticsearch/client/models.go
new file mode 100644
index 00000000000..a0d257d01a6
--- /dev/null
+++ b/pkg/tsdb/elasticsearch/client/models.go
@@ -0,0 +1,311 @@
+package es
+
+import (
+	"encoding/json"
+
+	"github.com/grafana/grafana/pkg/tsdb"
+)
+
+// SearchRequest represents a search request
+type SearchRequest struct {
+	Index       string
+	Interval    tsdb.Interval
+	Size        int
+	Sort        map[string]interface{}
+	Query       *Query
+	Aggs        AggArray
+	CustomProps map[string]interface{}
+}
+
+// MarshalJSON returns the JSON encoding of the request.
+func (r *SearchRequest) MarshalJSON() ([]byte, error) {
+	root := make(map[string]interface{})
+
+	root["size"] = r.Size
+	if len(r.Sort) > 0 {
+		root["sort"] = r.Sort
+	}
+
+	for key, value := range r.CustomProps {
+		root[key] = value
+	}
+
+	root["query"] = r.Query
+
+	if len(r.Aggs) > 0 {
+		root["aggs"] = r.Aggs
+	}
+
+	return json.Marshal(root)
+}
+
+// SearchResponseHits represents search response hits
+type SearchResponseHits struct {
+	Hits  []map[string]interface{}
+	Total int64
+}
+
+// SearchResponse represents a search response
+type SearchResponse struct {
+	Error        map[string]interface{} `json:"error"`
+	Aggregations map[string]interface{} `json:"aggregations"`
+	Hits         *SearchResponseHits    `json:"hits"`
+}
+
+// MultiSearchRequest represents a multi search request
+type MultiSearchRequest struct {
+	Requests []*SearchRequest
+}
+
+// MultiSearchResponse represents a multi search response
+type MultiSearchResponse struct {
+	Status    int               `json:"status,omitempty"`
+	Responses []*SearchResponse `json:"responses"`
+}
+
+// Query represents a query
+type Query struct {
+	Bool *BoolQuery `json:"bool"`
+}
+
+// BoolQuery represents a bool query
+type BoolQuery struct {
+	Filters []Filter
+}
+
+// NewBoolQuery creates a new bool query
+func NewBoolQuery() *BoolQuery {
+	return &BoolQuery{Filters: make([]Filter, 0)}
+}
+
+// MarshalJSON returns the JSON encoding of the boolean query.
+func (q *BoolQuery) MarshalJSON() ([]byte, error) {
+	root := make(map[string]interface{})
+
+	if len(q.Filters) > 0 {
+		if len(q.Filters) == 1 {
+			root["filter"] = q.Filters[0]
+		} else {
+			root["filter"] = q.Filters
+		}
+	}
+	return json.Marshal(root)
+}
+
+// Filter represents a search filter
+type Filter interface{}
+
+// QueryStringFilter represents a query string search filter
+type QueryStringFilter struct {
+	Filter
+	Query           string
+	AnalyzeWildcard bool
+}
+
+// MarshalJSON returns the JSON encoding of the query string filter.
+func (f *QueryStringFilter) MarshalJSON() ([]byte, error) {
+	root := map[string]interface{}{
+		"query_string": map[string]interface{}{
+			"query":            f.Query,
+			"analyze_wildcard": f.AnalyzeWildcard,
+		},
+	}
+
+	return json.Marshal(root)
+}
+
+// RangeFilter represents a range search filter
+type RangeFilter struct {
+	Filter
+	Key    string
+	Gte    string
+	Lte    string
+	Format string
+}
+
+// DateFormatEpochMS represents a date format of epoch milliseconds (epoch_millis)
+const DateFormatEpochMS = "epoch_millis"
+
+// MarshalJSON returns the JSON encoding of the range filter.
+func (f *RangeFilter) MarshalJSON() ([]byte, error) {
+	root := map[string]map[string]map[string]interface{}{
+		"range": {
+			f.Key: {
+				"lte": f.Lte,
+				"gte": f.Gte,
+			},
+		},
+	}
+
+	if f.Format != "" {
+		root["range"][f.Key]["format"] = f.Format
+	}
+
+	return json.Marshal(root)
+}
+
+// Aggregation represents an aggregation
+type Aggregation interface{}
+
+// Agg represents a key and aggregation
+type Agg struct {
+	Key         string
+	Aggregation *aggContainer
+}
+
+// MarshalJSON returns the JSON encoding of the agg
+func (a *Agg) MarshalJSON() ([]byte, error) {
+	root := map[string]interface{}{
+		a.Key: a.Aggregation,
+	}
+
+	return json.Marshal(root)
+}
+
+// AggArray represents a collection of key/aggregation pairs
+type AggArray []*Agg
+
+// MarshalJSON returns the JSON encoding of the agg array
+func (a AggArray) MarshalJSON() ([]byte, error) {
+	aggsMap := make(map[string]Aggregation)
+
+	for _, subAgg := range a {
+		aggsMap[subAgg.Key] = subAgg.Aggregation
+	}
+
+	return json.Marshal(aggsMap)
+}
+
+type aggContainer struct {
+	Type        string
+	Aggregation Aggregation
+	Aggs        AggArray
+}
+
+// MarshalJSON returns the JSON encoding of the aggregation container
+func (a *aggContainer) MarshalJSON() ([]byte, error) {
+	root := map[string]interface{}{
+		a.Type: a.Aggregation,
+	}
+
+	if len(a.Aggs) > 0 {
+		root["aggs"] = a.Aggs
+	}
+
+	return json.Marshal(root)
+}
+
+type aggDef struct {
+	key         string
+	aggregation *aggContainer
+	builders    []AggBuilder
+}
+
+func newAggDef(key string, aggregation *aggContainer) *aggDef {
+	return &aggDef{
+		key:         key,
+		aggregation: aggregation,
+		builders:    make([]AggBuilder, 0),
+	}
+}
+
+// HistogramAgg represents a histogram aggregation
+type HistogramAgg struct {
+	Interval    int    `json:"interval,omitempty"`
+	Field       string `json:"field"`
+	MinDocCount int    `json:"min_doc_count"`
+	Missing     *int   `json:"missing,omitempty"`
+}
+
+// DateHistogramAgg represents a date histogram aggregation
+type DateHistogramAgg struct {
+	Field          string          `json:"field"`
+	Interval       string          `json:"interval,omitempty"`
+	MinDocCount    int             `json:"min_doc_count"`
+	Missing        *string         `json:"missing,omitempty"`
+	ExtendedBounds *ExtendedBounds `json:"extended_bounds"`
+	Format         string          `json:"format"`
+}
+
+// FiltersAggregation represents a filters aggregation
+type FiltersAggregation struct {
+	Filters map[string]interface{} `json:"filters"`
+}
+
+// TermsAggregation represents a terms aggregation
+type TermsAggregation struct {
+	Field       string                 `json:"field"`
+	Size        int                    `json:"size"`
+	Order       map[string]interface{} `json:"order"`
+	MinDocCount *int                   `json:"min_doc_count,omitempty"`
+	Missing     *string                `json:"missing,omitempty"`
+}
+
+// ExtendedBounds represents extended bounds
+type ExtendedBounds struct {
+	Min string `json:"min"`
+	Max string `json:"max"`
+}
+
+// GeoHashGridAggregation represents a geo hash grid aggregation
+type GeoHashGridAggregation struct {
+	Field     string `json:"field"`
+	Precision int    `json:"precision"`
+}
+
+// MetricAggregation represents a metric aggregation
+type MetricAggregation struct {
+	Field    string
+	Settings map[string]interface{}
+}
+
+// MarshalJSON returns the JSON encoding of the metric aggregation
+func (a *MetricAggregation) MarshalJSON() ([]byte, error) {
+	root := map[string]interface{}{
+		"field": a.Field,
+	}
+
+	for k, v := range a.Settings {
+		if k != "" && v != nil {
+			root[k] = v
+		}
+	}
+
+	return json.Marshal(root)
+}
+
+// PipelineAggregation represents a pipeline aggregation
+type PipelineAggregation struct {
+	BucketPath string
+	Settings   map[string]interface{}
+}
+
+// MarshalJSON returns the JSON encoding of the pipeline aggregation
+func (a *PipelineAggregation) MarshalJSON() ([]byte, error) {
+	root := map[string]interface{}{
+		"buckets_path": a.BucketPath,
+	}
+
+	for k, v := range a.Settings {
+		if k != "" && v != nil {
+			root[k] = v
+		}
+	}
+
+	return json.Marshal(root)
+}
diff --git a/pkg/tsdb/elasticsearch/client/search_request.go b/pkg/tsdb/elasticsearch/client/search_request.go
new file mode 100644
index 00000000000..2b833ce78d3
--- /dev/null
+++ b/pkg/tsdb/elasticsearch/client/search_request.go
@@ -0,0 +1,451 @@
+package es
+
+import (
+	"strings"
+
+	"github.com/grafana/grafana/pkg/tsdb"
+)
+
+// SearchRequestBuilder represents a builder which can build a search request
+type SearchRequestBuilder struct {
+	version      int
+	interval     tsdb.Interval
+	index        string
+	size         int
+	sort         map[string]interface{}
+	queryBuilder *QueryBuilder
+	aggBuilders  []AggBuilder
+	customProps  map[string]interface{}
+}
+
+// NewSearchRequestBuilder creates a new search request builder
+func NewSearchRequestBuilder(version int, interval tsdb.Interval) *SearchRequestBuilder {
+	builder := &SearchRequestBuilder{
+		version:     version,
+		interval:    interval,
+		sort:        make(map[string]interface{}),
+		customProps: make(map[string]interface{}),
+		aggBuilders: make([]AggBuilder, 0),
+	}
+	return builder
+}
+
+// Build builds and returns a search request
+func (b *SearchRequestBuilder) Build() (*SearchRequest, error) {
+	sr := SearchRequest{
+		Index:       b.index,
+		Interval:    b.interval,
+		Size:        b.size,
+		Sort:        b.sort,
+		CustomProps: b.customProps,
+	}
+
+	if b.queryBuilder != nil {
+		q, err := b.queryBuilder.Build()
+		if err != nil {
+			return nil, err
+		}
+		sr.Query = q
+	}
+
+	if len(b.aggBuilders) > 0 {
+		sr.Aggs = make(AggArray, 0)
+
+		for _, ab := range b.aggBuilders {
+			aggArray, err := ab.Build()
+			if err != nil {
+				return nil, err
+			}
+			for _, agg := range aggArray {
+				sr.Aggs = append(sr.Aggs, agg)
+			}
+		}
+	}
+
+	return &sr, nil
+}
+
+// Size sets the size of the search request
+func (b *SearchRequestBuilder) Size(size int) *SearchRequestBuilder {
+	b.size = size
+	return b
+}
+
+// SortDesc adds a descending sort to the search request
+func (b *SearchRequestBuilder) SortDesc(field, unmappedType string) *SearchRequestBuilder {
+	props := map[string]string{
+		"order": "desc",
+	}
+
+	if unmappedType != "" {
+		props["unmapped_type"] = unmappedType
+	}
+
+	b.sort[field] = props
+
+	return b
+}
+
+// AddDocValueField adds a doc value field to the search request
+func (b *SearchRequestBuilder) AddDocValueField(field string) *SearchRequestBuilder {
+	// the fields property is not supported on version >= 5
+	if b.version < 5 {
+		b.customProps["fields"] = []string{"*", "_source"}
+	}
+
+	b.customProps["script_fields"] = make(map[string]interface{})
+
+	if b.version < 5 {
+		b.customProps["fielddata_fields"] = []string{field}
+	} else {
+		b.customProps["docvalue_fields"] = []string{field}
+	}
+
+	return b
+}
+
+// Query creates and returns a query builder
+func (b *SearchRequestBuilder) Query() *QueryBuilder {
+	if b.queryBuilder == nil {
+		b.queryBuilder = NewQueryBuilder()
+	}
+	return b.queryBuilder
+}
+
+// Agg initiates and returns a new aggregation builder
+func (b *SearchRequestBuilder) Agg() AggBuilder {
+	aggBuilder := newAggBuilder()
+	b.aggBuilders = append(b.aggBuilders, aggBuilder)
+	return aggBuilder
+}
+
+// MultiSearchRequestBuilder represents a builder which can build a multi search request
+type MultiSearchRequestBuilder struct {
+	version         int
+	requestBuilders []*SearchRequestBuilder
+}
+
+// NewMultiSearchRequestBuilder creates a new multi search request builder
+func NewMultiSearchRequestBuilder(version int) *MultiSearchRequestBuilder {
+	return &MultiSearchRequestBuilder{
+		version: version,
+	}
+}
+
+// Search initiates and returns a new search request builder
+func (m *MultiSearchRequestBuilder) Search(interval tsdb.Interval) *SearchRequestBuilder {
+	b := NewSearchRequestBuilder(m.version, interval)
+	m.requestBuilders = append(m.requestBuilders, b)
+	return b
+}
+
+// Build builds and returns a multi search request
+func (m *MultiSearchRequestBuilder) Build() (*MultiSearchRequest, error) {
+	requests := []*SearchRequest{}
+	for _, sb := range m.requestBuilders {
+		searchRequest, err := sb.Build()
+		if err != nil {
+			return nil, err
+		}
+		requests = append(requests, searchRequest)
+	}
+
+	return &MultiSearchRequest{
+		Requests: requests,
+	}, nil
+}
+
+// QueryBuilder represents a query builder
+type QueryBuilder struct {
+	boolQueryBuilder *BoolQueryBuilder
+}
+
+// NewQueryBuilder creates a new query builder
+func NewQueryBuilder() *QueryBuilder {
+	return &QueryBuilder{}
+}
+
+// Build builds and returns a query
+func (b *QueryBuilder) Build() (*Query, error) {
+	q := Query{}
+
+	if b.boolQueryBuilder != nil {
+		b, err := b.boolQueryBuilder.Build()
+		if err != nil {
+			return nil, err
+		}
+		q.Bool = b
+	}
+
+	return &q, nil
+}
+
+// Bool creates and returns a bool query builder
+func (b *QueryBuilder) Bool() *BoolQueryBuilder {
+	if b.boolQueryBuilder == nil {
+		b.boolQueryBuilder = NewBoolQueryBuilder()
+	}
+	return b.boolQueryBuilder
+}
+
+// BoolQueryBuilder represents a bool query builder
+type BoolQueryBuilder struct {
+	filterQueryBuilder *FilterQueryBuilder
+}
+
+// NewBoolQueryBuilder creates a new bool query builder
+func NewBoolQueryBuilder() *BoolQueryBuilder {
+	return &BoolQueryBuilder{}
+}
+
+// Filter creates and returns a filter query builder
+func (b *BoolQueryBuilder) Filter() *FilterQueryBuilder {
+	if b.filterQueryBuilder == nil {
+		b.filterQueryBuilder = NewFilterQueryBuilder()
+	}
+	return b.filterQueryBuilder
+}
+
+// Build builds and returns a bool query
+func (b *BoolQueryBuilder) Build() (*BoolQuery, error) {
+	boolQuery := BoolQuery{}
+
+	if b.filterQueryBuilder != nil {
+		filters, err := b.filterQueryBuilder.Build()
+		if err != nil {
+			return nil, err
+		}
+		boolQuery.Filters = filters
+	}
+
+	return &boolQuery, nil
+}
+
+// FilterQueryBuilder represents a filter query builder
+type FilterQueryBuilder struct {
+	filters []Filter
+}
+
+// NewFilterQueryBuilder creates a new filter query builder
+func NewFilterQueryBuilder() *FilterQueryBuilder {
+	return &FilterQueryBuilder{
+		filters: make([]Filter, 0),
+	}
+}
+
+// Build builds and returns the filters
+func (b *FilterQueryBuilder) Build() ([]Filter, error) {
+	return b.filters, nil
+}
+
+// AddDateRangeFilter adds a new time range filter
+func (b *FilterQueryBuilder) AddDateRangeFilter(timeField, lte, gte, format string) *FilterQueryBuilder {
+	b.filters = append(b.filters, &RangeFilter{
+		Key:    timeField,
+		Lte:    lte,
+		Gte:    gte,
+		Format: format,
+	})
+	return b
+}
+
+// AddQueryStringFilter adds a new query string filter
+func (b *FilterQueryBuilder) AddQueryStringFilter(querystring string, analyseWildcard bool) *FilterQueryBuilder {
+	if len(strings.TrimSpace(querystring)) == 0 {
+		return b
+	}
+
+	b.filters = append(b.filters, &QueryStringFilter{
+		Query:           querystring,
+		AnalyzeWildcard: analyseWildcard,
+	})
+	return b
+}
+
+// AggBuilder
represents an aggregation builder +type AggBuilder interface { + Histogram(key, field string, fn func(a *HistogramAgg, b AggBuilder)) AggBuilder + DateHistogram(key, field string, fn func(a *DateHistogramAgg, b AggBuilder)) AggBuilder + Terms(key, field string, fn func(a *TermsAggregation, b AggBuilder)) AggBuilder + Filters(key string, fn func(a *FiltersAggregation, b AggBuilder)) AggBuilder + GeoHashGrid(key, field string, fn func(a *GeoHashGridAggregation, b AggBuilder)) AggBuilder + Metric(key, metricType, field string, fn func(a *MetricAggregation)) AggBuilder + Pipeline(key, pipelineType, bucketPath string, fn func(a *PipelineAggregation)) AggBuilder + Build() (AggArray, error) +} + +type aggBuilderImpl struct { + AggBuilder + aggDefs []*aggDef +} + +func newAggBuilder() *aggBuilderImpl { + return &aggBuilderImpl{ + aggDefs: make([]*aggDef, 0), + } +} + +func (b *aggBuilderImpl) Build() (AggArray, error) { + aggs := make(AggArray, 0) + + for _, aggDef := range b.aggDefs { + agg := &Agg{ + Key: aggDef.key, + Aggregation: aggDef.aggregation, + } + + for _, cb := range aggDef.builders { + childAggs, err := cb.Build() + if err != nil { + return nil, err + } + + for _, childAgg := range childAggs { + agg.Aggregation.Aggs = append(agg.Aggregation.Aggs, childAgg) + } + } + + aggs = append(aggs, agg) + } + + return aggs, nil +} + +func (b *aggBuilderImpl) Histogram(key, field string, fn func(a *HistogramAgg, b AggBuilder)) AggBuilder { + innerAgg := &HistogramAgg{ + Field: field, + } + aggDef := newAggDef(key, &aggContainer{ + Type: "histogram", + Aggregation: innerAgg, + }) + + if fn != nil { + builder := newAggBuilder() + aggDef.builders = append(aggDef.builders, builder) + fn(innerAgg, builder) + } + + b.aggDefs = append(b.aggDefs, aggDef) + + return b +} + +func (b *aggBuilderImpl) DateHistogram(key, field string, fn func(a *DateHistogramAgg, b AggBuilder)) AggBuilder { + innerAgg := &DateHistogramAgg{ + Field: field, + } + aggDef := newAggDef(key, &aggContainer{ + Type: "date_histogram", + Aggregation: innerAgg, + }) + + if fn != nil { + builder := newAggBuilder() + aggDef.builders = append(aggDef.builders, builder) + fn(innerAgg, builder) + } + + b.aggDefs = append(b.aggDefs, aggDef) + + return b +} + +func (b *aggBuilderImpl) Terms(key, field string, fn func(a *TermsAggregation, b AggBuilder)) AggBuilder { + innerAgg := &TermsAggregation{ + Field: field, + Order: make(map[string]interface{}), + } + aggDef := newAggDef(key, &aggContainer{ + Type: "terms", + Aggregation: innerAgg, + }) + + if fn != nil { + builder := newAggBuilder() + aggDef.builders = append(aggDef.builders, builder) + fn(innerAgg, builder) + } + + b.aggDefs = append(b.aggDefs, aggDef) + + return b +} + +func (b *aggBuilderImpl) Filters(key string, fn func(a *FiltersAggregation, b AggBuilder)) AggBuilder { + innerAgg := &FiltersAggregation{ + Filters: make(map[string]interface{}), + } + aggDef := newAggDef(key, &aggContainer{ + Type: "filters", + Aggregation: innerAgg, + }) + if fn != nil { + builder := newAggBuilder() + aggDef.builders = append(aggDef.builders, builder) + fn(innerAgg, builder) + } + + b.aggDefs = append(b.aggDefs, aggDef) + + return b +} + +func (b *aggBuilderImpl) GeoHashGrid(key, field string, fn func(a *GeoHashGridAggregation, b AggBuilder)) AggBuilder { + innerAgg := &GeoHashGridAggregation{ + Field: field, + Precision: 5, + } + aggDef := newAggDef(key, &aggContainer{ + Type: "geohash_grid", + Aggregation: innerAgg, + }) + + if fn != nil { + builder := newAggBuilder() + aggDef.builders = 
append(aggDef.builders, builder) + fn(innerAgg, builder) + } + + b.aggDefs = append(b.aggDefs, aggDef) + + return b +} + +func (b *aggBuilderImpl) Metric(key, metricType, field string, fn func(a *MetricAggregation)) AggBuilder { + innerAgg := &MetricAggregation{ + Field: field, + Settings: make(map[string]interface{}), + } + aggDef := newAggDef(key, &aggContainer{ + Type: metricType, + Aggregation: innerAgg, + }) + + if fn != nil { + fn(innerAgg) + } + + b.aggDefs = append(b.aggDefs, aggDef) + + return b +} + +func (b *aggBuilderImpl) Pipeline(key, pipelineType, bucketPath string, fn func(a *PipelineAggregation)) AggBuilder { + innerAgg := &PipelineAggregation{ + BucketPath: bucketPath, + Settings: make(map[string]interface{}), + } + aggDef := newAggDef(key, &aggContainer{ + Type: pipelineType, + Aggregation: innerAgg, + }) + + if fn != nil { + fn(innerAgg) + } + + b.aggDefs = append(b.aggDefs, aggDef) + + return b +} diff --git a/pkg/tsdb/elasticsearch/client/search_request_test.go b/pkg/tsdb/elasticsearch/client/search_request_test.go new file mode 100644 index 00000000000..862b8058cba --- /dev/null +++ b/pkg/tsdb/elasticsearch/client/search_request_test.go @@ -0,0 +1,473 @@ +package es + +import ( + "encoding/json" + "testing" + "time" + + "github.com/grafana/grafana/pkg/components/simplejson" + "github.com/grafana/grafana/pkg/tsdb" + + . "github.com/smartystreets/goconvey/convey" +) + +func TestSearchRequest(t *testing.T) { + Convey("Test elasticsearch search request", t, func() { + timeField := "@timestamp" + Convey("Given new search request builder for es version 5", func() { + b := NewSearchRequestBuilder(5, tsdb.Interval{Value: 15 * time.Second, Text: "15s"}) + + Convey("When building search request", func() { + sr, err := b.Build() + So(err, ShouldBeNil) + + Convey("Should have size of zero", func() { + So(sr.Size, ShouldEqual, 0) + }) + + Convey("Should have no sorting", func() { + So(sr.Sort, ShouldHaveLength, 0) + }) + + Convey("When marshal to JSON should generate correct json", func() { + body, err := json.Marshal(sr) + So(err, ShouldBeNil) + json, err := simplejson.NewJson(body) + So(err, ShouldBeNil) + So(json.Get("size").MustInt(500), ShouldEqual, 0) + So(json.Get("sort").Interface(), ShouldBeNil) + So(json.Get("aggs").Interface(), ShouldBeNil) + So(json.Get("query").Interface(), ShouldBeNil) + }) + }) + + Convey("When adding size, sort, filters", func() { + b.Size(200) + b.SortDesc(timeField, "boolean") + filters := b.Query().Bool().Filter() + filters.AddDateRangeFilter(timeField, "$timeTo", "$timeFrom", DateFormatEpochMS) + filters.AddQueryStringFilter("test", true) + + Convey("When building search request", func() { + sr, err := b.Build() + So(err, ShouldBeNil) + + Convey("Should have correct size", func() { + So(sr.Size, ShouldEqual, 200) + }) + + Convey("Should have correct sorting", func() { + sort, ok := sr.Sort[timeField].(map[string]string) + So(ok, ShouldBeTrue) + So(sort["order"], ShouldEqual, "desc") + So(sort["unmapped_type"], ShouldEqual, "boolean") + }) + + Convey("Should have range filter", func() { + f, ok := sr.Query.Bool.Filters[0].(*RangeFilter) + So(ok, ShouldBeTrue) + So(f.Gte, ShouldEqual, "$timeFrom") + So(f.Lte, ShouldEqual, "$timeTo") + So(f.Format, ShouldEqual, "epoch_millis") + }) + + Convey("Should have query string filter", func() { + f, ok := sr.Query.Bool.Filters[1].(*QueryStringFilter) + So(ok, ShouldBeTrue) + So(f.Query, ShouldEqual, "test") + So(f.AnalyzeWildcard, ShouldBeTrue) + }) + + Convey("When marshal to JSON should generate correct 
json", func() { + body, err := json.Marshal(sr) + So(err, ShouldBeNil) + json, err := simplejson.NewJson(body) + So(err, ShouldBeNil) + So(json.Get("size").MustInt(0), ShouldEqual, 200) + + sort := json.GetPath("sort", timeField) + So(sort.Get("order").MustString(), ShouldEqual, "desc") + So(sort.Get("unmapped_type").MustString(), ShouldEqual, "boolean") + + timeRangeFilter := json.GetPath("query", "bool", "filter").GetIndex(0).Get("range").Get(timeField) + So(timeRangeFilter.Get("gte").MustString(""), ShouldEqual, "$timeFrom") + So(timeRangeFilter.Get("lte").MustString(""), ShouldEqual, "$timeTo") + So(timeRangeFilter.Get("format").MustString(""), ShouldEqual, DateFormatEpochMS) + + queryStringFilter := json.GetPath("query", "bool", "filter").GetIndex(1).Get("query_string") + So(queryStringFilter.Get("analyze_wildcard").MustBool(false), ShouldEqual, true) + So(queryStringFilter.Get("query").MustString(""), ShouldEqual, "test") + }) + }) + }) + + Convey("When adding doc value field", func() { + b.AddDocValueField(timeField) + + Convey("should set correct props", func() { + So(b.customProps["fields"], ShouldBeNil) + + scriptFields, ok := b.customProps["script_fields"].(map[string]interface{}) + So(ok, ShouldBeTrue) + So(scriptFields, ShouldHaveLength, 0) + + docValueFields, ok := b.customProps["docvalue_fields"].([]string) + So(ok, ShouldBeTrue) + So(docValueFields, ShouldHaveLength, 1) + So(docValueFields[0], ShouldEqual, timeField) + }) + + Convey("When building search request", func() { + sr, err := b.Build() + So(err, ShouldBeNil) + + Convey("When marshal to JSON should generate correct json", func() { + body, err := json.Marshal(sr) + So(err, ShouldBeNil) + json, err := simplejson.NewJson(body) + So(err, ShouldBeNil) + + scriptFields, err := json.Get("script_fields").Map() + So(err, ShouldBeNil) + So(scriptFields, ShouldHaveLength, 0) + + _, err = json.Get("fields").StringArray() + So(err, ShouldNotBeNil) + + docValueFields, err := json.Get("docvalue_fields").StringArray() + So(err, ShouldBeNil) + So(docValueFields, ShouldHaveLength, 1) + So(docValueFields[0], ShouldEqual, timeField) + }) + }) + }) + + Convey("and adding multiple top level aggs", func() { + aggBuilder := b.Agg() + aggBuilder.Terms("1", "@hostname", nil) + aggBuilder.DateHistogram("2", "@timestamp", nil) + + Convey("When building search request", func() { + sr, err := b.Build() + So(err, ShouldBeNil) + + Convey("Should have 2 top level aggs", func() { + aggs := sr.Aggs + So(aggs, ShouldHaveLength, 2) + So(aggs[0].Key, ShouldEqual, "1") + So(aggs[0].Aggregation.Type, ShouldEqual, "terms") + So(aggs[1].Key, ShouldEqual, "2") + So(aggs[1].Aggregation.Type, ShouldEqual, "date_histogram") + }) + + Convey("When marshal to JSON should generate correct json", func() { + body, err := json.Marshal(sr) + So(err, ShouldBeNil) + json, err := simplejson.NewJson(body) + So(err, ShouldBeNil) + + So(json.Get("aggs").MustMap(), ShouldHaveLength, 2) + So(json.GetPath("aggs", "1", "terms", "field").MustString(), ShouldEqual, "@hostname") + So(json.GetPath("aggs", "2", "date_histogram", "field").MustString(), ShouldEqual, "@timestamp") + }) + }) + }) + + Convey("and adding top level agg with child agg", func() { + aggBuilder := b.Agg() + aggBuilder.Terms("1", "@hostname", func(a *TermsAggregation, ib AggBuilder) { + ib.DateHistogram("2", "@timestamp", nil) + }) + + Convey("When building search request", func() { + sr, err := b.Build() + So(err, ShouldBeNil) + + Convey("Should have 1 top level agg and one child agg", func() { + aggs := 
sr.Aggs + So(aggs, ShouldHaveLength, 1) + + topAgg := aggs[0] + So(topAgg.Key, ShouldEqual, "1") + So(topAgg.Aggregation.Type, ShouldEqual, "terms") + So(topAgg.Aggregation.Aggs, ShouldHaveLength, 1) + + childAgg := aggs[0].Aggregation.Aggs[0] + So(childAgg.Key, ShouldEqual, "2") + So(childAgg.Aggregation.Type, ShouldEqual, "date_histogram") + }) + + Convey("When marshal to JSON should generate correct json", func() { + body, err := json.Marshal(sr) + So(err, ShouldBeNil) + json, err := simplejson.NewJson(body) + So(err, ShouldBeNil) + + So(json.Get("aggs").MustMap(), ShouldHaveLength, 1) + firstLevelAgg := json.GetPath("aggs", "1") + secondLevelAgg := firstLevelAgg.GetPath("aggs", "2") + So(firstLevelAgg.GetPath("terms", "field").MustString(), ShouldEqual, "@hostname") + So(secondLevelAgg.GetPath("date_histogram", "field").MustString(), ShouldEqual, "@timestamp") + }) + }) + }) + + Convey("and adding two top level aggs with child agg", func() { + aggBuilder := b.Agg() + aggBuilder.Histogram("1", "@hostname", func(a *HistogramAgg, ib AggBuilder) { + ib.DateHistogram("2", "@timestamp", nil) + }) + aggBuilder.Filters("3", func(a *FiltersAggregation, ib AggBuilder) { + ib.Terms("4", "@test", nil) + }) + + Convey("When building search request", func() { + sr, err := b.Build() + So(err, ShouldBeNil) + + Convey("Should have 2 top level aggs with one child agg each", func() { + aggs := sr.Aggs + So(aggs, ShouldHaveLength, 2) + + topAggOne := aggs[0] + So(topAggOne.Key, ShouldEqual, "1") + So(topAggOne.Aggregation.Type, ShouldEqual, "histogram") + So(topAggOne.Aggregation.Aggs, ShouldHaveLength, 1) + + topAggOnechildAgg := topAggOne.Aggregation.Aggs[0] + So(topAggOnechildAgg.Key, ShouldEqual, "2") + So(topAggOnechildAgg.Aggregation.Type, ShouldEqual, "date_histogram") + + topAggTwo := aggs[1] + So(topAggTwo.Key, ShouldEqual, "3") + So(topAggTwo.Aggregation.Type, ShouldEqual, "filters") + So(topAggTwo.Aggregation.Aggs, ShouldHaveLength, 1) + + topAggTwochildAgg := topAggTwo.Aggregation.Aggs[0] + So(topAggTwochildAgg.Key, ShouldEqual, "4") + So(topAggTwochildAgg.Aggregation.Type, ShouldEqual, "terms") + }) + + Convey("When marshal to JSON should generate correct json", func() { + body, err := json.Marshal(sr) + So(err, ShouldBeNil) + json, err := simplejson.NewJson(body) + So(err, ShouldBeNil) + + topAggOne := json.GetPath("aggs", "1") + So(topAggOne.GetPath("histogram", "field").MustString(), ShouldEqual, "@hostname") + topAggOnechildAgg := topAggOne.GetPath("aggs", "2") + So(topAggOnechildAgg.GetPath("date_histogram", "field").MustString(), ShouldEqual, "@timestamp") + + topAggTwo := json.GetPath("aggs", "3") + topAggTwochildAgg := topAggTwo.GetPath("aggs", "4") + So(topAggTwo.GetPath("filters").MustArray(), ShouldHaveLength, 0) + So(topAggTwochildAgg.GetPath("terms", "field").MustString(), ShouldEqual, "@test") + }) + }) + }) + + Convey("and adding top level agg with child agg with child agg", func() { + aggBuilder := b.Agg() + aggBuilder.Terms("1", "@hostname", func(a *TermsAggregation, ib AggBuilder) { + ib.Terms("2", "@app", func(a *TermsAggregation, ib AggBuilder) { + ib.DateHistogram("3", "@timestamp", nil) + }) + }) + + Convey("When building search request", func() { + sr, err := b.Build() + So(err, ShouldBeNil) + + Convey("Should have 1 top level agg with one child having a child", func() { + aggs := sr.Aggs + So(aggs, ShouldHaveLength, 1) + + topAgg := aggs[0] + So(topAgg.Key, ShouldEqual, "1") + So(topAgg.Aggregation.Type, ShouldEqual, "terms") + So(topAgg.Aggregation.Aggs, 
ShouldHaveLength, 1) + + childAgg := topAgg.Aggregation.Aggs[0] + So(childAgg.Key, ShouldEqual, "2") + So(childAgg.Aggregation.Type, ShouldEqual, "terms") + + childChildAgg := childAgg.Aggregation.Aggs[0] + So(childChildAgg.Key, ShouldEqual, "3") + So(childChildAgg.Aggregation.Type, ShouldEqual, "date_histogram") + }) + + Convey("When marshal to JSON should generate correct json", func() { + body, err := json.Marshal(sr) + So(err, ShouldBeNil) + json, err := simplejson.NewJson(body) + So(err, ShouldBeNil) + + topAgg := json.GetPath("aggs", "1") + So(topAgg.GetPath("terms", "field").MustString(), ShouldEqual, "@hostname") + + childAgg := topAgg.GetPath("aggs", "2") + So(childAgg.GetPath("terms", "field").MustString(), ShouldEqual, "@app") + + childChildAgg := childAgg.GetPath("aggs", "3") + So(childChildAgg.GetPath("date_histogram", "field").MustString(), ShouldEqual, "@timestamp") + }) + }) + }) + + Convey("and adding bucket and metric aggs", func() { + aggBuilder := b.Agg() + aggBuilder.Terms("1", "@hostname", func(a *TermsAggregation, ib AggBuilder) { + ib.Terms("2", "@app", func(a *TermsAggregation, ib AggBuilder) { + ib.Metric("4", "avg", "@value", nil) + ib.DateHistogram("3", "@timestamp", func(a *DateHistogramAgg, ib AggBuilder) { + ib.Metric("4", "avg", "@value", nil) + ib.Metric("5", "max", "@value", nil) + }) + }) + }) + + Convey("When building search request", func() { + sr, err := b.Build() + So(err, ShouldBeNil) + + Convey("Should have 1 top level agg with one child having a child", func() { + aggs := sr.Aggs + So(aggs, ShouldHaveLength, 1) + + topAgg := aggs[0] + So(topAgg.Key, ShouldEqual, "1") + So(topAgg.Aggregation.Type, ShouldEqual, "terms") + So(topAgg.Aggregation.Aggs, ShouldHaveLength, 1) + + childAgg := topAgg.Aggregation.Aggs[0] + So(childAgg.Key, ShouldEqual, "2") + So(childAgg.Aggregation.Type, ShouldEqual, "terms") + + childChildOneAgg := childAgg.Aggregation.Aggs[0] + So(childChildOneAgg.Key, ShouldEqual, "4") + So(childChildOneAgg.Aggregation.Type, ShouldEqual, "avg") + + childChildTwoAgg := childAgg.Aggregation.Aggs[1] + So(childChildTwoAgg.Key, ShouldEqual, "3") + So(childChildTwoAgg.Aggregation.Type, ShouldEqual, "date_histogram") + + childChildTwoChildOneAgg := childChildTwoAgg.Aggregation.Aggs[0] + So(childChildTwoChildOneAgg.Key, ShouldEqual, "4") + So(childChildTwoChildOneAgg.Aggregation.Type, ShouldEqual, "avg") + + childChildTwoChildTwoAgg := childChildTwoAgg.Aggregation.Aggs[1] + So(childChildTwoChildTwoAgg.Key, ShouldEqual, "5") + So(childChildTwoChildTwoAgg.Aggregation.Type, ShouldEqual, "max") + }) + + Convey("When marshal to JSON should generate correct json", func() { + body, err := json.Marshal(sr) + So(err, ShouldBeNil) + json, err := simplejson.NewJson(body) + So(err, ShouldBeNil) + + termsAgg := json.GetPath("aggs", "1") + So(termsAgg.GetPath("terms", "field").MustString(), ShouldEqual, "@hostname") + + termsAggTwo := termsAgg.GetPath("aggs", "2") + So(termsAggTwo.GetPath("terms", "field").MustString(), ShouldEqual, "@app") + + termsAggTwoAvg := termsAggTwo.GetPath("aggs", "4") + So(termsAggTwoAvg.GetPath("avg", "field").MustString(), ShouldEqual, "@value") + + dateHistAgg := termsAggTwo.GetPath("aggs", "3") + So(dateHistAgg.GetPath("date_histogram", "field").MustString(), ShouldEqual, "@timestamp") + + avgAgg := dateHistAgg.GetPath("aggs", "4") + So(avgAgg.GetPath("avg", "field").MustString(), ShouldEqual, "@value") + + maxAgg := dateHistAgg.GetPath("aggs", "5") + So(maxAgg.GetPath("max", "field").MustString(), ShouldEqual, "@value") + }) + 
}) + }) + }) + + Convey("Given new search request builder for es version 2", func() { + b := NewSearchRequestBuilder(2, tsdb.Interval{Value: 15 * time.Second, Text: "15s"}) + + Convey("When adding doc value field", func() { + b.AddDocValueField(timeField) + + Convey("should set correct props", func() { + fields, ok := b.customProps["fields"].([]string) + So(ok, ShouldBeTrue) + So(fields, ShouldHaveLength, 2) + So(fields[0], ShouldEqual, "*") + So(fields[1], ShouldEqual, "_source") + + scriptFields, ok := b.customProps["script_fields"].(map[string]interface{}) + So(ok, ShouldBeTrue) + So(scriptFields, ShouldHaveLength, 0) + + fieldDataFields, ok := b.customProps["fielddata_fields"].([]string) + So(ok, ShouldBeTrue) + So(fieldDataFields, ShouldHaveLength, 1) + So(fieldDataFields[0], ShouldEqual, timeField) + }) + + Convey("When building search request", func() { + sr, err := b.Build() + So(err, ShouldBeNil) + + Convey("When marshal to JSON should generate correct json", func() { + body, err := json.Marshal(sr) + So(err, ShouldBeNil) + json, err := simplejson.NewJson(body) + So(err, ShouldBeNil) + + scriptFields, err := json.Get("script_fields").Map() + So(err, ShouldBeNil) + So(scriptFields, ShouldHaveLength, 0) + + fields, err := json.Get("fields").StringArray() + So(err, ShouldBeNil) + So(fields, ShouldHaveLength, 2) + So(fields[0], ShouldEqual, "*") + So(fields[1], ShouldEqual, "_source") + + fieldDataFields, err := json.Get("fielddata_fields").StringArray() + So(err, ShouldBeNil) + So(fieldDataFields, ShouldHaveLength, 1) + So(fieldDataFields[0], ShouldEqual, timeField) + }) + }) + }) + }) + }) +} + +func TestMultiSearchRequest(t *testing.T) { + Convey("Test elasticsearch multi search request", t, func() { + Convey("Given new multi search request builder", func() { + b := NewMultiSearchRequestBuilder(0) + + Convey("When adding one search request", func() { + b.Search(tsdb.Interval{Value: 15 * time.Second, Text: "15s"}) + + Convey("When building search request should contain one search request", func() { + mr, err := b.Build() + So(err, ShouldBeNil) + So(mr.Requests, ShouldHaveLength, 1) + }) + }) + + Convey("When adding two search requests", func() { + b.Search(tsdb.Interval{Value: 15 * time.Second, Text: "15s"}) + b.Search(tsdb.Interval{Value: 15 * time.Second, Text: "15s"}) + + Convey("When building search request should contain two search requests", func() { + mr, err := b.Build() + So(err, ShouldBeNil) + So(mr.Requests, ShouldHaveLength, 2) + }) + }) + }) + }) +} diff --git a/pkg/tsdb/elasticsearch/elasticsearch.go b/pkg/tsdb/elasticsearch/elasticsearch.go new file mode 100644 index 00000000000..857b847f0f9 --- /dev/null +++ b/pkg/tsdb/elasticsearch/elasticsearch.go @@ -0,0 +1,45 @@ +package elasticsearch + +import ( + "context" + "fmt" + + "github.com/grafana/grafana/pkg/log" + "github.com/grafana/grafana/pkg/models" + "github.com/grafana/grafana/pkg/tsdb" + "github.com/grafana/grafana/pkg/tsdb/elasticsearch/client" +) + +// ElasticsearchExecutor represents a handler for handling elasticsearch datasource request +type ElasticsearchExecutor struct{} + +var ( + glog log.Logger + intervalCalculator tsdb.IntervalCalculator +) + +// NewElasticsearchExecutor creates a new elasticsearch executor +func NewElasticsearchExecutor(dsInfo *models.DataSource) (tsdb.TsdbQueryEndpoint, error) { + return &ElasticsearchExecutor{}, nil +} + +func init() { + glog = log.New("tsdb.elasticsearch") + intervalCalculator = tsdb.NewIntervalCalculator(nil) + tsdb.RegisterTsdbQueryEndpoint("elasticsearch", 
NewElasticsearchExecutor)
+}
+
+// Query handles an elasticsearch datasource request
+func (e *ElasticsearchExecutor) Query(ctx context.Context, dsInfo *models.DataSource, tsdbQuery *tsdb.TsdbQuery) (*tsdb.Response, error) {
+	if len(tsdbQuery.Queries) == 0 {
+		return nil, fmt.Errorf("query contains no queries")
+	}
+
+	client, err := es.NewClient(ctx, dsInfo, tsdbQuery.TimeRange)
+	if err != nil {
+		return nil, err
+	}
+
+	query := newTimeSeriesQuery(client, tsdbQuery, intervalCalculator)
+	return query.execute()
+}
diff --git a/pkg/tsdb/elasticsearch/models.go b/pkg/tsdb/elasticsearch/models.go
new file mode 100644
index 00000000000..b3fdee95b91
--- /dev/null
+++ b/pkg/tsdb/elasticsearch/models.go
@@ -0,0 +1,77 @@
+package elasticsearch
+
+import (
+	"github.com/grafana/grafana/pkg/components/simplejson"
+)
+
+// Query represents the time series query model of the datasource
+type Query struct {
+	TimeField  string       `json:"timeField"`
+	RawQuery   string       `json:"query"`
+	BucketAggs []*BucketAgg `json:"bucketAggs"`
+	Metrics    []*MetricAgg `json:"metrics"`
+	Alias      string       `json:"alias"`
+	Interval   string
+	RefID      string
+}
+
+// BucketAgg represents a bucket aggregation of the time series query model of the datasource
+type BucketAgg struct {
+	Field    string           `json:"field"`
+	ID       string           `json:"id"`
+	Settings *simplejson.Json `json:"settings"`
+	Type     string           `json:"type"`
+}
+
+// MetricAgg represents a metric aggregation of the time series query model of the datasource
+type MetricAgg struct {
+	Field             string           `json:"field"`
+	Hide              bool             `json:"hide"`
+	ID                string           `json:"id"`
+	PipelineAggregate string           `json:"pipelineAgg"`
+	Settings          *simplejson.Json `json:"settings"`
+	Meta              *simplejson.Json `json:"meta"`
+	Type              string           `json:"type"`
+}
+
+var metricAggType = map[string]string{
+	"count":          "Count",
+	"avg":            "Average",
+	"sum":            "Sum",
+	"max":            "Max",
+	"min":            "Min",
+	"extended_stats": "Extended Stats",
+	"percentiles":    "Percentiles",
+	"cardinality":    "Unique Count",
+	"moving_avg":     "Moving Average",
+	"derivative":     "Derivative",
+	"raw_document":   "Raw Document",
+}
+
+var extendedStats = map[string]string{
+	"avg":                        "Avg",
+	"min":                        "Min",
+	"max":                        "Max",
+	"sum":                        "Sum",
+	"count":                      "Count",
+	"std_deviation":              "Std Dev",
+	"std_deviation_bounds_upper": "Std Dev Upper",
+	"std_deviation_bounds_lower": "Std Dev Lower",
+}
+
+var pipelineAggType = map[string]string{
+	"moving_avg": "moving_avg",
+	"derivative": "derivative",
+}
+
+func isPipelineAgg(metricType string) bool {
+	if _, ok := pipelineAggType[metricType]; ok {
+		return true
+	}
+	return false
+}
+
+func describeMetric(metricType, field string) string {
+	text := metricAggType[metricType]
+	return text + " " + field
+}
diff --git a/pkg/tsdb/elasticsearch/response_parser.go b/pkg/tsdb/elasticsearch/response_parser.go
new file mode 100644
index 00000000000..7bdab60389c
--- /dev/null
+++ b/pkg/tsdb/elasticsearch/response_parser.go
@@ -0,0 +1,537 @@
+package elasticsearch
+
+import (
+	"errors"
+	"regexp"
+	"sort"
+	"strconv"
+	"strings"
+
+	"github.com/grafana/grafana/pkg/components/null"
+	"github.com/grafana/grafana/pkg/components/simplejson"
+	"github.com/grafana/grafana/pkg/tsdb"
+	"github.com/grafana/grafana/pkg/tsdb/elasticsearch/client"
+)
+
+type responseParser struct {
+	Responses []*es.SearchResponse
+	Targets   []*Query
+}
+
+var newResponseParser = func(responses []*es.SearchResponse, targets []*Query) *responseParser {
+	return &responseParser{
+		Responses: responses,
+		Targets:   targets,
+	}
+}
+
+func (rp *responseParser) getTimeSeries()
(*tsdb.Response, error) { + result := &tsdb.Response{} + result.Results = make(map[string]*tsdb.QueryResult) + + if rp.Responses == nil { + return result, nil + } + + for i, res := range rp.Responses { + target := rp.Targets[i] + + if res.Error != nil { + result.Results[target.RefID] = getErrorFromElasticResponse(res) + continue + } + + queryRes := tsdb.NewQueryResult() + props := make(map[string]string) + table := tsdb.Table{ + Columns: make([]tsdb.TableColumn, 0), + Rows: make([]tsdb.RowValues, 0), + } + err := rp.processBuckets(res.Aggregations, target, &queryRes.Series, &table, props, 0) + if err != nil { + return nil, err + } + rp.nameSeries(&queryRes.Series, target) + rp.trimDatapoints(&queryRes.Series, target) + + if len(table.Rows) > 0 { + queryRes.Tables = append(queryRes.Tables, &table) + } + + result.Results[target.RefID] = queryRes + } + return result, nil +} + +func (rp *responseParser) processBuckets(aggs map[string]interface{}, target *Query, series *tsdb.TimeSeriesSlice, table *tsdb.Table, props map[string]string, depth int) error { + var err error + maxDepth := len(target.BucketAggs) - 1 + + aggIDs := make([]string, 0) + for k := range aggs { + aggIDs = append(aggIDs, k) + } + sort.Strings(aggIDs) + for _, aggID := range aggIDs { + v := aggs[aggID] + aggDef, _ := findAgg(target, aggID) + esAgg := simplejson.NewFromAny(v) + if aggDef == nil { + continue + } + + if depth == maxDepth { + if aggDef.Type == "date_histogram" { + err = rp.processMetrics(esAgg, target, series, props) + } else { + err = rp.processAggregationDocs(esAgg, aggDef, target, table, props) + } + if err != nil { + return err + } + } else { + for _, b := range esAgg.Get("buckets").MustArray() { + bucket := simplejson.NewFromAny(b) + newProps := make(map[string]string, 0) + + for k, v := range props { + newProps[k] = v + } + + if key, err := bucket.Get("key").String(); err == nil { + newProps[aggDef.Field] = key + } else if key, err := bucket.Get("key").Int64(); err == nil { + newProps[aggDef.Field] = strconv.FormatInt(key, 10) + } + + if key, err := bucket.Get("key_as_string").String(); err == nil { + newProps[aggDef.Field] = key + } + err = rp.processBuckets(bucket.MustMap(), target, series, table, newProps, depth+1) + if err != nil { + return err + } + } + + buckets := esAgg.Get("buckets").MustMap() + bucketKeys := make([]string, 0) + for k := range buckets { + bucketKeys = append(bucketKeys, k) + } + sort.Strings(bucketKeys) + + for _, bucketKey := range bucketKeys { + bucket := simplejson.NewFromAny(buckets[bucketKey]) + newProps := make(map[string]string, 0) + + for k, v := range props { + newProps[k] = v + } + + newProps["filter"] = bucketKey + + err = rp.processBuckets(bucket.MustMap(), target, series, table, newProps, depth+1) + if err != nil { + return err + } + } + } + + } + return nil + +} + +func (rp *responseParser) processMetrics(esAgg *simplejson.Json, target *Query, series *tsdb.TimeSeriesSlice, props map[string]string) error { + for _, metric := range target.Metrics { + if metric.Hide { + continue + } + + switch metric.Type { + case "count": + newSeries := tsdb.TimeSeries{ + Tags: make(map[string]string), + } + + for _, v := range esAgg.Get("buckets").MustArray() { + bucket := simplejson.NewFromAny(v) + value := castToNullFloat(bucket.Get("doc_count")) + key := castToNullFloat(bucket.Get("key")) + newSeries.Points = append(newSeries.Points, tsdb.TimePoint{value, key}) + } + + for k, v := range props { + newSeries.Tags[k] = v + } + newSeries.Tags["metric"] = "count" + *series = 
append(*series, &newSeries) + + case "percentiles": + buckets := esAgg.Get("buckets").MustArray() + if len(buckets) == 0 { + break + } + + firstBucket := simplejson.NewFromAny(buckets[0]) + percentiles := firstBucket.GetPath(metric.ID, "values").MustMap() + + percentileKeys := make([]string, 0) + for k := range percentiles { + percentileKeys = append(percentileKeys, k) + } + sort.Strings(percentileKeys) + for _, percentileName := range percentileKeys { + newSeries := tsdb.TimeSeries{ + Tags: make(map[string]string), + } + for k, v := range props { + newSeries.Tags[k] = v + } + newSeries.Tags["metric"] = "p" + percentileName + newSeries.Tags["field"] = metric.Field + for _, v := range buckets { + bucket := simplejson.NewFromAny(v) + value := castToNullFloat(bucket.GetPath(metric.ID, "values", percentileName)) + key := castToNullFloat(bucket.Get("key")) + newSeries.Points = append(newSeries.Points, tsdb.TimePoint{value, key}) + } + *series = append(*series, &newSeries) + } + case "extended_stats": + buckets := esAgg.Get("buckets").MustArray() + + metaKeys := make([]string, 0) + meta := metric.Meta.MustMap() + for k := range meta { + metaKeys = append(metaKeys, k) + } + sort.Strings(metaKeys) + for _, statName := range metaKeys { + v := meta[statName] + if enabled, ok := v.(bool); !ok || !enabled { + continue + } + + newSeries := tsdb.TimeSeries{ + Tags: make(map[string]string), + } + for k, v := range props { + newSeries.Tags[k] = v + } + newSeries.Tags["metric"] = statName + newSeries.Tags["field"] = metric.Field + + for _, v := range buckets { + bucket := simplejson.NewFromAny(v) + key := castToNullFloat(bucket.Get("key")) + var value null.Float + if statName == "std_deviation_bounds_upper" { + value = castToNullFloat(bucket.GetPath(metric.ID, "std_deviation_bounds", "upper")) + } else if statName == "std_deviation_bounds_lower" { + value = castToNullFloat(bucket.GetPath(metric.ID, "std_deviation_bounds", "lower")) + } else { + value = castToNullFloat(bucket.GetPath(metric.ID, statName)) + } + newSeries.Points = append(newSeries.Points, tsdb.TimePoint{value, key}) + } + *series = append(*series, &newSeries) + } + default: + newSeries := tsdb.TimeSeries{ + Tags: make(map[string]string), + } + for k, v := range props { + newSeries.Tags[k] = v + } + + newSeries.Tags["metric"] = metric.Type + newSeries.Tags["field"] = metric.Field + for _, v := range esAgg.Get("buckets").MustArray() { + bucket := simplejson.NewFromAny(v) + key := castToNullFloat(bucket.Get("key")) + valueObj, err := bucket.Get(metric.ID).Map() + if err != nil { + continue + } + var value null.Float + if _, ok := valueObj["normalized_value"]; ok { + value = castToNullFloat(bucket.GetPath(metric.ID, "normalized_value")) + } else { + value = castToNullFloat(bucket.GetPath(metric.ID, "value")) + } + newSeries.Points = append(newSeries.Points, tsdb.TimePoint{value, key}) + } + *series = append(*series, &newSeries) + } + } + return nil +} + +func (rp *responseParser) processAggregationDocs(esAgg *simplejson.Json, aggDef *BucketAgg, target *Query, table *tsdb.Table, props map[string]string) error { + propKeys := make([]string, 0) + for k := range props { + propKeys = append(propKeys, k) + } + sort.Strings(propKeys) + + if len(table.Columns) == 0 { + for _, propKey := range propKeys { + table.Columns = append(table.Columns, tsdb.TableColumn{Text: propKey}) + } + table.Columns = append(table.Columns, tsdb.TableColumn{Text: aggDef.Field}) + } + + addMetricValue := func(values *tsdb.RowValues, metricName string, value null.Float) { + 
found := false
+		for _, c := range table.Columns {
+			if c.Text == metricName {
+				found = true
+				break
+			}
+		}
+		if !found {
+			table.Columns = append(table.Columns, tsdb.TableColumn{Text: metricName})
+		}
+		*values = append(*values, value)
+	}
+
+	for _, v := range esAgg.Get("buckets").MustArray() {
+		bucket := simplejson.NewFromAny(v)
+		values := make(tsdb.RowValues, 0)
+
+		for _, propKey := range propKeys {
+			values = append(values, props[propKey])
+		}
+
+		if key, err := bucket.Get("key").String(); err == nil {
+			values = append(values, key)
+		} else {
+			values = append(values, castToNullFloat(bucket.Get("key")))
+		}
+
+		for _, metric := range target.Metrics {
+			switch metric.Type {
+			case "count":
+				addMetricValue(&values, rp.getMetricName(metric.Type), castToNullFloat(bucket.Get("doc_count")))
+			case "extended_stats":
+				metaKeys := make([]string, 0)
+				meta := metric.Meta.MustMap()
+				for k := range meta {
+					metaKeys = append(metaKeys, k)
+				}
+				sort.Strings(metaKeys)
+				for _, statName := range metaKeys {
+					v := meta[statName]
+					if enabled, ok := v.(bool); !ok || !enabled {
+						continue
+					}
+
+					var value null.Float
+					if statName == "std_deviation_bounds_upper" {
+						value = castToNullFloat(bucket.GetPath(metric.ID, "std_deviation_bounds", "upper"))
+					} else if statName == "std_deviation_bounds_lower" {
+						value = castToNullFloat(bucket.GetPath(metric.ID, "std_deviation_bounds", "lower"))
+					} else {
+						value = castToNullFloat(bucket.GetPath(metric.ID, statName))
+					}
+
+					addMetricValue(&values, rp.getMetricName(metric.Type), value)
+					break
+				}
+			default:
+				metricName := rp.getMetricName(metric.Type)
+				otherMetrics := make([]*MetricAgg, 0)
+
+				for _, m := range target.Metrics {
+					if m.Type == metric.Type {
+						otherMetrics = append(otherMetrics, m)
+					}
+				}
+
+				if len(otherMetrics) > 1 {
+					metricName += " " + metric.Field
+				}
+
+				addMetricValue(&values, metricName, castToNullFloat(bucket.GetPath(metric.ID, "value")))
+			}
+		}
+
+		table.Rows = append(table.Rows, values)
+	}
+
+	return nil
+}
+
+func (rp *responseParser) trimDatapoints(series *tsdb.TimeSeriesSlice, target *Query) {
+	var histogram *BucketAgg
+	for _, bucketAgg := range target.BucketAggs {
+		if bucketAgg.Type == "date_histogram" {
+			histogram = bucketAgg
+			break
+		}
+	}
+
+	if histogram == nil {
+		return
+	}
+
+	trimEdges, err := histogram.Settings.Get("trimEdges").Int()
+	if err != nil {
+		return
+	}
+
+	for _, s := range *series {
+		if len(s.Points) > trimEdges*2 {
+			s.Points = s.Points[trimEdges : len(s.Points)-trimEdges]
+		}
+	}
+}
+
+func (rp *responseParser) nameSeries(seriesList *tsdb.TimeSeriesSlice, target *Query) {
+	set := make(map[string]string)
+	for _, v := range *seriesList {
+		if metricType, exists := v.Tags["metric"]; exists {
+			if _, ok := set[metricType]; !ok {
+				set[metricType] = ""
+			}
+		}
+	}
+	metricTypeCount := len(set)
+	for _, series := range *seriesList {
+		series.Name = rp.getSeriesName(series, target, metricTypeCount)
+	}
+}
+
+var aliasPatternRegex = regexp.MustCompile(`\{\{([\s\S]+?)\}\}`)
+
+func (rp *responseParser) getSeriesName(series *tsdb.TimeSeries, target *Query, metricTypeCount int) string {
+	metricType := series.Tags["metric"]
+	metricName := rp.getMetricName(metricType)
+	delete(series.Tags, "metric")
+
+	field := ""
+	if v, ok := series.Tags["field"]; ok {
+		field = v
+		delete(series.Tags, "field")
+	}
+
+	if target.Alias != "" {
+		seriesName := target.Alias
+
+		subMatches := aliasPatternRegex.FindAllStringSubmatch(target.Alias, -1)
+		for _, subMatch := range subMatches {
+			group := subMatch[0]
+
+			if len(subMatch) > 1 {
+				group = subMatch[1]
+			}
+
+			if strings.HasPrefix(group, "term ") {
+				seriesName = strings.Replace(seriesName, subMatch[0], series.Tags[group[5:]], 1)
+			}
+			if v, ok := series.Tags[group]; ok {
+				seriesName = strings.Replace(seriesName, subMatch[0], v, 1)
+			}
+			if group == "metric" {
+				seriesName = strings.Replace(seriesName, subMatch[0], metricName, 1)
+			}
+			if group == "field" {
+				seriesName = strings.Replace(seriesName, subMatch[0], field, 1)
+			}
+		}
+
+		return seriesName
+	}
+	// TODO: if field and pipelineAgg
+	if field != "" && isPipelineAgg(metricType) {
+		found := false
+		for _, metric := range target.Metrics {
+			if metric.ID == field {
+				metricName += " " + describeMetric(metric.Type, field)
+				found = true
+			}
+		}
+		if !found {
+			metricName = "Unset"
+		}
+	} else if field != "" {
+		metricName += " " + field
+	}
+
+	if len(series.Tags) == 0 {
+		return metricName
+	}
+
+	name := ""
+	for _, v := range series.Tags {
+		name += v + " "
+	}
+
+	if metricTypeCount == 1 {
+		return strings.TrimSpace(name)
+	}
+
+	return strings.TrimSpace(name) + " " + metricName
+}
+
+func (rp *responseParser) getMetricName(metric string) string {
+	if text, ok := metricAggType[metric]; ok {
+		return text
+	}
+
+	if text, ok := extendedStats[metric]; ok {
+		return text
+	}
+
+	return metric
+}
+
+func castToNullFloat(j *simplejson.Json) null.Float {
+	f, err := j.Float64()
+	if err == nil {
+		return null.FloatFrom(f)
+	}
+
+	if s, err := j.String(); err == nil {
+		if strings.ToLower(s) == "nan" {
+			return null.NewFloat(0, false)
+		}
+
+		if v, err := strconv.ParseFloat(s, 64); err == nil {
+			return null.FloatFromPtr(&v)
+		}
+	}
+
+	return null.NewFloat(0, false)
+}
+
+func findAgg(target *Query, aggID string) (*BucketAgg, error) {
+	for _, v := range target.BucketAggs {
+		if aggID == v.ID {
+			return v, nil
+		}
+	}
+	return nil, errors.New("could not find aggDef, aggID: " + aggID)
+}
+
+func getErrorFromElasticResponse(response *es.SearchResponse) *tsdb.QueryResult {
+	result := tsdb.NewQueryResult()
+	json := simplejson.NewFromAny(response.Error)
+	reason := json.Get("reason").MustString()
+	rootCauseReason := json.Get("root_cause").GetIndex(0).Get("reason").MustString()
+
+	if rootCauseReason != "" {
+		result.ErrorString = rootCauseReason
+	} else if reason != "" {
+		result.ErrorString = reason
+	} else {
+		result.ErrorString = "Unknown elasticsearch error response"
+	}
+
+	return result
+}
diff --git a/pkg/tsdb/elasticsearch/response_parser_test.go b/pkg/tsdb/elasticsearch/response_parser_test.go
new file mode 100644
index 00000000000..b00c14cf946
--- /dev/null
+++ b/pkg/tsdb/elasticsearch/response_parser_test.go
@@ -0,0 +1,880 @@
+package elasticsearch
+
+import (
+	"encoding/json"
+	"fmt"
+	"testing"
+	"time"
+
+	"github.com/grafana/grafana/pkg/components/null"
+	"github.com/grafana/grafana/pkg/components/simplejson"
+	"github.com/grafana/grafana/pkg/tsdb/elasticsearch/client"
+
+	"github.com/grafana/grafana/pkg/tsdb"
+	.
"github.com/smartystreets/goconvey/convey" +) + +func TestResponseParser(t *testing.T) { + Convey("Elasticsearch response parser test", t, func() { + Convey("Simple query and count", func() { + targets := map[string]string{ + "A": `{ + "timeField": "@timestamp", + "metrics": [{ "type": "count", "id": "1" }], + "bucketAggs": [{ "type": "date_histogram", "field": "@timestamp", "id": "2" }] + }`, + } + response := `{ + "responses": [ + { + "aggregations": { + "2": { + "buckets": [ + { + "doc_count": 10, + "key": 1000 + }, + { + "doc_count": 15, + "key": 2000 + } + ] + } + } + } + ] + }` + rp, err := newResponseParserForTest(targets, response) + So(err, ShouldBeNil) + result, err := rp.getTimeSeries() + So(err, ShouldBeNil) + So(result.Results, ShouldHaveLength, 1) + + queryRes := result.Results["A"] + So(queryRes, ShouldNotBeNil) + So(queryRes.Series, ShouldHaveLength, 1) + series := queryRes.Series[0] + So(series.Name, ShouldEqual, "Count") + So(series.Points, ShouldHaveLength, 2) + So(series.Points[0][0].Float64, ShouldEqual, 10) + So(series.Points[0][1].Float64, ShouldEqual, 1000) + So(series.Points[1][0].Float64, ShouldEqual, 15) + So(series.Points[1][1].Float64, ShouldEqual, 2000) + }) + + Convey("Simple query count & avg aggregation", func() { + targets := map[string]string{ + "A": `{ + "timeField": "@timestamp", + "metrics": [{ "type": "count", "id": "1" }, {"type": "avg", "field": "value", "id": "2" }], + "bucketAggs": [{ "type": "date_histogram", "field": "@timestamp", "id": "3" }] + }`, + } + response := `{ + "responses": [ + { + "aggregations": { + "3": { + "buckets": [ + { + "2": { "value": 88 }, + "doc_count": 10, + "key": 1000 + }, + { + "2": { "value": 99 }, + "doc_count": 15, + "key": 2000 + } + ] + } + } + } + ] + }` + rp, err := newResponseParserForTest(targets, response) + So(err, ShouldBeNil) + result, err := rp.getTimeSeries() + So(err, ShouldBeNil) + So(result.Results, ShouldHaveLength, 1) + + queryRes := result.Results["A"] + So(queryRes, ShouldNotBeNil) + So(queryRes.Series, ShouldHaveLength, 2) + seriesOne := queryRes.Series[0] + So(seriesOne.Name, ShouldEqual, "Count") + So(seriesOne.Points, ShouldHaveLength, 2) + So(seriesOne.Points[0][0].Float64, ShouldEqual, 10) + So(seriesOne.Points[0][1].Float64, ShouldEqual, 1000) + So(seriesOne.Points[1][0].Float64, ShouldEqual, 15) + So(seriesOne.Points[1][1].Float64, ShouldEqual, 2000) + + seriesTwo := queryRes.Series[1] + So(seriesTwo.Name, ShouldEqual, "Average value") + So(seriesTwo.Points, ShouldHaveLength, 2) + So(seriesTwo.Points[0][0].Float64, ShouldEqual, 88) + So(seriesTwo.Points[0][1].Float64, ShouldEqual, 1000) + So(seriesTwo.Points[1][0].Float64, ShouldEqual, 99) + So(seriesTwo.Points[1][1].Float64, ShouldEqual, 2000) + }) + + Convey("Single group by query one metric", func() { + targets := map[string]string{ + "A": `{ + "timeField": "@timestamp", + "metrics": [{ "type": "count", "id": "1" }], + "bucketAggs": [ + { "type": "terms", "field": "host", "id": "2" }, + { "type": "date_histogram", "field": "@timestamp", "id": "3" } + ] + }`, + } + response := `{ + "responses": [ + { + "aggregations": { + "2": { + "buckets": [ + { + "3": { + "buckets": [{ "doc_count": 1, "key": 1000 }, { "doc_count": 3, "key": 2000 }] + }, + "doc_count": 4, + "key": "server1" + }, + { + "3": { + "buckets": [{ "doc_count": 2, "key": 1000 }, { "doc_count": 8, "key": 2000 }] + }, + "doc_count": 10, + "key": "server2" + } + ] + } + } + } + ] + }` + rp, err := newResponseParserForTest(targets, response) + So(err, ShouldBeNil) + result, err := 
rp.getTimeSeries() + So(err, ShouldBeNil) + So(result.Results, ShouldHaveLength, 1) + + queryRes := result.Results["A"] + So(queryRes, ShouldNotBeNil) + So(queryRes.Series, ShouldHaveLength, 2) + seriesOne := queryRes.Series[0] + So(seriesOne.Name, ShouldEqual, "server1") + So(seriesOne.Points, ShouldHaveLength, 2) + So(seriesOne.Points[0][0].Float64, ShouldEqual, 1) + So(seriesOne.Points[0][1].Float64, ShouldEqual, 1000) + So(seriesOne.Points[1][0].Float64, ShouldEqual, 3) + So(seriesOne.Points[1][1].Float64, ShouldEqual, 2000) + + seriesTwo := queryRes.Series[1] + So(seriesTwo.Name, ShouldEqual, "server2") + So(seriesTwo.Points, ShouldHaveLength, 2) + So(seriesTwo.Points[0][0].Float64, ShouldEqual, 2) + So(seriesTwo.Points[0][1].Float64, ShouldEqual, 1000) + So(seriesTwo.Points[1][0].Float64, ShouldEqual, 8) + So(seriesTwo.Points[1][1].Float64, ShouldEqual, 2000) + }) + + Convey("Single group by query two metrics", func() { + targets := map[string]string{ + "A": `{ + "timeField": "@timestamp", + "metrics": [{ "type": "count", "id": "1" }, { "type": "avg", "field": "@value", "id": "4" }], + "bucketAggs": [ + { "type": "terms", "field": "host", "id": "2" }, + { "type": "date_histogram", "field": "@timestamp", "id": "3" } + ] + }`, + } + response := `{ + "responses": [ + { + "aggregations": { + "2": { + "buckets": [ + { + "3": { + "buckets": [ + { "4": { "value": 10 }, "doc_count": 1, "key": 1000 }, + { "4": { "value": 12 }, "doc_count": 3, "key": 2000 } + ] + }, + "doc_count": 4, + "key": "server1" + }, + { + "3": { + "buckets": [ + { "4": { "value": 20 }, "doc_count": 1, "key": 1000 }, + { "4": { "value": 32 }, "doc_count": 3, "key": 2000 } + ] + }, + "doc_count": 10, + "key": "server2" + } + ] + } + } + } + ] + }` + rp, err := newResponseParserForTest(targets, response) + So(err, ShouldBeNil) + result, err := rp.getTimeSeries() + So(err, ShouldBeNil) + So(result.Results, ShouldHaveLength, 1) + + queryRes := result.Results["A"] + So(queryRes, ShouldNotBeNil) + So(queryRes.Series, ShouldHaveLength, 4) + seriesOne := queryRes.Series[0] + So(seriesOne.Name, ShouldEqual, "server1 Count") + So(seriesOne.Points, ShouldHaveLength, 2) + So(seriesOne.Points[0][0].Float64, ShouldEqual, 1) + So(seriesOne.Points[0][1].Float64, ShouldEqual, 1000) + So(seriesOne.Points[1][0].Float64, ShouldEqual, 3) + So(seriesOne.Points[1][1].Float64, ShouldEqual, 2000) + + seriesTwo := queryRes.Series[1] + So(seriesTwo.Name, ShouldEqual, "server1 Average @value") + So(seriesTwo.Points, ShouldHaveLength, 2) + So(seriesTwo.Points[0][0].Float64, ShouldEqual, 10) + So(seriesTwo.Points[0][1].Float64, ShouldEqual, 1000) + So(seriesTwo.Points[1][0].Float64, ShouldEqual, 12) + So(seriesTwo.Points[1][1].Float64, ShouldEqual, 2000) + + seriesThree := queryRes.Series[2] + So(seriesThree.Name, ShouldEqual, "server2 Count") + So(seriesThree.Points, ShouldHaveLength, 2) + So(seriesThree.Points[0][0].Float64, ShouldEqual, 1) + So(seriesThree.Points[0][1].Float64, ShouldEqual, 1000) + So(seriesThree.Points[1][0].Float64, ShouldEqual, 3) + So(seriesThree.Points[1][1].Float64, ShouldEqual, 2000) + + seriesFour := queryRes.Series[3] + So(seriesFour.Name, ShouldEqual, "server2 Average @value") + So(seriesFour.Points, ShouldHaveLength, 2) + So(seriesFour.Points[0][0].Float64, ShouldEqual, 20) + So(seriesFour.Points[0][1].Float64, ShouldEqual, 1000) + So(seriesFour.Points[1][0].Float64, ShouldEqual, 32) + So(seriesFour.Points[1][1].Float64, ShouldEqual, 2000) + }) + + Convey("With percentiles", func() { + targets := map[string]string{ + 
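				// each requested percentile ("75", "90") is parsed into its own series, named p75 and p90 in the assertions below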
"A": `{ + "timeField": "@timestamp", + "metrics": [{ "type": "percentiles", "settings": { "percents": [75, 90] }, "id": "1" }], + "bucketAggs": [{ "type": "date_histogram", "field": "@timestamp", "id": "3" }] + }`, + } + response := `{ + "responses": [ + { + "aggregations": { + "3": { + "buckets": [ + { + "1": { "values": { "75": 3.3, "90": 5.5 } }, + "doc_count": 10, + "key": 1000 + }, + { + "1": { "values": { "75": 2.3, "90": 4.5 } }, + "doc_count": 15, + "key": 2000 + } + ] + } + } + } + ] + }` + rp, err := newResponseParserForTest(targets, response) + So(err, ShouldBeNil) + result, err := rp.getTimeSeries() + So(err, ShouldBeNil) + So(result.Results, ShouldHaveLength, 1) + + queryRes := result.Results["A"] + So(queryRes, ShouldNotBeNil) + So(queryRes.Series, ShouldHaveLength, 2) + seriesOne := queryRes.Series[0] + So(seriesOne.Name, ShouldEqual, "p75") + So(seriesOne.Points, ShouldHaveLength, 2) + So(seriesOne.Points[0][0].Float64, ShouldEqual, 3.3) + So(seriesOne.Points[0][1].Float64, ShouldEqual, 1000) + So(seriesOne.Points[1][0].Float64, ShouldEqual, 2.3) + So(seriesOne.Points[1][1].Float64, ShouldEqual, 2000) + + seriesTwo := queryRes.Series[1] + So(seriesTwo.Name, ShouldEqual, "p90") + So(seriesTwo.Points, ShouldHaveLength, 2) + So(seriesTwo.Points[0][0].Float64, ShouldEqual, 5.5) + So(seriesTwo.Points[0][1].Float64, ShouldEqual, 1000) + So(seriesTwo.Points[1][0].Float64, ShouldEqual, 4.5) + So(seriesTwo.Points[1][1].Float64, ShouldEqual, 2000) + }) + + Convey("With extended stats", func() { + targets := map[string]string{ + "A": `{ + "timeField": "@timestamp", + "metrics": [{ "type": "extended_stats", "meta": { "max": true, "std_deviation_bounds_upper": true, "std_deviation_bounds_lower": true }, "id": "1" }], + "bucketAggs": [ + { "type": "terms", "field": "host", "id": "3" }, + { "type": "date_histogram", "field": "@timestamp", "id": "4" } + ] + }`, + } + response := `{ + "responses": [ + { + "aggregations": { + "3": { + "buckets": [ + { + "key": "server1", + "4": { + "buckets": [ + { + "1": { + "max": 10.2, + "min": 5.5, + "std_deviation_bounds": { "upper": 3, "lower": -2 } + }, + "doc_count": 10, + "key": 1000 + } + ] + } + }, + { + "key": "server2", + "4": { + "buckets": [ + { + "1": { + "max": 15.5, + "min": 3.4, + "std_deviation_bounds": { "upper": 4, "lower": -1 } + }, + "doc_count": 10, + "key": 1000 + } + ] + } + } + ] + } + } + } + ] + }` + rp, err := newResponseParserForTest(targets, response) + So(err, ShouldBeNil) + result, err := rp.getTimeSeries() + So(err, ShouldBeNil) + So(result.Results, ShouldHaveLength, 1) + + queryRes := result.Results["A"] + So(queryRes, ShouldNotBeNil) + So(queryRes.Series, ShouldHaveLength, 6) + + seriesOne := queryRes.Series[0] + So(seriesOne.Name, ShouldEqual, "server1 Max") + So(seriesOne.Points, ShouldHaveLength, 1) + So(seriesOne.Points[0][0].Float64, ShouldEqual, 10.2) + So(seriesOne.Points[0][1].Float64, ShouldEqual, 1000) + + seriesTwo := queryRes.Series[1] + So(seriesTwo.Name, ShouldEqual, "server1 Std Dev Lower") + So(seriesTwo.Points, ShouldHaveLength, 1) + So(seriesTwo.Points[0][0].Float64, ShouldEqual, -2) + So(seriesTwo.Points[0][1].Float64, ShouldEqual, 1000) + + seriesThree := queryRes.Series[2] + So(seriesThree.Name, ShouldEqual, "server1 Std Dev Upper") + So(seriesThree.Points, ShouldHaveLength, 1) + So(seriesThree.Points[0][0].Float64, ShouldEqual, 3) + So(seriesThree.Points[0][1].Float64, ShouldEqual, 1000) + + seriesFour := queryRes.Series[3] + So(seriesFour.Name, ShouldEqual, "server2 Max") + So(seriesFour.Points, 
ShouldHaveLength, 1) + So(seriesFour.Points[0][0].Float64, ShouldEqual, 15.5) + So(seriesFour.Points[0][1].Float64, ShouldEqual, 1000) + + seriesFive := queryRes.Series[4] + So(seriesFive.Name, ShouldEqual, "server2 Std Dev Lower") + So(seriesFive.Points, ShouldHaveLength, 1) + So(seriesFive.Points[0][0].Float64, ShouldEqual, -1) + So(seriesFive.Points[0][1].Float64, ShouldEqual, 1000) + + seriesSix := queryRes.Series[5] + So(seriesSix.Name, ShouldEqual, "server2 Std Dev Upper") + So(seriesSix.Points, ShouldHaveLength, 1) + So(seriesSix.Points[0][0].Float64, ShouldEqual, 4) + So(seriesSix.Points[0][1].Float64, ShouldEqual, 1000) + }) + + Convey("Single group by with alias pattern", func() { + targets := map[string]string{ + "A": `{ + "timeField": "@timestamp", + "alias": "{{term @host}} {{metric}} and {{not_exist}} {{@host}}", + "metrics": [{ "type": "count", "id": "1" }], + "bucketAggs": [ + { "type": "terms", "field": "@host", "id": "2" }, + { "type": "date_histogram", "field": "@timestamp", "id": "3" } + ] + }`, + } + response := `{ + "responses": [ + { + "aggregations": { + "2": { + "buckets": [ + { + "3": { + "buckets": [{ "doc_count": 1, "key": 1000 }, { "doc_count": 3, "key": 2000 }] + }, + "doc_count": 4, + "key": "server1" + }, + { + "3": { + "buckets": [{ "doc_count": 2, "key": 1000 }, { "doc_count": 8, "key": 2000 }] + }, + "doc_count": 10, + "key": "server2" + }, + { + "3": { + "buckets": [{ "doc_count": 2, "key": 1000 }, { "doc_count": 8, "key": 2000 }] + }, + "doc_count": 10, + "key": 0 + } + ] + } + } + } + ] + }` + rp, err := newResponseParserForTest(targets, response) + So(err, ShouldBeNil) + result, err := rp.getTimeSeries() + So(err, ShouldBeNil) + So(result.Results, ShouldHaveLength, 1) + + queryRes := result.Results["A"] + So(queryRes, ShouldNotBeNil) + So(queryRes.Series, ShouldHaveLength, 3) + + seriesOne := queryRes.Series[0] + So(seriesOne.Name, ShouldEqual, "server1 Count and {{not_exist}} server1") + So(seriesOne.Points, ShouldHaveLength, 2) + So(seriesOne.Points[0][0].Float64, ShouldEqual, 1) + So(seriesOne.Points[0][1].Float64, ShouldEqual, 1000) + So(seriesOne.Points[1][0].Float64, ShouldEqual, 3) + So(seriesOne.Points[1][1].Float64, ShouldEqual, 2000) + + seriesTwo := queryRes.Series[1] + So(seriesTwo.Name, ShouldEqual, "server2 Count and {{not_exist}} server2") + So(seriesTwo.Points, ShouldHaveLength, 2) + So(seriesTwo.Points[0][0].Float64, ShouldEqual, 2) + So(seriesTwo.Points[0][1].Float64, ShouldEqual, 1000) + So(seriesTwo.Points[1][0].Float64, ShouldEqual, 8) + So(seriesTwo.Points[1][1].Float64, ShouldEqual, 2000) + + seriesThree := queryRes.Series[2] + So(seriesThree.Name, ShouldEqual, "0 Count and {{not_exist}} 0") + So(seriesThree.Points, ShouldHaveLength, 2) + So(seriesThree.Points[0][0].Float64, ShouldEqual, 2) + So(seriesThree.Points[0][1].Float64, ShouldEqual, 1000) + So(seriesThree.Points[1][0].Float64, ShouldEqual, 8) + So(seriesThree.Points[1][1].Float64, ShouldEqual, 2000) + }) + + Convey("Histogram response", func() { + targets := map[string]string{ + "A": `{ + "timeField": "@timestamp", + "metrics": [{ "type": "count", "id": "1" }], + "bucketAggs": [{ "type": "histogram", "field": "bytes", "id": "3" }] + }`, + } + response := `{ + "responses": [ + { + "aggregations": { + "3": { + "buckets": [{ "doc_count": 1, "key": 1000 }, { "doc_count": 3, "key": 2000 }, { "doc_count": 2, "key": 3000 }] + } + } + } + ] + }` + rp, err := newResponseParserForTest(targets, response) + So(err, ShouldBeNil) + result, err := rp.getTimeSeries() + So(err, 
ShouldBeNil)
+			So(result.Results, ShouldHaveLength, 1)
+
+			queryRes := result.Results["A"]
+			So(queryRes, ShouldNotBeNil)
+			So(queryRes.Tables, ShouldHaveLength, 1)
+
+			rows := queryRes.Tables[0].Rows
+			So(rows, ShouldHaveLength, 3)
+			cols := queryRes.Tables[0].Columns
+			So(cols, ShouldHaveLength, 2)
+
+			So(cols[0].Text, ShouldEqual, "bytes")
+			So(cols[1].Text, ShouldEqual, "Count")
+
+			So(rows[0][0].(null.Float).Float64, ShouldEqual, 1000)
+			So(rows[0][1].(null.Float).Float64, ShouldEqual, 1)
+			So(rows[1][0].(null.Float).Float64, ShouldEqual, 2000)
+			So(rows[1][1].(null.Float).Float64, ShouldEqual, 3)
+			So(rows[2][0].(null.Float).Float64, ShouldEqual, 3000)
+			So(rows[2][1].(null.Float).Float64, ShouldEqual, 2)
+		})
+
+		Convey("With two filters agg", func() {
+			targets := map[string]string{
+				"A": `{
+					"timeField": "@timestamp",
+					"metrics": [{ "type": "count", "id": "1" }],
+					"bucketAggs": [
+						{
+							"type": "filters",
+							"id": "2",
+							"settings": {
+								"filters": [{ "query": "@metric:cpu" }, { "query": "@metric:logins.count" }]
+							}
+						},
+						{ "type": "date_histogram", "field": "@timestamp", "id": "3" }
+					]
+				}`,
+			}
+			response := `{
+				"responses": [
+					{
+						"aggregations": {
+							"2": {
+								"buckets": {
+									"@metric:cpu": {
+										"3": {
+											"buckets": [{ "doc_count": 1, "key": 1000 }, { "doc_count": 3, "key": 2000 }]
+										}
+									},
+									"@metric:logins.count": {
+										"3": {
+											"buckets": [{ "doc_count": 2, "key": 1000 }, { "doc_count": 8, "key": 2000 }]
+										}
+									}
+								}
+							}
+						}
+					}
+				]
+			}`
+			rp, err := newResponseParserForTest(targets, response)
+			So(err, ShouldBeNil)
+			result, err := rp.getTimeSeries()
+			So(err, ShouldBeNil)
+			So(result.Results, ShouldHaveLength, 1)
+
+			queryRes := result.Results["A"]
+			So(queryRes, ShouldNotBeNil)
+			So(queryRes.Series, ShouldHaveLength, 2)
+
+			seriesOne := queryRes.Series[0]
+			So(seriesOne.Name, ShouldEqual, "@metric:cpu")
+			So(seriesOne.Points, ShouldHaveLength, 2)
+			So(seriesOne.Points[0][0].Float64, ShouldEqual, 1)
+			So(seriesOne.Points[0][1].Float64, ShouldEqual, 1000)
+			So(seriesOne.Points[1][0].Float64, ShouldEqual, 3)
+			So(seriesOne.Points[1][1].Float64, ShouldEqual, 2000)
+
+			seriesTwo := queryRes.Series[1]
+			So(seriesTwo.Name, ShouldEqual, "@metric:logins.count")
+			So(seriesTwo.Points, ShouldHaveLength, 2)
+			So(seriesTwo.Points[0][0].Float64, ShouldEqual, 2)
+			So(seriesTwo.Points[0][1].Float64, ShouldEqual, 1000)
+			So(seriesTwo.Points[1][0].Float64, ShouldEqual, 8)
+			So(seriesTwo.Points[1][1].Float64, ShouldEqual, 2000)
+		})
+
+		Convey("With drop first and last bucket (trimEdges)", func() {
+			targets := map[string]string{
+				"A": `{
+					"timeField": "@timestamp",
+					"metrics": [{ "type": "avg", "id": "1" }, { "type": "count" }],
+					"bucketAggs": [
+						{
+							"type": "date_histogram",
+							"field": "@timestamp",
+							"id": "2",
+							"settings": { "trimEdges": 1 }
+						}
+					]
+				}`,
+			}
+			response := `{
+				"responses": [
+					{
+						"aggregations": {
+							"2": {
+								"buckets": [
+									{
+										"1": { "value": 1000 },
+										"key": 1,
+										"doc_count": 369
+									},
+									{
+										"1": { "value": 2000 },
+										"key": 2,
+										"doc_count": 200
+									},
+									{
+										"1": { "value": 2000 },
+										"key": 3,
+										"doc_count": 200
+									}
+								]
+							}
+						}
+					}
+				]
+			}`
+			rp, err := newResponseParserForTest(targets, response)
+			So(err, ShouldBeNil)
+			result, err := rp.getTimeSeries()
+			So(err, ShouldBeNil)
+			So(result.Results, ShouldHaveLength, 1)
+
+			queryRes := result.Results["A"]
+			So(queryRes, ShouldNotBeNil)
+			So(queryRes.Series, ShouldHaveLength, 2)
+
+			seriesOne := queryRes.Series[0]
+			So(seriesOne.Name, ShouldEqual, "Average")
+			So(seriesOne.Points, ShouldHaveLength, 1)
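+			// trimEdges: 1 trims one bucket from each end, so only the middle bucket (key 2) remains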
So(seriesOne.Points[0][0].Float64, ShouldEqual, 2000) + So(seriesOne.Points[0][1].Float64, ShouldEqual, 2) + + seriesTwo := queryRes.Series[1] + So(seriesTwo.Name, ShouldEqual, "Count") + So(seriesTwo.Points, ShouldHaveLength, 1) + So(seriesTwo.Points[0][0].Float64, ShouldEqual, 200) + So(seriesTwo.Points[0][1].Float64, ShouldEqual, 2) + }) + + Convey("No group by time", func() { + targets := map[string]string{ + "A": `{ + "timeField": "@timestamp", + "metrics": [{ "type": "avg", "id": "1" }, { "type": "count" }], + "bucketAggs": [{ "type": "terms", "field": "host", "id": "2" }] + }`, + } + response := `{ + "responses": [ + { + "aggregations": { + "2": { + "buckets": [ + { + "1": { "value": 1000 }, + "key": "server-1", + "doc_count": 369 + }, + { + "1": { "value": 2000 }, + "key": "server-2", + "doc_count": 200 + } + ] + } + } + } + ] + }` + rp, err := newResponseParserForTest(targets, response) + So(err, ShouldBeNil) + result, err := rp.getTimeSeries() + So(err, ShouldBeNil) + So(result.Results, ShouldHaveLength, 1) + + queryRes := result.Results["A"] + So(queryRes, ShouldNotBeNil) + So(queryRes.Tables, ShouldHaveLength, 1) + + rows := queryRes.Tables[0].Rows + So(rows, ShouldHaveLength, 2) + cols := queryRes.Tables[0].Columns + So(cols, ShouldHaveLength, 3) + + So(cols[0].Text, ShouldEqual, "host") + So(cols[1].Text, ShouldEqual, "Average") + So(cols[2].Text, ShouldEqual, "Count") + + So(rows[0][0].(string), ShouldEqual, "server-1") + So(rows[0][1].(null.Float).Float64, ShouldEqual, 1000) + So(rows[0][2].(null.Float).Float64, ShouldEqual, 369) + So(rows[1][0].(string), ShouldEqual, "server-2") + So(rows[1][1].(null.Float).Float64, ShouldEqual, 2000) + So(rows[1][2].(null.Float).Float64, ShouldEqual, 200) + }) + + Convey("Multiple metrics of same type", func() { + targets := map[string]string{ + "A": `{ + "timeField": "@timestamp", + "metrics": [{ "type": "avg", "field": "test", "id": "1" }, { "type": "avg", "field": "test2", "id": "2" }], + "bucketAggs": [{ "type": "terms", "field": "host", "id": "2" }] + }`, + } + response := `{ + "responses": [ + { + "aggregations": { + "2": { + "buckets": [ + { + "1": { "value": 1000 }, + "2": { "value": 3000 }, + "key": "server-1", + "doc_count": 369 + } + ] + } + } + } + ] + }` + rp, err := newResponseParserForTest(targets, response) + So(err, ShouldBeNil) + result, err := rp.getTimeSeries() + So(err, ShouldBeNil) + So(result.Results, ShouldHaveLength, 1) + + queryRes := result.Results["A"] + So(queryRes, ShouldNotBeNil) + So(queryRes.Tables, ShouldHaveLength, 1) + + rows := queryRes.Tables[0].Rows + So(rows, ShouldHaveLength, 1) + cols := queryRes.Tables[0].Columns + So(cols, ShouldHaveLength, 3) + + So(cols[0].Text, ShouldEqual, "host") + So(cols[1].Text, ShouldEqual, "Average test") + So(cols[2].Text, ShouldEqual, "Average test2") + + So(rows[0][0].(string), ShouldEqual, "server-1") + So(rows[0][1].(null.Float).Float64, ShouldEqual, 1000) + So(rows[0][2].(null.Float).Float64, ShouldEqual, 3000) + }) + + // Convey("Raw documents query", func() { + // targets := map[string]string{ + // "A": `{ + // "timeField": "@timestamp", + // "metrics": [{ "type": "raw_document", "id": "1" }] + // }`, + // } + // response := `{ + // "responses": [ + // { + // "hits": { + // "total": 100, + // "hits": [ + // { + // "_id": "1", + // "_type": "type", + // "_index": "index", + // "_source": { "sourceProp": "asd" }, + // "fields": { "fieldProp": "field" } + // }, + // { + // "_source": { "sourceProp": "asd2" }, + // "fields": { "fieldProp": "field2" } + // } + // ] 
+ // } + // } + // ] + // }` + // rp, err := newResponseParserForTest(targets, response) + // So(err, ShouldBeNil) + // result, err := rp.getTimeSeries() + // So(err, ShouldBeNil) + // So(result.Results, ShouldHaveLength, 1) + + // queryRes := result.Results["A"] + // So(queryRes, ShouldNotBeNil) + // So(queryRes.Tables, ShouldHaveLength, 1) + + // rows := queryRes.Tables[0].Rows + // So(rows, ShouldHaveLength, 1) + // cols := queryRes.Tables[0].Columns + // So(cols, ShouldHaveLength, 3) + + // So(cols[0].Text, ShouldEqual, "host") + // So(cols[1].Text, ShouldEqual, "Average test") + // So(cols[2].Text, ShouldEqual, "Average test2") + + // So(rows[0][0].(string), ShouldEqual, "server-1") + // So(rows[0][1].(null.Float).Float64, ShouldEqual, 1000) + // So(rows[0][2].(null.Float).Float64, ShouldEqual, 3000) + // }) + }) +} + +func newResponseParserForTest(tsdbQueries map[string]string, responseBody string) (*responseParser, error) { + from := time.Date(2018, 5, 15, 17, 50, 0, 0, time.UTC) + to := time.Date(2018, 5, 15, 17, 55, 0, 0, time.UTC) + fromStr := fmt.Sprintf("%d", from.UnixNano()/int64(time.Millisecond)) + toStr := fmt.Sprintf("%d", to.UnixNano()/int64(time.Millisecond)) + tsdbQuery := &tsdb.TsdbQuery{ + Queries: []*tsdb.Query{}, + TimeRange: tsdb.NewTimeRange(fromStr, toStr), + } + + for refID, tsdbQueryBody := range tsdbQueries { + tsdbQueryJSON, err := simplejson.NewJson([]byte(tsdbQueryBody)) + if err != nil { + return nil, err + } + + tsdbQuery.Queries = append(tsdbQuery.Queries, &tsdb.Query{ + Model: tsdbQueryJSON, + RefId: refID, + }) + } + + var response es.MultiSearchResponse + err := json.Unmarshal([]byte(responseBody), &response) + if err != nil { + return nil, err + } + + tsQueryParser := newTimeSeriesQueryParser() + queries, err := tsQueryParser.parse(tsdbQuery) + if err != nil { + return nil, err + } + + return newResponseParser(response.Responses, queries), nil +} diff --git a/pkg/tsdb/elasticsearch/time_series_query.go b/pkg/tsdb/elasticsearch/time_series_query.go new file mode 100644 index 00000000000..c9bb05dd09a --- /dev/null +++ b/pkg/tsdb/elasticsearch/time_series_query.go @@ -0,0 +1,318 @@ +package elasticsearch + +import ( + "fmt" + "strconv" + + "github.com/grafana/grafana/pkg/components/simplejson" + "github.com/grafana/grafana/pkg/tsdb" + "github.com/grafana/grafana/pkg/tsdb/elasticsearch/client" +) + +type timeSeriesQuery struct { + client es.Client + tsdbQuery *tsdb.TsdbQuery + intervalCalculator tsdb.IntervalCalculator +} + +var newTimeSeriesQuery = func(client es.Client, tsdbQuery *tsdb.TsdbQuery, intervalCalculator tsdb.IntervalCalculator) *timeSeriesQuery { + return &timeSeriesQuery{ + client: client, + tsdbQuery: tsdbQuery, + intervalCalculator: intervalCalculator, + } +} + +func (e *timeSeriesQuery) execute() (*tsdb.Response, error) { + result := &tsdb.Response{} + result.Results = make(map[string]*tsdb.QueryResult) + + tsQueryParser := newTimeSeriesQueryParser() + queries, err := tsQueryParser.parse(e.tsdbQuery) + if err != nil { + return nil, err + } + + ms := e.client.MultiSearch() + + from := fmt.Sprintf("%d", e.tsdbQuery.TimeRange.GetFromAsMsEpoch()) + to := fmt.Sprintf("%d", e.tsdbQuery.TimeRange.GetToAsMsEpoch()) + + for _, q := range queries { + minInterval, err := e.client.GetMinInterval(q.Interval) + if err != nil { + return nil, err + } + interval := e.intervalCalculator.Calculate(e.tsdbQuery.TimeRange, minInterval) + + b := ms.Search(interval) + b.Size(0) + filters := b.Query().Bool().Filter() + 
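			// note the argument order below: the date range filter takes the upper bound (lte) before the lower bound (gte)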
+			filters.AddDateRangeFilter(e.client.GetTimeField(), to, from, es.DateFormatEpochMS)
+
+			if q.RawQuery != "" {
+				filters.AddQueryStringFilter(q.RawQuery, true)
+			}
+
+			if len(q.BucketAggs) == 0 {
+				if len(q.Metrics) == 0 || q.Metrics[0].Type != "raw_document" {
+					result.Results[q.RefID] = &tsdb.QueryResult{
+						RefId:       q.RefID,
+						Error:       fmt.Errorf("invalid query, missing metrics and aggregations"),
+						ErrorString: "invalid query, missing metrics and aggregations",
+					}
+					continue
+				}
+				metric := q.Metrics[0]
+				b.Size(metric.Settings.Get("size").MustInt(500))
+				b.SortDesc("@timestamp", "boolean")
+				b.AddDocValueField("@timestamp")
+				continue
+			}
+
+			aggBuilder := b.Agg()
+
+			// nest each bucket aggregation inside the previous one, in the order they appear in the query
+			for _, bucketAgg := range q.BucketAggs {
+				switch bucketAgg.Type {
+				case "date_histogram":
+					aggBuilder = addDateHistogramAgg(aggBuilder, bucketAgg, from, to)
+				case "histogram":
+					aggBuilder = addHistogramAgg(aggBuilder, bucketAgg)
+				case "filters":
+					aggBuilder = addFiltersAgg(aggBuilder, bucketAgg)
+				case "terms":
+					aggBuilder = addTermsAgg(aggBuilder, bucketAgg, q.Metrics)
+				case "geohash_grid":
+					aggBuilder = addGeoHashGridAgg(aggBuilder, bucketAgg)
+				}
+			}
+
+			for _, m := range q.Metrics {
+				if m.Type == "count" {
+					continue
+				}
+
+				if isPipelineAgg(m.Type) {
+					if _, err := strconv.Atoi(m.PipelineAggregate); err == nil {
+						aggBuilder.Pipeline(m.ID, m.Type, m.PipelineAggregate, func(a *es.PipelineAggregation) {
+							a.Settings = m.Settings.MustMap()
+						})
+					} else {
+						continue
+					}
+				} else {
+					aggBuilder.Metric(m.ID, m.Type, m.Field, func(a *es.MetricAggregation) {
+						a.Settings = m.Settings.MustMap()
+					})
+				}
+			}
+		}
+
+	req, err := ms.Build()
+	if err != nil {
+		return nil, err
+	}
+
+	res, err := e.client.ExecuteMultisearch(req)
+	if err != nil {
+		return nil, err
+	}
+
+	rp := newResponseParser(res.Responses, queries)
+	return rp.getTimeSeries()
+}
+
+func addDateHistogramAgg(aggBuilder es.AggBuilder, bucketAgg *BucketAgg, timeFrom, timeTo string) es.AggBuilder {
+	aggBuilder.DateHistogram(bucketAgg.ID, bucketAgg.Field, func(a *es.DateHistogramAgg, b es.AggBuilder) {
+		a.Interval = bucketAgg.Settings.Get("interval").MustString("auto")
+		a.MinDocCount = bucketAgg.Settings.Get("min_doc_count").MustInt(0)
+		a.ExtendedBounds = &es.ExtendedBounds{Min: timeFrom, Max: timeTo}
+		a.Format = bucketAgg.Settings.Get("format").MustString(es.DateFormatEpochMS)
+
+		if a.Interval == "auto" {
+			a.Interval = "$__interval"
+		}
+
+		if missing, err := bucketAgg.Settings.Get("missing").String(); err == nil {
+			a.Missing = &missing
+		}
+
+		aggBuilder = b
+	})
+
+	return aggBuilder
+}
+
+func addHistogramAgg(aggBuilder es.AggBuilder, bucketAgg *BucketAgg) es.AggBuilder {
+	aggBuilder.Histogram(bucketAgg.ID, bucketAgg.Field, func(a *es.HistogramAgg, b es.AggBuilder) {
+		a.Interval = bucketAgg.Settings.Get("interval").MustInt(1000)
+		a.MinDocCount = bucketAgg.Settings.Get("min_doc_count").MustInt(0)
+
+		if missing, err := bucketAgg.Settings.Get("missing").Int(); err == nil {
+			a.Missing = &missing
+		}
+
+		aggBuilder = b
+	})
+
+	return aggBuilder
+}
+
+func addTermsAgg(aggBuilder es.AggBuilder, bucketAgg *BucketAgg, metrics []*MetricAgg) es.AggBuilder {
+	aggBuilder.Terms(bucketAgg.ID, bucketAgg.Field, func(a *es.TermsAggregation, b es.AggBuilder) {
+		if size, err := bucketAgg.Settings.Get("size").Int(); err == nil {
+			a.Size = size
+		} else if size, err := bucketAgg.Settings.Get("size").String(); err == nil {
+			a.Size, err = strconv.Atoi(size)
+			if err != nil {
+				a.Size = 500
+			}
+		} else 
{ + a.Size = 500 + } + if minDocCount, err := bucketAgg.Settings.Get("min_doc_count").Int(); err == nil { + a.MinDocCount = &minDocCount + } + if missing, err := bucketAgg.Settings.Get("missing").String(); err == nil { + a.Missing = &missing + } + + if orderBy, err := bucketAgg.Settings.Get("orderBy").String(); err == nil { + a.Order[orderBy] = bucketAgg.Settings.Get("order").MustString("desc") + + if _, err := strconv.Atoi(orderBy); err == nil { + for _, m := range metrics { + if m.ID == orderBy { + b.Metric(m.ID, m.Type, m.Field, nil) + break + } + } + } + } + + aggBuilder = b + }) + + return aggBuilder +} + +func addFiltersAgg(aggBuilder es.AggBuilder, bucketAgg *BucketAgg) es.AggBuilder { + filters := make(map[string]interface{}) + for _, filter := range bucketAgg.Settings.Get("filters").MustArray() { + json := simplejson.NewFromAny(filter) + query := json.Get("query").MustString() + label := json.Get("label").MustString() + if label == "" { + label = query + } + filters[label] = &es.QueryStringFilter{Query: query, AnalyzeWildcard: true} + } + + if len(filters) > 0 { + aggBuilder.Filters(bucketAgg.ID, func(a *es.FiltersAggregation, b es.AggBuilder) { + a.Filters = filters + aggBuilder = b + }) + } + + return aggBuilder +} + +func addGeoHashGridAgg(aggBuilder es.AggBuilder, bucketAgg *BucketAgg) es.AggBuilder { + aggBuilder.GeoHashGrid(bucketAgg.ID, bucketAgg.Field, func(a *es.GeoHashGridAggregation, b es.AggBuilder) { + a.Precision = bucketAgg.Settings.Get("precision").MustInt(3) + aggBuilder = b + }) + + return aggBuilder +} + +type timeSeriesQueryParser struct{} + +func newTimeSeriesQueryParser() *timeSeriesQueryParser { + return &timeSeriesQueryParser{} +} + +func (p *timeSeriesQueryParser) parse(tsdbQuery *tsdb.TsdbQuery) ([]*Query, error) { + queries := make([]*Query, 0) + for _, q := range tsdbQuery.Queries { + model := q.Model + timeField, err := model.Get("timeField").String() + if err != nil { + return nil, err + } + rawQuery := model.Get("query").MustString() + bucketAggs, err := p.parseBucketAggs(model) + if err != nil { + return nil, err + } + metrics, err := p.parseMetrics(model) + if err != nil { + return nil, err + } + alias := model.Get("alias").MustString("") + interval := strconv.FormatInt(q.IntervalMs, 10) + "ms" + + queries = append(queries, &Query{ + TimeField: timeField, + RawQuery: rawQuery, + BucketAggs: bucketAggs, + Metrics: metrics, + Alias: alias, + Interval: interval, + RefID: q.RefId, + }) + } + + return queries, nil +} + +func (p *timeSeriesQueryParser) parseBucketAggs(model *simplejson.Json) ([]*BucketAgg, error) { + var err error + var result []*BucketAgg + for _, t := range model.Get("bucketAggs").MustArray() { + aggJSON := simplejson.NewFromAny(t) + agg := &BucketAgg{} + + agg.Type, err = aggJSON.Get("type").String() + if err != nil { + return nil, err + } + + agg.ID, err = aggJSON.Get("id").String() + if err != nil { + return nil, err + } + + agg.Field = aggJSON.Get("field").MustString() + agg.Settings = simplejson.NewFromAny(aggJSON.Get("settings").MustMap()) + + result = append(result, agg) + } + return result, nil +} + +func (p *timeSeriesQueryParser) parseMetrics(model *simplejson.Json) ([]*MetricAgg, error) { + var err error + var result []*MetricAgg + for _, t := range model.Get("metrics").MustArray() { + metricJSON := simplejson.NewFromAny(t) + metric := &MetricAgg{} + + metric.Field = metricJSON.Get("field").MustString() + metric.Hide = metricJSON.Get("hide").MustBool(false) + metric.ID = metricJSON.Get("id").MustString() + 
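		// pipelineAgg holds the ID of the metric that a pipeline aggregation (e.g. moving_avg, derivative) is applied to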
metric.PipelineAggregate = metricJSON.Get("pipelineAgg").MustString() + metric.Settings = simplejson.NewFromAny(metricJSON.Get("settings").MustMap()) + metric.Meta = simplejson.NewFromAny(metricJSON.Get("meta").MustMap()) + + metric.Type, err = metricJSON.Get("type").String() + if err != nil { + return nil, err + } + + result = append(result, metric) + } + return result, nil +} diff --git a/pkg/tsdb/elasticsearch/time_series_query_test.go b/pkg/tsdb/elasticsearch/time_series_query_test.go new file mode 100644 index 00000000000..49bf5f5bc75 --- /dev/null +++ b/pkg/tsdb/elasticsearch/time_series_query_test.go @@ -0,0 +1,604 @@ +package elasticsearch + +import ( + "fmt" + "testing" + "time" + + "github.com/grafana/grafana/pkg/tsdb/elasticsearch/client" + + "github.com/grafana/grafana/pkg/components/simplejson" + "github.com/grafana/grafana/pkg/tsdb" + . "github.com/smartystreets/goconvey/convey" +) + +func TestExecuteTimeSeriesQuery(t *testing.T) { + from := time.Date(2018, 5, 15, 17, 50, 0, 0, time.UTC) + to := time.Date(2018, 5, 15, 17, 55, 0, 0, time.UTC) + fromStr := fmt.Sprintf("%d", from.UnixNano()/int64(time.Millisecond)) + toStr := fmt.Sprintf("%d", to.UnixNano()/int64(time.Millisecond)) + + Convey("Test execute time series query", t, func() { + Convey("With defaults on es 2", func() { + c := newFakeClient(2) + _, err := executeTsdbQuery(c, `{ + "timeField": "@timestamp", + "bucketAggs": [{ "type": "date_histogram", "field": "@timestamp", "id": "2" }], + "metrics": [{"type": "count", "id": "0" }] + }`, from, to, 15*time.Second) + So(err, ShouldBeNil) + sr := c.multisearchRequests[0].Requests[0] + rangeFilter := sr.Query.Bool.Filters[0].(*es.RangeFilter) + So(rangeFilter.Key, ShouldEqual, c.timeField) + So(rangeFilter.Lte, ShouldEqual, toStr) + So(rangeFilter.Gte, ShouldEqual, fromStr) + So(rangeFilter.Format, ShouldEqual, es.DateFormatEpochMS) + So(sr.Aggs[0].Key, ShouldEqual, "2") + dateHistogramAgg := sr.Aggs[0].Aggregation.Aggregation.(*es.DateHistogramAgg) + So(dateHistogramAgg.Field, ShouldEqual, "@timestamp") + So(dateHistogramAgg.ExtendedBounds.Min, ShouldEqual, fromStr) + So(dateHistogramAgg.ExtendedBounds.Max, ShouldEqual, toStr) + }) + + Convey("With defaults on es 5", func() { + c := newFakeClient(5) + _, err := executeTsdbQuery(c, `{ + "timeField": "@timestamp", + "bucketAggs": [{ "type": "date_histogram", "field": "@timestamp", "id": "2" }], + "metrics": [{"type": "count", "id": "0" }] + }`, from, to, 15*time.Second) + So(err, ShouldBeNil) + sr := c.multisearchRequests[0].Requests[0] + So(sr.Query.Bool.Filters[0].(*es.RangeFilter).Key, ShouldEqual, c.timeField) + So(sr.Aggs[0].Key, ShouldEqual, "2") + So(sr.Aggs[0].Aggregation.Aggregation.(*es.DateHistogramAgg).ExtendedBounds.Min, ShouldEqual, fromStr) + So(sr.Aggs[0].Aggregation.Aggregation.(*es.DateHistogramAgg).ExtendedBounds.Max, ShouldEqual, toStr) + }) + + Convey("With multiple bucket aggs", func() { + c := newFakeClient(5) + _, err := executeTsdbQuery(c, `{ + "timeField": "@timestamp", + "bucketAggs": [ + { "type": "terms", "field": "@host", "id": "2" }, + { "type": "date_histogram", "field": "@timestamp", "id": "3" } + ], + "metrics": [{"type": "count", "id": "1" }] + }`, from, to, 15*time.Second) + So(err, ShouldBeNil) + sr := c.multisearchRequests[0].Requests[0] + firstLevel := sr.Aggs[0] + So(firstLevel.Key, ShouldEqual, "2") + So(firstLevel.Aggregation.Aggregation.(*es.TermsAggregation).Field, ShouldEqual, "@host") + secondLevel := firstLevel.Aggregation.Aggs[0] + So(secondLevel.Key, ShouldEqual, "3") + 
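			// the date_histogram is nested inside the terms agg, mirroring the order of bucketAggs in the query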
So(secondLevel.Aggregation.Aggregation.(*es.DateHistogramAgg).Field, ShouldEqual, "@timestamp") + }) + + Convey("With select field", func() { + c := newFakeClient(5) + _, err := executeTsdbQuery(c, `{ + "timeField": "@timestamp", + "bucketAggs": [ + { "type": "date_histogram", "field": "@timestamp", "id": "2" } + ], + "metrics": [{"type": "avg", "field": "@value", "id": "1" }] + }`, from, to, 15*time.Second) + So(err, ShouldBeNil) + sr := c.multisearchRequests[0].Requests[0] + firstLevel := sr.Aggs[0] + So(firstLevel.Key, ShouldEqual, "2") + So(firstLevel.Aggregation.Aggregation.(*es.DateHistogramAgg).Field, ShouldEqual, "@timestamp") + secondLevel := firstLevel.Aggregation.Aggs[0] + So(secondLevel.Key, ShouldEqual, "1") + So(secondLevel.Aggregation.Type, ShouldEqual, "avg") + So(secondLevel.Aggregation.Aggregation.(*es.MetricAggregation).Field, ShouldEqual, "@value") + }) + + Convey("With term agg and order by metric agg", func() { + c := newFakeClient(5) + _, err := executeTsdbQuery(c, `{ + "timeField": "@timestamp", + "bucketAggs": [ + { + "type": "terms", + "field": "@host", + "id": "2", + "settings": { "size": "5", "order": "asc", "orderBy": "5" } + }, + { "type": "date_histogram", "field": "@timestamp", "id": "3" } + ], + "metrics": [ + {"type": "count", "id": "1" }, + {"type": "avg", "field": "@value", "id": "5" } + ] + }`, from, to, 15*time.Second) + So(err, ShouldBeNil) + sr := c.multisearchRequests[0].Requests[0] + + avgAggOrderBy := sr.Aggs[0].Aggregation.Aggs[0] + So(avgAggOrderBy.Key, ShouldEqual, "5") + So(avgAggOrderBy.Aggregation.Type, ShouldEqual, "avg") + + avgAgg := sr.Aggs[0].Aggregation.Aggs[1].Aggregation.Aggs[0] + So(avgAgg.Key, ShouldEqual, "5") + So(avgAgg.Aggregation.Type, ShouldEqual, "avg") + }) + + Convey("With metric percentiles", func() { + c := newFakeClient(5) + _, err := executeTsdbQuery(c, `{ + "timeField": "@timestamp", + "bucketAggs": [ + { "type": "date_histogram", "field": "@timestamp", "id": "3" } + ], + "metrics": [ + { + "id": "1", + "type": "percentiles", + "field": "@load_time", + "settings": { + "percents": [ "1", "2", "3", "4" ] + } + } + ] + }`, from, to, 15*time.Second) + So(err, ShouldBeNil) + sr := c.multisearchRequests[0].Requests[0] + + percentilesAgg := sr.Aggs[0].Aggregation.Aggs[0] + So(percentilesAgg.Key, ShouldEqual, "1") + So(percentilesAgg.Aggregation.Type, ShouldEqual, "percentiles") + metricAgg := percentilesAgg.Aggregation.Aggregation.(*es.MetricAggregation) + percents := metricAgg.Settings["percents"].([]interface{}) + So(percents, ShouldHaveLength, 4) + So(percents[0], ShouldEqual, "1") + So(percents[1], ShouldEqual, "2") + So(percents[2], ShouldEqual, "3") + So(percents[3], ShouldEqual, "4") + }) + + Convey("With filters aggs on es 2", func() { + c := newFakeClient(2) + _, err := executeTsdbQuery(c, `{ + "timeField": "@timestamp", + "bucketAggs": [ + { + "id": "2", + "type": "filters", + "settings": { + "filters": [ { "query": "@metric:cpu" }, { "query": "@metric:logins.count" } ] + } + }, + { "type": "date_histogram", "field": "@timestamp", "id": "4" } + ], + "metrics": [{"type": "count", "id": "1" }] + }`, from, to, 15*time.Second) + So(err, ShouldBeNil) + sr := c.multisearchRequests[0].Requests[0] + + filtersAgg := sr.Aggs[0] + So(filtersAgg.Key, ShouldEqual, "2") + So(filtersAgg.Aggregation.Type, ShouldEqual, "filters") + fAgg := filtersAgg.Aggregation.Aggregation.(*es.FiltersAggregation) + So(fAgg.Filters["@metric:cpu"].(*es.QueryStringFilter).Query, ShouldEqual, "@metric:cpu") + 
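			// filters without an explicit label are keyed by their query string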
So(fAgg.Filters["@metric:logins.count"].(*es.QueryStringFilter).Query, ShouldEqual, "@metric:logins.count") + + dateHistogramAgg := sr.Aggs[0].Aggregation.Aggs[0] + So(dateHistogramAgg.Key, ShouldEqual, "4") + So(dateHistogramAgg.Aggregation.Aggregation.(*es.DateHistogramAgg).Field, ShouldEqual, "@timestamp") + }) + + Convey("With filters aggs on es 5", func() { + c := newFakeClient(5) + _, err := executeTsdbQuery(c, `{ + "timeField": "@timestamp", + "bucketAggs": [ + { + "id": "2", + "type": "filters", + "settings": { + "filters": [ { "query": "@metric:cpu" }, { "query": "@metric:logins.count" } ] + } + }, + { "type": "date_histogram", "field": "@timestamp", "id": "4" } + ], + "metrics": [{"type": "count", "id": "1" }] + }`, from, to, 15*time.Second) + So(err, ShouldBeNil) + sr := c.multisearchRequests[0].Requests[0] + + filtersAgg := sr.Aggs[0] + So(filtersAgg.Key, ShouldEqual, "2") + So(filtersAgg.Aggregation.Type, ShouldEqual, "filters") + fAgg := filtersAgg.Aggregation.Aggregation.(*es.FiltersAggregation) + So(fAgg.Filters["@metric:cpu"].(*es.QueryStringFilter).Query, ShouldEqual, "@metric:cpu") + So(fAgg.Filters["@metric:logins.count"].(*es.QueryStringFilter).Query, ShouldEqual, "@metric:logins.count") + + dateHistogramAgg := sr.Aggs[0].Aggregation.Aggs[0] + So(dateHistogramAgg.Key, ShouldEqual, "4") + So(dateHistogramAgg.Aggregation.Aggregation.(*es.DateHistogramAgg).Field, ShouldEqual, "@timestamp") + }) + + Convey("With raw document metric", func() { + c := newFakeClient(5) + _, err := executeTsdbQuery(c, `{ + "timeField": "@timestamp", + "bucketAggs": [], + "metrics": [{ "id": "1", "type": "raw_document", "settings": {} }] + }`, from, to, 15*time.Second) + So(err, ShouldBeNil) + sr := c.multisearchRequests[0].Requests[0] + + So(sr.Size, ShouldEqual, 500) + }) + + Convey("With raw document metric size set", func() { + c := newFakeClient(5) + _, err := executeTsdbQuery(c, `{ + "timeField": "@timestamp", + "bucketAggs": [], + "metrics": [{ "id": "1", "type": "raw_document", "settings": { "size": 1337 } }] + }`, from, to, 15*time.Second) + So(err, ShouldBeNil) + sr := c.multisearchRequests[0].Requests[0] + + So(sr.Size, ShouldEqual, 1337) + }) + + Convey("With date histogram agg", func() { + c := newFakeClient(5) + _, err := executeTsdbQuery(c, `{ + "timeField": "@timestamp", + "bucketAggs": [ + { + "id": "2", + "type": "date_histogram", + "field": "@timestamp", + "settings": { "interval": "auto", "min_doc_count": 2 } + } + ], + "metrics": [{"type": "count", "id": "1" }] + }`, from, to, 15*time.Second) + So(err, ShouldBeNil) + sr := c.multisearchRequests[0].Requests[0] + + firstLevel := sr.Aggs[0] + So(firstLevel.Key, ShouldEqual, "2") + So(firstLevel.Aggregation.Type, ShouldEqual, "date_histogram") + hAgg := firstLevel.Aggregation.Aggregation.(*es.DateHistogramAgg) + So(hAgg.Field, ShouldEqual, "@timestamp") + So(hAgg.Interval, ShouldEqual, "$__interval") + So(hAgg.MinDocCount, ShouldEqual, 2) + }) + + Convey("With histogram agg", func() { + c := newFakeClient(5) + _, err := executeTsdbQuery(c, `{ + "timeField": "@timestamp", + "bucketAggs": [ + { + "id": "3", + "type": "histogram", + "field": "bytes", + "settings": { "interval": 10, "min_doc_count": 2, "missing": 5 } + } + ], + "metrics": [{"type": "count", "id": "1" }] + }`, from, to, 15*time.Second) + So(err, ShouldBeNil) + sr := c.multisearchRequests[0].Requests[0] + + firstLevel := sr.Aggs[0] + So(firstLevel.Key, ShouldEqual, "3") + So(firstLevel.Aggregation.Type, ShouldEqual, "histogram") + hAgg := 
firstLevel.Aggregation.Aggregation.(*es.HistogramAgg) + So(hAgg.Field, ShouldEqual, "bytes") + So(hAgg.Interval, ShouldEqual, 10) + So(hAgg.MinDocCount, ShouldEqual, 2) + So(*hAgg.Missing, ShouldEqual, 5) + }) + + Convey("With geo hash grid agg", func() { + c := newFakeClient(5) + _, err := executeTsdbQuery(c, `{ + "timeField": "@timestamp", + "bucketAggs": [ + { + "id": "3", + "type": "geohash_grid", + "field": "@location", + "settings": { "precision": 3 } + } + ], + "metrics": [{"type": "count", "id": "1" }] + }`, from, to, 15*time.Second) + So(err, ShouldBeNil) + sr := c.multisearchRequests[0].Requests[0] + + firstLevel := sr.Aggs[0] + So(firstLevel.Key, ShouldEqual, "3") + So(firstLevel.Aggregation.Type, ShouldEqual, "geohash_grid") + ghGridAgg := firstLevel.Aggregation.Aggregation.(*es.GeoHashGridAggregation) + So(ghGridAgg.Field, ShouldEqual, "@location") + So(ghGridAgg.Precision, ShouldEqual, 3) + }) + + Convey("With moving average", func() { + c := newFakeClient(5) + _, err := executeTsdbQuery(c, `{ + "timeField": "@timestamp", + "bucketAggs": [ + { "type": "date_histogram", "field": "@timestamp", "id": "4" } + ], + "metrics": [ + { "id": "3", "type": "sum", "field": "@value" }, + { + "id": "2", + "type": "moving_avg", + "field": "3", + "pipelineAgg": "3" + } + ] + }`, from, to, 15*time.Second) + So(err, ShouldBeNil) + sr := c.multisearchRequests[0].Requests[0] + + firstLevel := sr.Aggs[0] + So(firstLevel.Key, ShouldEqual, "4") + So(firstLevel.Aggregation.Type, ShouldEqual, "date_histogram") + So(firstLevel.Aggregation.Aggs, ShouldHaveLength, 2) + + sumAgg := firstLevel.Aggregation.Aggs[0] + So(sumAgg.Key, ShouldEqual, "3") + So(sumAgg.Aggregation.Type, ShouldEqual, "sum") + mAgg := sumAgg.Aggregation.Aggregation.(*es.MetricAggregation) + So(mAgg.Field, ShouldEqual, "@value") + + movingAvgAgg := firstLevel.Aggregation.Aggs[1] + So(movingAvgAgg.Key, ShouldEqual, "2") + So(movingAvgAgg.Aggregation.Type, ShouldEqual, "moving_avg") + pl := movingAvgAgg.Aggregation.Aggregation.(*es.PipelineAggregation) + So(pl.BucketPath, ShouldEqual, "3") + }) + + Convey("With broken moving average", func() { + c := newFakeClient(5) + _, err := executeTsdbQuery(c, `{ + "timeField": "@timestamp", + "bucketAggs": [ + { "type": "date_histogram", "field": "@timestamp", "id": "5" } + ], + "metrics": [ + { "id": "3", "type": "sum", "field": "@value" }, + { + "id": "2", + "type": "moving_avg", + "pipelineAgg": "3" + }, + { + "id": "4", + "type": "moving_avg", + "pipelineAgg": "Metric to apply moving average" + } + ] + }`, from, to, 15*time.Second) + So(err, ShouldBeNil) + sr := c.multisearchRequests[0].Requests[0] + + firstLevel := sr.Aggs[0] + So(firstLevel.Key, ShouldEqual, "5") + So(firstLevel.Aggregation.Type, ShouldEqual, "date_histogram") + + So(firstLevel.Aggregation.Aggs, ShouldHaveLength, 2) + + movingAvgAgg := firstLevel.Aggregation.Aggs[1] + So(movingAvgAgg.Key, ShouldEqual, "2") + plAgg := movingAvgAgg.Aggregation.Aggregation.(*es.PipelineAggregation) + So(plAgg.BucketPath, ShouldEqual, "3") + }) + + Convey("With derivative", func() { + c := newFakeClient(5) + _, err := executeTsdbQuery(c, `{ + "timeField": "@timestamp", + "bucketAggs": [ + { "type": "date_histogram", "field": "@timestamp", "id": "4" } + ], + "metrics": [ + { "id": "3", "type": "sum", "field": "@value" }, + { + "id": "2", + "type": "derivative", + "pipelineAgg": "3" + } + ] + }`, from, to, 15*time.Second) + So(err, ShouldBeNil) + sr := c.multisearchRequests[0].Requests[0] + + firstLevel := sr.Aggs[0] + So(firstLevel.Key, 
ShouldEqual, "4") + So(firstLevel.Aggregation.Type, ShouldEqual, "date_histogram") + + derivativeAgg := firstLevel.Aggregation.Aggs[1] + So(derivativeAgg.Key, ShouldEqual, "2") + plAgg := derivativeAgg.Aggregation.Aggregation.(*es.PipelineAggregation) + So(plAgg.BucketPath, ShouldEqual, "3") + }) + + }) +} + +type fakeClient struct { + version int + timeField string + multiSearchResponse *es.MultiSearchResponse + multiSearchError error + builder *es.MultiSearchRequestBuilder + multisearchRequests []*es.MultiSearchRequest +} + +func newFakeClient(version int) *fakeClient { + return &fakeClient{ + version: version, + timeField: "@timestamp", + multisearchRequests: make([]*es.MultiSearchRequest, 0), + multiSearchResponse: &es.MultiSearchResponse{}, + } +} + +func (c *fakeClient) GetVersion() int { + return c.version +} + +func (c *fakeClient) GetTimeField() string { + return c.timeField +} + +func (c *fakeClient) GetMinInterval(queryInterval string) (time.Duration, error) { + return 15 * time.Second, nil +} + +func (c *fakeClient) ExecuteMultisearch(r *es.MultiSearchRequest) (*es.MultiSearchResponse, error) { + c.multisearchRequests = append(c.multisearchRequests, r) + return c.multiSearchResponse, c.multiSearchError +} + +func (c *fakeClient) MultiSearch() *es.MultiSearchRequestBuilder { + c.builder = es.NewMultiSearchRequestBuilder(c.version) + return c.builder +} + +func newTsdbQuery(body string) (*tsdb.TsdbQuery, error) { + json, err := simplejson.NewJson([]byte(body)) + if err != nil { + return nil, err + } + return &tsdb.TsdbQuery{ + Queries: []*tsdb.Query{ + { + Model: json, + }, + }, + }, nil +} + +func executeTsdbQuery(c es.Client, body string, from, to time.Time, minInterval time.Duration) (*tsdb.Response, error) { + json, err := simplejson.NewJson([]byte(body)) + if err != nil { + return nil, err + } + fromStr := fmt.Sprintf("%d", from.UnixNano()/int64(time.Millisecond)) + toStr := fmt.Sprintf("%d", to.UnixNano()/int64(time.Millisecond)) + tsdbQuery := &tsdb.TsdbQuery{ + Queries: []*tsdb.Query{ + { + Model: json, + }, + }, + TimeRange: tsdb.NewTimeRange(fromStr, toStr), + } + query := newTimeSeriesQuery(c, tsdbQuery, tsdb.NewIntervalCalculator(&tsdb.IntervalOptions{MinInterval: minInterval})) + return query.execute() +} + +func TestTimeSeriesQueryParser(t *testing.T) { + Convey("Test time series query parser", t, func() { + p := newTimeSeriesQueryParser() + + Convey("Should be able to parse query", func() { + body := `{ + "timeField": "@timestamp", + "query": "@metric:cpu", + "alias": "{{@hostname}} {{metric}}", + "metrics": [ + { + "field": "@value", + "id": "1", + "meta": {}, + "settings": { + "percents": [ + "90" + ] + }, + "type": "percentiles" + }, + { + "type": "count", + "field": "select field", + "id": "4", + "settings": {}, + "meta": {} + } + ], + "bucketAggs": [ + { + "fake": true, + "field": "@hostname", + "id": "3", + "settings": { + "min_doc_count": 1, + "order": "desc", + "orderBy": "_term", + "size": "10" + }, + "type": "terms" + }, + { + "field": "@timestamp", + "id": "2", + "settings": { + "interval": "5m", + "min_doc_count": 0, + "trimEdges": 0 + }, + "type": "date_histogram" + } + ] + }` + tsdbQuery, err := newTsdbQuery(body) + So(err, ShouldBeNil) + queries, err := p.parse(tsdbQuery) + So(err, ShouldBeNil) + So(queries, ShouldHaveLength, 1) + + q := queries[0] + + So(q.TimeField, ShouldEqual, "@timestamp") + So(q.RawQuery, ShouldEqual, "@metric:cpu") + So(q.Alias, ShouldEqual, "{{@hostname}} {{metric}}") + + So(q.Metrics, ShouldHaveLength, 2) + 
So(q.Metrics[0].Field, ShouldEqual, "@value") + So(q.Metrics[0].ID, ShouldEqual, "1") + So(q.Metrics[0].Type, ShouldEqual, "percentiles") + So(q.Metrics[0].Hide, ShouldBeFalse) + So(q.Metrics[0].PipelineAggregate, ShouldEqual, "") + So(q.Metrics[0].Settings.Get("percents").MustStringArray()[0], ShouldEqual, "90") + + So(q.Metrics[1].Field, ShouldEqual, "select field") + So(q.Metrics[1].ID, ShouldEqual, "4") + So(q.Metrics[1].Type, ShouldEqual, "count") + So(q.Metrics[1].Hide, ShouldBeFalse) + So(q.Metrics[1].PipelineAggregate, ShouldEqual, "") + So(q.Metrics[1].Settings.MustMap(), ShouldBeEmpty) + + So(q.BucketAggs, ShouldHaveLength, 2) + So(q.BucketAggs[0].Field, ShouldEqual, "@hostname") + So(q.BucketAggs[0].ID, ShouldEqual, "3") + So(q.BucketAggs[0].Type, ShouldEqual, "terms") + So(q.BucketAggs[0].Settings.Get("min_doc_count").MustInt64(), ShouldEqual, 1) + So(q.BucketAggs[0].Settings.Get("order").MustString(), ShouldEqual, "desc") + So(q.BucketAggs[0].Settings.Get("orderBy").MustString(), ShouldEqual, "_term") + So(q.BucketAggs[0].Settings.Get("size").MustString(), ShouldEqual, "10") + + So(q.BucketAggs[1].Field, ShouldEqual, "@timestamp") + So(q.BucketAggs[1].ID, ShouldEqual, "2") + So(q.BucketAggs[1].Type, ShouldEqual, "date_histogram") + So(q.BucketAggs[1].Settings.Get("interval").MustString(), ShouldEqual, "5m") + So(q.BucketAggs[1].Settings.Get("min_doc_count").MustInt64(), ShouldEqual, 0) + So(q.BucketAggs[1].Settings.Get("trimEdges").MustInt64(), ShouldEqual, 0) + }) + }) +} diff --git a/pkg/tsdb/influxdb/query_part.go b/pkg/tsdb/influxdb/query_part.go index 981aea40526..77f565a8597 100644 --- a/pkg/tsdb/influxdb/query_part.go +++ b/pkg/tsdb/influxdb/query_part.go @@ -31,6 +31,7 @@ func init() { renders["mean"] = QueryDefinition{Renderer: functionRenderer} renders["median"] = QueryDefinition{Renderer: functionRenderer} renders["sum"] = QueryDefinition{Renderer: functionRenderer} + renders["mode"] = QueryDefinition{Renderer: functionRenderer} renders["holt_winters"] = QueryDefinition{ Renderer: functionRenderer, diff --git a/pkg/tsdb/influxdb/query_part_test.go b/pkg/tsdb/influxdb/query_part_test.go index d23865174c8..08bcff9b727 100644 --- a/pkg/tsdb/influxdb/query_part_test.go +++ b/pkg/tsdb/influxdb/query_part_test.go @@ -4,77 +4,39 @@ import ( "testing" "github.com/grafana/grafana/pkg/tsdb" - . 
"github.com/smartystreets/goconvey/convey" ) func TestInfluxdbQueryPart(t *testing.T) { - Convey("Influxdb query parts", t, func() { + tcs := []struct { + mode string + input string + params []string + expected string + }{ + {mode: "field", params: []string{"value"}, input: "value", expected: `"value"`}, + {mode: "derivative", params: []string{"10s"}, input: "mean(value)", expected: `derivative(mean(value), 10s)`}, + {mode: "bottom", params: []string{"3"}, input: "value", expected: `bottom(value, 3)`}, + {mode: "time", params: []string{"$interval"}, input: "", expected: `time($interval)`}, + {mode: "time", params: []string{"auto"}, input: "", expected: `time($__interval)`}, + {mode: "spread", params: []string{}, input: "value", expected: `spread(value)`}, + {mode: "math", params: []string{"/ 100"}, input: "mean(value)", expected: `mean(value) / 100`}, + {mode: "alias", params: []string{"test"}, input: "mean(value)", expected: `mean(value) AS "test"`}, + {mode: "count", params: []string{}, input: "distinct(value)", expected: `count(distinct(value))`}, + {mode: "mode", params: []string{}, input: "value", expected: `mode(value)`}, + } - queryContext := &tsdb.TsdbQuery{TimeRange: tsdb.NewTimeRange("5m", "now")} - query := &Query{} + queryContext := &tsdb.TsdbQuery{TimeRange: tsdb.NewTimeRange("5m", "now")} + query := &Query{} - Convey("render field ", func() { - part, err := NewQueryPart("field", []string{"value"}) - So(err, ShouldBeNil) + for _, tc := range tcs { + part, err := NewQueryPart(tc.mode, tc.params) + if err != nil { + t.Errorf("Expected NewQueryPart to not return an error. error: %v", err) + } - res := part.Render(query, queryContext, "value") - So(res, ShouldEqual, `"value"`) - }) - - Convey("render nested part", func() { - part, err := NewQueryPart("derivative", []string{"10s"}) - So(err, ShouldBeNil) - - res := part.Render(query, queryContext, "mean(value)") - So(res, ShouldEqual, "derivative(mean(value), 10s)") - }) - - Convey("render bottom", func() { - part, err := NewQueryPart("bottom", []string{"3"}) - So(err, ShouldBeNil) - - res := part.Render(query, queryContext, "value") - So(res, ShouldEqual, "bottom(value, 3)") - }) - - Convey("render time with $interval", func() { - part, err := NewQueryPart("time", []string{"$interval"}) - So(err, ShouldBeNil) - - res := part.Render(query, queryContext, "") - So(res, ShouldEqual, "time($interval)") - }) - - Convey("render time with auto", func() { - part, err := NewQueryPart("time", []string{"auto"}) - So(err, ShouldBeNil) - - res := part.Render(query, queryContext, "") - So(res, ShouldEqual, "time($__interval)") - }) - - Convey("render spread", func() { - part, err := NewQueryPart("spread", []string{}) - So(err, ShouldBeNil) - - res := part.Render(query, queryContext, "value") - So(res, ShouldEqual, `spread(value)`) - }) - - Convey("render suffix", func() { - part, err := NewQueryPart("math", []string{"/ 100"}) - So(err, ShouldBeNil) - - res := part.Render(query, queryContext, "mean(value)") - So(res, ShouldEqual, "mean(value) / 100") - }) - - Convey("render alias", func() { - part, err := NewQueryPart("alias", []string{"test"}) - So(err, ShouldBeNil) - - res := part.Render(query, queryContext, "mean(value)") - So(res, ShouldEqual, `mean(value) AS "test"`) - }) - }) + res := part.Render(query, queryContext, tc.input) + if res != tc.expected { + t.Errorf("expected %v to render into %s", tc, tc.expected) + } + } } diff --git a/pkg/tsdb/mssql/macros.go b/pkg/tsdb/mssql/macros.go index bb9489cd654..ad3d1edd5d7 100644 --- 
a/pkg/tsdb/mssql/macros.go +++ b/pkg/tsdb/mssql/macros.go @@ -82,11 +82,12 @@ func (m *MsSqlMacroEngine) evaluateMacro(name string, args []string) (string, er if len(args) == 0 { return "", fmt.Errorf("missing time column argument for macro %v", name) } - return fmt.Sprintf("%s >= DATEADD(s, %d, '1970-01-01') AND %s <= DATEADD(s, %d, '1970-01-01')", args[0], m.TimeRange.GetFromAsSecondsEpoch(), args[0], m.TimeRange.GetToAsSecondsEpoch()), nil + + return fmt.Sprintf("%s BETWEEN '%s' AND '%s'", args[0], m.TimeRange.GetFromAsTimeUTC().Format(time.RFC3339), m.TimeRange.GetToAsTimeUTC().Format(time.RFC3339)), nil case "__timeFrom": - return fmt.Sprintf("DATEADD(second, %d, '1970-01-01')", m.TimeRange.GetFromAsSecondsEpoch()), nil + return fmt.Sprintf("'%s'", m.TimeRange.GetFromAsTimeUTC().Format(time.RFC3339)), nil case "__timeTo": - return fmt.Sprintf("DATEADD(second, %d, '1970-01-01')", m.TimeRange.GetToAsSecondsEpoch()), nil + return fmt.Sprintf("'%s'", m.TimeRange.GetToAsTimeUTC().Format(time.RFC3339)), nil case "__timeGroup": if len(args) < 2 { return "", fmt.Errorf("macro %v needs time column and interval", name) @@ -108,7 +109,7 @@ func (m *MsSqlMacroEngine) evaluateMacro(name string, args []string) (string, er m.Query.Model.Set("fillValue", floatVal) } } - return fmt.Sprintf("CAST(ROUND(DATEDIFF(second, '1970-01-01', %s)/%.1f, 0) as bigint)*%.0f", args[0], interval.Seconds(), interval.Seconds()), nil + return fmt.Sprintf("FLOOR(DATEDIFF(second, '1970-01-01', %s)/%.0f)*%.0f", args[0], interval.Seconds(), interval.Seconds()), nil case "__unixEpochFilter": if len(args) == 0 { return "", fmt.Errorf("missing time column argument for macro %v", name) diff --git a/pkg/tsdb/mssql/macros_test.go b/pkg/tsdb/mssql/macros_test.go index ae0d4f67d2b..49368fe3631 100644 --- a/pkg/tsdb/mssql/macros_test.go +++ b/pkg/tsdb/mssql/macros_test.go @@ -49,21 +49,21 @@ func TestMacroEngine(t *testing.T) { sql, err := engine.Interpolate(query, timeRange, "WHERE $__timeFilter(time_column)") So(err, ShouldBeNil) - So(sql, ShouldEqual, fmt.Sprintf("WHERE time_column >= DATEADD(s, %d, '1970-01-01') AND time_column <= DATEADD(s, %d, '1970-01-01')", from.Unix(), to.Unix())) + So(sql, ShouldEqual, fmt.Sprintf("WHERE time_column BETWEEN '%s' AND '%s'", from.Format(time.RFC3339), to.Format(time.RFC3339))) }) Convey("interpolate __timeGroup function", func() { sql, err := engine.Interpolate(query, timeRange, "GROUP BY $__timeGroup(time_column,'5m')") So(err, ShouldBeNil) - So(sql, ShouldEqual, "GROUP BY CAST(ROUND(DATEDIFF(second, '1970-01-01', time_column)/300.0, 0) as bigint)*300") + So(sql, ShouldEqual, "GROUP BY FLOOR(DATEDIFF(second, '1970-01-01', time_column)/300)*300") }) Convey("interpolate __timeGroup function with spaces around arguments", func() { sql, err := engine.Interpolate(query, timeRange, "GROUP BY $__timeGroup(time_column , '5m')") So(err, ShouldBeNil) - So(sql, ShouldEqual, "GROUP BY CAST(ROUND(DATEDIFF(second, '1970-01-01', time_column)/300.0, 0) as bigint)*300") + So(sql, ShouldEqual, "GROUP BY FLOOR(DATEDIFF(second, '1970-01-01', time_column)/300)*300") }) Convey("interpolate __timeGroup function with fill (value = NULL)", func() { @@ -96,14 +96,14 @@ func TestMacroEngine(t *testing.T) { sql, err := engine.Interpolate(query, timeRange, "select $__timeFrom(time_column)") So(err, ShouldBeNil) - So(sql, ShouldEqual, fmt.Sprintf("select DATEADD(second, %d, '1970-01-01')", from.Unix())) + So(sql, ShouldEqual, fmt.Sprintf("select '%s'", from.Format(time.RFC3339))) }) Convey("interpolate __timeTo 
function", func() { sql, err := engine.Interpolate(query, timeRange, "select $__timeTo(time_column)") So(err, ShouldBeNil) - So(sql, ShouldEqual, fmt.Sprintf("select DATEADD(second, %d, '1970-01-01')", to.Unix())) + So(sql, ShouldEqual, fmt.Sprintf("select '%s'", to.Format(time.RFC3339))) }) Convey("interpolate __unixEpochFilter function", func() { @@ -137,21 +137,21 @@ func TestMacroEngine(t *testing.T) { sql, err := engine.Interpolate(query, timeRange, "WHERE $__timeFilter(time_column)") So(err, ShouldBeNil) - So(sql, ShouldEqual, fmt.Sprintf("WHERE time_column >= DATEADD(s, %d, '1970-01-01') AND time_column <= DATEADD(s, %d, '1970-01-01')", from.Unix(), to.Unix())) + So(sql, ShouldEqual, fmt.Sprintf("WHERE time_column BETWEEN '%s' AND '%s'", from.Format(time.RFC3339), to.Format(time.RFC3339))) }) Convey("interpolate __timeFrom function", func() { sql, err := engine.Interpolate(query, timeRange, "select $__timeFrom(time_column)") So(err, ShouldBeNil) - So(sql, ShouldEqual, fmt.Sprintf("select DATEADD(second, %d, '1970-01-01')", from.Unix())) + So(sql, ShouldEqual, fmt.Sprintf("select '%s'", from.Format(time.RFC3339))) }) Convey("interpolate __timeTo function", func() { sql, err := engine.Interpolate(query, timeRange, "select $__timeTo(time_column)") So(err, ShouldBeNil) - So(sql, ShouldEqual, fmt.Sprintf("select DATEADD(second, %d, '1970-01-01')", to.Unix())) + So(sql, ShouldEqual, fmt.Sprintf("select '%s'", to.Format(time.RFC3339))) }) Convey("interpolate __unixEpochFilter function", func() { @@ -185,21 +185,21 @@ func TestMacroEngine(t *testing.T) { sql, err := engine.Interpolate(query, timeRange, "WHERE $__timeFilter(time_column)") So(err, ShouldBeNil) - So(sql, ShouldEqual, fmt.Sprintf("WHERE time_column >= DATEADD(s, %d, '1970-01-01') AND time_column <= DATEADD(s, %d, '1970-01-01')", from.Unix(), to.Unix())) + So(sql, ShouldEqual, fmt.Sprintf("WHERE time_column BETWEEN '%s' AND '%s'", from.Format(time.RFC3339), to.Format(time.RFC3339))) }) Convey("interpolate __timeFrom function", func() { sql, err := engine.Interpolate(query, timeRange, "select $__timeFrom(time_column)") So(err, ShouldBeNil) - So(sql, ShouldEqual, fmt.Sprintf("select DATEADD(second, %d, '1970-01-01')", from.Unix())) + So(sql, ShouldEqual, fmt.Sprintf("select '%s'", from.Format(time.RFC3339))) }) Convey("interpolate __timeTo function", func() { sql, err := engine.Interpolate(query, timeRange, "select $__timeTo(time_column)") So(err, ShouldBeNil) - So(sql, ShouldEqual, fmt.Sprintf("select DATEADD(second, %d, '1970-01-01')", to.Unix())) + So(sql, ShouldEqual, fmt.Sprintf("select '%s'", to.Format(time.RFC3339))) }) Convey("interpolate __unixEpochFilter function", func() { diff --git a/pkg/tsdb/mssql/mssql_test.go b/pkg/tsdb/mssql/mssql_test.go index e62d30a6325..db04d6d1f02 100644 --- a/pkg/tsdb/mssql/mssql_test.go +++ b/pkg/tsdb/mssql/mssql_test.go @@ -210,11 +210,12 @@ func TestMSSQL(t *testing.T) { So(queryResult.Error, ShouldBeNil) points := queryResult.Series[0].Points - So(len(points), ShouldEqual, 6) + // without fill this should result in 4 buckets + So(len(points), ShouldEqual, 4) dt := fromStart - for i := 0; i < 3; i++ { + for i := 0; i < 2; i++ { aValue := points[i][0].Float64 aTime := time.Unix(int64(points[i][1].Float64)/1000, 0) So(aValue, ShouldEqual, 15) @@ -222,9 +223,9 @@ func TestMSSQL(t *testing.T) { dt = dt.Add(5 * time.Minute) } - // adjust for 5 minute gap - dt = dt.Add(5 * time.Minute) - for i := 3; i < 6; i++ { + // adjust for 10 minute gap between first and second set of points + dt = 
diff --git a/pkg/tsdb/mssql/mssql_test.go b/pkg/tsdb/mssql/mssql_test.go
index e62d30a6325..db04d6d1f02 100644
--- a/pkg/tsdb/mssql/mssql_test.go
+++ b/pkg/tsdb/mssql/mssql_test.go
@@ -210,11 +210,12 @@ func TestMSSQL(t *testing.T) {
 				So(queryResult.Error, ShouldBeNil)

 				points := queryResult.Series[0].Points
-				So(len(points), ShouldEqual, 6)
+				// without fill this should result in 4 buckets
+				So(len(points), ShouldEqual, 4)

 				dt := fromStart

-				for i := 0; i < 3; i++ {
+				for i := 0; i < 2; i++ {
 					aValue := points[i][0].Float64
 					aTime := time.Unix(int64(points[i][1].Float64)/1000, 0)
 					So(aValue, ShouldEqual, 15)
@@ -222,9 +223,9 @@ func TestMSSQL(t *testing.T) {
 					dt = dt.Add(5 * time.Minute)
 				}

-				// adjust for 5 minute gap
-				dt = dt.Add(5 * time.Minute)
-				for i := 3; i < 6; i++ {
+				// adjust for 10 minute gap between first and second set of points
+				dt = dt.Add(10 * time.Minute)
+				for i := 2; i < 4; i++ {
 					aValue := points[i][0].Float64
 					aTime := time.Unix(int64(points[i][1].Float64)/1000, 0)
 					So(aValue, ShouldEqual, 20)
@@ -260,7 +261,7 @@ func TestMSSQL(t *testing.T) {
 				dt := fromStart

-				for i := 0; i < 3; i++ {
+				for i := 0; i < 2; i++ {
 					aValue := points[i][0].Float64
 					aTime := time.Unix(int64(points[i][1].Float64)/1000, 0)
 					So(aValue, ShouldEqual, 15)
@@ -268,17 +269,22 @@ func TestMSSQL(t *testing.T) {
 					dt = dt.Add(5 * time.Minute)
 				}

+				// check for NULL values inserted by fill
+				So(points[2][0].Valid, ShouldBeFalse)
 				So(points[3][0].Valid, ShouldBeFalse)

-				// adjust for 5 minute gap
-				dt = dt.Add(5 * time.Minute)
-				for i := 4; i < 7; i++ {
+				// adjust for 10 minute gap between first and second set of points
+				dt = dt.Add(10 * time.Minute)
+				for i := 4; i < 6; i++ {
 					aValue := points[i][0].Float64
 					aTime := time.Unix(int64(points[i][1].Float64)/1000, 0)
 					So(aValue, ShouldEqual, 20)
 					So(aTime, ShouldEqual, dt)
 					dt = dt.Add(5 * time.Minute)
 				}
+
+				So(points[6][0].Valid, ShouldBeFalse)
+
 			})

 			Convey("When doing a metric query using timeGroup with float fill enabled", func() {
@@ -525,7 +531,7 @@ func TestMSSQL(t *testing.T) {
 			So(queryResult.Error, ShouldBeNil)

 			So(len(queryResult.Series), ShouldEqual, 1)
-			So(queryResult.Series[0].Points[0][1].Float64, ShouldEqual, float64(float64(float32(tInitial.Unix())))*1e3)
+			So(queryResult.Series[0].Points[0][1].Float64, ShouldEqual, float64(float32(tInitial.Unix()))*1e3)
 		})

 		Convey("When doing a metric query using epoch (float32 nullable) as time column and value column (float32 nullable) should return metric with time in milliseconds", func() {
@@ -547,7 +553,7 @@ func TestMSSQL(t *testing.T) {
 			So(queryResult.Error, ShouldBeNil)

 			So(len(queryResult.Series), ShouldEqual, 1)
-			So(queryResult.Series[0].Points[0][1].Float64, ShouldEqual, float64(float64(float32(tInitial.Unix())))*1e3)
+			So(queryResult.Series[0].Points[0][1].Float64, ShouldEqual, float64(float32(tInitial.Unix()))*1e3)
 		})

 		Convey("When doing a metric query grouping by time and select metric column should return correct series", func() {
@@ -924,7 +930,7 @@ func TestMSSQL(t *testing.T) {
 			columns := queryResult.Tables[0].Rows[0]

 			//Should be in milliseconds
-			So(columns[0].(int64), ShouldEqual, int64(dt.Unix()*1000))
+			So(columns[0].(int64), ShouldEqual, dt.Unix()*1000)
 		})

 		Convey("When doing an annotation query with a time column in epoch second format (int) should return ms", func() {
@@ -954,7 +960,7 @@ func TestMSSQL(t *testing.T) {
 			columns := queryResult.Tables[0].Rows[0]

 			//Should be in milliseconds
-			So(columns[0].(int64), ShouldEqual, int64(dt.Unix()*1000))
+			So(columns[0].(int64), ShouldEqual, dt.Unix()*1000)
 		})

 		Convey("When doing an annotation query with a time column in epoch millisecond format should return ms", func() {
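The reworked fixtures behind these assertions write two points five minutes apart, leave two empty 5-minute intervals, then write two more points; grouping into 5m buckets therefore yields 4 rows without fill, and fill reintroduces the empty intervals as NULL rows. A small standalone sketch of the bucket counting, with assumed timestamps:

	package main

	import "fmt"

	func main() {
		const interval int64 = 300 // 5m buckets, as in $__timeGroup(time_column, '5m')
		base := int64(1521115200)  // assumed epoch, aligned to a bucket boundary

		// two points 5m apart, then a gap, then two more
		points := []int64{base, base + 300, base + 1200, base + 1500}

		buckets := map[int64]bool{}
		for _, ts := range points {
			buckets[ts/interval*interval] = true // integer division floors, like FLOOR(...)
		}
		fmt.Println(len(buckets)) // 4 — fill would add NULL rows for base+600 and base+900
	}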
fmt.Sprintf("FROM_UNIXTIME(%d)", m.TimeRange.GetFromAsSecondsEpoch()), nil + return fmt.Sprintf("'%s'", m.TimeRange.GetFromAsTimeUTC().Format(time.RFC3339)), nil case "__timeTo": - return fmt.Sprintf("FROM_UNIXTIME(%d)", m.TimeRange.GetToAsSecondsEpoch()), nil + return fmt.Sprintf("'%s'", m.TimeRange.GetToAsTimeUTC().Format(time.RFC3339)), nil case "__timeGroup": if len(args) < 2 { return "", fmt.Errorf("macro %v needs time column and interval", name) @@ -103,7 +104,7 @@ func (m *MySqlMacroEngine) evaluateMacro(name string, args []string) (string, er m.Query.Model.Set("fillValue", floatVal) } } - return fmt.Sprintf("cast(cast(UNIX_TIMESTAMP(%s)/(%.0f) as signed)*%.0f as signed)", args[0], interval.Seconds(), interval.Seconds()), nil + return fmt.Sprintf("UNIX_TIMESTAMP(%s) DIV %.0f * %.0f", args[0], interval.Seconds(), interval.Seconds()), nil case "__unixEpochFilter": if len(args) == 0 { return "", fmt.Errorf("missing time column argument for macro %v", name) diff --git a/pkg/tsdb/mysql/macros_test.go b/pkg/tsdb/mysql/macros_test.go index 66ec143eac8..2561661b385 100644 --- a/pkg/tsdb/mysql/macros_test.go +++ b/pkg/tsdb/mysql/macros_test.go @@ -39,7 +39,7 @@ func TestMacroEngine(t *testing.T) { sql, err := engine.Interpolate(query, timeRange, "GROUP BY $__timeGroup(time_column,'5m')") So(err, ShouldBeNil) - So(sql, ShouldEqual, "GROUP BY cast(cast(UNIX_TIMESTAMP(time_column)/(300) as signed)*300 as signed)") + So(sql, ShouldEqual, "GROUP BY UNIX_TIMESTAMP(time_column) DIV 300 * 300") }) Convey("interpolate __timeGroup function with spaces around arguments", func() { @@ -47,28 +47,28 @@ func TestMacroEngine(t *testing.T) { sql, err := engine.Interpolate(query, timeRange, "GROUP BY $__timeGroup(time_column , '5m')") So(err, ShouldBeNil) - So(sql, ShouldEqual, "GROUP BY cast(cast(UNIX_TIMESTAMP(time_column)/(300) as signed)*300 as signed)") + So(sql, ShouldEqual, "GROUP BY UNIX_TIMESTAMP(time_column) DIV 300 * 300") }) Convey("interpolate __timeFilter function", func() { sql, err := engine.Interpolate(query, timeRange, "WHERE $__timeFilter(time_column)") So(err, ShouldBeNil) - So(sql, ShouldEqual, fmt.Sprintf("WHERE time_column >= FROM_UNIXTIME(%d) AND time_column <= FROM_UNIXTIME(%d)", from.Unix(), to.Unix())) + So(sql, ShouldEqual, fmt.Sprintf("WHERE time_column BETWEEN '%s' AND '%s'", from.Format(time.RFC3339), to.Format(time.RFC3339))) }) Convey("interpolate __timeFrom function", func() { sql, err := engine.Interpolate(query, timeRange, "select $__timeFrom(time_column)") So(err, ShouldBeNil) - So(sql, ShouldEqual, fmt.Sprintf("select FROM_UNIXTIME(%d)", from.Unix())) + So(sql, ShouldEqual, fmt.Sprintf("select '%s'", from.Format(time.RFC3339))) }) Convey("interpolate __timeTo function", func() { sql, err := engine.Interpolate(query, timeRange, "select $__timeTo(time_column)") So(err, ShouldBeNil) - So(sql, ShouldEqual, fmt.Sprintf("select FROM_UNIXTIME(%d)", to.Unix())) + So(sql, ShouldEqual, fmt.Sprintf("select '%s'", to.Format(time.RFC3339))) }) Convey("interpolate __unixEpochFilter function", func() { @@ -102,21 +102,21 @@ func TestMacroEngine(t *testing.T) { sql, err := engine.Interpolate(query, timeRange, "WHERE $__timeFilter(time_column)") So(err, ShouldBeNil) - So(sql, ShouldEqual, fmt.Sprintf("WHERE time_column >= FROM_UNIXTIME(%d) AND time_column <= FROM_UNIXTIME(%d)", from.Unix(), to.Unix())) + So(sql, ShouldEqual, fmt.Sprintf("WHERE time_column BETWEEN '%s' AND '%s'", from.Format(time.RFC3339), to.Format(time.RFC3339))) }) Convey("interpolate __timeFrom function", func() { sql, 
diff --git a/pkg/tsdb/mysql/macros_test.go b/pkg/tsdb/mysql/macros_test.go
index 66ec143eac8..2561661b385 100644
--- a/pkg/tsdb/mysql/macros_test.go
+++ b/pkg/tsdb/mysql/macros_test.go
@@ -39,7 +39,7 @@ func TestMacroEngine(t *testing.T) {
 			sql, err := engine.Interpolate(query, timeRange, "GROUP BY $__timeGroup(time_column,'5m')")
 			So(err, ShouldBeNil)
-			So(sql, ShouldEqual, "GROUP BY cast(cast(UNIX_TIMESTAMP(time_column)/(300) as signed)*300 as signed)")
+			So(sql, ShouldEqual, "GROUP BY UNIX_TIMESTAMP(time_column) DIV 300 * 300")
 		})

 		Convey("interpolate __timeGroup function with spaces around arguments", func() {
@@ -47,28 +47,28 @@ func TestMacroEngine(t *testing.T) {
 			sql, err := engine.Interpolate(query, timeRange, "GROUP BY $__timeGroup(time_column , '5m')")
 			So(err, ShouldBeNil)
-			So(sql, ShouldEqual, "GROUP BY cast(cast(UNIX_TIMESTAMP(time_column)/(300) as signed)*300 as signed)")
+			So(sql, ShouldEqual, "GROUP BY UNIX_TIMESTAMP(time_column) DIV 300 * 300")
 		})

 		Convey("interpolate __timeFilter function", func() {
 			sql, err := engine.Interpolate(query, timeRange, "WHERE $__timeFilter(time_column)")
 			So(err, ShouldBeNil)
-			So(sql, ShouldEqual, fmt.Sprintf("WHERE time_column >= FROM_UNIXTIME(%d) AND time_column <= FROM_UNIXTIME(%d)", from.Unix(), to.Unix()))
+			So(sql, ShouldEqual, fmt.Sprintf("WHERE time_column BETWEEN '%s' AND '%s'", from.Format(time.RFC3339), to.Format(time.RFC3339)))
 		})

 		Convey("interpolate __timeFrom function", func() {
 			sql, err := engine.Interpolate(query, timeRange, "select $__timeFrom(time_column)")
 			So(err, ShouldBeNil)
-			So(sql, ShouldEqual, fmt.Sprintf("select FROM_UNIXTIME(%d)", from.Unix()))
+			So(sql, ShouldEqual, fmt.Sprintf("select '%s'", from.Format(time.RFC3339)))
 		})

 		Convey("interpolate __timeTo function", func() {
 			sql, err := engine.Interpolate(query, timeRange, "select $__timeTo(time_column)")
 			So(err, ShouldBeNil)
-			So(sql, ShouldEqual, fmt.Sprintf("select FROM_UNIXTIME(%d)", to.Unix()))
+			So(sql, ShouldEqual, fmt.Sprintf("select '%s'", to.Format(time.RFC3339)))
 		})

 		Convey("interpolate __unixEpochFilter function", func() {
@@ -102,21 +102,21 @@ func TestMacroEngine(t *testing.T) {
 			sql, err := engine.Interpolate(query, timeRange, "WHERE $__timeFilter(time_column)")
 			So(err, ShouldBeNil)
-			So(sql, ShouldEqual, fmt.Sprintf("WHERE time_column >= FROM_UNIXTIME(%d) AND time_column <= FROM_UNIXTIME(%d)", from.Unix(), to.Unix()))
+			So(sql, ShouldEqual, fmt.Sprintf("WHERE time_column BETWEEN '%s' AND '%s'", from.Format(time.RFC3339), to.Format(time.RFC3339)))
 		})

 		Convey("interpolate __timeFrom function", func() {
 			sql, err := engine.Interpolate(query, timeRange, "select $__timeFrom(time_column)")
 			So(err, ShouldBeNil)
-			So(sql, ShouldEqual, fmt.Sprintf("select FROM_UNIXTIME(%d)", from.Unix()))
+			So(sql, ShouldEqual, fmt.Sprintf("select '%s'", from.Format(time.RFC3339)))
 		})

 		Convey("interpolate __timeTo function", func() {
 			sql, err := engine.Interpolate(query, timeRange, "select $__timeTo(time_column)")
 			So(err, ShouldBeNil)
-			So(sql, ShouldEqual, fmt.Sprintf("select FROM_UNIXTIME(%d)", to.Unix()))
+			So(sql, ShouldEqual, fmt.Sprintf("select '%s'", to.Format(time.RFC3339)))
 		})

 		Convey("interpolate __unixEpochFilter function", func() {
@@ -150,21 +150,21 @@ func TestMacroEngine(t *testing.T) {
 			sql, err := engine.Interpolate(query, timeRange, "WHERE $__timeFilter(time_column)")
 			So(err, ShouldBeNil)
-			So(sql, ShouldEqual, fmt.Sprintf("WHERE time_column >= FROM_UNIXTIME(%d) AND time_column <= FROM_UNIXTIME(%d)", from.Unix(), to.Unix()))
+			So(sql, ShouldEqual, fmt.Sprintf("WHERE time_column BETWEEN '%s' AND '%s'", from.Format(time.RFC3339), to.Format(time.RFC3339)))
 		})

 		Convey("interpolate __timeFrom function", func() {
 			sql, err := engine.Interpolate(query, timeRange, "select $__timeFrom(time_column)")
 			So(err, ShouldBeNil)
-			So(sql, ShouldEqual, fmt.Sprintf("select FROM_UNIXTIME(%d)", from.Unix()))
+			So(sql, ShouldEqual, fmt.Sprintf("select '%s'", from.Format(time.RFC3339)))
 		})

 		Convey("interpolate __timeTo function", func() {
 			sql, err := engine.Interpolate(query, timeRange, "select $__timeTo(time_column)")
 			So(err, ShouldBeNil)
-			So(sql, ShouldEqual, fmt.Sprintf("select FROM_UNIXTIME(%d)", to.Unix()))
+			So(sql, ShouldEqual, fmt.Sprintf("select '%s'", to.Format(time.RFC3339)))
 		})

 		Convey("interpolate __unixEpochFilter function", func() {
diff --git a/pkg/tsdb/mysql/mysql_test.go b/pkg/tsdb/mysql/mysql_test.go
index 29c5b72b408..850a37617e2 100644
--- a/pkg/tsdb/mysql/mysql_test.go
+++ b/pkg/tsdb/mysql/mysql_test.go
@@ -132,8 +132,8 @@ func TestMySQL(t *testing.T) {
 				So(column[7].(float64), ShouldEqual, 1.11)
 				So(column[8].(float64), ShouldEqual, 2.22)
 				So(*column[9].(*float32), ShouldEqual, 3.33)
-				So(column[10].(time.Time), ShouldHappenWithin, time.Duration(10*time.Second), time.Now())
-				So(column[11].(time.Time), ShouldHappenWithin, time.Duration(10*time.Second), time.Now())
+				So(column[10].(time.Time), ShouldHappenWithin, 10*time.Second, time.Now())
+				So(column[11].(time.Time), ShouldHappenWithin, 10*time.Second, time.Now())
 				So(column[12].(string), ShouldEqual, "11:11:11")
 				So(column[13].(int64), ShouldEqual, 2018)
 				So(*column[14].(*[]byte), ShouldHaveSameTypeAs, []byte{1})
@@ -209,11 +209,12 @@ func TestMySQL(t *testing.T) {
 				So(queryResult.Error, ShouldBeNil)

 				points := queryResult.Series[0].Points
-				So(len(points), ShouldEqual, 6)
+				// without fill this should result in 4 buckets
+				So(len(points), ShouldEqual, 4)

 				dt := fromStart

-				for i := 0; i < 3; i++ {
+				for i := 0; i < 2; i++ {
 					aValue := points[i][0].Float64
 					aTime := time.Unix(int64(points[i][1].Float64)/1000, 0)
 					So(aValue, ShouldEqual, 15)
@@ -221,9 +222,9 @@ func TestMySQL(t *testing.T) {
 					dt = dt.Add(5 * time.Minute)
 				}

-				// adjust for 5 minute gap
-				dt = dt.Add(5 * time.Minute)
-				for i := 3; i < 6; i++ {
+				// adjust for 10 minute gap between first and second set of points
+				dt = dt.Add(10 * time.Minute)
+				for i := 2; i < 4; i++ {
 					aValue := points[i][0].Float64
 					aTime := time.Unix(int64(points[i][1].Float64)/1000, 0)
 					So(aValue, ShouldEqual, 20)
@@ -259,7 +260,7 @@ func TestMySQL(t *testing.T) {
 				dt := fromStart

-				for i := 0; i < 3; i++ {
+				for i := 0; i < 2; i++ {
aValue := points[i][0].Float64 aTime := time.Unix(int64(points[i][1].Float64)/1000, 0) So(aValue, ShouldEqual, 15) @@ -267,17 +268,23 @@ func TestMySQL(t *testing.T) { dt = dt.Add(5 * time.Minute) } + // check for NULL values inserted by fill + So(points[2][0].Valid, ShouldBeFalse) So(points[3][0].Valid, ShouldBeFalse) - // adjust for 5 minute gap - dt = dt.Add(5 * time.Minute) - for i := 4; i < 7; i++ { + // adjust for 10 minute gap between first and second set of points + dt = dt.Add(10 * time.Minute) + for i := 4; i < 6; i++ { aValue := points[i][0].Float64 aTime := time.Unix(int64(points[i][1].Float64)/1000, 0) So(aValue, ShouldEqual, 20) So(aTime, ShouldEqual, dt) dt = dt.Add(5 * time.Minute) } + + // check for NULL values inserted by fill + So(points[6][0].Valid, ShouldBeFalse) + }) Convey("When doing a metric query using timeGroup with float fill enabled", func() { @@ -571,7 +578,7 @@ func TestMySQL(t *testing.T) { So(queryResult.Error, ShouldBeNil) So(len(queryResult.Series), ShouldEqual, 1) - So(queryResult.Series[0].Points[0][1].Float64, ShouldEqual, float64(float64(float32(tInitial.Unix())))*1e3) + So(queryResult.Series[0].Points[0][1].Float64, ShouldEqual, float64(float32(tInitial.Unix()))*1e3) }) Convey("When doing a metric query using epoch (float32 nullable) as time column and value column (float32 nullable) should return metric with time in milliseconds", func() { @@ -593,7 +600,7 @@ func TestMySQL(t *testing.T) { So(queryResult.Error, ShouldBeNil) So(len(queryResult.Series), ShouldEqual, 1) - So(queryResult.Series[0].Points[0][1].Float64, ShouldEqual, float64(float64(float32(tInitial.Unix())))*1e3) + So(queryResult.Series[0].Points[0][1].Float64, ShouldEqual, float64(float32(tInitial.Unix()))*1e3) }) Convey("When doing a metric query grouping by time and select metric column should return correct series", func() { @@ -601,7 +608,7 @@ func TestMySQL(t *testing.T) { Queries: []*tsdb.Query{ { Model: simplejson.NewFromAny(map[string]interface{}{ - "rawSql": `SELECT $__time(time), CONCAT(measurement, ' - value one') as metric, valueOne FROM metric_values ORDER BY 1`, + "rawSql": `SELECT $__time(time), CONCAT(measurement, ' - value one') as metric, valueOne FROM metric_values ORDER BY 1,2`, "format": "time_series", }), RefId: "A", @@ -615,8 +622,8 @@ func TestMySQL(t *testing.T) { So(queryResult.Error, ShouldBeNil) So(len(queryResult.Series), ShouldEqual, 2) - So(queryResult.Series[0].Name, ShouldEqual, "Metric B - value one") - So(queryResult.Series[1].Name, ShouldEqual, "Metric A - value one") + So(queryResult.Series[0].Name, ShouldEqual, "Metric A - value one") + So(queryResult.Series[1].Name, ShouldEqual, "Metric B - value one") }) Convey("When doing a metric query grouping by time should return correct series", func() { @@ -810,7 +817,7 @@ func TestMySQL(t *testing.T) { columns := queryResult.Tables[0].Rows[0] //Should be in milliseconds - So(columns[0].(int64), ShouldEqual, int64(dt.Unix()*1000)) + So(columns[0].(int64), ShouldEqual, dt.Unix()*1000) }) Convey("When doing an annotation query with a time column in epoch millisecond format should return ms", func() { diff --git a/pkg/tsdb/postgres/macros.go b/pkg/tsdb/postgres/macros.go index 05e39f2c762..61e88418ff4 100644 --- a/pkg/tsdb/postgres/macros.go +++ b/pkg/tsdb/postgres/macros.go @@ -109,7 +109,7 @@ func (m *PostgresMacroEngine) evaluateMacro(name string, args []string) (string, m.Query.Model.Set("fillValue", floatVal) } } - return fmt.Sprintf("(extract(epoch from %s)/%v)::bigint*%v AS time", args[0], 
interval.Seconds(), interval.Seconds()), nil
+		return fmt.Sprintf("floor(extract(epoch from %s)/%v)*%v AS time", args[0], interval.Seconds(), interval.Seconds()), nil
 	case "__unixEpochFilter":
 		if len(args) == 0 {
 			return "", fmt.Errorf("missing time column argument for macro %v", name)
 		}
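The old ::bigint cast rounds to the nearest integer in PostgreSQL (and the previous MSSQL variant used ROUND explicitly), so a point in the second half of an interval was assigned to the following bucket; floor() always truncates toward the bucket start, which is what the new So(aTime.Unix()%300, ShouldEqual, 0) assertion below relies on. A standalone sketch of the difference, with an assumed epoch:

	package main

	import (
		"fmt"
		"math"
	)

	func main() {
		epoch := 1521115471.0 // assumed point, 271s into its 5m bucket

		rounded := math.Round(epoch/300) * 300 // old: (extract(epoch from ...)/300)::bigint*300
		floored := math.Floor(epoch/300) * 300 // new: floor(extract(epoch from ...)/300)*300

		fmt.Printf("%.0f %.0f\n", rounded, floored)
		// 1521115500 1521115200 — rounding pushed the point into the *next* bucket
	}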
diff --git a/pkg/tsdb/postgres/macros_test.go b/pkg/tsdb/postgres/macros_test.go
index c3c15691e42..8c581850430 100644
--- a/pkg/tsdb/postgres/macros_test.go
+++ b/pkg/tsdb/postgres/macros_test.go
@@ -53,7 +53,7 @@ func TestMacroEngine(t *testing.T) {
 			sql, err := engine.Interpolate(query, timeRange, "GROUP BY $__timeGroup(time_column,'5m')")
 			So(err, ShouldBeNil)
-			So(sql, ShouldEqual, "GROUP BY (extract(epoch from time_column)/300)::bigint*300 AS time")
+			So(sql, ShouldEqual, "GROUP BY floor(extract(epoch from time_column)/300)*300 AS time")
 		})

 		Convey("interpolate __timeGroup function with spaces between args", func() {
@@ -61,7 +61,7 @@ func TestMacroEngine(t *testing.T) {
 			sql, err := engine.Interpolate(query, timeRange, "GROUP BY $__timeGroup(time_column , '5m')")
 			So(err, ShouldBeNil)
-			So(sql, ShouldEqual, "GROUP BY (extract(epoch from time_column)/300)::bigint*300 AS time")
+			So(sql, ShouldEqual, "GROUP BY floor(extract(epoch from time_column)/300)*300 AS time")
 		})

 		Convey("interpolate __timeTo function", func() {
diff --git a/pkg/tsdb/postgres/postgres_test.go b/pkg/tsdb/postgres/postgres_test.go
index 7f24d5a2063..a3a6d6546df 100644
--- a/pkg/tsdb/postgres/postgres_test.go
+++ b/pkg/tsdb/postgres/postgres_test.go
@@ -189,21 +189,23 @@ func TestPostgres(t *testing.T) {
 				So(queryResult.Error, ShouldBeNil)

 				points := queryResult.Series[0].Points
-				So(len(points), ShouldEqual, 6)
+				// without fill this should result in 4 buckets
+				So(len(points), ShouldEqual, 4)

 				dt := fromStart

-				for i := 0; i < 3; i++ {
+				for i := 0; i < 2; i++ {
 					aValue := points[i][0].Float64
 					aTime := time.Unix(int64(points[i][1].Float64)/1000, 0)
 					So(aValue, ShouldEqual, 15)
 					So(aTime, ShouldEqual, dt)
+					So(aTime.Unix()%300, ShouldEqual, 0)
 					dt = dt.Add(5 * time.Minute)
 				}

-				// adjust for 5 minute gap
-				dt = dt.Add(5 * time.Minute)
-				for i := 3; i < 6; i++ {
+				// adjust for 10 minute gap between first and second set of points
+				dt = dt.Add(10 * time.Minute)
+				for i := 2; i < 4; i++ {
 					aValue := points[i][0].Float64
 					aTime := time.Unix(int64(points[i][1].Float64)/1000, 0)
 					So(aValue, ShouldEqual, 20)
@@ -239,7 +241,7 @@ func TestPostgres(t *testing.T) {
 				dt := fromStart

-				for i := 0; i < 3; i++ {
+				for i := 0; i < 2; i++ {
 					aValue := points[i][0].Float64
 					aTime := time.Unix(int64(points[i][1].Float64)/1000, 0)
 					So(aValue, ShouldEqual, 15)
@@ -247,17 +249,23 @@ func TestPostgres(t *testing.T) {
 					dt = dt.Add(5 * time.Minute)
 				}

+				// check for NULL values inserted by fill
+				So(points[2][0].Valid, ShouldBeFalse)
 				So(points[3][0].Valid, ShouldBeFalse)

-				// adjust for 5 minute gap
-				dt = dt.Add(5 * time.Minute)
-				for i := 4; i < 7; i++ {
+				// adjust for 10 minute gap between first and second set of points
+				dt = dt.Add(10 * time.Minute)
+				for i := 4; i < 6; i++ {
 					aValue := points[i][0].Float64
 					aTime := time.Unix(int64(points[i][1].Float64)/1000, 0)
 					So(aValue, ShouldEqual, 20)
 					So(aTime, ShouldEqual, dt)
 					dt = dt.Add(5 * time.Minute)
 				}
+
+				// check for NULL values inserted by fill
+				So(points[6][0].Valid, ShouldBeFalse)
+
 			})

 			Convey("When doing a metric query using timeGroup with float fill enabled", func() {
@@ -504,7 +512,7 @@ func TestPostgres(t *testing.T) {
 			So(queryResult.Error, ShouldBeNil)

 			So(len(queryResult.Series), ShouldEqual, 1)
-			So(queryResult.Series[0].Points[0][1].Float64, ShouldEqual, float64(float64(float32(tInitial.Unix())))*1e3)
+			So(queryResult.Series[0].Points[0][1].Float64, ShouldEqual, float64(float32(tInitial.Unix()))*1e3)
 		})

 		Convey("When doing a metric query using epoch (float32 nullable) as time column and value column (float32 nullable) should return metric with time in milliseconds", func() {
@@ -526,7 +534,7 @@ func TestPostgres(t *testing.T) {
 			So(queryResult.Error, ShouldBeNil)

 			So(len(queryResult.Series), ShouldEqual, 1)
-			So(queryResult.Series[0].Points[0][1].Float64, ShouldEqual, float64(float64(float32(tInitial.Unix())))*1e3)
+			So(queryResult.Series[0].Points[0][1].Float64, ShouldEqual, float64(float32(tInitial.Unix()))*1e3)
 		})

 		Convey("When doing a metric query grouping by time and select metric column should return correct series", func() {
@@ -713,7 +721,7 @@ func TestPostgres(t *testing.T) {
 			columns := queryResult.Tables[0].Rows[0]

 			//Should be in milliseconds
-			So(columns[0].(int64), ShouldEqual, int64(dt.Unix()*1000))
+			So(columns[0].(int64), ShouldEqual, dt.Unix()*1000)
 		})

 		Convey("When doing an annotation query with a time column in epoch second format (int) should return ms", func() {
@@ -743,7 +751,7 @@ func TestPostgres(t *testing.T) {
 			columns := queryResult.Tables[0].Rows[0]

 			//Should be in milliseconds
-			So(columns[0].(int64), ShouldEqual, int64(dt.Unix()*1000))
+			So(columns[0].(int64), ShouldEqual, dt.Unix()*1000)
 		})

 		Convey("When doing an annotation query with a time column in epoch millisecond format should return ms", func() {
diff --git a/pkg/tsdb/sql_engine.go b/pkg/tsdb/sql_engine.go
index 274e5b05dc1..ec908aeb9de 100644
--- a/pkg/tsdb/sql_engine.go
+++ b/pkg/tsdb/sql_engine.go
@@ -68,6 +68,7 @@ func (e *DefaultSqlEngine) InitEngine(driverName string, dsInfo *models.DataSour
 	engine.SetMaxOpenConns(10)
 	engine.SetMaxIdleConns(10)

+	engineCache.versions[dsInfo.Id] = dsInfo.Version
 	engineCache.cache[dsInfo.Id] = engine
 	e.XormEngine = engine

@@ -144,10 +145,10 @@ func ConvertSqlTimeColumnToEpochMs(values RowValues, timeIndex int) {
 	if timeIndex >= 0 {
 		switch value := values[timeIndex].(type) {
 		case time.Time:
-			values[timeIndex] = EpochPrecisionToMs(float64(value.UnixNano()))
+			values[timeIndex] = float64(value.UnixNano()) / float64(time.Millisecond)
 		case *time.Time:
 			if value != nil {
-				values[timeIndex] = EpochPrecisionToMs(float64((*value).UnixNano()))
+				values[timeIndex] = float64((*value).UnixNano()) / float64(time.Millisecond)
 			}
 		case int64:
 			values[timeIndex] = int64(EpochPrecisionToMs(float64(value)))
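ConvertSqlTimeColumnToEpochMs now divides UnixNano() by time.Millisecond directly instead of routing through the EpochPrecisionToMs heuristic — presumably to keep fractional milliseconds and to behave correctly for timestamps close to the epoch, the motivation suggested by the new 1970-era fixtures below (stated here as an assumption, not confirmed by the patch itself). A standalone sketch of the conversion using the same fixture times:

	package main

	import (
		"fmt"
		"time"
	)

	func main() {
		dt := time.Date(2018, 3, 14, 21, 20, 6, int(527345*time.Microsecond), time.UTC)
		earlyDt := time.Date(1970, 3, 14, 21, 20, 6, int(527345*time.Microsecond), time.UTC)

		// the same expression the switch now applies to time.Time values
		ms := float64(dt.UnixNano()) / float64(time.Millisecond)
		earlyMs := float64(earlyDt.UnixNano()) / float64(time.Millisecond)

		fmt.Printf("%.3f\n%.3f\n", ms, earlyMs)
		// ≈ 1521062406527.345 and 6297606527.345 — milliseconds with the fractional part preserved
	}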
diff --git a/pkg/tsdb/sql_engine_test.go b/pkg/tsdb/sql_engine_test.go
index ce1fb45de21..854734fac31 100644
--- a/pkg/tsdb/sql_engine_test.go
+++ b/pkg/tsdb/sql_engine_test.go
@@ -12,14 +12,17 @@ import (
 func TestSqlEngine(t *testing.T) {
 	Convey("SqlEngine", t, func() {
 		dt := time.Date(2018, 3, 14, 21, 20, 6, int(527345*time.Microsecond), time.UTC)
+		earlyDt := time.Date(1970, 3, 14, 21, 20, 6, int(527345*time.Microsecond), time.UTC)

 		Convey("Given row values with time.Time as time columns", func() {
 			var nilPointer *time.Time

-			fixtures := make([]interface{}, 3)
+			fixtures := make([]interface{}, 5)
 			fixtures[0] = dt
 			fixtures[1] = &dt
-			fixtures[2] = nilPointer
+			fixtures[2] = earlyDt
+			fixtures[3] = &earlyDt
+			fixtures[4] = nilPointer

 			for i := range fixtures {
 				ConvertSqlTimeColumnToEpochMs(fixtures, i)
@@ -27,9 +30,13 @@ func TestSqlEngine(t *testing.T) {
 			Convey("When converting them should return epoch time with millisecond precision ", func() {
 				expected := float64(dt.UnixNano()) / float64(time.Millisecond)
+				expectedEarly := float64(earlyDt.UnixNano()) / float64(time.Millisecond)
+
 				So(fixtures[0].(float64), ShouldEqual, expected)
 				So(fixtures[1].(float64), ShouldEqual, expected)
-				So(fixtures[2], ShouldBeNil)
+				So(fixtures[2].(float64), ShouldEqual, expectedEarly)
+				So(fixtures[3].(float64), ShouldEqual, expectedEarly)
+				So(fixtures[4], ShouldBeNil)
 			})
 		})
diff --git a/pkg/tsdb/testdata/testdata.go b/pkg/tsdb/testdata/testdata.go
index a1ab250ad37..c2c2ea3f696 100644
--- a/pkg/tsdb/testdata/testdata.go
+++ b/pkg/tsdb/testdata/testdata.go
@@ -21,7 +21,7 @@ func NewTestDataExecutor(dsInfo *models.DataSource) (tsdb.TsdbQueryEndpoint, err
 }

 func init() {
-	tsdb.RegisterTsdbQueryEndpoint("grafana-testdata-datasource", NewTestDataExecutor)
+	tsdb.RegisterTsdbQueryEndpoint("testdata", NewTestDataExecutor)
 }

 func (e *TestDataExecutor) Query(ctx context.Context, dsInfo *models.DataSource, tsdbQuery *tsdb.TsdbQuery) (*tsdb.Response, error) {
diff --git a/pkg/util/md5.go b/pkg/util/md5.go
new file mode 100644
index 00000000000..2473a1a406c
--- /dev/null
+++ b/pkg/util/md5.go
@@ -0,0 +1,26 @@
+package util
+
+import (
+	"crypto/md5"
+	"encoding/hex"
+	"io"
+	"strings"
+)
+
+// Md5Sum calculates the md5sum of a stream
+func Md5Sum(reader io.Reader) (string, error) {
+	var returnMD5String string
+	hash := md5.New()
+	if _, err := io.Copy(hash, reader); err != nil {
+		return returnMD5String, err
+	}
+	hashInBytes := hash.Sum(nil)[:16]
+	returnMD5String = hex.EncodeToString(hashInBytes)
+	return returnMD5String, nil
+}
+
+// Md5SumString calculates the md5sum of a string
+func Md5SumString(input string) (string, error) {
+	buffer := strings.NewReader(input)
+	return Md5Sum(buffer)
+}
diff --git a/pkg/util/md5_test.go b/pkg/util/md5_test.go
new file mode 100644
index 00000000000..1338d42bb51
--- /dev/null
+++ b/pkg/util/md5_test.go
@@ -0,0 +1,17 @@
+package util
+
+import "testing"
+
+func TestMd5Sum(t *testing.T) {
+	input := "dont hash passwords with md5"
+
+	have, err := Md5SumString(input)
+	if err != nil {
+		t.Fatalf("expected err to be nil, got: %v", err)
+	}
+
+	want := "2d6a56c82d09d374643b926d3417afba"
+	if have != want {
+		t.Fatalf("expected: %s got: %s", want, have)
+	}
+}
diff --git a/public/app/containers/Explore/ElapsedTime.tsx b/public/app/containers/Explore/ElapsedTime.tsx
index 9cd8f674186..a2d941515cd 100644
--- a/public/app/containers/Explore/ElapsedTime.tsx
+++ b/public/app/containers/Explore/ElapsedTime.tsx
@@ -41,6 +41,6 @@ export default class ElapsedTime extends PureComponent {
     const { elapsed } = this.state;
     const { className, time } = this.props;
     const value = (time || elapsed) / 1000;
-    return {value.toFixed(1)}s;
+    return {value.toFixed(1)}s;
   }
 }
diff --git a/public/app/containers/Explore/Explore.tsx b/public/app/containers/Explore/Explore.tsx
index 40261ee635a..e50c06c8b17 100644
--- a/public/app/containers/Explore/Explore.tsx
+++ b/public/app/containers/Explore/Explore.tsx
@@ -1,103 +1,146 @@
 import React from 'react';
 import { hot } from 'react-hot-loader';
+import Select from 'react-select';
+
+import kbn from 'app/core/utils/kbn';
 import colors from 'app/core/utils/colors';
 import TimeSeries from 'app/core/time_series2';
+import { decodePathComponent } from 'app/core/utils/location_util';
+import { parse as parseDate } from 'app/core/utils/datemath';

 import ElapsedTime from './ElapsedTime';
-import Legend from './Legend';
 import QueryRows from './QueryRows';
 import Graph from './Graph';
 import Table from './Table';
-import { DatasourceSrv } from
'app/features/plugins/datasource_srv'; -import { buildQueryOptions, ensureQueries, generateQueryKey, hasQuery } from './utils/query'; -import { decodePathComponent } from 'app/core/utils/location_util'; +import TimePicker, { DEFAULT_RANGE } from './TimePicker'; +import { ensureQueries, generateQueryKey, hasQuery } from './utils/query'; function makeTimeSeriesList(dataList, options) { return dataList.map((seriesData, index) => { const datapoints = seriesData.datapoints || []; const alias = seriesData.target; - const colorIndex = index % colors.length; const color = colors[colorIndex]; const series = new TimeSeries({ - datapoints: datapoints, - alias: alias, - color: color, + datapoints, + alias, + color, unit: seriesData.unit, }); - if (datapoints && datapoints.length > 0) { - const last = datapoints[datapoints.length - 1][1]; - const from = options.range.from; - if (last - from < -10000) { - series.isOutsideRange = true; - } - } - return series; }); } -function parseInitialQueries(initial) { - if (!initial) { - return []; - } - try { - const parsed = JSON.parse(decodePathComponent(initial)); - return parsed.queries.map(q => q.query); - } catch (e) { - console.error(e); - return []; +function parseInitialState(initial: string | undefined) { + if (initial) { + try { + const parsed = JSON.parse(decodePathComponent(initial)); + return { + datasource: parsed.datasource, + queries: parsed.queries.map(q => q.query), + range: parsed.range, + }; + } catch (e) { + console.error(e); + } } + return { datasource: null, queries: [], range: DEFAULT_RANGE }; } interface IExploreState { datasource: any; datasourceError: any; - datasourceLoading: any; + datasourceLoading: boolean | null; + datasourceMissing: boolean; graphResult: any; + initialDatasource?: string; latency: number; loading: any; queries: any; + queryError: any; + range: any; requestOptions: any; showingGraph: boolean; showingTable: boolean; tableResult: any; } -// @observer export class Explore extends React.Component { - datasourceSrv: DatasourceSrv; + el: any; constructor(props) { super(props); - const initialQueries = parseInitialQueries(props.routeParams.initial); + const { datasource, queries, range } = parseInitialState(props.routeParams.state); this.state = { datasource: null, datasourceError: null, - datasourceLoading: true, + datasourceLoading: null, + datasourceMissing: false, graphResult: null, + initialDatasource: datasource, latency: 0, loading: false, - queries: ensureQueries(initialQueries), + queries: ensureQueries(queries), + queryError: null, + range: range || { ...DEFAULT_RANGE }, requestOptions: null, showingGraph: true, showingTable: true, tableResult: null, + ...props.initialState, }; } async componentDidMount() { - const datasource = await this.props.datasourceSrv.get(); - const testResult = await datasource.testDatasource(); - if (testResult.status === 'success') { - this.setState({ datasource, datasourceError: null, datasourceLoading: false }, () => this.handleSubmit()); + const { datasourceSrv } = this.props; + const { initialDatasource } = this.state; + if (!datasourceSrv) { + throw new Error('No datasource service passed as props.'); + } + const datasources = datasourceSrv.getExploreSources(); + if (datasources.length > 0) { + this.setState({ datasourceLoading: true }); + // Priority: datasource in url, default datasource, first explore datasource + let datasource; + if (initialDatasource) { + datasource = await datasourceSrv.get(initialDatasource); + } else { + datasource = await datasourceSrv.get(); + } + if 
(!datasource.meta.explore) { + datasource = await datasourceSrv.get(datasources[0].name); + } + this.setDatasource(datasource); } else { - this.setState({ datasource: null, datasourceError: testResult.message, datasourceLoading: false }); + this.setState({ datasourceMissing: true }); } } + componentDidCatch(error) { + this.setState({ datasourceError: error }); + console.error(error); + } + + async setDatasource(datasource) { + try { + const testResult = await datasource.testDatasource(); + if (testResult.status === 'success') { + this.setState({ datasource, datasourceError: null, datasourceLoading: false }, () => this.handleSubmit()); + } else { + this.setState({ datasource: datasource, datasourceError: testResult.message, datasourceLoading: false }); + } + } catch (error) { + const message = (error && error.statusText) || error; + this.setState({ datasource: datasource, datasourceError: message, datasourceLoading: false }); + } + } + + getRef = el => { + this.el = el; + }; + handleAddQueryRow = index => { const { queries } = this.state; const nextQueries = [ @@ -108,6 +151,18 @@ export class Explore extends React.Component { this.setState({ queries: nextQueries }); }; + handleChangeDatasource = async option => { + this.setState({ + datasource: null, + datasourceError: null, + datasourceLoading: true, + graphResult: null, + tableResult: null, + }); + const datasource = await this.props.datasourceSrv.get(option.value); + this.setDatasource(datasource); + }; + handleChangeQuery = (query, index) => { const { queries } = this.state; const nextQuery = { @@ -119,10 +174,32 @@ export class Explore extends React.Component { this.setState({ queries: nextQueries }); }; + handleChangeTime = nextRange => { + const range = { + from: nextRange.from, + to: nextRange.to, + }; + this.setState({ range }, () => this.handleSubmit()); + }; + + handleClickCloseSplit = () => { + const { onChangeSplit } = this.props; + if (onChangeSplit) { + onChangeSplit(false); + } + }; + handleClickGraphButton = () => { this.setState(state => ({ showingGraph: !state.showingGraph })); }; + handleClickSplit = () => { + const { onChangeSplit } = this.props; + if (onChangeSplit) { + onChangeSplit(true, this.state); + } + }; + handleClickTableButton = () => { this.setState(state => ({ showingTable: !state.showingTable })); }; @@ -146,28 +223,42 @@ export class Explore extends React.Component { } }; + buildQueryOptions(targetOptions: { format: string; instant: boolean }) { + const { datasource, queries, range } = this.state; + const resolution = this.el.offsetWidth; + const absoluteRange = { + from: parseDate(range.from, false), + to: parseDate(range.to, true), + }; + const { interval } = kbn.calculateInterval(absoluteRange, resolution, datasource.interval); + const targets = queries.map(q => ({ + ...targetOptions, + expr: q.query, + })); + return { + interval, + range, + targets, + }; + } + async runGraphQuery() { const { datasource, queries } = this.state; if (!hasQuery(queries)) { return; } - this.setState({ latency: 0, loading: true, graphResult: null }); + this.setState({ latency: 0, loading: true, graphResult: null, queryError: null }); const now = Date.now(); - const options = buildQueryOptions({ - format: 'time_series', - interval: datasource.interval, - instant: false, - now, - queries: queries.map(q => q.query), - }); + const options = this.buildQueryOptions({ format: 'time_series', instant: false }); try { const res = await datasource.query(options); const result = makeTimeSeriesList(res.data, options); const latency = 
Date.now() - now; this.setState({ latency, loading: false, graphResult: result, requestOptions: options }); - } catch (error) { - console.error(error); - this.setState({ loading: false, graphResult: error }); + } catch (response) { + console.error(response); + const queryError = response.data ? response.data.error : response; + this.setState({ loading: false, queryError }); } } @@ -176,23 +267,21 @@ export class Explore extends React.Component { if (!hasQuery(queries)) { return; } - this.setState({ latency: 0, loading: true, tableResult: null }); + this.setState({ latency: 0, loading: true, queryError: null, tableResult: null }); const now = Date.now(); - const options = buildQueryOptions({ + const options = this.buildQueryOptions({ format: 'table', - interval: datasource.interval, instant: true, - now, - queries: queries.map(q => q.query), }); try { const res = await datasource.query(options); const tableModel = res.data[0]; const latency = Date.now() - now; this.setState({ latency, loading: false, tableResult: tableModel, requestOptions: options }); - } catch (error) { - console.error(error); - this.setState({ loading: false, tableResult: null }); + } catch (response) { + console.error(response); + const queryError = response.data ? response.data.error : response; + this.setState({ loading: false, queryError }); } } @@ -202,67 +291,123 @@ export class Explore extends React.Component { }; render() { + const { datasourceSrv, position, split } = this.props; const { datasource, datasourceError, datasourceLoading, + datasourceMissing, graphResult, latency, loading, queries, + queryError, + range, requestOptions, showingGraph, showingTable, tableResult, } = this.state; const showingBoth = showingGraph && showingTable; - const graphHeight = showingBoth ? '200px' : null; - const graphButtonClassName = showingBoth || showingGraph ? 'btn m-r-1' : 'btn btn-inverse m-r-1'; - const tableButtonClassName = showingBoth || showingTable ? 'btn m-r-1' : 'btn btn-inverse m-r-1'; + const graphHeight = showingBoth ? '200px' : '400px'; + const graphButtonActive = showingBoth || showingGraph ? 'active' : ''; + const tableButtonActive = showingBoth || showingTable ? 'active' : ''; + const exploreClass = split ? 'explore explore-split' : 'explore'; + const datasources = datasourceSrv.getExploreSources().map(ds => ({ + value: ds.name, + label: ds.name, + })); + const selectedDatasource = datasource ? datasource.name : undefined; + return ( -
-
-

Explore

- {datasourceLoading ?
Loading datasource...
: null} - - {datasourceError ?
Error connecting to datasource.
: null} - - {datasource ? ( -
-
-
- {loading || latency ? : null} - -
-
- - -
-
- +
+ {position === 'left' ? ( + + ) : ( +
+ +
+ )} + {!datasourceMissing ? ( +
+ +
+
+ + +
+
+ +
+
+ + {/* +
+
+ +
+
*/} +
+ +
+
+ +
+

Quick ranges

+ {Object.keys(timeOptions).map(section => { + const group = timeOptions[section]; + return ( + + ); + })} +
+
+ ); + } + + render() { + const { isUtc, rangeString, refreshInterval } = this.state; + return ( +
+
+ + + +
+ {this.renderDropdown()} +
+ ); + } +} diff --git a/public/app/containers/Explore/Typeahead.tsx b/public/app/containers/Explore/Typeahead.tsx index 4943622fe4e..44fce7f8c7e 100644 --- a/public/app/containers/Explore/Typeahead.tsx +++ b/public/app/containers/Explore/Typeahead.tsx @@ -23,12 +23,13 @@ class TypeaheadItem extends React.PureComponent { }; render() { - const { isSelected, label, onClickItem } = this.props; + const { hint, isSelected, label, onClickItem } = this.props; const className = isSelected ? 'typeahead-item typeahead-item__selected' : 'typeahead-item'; const onClick = () => onClickItem(label); return (
  • {label} + {hint && isSelected ?
    {hint}
    : null}
  • ); } @@ -41,9 +42,19 @@ class TypeaheadGroup extends React.PureComponent {
  • {label}
      - {items.map(item => ( - -1} label={item} /> - ))} + {items.map(item => { + const text = typeof item === 'object' ? item.text : item; + const label = typeof item === 'object' ? item.display || item.text : item; + return ( + -1} + hint={item.hint} + label={label} + /> + ); + })}
  • ); diff --git a/public/app/containers/Explore/Wrapper.tsx b/public/app/containers/Explore/Wrapper.tsx new file mode 100644 index 00000000000..6bdbd7cc42f --- /dev/null +++ b/public/app/containers/Explore/Wrapper.tsx @@ -0,0 +1,33 @@ +import React, { PureComponent } from 'react'; + +import Explore from './Explore'; + +export default class Wrapper extends PureComponent { + state = { + initialState: null, + split: false, + }; + + handleChangeSplit = (split, initialState) => { + this.setState({ split, initialState }); + }; + + render() { + // State overrides for props from first Explore + const { initialState, split } = this.state; + return ( +
    + + {split ? ( + + ) : null} +
    + ); + } +} diff --git a/public/app/containers/Explore/slate-plugins/prism/index.tsx b/public/app/containers/Explore/slate-plugins/prism/index.tsx index 7c3fa296d8e..d185518790f 100644 --- a/public/app/containers/Explore/slate-plugins/prism/index.tsx +++ b/public/app/containers/Explore/slate-plugins/prism/index.tsx @@ -1,16 +1,12 @@ import React from 'react'; import Prism from 'prismjs'; -import Promql from './promql'; - -Prism.languages.promql = Promql; - const TOKEN_MARK = 'prism-token'; -export function configurePrismMetricsTokens(metrics) { - Prism.languages.promql.metric = { - alias: 'variable', - pattern: new RegExp(`(?:^|\\s)(${metrics.join('|')})(?:$|\\s)`), +export function setPrismTokens(language, field, values, alias = 'variable') { + Prism.languages[language][field] = { + alias, + pattern: new RegExp(`(?:^|\\s)(${values.join('|')})(?:$|\\s)`), }; } @@ -21,7 +17,12 @@ export function configurePrismMetricsTokens(metrics) { * (Adapted to handle nested grammar definitions.) */ -export default function PrismPlugin() { +export default function PrismPlugin({ definition, language }) { + if (definition) { + // Don't override exising modified definitions + Prism.languages[language] = Prism.languages[language] || definition; + } + return { /** * Render a Slate mark with appropiate CSS class names @@ -54,7 +55,7 @@ export default function PrismPlugin() { const texts = node.getTexts().toArray(); const tstring = texts.map(t => t.text).join('\n'); - const grammar = Prism.languages.promql; + const grammar = Prism.languages[language]; const tokens = Prism.tokenize(tstring, grammar); const decorations = []; let startText = texts.shift(); diff --git a/public/app/containers/Explore/utils/query.ts b/public/app/containers/Explore/utils/query.ts index d51c7339944..d774f619a30 100644 --- a/public/app/containers/Explore/utils/query.ts +++ b/public/app/containers/Explore/utils/query.ts @@ -1,20 +1,3 @@ -export function buildQueryOptions({ format, interval, instant, now, queries }) { - const to = now; - const from = to - 1000 * 60 * 60 * 3; - return { - interval, - range: { - from, - to, - }, - targets: queries.map(expr => ({ - expr, - format, - instant, - })), - }; -} - export function generateQueryKey(index = 0) { return `Q-${Date.now()}-${Math.random()}-${index}`; } diff --git a/public/app/containers/ManageDashboards/FolderPermissions.tsx b/public/app/containers/ManageDashboards/FolderPermissions.tsx index abbde63a179..aac5d32750a 100644 --- a/public/app/containers/ManageDashboards/FolderPermissions.tsx +++ b/public/app/containers/ManageDashboards/FolderPermissions.tsx @@ -54,7 +54,7 @@ export class FolderPermissions extends Component {
    -

    Folder Permissions

    +

    Folder Permissions

    @@ -68,7 +68,7 @@ export class FolderPermissions extends Component {
    - +
    diff --git a/public/app/containers/Teams/TeamGroupSync.tsx b/public/app/containers/Teams/TeamGroupSync.tsx new file mode 100644 index 00000000000..323dceae0d8 --- /dev/null +++ b/public/app/containers/Teams/TeamGroupSync.tsx @@ -0,0 +1,149 @@ +import React from 'react'; +import { hot } from 'react-hot-loader'; +import { observer } from 'mobx-react'; +import { ITeam, ITeamGroup } from 'app/stores/TeamsStore/TeamsStore'; +import SlideDown from 'app/core/components/Animations/SlideDown'; +import Tooltip from 'app/core/components/Tooltip/Tooltip'; + +interface Props { + team: ITeam; +} + +interface State { + isAdding: boolean; + newGroupId?: string; +} + +const headerTooltip = `Sync LDAP or OAuth groups with your Grafana teams.`; + +@observer +export class TeamGroupSync extends React.Component { + constructor(props) { + super(props); + this.state = { isAdding: false, newGroupId: '' }; + } + + componentDidMount() { + this.props.team.loadGroups(); + } + + renderGroup(group: ITeamGroup) { + return ( + + {group.groupId} + + this.onRemoveGroup(group)}> + + + + + ); + } + + onToggleAdding = () => { + this.setState({ isAdding: !this.state.isAdding }); + }; + + onNewGroupIdChanged = evt => { + this.setState({ newGroupId: evt.target.value }); + }; + + onAddGroup = () => { + this.props.team.addGroup(this.state.newGroupId); + this.setState({ isAdding: false, newGroupId: '' }); + }; + + onRemoveGroup = (group: ITeamGroup) => { + this.props.team.removeGroup(group.groupId); + }; + + isNewGroupValid() { + return this.state.newGroupId.length > 1; + } + + render() { + const { isAdding, newGroupId } = this.state; + const groups = this.props.team.groups.values(); + + return ( +
    +
    +

    External group sync

    + + + +
    + {groups.length > 0 && ( + + )} +
    + + +
    + +
    Add External Group
    +
    +
    + +
    + +
    + +
    +
    +
    +
    + + {groups.length === 0 && + !isAdding && ( +
    +
    There are no external groups to sync with
    + +
    + {headerTooltip} + + Learn more + +
    +
    + )} + + {groups.length > 0 && ( +
    + + + + + + + {groups.map(group => this.renderGroup(group))} +
    External Group ID +
    +
    + )} +
    + ); + } +} + +export default hot(module)(TeamGroupSync); diff --git a/public/app/containers/Teams/TeamList.tsx b/public/app/containers/Teams/TeamList.tsx new file mode 100644 index 00000000000..4429764b1cc --- /dev/null +++ b/public/app/containers/Teams/TeamList.tsx @@ -0,0 +1,125 @@ +import React from 'react'; +import { hot } from 'react-hot-loader'; +import { inject, observer } from 'mobx-react'; +import PageHeader from 'app/core/components/PageHeader/PageHeader'; +import { NavStore } from 'app/stores/NavStore/NavStore'; +import { TeamsStore, ITeam } from 'app/stores/TeamsStore/TeamsStore'; +import { BackendSrv } from 'app/core/services/backend_srv'; +import appEvents from 'app/core/app_events'; + +interface Props { + nav: typeof NavStore.Type; + teams: typeof TeamsStore.Type; + backendSrv: BackendSrv; +} + +@inject('nav', 'teams') +@observer +export class TeamList extends React.Component { + constructor(props) { + super(props); + + this.props.nav.load('cfg', 'teams'); + this.fetchTeams(); + } + + fetchTeams() { + this.props.teams.loadTeams(); + } + + deleteTeam(team: ITeam) { + appEvents.emit('confirm-modal', { + title: 'Delete', + text: 'Are you sure you want to delete Team ' + team.name + '?', + yesText: 'Delete', + icon: 'fa-warning', + onConfirm: () => { + this.deleteTeamConfirmed(team); + }, + }); + } + + deleteTeamConfirmed(team) { + this.props.backendSrv.delete('/api/teams/' + team.id).then(this.fetchTeams.bind(this)); + } + + onSearchQueryChange = evt => { + this.props.teams.setSearchQuery(evt.target.value); + }; + + renderTeamMember(team: ITeam): JSX.Element { + let teamUrl = `org/teams/edit/${team.id}`; + + return ( + + + + + + + + {team.name} + + + {team.email} + + + {team.memberCount} + + + this.deleteTeam(team)} className="btn btn-danger btn-small"> + + + + + ); + } + + render() { + const { nav, teams } = this.props; + return ( +
    + +
    +
    +
    + +
    + + + +
    + + + + + + + + + {teams.filteredTeams.map(team => this.renderTeamMember(team))} +
    + NameEmailMembers +
    +
    +
    +
    + ); + } +} + +export default hot(module)(TeamList); diff --git a/public/app/containers/Teams/TeamMembers.tsx b/public/app/containers/Teams/TeamMembers.tsx new file mode 100644 index 00000000000..0d0762469a0 --- /dev/null +++ b/public/app/containers/Teams/TeamMembers.tsx @@ -0,0 +1,144 @@ +import React from 'react'; +import { hot } from 'react-hot-loader'; +import { observer } from 'mobx-react'; +import { ITeam, ITeamMember } from 'app/stores/TeamsStore/TeamsStore'; +import appEvents from 'app/core/app_events'; +import SlideDown from 'app/core/components/Animations/SlideDown'; +import { UserPicker, User } from 'app/core/components/Picker/UserPicker'; + +interface Props { + team: ITeam; +} + +interface State { + isAdding: boolean; + newTeamMember?: User; +} + +@observer +export class TeamMembers extends React.Component { + constructor(props) { + super(props); + this.state = { isAdding: false, newTeamMember: null }; + } + + componentDidMount() { + this.props.team.loadMembers(); + } + + onSearchQueryChange = evt => { + this.props.team.setSearchQuery(evt.target.value); + }; + + removeMember(member: ITeamMember) { + appEvents.emit('confirm-modal', { + title: 'Remove Member', + text: 'Are you sure you want to remove ' + member.login + ' from this group?', + yesText: 'Remove', + icon: 'fa-warning', + onConfirm: () => { + this.removeMemberConfirmed(member); + }, + }); + } + + removeMemberConfirmed(member: ITeamMember) { + this.props.team.removeMember(member); + } + + renderMember(member: ITeamMember) { + return ( + + + + + {member.login} + {member.email} + + this.removeMember(member)} className="btn btn-danger btn-mini"> + + + + + ); + } + + onToggleAdding = () => { + this.setState({ isAdding: !this.state.isAdding }); + }; + + onUserSelected = (user: User) => { + this.setState({ newTeamMember: user }); + }; + + onAddUserToTeam = async () => { + await this.props.team.addMember(this.state.newTeamMember.id); + await this.props.team.loadMembers(); + this.setState({ newTeamMember: null }); + }; + + render() { + const { newTeamMember, isAdding } = this.state; + const members = this.props.team.members.values(); + const newTeamMemberValue = newTeamMember && newTeamMember.id.toString(); + + return ( +
    +
    +
    + +
    + +
    + + +
    + + +
    + +
    Add Team Member
    +
    + + + {this.state.newTeamMember && ( + + )} +
    +
    +
    + +
    + + + + + + + + {members.map(member => this.renderMember(member))} +
    + NameEmail +
    +
    +
    + ); + } +} + +export default hot(module)(TeamMembers); diff --git a/public/app/containers/Teams/TeamPages.tsx b/public/app/containers/Teams/TeamPages.tsx new file mode 100644 index 00000000000..500a7cbe5e8 --- /dev/null +++ b/public/app/containers/Teams/TeamPages.tsx @@ -0,0 +1,77 @@ +import React from 'react'; +import _ from 'lodash'; +import { hot } from 'react-hot-loader'; +import { inject, observer } from 'mobx-react'; +import config from 'app/core/config'; +import PageHeader from 'app/core/components/PageHeader/PageHeader'; +import { NavStore } from 'app/stores/NavStore/NavStore'; +import { TeamsStore, ITeam } from 'app/stores/TeamsStore/TeamsStore'; +import { ViewStore } from 'app/stores/ViewStore/ViewStore'; +import TeamMembers from './TeamMembers'; +import TeamSettings from './TeamSettings'; +import TeamGroupSync from './TeamGroupSync'; + +interface Props { + nav: typeof NavStore.Type; + teams: typeof TeamsStore.Type; + view: typeof ViewStore.Type; +} + +@inject('nav', 'teams', 'view') +@observer +export class TeamPages extends React.Component { + isSyncEnabled: boolean; + currentPage: string; + + constructor(props) { + super(props); + + this.isSyncEnabled = config.buildInfo.isEnterprise; + this.currentPage = this.getCurrentPage(); + + this.loadTeam(); + } + + async loadTeam() { + const { teams, nav, view } = this.props; + + await teams.loadById(view.routeParams.get('id')); + + nav.initTeamPage(this.getCurrentTeam(), this.currentPage, this.isSyncEnabled); + } + + getCurrentTeam(): ITeam { + const { teams, view } = this.props; + return teams.map.get(view.routeParams.get('id')); + } + + getCurrentPage() { + const pages = ['members', 'settings', 'groupsync']; + const currentPage = this.props.view.routeParams.get('page'); + return _.includes(pages, currentPage) ? currentPage : pages[0]; + } + + render() { + const { nav } = this.props; + const currentTeam = this.getCurrentTeam(); + + if (!nav.main) { + return null; + } + + return ( +
    + + {currentTeam && ( +
    + {this.currentPage === 'members' && } + {this.currentPage === 'settings' && } + {this.currentPage === 'groupsync' && this.isSyncEnabled && } +
    + )} +
    + ); + } +} + +export default hot(module)(TeamPages); diff --git a/public/app/containers/Teams/TeamSettings.tsx b/public/app/containers/Teams/TeamSettings.tsx new file mode 100644 index 00000000000..142088a5d1e --- /dev/null +++ b/public/app/containers/Teams/TeamSettings.tsx @@ -0,0 +1,69 @@ +import React from 'react'; +import { hot } from 'react-hot-loader'; +import { observer } from 'mobx-react'; +import { ITeam } from 'app/stores/TeamsStore/TeamsStore'; +import { Label } from 'app/core/components/Forms/Forms'; + +interface Props { + team: ITeam; +} + +@observer +export class TeamSettings extends React.Component { + constructor(props) { + super(props); + } + + onChangeName = evt => { + this.props.team.setName(evt.target.value); + }; + + onChangeEmail = evt => { + this.props.team.setEmail(evt.target.value); + }; + + onUpdate = evt => { + evt.preventDefault(); + this.props.team.update(); + }; + + render() { + return ( +
    +

    Team Settings

    +
    +
    + + +
    +
    + + +
    + +
    + +
    +
    +
    + ); + } +} + +export default hot(module)(TeamSettings); diff --git a/public/app/core/angular_wrappers.ts b/public/app/core/angular_wrappers.ts index ace0eb00b07..a4439509f8e 100644 --- a/public/app/core/angular_wrappers.ts +++ b/public/app/core/angular_wrappers.ts @@ -5,7 +5,6 @@ import EmptyListCTA from './components/EmptyListCTA/EmptyListCTA'; import LoginBackground from './components/Login/LoginBackground'; import { SearchResult } from './components/search/SearchResult'; import { TagFilter } from './components/TagFilter/TagFilter'; -import UserPicker from './components/Picker/UserPicker'; import DashboardPermissions from './components/Permissions/DashboardPermissions'; export function registerAngularDirectives() { @@ -19,6 +18,5 @@ export function registerAngularDirectives() { ['onSelect', { watchDepth: 'reference' }], ['tagOptions', { watchDepth: 'reference' }], ]); - react2AngularDirective('selectUserPicker', UserPicker, ['backendSrv', 'handlePicked']); react2AngularDirective('dashboardPermissions', DashboardPermissions, ['backendSrv', 'dashboardId', 'folder']); } diff --git a/public/app/core/components/Forms/Forms.tsx b/public/app/core/components/Forms/Forms.tsx new file mode 100644 index 00000000000..4b74d48ba08 --- /dev/null +++ b/public/app/core/components/Forms/Forms.tsx @@ -0,0 +1,21 @@ +import React, { SFC, ReactNode } from 'react'; +import Tooltip from '../Tooltip/Tooltip'; + +interface Props { + tooltip?: string; + for?: string; + children: ReactNode; +} + +export const Label: SFC = props => { + return ( + + {props.children} + {props.tooltip && ( + + + + )} + + ); +}; diff --git a/public/app/core/components/Permissions/AddPermissions.jest.tsx b/public/app/core/components/Permissions/AddPermissions.jest.tsx index fe97c4c7e62..513a22ddea4 100644 --- a/public/app/core/components/Permissions/AddPermissions.jest.tsx +++ b/public/app/core/components/Permissions/AddPermissions.jest.tsx @@ -1,32 +1,32 @@ -import React from 'react'; +import React from 'react'; +import { shallow } from 'enzyme'; import AddPermissions from './AddPermissions'; import { RootStore } from 'app/stores/RootStore/RootStore'; -import { backendSrv } from 'test/mocks/common'; -import { shallow } from 'enzyme'; +import { getBackendSrv } from 'app/core/services/backend_srv'; + +jest.mock('app/core/services/backend_srv', () => ({ + getBackendSrv: () => { + return { + get: () => { + return Promise.resolve([ + { id: 2, dashboardId: 1, role: 'Viewer', permission: 1, permissionName: 'View' }, + { id: 3, dashboardId: 1, role: 'Editor', permission: 1, permissionName: 'Edit' }, + ]); + }, + post: jest.fn(() => Promise.resolve({})), + }; + }, +})); describe('AddPermissions', () => { let wrapper; let store; let instance; + let backendSrv: any = getBackendSrv(); beforeAll(() => { - backendSrv.get.mockReturnValue( - Promise.resolve([ - { id: 2, dashboardId: 1, role: 'Viewer', permission: 1, permissionName: 'View' }, - { id: 3, dashboardId: 1, role: 'Editor', permission: 1, permissionName: 'Edit' }, - ]) - ); - - backendSrv.post = jest.fn(() => Promise.resolve({})); - - store = RootStore.create( - {}, - { - backendSrv: backendSrv, - } - ); - - wrapper = shallow(); + store = RootStore.create({}, { backendSrv: backendSrv }); + wrapper = shallow(); instance = wrapper.instance(); return store.permissions.load(1, true, false); }); @@ -43,8 +43,8 @@ describe('AddPermissions', () => { login: 'user2', }; - instance.typeChanged(evt); - instance.userPicked(userItem); + instance.onTypeChanged(evt); + 
instance.onUserSelected(userItem); wrapper.update(); @@ -70,8 +70,8 @@ describe('AddPermissions', () => { name: 'ug1', }; - instance.typeChanged(evt); - instance.teamPicked(teamItem); + instance.onTypeChanged(evt); + instance.onTeamSelected(teamItem); wrapper.update(); diff --git a/public/app/core/components/Permissions/AddPermissions.tsx b/public/app/core/components/Permissions/AddPermissions.tsx index 4dcd07ffb48..289e27aa731 100644 --- a/public/app/core/components/Permissions/AddPermissions.tsx +++ b/public/app/core/components/Permissions/AddPermissions.tsx @@ -1,24 +1,19 @@ -import React, { Component } from 'react'; +import React, { Component } from 'react'; import { observer } from 'mobx-react'; import { aclTypes } from 'app/stores/PermissionsStore/PermissionsStore'; -import UserPicker, { User } from 'app/core/components/Picker/UserPicker'; -import TeamPicker, { Team } from 'app/core/components/Picker/TeamPicker'; +import { UserPicker, User } from 'app/core/components/Picker/UserPicker'; +import { TeamPicker, Team } from 'app/core/components/Picker/TeamPicker'; import DescriptionPicker, { OptionWithDescription } from 'app/core/components/Picker/DescriptionPicker'; import { permissionOptions } from 'app/stores/PermissionsStore/PermissionsStore'; -export interface IProps { +export interface Props { permissions: any; - backendSrv: any; } + @observer -class AddPermissions extends Component { +class AddPermissions extends Component { constructor(props) { super(props); - this.userPicked = this.userPicked.bind(this); - this.teamPicked = this.teamPicked.bind(this); - this.permissionPicked = this.permissionPicked.bind(this); - this.typeChanged = this.typeChanged.bind(this); - this.handleSubmit = this.handleSubmit.bind(this); } componentWillMount() { @@ -26,49 +21,49 @@ class AddPermissions extends Component { permissions.resetNewType(); } - typeChanged(evt) { + onTypeChanged = evt => { const { value } = evt.target; const { permissions } = this.props; permissions.setNewType(value); - } + }; - userPicked(user: User) { + onUserSelected = (user: User) => { const { permissions } = this.props; if (!user) { permissions.newItem.setUser(null, null); return; } return permissions.newItem.setUser(user.id, user.login, user.avatarUrl); - } + }; - teamPicked(team: Team) { + onTeamSelected = (team: Team) => { const { permissions } = this.props; if (!team) { permissions.newItem.setTeam(null, null); return; } return permissions.newItem.setTeam(team.id, team.name, team.avatarUrl); - } + }; - permissionPicked(permission: OptionWithDescription) { + onPermissionChanged = (permission: OptionWithDescription) => { const { permissions } = this.props; return permissions.newItem.setPermission(permission.value); - } + }; resetNewType() { const { permissions } = this.props; return permissions.resetNewType(); } - handleSubmit(evt) { + onSubmit = evt => { evt.preventDefault(); const { permissions } = this.props; permissions.addStoreItem(); - } + }; render() { - const { permissions, backendSrv } = this.props; + const { permissions } = this.props; const newItem = permissions.newItem; const pickerClassName = 'width-20'; @@ -79,12 +74,12 @@ class AddPermissions extends Component { -
[JSX lost in extraction for this hunk and for the two list-item components that follow. Recoverable changes: the 'Add Permission For' heading is unchanged; the form's submit handler moves from this.handleSubmit to this.onSubmit; the acl-type select (an aclTypes.map over the store's options) has its change handler moved from this.typeChanged to this.onTypeChanged; and the picker callbacks move from this.userPicked / this.teamPicked / this.permissionPicked to this.onUserSelected / this.onTeamSelected / this.onPermissionChanged. A disabled DescriptionPicker swaps handlePicked={() => {}} for onSelected={() => {}} while keeping value={item.permission} disabled={true} className={'gf-form-input--form-dropdown-right'}.]
diff --git a/public/app/core/components/Permissions/PermissionsListItem.tsx b/public/app/core/components/Permissions/PermissionsListItem.tsx index b0158525d52..a17aa8c04df 100644 --- a/public/app/core/components/Permissions/PermissionsListItem.tsx +++ b/public/app/core/components/Permissions/PermissionsListItem.tsx @@ -68,7 +68,7 @@ export default observer(({ item, removeItem, permissionChanged, itemIndex, folde
    void; + onSelected: (permission) => void; value: number; disabled: boolean; className?: string; @@ -16,14 +16,14 @@ export interface OptionWithDescription { description: string; } -class DescriptionPicker extends Component { +class DescriptionPicker extends Component { constructor(props) { super(props); this.state = {}; } render() { - const { optionsWithDesc, handlePicked, value, disabled, className } = this.props; + const { optionsWithDesc, onSelected, value, disabled, className } = this.props; return (
    @@ -34,7 +34,7 @@ class DescriptionPicker extends Component { clearable={false} labelKey="label" options={optionsWithDesc} - onChange={handlePicked} + onChange={onSelected} className={`width-7 gf-form-input gf-form-input--form-dropdown ${className || ''}`} optionComponent={DescriptionOption} placeholder="Choose" diff --git a/public/app/core/components/Picker/TeamPicker.jest.tsx b/public/app/core/components/Picker/TeamPicker.jest.tsx index 20b7620e0ac..3db9f7bb4eb 100644 --- a/public/app/core/components/Picker/TeamPicker.jest.tsx +++ b/public/app/core/components/Picker/TeamPicker.jest.tsx @@ -1,19 +1,23 @@ -import React from 'react'; +import React from 'react'; import renderer from 'react-test-renderer'; -import TeamPicker from './TeamPicker'; +import { TeamPicker } from './TeamPicker'; -const model = { - backendSrv: { - get: () => { - return new Promise((resolve, reject) => {}); - }, +jest.mock('app/core/services/backend_srv', () => ({ + getBackendSrv: () => { + return { + get: () => { + return Promise.resolve([]); + }, + }; }, - handlePicked: () => {}, -}; +})); describe('TeamPicker', () => { it('renders correctly', () => { - const tree = renderer.create().toJSON(); + const props = { + onSelected: () => {}, + }; + const tree = renderer.create().toJSON(); expect(tree).toMatchSnapshot(); }); }); diff --git a/public/app/core/components/Picker/TeamPicker.tsx b/public/app/core/components/Picker/TeamPicker.tsx index 2dfff1850dd..04f108ff8da 100644 --- a/public/app/core/components/Picker/TeamPicker.tsx +++ b/public/app/core/components/Picker/TeamPicker.tsx @@ -1,18 +1,19 @@ -import React, { Component } from 'react'; +import React, { Component } from 'react'; import Select from 'react-select'; import PickerOption from './PickerOption'; -import withPicker from './withPicker'; import { debounce } from 'lodash'; +import { getBackendSrv } from 'app/core/services/backend_srv'; -export interface IProps { - backendSrv: any; - isLoading: boolean; - toggleLoading: any; - handlePicked: (user) => void; +export interface Props { + onSelected: (team: Team) => void; value?: string; className?: string; } +export interface State { + isLoading; +} + export interface Team { id: number; label: string; @@ -20,13 +21,12 @@ export interface Team { avatarUrl: string; } -class TeamPicker extends Component { +export class TeamPicker extends Component { debouncedSearch: any; - backendSrv: any; constructor(props) { super(props); - this.state = {}; + this.state = { isLoading: false }; this.search = this.search.bind(this); this.debouncedSearch = debounce(this.search, 300, { @@ -36,9 +36,9 @@ class TeamPicker extends Component { } search(query?: string) { - const { toggleLoading, backendSrv } = this.props; + const backendSrv = getBackendSrv(); + this.setState({ isLoading: true }); - toggleLoading(true); return backendSrv.get(`/api/teams/search?perpage=10&page=1&query=${query}`).then(result => { const teams = result.teams.map(team => { return { @@ -49,18 +49,18 @@ class TeamPicker extends Component { }; }); - toggleLoading(false); + this.setState({ isLoading: false }); return { options: teams }; }); } render() { - const AsyncComponent = this.state.creatable ? Select.AsyncCreatable : Select.Async; - const { isLoading, handlePicked, value, className } = this.props; + const { onSelected, value, className } = this.props; + const { isLoading } = this.state; return (
    - { loadOptions={this.debouncedSearch} loadingPlaceholder="Loading..." noResultsText="No teams found" - onChange={handlePicked} + onChange={onSelected} className={`gf-form-input gf-form-input--form-dropdown ${className || ''}`} optionComponent={PickerOption} - placeholder="Choose" + placeholder="Select a team" value={value} autosize={true} /> @@ -80,5 +80,3 @@ class TeamPicker extends Component { ); } } - -export default withPicker(TeamPicker); diff --git a/public/app/core/components/Picker/UserPicker.jest.tsx b/public/app/core/components/Picker/UserPicker.jest.tsx index 756fa2d9801..054ca643700 100644 --- a/public/app/core/components/Picker/UserPicker.jest.tsx +++ b/public/app/core/components/Picker/UserPicker.jest.tsx @@ -1,19 +1,20 @@ -import React from 'react'; +import React from 'react'; import renderer from 'react-test-renderer'; -import UserPicker from './UserPicker'; +import { UserPicker } from './UserPicker'; -const model = { - backendSrv: { - get: () => { - return new Promise((resolve, reject) => {}); - }, +jest.mock('app/core/services/backend_srv', () => ({ + getBackendSrv: () => { + return { + get: () => { + return Promise.resolve([]); + }, + }; }, - handlePicked: () => {}, -}; +})); describe('UserPicker', () => { it('renders correctly', () => { - const tree = renderer.create().toJSON(); + const tree = renderer.create( {}} />).toJSON(); expect(tree).toMatchSnapshot(); }); }); diff --git a/public/app/core/components/Picker/UserPicker.tsx b/public/app/core/components/Picker/UserPicker.tsx index 77bf6c1fe15..e50513c44e1 100644 --- a/public/app/core/components/Picker/UserPicker.tsx +++ b/public/app/core/components/Picker/UserPicker.tsx @@ -1,18 +1,19 @@ import React, { Component } from 'react'; import Select from 'react-select'; import PickerOption from './PickerOption'; -import withPicker from './withPicker'; import { debounce } from 'lodash'; +import { getBackendSrv } from 'app/core/services/backend_srv'; -export interface IProps { - backendSrv: any; - isLoading: boolean; - toggleLoading: any; - handlePicked: (user) => void; +export interface Props { + onSelected: (user: User) => void; value?: string; className?: string; } +export interface State { + isLoading: boolean; +} + export interface User { id: number; label: string; @@ -20,13 +21,12 @@ export interface User { login: string; } -class UserPicker extends Component { +export class UserPicker extends Component { debouncedSearch: any; - backendSrv: any; constructor(props) { super(props); - this.state = {}; + this.state = { isLoading: false }; this.search = this.search.bind(this); this.debouncedSearch = debounce(this.search, 300, { @@ -36,29 +36,34 @@ class UserPicker extends Component { } search(query?: string) { - const { toggleLoading, backendSrv } = this.props; + const backendSrv = getBackendSrv(); - toggleLoading(true); - return backendSrv.get(`/api/org/users?query=${query}&limit=10`).then(result => { - const users = result.map(user => { + this.setState({ isLoading: true }); + + return backendSrv + .get(`/api/org/users?query=${query}&limit=10`) + .then(result => { return { - id: user.userId, - label: `${user.login} - ${user.email}`, - avatarUrl: user.avatarUrl, - login: user.login, + options: result.map(user => ({ + id: user.userId, + label: `${user.login} - ${user.email}`, + avatarUrl: user.avatarUrl, + login: user.login, + })), }; + }) + .finally(() => { + this.setState({ isLoading: false }); }); - toggleLoading(false); - return { options: users }; - }); } render() { - const AsyncComponent = this.state.creatable ? 
Select.AsyncCreatable : Select.Async; - const { isLoading, handlePicked, value, className } = this.props; + const { value, className } = this.props; + const { isLoading } = this.state; + return (
- { loadOptions={this.debouncedSearch} loadingPlaceholder="Loading..." noResultsText="No users found" - onChange={handlePicked} + onChange={this.props.onSelected} className={`gf-form-input gf-form-input--form-dropdown ${className || ''}`} optionComponent={PickerOption} - placeholder="Choose" + placeholder="Select user" value={value} autosize={true} /> @@ -78,5 +83,3 @@ class UserPicker extends Component { ); } } - -export default withPicker(UserPicker); diff --git a/public/app/core/components/Picker/withPicker.tsx b/public/app/core/components/Picker/withPicker.tsx deleted file mode 100644 index 838ef927c30..00000000000 --- a/public/app/core/components/Picker/withPicker.tsx +++ /dev/null @@ -1,34 +0,0 @@ -import React, { Component } from 'react'; - -export interface IProps { - backendSrv: any; - handlePicked: (data) => void; - value?: string; - className?: string; -} - -export default function withPicker(WrappedComponent) { - return class WithPicker extends Component { - constructor(props) { - super(props); - this.toggleLoading = this.toggleLoading.bind(this); - - this.state = { - isLoading: false, - }; - } - - toggleLoading(isLoading) { - this.setState(prevState => { - return { - ...prevState, - isLoading: isLoading, - }; - }); - } - - render() { - return ; - } - }; -} diff --git a/public/app/core/components/grafana_app.ts b/public/app/core/components/grafana_app.ts index 89f25776a40..bd6b6975006 100644 --- a/public/app/core/components/grafana_app.ts +++ b/public/app/core/components/grafana_app.ts @@ -8,7 +8,7 @@ import appEvents from 'app/core/app_events'; import Drop from 'tether-drop'; import { createStore } from 'app/stores/store'; import colors from 'app/core/utils/colors'; -import { BackendSrv } from 'app/core/services/backend_srv'; +import { BackendSrv, setBackendSrv } from 'app/core/services/backend_srv'; import { DatasourceSrv } from 'app/features/plugins/datasource_srv'; export class GrafanaCtrl { @@ -24,6 +24,8 @@ export class GrafanaCtrl { backendSrv: BackendSrv, datasourceSrv: DatasourceSrv ) { + // sets singleton instances for angular services so react components can access them + setBackendSrv(backendSrv); createStore({ backendSrv, datasourceSrv }); $scope.init = function() { @@ -199,7 +201,7 @@ export function grafanaAppDirective(playlistSrv, contextSrv, $timeout, $rootScop body.mousemove(userActivityDetected); body.keydown(userActivityDetected); // set useCapture = true to catch event here - document.addEventListener('wheel', userActivityDetected, true); + document.addEventListener('wheel', userActivityDetected, { capture: true, passive: true }); // treat tab change as activity document.addEventListener('visibilitychange', userActivityDetected); diff --git a/public/app/core/components/manage_dashboards/manage_dashboards.html b/public/app/core/components/manage_dashboards/manage_dashboards.html index 2dfb9c96d1b..92946b866a1 100644 --- a/public/app/core/components/manage_dashboards/manage_dashboards.html +++ b/public/app/core/components/manage_dashboards/manage_dashboards.html @@ -5,7 +5,7 @@
    - + Dashboard @@ -13,6 +13,10 @@ Folder + + + Import +
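A note on the grafana_app.ts change above: GrafanaCtrl calling setBackendSrv(backendSrv) at bootstrap is what makes the getBackendSrv() accessor added to backend_srv.ts later in this diff usable from React. Below is a minimal sketch, not from the patch, of how a React component can then reach the Angular service without the backendSrv prop the old pickers required; the TeamList component is hypothetical, while the import path and the /api/teams/search endpoint are taken from the diff itself:

import React, { Component } from 'react';
import { getBackendSrv } from 'app/core/services/backend_srv';

interface State {
  teams: any[];
}

// Hypothetical component: reads teams through the shared backendSrv singleton.
export class TeamList extends Component<{}, State> {
  state: State = { teams: [] };

  componentDidMount() {
    // getBackendSrv() returns the instance GrafanaCtrl registered at bootstrap,
    // so any component mounted after app init can use it without prop drilling.
    getBackendSrv()
      .get('/api/teams/search?perpage=10&page=1&query=')
      .then(result => this.setState({ teams: result.teams }));
  }

  render() {
    return (
      <ul>
        {this.state.teams.map(team => (
          <li key={team.id}>{team.name}</li>
        ))}
      </ul>
    );
  }
}

The trade-off is the one this diff accepts for TeamPicker and UserPicker: components pick up an implicit dependency on bootstrap order in exchange for dropping backendSrv from every props interface and from the react2AngularDirective bindings.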
    diff --git a/public/app/core/components/manage_dashboards/manage_dashboards.ts b/public/app/core/components/manage_dashboards/manage_dashboards.ts index 545119a80d7..86cd3066c48 100644 --- a/public/app/core/components/manage_dashboards/manage_dashboards.ts +++ b/public/app/core/components/manage_dashboards/manage_dashboards.ts @@ -42,9 +42,12 @@ export class ManageDashboardsCtrl { // if user has editor role or higher isEditor: boolean; + hasEditPermissionInFolders: boolean; + /** @ngInject */ constructor(private backendSrv, navModelSrv, private searchSrv: SearchSrv, private contextSrv) { this.isEditor = this.contextSrv.isEditor; + this.hasEditPermissionInFolders = this.contextSrv.hasEditPermissionInFolders; this.query = { query: '', @@ -80,6 +83,9 @@ export class ManageDashboardsCtrl { return this.backendSrv.getFolderByUid(this.folderUid).then(folder => { this.canSave = folder.canSave; + if (!this.canSave) { + this.hasEditPermissionInFolders = false; + } }); }); } @@ -288,6 +294,16 @@ export class ManageDashboardsCtrl { return url; } + + importDashboardUrl() { + let url = 'dashboard/import'; + + if (this.folderId) { + url += `?folderId=${this.folderId}`; + } + + return url; + } } export function manageDashboardsDirective() { diff --git a/public/app/core/components/scroll/page_scroll.ts b/public/app/core/components/scroll/page_scroll.ts index e6db344a4d6..b6603f06175 100644 --- a/public/app/core/components/scroll/page_scroll.ts +++ b/public/app/core/components/scroll/page_scroll.ts @@ -29,11 +29,13 @@ export function pageScrollbar() { scope.$on('$routeChangeSuccess', () => { lastPos = 0; elem[0].scrollTop = 0; - elem[0].focus(); + // Focus page to enable scrolling by keyboard + elem[0].focus({ preventScroll: true }); }); elem[0].tabIndex = -1; - elem[0].focus(); + // Focus page to enable scrolling by keyboard + elem[0].focus({ preventScroll: true }); }, }; } diff --git a/public/app/core/components/search/search.html b/public/app/core/components/search/search.html index afb9e723cad..8723d5d0584 100644 --- a/public/app/core/components/search/search.html +++ b/public/app/core/components/search/search.html @@ -45,18 +45,18 @@
- diff --git a/public/app/core/components/search/search.ts b/public/app/core/components/search/search.ts index 25e05c2139d..162eeb1b9f3 100644 --- a/public/app/core/components/search/search.ts +++ b/public/app/core/components/search/search.ts @@ -17,6 +17,7 @@ export class SearchCtrl { isLoading: boolean; initialFolderFilterTitle: string; isEditor: string; + hasEditPermissionInFolders: boolean; /** @ngInject */ constructor($scope, private $location, private $timeout, private searchSrv: SearchSrv) { @@ -27,6 +28,7 @@ this.getTags = this.getTags.bind(this); this.onTagSelect = this.onTagSelect.bind(this); this.isEditor = contextSrv.isEditor; + this.hasEditPermissionInFolders = contextSrv.hasEditPermissionInFolders; } closeSearch() { diff --git a/public/app/core/components/search/search_results.ts b/public/app/core/components/search/search_results.ts index 273af224660..35ee1365e22 100644 --- a/public/app/core/components/search/search_results.ts +++ b/public/app/core/components/search/search_results.ts @@ -63,7 +63,8 @@ export class SearchResultsCtrl { } onItemClick(item) { - if (this.$location.path().indexOf(item.url) > -1) { + // Check if either path contains the other + if (this.$location.path().indexOf(item.url) > -1 || item.url.indexOf(this.$location.path()) > -1) { appEvents.emit('hide-dash-search'); } } diff --git a/public/app/core/components/team_picker.ts b/public/app/core/components/team_picker.ts deleted file mode 100644 index 228767a76c4..00000000000 --- a/public/app/core/components/team_picker.ts +++ /dev/null @@ -1,64 +0,0 @@ -import coreModule from 'app/core/core_module'; -import _ from 'lodash'; - -const template = ` - -`; -export class TeamPickerCtrl { - group: any; - teamPicked: any; - debouncedSearchGroups: any; - - /** @ngInject */ - constructor(private backendSrv) { - this.debouncedSearchGroups = _.debounce(this.searchGroups, 500, { - leading: true, - trailing: false, - }); - this.reset(); - } - - reset() { - this.group = { text: 'Choose', value: null }; - } - - searchGroups(query: string) { - return Promise.resolve( - this.backendSrv.get('/api/teams/search?perpage=10&page=1&query=' + query).then(result => { - return _.map(result.teams, ug => { - return { text: ug.name, value: ug }; - }); - }) - ); - } - - onChange(option) { - this.teamPicked({ $group: option.value }); - } -} - -export function teamPicker() { - return { - restrict: 'E', - template: template, - controller: TeamPickerCtrl, - bindToController: true, - controllerAs: 'ctrl', - scope: { - teamPicked: '&', - }, - link: function(scope, elem, attrs, ctrl) { - scope.$on('team-picker-reset', () => { - ctrl.reset(); - }); - }, - }; -} - -coreModule.directive('teamPicker', teamPicker); diff --git a/public/app/core/components/user_picker.ts b/public/app/core/components/user_picker.ts deleted file mode 100644 index 606ded09885..00000000000 --- a/public/app/core/components/user_picker.ts +++ /dev/null @@ -1,71 +0,0 @@ -import coreModule from 'app/core/core_module'; -import _ from 'lodash'; - -const template = ` - -`; -export class UserPickerCtrl { - user: any; - debouncedSearchUsers: any; - userPicked: any; - - /** @ngInject */ - constructor(private backendSrv) { - this.reset(); - this.debouncedSearchUsers = _.debounce(this.searchUsers, 500, { - leading: true, - trailing: false, - }); - } - - searchUsers(query: string) { - return Promise.resolve( - this.backendSrv.get('/api/users/search?perpage=10&page=1&query=' + query).then(result => { - return _.map(result.users, user => { - return {
text: user.login + ' - ' + user.email, value: user }; - }); - }) - ); - } - - onChange(option) { - this.userPicked({ $user: option.value }); - } - - reset() { - this.user = { text: 'Choose', value: null }; - } -} - -export interface User { - id: number; - name: string; - login: string; - email: string; -} - -export function userPicker() { - return { - restrict: 'E', - template: template, - controller: UserPickerCtrl, - bindToController: true, - controllerAs: 'ctrl', - scope: { - userPicked: '&', - }, - link: function(scope, elem, attrs, ctrl) { - scope.$on('user-picker-reset', () => { - ctrl.reset(); - }); - }, - }; -} - -coreModule.directive('userPicker', userPicker); diff --git a/public/app/core/config.ts b/public/app/core/config.ts index 91b1cfef3a4..e065ddb22fb 100644 --- a/public/app/core/config.ts +++ b/public/app/core/config.ts @@ -1,11 +1,18 @@ import _ from 'lodash'; -class Settings { +export interface BuildInfo { + version: string; + commit: string; + isEnterprise: boolean; + env: string; +} + +export class Settings { datasources: any; panels: any; appSubUrl: string; window_title_prefix: string; - buildInfo: any; + buildInfo: BuildInfo; new_panel_title: string; bootData: any; externalUserMngLinkUrl: string; @@ -16,6 +23,7 @@ class Settings { defaultDatasource: string; alertingEnabled: boolean; authProxyEnabled: boolean; + exploreEnabled: boolean; ldapEnabled: boolean; oauth: any; disableUserSignUp: boolean; @@ -31,7 +39,14 @@ class Settings { playlist_timespan: '1m', unsaved_changes_warning: true, appSubUrl: '', + buildInfo: { + version: 'v1.0', + commit: '1', + env: 'production', + isEnterprise: false, + }, }; + _.extend(this, defaults, options); } } diff --git a/public/app/core/controllers/login_ctrl.ts b/public/app/core/controllers/login_ctrl.ts index 313fc2efa1a..0a66f83d08a 100644 --- a/public/app/core/controllers/login_ctrl.ts +++ b/public/app/core/controllers/login_ctrl.ts @@ -11,10 +11,15 @@ export class LoginCtrl { password: '', }; + $scope.command = {}; + $scope.result = ''; + contextSrv.sidemenu = false; $scope.oauth = config.oauth; $scope.oauthEnabled = _.keys(config.oauth).length > 0; + $scope.ldapEnabled = config.ldapEnabled; + $scope.authProxyEnabled = config.authProxyEnabled; $scope.disableLoginForm = config.disableLoginForm; $scope.disableUserSignUp = config.disableUserSignUp; @@ -39,6 +44,43 @@ export class LoginCtrl { } }; + $scope.changeView = function() { + let loginView = document.querySelector('#login-view'); + let changePasswordView = document.querySelector('#change-password-view'); + + loginView.className += ' add'; + setTimeout(() => { + loginView.className += ' hidden'; + }, 250); + setTimeout(() => { + changePasswordView.classList.remove('hidden'); + }, 251); + setTimeout(() => { + changePasswordView.classList.remove('remove'); + }, 301); + + setTimeout(() => { + document.getElementById('newPassword').focus(); + }, 400); + }; + + $scope.changePassword = function() { + $scope.command.oldPassword = 'admin'; + + if ($scope.command.newPassword !== $scope.command.confirmNew) { + $scope.appEvent('alert-warning', ['New passwords do not match', '']); + return; + } + + backendSrv.put('/api/user/password', $scope.command).then(function() { + $scope.toGrafana(); + }); + }; + + $scope.skip = function() { + $scope.toGrafana(); + }; + $scope.loginModeChanged = function(newValue) { $scope.submitBtnText = newValue ? 
'Log in' : 'Sign up'; }; @@ -65,18 +107,28 @@ export class LoginCtrl { } backendSrv.post('/login', $scope.formModel).then(function(result) { - var params = $location.search(); + $scope.result = result; - if (params.redirect && params.redirect[0] === '/') { - window.location.href = config.appSubUrl + params.redirect; - } else if (result.redirectUrl) { - window.location.href = result.redirectUrl; - } else { - window.location.href = config.appSubUrl + '/'; + if ($scope.formModel.password !== 'admin' || $scope.ldapEnabled || $scope.authProxyEnabled) { + $scope.toGrafana(); + return; } + $scope.changeView(); }); }; + $scope.toGrafana = function() { + var params = $location.search(); + + if (params.redirect && params.redirect[0] === '/') { + window.location.href = config.appSubUrl + params.redirect; + } else if ($scope.result.redirectUrl) { + window.location.href = $scope.result.redirectUrl; + } else { + window.location.href = config.appSubUrl + '/'; + } + }; + $scope.init(); } } diff --git a/public/app/core/core.ts b/public/app/core/core.ts index fb7021fe883..d6088283f3b 100644 --- a/public/app/core/core.ts +++ b/public/app/core/core.ts @@ -44,8 +44,6 @@ import { KeybindingSrv } from './services/keybindingSrv'; import { helpModal } from './components/help/help'; import { JsonExplorer } from './components/json_explorer/json_explorer'; import { NavModelSrv, NavModel } from './nav_model_srv'; -import { userPicker } from './components/user_picker'; -import { teamPicker } from './components/team_picker'; import { geminiScrollbar } from './components/scroll/scroll'; import { pageScrollbar } from './components/scroll/page_scroll'; import { gfPageDirective } from './components/gf_page'; @@ -83,8 +81,6 @@ export { JsonExplorer, NavModelSrv, NavModel, - userPicker, - teamPicker, geminiScrollbar, pageScrollbar, gfPageDirective, diff --git a/public/app/core/directives/value_select_dropdown.ts b/public/app/core/directives/value_select_dropdown.ts index d6c6c3af5c5..d384904c2d8 100644 --- a/public/app/core/directives/value_select_dropdown.ts +++ b/public/app/core/directives/value_select_dropdown.ts @@ -93,7 +93,7 @@ export class ValueSelectDropdownCtrl { tagValuesPromise = this.$q.when(tag.values); } - tagValuesPromise.then(values => { + return tagValuesPromise.then(values => { tag.values = values; tag.valuesText = values.join(' + '); _.each(this.options, option => { @@ -132,7 +132,7 @@ export class ValueSelectDropdownCtrl { this.highlightIndex = (this.highlightIndex + direction) % this.search.options.length; } - selectValue(option, event, commitChange, excludeOthers) { + selectValue(option, event, commitChange?, excludeOthers?) 
{ if (!option) { return; } diff --git a/public/app/core/jquery_extended.js b/public/app/core/jquery_extended.js deleted file mode 100644 index 449afcf2019..00000000000 --- a/public/app/core/jquery_extended.js +++ /dev/null @@ -1,44 +0,0 @@ -define(['jquery', 'angular', 'lodash'], -function ($, angular, _) { - 'use strict'; - - var $win = $(window); - - $.fn.place_tt = (function () { - var defaults = { - offset: 5, - }; - - return function (x, y, opts) { - opts = $.extend(true, {}, defaults, opts); - - return this.each(function () { - var $tooltip = $(this), width, height; - - $tooltip.addClass('grafana-tooltip'); - - $("#tooltip").remove(); - $tooltip.appendTo(document.body); - - if (opts.compile) { - angular.element(document).injector().invoke(["$compile", "$rootScope", function($compile, $rootScope) { - var tmpScope = $rootScope.$new(true); - _.extend(tmpScope, opts.scopeData); - - $compile($tooltip)(tmpScope); - tmpScope.$digest(); - tmpScope.$destroy(); - }]); - } - - width = $tooltip.outerWidth(true); - height = $tooltip.outerHeight(true); - - $tooltip.css('left', x + opts.offset + width > $win.width() ? x - opts.offset - width : x + opts.offset); - $tooltip.css('top', y + opts.offset + height > $win.height() ? y - opts.offset - height : y + opts.offset); - }); - }; - })(); - - return $; -}); diff --git a/public/app/core/jquery_extended.ts b/public/app/core/jquery_extended.ts new file mode 100644 index 00000000000..7932d495ab0 --- /dev/null +++ b/public/app/core/jquery_extended.ts @@ -0,0 +1,50 @@ +import $ from 'jquery'; +import angular from 'angular'; +import _ from 'lodash'; + +var $win = $(window); + +$.fn.place_tt = (function() { + var defaults = { + offset: 5, + }; + + return function(x, y, opts) { + opts = $.extend(true, {}, defaults, opts); + + return this.each(function() { + var $tooltip = $(this), + width, + height; + + $tooltip.addClass('grafana-tooltip'); + + $('#tooltip').remove(); + $tooltip.appendTo(document.body); + + if (opts.compile) { + angular + .element(document) + .injector() + .invoke([ + '$compile', + '$rootScope', + function($compile, $rootScope) { + var tmpScope = $rootScope.$new(true); + _.extend(tmpScope, opts.scopeData); + + $compile($tooltip)(tmpScope); + tmpScope.$digest(); + tmpScope.$destroy(); + }, + ]); + } + + width = $tooltip.outerWidth(true); + height = $tooltip.outerHeight(true); + + $tooltip.css('left', x + opts.offset + width > $win.width() ? x - opts.offset - width : x + opts.offset); + $tooltip.css('top', y + opts.offset + height > $win.height() ? y - opts.offset - height : y + opts.offset); + }); + }; +})(); diff --git a/public/app/core/lodash_extended.js b/public/app/core/lodash_extended.js deleted file mode 100644 index 6487b19193e..00000000000 --- a/public/app/core/lodash_extended.js +++ /dev/null @@ -1,32 +0,0 @@ -define([ - 'lodash-src' -], -function () { - 'use strict'; - - var _ = window._; - - /* - Mixins :) - */ - _.mixin({ - move: function (array, fromIndex, toIndex) { - array.splice(toIndex, 0, array.splice(fromIndex, 1)[0]); - return array; - }, - // If variable is value, then return alt. If variable is anything else, return value; - toggle: function (variable, value, alt) { - return variable === value ? 
alt : value; - }, - toggleInOut: function(array,value) { - if(_.includes(array,value)) { - array = _.without(array,value); - } else { - array.push(value); - } - return array; - } - }); - - return _; -}); diff --git a/public/app/core/lodash_extended.ts b/public/app/core/lodash_extended.ts new file mode 100644 index 00000000000..1a8820fb0db --- /dev/null +++ b/public/app/core/lodash_extended.ts @@ -0,0 +1,11 @@ +import _ from 'lodash'; + +/* + Mixins :) +*/ +_.mixin({ + move: function(array, fromIndex, toIndex) { + array.splice(toIndex, 0, array.splice(fromIndex, 1)[0]); + return array; + }, +}); diff --git a/public/app/core/profiler.ts b/public/app/core/profiler.ts index f459c5d4557..adf2fe67cb8 100644 --- a/public/app/core/profiler.ts +++ b/public/app/core/profiler.ts @@ -9,12 +9,14 @@ export class Profiler { digestCounter: any; $rootScope: any; scopeCount: any; + window: any; init(config, $rootScope) { this.enabled = config.buildInfo.env === 'development'; this.timings = {}; this.timings.appStart = { loadStart: new Date().getTime() }; this.$rootScope = $rootScope; + this.window = window; if (!this.enabled) { return; } @@ -102,7 +104,10 @@ // add render counter to root scope // used by phantomjs render.js to know when panel has rendered this.panelsRendered = (this.panelsRendered || 0) + 1; - this.$rootScope.panelsRendered = this.panelsRendered; + + // this window variable is used by backend rendering tools to know when + // all panels have completed rendering + this.window.panelsRendered = this.panelsRendered; if (this.enabled) { panelTimings.renderEnd = new Date().getTime(); diff --git a/public/app/core/services/backend_srv.ts b/public/app/core/services/backend_srv.ts index d582b6a3b18..1aeeedef4dd 100644 --- a/public/app/core/services/backend_srv.ts +++ b/public/app/core/services/backend_srv.ts @@ -368,3 +368,17 @@ export class BackendSrv { } coreModule.service('backendSrv', BackendSrv); + +// +// Code below is to expose the service to react components +// + +let singletonInstance: BackendSrv; + +export function setBackendSrv(instance: BackendSrv) { + singletonInstance = instance; +} + +export function getBackendSrv(): BackendSrv { + return singletonInstance; +} diff --git a/public/app/core/services/context_srv.ts b/public/app/core/services/context_srv.ts index be8a0af7b7b..8959573e731 100644 --- a/public/app/core/services/context_srv.ts +++ b/public/app/core/services/context_srv.ts @@ -34,14 +34,10 @@ export class ContextSrv { constructor() { this.sidemenu = store.getBool('grafana.sidemenu', true); - if (!config.buildInfo) { - config.buildInfo = {}; - } if (!config.bootData) { config.bootData = { user: {}, settings: {} }; } - this.version = config.buildInfo.version; this.user = new User(); this.isSignedIn = this.user.isSignedIn; this.isGrafanaAdmin = this.user.isGrafanaAdmin; diff --git a/public/app/core/services/keybindingSrv.ts b/public/app/core/services/keybindingSrv.ts index 94bf9efb31b..672ae29740b 100644 --- a/public/app/core/services/keybindingSrv.ts +++ b/public/app/core/services/keybindingSrv.ts @@ -1,6 +1,7 @@ import $ from 'jquery'; import _ from 'lodash'; +import config from 'app/core/config'; import coreModule from 'app/core/core_module'; import appEvents from 'app/core/app_events'; import { encodePathComponent } from 'app/core/utils/location_util'; @@ -14,7 +15,7 @@ export class KeybindingSrv { timepickerOpen = false; /** @ngInject */ - constructor(private $rootScope, private $location, private datasourceSrv) { + constructor(private $rootScope, private
$location, private datasourceSrv, private timeSrv, private contextSrv) { // clear out all shortcuts on route change $rootScope.$on('$routeChangeSuccess', () => { Mousetrap.reset(); @@ -177,16 +178,24 @@ export class KeybindingSrv { } }); - this.bind('x', async () => { - if (dashboard.meta.focusPanelId) { - const panel = dashboard.getPanelById(dashboard.meta.focusPanelId); - const datasource = await this.datasourceSrv.get(panel.datasource); - if (datasource && datasource.supportsExplore) { - const exploreState = encodePathComponent(JSON.stringify(datasource.getExploreState(panel))); - this.$location.url(`/explore/${exploreState}`); + // jump to explore if permissions allow + if (this.contextSrv.isEditor && config.exploreEnabled) { + this.bind('x', async () => { + if (dashboard.meta.focusPanelId) { + const panel = dashboard.getPanelById(dashboard.meta.focusPanelId); + const datasource = await this.datasourceSrv.get(panel.datasource); + if (datasource && datasource.supportsExplore) { + const range = this.timeSrv.timeRangeForUrl(); + const state = { + ...datasource.getExploreState(panel), + range, + }; + const exploreState = encodePathComponent(JSON.stringify(state)); + this.$location.url(`/explore?state=${exploreState}`); + } } - } - }); + }); + } // delete panel this.bind('p r', () => { diff --git a/public/app/core/specs/file_export.jest.ts b/public/app/core/specs/file_export.jest.ts index 82097227b97..915ce08fcd2 100644 --- a/public/app/core/specs/file_export.jest.ts +++ b/public/app/core/specs/file_export.jest.ts @@ -63,7 +63,6 @@ describe('file_export', () => { }); describe('when exporting table data to csv', () => { - it('should properly escape special characters and quote all string values', () => { const inputTable = { columns: [ @@ -104,13 +103,11 @@ describe('file_export', () => { it('should decode HTML encoded characters', function() { const inputTable = { - columns: [ - { text: 'string_value' }, - ], + columns: [{ text: 'string_value' }], rows: [ ['"&ä'], ['"some html"'], - ['some text'] + ['some text'], ], }; diff --git a/public/app/core/specs/table_model.jest.ts b/public/app/core/specs/table_model.jest.ts index a2c1eb5e1af..3d4c526cfea 100644 --- a/public/app/core/specs/table_model.jest.ts +++ b/public/app/core/specs/table_model.jest.ts @@ -44,3 +44,38 @@ describe('when sorting table asc', () => { expect(table.rows[2][1]).toBe(15); }); }); + +describe('when sorting with nulls', () => { + var table; + var values; + + beforeEach(() => { + table = new TableModel(); + table.columns = [{}, {}]; + table.rows = [[42, ''], [19, 'a'], [null, 'b'], [0, 'd'], [null, null], [2, 'c'], [0, null], [-8, '']]; + }); + + it('numbers with nulls at end with asc sort', () => { + table.sort({ col: 0, desc: false }); + values = table.rows.map(row => row[0]); + expect(values).toEqual([-8, 0, 0, 2, 19, 42, null, null]); + }); + + it('numbers with nulls at start with desc sort', () => { + table.sort({ col: 0, desc: true }); + values = table.rows.map(row => row[0]); + expect(values).toEqual([null, null, 42, 19, 2, 0, 0, -8]); + }); + + it('strings with nulls at end with asc sort', () => { + table.sort({ col: 1, desc: false }); + values = table.rows.map(row => row[1]); + expect(values).toEqual(['', '', 'a', 'b', 'c', 'd', null, null]); + }); + + it('strings with nulls at start with desc sort', () => { + table.sort({ col: 1, desc: true }); + values = table.rows.map(row => row[1]); + expect(values).toEqual([null, null, 'd', 'c', 'b', 'a', '', '']); + }); +}); diff --git a/public/app/core/specs/ticks.jest.ts 
b/public/app/core/specs/ticks.jest.ts new file mode 100644 index 00000000000..8b7e0cd73b5 --- /dev/null +++ b/public/app/core/specs/ticks.jest.ts @@ -0,0 +1,25 @@ +import * as ticks from '../utils/ticks'; + +describe('ticks', () => { + describe('getFlotTickDecimals()', () => { + let ctx: any = {}; + + beforeEach(() => { + ctx.axis = {}; + }); + + it('should calculate decimals precision based on graph height', () => { + let dec = ticks.getFlotTickDecimals(0, 10, ctx.axis, 200); + expect(dec.tickDecimals).toBe(1); + expect(dec.scaledDecimals).toBe(1); + + dec = ticks.getFlotTickDecimals(0, 100, ctx.axis, 200); + expect(dec.tickDecimals).toBe(0); + expect(dec.scaledDecimals).toBe(-1); + + dec = ticks.getFlotTickDecimals(0, 1, ctx.axis, 200); + expect(dec.tickDecimals).toBe(2); + expect(dec.scaledDecimals).toBe(3); + }); + }); +}); diff --git a/public/app/core/specs/time_series.jest.ts b/public/app/core/specs/time_series.jest.ts index 6214c687add..bf50d807e03 100644 --- a/public/app/core/specs/time_series.jest.ts +++ b/public/app/core/specs/time_series.jest.ts @@ -1,4 +1,5 @@ import TimeSeries from 'app/core/time_series2'; +import { updateLegendValues } from 'app/core/time_series2'; describe('TimeSeries', function() { var points, series; @@ -118,6 +119,20 @@ describe('TimeSeries', function() { series.getFlotPairs('null'); expect(series.stats.avg).toBe(null); }); + + it('calculates timeStep', function() { + series = new TimeSeries({ + datapoints: [[null, 1], [null, 2], [null, 3]], + }); + series.getFlotPairs('null'); + expect(series.stats.timeStep).toBe(1); + + series = new TimeSeries({ + datapoints: [[0, 1530529290], [0, 1530529305], [0, 1530529320]], + }); + series.getFlotPairs('null'); + expect(series.stats.timeStep).toBe(15); + }); }); describe('When checking if ms resolution is needed', function() { @@ -311,4 +326,55 @@ describe('TimeSeries', function() { expect(series.formatValue(-Infinity)).toBe(''); }); }); + + describe('legend decimals', function() { + let series, panel; + let height = 200; + beforeEach(function() { + testData = { + alias: 'test', + datapoints: [[1, 2], [0, 3], [10, 4], [8, 5]], + }; + series = new TimeSeries(testData); + series.getFlotPairs(); + panel = { + decimals: null, + yaxes: [ + { + decimals: null, + }, + ], + }; + }); + + it('should set decimals based on Y axis (expect calculated decimals = 1)', function() { + let data = [series]; + // Expect ticks with this data will have decimals = 1 + updateLegendValues(data, panel, height); + expect(data[0].decimals).toBe(2); + }); + + it('should set decimals based on Y axis to 0 if calculated decimals = 0)', function() { + testData.datapoints = [[10, 2], [0, 3], [100, 4], [80, 5]]; + series = new TimeSeries(testData); + series.getFlotPairs(); + let data = [series]; + updateLegendValues(data, panel, height); + expect(data[0].decimals).toBe(0); + }); + + it('should set decimals to Y axis decimals + 1', function() { + panel.yaxes[0].decimals = 2; + let data = [series]; + updateLegendValues(data, panel, height); + expect(data[0].decimals).toBe(3); + }); + + it('should set decimals to legend decimals value if it was set explicitly', function() { + panel.decimals = 3; + let data = [series]; + updateLegendValues(data, panel, height); + expect(data[0].decimals).toBe(3); + }); + }); }); diff --git a/public/app/core/specs/value_select_dropdown.jest.ts b/public/app/core/specs/value_select_dropdown.jest.ts new file mode 100644 index 00000000000..3cc310435b7 --- /dev/null +++ b/public/app/core/specs/value_select_dropdown.jest.ts @@ 
-0,0 +1,159 @@ +import 'app/core/directives/value_select_dropdown'; +import { ValueSelectDropdownCtrl } from '../directives/value_select_dropdown'; +import q from 'q'; + +describe('SelectDropdownCtrl', () => { + let tagValuesMap: any = {}; + + ValueSelectDropdownCtrl.prototype.onUpdated = jest.fn(); + let ctrl; + + describe('Given simple variable', () => { + beforeEach(() => { + ctrl = new ValueSelectDropdownCtrl(q); + ctrl.variable = { + current: { text: 'hej', value: 'hej' }, + getValuesForTag: key => { + return Promise.resolve(tagValuesMap[key]); + }, + }; + ctrl.init(); + }); + + it('Should init labelText and linkText', () => { + expect(ctrl.linkText).toBe('hej'); + }); + }); + + describe('Given variable with tags and dropdown is opened', () => { + beforeEach(() => { + ctrl = new ValueSelectDropdownCtrl(q); + ctrl.variable = { + current: { text: 'server-1', value: 'server-1' }, + options: [ + { text: 'server-1', value: 'server-1', selected: true }, + { text: 'server-2', value: 'server-2' }, + { text: 'server-3', value: 'server-3' }, + ], + tags: ['key1', 'key2', 'key3'], + getValuesForTag: key => { + return Promise.resolve(tagValuesMap[key]); + }, + multi: true, + }; + tagValuesMap.key1 = ['server-1', 'server-3']; + tagValuesMap.key2 = ['server-2', 'server-3']; + tagValuesMap.key3 = ['server-1', 'server-2', 'server-3']; + ctrl.init(); + ctrl.show(); + }); + + it('should init tags model', () => { + expect(ctrl.tags.length).toBe(3); + expect(ctrl.tags[0].text).toBe('key1'); + }); + + it('should init options model', () => { + expect(ctrl.options.length).toBe(3); + }); + + it('should init selected values array', () => { + expect(ctrl.selectedValues.length).toBe(1); + }); + + it('should set linkText', () => { + expect(ctrl.linkText).toBe('server-1'); + }); + + describe('after additional value is selected', () => { + beforeEach(() => { + ctrl.selectValue(ctrl.options[2], {}); + ctrl.commitChanges(); + }); + + it('should update link text', () => { + expect(ctrl.linkText).toBe('server-1 + server-3'); + }); + }); + + describe('When tag is selected', () => { + beforeEach(async () => { + await ctrl.selectTag(ctrl.tags[0]); + ctrl.commitChanges(); + }); + + it('should select tag', () => { + expect(ctrl.selectedTags.length).toBe(1); + }); + + it('should select values', () => { + expect(ctrl.options[0].selected).toBe(true); + expect(ctrl.options[2].selected).toBe(true); + }); + + it('link text should not include tag values', () => { + expect(ctrl.linkText).toBe(''); + }); + + describe('and then dropdown is opened and closed without changes', () => { + beforeEach(() => { + ctrl.show(); + ctrl.commitChanges(); + }); + + it('should still have selected tag', () => { + expect(ctrl.selectedTags.length).toBe(1); + }); + }); + + describe('and then unselected', () => { + beforeEach(async () => { + await ctrl.selectTag(ctrl.tags[0]); + }); + + it('should deselect tag', () => { + expect(ctrl.selectedTags.length).toBe(0); + }); + }); + + describe('and then value is unselected', () => { + beforeEach(() => { + ctrl.selectValue(ctrl.options[0], {}); + }); + + it('should deselect tag', () => { + expect(ctrl.selectedTags.length).toBe(0); + }); + }); + }); + }); + + describe('Given variable with selected tags', () => { + beforeEach(() => { + ctrl = new ValueSelectDropdownCtrl(q); + ctrl.variable = { + current: { + text: 'server-1', + value: 'server-1', + tags: [{ text: 'key1', selected: true }], + }, + options: [ + { text: 'server-1', value: 'server-1' }, + { text: 'server-2', value: 'server-2' }, + { text:
'server-3', value: 'server-3' }, + ], + tags: ['key1', 'key2', 'key3'], + getValuesForTag: key => { + return Promise.resolve(tagValuesMap[key]); + }, + multi: true, + }; + ctrl.init(); + ctrl.show(); + }); + + it('should set tag as selected', () => { + expect(ctrl.tags[0].selected).toBe(true); + }); + }); +}); diff --git a/public/app/core/specs/value_select_dropdown_specs.ts b/public/app/core/specs/value_select_dropdown_specs.ts deleted file mode 100644 index 8f6408fb389..00000000000 --- a/public/app/core/specs/value_select_dropdown_specs.ts +++ /dev/null @@ -1,171 +0,0 @@ -import { describe, beforeEach, it, expect, angularMocks, sinon } from 'test/lib/common'; -import 'app/core/directives/value_select_dropdown'; - -describe('SelectDropdownCtrl', function() { - var scope; - var ctrl; - var tagValuesMap: any = {}; - var rootScope; - var q; - - beforeEach(angularMocks.module('grafana.core')); - beforeEach( - angularMocks.inject(function($controller, $rootScope, $q, $httpBackend) { - rootScope = $rootScope; - q = $q; - scope = $rootScope.$new(); - ctrl = $controller('ValueSelectDropdownCtrl', { $scope: scope }); - ctrl.onUpdated = sinon.spy(); - $httpBackend.when('GET', /\.html$/).respond(''); - }) - ); - - describe('Given simple variable', function() { - beforeEach(function() { - ctrl.variable = { - current: { text: 'hej', value: 'hej' }, - getValuesForTag: function(key) { - return q.when(tagValuesMap[key]); - }, - }; - ctrl.init(); - }); - - it('Should init labelText and linkText', function() { - expect(ctrl.linkText).to.be('hej'); - }); - }); - - describe('Given variable with tags and dropdown is opened', function() { - beforeEach(function() { - ctrl.variable = { - current: { text: 'server-1', value: 'server-1' }, - options: [ - { text: 'server-1', value: 'server-1', selected: true }, - { text: 'server-2', value: 'server-2' }, - { text: 'server-3', value: 'server-3' }, - ], - tags: ['key1', 'key2', 'key3'], - getValuesForTag: function(key) { - return q.when(tagValuesMap[key]); - }, - multi: true, - }; - tagValuesMap.key1 = ['server-1', 'server-3']; - tagValuesMap.key2 = ['server-2', 'server-3']; - tagValuesMap.key3 = ['server-1', 'server-2', 'server-3']; - ctrl.init(); - ctrl.show(); - }); - - it('should init tags model', function() { - expect(ctrl.tags.length).to.be(3); - expect(ctrl.tags[0].text).to.be('key1'); - }); - - it('should init options model', function() { - expect(ctrl.options.length).to.be(3); - }); - - it('should init selected values array', function() { - expect(ctrl.selectedValues.length).to.be(1); - }); - - it('should set linkText', function() { - expect(ctrl.linkText).to.be('server-1'); - }); - - describe('after adititional value is selected', function() { - beforeEach(function() { - ctrl.selectValue(ctrl.options[2], {}); - ctrl.commitChanges(); - }); - - it('should update link text', function() { - expect(ctrl.linkText).to.be('server-1 + server-3'); - }); - }); - - describe('When tag is selected', function() { - beforeEach(function() { - ctrl.selectTag(ctrl.tags[0]); - rootScope.$digest(); - ctrl.commitChanges(); - }); - - it('should select tag', function() { - expect(ctrl.selectedTags.length).to.be(1); - }); - - it('should select values', function() { - expect(ctrl.options[0].selected).to.be(true); - expect(ctrl.options[2].selected).to.be(true); - }); - - it('link text should not include tag values', function() { - expect(ctrl.linkText).to.be(''); - }); - - describe('and then dropdown is opened and closed without changes', function() { - beforeEach(function() { - 
ctrl.show(); - ctrl.commitChanges(); - rootScope.$digest(); - }); - - it('should still have selected tag', function() { - expect(ctrl.selectedTags.length).to.be(1); - }); - }); - - describe('and then unselected', function() { - beforeEach(function() { - ctrl.selectTag(ctrl.tags[0]); - rootScope.$digest(); - }); - - it('should deselect tag', function() { - expect(ctrl.selectedTags.length).to.be(0); - }); - }); - - describe('and then value is unselected', function() { - beforeEach(function() { - ctrl.selectValue(ctrl.options[0], {}); - }); - - it('should deselect tag', function() { - expect(ctrl.selectedTags.length).to.be(0); - }); - }); - }); - }); - - describe('Given variable with selected tags', function() { - beforeEach(function() { - ctrl.variable = { - current: { - text: 'server-1', - value: 'server-1', - tags: [{ text: 'key1', selected: true }], - }, - options: [ - { text: 'server-1', value: 'server-1' }, - { text: 'server-2', value: 'server-2' }, - { text: 'server-3', value: 'server-3' }, - ], - tags: ['key1', 'key2', 'key3'], - getValuesForTag: function(key) { - return q.when(tagValuesMap[key]); - }, - multi: true, - }; - ctrl.init(); - ctrl.show(); - }); - - it('should set tag as selected', function() { - expect(ctrl.tags[0].selected).to.be(true); - }); - }); -}); diff --git a/public/app/core/table_model.ts b/public/app/core/table_model.ts index 57800b3e48d..04857eb806d 100644 --- a/public/app/core/table_model.ts +++ b/public/app/core/table_model.ts @@ -19,23 +19,16 @@ export default class TableModel { this.rows.sort(function(a, b) { a = a[options.col]; b = b[options.col]; - if (a < b) { - return -1; - } - if (a > b) { - return 1; - } - return 0; + // Sort null or undefined separately from comparable values + return +(a == null) - +(b == null) || +(a > b) || -(a < b); }); - this.columns[options.col].sort = true; - if (options.desc) { this.rows.reverse(); - this.columns[options.col].desc = true; - } else { - this.columns[options.col].desc = false; } + + this.columns[options.col].sort = true; + this.columns[options.col].desc = options.desc; } addColumn(col) { @@ -44,4 +37,8 @@ this.columnMap[col.text] = col; } } + + addRow(row) { + this.rows.push(row); + } } diff --git a/public/app/core/time_series2.ts b/public/app/core/time_series2.ts index 4da64850e59..59729ebc312 100644 --- a/public/app/core/time_series2.ts +++ b/public/app/core/time_series2.ts @@ -23,23 +23,27 @@ function translateFillOption(fill) { * Calculate decimals for legend and update values for each series.
* @param data series data * @param panel + * @param height */ -export function updateLegendValues(data: TimeSeries[], panel) { +export function updateLegendValues(data: TimeSeries[], panel, height) { for (let i = 0; i < data.length; i++) { let series = data[i]; - let yaxes = panel.yaxes; + const yaxes = panel.yaxes; const seriesYAxis = series.yaxis || 1; - let axis = yaxes[seriesYAxis - 1]; - let { tickDecimals, scaledDecimals } = getFlotTickDecimals(data, axis); - let formater = kbn.valueFormats[panel.yaxes[seriesYAxis - 1].format]; + const axis = yaxes[seriesYAxis - 1]; + let formater = kbn.valueFormats[axis.format]; // decimal override if (_.isNumber(panel.decimals)) { series.updateLegendValues(formater, panel.decimals, null); + } else if (_.isNumber(axis.decimals)) { + series.updateLegendValues(formater, axis.decimals + 1, null); } else { // auto decimals // legend and tooltip gets one more decimal precision // than graph legend ticks + const { datamin, datamax } = getDataMinMax(data); + let { tickDecimals, scaledDecimals } = getFlotTickDecimals(datamin, datamax, axis, height); tickDecimals = (tickDecimals || -1) + 1; series.updateLegendValues(formater, tickDecimals, scaledDecimals + 2); } diff --git a/public/app/core/utils/kbn.ts b/public/app/core/utils/kbn.ts index f1c782846fc..463025567cd 100644 --- a/public/app/core/utils/kbn.ts +++ b/public/app/core/utils/kbn.ts @@ -448,6 +448,7 @@ kbn.valueFormats.currencyISK = kbn.formatBuilders.currency('kr'); kbn.valueFormats.currencyNOK = kbn.formatBuilders.currency('kr'); kbn.valueFormats.currencySEK = kbn.formatBuilders.currency('kr'); kbn.valueFormats.currencyCZK = kbn.formatBuilders.currency('czk'); +kbn.valueFormats.currencyCHF = kbn.formatBuilders.currency('CHF'); // Data (Binary) kbn.valueFormats.bits = kbn.formatBuilders.binarySIPrefix('b'); @@ -498,6 +499,7 @@ kbn.valueFormats.watt = kbn.formatBuilders.decimalSIPrefix('W'); kbn.valueFormats.kwatt = kbn.formatBuilders.decimalSIPrefix('W', 1); kbn.valueFormats.mwatt = kbn.formatBuilders.decimalSIPrefix('W', -1); kbn.valueFormats.kwattm = kbn.formatBuilders.decimalSIPrefix('W/Min', 1); +kbn.valueFormats.Wm2 = kbn.formatBuilders.fixedUnit('W/m2'); kbn.valueFormats.voltamp = kbn.formatBuilders.decimalSIPrefix('VA'); kbn.valueFormats.kvoltamp = kbn.formatBuilders.decimalSIPrefix('VA', 1); kbn.valueFormats.voltampreact = kbn.formatBuilders.decimalSIPrefix('var'); @@ -527,6 +529,7 @@ kbn.valueFormats.pressurebar = kbn.formatBuilders.decimalSIPrefix('bar'); kbn.valueFormats.pressurembar = kbn.formatBuilders.decimalSIPrefix('bar', -1); kbn.valueFormats.pressurekbar = kbn.formatBuilders.decimalSIPrefix('bar', 1); kbn.valueFormats.pressurehpa = kbn.formatBuilders.fixedUnit('hPa'); +kbn.valueFormats.pressurekpa = kbn.formatBuilders.fixedUnit('kPa'); kbn.valueFormats.pressurehg = kbn.formatBuilders.fixedUnit('"Hg'); kbn.valueFormats.pressurepsi = kbn.formatBuilders.scaledUnits(1000, [' psi', ' ksi', ' Mpsi']); @@ -578,6 +581,9 @@ kbn.valueFormats.flowgpm = kbn.formatBuilders.fixedUnit('gpm'); kbn.valueFormats.flowcms = kbn.formatBuilders.fixedUnit('cms'); kbn.valueFormats.flowcfs = kbn.formatBuilders.fixedUnit('cfs'); kbn.valueFormats.flowcfm = kbn.formatBuilders.fixedUnit('cfm'); +kbn.valueFormats.litreh = kbn.formatBuilders.fixedUnit('l/h'); +kbn.valueFormats.flowlpm = kbn.formatBuilders.decimalSIPrefix('L'); +kbn.valueFormats.flowmlpm = kbn.formatBuilders.decimalSIPrefix('L', -1); // Angle kbn.valueFormats.degree = kbn.formatBuilders.fixedUnit('°'); @@ -873,6 +879,7 @@ kbn.getUnitFormats = 
function() { { text: 'Norwegian Krone (kr)', value: 'currencyNOK' }, { text: 'Swedish Krona (kr)', value: 'currencySEK' }, { text: 'Czech koruna (czk)', value: 'currencyCZK' }, + { text: 'Swiss franc (CHF)', value: 'currencyCHF' }, ], }, { @@ -950,7 +957,7 @@ kbn.getUnitFormats = function() { text: 'throughput', submenu: [ { text: 'ops/sec (ops)', value: 'ops' }, - { text: 'requets/sec (rps)', value: 'reqps' }, + { text: 'requests/sec (rps)', value: 'reqps' }, { text: 'reads/sec (rps)', value: 'rps' }, { text: 'writes/sec (wps)', value: 'wps' }, { text: 'I/O ops/sec (iops)', value: 'iops' }, @@ -989,17 +996,17 @@ kbn.getUnitFormats = function() { { text: 'velocity', submenu: [ - { text: 'm/s', value: 'velocityms' }, - { text: 'km/h', value: 'velocitykmh' }, - { text: 'mph', value: 'velocitymph' }, + { text: 'metres/second (m/s)', value: 'velocityms' }, + { text: 'kilometers/hour (km/h)', value: 'velocitykmh' }, + { text: 'miles/hour (mph)', value: 'velocitymph' }, { text: 'knot (kn)', value: 'velocityknot' }, ], }, { text: 'volume', submenu: [ - { text: 'millilitre', value: 'mlitre' }, - { text: 'litre', value: 'litre' }, + { text: 'millilitre (mL)', value: 'mlitre' }, + { text: 'litre (L)', value: 'litre' }, { text: 'cubic metre', value: 'm3' }, { text: 'Normal cubic metre', value: 'Nm3' }, { text: 'cubic decimetre', value: 'dm3' }, @@ -1012,6 +1019,7 @@ kbn.getUnitFormats = function() { { text: 'Watt (W)', value: 'watt' }, { text: 'Kilowatt (kW)', value: 'kwatt' }, { text: 'Milliwatt (mW)', value: 'mwatt' }, + { text: 'Watt per square metre (W/m2)', value: 'Wm2' }, { text: 'Volt-ampere (VA)', value: 'voltamp' }, { text: 'Kilovolt-ampere (kVA)', value: 'kvoltamp' }, { text: 'Volt-ampere reactive (var)', value: 'voltampreact' }, @@ -1047,6 +1055,7 @@ kbn.getUnitFormats = function() { { text: 'Bars', value: 'pressurebar' }, { text: 'Kilobars', value: 'pressurekbar' }, { text: 'Hectopascals', value: 'pressurehpa' }, + { text: 'Kilopascals', value: 'pressurekpa' }, { text: 'Inches of mercury', value: 'pressurehg' }, { text: 'PSI', value: 'pressurepsi' }, ], @@ -1067,6 +1076,9 @@ kbn.getUnitFormats = function() { { text: 'Cubic meters/sec (cms)', value: 'flowcms' }, { text: 'Cubic feet/sec (cfs)', value: 'flowcfs' }, { text: 'Cubic feet/min (cfm)', value: 'flowcfm' }, + { text: 'Litre/hour', value: 'litreh' }, + { text: 'Litre/min (l/min)', value: 'flowlpm' }, + { text: 'milliLitre/min (mL/min)', value: 'flowmlpm' }, ], }, { diff --git a/public/app/core/utils/ticks.ts b/public/app/core/utils/ticks.ts index db65104cfc0..66e6a7ce4fc 100644 --- a/public/app/core/utils/ticks.ts +++ b/public/app/core/utils/ticks.ts @@ -1,5 +1,3 @@ -import { getDataMinMax } from 'app/core/time_series2'; - /** * Calculate tick step. * Implementation from d3-array (ticks.js) @@ -121,12 +119,10 @@ export function getFlotRange(panelMin, panelMax, datamin, datamax) { * Calculate tick decimals. * Implementation from Flot. 
*/ -export function getFlotTickDecimals(data, axis) { - let { datamin, datamax } = getDataMinMax(data); - let { min, max } = getFlotRange(axis.min, axis.max, datamin, datamax); - let noTicks = 3; - let tickDecimals, maxDec; - let delta = (max - min) / noTicks; +export function getFlotTickDecimals(datamin, datamax, axis, height) { + const { min, max } = getFlotRange(axis.min, axis.max, datamin, datamax); + const noTicks = 0.3 * Math.sqrt(height); + const delta = (max - min) / noTicks; let dec = -Math.floor(Math.log(delta) / Math.LN10); let magn = Math.pow(10, -dec); @@ -139,19 +135,17 @@ export function getFlotTickDecimals(data, axis) { } else if (norm < 3) { size = 2; // special case for 2.5, requires an extra decimal - if (norm > 2.25 && (maxDec == null || dec + 1 <= maxDec)) { + if (norm > 2.25) { size = 2.5; - ++dec; } } else if (norm < 7.5) { size = 5; } else { size = 10; } - size *= magn; - tickDecimals = Math.max(0, maxDec != null ? maxDec : dec); + const tickDecimals = Math.max(0, -Math.floor(Math.log(delta) / Math.LN10) + 1); // grafana addition const scaledDecimals = tickDecimals - Math.floor(Math.log(size) / Math.LN10); return { tickDecimals, scaledDecimals }; diff --git a/public/app/features/admin/admin_edit_user_ctrl.ts b/public/app/features/admin/admin_edit_user_ctrl.ts index 8203c7399c1..1d4fb9cf19a 100644 --- a/public/app/features/admin/admin_edit_user_ctrl.ts +++ b/public/app/features/admin/admin_edit_user_ctrl.ts @@ -75,6 +75,7 @@ export class AdminEditUserCtrl { $scope.removeOrgUser = function(orgUser) { backendSrv.delete('/api/orgs/' + orgUser.orgId + '/users/' + $scope.user_id).then(function() { + $scope.getUser($scope.user_id); $scope.getUserOrgs($scope.user_id); }); }; @@ -108,6 +109,7 @@ export class AdminEditUserCtrl { $scope.newOrg.loginOrEmail = $scope.user.login; backendSrv.post('/api/orgs/' + orgInfo.id + '/users/', $scope.newOrg).then(function() { + $scope.getUser($scope.user_id); $scope.getUserOrgs($scope.user_id); }); }; diff --git a/public/app/features/alerting/specs/threshold_mapper_specs.ts b/public/app/features/alerting/specs/threshold_mapper.jest.ts similarity index 70% rename from public/app/features/alerting/specs/threshold_mapper_specs.ts rename to public/app/features/alerting/specs/threshold_mapper.jest.ts index 1d68fce7050..b9fa45a6e49 100644 --- a/public/app/features/alerting/specs/threshold_mapper_specs.ts +++ b/public/app/features/alerting/specs/threshold_mapper.jest.ts @@ -18,9 +18,9 @@ describe('ThresholdMapper', () => { }; var updated = ThresholdMapper.alertToGraphThresholds(panel); - expect(updated).to.be(true); - expect(panel.thresholds[0].op).to.be('gt'); - expect(panel.thresholds[0].value).to.be(100); + expect(updated).toBe(true); + expect(panel.thresholds[0].op).toBe('gt'); + expect(panel.thresholds[0].value).toBe(100); }); }); @@ -39,12 +39,12 @@ describe('ThresholdMapper', () => { }; var updated = ThresholdMapper.alertToGraphThresholds(panel); - expect(updated).to.be(true); - expect(panel.thresholds[0].op).to.be('lt'); - expect(panel.thresholds[0].value).to.be(100); + expect(updated).toBe(true); + expect(panel.thresholds[0].op).toBe('lt'); + expect(panel.thresholds[0].value).toBe(100); - expect(panel.thresholds[1].op).to.be('gt'); - expect(panel.thresholds[1].value).to.be(200); + expect(panel.thresholds[1].op).toBe('gt'); + expect(panel.thresholds[1].value).toBe(200); }); }); @@ -63,12 +63,12 @@ describe('ThresholdMapper', () => { }; var updated = ThresholdMapper.alertToGraphThresholds(panel); - expect(updated).to.be(true); - 
expect(panel.thresholds[0].op).to.be('gt'); - expect(panel.thresholds[0].value).to.be(100); + expect(updated).toBe(true); + expect(panel.thresholds[0].op).toBe('gt'); + expect(panel.thresholds[0].value).toBe(100); - expect(panel.thresholds[1].op).to.be('lt'); - expect(panel.thresholds[1].value).to.be(200); + expect(panel.thresholds[1].op).toBe('lt'); + expect(panel.thresholds[1].value).toBe(200); }); }); }); diff --git a/public/app/features/annotations/editor_ctrl.ts b/public/app/features/annotations/editor_ctrl.ts index 169e2e4c2bb..34b9635ec85 100644 --- a/public/app/features/annotations/editor_ctrl.ts +++ b/public/app/features/annotations/editor_ctrl.ts @@ -70,6 +70,10 @@ export class AnnotationsEditorCtrl { this.mode = 'list'; } + move(index, dir) { + _.move(this.annotations, index, index + dir); + } + add() { this.annotations.push(this.currentAnnotation); this.reset(); diff --git a/public/app/features/annotations/partials/editor.html b/public/app/features/annotations/partials/editor.html index 289f368ad0e..65ee7e52bd0 100644 --- a/public/app/features/annotations/partials/editor.html +++ b/public/app/features/annotations/partials/editor.html @@ -21,7 +21,7 @@ - +   {{annotation.name}} @@ -33,8 +33,8 @@ {{annotation.datasource || 'Default'}} - - + + diff --git a/public/app/features/annotations/specs/annotations_srv_specs.ts b/public/app/features/annotations/specs/annotations_srv.jest.ts similarity index 52% rename from public/app/features/annotations/specs/annotations_srv_specs.ts rename to public/app/features/annotations/specs/annotations_srv.jest.ts index c18638e3f12..7db7b6c9f05 100644 --- a/public/app/features/annotations/specs/annotations_srv_specs.ts +++ b/public/app/features/annotations/specs/annotations_srv.jest.ts @@ -1,15 +1,18 @@ -import { describe, beforeEach, it, expect, angularMocks } from 'test/lib/common'; import '../annotations_srv'; -import helpers from 'test/specs/helpers'; +import 'app/features/dashboard/time_srv'; +import { AnnotationsSrv } from '../annotations_srv'; describe('AnnotationsSrv', function() { - var ctx = new helpers.ServiceTestContext(); + let $rootScope = { + onAppEvent: jest.fn(), + }; + let $q; + let datasourceSrv; + let backendSrv; + let timeSrv; + + let annotationsSrv = new AnnotationsSrv($rootScope, $q, datasourceSrv, backendSrv, timeSrv); - beforeEach(angularMocks.module('grafana.core')); - beforeEach(angularMocks.module('grafana.services')); - beforeEach(() => { - ctx.createService('annotationsSrv'); - }); describe('When translating the query result', () => { const annotationSource = { datasource: '-- Grafana --', @@ -27,11 +30,11 @@ describe('AnnotationsSrv', function() { let translatedAnnotations; beforeEach(() => { - translatedAnnotations = ctx.service.translateQueryResult(annotationSource, annotations); + translatedAnnotations = annotationsSrv.translateQueryResult(annotationSource, annotations); }); it('should set defaults', () => { - expect(translatedAnnotations[0].source).to.eql(annotationSource); + expect(translatedAnnotations[0].source).toEqual(annotationSource); }); }); }); diff --git a/public/app/features/dashboard/dashboard_import_ctrl.ts b/public/app/features/dashboard/dashboard_import_ctrl.ts index d127e628a77..73e9e316b4e 100644 --- a/public/app/features/dashboard/dashboard_import_ctrl.ts +++ b/public/app/features/dashboard/dashboard_import_ctrl.ts @@ -7,6 +7,7 @@ export class DashboardImportCtrl { jsonText: string; parseError: string; nameExists: boolean; + uidExists: boolean; dash: any; inputs: any[]; inputsValid: boolean; @@ 
-16,6 +17,13 @@ export class DashboardImportCtrl { titleTouched: boolean; hasNameValidationError: boolean; nameValidationError: any; + hasUidValidationError: boolean; + uidValidationError: any; + autoGenerateUid: boolean; + autoGenerateUidValue: string; + folderId: number; + initialFolderTitle: string; + isValidFolderSelection: boolean; /** @ngInject */ constructor(private backendSrv, private validationSrv, navModelSrv, private $location, $routeParams) { @@ -23,6 +31,11 @@ export class DashboardImportCtrl { this.step = 1; this.nameExists = false; + this.uidExists = false; + this.autoGenerateUid = true; + this.autoGenerateUidValue = 'auto-generated'; + this.folderId = $routeParams.folderId ? Number($routeParams.folderId) || 0 : null; + this.initialFolderTitle = 'Select a folder'; // check gnetId in url if ($routeParams.gnetId) { @@ -61,6 +74,7 @@ export class DashboardImportCtrl { this.inputsValid = this.inputs.length === 0; this.titleChanged(); + this.uidChanged(true); } setDatasourceOptions(input, inputModel) { @@ -93,8 +107,9 @@ export class DashboardImportCtrl { this.nameExists = false; this.validationSrv - .validateNewDashboardName(0, this.dash.title) + .validateNewDashboardName(this.folderId, this.dash.title) .then(() => { + this.nameExists = false; this.hasNameValidationError = false; }) .catch(err => { @@ -107,6 +122,45 @@ export class DashboardImportCtrl { }); } + uidChanged(initial) { + this.uidExists = false; + this.hasUidValidationError = false; + + if (initial === true && this.dash.uid) { + this.autoGenerateUidValue = 'value set'; + } + + this.backendSrv + .getDashboardByUid(this.dash.uid) + .then(res => { + this.uidExists = true; + this.hasUidValidationError = true; + this.uidValidationError = `Dashboard named '${res.dashboard.title}' in folder '${ + res.meta.folderTitle + }' has the same uid`; + }) + .catch(err => { + err.isHandled = true; + }); + } + + onFolderChange(folder) { + this.folderId = folder.id; + this.titleChanged(); + } + + onEnterFolderCreation() { + this.inputsValid = false; + } + + onExitFolderCreation() { + this.inputValueChanged(); + } + + isValid() { + return this.inputsValid && this.folderId !== null; + } + saveDashboard() { var inputs = this.inputs.map(input => { return { @@ -122,6 +176,7 @@ export class DashboardImportCtrl { dashboard: this.dash, overwrite: true, inputs: inputs, + folderId: this.folderId, }) .then(res => { this.$location.url(res.importedUrl); diff --git a/public/app/features/dashboard/dashboard_model.ts b/public/app/features/dashboard/dashboard_model.ts index 8a300a80341..976e4213920 100644 --- a/public/app/features/dashboard/dashboard_model.ts +++ b/public/app/features/dashboard/dashboard_model.ts @@ -22,8 +22,10 @@ export class DashboardModel { editable: any; graphTooltip: any; time: any; + private originalTime: any; timepicker: any; templating: any; + private originalTemplating: any; annotations: any; refresh: any; snapshot: any; @@ -48,6 +50,8 @@ export class DashboardModel { meta: true, panels: true, // needs special handling templating: true, // needs special handling + originalTime: true, + originalTemplating: true, }; constructor(data, meta?) 
{ @@ -79,6 +83,9 @@ export class DashboardModel { this.gnetId = data.gnetId || null; this.panels = _.map(data.panels || [], panelData => new PanelModel(panelData)); + this.resetOriginalVariables(); + this.resetOriginalTime(); + this.initMeta(meta); this.updateSchema(data); @@ -130,7 +137,12 @@ export class DashboardModel { } // cleans meta data and other non persistent state - getSaveModelClone() { + getSaveModelClone(options?) { + let defaults = _.defaults(options || {}, { + saveVariables: true, + saveTimerange: true, + }); + // make clone var copy: any = {}; for (var property in this) { @@ -146,6 +158,27 @@ export class DashboardModel { list: _.map(this.templating.list, variable => (variable.getSaveModel ? variable.getSaveModel() : variable)), }; + if (!defaults.saveVariables) { + for (let i = 0; i < copy.templating.list.length; i++) { + let current = copy.templating.list[i]; + let original = _.find(this.originalTemplating, { name: current.name, type: current.type }); + + if (!original) { + continue; + } + + if (current.type === 'adhoc') { + copy.templating.list[i].filters = original.filters; + } else { + copy.templating.list[i].current = original.current; + } + } + } + + if (!defaults.saveTimerange) { + copy.time = this.originalTime; + } + // get panel save models copy.panels = _.chain(this.panels) .filter(panel => panel.type !== 'add-panel') @@ -761,4 +794,40 @@ export class DashboardModel { let migrator = new DashboardMigrator(this); migrator.updateSchema(old); } + + resetOriginalTime() { + this.originalTime = _.cloneDeep(this.time); + } + + hasTimeChanged() { + return !_.isEqual(this.time, this.originalTime); + } + + resetOriginalVariables() { + this.originalTemplating = _.map(this.templating.list, variable => { + return { + name: variable.name, + type: variable.type, + current: _.cloneDeep(variable.current), + filters: _.cloneDeep(variable.filters), + }; + }); + } + + hasVariableValuesChanged() { + if (this.templating.list.length !== this.originalTemplating.length) { + return false; + } + + const updated = _.map(this.templating.list, variable => { + return { + name: variable.name, + type: variable.type, + current: _.cloneDeep(variable.current), + filters: _.cloneDeep(variable.filters), + }; + }); + + return !_.isEqual(updated, this.originalTemplating); + } } diff --git a/public/app/features/dashboard/dashgrid/AddPanelPanel.tsx b/public/app/features/dashboard/dashgrid/AddPanelPanel.tsx index 094bc49b708..f1f2290ce40 100644 --- a/public/app/features/dashboard/dashgrid/AddPanelPanel.tsx +++ b/public/app/features/dashboard/dashgrid/AddPanelPanel.tsx @@ -154,6 +154,15 @@ export class AddPanelPanel extends React.Component { @@ -229,10 +238,12 @@ export class AddPanelPanel extends React.Component diff --git a/public/app/features/dashboard/dashgrid/DashboardGrid.tsx b/public/app/features/dashboard/dashgrid/DashboardGrid.tsx index 03bf65afc6e..290e587eace 100644 --- a/public/app/features/dashboard/dashgrid/DashboardGrid.tsx +++ b/public/app/features/dashboard/dashgrid/DashboardGrid.tsx @@ -1,5 +1,5 @@ import React from 'react'; -import ReactGridLayout from 'react-grid-layout-grafana'; +import ReactGridLayout from 'react-grid-layout'; import { GRID_CELL_HEIGHT, GRID_CELL_VMARGIN, GRID_COLUMN_COUNT } from 'app/core/constants'; import { DashboardPanel } from './DashboardPanel'; import { DashboardModel } from '../dashboard_model'; diff --git a/public/app/features/dashboard/dashgrid/DashboardRow.tsx b/public/app/features/dashboard/dashgrid/DashboardRow.tsx index b133d4450bb..74630ac8f47 100644 
--- a/public/app/features/dashboard/dashgrid/DashboardRow.tsx
+++ b/public/app/features/dashboard/dashgrid/DashboardRow.tsx
@@ -84,15 +84,18 @@ export class DashboardRow extends React.Component {
       'fa-chevron-right': this.state.collapsed,
     });
-    let title = templateSrv.replaceWithText(this.props.panel.title, this.props.panel.scopedVars);
-    const hiddenPanels = this.props.panel.panels ? this.props.panel.panels.length : 0;
+    const title = templateSrv.replaceWithText(this.props.panel.title, this.props.panel.scopedVars);
+    const count = this.props.panel.panels ? this.props.panel.panels.length : 0;
+    const panels = count === 1 ? 'panel' : 'panels';
     return (
       {title}
-      ({hiddenPanels} hidden panels)
+
+        ({count} {panels})
+
       {this.dashboard.meta.canEdit === true && (
@@ -104,6 +107,11 @@ export class DashboardRow extends React.Component {
       )}
+      {this.state.collapsed === true && (
+
+
+
+      )}
    ); diff --git a/public/app/features/dashboard/dashnav/dashnav.html b/public/app/features/dashboard/dashnav/dashnav.html index 269d4b0bada..6ec272b5ca4 100644 --- a/public/app/features/dashboard/dashnav/dashnav.html +++ b/public/app/features/dashboard/dashnav/dashnav.html @@ -3,7 +3,7 @@ diff --git a/public/app/features/dashboard/export/exporter.ts b/public/app/features/dashboard/export/exporter.ts index 8b93c12bf50..fc24de76fcc 100644 --- a/public/app/features/dashboard/export/exporter.ts +++ b/public/app/features/dashboard/export/exporter.ts @@ -63,8 +63,7 @@ export class DashboardExporter { ); }; - // check up panel data sources - for (let panel of saveModel.panels) { + const processPanel = panel => { if (panel.datasource !== undefined) { templateizeDatasourceUsage(panel); } @@ -86,6 +85,18 @@ export class DashboardExporter { version: panelDef.info.version, }; } + }; + + // check up panel data sources + for (let panel of saveModel.panels) { + processPanel(panel); + + // handle collapsed rows + if (panel.collapsed !== undefined && panel.collapsed === true && panel.panels) { + for (let rowPanel of panel.panels) { + processPanel(rowPanel); + } + } } // templatize template vars diff --git a/public/app/features/dashboard/folder_picker/folder_picker.ts b/public/app/features/dashboard/folder_picker/folder_picker.ts index b8ae18b14d3..352b29d27a0 100644 --- a/public/app/features/dashboard/folder_picker/folder_picker.ts +++ b/public/app/features/dashboard/folder_picker/folder_picker.ts @@ -12,6 +12,7 @@ export class FolderPickerCtrl { enterFolderCreation: any; exitFolderCreation: any; enableCreateNew: boolean; + enableReset: boolean; rootName = 'General'; folder: any; createNewFolder: boolean; @@ -58,6 +59,10 @@ export class FolderPickerCtrl { result.unshift({ title: '-- New Folder --', id: -1 }); } + if (this.enableReset && query === '' && this.initialTitle !== '') { + result.unshift({ title: this.initialTitle, id: null }); + } + return _.map(result, item => { return { text: item.title, value: item.id }; }); @@ -65,7 +70,9 @@ export class FolderPickerCtrl { } onFolderChange(option) { - if (option.value === -1) { + if (!option) { + option = { value: 0, text: this.rootName }; + } else if (option.value === -1) { this.createNewFolder = true; this.enterFolderCreation(); return; @@ -97,10 +104,7 @@ export class FolderPickerCtrl { appEvents.emit('alert-success', ['Folder Created', 'OK']); this.closeCreateFolder(); - this.folder = { - text: result.title, - value: result.id, - }; + this.folder = { text: result.title, value: result.id }; this.onFolderChange(this.folder); }); } @@ -125,31 +129,31 @@ export class FolderPickerCtrl { } private loadInitialValue() { - if (this.initialFolderId && this.initialFolderId > 0) { - this.getOptions('').then(result => { - this.folder = _.find(result, { value: this.initialFolderId }); - if (!this.folder) { - this.folder = { text: this.initialTitle, value: this.initialFolderId }; - } - this.onFolderLoad(); - }); - } else { - if (this.initialTitle) { - this.folder = { text: this.initialTitle, value: null }; - } else { - this.folder = { text: this.rootName, value: 0 }; + const resetFolder = { text: this.initialTitle, value: null }; + const rootFolder = { text: this.rootName, value: 0 }; + this.getOptions('').then(result => { + let folder; + if (this.initialFolderId) { + folder = _.find(result, { value: this.initialFolderId }); + } else if (this.enableReset && this.initialTitle && this.initialFolderId === null) { + folder = resetFolder; } - this.onFolderLoad(); - } - } + 
if (!folder) { + if (this.isEditor) { + folder = rootFolder; + } else { + folder = result.length > 0 ? result[0] : resetFolder; + } + } - private onFolderLoad() { - if (this.onLoad) { - this.onLoad({ - $folder: { id: this.folder.value, title: this.folder.text }, - }); - } + this.folder = folder; + + // if this is not the same as our initial value notify parent + if (this.folder.id !== this.initialFolderId) { + this.onChange({ $folder: { id: this.folder.value, title: this.folder.text } }); + } + }); } } @@ -166,11 +170,11 @@ export function folderPicker() { labelClass: '@', rootName: '@', onChange: '&', - onLoad: '&', onCreateFolder: '&', enterFolderCreation: '&', exitFolderCreation: '&', enableCreateNew: '@', + enableReset: '@', }, }; } diff --git a/public/app/features/dashboard/history/history_srv.ts b/public/app/features/dashboard/history/history_srv.ts index bd6e7223a23..7f7dc950de3 100644 --- a/public/app/features/dashboard/history/history_srv.ts +++ b/public/app/features/dashboard/history/history_srv.ts @@ -32,11 +32,11 @@ export interface DiffTarget { export class HistorySrv { /** @ngInject */ - constructor(private backendSrv, private $q) {} + constructor(private backendSrv) {} getHistoryList(dashboard: DashboardModel, options: HistoryListOpts) { const id = dashboard && dashboard.id ? dashboard.id : void 0; - return id ? this.backendSrv.get(`api/dashboards/id/${id}/versions`, options) : this.$q.when([]); + return id ? this.backendSrv.get(`api/dashboards/id/${id}/versions`, options) : Promise.resolve([]); } calculateDiff(options: CalculateDiffOptions) { @@ -46,7 +46,8 @@ export class HistorySrv { restoreDashboard(dashboard: DashboardModel, version: number) { const id = dashboard && dashboard.id ? dashboard.id : void 0; const url = `api/dashboards/id/${id}/restore`; - return id && _.isNumber(version) ? this.backendSrv.post(url, { version }) : this.$q.when({}); + + return id && _.isNumber(version) ? this.backendSrv.post(url, { version }) : Promise.resolve({}); } } diff --git a/public/app/features/dashboard/partials/dashboard_import.html b/public/app/features/dashboard/partials/dashboard_import.html index 020bb98e8b0..b5358dfd0f8 100644 --- a/public/app/features/dashboard/partials/dashboard_import.html +++ b/public/app/features/dashboard/partials/dashboard_import.html @@ -80,6 +80,48 @@
+
+    Unique identifier (uid)
+
+    The unique identifier (uid) of a dashboard can be used to uniquely identify a dashboard across multiple Grafana installs.
+    The uid allows having consistent URLs for accessing dashboards, so changing the title of a dashboard will not break any
+    bookmarked links to that dashboard.
+
+    change
+
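(Aside: the uid-based links described in the help text above follow Grafana's /d/:uid/:slug routing, where only the uid carries identity. A tiny TypeScript illustration with hypothetical values:)

```ts
// Hypothetical values for illustration; only the uid identifies the dashboard.
const uid = 'nErXDvCkzz';           // stable across renames and installs
const slug = 'production-overview'; // cosmetic, derived from the current title
const url = `/d/${uid}/${slug}`;    // old bookmarks keep working after a rename
```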
    - - Cancel diff --git a/public/app/features/dashboard/save_modal.ts b/public/app/features/dashboard/save_modal.ts index 33165758555..3afcbab707c 100644 --- a/public/app/features/dashboard/save_modal.ts +++ b/public/app/features/dashboard/save_modal.ts @@ -14,19 +14,29 @@ const template = `
-
      Add a note to describe your changes
-
+
+
+
+
+
+
+
-
+
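Aside: the two save_modal.ts hunks here work together with the DashboardModel changes earlier in this diff. The modal asks the current dashboard whether its time range or template-variable values have drifted since the last save, surfaces a checkbox for each, and forwards the choices to getSaveModelClone(). A minimal TypeScript sketch of that contract (the DashboardLike interface and the saveDashboard() helper are illustrative stand-ins, not code from this change):

```ts
// Mirrors only the methods this diff adds to DashboardModel; the rest of the
// harness is hypothetical.
interface DashboardLike {
  hasTimeChanged(): boolean;
  hasVariableValuesChanged(): boolean;
  getSaveModelClone(options?: { saveTimerange?: boolean; saveVariables?: boolean }): object;
  resetOriginalTime(): void;
  resetOriginalVariables(): void;
}

function saveDashboard(dashboard: DashboardLike, saveTimerange: boolean, saveVariables: boolean): object {
  // With a flag set to false, the clone keeps the originally saved time range
  // or variable values instead of the user's current ones.
  const saveModel = dashboard.getSaveModelClone({ saveTimerange, saveVariables });

  // After a successful save, the "original" baselines move forward so the
  // drift indicators reset, mirroring postSave() in the hunk below.
  if (saveTimerange) {
    dashboard.resetOriginalTime();
  }
  if (saveVariables) {
    dashboard.resetOriginalVariables();
  }
  return saveModel;
}
```

The save_modal.jest.ts spec added later in this diff exercises exactly this reset behaviour.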
    @@ -48,14 +67,26 @@ const template = ` export class SaveDashboardModalCtrl { message: string; + saveVariables = false; + saveTimerange = false; + time: any; + originalTime: any; + current = []; + originalCurrent = []; max: number; saveForm: any; + isSaving: boolean; dismiss: () => void; + timeChange = false; + variableValueChange = false; /** @ngInject */ constructor(private dashboardSrv) { this.message = ''; this.max = 64; + this.isSaving = false; + this.timeChange = this.dashboardSrv.getCurrent().hasTimeChanged(); + this.variableValueChange = this.dashboardSrv.getCurrent().hasVariableValuesChanged(); } save() { @@ -63,11 +94,30 @@ export class SaveDashboardModalCtrl { return; } - var dashboard = this.dashboardSrv.getCurrent(); - var saveModel = dashboard.getSaveModelClone(); - var options = { message: this.message }; + var options = { + saveVariables: this.saveVariables, + saveTimerange: this.saveTimerange, + message: this.message, + }; - return this.dashboardSrv.save(saveModel, options).then(this.dismiss); + var dashboard = this.dashboardSrv.getCurrent(); + var saveModel = dashboard.getSaveModelClone(options); + + this.isSaving = true; + + return this.dashboardSrv.save(saveModel, options).then(this.postSave.bind(this, options)); + } + + postSave(options) { + if (options.saveVariables) { + this.dashboardSrv.getCurrent().resetOriginalVariables(); + } + + if (options.saveTimerange) { + this.dashboardSrv.getCurrent().resetOriginalTime(); + } + + this.dismiss(); } } diff --git a/public/app/features/dashboard/save_provisioned_modal.ts b/public/app/features/dashboard/save_provisioned_modal.ts index ba96ce0b0b9..3f2dcd0f57b 100644 --- a/public/app/features/dashboard/save_provisioned_modal.ts +++ b/public/app/features/dashboard/save_provisioned_modal.ts @@ -48,7 +48,7 @@ export class SaveProvisionedDashboardModalCtrl { constructor(dashboardSrv) { this.dash = dashboardSrv.getCurrent().getSaveModelClone(); delete this.dash.id; - this.dashboardJson = JSON.stringify(this.dash, null, 2); + this.dashboardJson = angular.toJson(this.dash, true); } save() { diff --git a/public/app/features/dashboard/settings/settings.ts b/public/app/features/dashboard/settings/settings.ts index 5acbbcf29c5..457cac5af72 100755 --- a/public/app/features/dashboard/settings/settings.ts +++ b/public/app/features/dashboard/settings/settings.ts @@ -2,6 +2,7 @@ import { coreModule, appEvents, contextSrv } from 'app/core/core'; import { DashboardModel } from '../dashboard_model'; import $ from 'jquery'; import _ from 'lodash'; +import angular from 'angular'; import config from 'app/core/config'; export class SettingsCtrl { @@ -118,7 +119,7 @@ export class SettingsCtrl { this.viewId = this.$location.search().editview; if (this.viewId) { - this.json = JSON.stringify(this.dashboard.getSaveModelClone(), null, 2); + this.json = angular.toJson(this.dashboard.getSaveModelClone(), true); } if (this.viewId === 'settings' && this.dashboard.meta.canMakeEditable) { diff --git a/public/app/features/dashboard/shareModalCtrl.ts b/public/app/features/dashboard/shareModalCtrl.ts index 8e30eaef91e..985c20f03b2 100644 --- a/public/app/features/dashboard/shareModalCtrl.ts +++ b/public/app/features/dashboard/shareModalCtrl.ts @@ -1,6 +1,6 @@ import angular from 'angular'; -import moment from 'moment'; import config from 'app/core/config'; +import moment from 'moment'; export class ShareModalCtrl { /** @ngInject */ @@ -86,9 +86,30 @@ export class ShareModalCtrl { config.appSubUrl + '/render/dashboard-solo/' ); $scope.imageUrl = 
$scope.imageUrl.replace(config.appSubUrl + '/d-solo/', config.appSubUrl + '/render/d-solo/');
-    $scope.imageUrl += '&width=1000';
-    $scope.imageUrl += '&height=500';
-    $scope.imageUrl += '&tz=UTC' + encodeURIComponent(moment().format('Z'));
+    $scope.imageUrl += '&width=1000&height=500' + $scope.getLocalTimeZone();
+  };
+
+  // This function will try to return the proper full name of the local timezone
+  // Chrome does not handle the timezone offset (but phantomjs does)
+  $scope.getLocalTimeZone = function() {
+    let utcOffset = '&tz=UTC' + encodeURIComponent(moment().format('Z'));
+
+    // Older browsers do not support the internationalization API
+    if (!(<any>window).Intl) {
+      return utcOffset;
+    }
+
+    const dateFormat = (<any>window).Intl.DateTimeFormat();
+    if (!dateFormat.resolvedOptions) {
+      return utcOffset;
+    }
+
+    const options = dateFormat.resolvedOptions();
+    if (!options.timeZone) {
+      return utcOffset;
+    }
+
+    return '&tz=' + encodeURIComponent(options.timeZone);
   };

   $scope.getShareUrl = function() {
diff --git a/public/app/features/dashboard/share_snapshot_ctrl.ts b/public/app/features/dashboard/share_snapshot_ctrl.ts
index aa146dcad63..7d5bd112dfd 100644
--- a/public/app/features/dashboard/share_snapshot_ctrl.ts
+++ b/public/app/features/dashboard/share_snapshot_ctrl.ts
@@ -123,6 +123,9 @@ export class ShareSnapshotCtrl {
             enable: annotation.enable,
             iconColor: annotation.iconColor,
             snapshotData: annotation.snapshotData,
+            type: annotation.type,
+            builtIn: annotation.builtIn,
+            hide: annotation.hide,
           };
         })
         .value();
diff --git a/public/app/features/dashboard/specs/dashboard_import_ctrl.jest.ts b/public/app/features/dashboard/specs/dashboard_import_ctrl.jest.ts
index 737eb360461..d75bd42f0c1 100644
--- a/public/app/features/dashboard/specs/dashboard_import_ctrl.jest.ts
+++ b/public/app/features/dashboard/specs/dashboard_import_ctrl.jest.ts
@@ -15,6 +15,7 @@ describe('DashboardImportCtrl', function() {
     backendSrv = {
       search: jest.fn().mockReturnValue(Promise.resolve([])),
+      getDashboardByUid: jest.fn().mockReturnValue(Promise.resolve([])),
       get: jest.fn(),
     };
diff --git a/public/app/features/dashboard/specs/dashboard_model.jest.ts b/public/app/features/dashboard/specs/dashboard_model.jest.ts
index 6f0b45c9ba8..6ac642cd58e 100644
--- a/public/app/features/dashboard/specs/dashboard_model.jest.ts
+++ b/public/app/features/dashboard/specs/dashboard_model.jest.ts
@@ -434,4 +434,205 @@ describe('DashboardModel', function() {
       });
     });
   });
+
+  describe('Given model with time', () => {
+    let model: DashboardModel;
+
+    beforeEach(() => {
+      model = new DashboardModel({
+        time: {
+          from: 'now-6h',
+          to: 'now',
+        },
+      });
+      expect(model.hasTimeChanged()).toBeFalsy();
+      model.time = {
+        from: 'now-3h',
+        to: 'now-1h',
+      };
+    });
+
+    it('hasTimeChanged should be true', () => {
+      expect(model.hasTimeChanged()).toBeTruthy();
+    });
+
+    it('getSaveModelClone should return original time when saveTimerange=false', () => {
+      let options = { saveTimerange: false };
+      let saveModel = model.getSaveModelClone(options);
+
+      expect(saveModel.time.from).toBe('now-6h');
+      expect(saveModel.time.to).toBe('now');
+    });
+
+    it('getSaveModelClone should return updated time when saveTimerange=true', () => {
+      let options = { saveTimerange: true };
+      let saveModel = model.getSaveModelClone(options);
+
+      expect(saveModel.time.from).toBe('now-3h');
+      expect(saveModel.time.to).toBe('now-1h');
+    });
+
+    it('hasTimeChanged should be false after original time is reset', () => {
+      model.resetOriginalTime();
+
expect(model.hasTimeChanged()).toBeFalsy(); + }); + + it('getSaveModelClone should return original time when saveTimerange=false', () => { + let options = { saveTimerange: false }; + let saveModel = model.getSaveModelClone(options); + + expect(saveModel.time.from).toBe('now-6h'); + expect(saveModel.time.to).toBe('now'); + }); + + it('getSaveModelClone should return updated time when saveTimerange=true', () => { + let options = { saveTimerange: true }; + let saveModel = model.getSaveModelClone(options); + + expect(saveModel.time.from).toBe('now-3h'); + expect(saveModel.time.to).toBe('now-1h'); + }); + }); + + describe('Given model with template variable of type query', () => { + let model: DashboardModel; + + beforeEach(() => { + model = new DashboardModel({ + templating: { + list: [ + { + name: 'Server', + type: 'query', + current: { + selected: true, + text: 'server_001', + value: 'server_001', + }, + }, + ], + }, + }); + expect(model.hasVariableValuesChanged()).toBeFalsy(); + }); + + it('hasVariableValuesChanged should be false when adding a template variable', () => { + model.templating.list.push({ + name: 'Server2', + type: 'query', + current: { + selected: true, + text: 'server_002', + value: 'server_002', + }, + }); + expect(model.hasVariableValuesChanged()).toBeFalsy(); + }); + + it('hasVariableValuesChanged should be false when removing existing template variable', () => { + model.templating.list = []; + expect(model.hasVariableValuesChanged()).toBeFalsy(); + }); + + it('hasVariableValuesChanged should be true when changing value of template variable', () => { + model.templating.list[0].current.text = 'server_002'; + expect(model.hasVariableValuesChanged()).toBeTruthy(); + }); + + it('getSaveModelClone should return original variable when saveVariables=false', () => { + model.templating.list[0].current.text = 'server_002'; + + let options = { saveVariables: false }; + let saveModel = model.getSaveModelClone(options); + + expect(saveModel.templating.list[0].current.text).toBe('server_001'); + }); + + it('getSaveModelClone should return updated variable when saveVariables=true', () => { + model.templating.list[0].current.text = 'server_002'; + + let options = { saveVariables: true }; + let saveModel = model.getSaveModelClone(options); + + expect(saveModel.templating.list[0].current.text).toBe('server_002'); + }); + }); + + describe('Given model with template variable of type adhoc', () => { + let model: DashboardModel; + + beforeEach(() => { + model = new DashboardModel({ + templating: { + list: [ + { + name: 'Filter', + type: 'adhoc', + filters: [ + { + key: '@hostname', + operator: '=', + value: 'server 20', + }, + ], + }, + ], + }, + }); + expect(model.hasVariableValuesChanged()).toBeFalsy(); + }); + + it('hasVariableValuesChanged should be false when adding a template variable', () => { + model.templating.list.push({ + name: 'Filter', + type: 'adhoc', + filters: [ + { + key: '@hostname', + operator: '=', + value: 'server 1', + }, + ], + }); + expect(model.hasVariableValuesChanged()).toBeFalsy(); + }); + + it('hasVariableValuesChanged should be false when removing existing template variable', () => { + model.templating.list = []; + expect(model.hasVariableValuesChanged()).toBeFalsy(); + }); + + it('hasVariableValuesChanged should be true when changing value of filter', () => { + model.templating.list[0].filters[0].value = 'server 1'; + expect(model.hasVariableValuesChanged()).toBeTruthy(); + }); + + it('hasVariableValuesChanged should be true when adding an additional condition', 
() => { + model.templating.list[0].filters[0].condition = 'AND'; + model.templating.list[0].filters[1] = { + key: '@metric', + operator: '=', + value: 'logins.count', + }; + expect(model.hasVariableValuesChanged()).toBeTruthy(); + }); + + it('getSaveModelClone should return original variable when saveVariables=false', () => { + model.templating.list[0].filters[0].value = 'server 1'; + + let options = { saveVariables: false }; + let saveModel = model.getSaveModelClone(options); + + expect(saveModel.templating.list[0].filters[0].value).toBe('server 20'); + }); + + it('getSaveModelClone should return updated variable when saveVariables=true', () => { + model.templating.list[0].filters[0].value = 'server 1'; + + let options = { saveVariables: true }; + let saveModel = model.getSaveModelClone(options); + + expect(saveModel.templating.list[0].filters[0].value).toBe('server 1'); + }); + }); }); diff --git a/public/app/features/dashboard/specs/exporter.jest.ts b/public/app/features/dashboard/specs/exporter.jest.ts new file mode 100644 index 00000000000..c7727a4af4d --- /dev/null +++ b/public/app/features/dashboard/specs/exporter.jest.ts @@ -0,0 +1,244 @@ +jest.mock('app/core/store', () => { + return { + getBool: jest.fn(), + }; +}); + +import _ from 'lodash'; +import config from 'app/core/config'; +import { DashboardExporter } from '../export/exporter'; +import { DashboardModel } from '../dashboard_model'; + +describe('given dashboard with repeated panels', () => { + var dash, exported; + + beforeEach(done => { + dash = { + templating: { + list: [ + { + name: 'apps', + type: 'query', + datasource: 'gfdb', + current: { value: 'Asd', text: 'Asd' }, + options: [{ value: 'Asd', text: 'Asd' }], + }, + { + name: 'prefix', + type: 'constant', + current: { value: 'collectd', text: 'collectd' }, + options: [], + }, + { + name: 'ds', + type: 'datasource', + query: 'testdb', + current: { value: 'prod', text: 'prod' }, + options: [], + }, + ], + }, + annotations: { + list: [ + { + name: 'logs', + datasource: 'gfdb', + }, + ], + }, + panels: [ + { id: 6, datasource: 'gfdb', type: 'graph' }, + { id: 7 }, + { + id: 8, + datasource: '-- Mixed --', + targets: [{ datasource: 'other' }], + }, + { id: 9, datasource: '$ds' }, + { + id: 2, + repeat: 'apps', + datasource: 'gfdb', + type: 'graph', + }, + { id: 3, repeat: null, repeatPanelId: 2 }, + { + id: 4, + collapsed: true, + panels: [ + { id: 10, datasource: 'gfdb', type: 'table' }, + { id: 11 }, + { + id: 12, + datasource: '-- Mixed --', + targets: [{ datasource: 'other' }], + }, + { id: 13, datasource: '$ds' }, + { + id: 14, + repeat: 'apps', + datasource: 'gfdb', + type: 'heatmap', + }, + { id: 15, repeat: null, repeatPanelId: 14 }, + ], + }, + ], + }; + + config.buildInfo.version = '3.0.2'; + + //Stubs test function calls + var datasourceSrvStub = { get: jest.fn(arg => getStub(arg)) }; + + config.panels['graph'] = { + id: 'graph', + name: 'Graph', + info: { version: '1.1.0' }, + }; + + config.panels['table'] = { + id: 'table', + name: 'Table', + info: { version: '1.1.1' }, + }; + + config.panels['heatmap'] = { + id: 'heatmap', + name: 'Heatmap', + info: { version: '1.1.2' }, + }; + + dash = new DashboardModel(dash, {}); + var exporter = new DashboardExporter(datasourceSrvStub); + exporter.makeExportable(dash).then(clean => { + exported = clean; + done(); + }); + }); + + it('should replace datasource refs', () => { + var panel = exported.panels[0]; + expect(panel.datasource).toBe('${DS_GFDB}'); + }); + + it('should replace datasource refs in collapsed row', () => 
{ + var panel = exported.panels[5].panels[0]; + expect(panel.datasource).toBe('${DS_GFDB}'); + }); + + it('should replace datasource in variable query', () => { + expect(exported.templating.list[0].datasource).toBe('${DS_GFDB}'); + expect(exported.templating.list[0].options.length).toBe(0); + expect(exported.templating.list[0].current.value).toBe(undefined); + expect(exported.templating.list[0].current.text).toBe(undefined); + }); + + it('should replace datasource in annotation query', () => { + expect(exported.annotations.list[1].datasource).toBe('${DS_GFDB}'); + }); + + it('should add datasource as input', () => { + expect(exported.__inputs[0].name).toBe('DS_GFDB'); + expect(exported.__inputs[0].pluginId).toBe('testdb'); + expect(exported.__inputs[0].type).toBe('datasource'); + }); + + it('should add datasource to required', () => { + var require = _.find(exported.__requires, { name: 'TestDB' }); + expect(require.name).toBe('TestDB'); + expect(require.id).toBe('testdb'); + expect(require.type).toBe('datasource'); + expect(require.version).toBe('1.2.1'); + }); + + it('should not add built in datasources to required', () => { + var require = _.find(exported.__requires, { name: 'Mixed' }); + expect(require).toBe(undefined); + }); + + it('should add datasources used in mixed mode', () => { + var require = _.find(exported.__requires, { name: 'OtherDB' }); + expect(require).not.toBe(undefined); + }); + + it('should add graph panel to required', () => { + var require = _.find(exported.__requires, { name: 'Graph' }); + expect(require.name).toBe('Graph'); + expect(require.id).toBe('graph'); + expect(require.version).toBe('1.1.0'); + }); + + it('should add table panel to required', () => { + var require = _.find(exported.__requires, { name: 'Table' }); + expect(require.name).toBe('Table'); + expect(require.id).toBe('table'); + expect(require.version).toBe('1.1.1'); + }); + + it('should add heatmap panel to required', () => { + var require = _.find(exported.__requires, { name: 'Heatmap' }); + expect(require.name).toBe('Heatmap'); + expect(require.id).toBe('heatmap'); + expect(require.version).toBe('1.1.2'); + }); + + it('should add grafana version', () => { + var require = _.find(exported.__requires, { name: 'Grafana' }); + expect(require.type).toBe('grafana'); + expect(require.id).toBe('grafana'); + expect(require.version).toBe('3.0.2'); + }); + + it('should add constant template variables as inputs', () => { + var input = _.find(exported.__inputs, { name: 'VAR_PREFIX' }); + expect(input.type).toBe('constant'); + expect(input.label).toBe('prefix'); + expect(input.value).toBe('collectd'); + }); + + it('should templatize constant variables', () => { + var variable = _.find(exported.templating.list, { name: 'prefix' }); + expect(variable.query).toBe('${VAR_PREFIX}'); + expect(variable.current.text).toBe('${VAR_PREFIX}'); + expect(variable.current.value).toBe('${VAR_PREFIX}'); + expect(variable.options[0].text).toBe('${VAR_PREFIX}'); + expect(variable.options[0].value).toBe('${VAR_PREFIX}'); + }); +}); + +// Stub responses +var stubs = []; +stubs['gfdb'] = { + name: 'gfdb', + meta: { id: 'testdb', info: { version: '1.2.1' }, name: 'TestDB' }, +}; + +stubs['other'] = { + name: 'other', + meta: { id: 'other', info: { version: '1.2.1' }, name: 'OtherDB' }, +}; + +stubs['-- Mixed --'] = { + name: 'mixed', + meta: { + id: 'mixed', + info: { version: '1.2.1' }, + name: 'Mixed', + builtIn: true, + }, +}; + +stubs['-- Grafana --'] = { + name: '-- Grafana --', + meta: { + id: 'grafana', + info: { version: 
'1.2.1' }, + name: 'grafana', + builtIn: true, + }, +}; + +function getStub(arg) { + return Promise.resolve(stubs[arg]); +} diff --git a/public/app/features/dashboard/specs/exporter_specs.ts b/public/app/features/dashboard/specs/exporter_specs.ts deleted file mode 100644 index 38ce1824f68..00000000000 --- a/public/app/features/dashboard/specs/exporter_specs.ts +++ /dev/null @@ -1,187 +0,0 @@ -import { describe, beforeEach, it, sinon, expect } from 'test/lib/common'; - -import _ from 'lodash'; -import config from 'app/core/config'; -import { DashboardExporter } from '../export/exporter'; -import { DashboardModel } from '../dashboard_model'; - -describe('given dashboard with repeated panels', function() { - var dash, exported; - - beforeEach(done => { - dash = { - templating: { list: [] }, - annotations: { list: [] }, - }; - - config.buildInfo = { - version: '3.0.2', - }; - - dash.templating.list.push({ - name: 'apps', - type: 'query', - datasource: 'gfdb', - current: { value: 'Asd', text: 'Asd' }, - options: [{ value: 'Asd', text: 'Asd' }], - }); - - dash.templating.list.push({ - name: 'prefix', - type: 'constant', - current: { value: 'collectd', text: 'collectd' }, - options: [], - }); - - dash.templating.list.push({ - name: 'ds', - type: 'datasource', - query: 'testdb', - current: { value: 'prod', text: 'prod' }, - options: [], - }); - - dash.annotations.list.push({ - name: 'logs', - datasource: 'gfdb', - }); - - dash.panels = [ - { id: 6, datasource: 'gfdb', type: 'graph' }, - { id: 7 }, - { - id: 8, - datasource: '-- Mixed --', - targets: [{ datasource: 'other' }], - }, - { id: 9, datasource: '$ds' }, - ]; - - dash.panels.push({ - id: 2, - repeat: 'apps', - datasource: 'gfdb', - type: 'graph', - }); - dash.panels.push({ id: 3, repeat: null, repeatPanelId: 2 }); - - var datasourceSrvStub = { get: sinon.stub() }; - datasourceSrvStub.get.withArgs('gfdb').returns( - Promise.resolve({ - name: 'gfdb', - meta: { id: 'testdb', info: { version: '1.2.1' }, name: 'TestDB' }, - }) - ); - datasourceSrvStub.get.withArgs('other').returns( - Promise.resolve({ - name: 'other', - meta: { id: 'other', info: { version: '1.2.1' }, name: 'OtherDB' }, - }) - ); - datasourceSrvStub.get.withArgs('-- Mixed --').returns( - Promise.resolve({ - name: 'mixed', - meta: { - id: 'mixed', - info: { version: '1.2.1' }, - name: 'Mixed', - builtIn: true, - }, - }) - ); - datasourceSrvStub.get.withArgs('-- Grafana --').returns( - Promise.resolve({ - name: '-- Grafana --', - meta: { - id: 'grafana', - info: { version: '1.2.1' }, - name: 'grafana', - builtIn: true, - }, - }) - ); - - config.panels['graph'] = { - id: 'graph', - name: 'Graph', - info: { version: '1.1.0' }, - }; - - dash = new DashboardModel(dash, {}); - var exporter = new DashboardExporter(datasourceSrvStub); - exporter.makeExportable(dash).then(clean => { - exported = clean; - done(); - }); - }); - - it('should replace datasource refs', function() { - var panel = exported.panels[0]; - expect(panel.datasource).to.be('${DS_GFDB}'); - }); - - it('should replace datasource in variable query', function() { - expect(exported.templating.list[0].datasource).to.be('${DS_GFDB}'); - expect(exported.templating.list[0].options.length).to.be(0); - expect(exported.templating.list[0].current.value).to.be(undefined); - expect(exported.templating.list[0].current.text).to.be(undefined); - }); - - it('should replace datasource in annotation query', function() { - expect(exported.annotations.list[1].datasource).to.be('${DS_GFDB}'); - }); - - it('should add datasource as input', 
function() { - expect(exported.__inputs[0].name).to.be('DS_GFDB'); - expect(exported.__inputs[0].pluginId).to.be('testdb'); - expect(exported.__inputs[0].type).to.be('datasource'); - }); - - it('should add datasource to required', function() { - var require = _.find(exported.__requires, { name: 'TestDB' }); - expect(require.name).to.be('TestDB'); - expect(require.id).to.be('testdb'); - expect(require.type).to.be('datasource'); - expect(require.version).to.be('1.2.1'); - }); - - it('should not add built in datasources to required', function() { - var require = _.find(exported.__requires, { name: 'Mixed' }); - expect(require).to.be(undefined); - }); - - it('should add datasources used in mixed mode', function() { - var require = _.find(exported.__requires, { name: 'OtherDB' }); - expect(require).to.not.be(undefined); - }); - - it('should add panel to required', function() { - var require = _.find(exported.__requires, { name: 'Graph' }); - expect(require.name).to.be('Graph'); - expect(require.id).to.be('graph'); - expect(require.version).to.be('1.1.0'); - }); - - it('should add grafana version', function() { - var require = _.find(exported.__requires, { name: 'Grafana' }); - expect(require.type).to.be('grafana'); - expect(require.id).to.be('grafana'); - expect(require.version).to.be('3.0.2'); - }); - - it('should add constant template variables as inputs', function() { - var input = _.find(exported.__inputs, { name: 'VAR_PREFIX' }); - expect(input.type).to.be('constant'); - expect(input.label).to.be('prefix'); - expect(input.value).to.be('collectd'); - }); - - it('should templatize constant variables', function() { - var variable = _.find(exported.templating.list, { name: 'prefix' }); - expect(variable.query).to.be('${VAR_PREFIX}'); - expect(variable.current.text).to.be('${VAR_PREFIX}'); - expect(variable.current.value).to.be('${VAR_PREFIX}'); - expect(variable.options[0].text).to.be('${VAR_PREFIX}'); - expect(variable.options[0].value).to.be('${VAR_PREFIX}'); - }); -}); diff --git a/public/app/features/dashboard/specs/history_ctrl.jest.ts b/public/app/features/dashboard/specs/history_ctrl.jest.ts new file mode 100644 index 00000000000..991ecb2c60d --- /dev/null +++ b/public/app/features/dashboard/specs/history_ctrl.jest.ts @@ -0,0 +1,313 @@ +import _ from 'lodash'; +import { HistoryListCtrl } from 'app/features/dashboard/history/history'; +import { versions, compare, restore } from './history_mocks'; +import $q from 'q'; + +describe('HistoryListCtrl', () => { + const RESTORE_ID = 4; + + const versionsResponse: any = versions(); + + restore(7, RESTORE_ID); + + let historySrv; + let $rootScope; + let historyListCtrl; + beforeEach(() => { + historySrv = { + calculateDiff: jest.fn(), + restoreDashboard: jest.fn(() => $q.when({})), + }; + $rootScope = { + appEvent: jest.fn(), + onAppEvent: jest.fn(), + }; + }); + + describe('when the history list component is loaded', () => { + let deferred; + + beforeEach(() => { + deferred = $q.defer({}); + historySrv.getHistoryList = jest.fn(() => deferred.promise); + + historyListCtrl = new HistoryListCtrl({}, $rootScope, {}, $q, historySrv, {}); + + historyListCtrl.dashboard = { + id: 2, + version: 3, + formatDate: jest.fn(() => 'date'), + }; + }); + + it('should immediately attempt to fetch the history list', () => { + expect(historySrv.getHistoryList).toHaveBeenCalledTimes(1); + }); + + describe('and the history list is successfully fetched', () => { + beforeEach(async () => { + deferred.resolve(versionsResponse); + await historyListCtrl.getLog(); + }); + 
+ it("should reset the controller's state", async () => { + expect(historyListCtrl.mode).toBe('list'); + expect(historyListCtrl.delta).toEqual({ basic: '', json: '' }); + + expect(historyListCtrl.canCompare).toBe(false); + expect(_.find(historyListCtrl.revisions, rev => rev.checked)).toBe(undefined); + }); + + it('should indicate loading has finished', () => { + expect(historyListCtrl.loading).toBe(false); + }); + + it('should store the revisions sorted desc by version id', () => { + expect(historyListCtrl.revisions[0].version).toBe(4); + expect(historyListCtrl.revisions[1].version).toBe(3); + expect(historyListCtrl.revisions[2].version).toBe(2); + expect(historyListCtrl.revisions[3].version).toBe(1); + }); + + it('should add a checked property to each revision', () => { + let actual = _.filter(historyListCtrl.revisions, rev => rev.hasOwnProperty('checked')); + expect(actual.length).toBe(4); + }); + + it('should set all checked properties to false on reset', () => { + historyListCtrl.revisions[0].checked = true; + historyListCtrl.revisions[2].checked = true; + historyListCtrl.reset(); + let actual = _.filter(historyListCtrl.revisions, rev => !rev.checked); + expect(actual.length).toBe(4); + }); + }); + + describe('and fetching the history list fails', () => { + beforeEach(async () => { + deferred = $q.defer(); + + historySrv.getHistoryList = jest.fn(() => deferred.promise); + + historyListCtrl = new HistoryListCtrl({}, $rootScope, {}, $q, historySrv, {}); + + deferred.reject(new Error('HistoryListError')); + + await historyListCtrl.getLog(); + }); + + it("should reset the controller's state", () => { + expect(historyListCtrl.mode).toBe('list'); + expect(historyListCtrl.delta).toEqual({ basic: '', json: '' }); + expect(_.find(historyListCtrl.revisions, rev => rev.checked)).toBe(undefined); + }); + + it('should indicate loading has finished', () => { + expect(historyListCtrl.loading).toBe(false); + }); + + it('should have an empty revisions list', () => { + expect(historyListCtrl.revisions).toEqual([]); + }); + }); + + describe('should update the history list when the dashboard is saved', () => { + beforeEach(() => { + historyListCtrl.dashboard = { version: 3 }; + historyListCtrl.resetFromSource = jest.fn(); + }); + + it('should listen for the `dashboard-saved` appEvent', () => { + expect($rootScope.onAppEvent).toHaveBeenCalledTimes(1); + expect($rootScope.onAppEvent.mock.calls[0][0]).toBe('dashboard-saved'); + }); + + it('should call `onDashboardSaved` when the appEvent is received', () => { + expect($rootScope.onAppEvent.mock.calls[0][1]).not.toBe(historyListCtrl.onDashboardSaved); + expect($rootScope.onAppEvent.mock.calls[0][1].toString).toBe(historyListCtrl.onDashboardSaved.toString); + }); + }); + }); + + describe('when the user wants to compare two revisions', () => { + let deferred; + + beforeEach(async () => { + deferred = $q.defer({}); + historySrv.getHistoryList = jest.fn(() => $q.when(versionsResponse)); + historySrv.calculateDiff = jest.fn(() => deferred.promise); + + historyListCtrl = new HistoryListCtrl({}, $rootScope, {}, $q, historySrv, {}); + + historyListCtrl.dashboard = { + id: 2, + version: 3, + formatDate: jest.fn(() => 'date'), + }; + + deferred.resolve(versionsResponse); + await historyListCtrl.getLog(); + }); + + it('should have already fetched the history list', () => { + expect(historySrv.getHistoryList).toHaveBeenCalled(); + expect(historyListCtrl.revisions.length).toBeGreaterThan(0); + }); + + it('should check that two valid versions are selected', () => { + // [] 
+ expect(historyListCtrl.canCompare).toBe(false); + + // single value + historyListCtrl.revisions = [{ checked: true }]; + historyListCtrl.revisionSelectionChanged(); + expect(historyListCtrl.canCompare).toBe(false); + + // both values in range + historyListCtrl.revisions = [{ checked: true }, { checked: true }]; + historyListCtrl.revisionSelectionChanged(); + expect(historyListCtrl.canCompare).toBe(true); + }); + + describe('and the basic diff is successfully fetched', () => { + beforeEach(async () => { + deferred = $q.defer({}); + historySrv.calculateDiff = jest.fn(() => deferred.promise); + deferred.resolve(compare('basic')); + historyListCtrl.revisions[1].checked = true; + historyListCtrl.revisions[3].checked = true; + await historyListCtrl.getDiff('basic'); + }); + + it('should fetch the basic diff if two valid versions are selected', () => { + expect(historySrv.calculateDiff).toHaveBeenCalledTimes(1); + expect(historyListCtrl.delta.basic).toBe('
    '); + expect(historyListCtrl.delta.json).toBe(''); + }); + + it('should set the basic diff view as active', () => { + expect(historyListCtrl.mode).toBe('compare'); + expect(historyListCtrl.diff).toBe('basic'); + }); + + it('should indicate loading has finished', () => { + expect(historyListCtrl.loading).toBe(false); + }); + }); + + describe('and the json diff is successfully fetched', () => { + beforeEach(async () => { + deferred = $q.defer({}); + historySrv.calculateDiff = jest.fn(() => deferred.promise); + deferred.resolve(compare('json')); + historyListCtrl.revisions[1].checked = true; + historyListCtrl.revisions[3].checked = true; + await historyListCtrl.getDiff('json'); + }); + + it('should fetch the json diff if two valid versions are selected', () => { + expect(historySrv.calculateDiff).toHaveBeenCalledTimes(1); + expect(historyListCtrl.delta.basic).toBe(''); + expect(historyListCtrl.delta.json).toBe('
    '); + }); + + it('should set the json diff view as active', () => { + expect(historyListCtrl.mode).toBe('compare'); + expect(historyListCtrl.diff).toBe('json'); + }); + + it('should indicate loading has finished', () => { + expect(historyListCtrl.loading).toBe(false); + }); + }); + + describe('and diffs have already been fetched', () => { + beforeEach(async () => { + deferred.resolve(compare('basic')); + + historyListCtrl.revisions[3].checked = true; + historyListCtrl.revisions[1].checked = true; + historyListCtrl.delta.basic = 'cached basic'; + historyListCtrl.getDiff('basic'); + await historySrv.calculateDiff(); + }); + + it('should use the cached diffs instead of fetching', () => { + expect(historySrv.calculateDiff).toHaveBeenCalledTimes(1); + expect(historyListCtrl.delta.basic).toBe('cached basic'); + }); + + it('should indicate loading has finished', () => { + expect(historyListCtrl.loading).toBe(false); + }); + }); + + describe('and fetching the diff fails', () => { + beforeEach(async () => { + deferred = $q.defer({}); + historySrv.calculateDiff = jest.fn(() => deferred.promise); + + historyListCtrl.revisions[3].checked = true; + historyListCtrl.revisions[1].checked = true; + deferred.reject(); + await historyListCtrl.getDiff('basic'); + }); + + it('should fetch the diff if two valid versions are selected', () => { + expect(historySrv.calculateDiff).toHaveBeenCalledTimes(1); + }); + + it('should return to the history list view', () => { + expect(historyListCtrl.mode).toBe('list'); + }); + + it('should indicate loading has finished', () => { + expect(historyListCtrl.loading).toBe(false); + }); + + it('should have an empty delta/changeset', () => { + expect(historyListCtrl.delta).toEqual({ basic: '', json: '' }); + }); + }); + }); + + describe('when the user wants to restore a revision', () => { + let deferred; + + beforeEach(async () => { + deferred = $q.defer(); + historySrv.getHistoryList = jest.fn(() => $q.when(versionsResponse)); + historySrv.restoreDashboard = jest.fn(() => deferred.promise); + + historyListCtrl = new HistoryListCtrl({}, $rootScope, {}, $q, historySrv, {}); + + historyListCtrl.dashboard = { + id: 1, + }; + historyListCtrl.restore(); + deferred.resolve(versionsResponse); + await historyListCtrl.getLog(); + }); + + it('should display a modal allowing the user to restore or cancel', () => { + expect($rootScope.appEvent).toHaveBeenCalledTimes(1); + expect($rootScope.appEvent.mock.calls[0][0]).toBe('confirm-modal'); + }); + + describe('and restore fails to fetch', () => { + beforeEach(async () => { + deferred = $q.defer(); + historySrv.getHistoryList = jest.fn(() => $q.when(versionsResponse)); + historySrv.restoreDashboard = jest.fn(() => deferred.promise); + historyListCtrl = new HistoryListCtrl({}, $rootScope, {}, $q, historySrv, {}); + deferred.reject(new Error('RestoreError')); + historyListCtrl.restoreConfirm(RESTORE_ID); + await historyListCtrl.getLog(); + }); + + it('should indicate loading has finished', () => { + expect(historyListCtrl.loading).toBe(false); + }); + }); + }); +}); diff --git a/public/app/features/dashboard/specs/history_ctrl_specs.ts b/public/app/features/dashboard/specs/history_ctrl_specs.ts deleted file mode 100644 index cf5fb13b6c1..00000000000 --- a/public/app/features/dashboard/specs/history_ctrl_specs.ts +++ /dev/null @@ -1,329 +0,0 @@ -import { describe, beforeEach, it, sinon, expect, angularMocks } from 'test/lib/common'; - -import _ from 'lodash'; -import { HistoryListCtrl } from 'app/features/dashboard/history/history'; -import { 
versions, compare, restore } from './history_mocks'; - -describe('HistoryListCtrl', function() { - var RESTORE_ID = 4; - - var ctx: any = {}; - var versionsResponse: any = versions(); - - restore(7, RESTORE_ID); - - beforeEach(angularMocks.module('grafana.core')); - beforeEach(angularMocks.module('grafana.services')); - beforeEach( - angularMocks.inject($rootScope => { - ctx.scope = $rootScope.$new(); - }) - ); - - var historySrv; - var $rootScope; - beforeEach(function() { - historySrv = { - getHistoryList: sinon.stub(), - calculateDiff: sinon.stub(), - restoreDashboard: sinon.stub(), - }; - $rootScope = { - appEvent: sinon.spy(), - onAppEvent: sinon.spy(), - }; - }); - - describe('when the history list component is loaded', function() { - var deferred; - - beforeEach( - angularMocks.inject(($controller, $q) => { - deferred = $q.defer(); - historySrv.getHistoryList.returns(deferred.promise); - ctx.ctrl = $controller( - HistoryListCtrl, - { - historySrv, - $rootScope, - $scope: ctx.scope, - }, - { - dashboard: { - id: 2, - version: 3, - formatDate: sinon.stub().returns('date'), - }, - } - ); - }) - ); - - it('should immediately attempt to fetch the history list', function() { - expect(historySrv.getHistoryList.calledOnce).to.be(true); - }); - - describe('and the history list is successfully fetched', function() { - beforeEach(function() { - deferred.resolve(versionsResponse); - ctx.ctrl.$scope.$apply(); - }); - - it("should reset the controller's state", function() { - expect(ctx.ctrl.mode).to.be('list'); - expect(ctx.ctrl.delta).to.eql({ basic: '', json: '' }); - expect(ctx.ctrl.canCompare).to.be(false); - expect(_.find(ctx.ctrl.revisions, rev => rev.checked)).to.be(undefined); - }); - - it('should indicate loading has finished', function() { - expect(ctx.ctrl.loading).to.be(false); - }); - - it('should store the revisions sorted desc by version id', function() { - expect(ctx.ctrl.revisions[0].version).to.be(4); - expect(ctx.ctrl.revisions[1].version).to.be(3); - expect(ctx.ctrl.revisions[2].version).to.be(2); - expect(ctx.ctrl.revisions[3].version).to.be(1); - }); - - it('should add a checked property to each revision', function() { - var actual = _.filter(ctx.ctrl.revisions, rev => rev.hasOwnProperty('checked')); - expect(actual.length).to.be(4); - }); - - it('should set all checked properties to false on reset', function() { - ctx.ctrl.revisions[0].checked = true; - ctx.ctrl.revisions[2].checked = true; - ctx.ctrl.reset(); - var actual = _.filter(ctx.ctrl.revisions, rev => !rev.checked); - expect(actual.length).to.be(4); - }); - }); - - describe('and fetching the history list fails', function() { - beforeEach(function() { - deferred.reject(new Error('HistoryListError')); - ctx.ctrl.$scope.$apply(); - }); - - it("should reset the controller's state", function() { - expect(ctx.ctrl.mode).to.be('list'); - expect(ctx.ctrl.delta).to.eql({ basic: '', json: '' }); - expect(_.find(ctx.ctrl.revisions, rev => rev.checked)).to.be(undefined); - }); - - it('should indicate loading has finished', function() { - expect(ctx.ctrl.loading).to.be(false); - }); - - it('should have an empty revisions list', function() { - expect(ctx.ctrl.revisions).to.eql([]); - }); - }); - - describe('should update the history list when the dashboard is saved', function() { - beforeEach(function() { - ctx.ctrl.dashboard = { version: 3 }; - ctx.ctrl.resetFromSource = sinon.spy(); - }); - - it('should listen for the `dashboard-saved` appEvent', function() { - expect($rootScope.onAppEvent.calledOnce).to.be(true); - 
expect($rootScope.onAppEvent.getCall(0).args[0]).to.be('dashboard-saved'); - }); - - it('should call `onDashboardSaved` when the appEvent is received', function() { - expect($rootScope.onAppEvent.getCall(0).args[1]).to.not.be(ctx.ctrl.onDashboardSaved); - expect($rootScope.onAppEvent.getCall(0).args[1].toString).to.be(ctx.ctrl.onDashboardSaved.toString); - }); - }); - }); - - describe('when the user wants to compare two revisions', function() { - var deferred; - - beforeEach( - angularMocks.inject(($controller, $q) => { - deferred = $q.defer(); - historySrv.getHistoryList.returns($q.when(versionsResponse)); - historySrv.calculateDiff.returns(deferred.promise); - ctx.ctrl = $controller( - HistoryListCtrl, - { - historySrv, - $rootScope, - $scope: ctx.scope, - }, - { - dashboard: { - id: 2, - version: 3, - formatDate: sinon.stub().returns('date'), - }, - } - ); - - ctx.ctrl.$scope.onDashboardSaved = sinon.spy(); - ctx.ctrl.$scope.$apply(); - }) - ); - - it('should have already fetched the history list', function() { - expect(historySrv.getHistoryList.calledOnce).to.be(true); - expect(ctx.ctrl.revisions.length).to.be.above(0); - }); - - it('should check that two valid versions are selected', function() { - // [] - expect(ctx.ctrl.canCompare).to.be(false); - - // single value - ctx.ctrl.revisions = [{ checked: true }]; - ctx.ctrl.revisionSelectionChanged(); - expect(ctx.ctrl.canCompare).to.be(false); - - // both values in range - ctx.ctrl.revisions = [{ checked: true }, { checked: true }]; - ctx.ctrl.revisionSelectionChanged(); - expect(ctx.ctrl.canCompare).to.be(true); - }); - - describe('and the basic diff is successfully fetched', function() { - beforeEach(function() { - deferred.resolve(compare('basic')); - ctx.ctrl.revisions[1].checked = true; - ctx.ctrl.revisions[3].checked = true; - ctx.ctrl.getDiff('basic'); - ctx.ctrl.$scope.$apply(); - }); - - it('should fetch the basic diff if two valid versions are selected', function() { - expect(historySrv.calculateDiff.calledOnce).to.be(true); - expect(ctx.ctrl.delta.basic).to.be('
    '); - expect(ctx.ctrl.delta.json).to.be(''); - }); - - it('should set the basic diff view as active', function() { - expect(ctx.ctrl.mode).to.be('compare'); - expect(ctx.ctrl.diff).to.be('basic'); - }); - - it('should indicate loading has finished', function() { - expect(ctx.ctrl.loading).to.be(false); - }); - }); - - describe('and the json diff is successfully fetched', function() { - beforeEach(function() { - deferred.resolve(compare('json')); - ctx.ctrl.revisions[1].checked = true; - ctx.ctrl.revisions[3].checked = true; - ctx.ctrl.getDiff('json'); - ctx.ctrl.$scope.$apply(); - }); - - it('should fetch the json diff if two valid versions are selected', function() { - expect(historySrv.calculateDiff.calledOnce).to.be(true); - expect(ctx.ctrl.delta.basic).to.be(''); - expect(ctx.ctrl.delta.json).to.be('
    '); - }); - - it('should set the json diff view as active', function() { - expect(ctx.ctrl.mode).to.be('compare'); - expect(ctx.ctrl.diff).to.be('json'); - }); - - it('should indicate loading has finished', function() { - expect(ctx.ctrl.loading).to.be(false); - }); - }); - - describe('and diffs have already been fetched', function() { - beforeEach(function() { - deferred.resolve(compare('basic')); - ctx.ctrl.revisions[3].checked = true; - ctx.ctrl.revisions[1].checked = true; - ctx.ctrl.delta.basic = 'cached basic'; - ctx.ctrl.getDiff('basic'); - ctx.ctrl.$scope.$apply(); - }); - - it('should use the cached diffs instead of fetching', function() { - expect(historySrv.calculateDiff.calledOnce).to.be(false); - expect(ctx.ctrl.delta.basic).to.be('cached basic'); - }); - - it('should indicate loading has finished', function() { - expect(ctx.ctrl.loading).to.be(false); - }); - }); - - describe('and fetching the diff fails', function() { - beforeEach(function() { - deferred.reject(new Error('DiffError')); - ctx.ctrl.revisions[3].checked = true; - ctx.ctrl.revisions[1].checked = true; - ctx.ctrl.getDiff('basic'); - ctx.ctrl.$scope.$apply(); - }); - - it('should fetch the diff if two valid versions are selected', function() { - expect(historySrv.calculateDiff.calledOnce).to.be(true); - }); - - it('should return to the history list view', function() { - expect(ctx.ctrl.mode).to.be('list'); - }); - - it('should indicate loading has finished', function() { - expect(ctx.ctrl.loading).to.be(false); - }); - - it('should have an empty delta/changeset', function() { - expect(ctx.ctrl.delta).to.eql({ basic: '', json: '' }); - }); - }); - }); - - describe('when the user wants to restore a revision', function() { - var deferred; - - beforeEach( - angularMocks.inject(($controller, $q) => { - deferred = $q.defer(); - historySrv.getHistoryList.returns($q.when(versionsResponse)); - historySrv.restoreDashboard.returns(deferred.promise); - ctx.ctrl = $controller(HistoryListCtrl, { - historySrv, - contextSrv: { user: { name: 'Carlos' } }, - $rootScope, - $scope: ctx.scope, - }); - ctx.ctrl.dashboard = { id: 1 }; - ctx.ctrl.restore(); - ctx.ctrl.$scope.$apply(); - }) - ); - - it('should display a modal allowing the user to restore or cancel', function() { - expect($rootScope.appEvent.calledOnce).to.be(true); - expect($rootScope.appEvent.calledWith('confirm-modal')).to.be(true); - }); - - describe('and restore fails to fetch', function() { - beforeEach(function() { - deferred.reject(new Error('RestoreError')); - ctx.ctrl.restoreConfirm(RESTORE_ID); - try { - // this throws error, due to promise rejection - ctx.ctrl.$scope.$apply(); - } catch (e) {} - }); - - it('should indicate loading has finished', function() { - expect(ctx.ctrl.loading).to.be(false); - }); - }); - }); -}); diff --git a/public/app/features/dashboard/specs/history_srv.jest.ts b/public/app/features/dashboard/specs/history_srv.jest.ts new file mode 100644 index 00000000000..401b098a0e1 --- /dev/null +++ b/public/app/features/dashboard/specs/history_srv.jest.ts @@ -0,0 +1,61 @@ +import '../history/history_srv'; +import { versions, restore } from './history_mocks'; +import { HistorySrv } from '../history/history_srv'; +import { DashboardModel } from '../dashboard_model'; +jest.mock('app/core/store'); + +describe('historySrv', function() { + const versionsResponse = versions(); + const restoreResponse = restore; + + let backendSrv = { + get: jest.fn(() => Promise.resolve({})), + post: jest.fn(() => Promise.resolve({})), + }; + + let historySrv = new 
HistorySrv(backendSrv); + + const dash = new DashboardModel({ id: 1 }); + const emptyDash = new DashboardModel({}); + const historyListOpts = { limit: 10, start: 0 }; + + describe('getHistoryList', function() { + it('should return a versions array for the given dashboard id', function() { + backendSrv.get = jest.fn(() => Promise.resolve(versionsResponse)); + historySrv = new HistorySrv(backendSrv); + + return historySrv.getHistoryList(dash, historyListOpts).then(function(versions) { + expect(versions).toEqual(versionsResponse); + }); + }); + + it('should return an empty array when not given an id', function() { + return historySrv.getHistoryList(emptyDash, historyListOpts).then(function(versions) { + expect(versions).toEqual([]); + }); + }); + + it('should return an empty array when not given a dashboard', function() { + return historySrv.getHistoryList(null, historyListOpts).then(function(versions) { + expect(versions).toEqual([]); + }); + }); + }); + + describe('restoreDashboard', () => { + it('should return a success response given valid parameters', function() { + let version = 6; + backendSrv.post = jest.fn(() => Promise.resolve(restoreResponse(version))); + historySrv = new HistorySrv(backendSrv); + return historySrv.restoreDashboard(dash, version).then(function(response) { + expect(response).toEqual(restoreResponse(version)); + }); + }); + + it('should return an empty object when not given an id', async () => { + historySrv = new HistorySrv(backendSrv); + let rsp = await historySrv.restoreDashboard(emptyDash, 6); + expect(rsp).toEqual({}); + }); + }); +}); diff --git a/public/app/features/dashboard/specs/history_srv_specs.ts b/public/app/features/dashboard/specs/history_srv_specs.ts deleted file mode 100644 index a4a28ab9a34..00000000000 --- a/public/app/features/dashboard/specs/history_srv_specs.ts +++ /dev/null @@ -1,86 +0,0 @@ -import { describe, beforeEach, it, expect, angularMocks } from 'test/lib/common'; - -import helpers from 'test/specs/helpers'; -import '../history/history_srv'; -import { versions, restore } from './history_mocks'; - -describe('historySrv', function() { - var ctx = new helpers.ServiceTestContext(); - - var versionsResponse = versions(); - var restoreResponse = restore; - - beforeEach(angularMocks.module('grafana.core')); - beforeEach(angularMocks.module('grafana.services')); - beforeEach( - angularMocks.inject(function($httpBackend) { - ctx.$httpBackend = $httpBackend; - $httpBackend.whenRoute('GET', 'api/dashboards/id/:id/versions').respond(versionsResponse); - $httpBackend - .whenRoute('POST', 'api/dashboards/id/:id/restore') - .respond(function(method, url, data, headers, params) { - const parsedData = JSON.parse(data); - return [200, restoreResponse(parsedData.version)]; - }); - }) - ); - - beforeEach(ctx.createService('historySrv')); - - function wrapPromise(ctx, angularPromise) { - return new Promise((resolve, reject) => { - angularPromise.then(resolve, reject); - ctx.$httpBackend.flush(); - }); - } - - describe('getHistoryList', function() { - it('should return a versions array for the given dashboard id', function() { - return wrapPromise( - ctx, - ctx.service.getHistoryList({ id: 1 }).then(function(versions) { - expect(versions).to.eql(versionsResponse); - }) - ); - }); - - it('should return an empty array when not given an id', function() { - return wrapPromise( - ctx, - ctx.service.getHistoryList({}).then(function(versions) { - expect(versions).to.eql([]); - }) - ); - }); - - it('should return an empty array when not given a dashboard', 
function() { - return wrapPromise( - ctx, - ctx.service.getHistoryList().then(function(versions) { - expect(versions).to.eql([]); - }) - ); - }); - }); - - describe('restoreDashboard', function() { - it('should return a success response given valid parameters', function() { - let version = 6; - return wrapPromise( - ctx, - ctx.service.restoreDashboard({ id: 1 }, version).then(function(response) { - expect(response).to.eql(restoreResponse(version)); - }) - ); - }); - - it('should return an empty object when not given an id', function() { - return wrapPromise( - ctx, - ctx.service.restoreDashboard({}, 6).then(function(response) { - expect(response).to.eql({}); - }) - ); - }); - }); -}); diff --git a/public/app/features/dashboard/specs/save_modal.jest.ts b/public/app/features/dashboard/specs/save_modal.jest.ts new file mode 100644 index 00000000000..669ae43a0ff --- /dev/null +++ b/public/app/features/dashboard/specs/save_modal.jest.ts @@ -0,0 +1,57 @@ +import { SaveDashboardModalCtrl } from '../save_modal'; + +const setup = (timeChanged, variableValuesChanged, cb) => { + const dash = { + hasTimeChanged: jest.fn().mockReturnValue(timeChanged), + hasVariableValuesChanged: jest.fn().mockReturnValue(variableValuesChanged), + resetOriginalTime: jest.fn(), + resetOriginalVariables: jest.fn(), + getSaveModelClone: jest.fn().mockReturnValue({}), + }; + const dashboardSrvMock = { + getCurrent: jest.fn().mockReturnValue(dash), + save: jest.fn().mockReturnValue(Promise.resolve()), + }; + const ctrl = new SaveDashboardModalCtrl(dashboardSrvMock); + ctrl.saveForm = { + $valid: true, + }; + ctrl.dismiss = () => Promise.resolve(); + cb(dash, ctrl, dashboardSrvMock); +}; + +describe('SaveDashboardModal', () => { + describe('Given time and template variable values have not changed', () => { + setup(false, false, (dash, ctrl: SaveDashboardModalCtrl) => { + it('When creating ctrl should set time and template variable values changed', () => { + expect(ctrl.timeChange).toBeFalsy(); + expect(ctrl.variableValueChange).toBeFalsy(); + }); + }); + }); + + describe('Given time and template variable values have changed', () => { + setup(true, true, (dash, ctrl: SaveDashboardModalCtrl) => { + it('When creating ctrl should set time and template variable values changed', () => { + expect(ctrl.timeChange).toBeTruthy(); + expect(ctrl.variableValueChange).toBeTruthy(); + }); + + it('When save time and variable value changes disabled and saving should reset original time and template variable values', async () => { + ctrl.saveTimerange = false; + ctrl.saveVariables = false; + await ctrl.save(); + expect(dash.resetOriginalTime).toHaveBeenCalledTimes(0); + expect(dash.resetOriginalVariables).toHaveBeenCalledTimes(0); + }); + + it('When save time and variable value changes enabled and saving should reset original time and template variable values', async () => { + ctrl.saveTimerange = true; + ctrl.saveVariables = true; + await ctrl.save(); + expect(dash.resetOriginalTime).toHaveBeenCalledTimes(1); + expect(dash.resetOriginalVariables).toHaveBeenCalledTimes(1); + }); + }); + }); +}); diff --git a/public/app/features/dashboard/specs/time_srv.jest.ts b/public/app/features/dashboard/specs/time_srv.jest.ts new file mode 100644 index 00000000000..f8d9e42cfd4 --- /dev/null +++ b/public/app/features/dashboard/specs/time_srv.jest.ts @@ -0,0 +1,163 @@ +import { TimeSrv } from '../time_srv'; +import '../time_srv'; +import moment from 'moment'; + +describe('timeSrv', function() { + var rootScope = { + $on: jest.fn(), + onAppEvent: jest.fn(), 
+ appEvent: jest.fn(), + }; + + var timer = { + register: jest.fn(), + cancel: jest.fn(), + cancelAll: jest.fn(), + }; + + var location = { + search: jest.fn(() => ({})), + }; + + var timeSrv; + + var _dashboard: any = { + time: { from: 'now-6h', to: 'now' }, + getTimezone: jest.fn(() => 'browser'), + }; + + beforeEach(function() { + timeSrv = new TimeSrv(rootScope, jest.fn(), location, timer, { isGrafanaVisibile: jest.fn() }); + timeSrv.init(_dashboard); + }); + + describe('timeRange', function() { + it('should return unparsed when parse is false', function() { + timeSrv.setTime({ from: 'now', to: 'now-1h' }); + var time = timeSrv.timeRange(); + expect(time.raw.from).toBe('now'); + expect(time.raw.to).toBe('now-1h'); + }); + + it('should return parsed when parse is true', function() { + timeSrv.setTime({ from: 'now', to: 'now-1h' }); + var time = timeSrv.timeRange(); + expect(moment.isMoment(time.from)).toBe(true); + expect(moment.isMoment(time.to)).toBe(true); + }); + }); + + describe('init time from url', function() { + it('should handle relative times', function() { + location = { + search: jest.fn(() => ({ + from: 'now-2d', + to: 'now', + })), + }; + + timeSrv = new TimeSrv(rootScope, jest.fn(), location, timer, { isGrafanaVisibile: jest.fn() }); + timeSrv.init(_dashboard); + var time = timeSrv.timeRange(); + expect(time.raw.from).toBe('now-2d'); + expect(time.raw.to).toBe('now'); + }); + + it('should handle formatted dates', function() { + location = { + search: jest.fn(() => ({ + from: '20140410T052010', + to: '20140520T031022', + })), + }; + + timeSrv = new TimeSrv(rootScope, jest.fn(), location, timer, { isGrafanaVisibile: jest.fn() }); + + timeSrv.init(_dashboard); + var time = timeSrv.timeRange(); + expect(time.from.valueOf()).toEqual(new Date('2014-04-10T05:20:10Z').getTime()); + expect(time.to.valueOf()).toEqual(new Date('2014-05-20T03:10:22Z').getTime()); + }); + + it('should handle formatted dates without time', function() { + location = { + search: jest.fn(() => ({ + from: '20140410', + to: '20140520', + })), + }; + + timeSrv = new TimeSrv(rootScope, jest.fn(), location, timer, { isGrafanaVisibile: jest.fn() }); + + timeSrv.init(_dashboard); + var time = timeSrv.timeRange(); + expect(time.from.valueOf()).toEqual(new Date('2014-04-10T00:00:00Z').getTime()); + expect(time.to.valueOf()).toEqual(new Date('2014-05-20T00:00:00Z').getTime()); + }); + + it('should handle epochs', function() { + location = { + search: jest.fn(() => ({ + from: '1410337646373', + to: '1410337665699', + })), + }; + + timeSrv = new TimeSrv(rootScope, jest.fn(), location, timer, { isGrafanaVisibile: jest.fn() }); + + timeSrv.init(_dashboard); + var time = timeSrv.timeRange(); + expect(time.from.valueOf()).toEqual(1410337646373); + expect(time.to.valueOf()).toEqual(1410337665699); + }); + + it('should handle bad dates', function() { + location = { + search: jest.fn(() => ({ + from: '20151126T00010%3C%2Fp%3E%3Cspan%20class', + to: 'now', + })), + }; + + timeSrv = new TimeSrv(rootScope, jest.fn(), location, timer, { isGrafanaVisibile: jest.fn() }); + + _dashboard.time.from = 'now-6h'; + timeSrv.init(_dashboard); + expect(timeSrv.time.from).toEqual('now-6h'); + expect(timeSrv.time.to).toEqual('now'); + }); + }); + + describe('setTime', function() { + it('should return disable refresh if refresh is disabled for any range', function() { + _dashboard.refresh = false; + + timeSrv.setTime({ from: '2011-01-01', to: '2015-01-01' }); + expect(_dashboard.refresh).toBe(false); + }); + + it('should restore refresh for 
absolute time range', function() { + _dashboard.refresh = '30s'; + + timeSrv.setTime({ from: '2011-01-01', to: '2015-01-01' }); + expect(_dashboard.refresh).toBe('30s'); + }); + + it('should restore refresh after relative time range is set', function() { + _dashboard.refresh = '10s'; + timeSrv.setTime({ + from: moment([2011, 1, 1]), + to: moment([2015, 1, 1]), + }); + expect(_dashboard.refresh).toBe(false); + timeSrv.setTime({ from: '2011-01-01', to: 'now' }); + expect(_dashboard.refresh).toBe('10s'); + }); + + it('should keep refresh after relative time range is changed and now delay exists', function() { + _dashboard.refresh = '10s'; + timeSrv.setTime({ from: 'now-1h', to: 'now-10s' }); + expect(_dashboard.refresh).toBe('10s'); + }); + }); +}); diff --git a/public/app/features/dashboard/specs/time_srv_specs.ts b/public/app/features/dashboard/specs/time_srv_specs.ts deleted file mode 100644 index 6e180679ff2..00000000000 --- a/public/app/features/dashboard/specs/time_srv_specs.ts +++ /dev/null @@ -1,115 +0,0 @@ -import { describe, beforeEach, it, expect, sinon, angularMocks } from 'test/lib/common'; - -import helpers from 'test/specs/helpers'; -import '../time_srv'; -import moment from 'moment'; - -describe('timeSrv', function() { - var ctx = new helpers.ServiceTestContext(); - var _dashboard: any = { - time: { from: 'now-6h', to: 'now' }, - getTimezone: sinon.stub().returns('browser'), - }; - - beforeEach(angularMocks.module('grafana.core')); - beforeEach(angularMocks.module('grafana.services')); - beforeEach(ctx.createService('timeSrv')); - - beforeEach(function() { - ctx.service.init(_dashboard); - }); - - describe('timeRange', function() { - it('should return unparsed when parse is false', function() { - ctx.service.setTime({ from: 'now', to: 'now-1h' }); - var time = ctx.service.timeRange(); - expect(time.raw.from).to.be('now'); - expect(time.raw.to).to.be('now-1h'); - }); - - it('should return parsed when parse is true', function() { - ctx.service.setTime({ from: 'now', to: 'now-1h' }); - var time = ctx.service.timeRange(); - expect(moment.isMoment(time.from)).to.be(true); - expect(moment.isMoment(time.to)).to.be(true); - }); - }); - - describe('init time from url', function() { - it('should handle relative times', function() { - ctx.$location.search({ from: 'now-2d', to: 'now' }); - ctx.service.init(_dashboard); - var time = ctx.service.timeRange(); - expect(time.raw.from).to.be('now-2d'); - expect(time.raw.to).to.be('now'); - }); - - it('should handle formatted dates', function() { - ctx.$location.search({ from: '20140410T052010', to: '20140520T031022' }); - ctx.service.init(_dashboard); - var time = ctx.service.timeRange(true); - expect(time.from.valueOf()).to.equal(new Date('2014-04-10T05:20:10Z').getTime()); - expect(time.to.valueOf()).to.equal(new Date('2014-05-20T03:10:22Z').getTime()); - }); - - it('should handle formatted dates without time', function() { - ctx.$location.search({ from: '20140410', to: '20140520' }); - ctx.service.init(_dashboard); - var time = ctx.service.timeRange(true); - expect(time.from.valueOf()).to.equal(new Date('2014-04-10T00:00:00Z').getTime()); - expect(time.to.valueOf()).to.equal(new Date('2014-05-20T00:00:00Z').getTime()); - }); - - it('should handle epochs', function() { - ctx.$location.search({ from: '1410337646373', to: '1410337665699' }); - ctx.service.init(_dashboard); - var time = ctx.service.timeRange(true); - expect(time.from.valueOf()).to.equal(1410337646373); - expect(time.to.valueOf()).to.equal(1410337665699); - }); - - it('should 
handle bad dates', function() { - ctx.$location.search({ - from: '20151126T00010%3C%2Fp%3E%3Cspan%20class', - to: 'now', - }); - _dashboard.time.from = 'now-6h'; - ctx.service.init(_dashboard); - expect(ctx.service.time.from).to.equal('now-6h'); - expect(ctx.service.time.to).to.equal('now'); - }); - }); - - describe('setTime', function() { - it('should return disable refresh if refresh is disabled for any range', function() { - _dashboard.refresh = false; - - ctx.service.setTime({ from: '2011-01-01', to: '2015-01-01' }); - expect(_dashboard.refresh).to.be(false); - }); - - it('should restore refresh for absolute time range', function() { - _dashboard.refresh = '30s'; - - ctx.service.setTime({ from: '2011-01-01', to: '2015-01-01' }); - expect(_dashboard.refresh).to.be('30s'); - }); - - it('should restore refresh after relative time range is set', function() { - _dashboard.refresh = '10s'; - ctx.service.setTime({ - from: moment([2011, 1, 1]), - to: moment([2015, 1, 1]), - }); - expect(_dashboard.refresh).to.be(false); - ctx.service.setTime({ from: '2011-01-01', to: 'now' }); - expect(_dashboard.refresh).to.be('10s'); - }); - - it('should keep refresh after relative time range is changed and now delay exists', function() { - _dashboard.refresh = '10s'; - ctx.service.setTime({ from: 'now-1h', to: 'now-10s' }); - expect(_dashboard.refresh).to.be('10s'); - }); - }); -}); diff --git a/public/app/features/dashboard/specs/viewstate_srv.jest.ts b/public/app/features/dashboard/specs/viewstate_srv.jest.ts new file mode 100644 index 00000000000..08166c6f2bd --- /dev/null +++ b/public/app/features/dashboard/specs/viewstate_srv.jest.ts @@ -0,0 +1,67 @@ +//import { describe, beforeEach, it, expect, angularMocks } from 'test/lib/common'; +import 'app/features/dashboard/view_state_srv'; +import config from 'app/core/config'; +import { DashboardViewState } from '../view_state_srv'; + +describe('when updating view state', () => { + let location = { + replace: jest.fn(), + search: jest.fn(), + }; + + let $scope = { + onAppEvent: jest.fn(() => {}), + dashboard: { + meta: {}, + panels: [], + }, + }; + + let $rootScope = {}; + let viewState; + + beforeEach(() => { + config.bootData = { + user: { + orgId: 1, + }, + }; + }); + + describe('to fullscreen true and edit true', () => { + beforeEach(() => { + location.search = jest.fn(() => { + return { fullscreen: true, edit: true, panelId: 1 }; + }); + viewState = new DashboardViewState($scope, location, {}, $rootScope); + }); + + it('should update querystring and view state', () => { + var updateState = { fullscreen: true, edit: true, panelId: 1 }; + + viewState.update(updateState); + + expect(location.search).toHaveBeenCalledWith({ + edit: true, + editview: null, + fullscreen: true, + orgId: 1, + panelId: 1, + }); + expect(viewState.dashboard.meta.fullscreen).toBe(true); + expect(viewState.state.fullscreen).toBe(true); + }); + }); + + describe('to fullscreen false', () => { + beforeEach(() => { + viewState = new DashboardViewState($scope, location, {}, $rootScope); + }); + it('should remove params from query string', () => { + viewState.update({ fullscreen: true, panelId: 1, edit: true }); + viewState.update({ fullscreen: false }); + expect(viewState.dashboard.meta.fullscreen).toBe(false); + expect(viewState.state.fullscreen).toBe(null); + }); + }); +}); diff --git a/public/app/features/dashboard/specs/viewstate_srv_specs.ts b/public/app/features/dashboard/specs/viewstate_srv_specs.ts deleted file mode 100644 index d34b15b9113..00000000000 --- 
a/public/app/features/dashboard/specs/viewstate_srv_specs.ts +++ /dev/null @@ -1,65 +0,0 @@ -import { describe, beforeEach, it, expect, angularMocks } from 'test/lib/common'; -import 'app/features/dashboard/view_state_srv'; -import config from 'app/core/config'; - -describe('when updating view state', function() { - var viewState, location; - var timeSrv = {}; - var templateSrv = {}; - var contextSrv = { - user: { - orgId: 19, - }, - }; - beforeEach(function() { - config.bootData = { - user: { - orgId: 1, - }, - }; - }); - beforeEach(angularMocks.module('grafana.services')); - beforeEach( - angularMocks.module(function($provide) { - $provide.value('timeSrv', timeSrv); - $provide.value('templateSrv', templateSrv); - $provide.value('contextSrv', contextSrv); - }) - ); - - beforeEach( - angularMocks.inject(function(dashboardViewStateSrv, $location, $rootScope) { - $rootScope.onAppEvent = function() {}; - $rootScope.dashboard = { - meta: {}, - panels: [], - }; - viewState = dashboardViewStateSrv.create($rootScope); - location = $location; - }) - ); - - describe('to fullscreen true and edit true', function() { - it('should update querystring and view state', function() { - var updateState = { fullscreen: true, edit: true, panelId: 1 }; - viewState.update(updateState); - expect(location.search()).to.eql({ - fullscreen: true, - edit: true, - panelId: 1, - orgId: 1, - }); - expect(viewState.dashboard.meta.fullscreen).to.be(true); - expect(viewState.state.fullscreen).to.be(true); - }); - }); - - describe('to fullscreen false', function() { - it('should remove params from query string', function() { - viewState.update({ fullscreen: true, panelId: 1, edit: true }); - viewState.update({ fullscreen: false }); - expect(viewState.dashboard.meta.fullscreen).to.be(false); - expect(viewState.state.fullscreen).to.be(null); - }); - }); -}); diff --git a/public/app/features/dashboard/time_srv.ts b/public/app/features/dashboard/time_srv.ts index 3f7b5836653..7fd5aed7847 100644 --- a/public/app/features/dashboard/time_srv.ts +++ b/public/app/features/dashboard/time_srv.ts @@ -4,7 +4,7 @@ import coreModule from 'app/core/core_module'; import kbn from 'app/core/utils/kbn'; import * as dateMath from 'app/core/utils/datemath'; -class TimeSrv { +export class TimeSrv { time: any; refreshTimer: any; refresh: boolean; diff --git a/public/app/features/dashlinks/module.ts b/public/app/features/dashlinks/module.ts index 148d32f4399..380144dbcd5 100644 --- a/public/app/features/dashlinks/module.ts +++ b/public/app/features/dashlinks/module.ts @@ -41,18 +41,20 @@ function dashLink($compile, $sanitize, linkSrv) { elem.html(template); $compile(elem.contents())(scope); - var anchor = elem.find('a'); - var icon = elem.find('i'); - var span = elem.find('span'); - function update() { var linkInfo = linkSrv.getAnchorInfo(link); - span.text(linkInfo.title); - anchor.attr('href', linkInfo.href); - sanitizeAnchor(); + const anchor = elem.find('a'); + const span = elem.find('span'); + span.text(linkInfo.title); + + if (!link.asDropdown) { + anchor.attr('href', linkInfo.href); + sanitizeAnchor(); + } + anchor.attr('data-placement', 'bottom'); // tooltip - elem.find('a').tooltip({ + anchor.tooltip({ title: $sanitize(scope.link.tooltip), html: true, container: 'body', @@ -60,12 +62,13 @@ function dashLink($compile, $sanitize, linkSrv) { } function sanitizeAnchor() { + const anchor = elem.find('a'); const anchorSanitized = $sanitize(anchor.parent().html()); anchor.parent().html(anchorSanitized); } - icon.attr('class', 'fa fa-fw ' + 
scope.link.icon); - anchor.attr('target', scope.link.target); + elem.find('i').attr('class', 'fa fa-fw ' + scope.link.icon); + elem.find('a').attr('target', scope.link.target); // fix for menus on the far right if (link.asDropdown && scope.$last) { diff --git a/public/app/features/org/all.ts b/public/app/features/org/all.ts index 97e01c53fe3..8872450e3ab 100644 --- a/public/app/features/org/all.ts +++ b/public/app/features/org/all.ts @@ -5,8 +5,6 @@ import './select_org_ctrl'; import './change_password_ctrl'; import './new_org_ctrl'; import './user_invite_ctrl'; -import './teams_ctrl'; -import './team_details_ctrl'; import './create_team_ctrl'; import './org_api_keys_ctrl'; import './org_details_ctrl'; diff --git a/public/app/features/org/partials/team_details.html b/public/app/features/org/partials/team_details.html deleted file mode 100644 index 3fce8b3c720..00000000000 --- a/public/app/features/org/partials/team_details.html +++ /dev/null @@ -1,67 +0,0 @@ - - -
    [deleted markup, HTML tags stripped in extraction: team_details.html contained a "Team Details" form (Name field; optional Email field, "primarily used for allowing custom team avatars"), a "Team Members" section with an "Add member" user picker, a Username/Email member table rendering {{member.login}} and {{member.email}} with a per-row remove action, and a "This team has no members yet." empty state.]
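// A rough sketch, not part of this diff: the HTTP surface the teams UI removed here was
// built around, as used by the deleted team_details_ctrl.ts further down. backendSrv
// stands in for Grafana's Angular HTTP wrapper; the thin typed wrapper and its name
// (TeamDetailsApi) are illustrative assumptions, not code from this PR.
interface TeamMember {
  userId: number;
  login: string;
}

class TeamDetailsApi {
  constructor(private backendSrv: any, private teamId: number) {}

  getTeam() {
    return this.backendSrv.get(`/api/teams/${this.teamId}`);
  }

  getMembers(): Promise<TeamMember[]> {
    return this.backendSrv.get(`/api/teams/${this.teamId}/members`);
  }

  addMember(userId: number) {
    return this.backendSrv.post(`/api/teams/${this.teamId}/members`, { userId });
  }

  removeMember(member: TeamMember) {
    return this.backendSrv.delete(`/api/teams/${this.teamId}/members/${member.userId}`);
  }
}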
    diff --git a/public/app/features/org/partials/teams.html b/public/app/features/org/partials/teams.html deleted file mode 100755 index e15a15cf573..00000000000 --- a/public/app/features/org/partials/teams.html +++ /dev/null @@ -1,68 +0,0 @@
    [deleted markup, HTML tags stripped in extraction: teams.html contained a search box with an "Add Team" button, a Name/Email/Members team table with per-row delete, numbered pagination, and a "No Teams found." empty state.]
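// A minimal sketch, not part of this diff: following the karma-to-jest conversions made
// throughout this PR, the sinon-based spec deleted just below would have been ported
// roughly like this, had it been kept instead of removed with the controller. The mapping
// is sinon.stub().returns(...) -> jest.fn(() => ...) and expect(x).to.eql(y) ->
// expect(x).toEqual(y); the scope and navModelSrv stubs are illustrative assumptions.
import TeamDetailsCtrl from '../team_details_ctrl';

describe('TeamDetailsCtrl (jest sketch)', () => {
  const backendSrv = {
    searchUsers: jest.fn(() => Promise.resolve([])),
    get: jest.fn(() => Promise.resolve([])),
    post: jest.fn(() => Promise.resolve([])),
  };
  const scope = { $broadcast: jest.fn() };
  const ctrl = new TeamDetailsCtrl(scope, backendSrv, { id: 1 }, { getNav: jest.fn() });

  it('should post the picked user and refresh the member list', async () => {
    await ctrl.userPicked({ id: 2, login: 'user2' });
    expect(backendSrv.post).toHaveBeenCalledWith('/api/teams/1/members', { userId: 2 });
    expect(scope.$broadcast).toHaveBeenCalledWith('user-picker-reset');
  });
});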
    diff --git a/public/app/features/org/specs/team_details_ctrl_specs.ts b/public/app/features/org/specs/team_details_ctrl_specs.ts deleted file mode 100644 index 347f3796170..00000000000 --- a/public/app/features/org/specs/team_details_ctrl_specs.ts +++ /dev/null @@ -1,48 +0,0 @@ -import '../team_details_ctrl'; -import { describe, beforeEach, it, expect, sinon, angularMocks } from 'test/lib/common'; -import TeamDetailsCtrl from '../team_details_ctrl'; - -describe('TeamDetailsCtrl', () => { - var ctx: any = {}; - var backendSrv = { - searchUsers: sinon.stub().returns(Promise.resolve([])), - get: sinon.stub().returns(Promise.resolve([])), - post: sinon.stub().returns(Promise.resolve([])), - }; - - beforeEach(angularMocks.module('grafana.core')); - beforeEach(angularMocks.module('grafana.controllers')); - - beforeEach( - angularMocks.inject(($rootScope, $controller, $q) => { - ctx.$q = $q; - ctx.scope = $rootScope.$new(); - ctx.ctrl = $controller(TeamDetailsCtrl, { - $scope: ctx.scope, - backendSrv: backendSrv, - $routeParams: { id: 1 }, - navModelSrv: { getNav: sinon.stub() }, - }); - }) - ); - - describe('when user is chosen to be added to team', () => { - beforeEach(() => { - const userItem = { - id: 2, - login: 'user2', - }; - ctx.ctrl.userPicked(userItem); - }); - - it('should parse the result and save to db', () => { - expect(backendSrv.post.getCall(0).args[0]).to.eql('/api/teams/1/members'); - expect(backendSrv.post.getCall(0).args[1].userId).to.eql(2); - }); - - it('should refresh the list after saving.', () => { - expect(backendSrv.get.getCall(0).args[0]).to.eql('/api/teams/1'); - expect(backendSrv.get.getCall(1).args[0]).to.eql('/api/teams/1/members'); - }); - }); -}); diff --git a/public/app/features/org/team_details_ctrl.ts b/public/app/features/org/team_details_ctrl.ts deleted file mode 100644 index 3d193880635..00000000000 --- a/public/app/features/org/team_details_ctrl.ts +++ /dev/null @@ -1,81 +0,0 @@ -import coreModule from 'app/core/core_module'; - -export default class TeamDetailsCtrl { - team: Team; - teamMembers: User[] = []; - navModel: any; - - /** @ngInject **/ - constructor(private $scope, private backendSrv, private $routeParams, navModelSrv) { - this.navModel = navModelSrv.getNav('cfg', 'teams', 0); - this.userPicked = this.userPicked.bind(this); - this.get = this.get.bind(this); - this.get(); - } - - get() { - if (this.$routeParams && this.$routeParams.id) { - this.backendSrv.get(`/api/teams/${this.$routeParams.id}`).then(result => { - this.team = result; - }); - this.backendSrv.get(`/api/teams/${this.$routeParams.id}/members`).then(result => { - this.teamMembers = result; - }); - } - } - - removeTeamMember(teamMember: TeamMember) { - this.$scope.appEvent('confirm-modal', { - title: 'Remove Member', - text: 'Are you sure you want to remove ' + teamMember.login + ' from this group?', - yesText: 'Remove', - icon: 'fa-warning', - onConfirm: () => { - this.removeMemberConfirmed(teamMember); - }, - }); - } - - removeMemberConfirmed(teamMember: TeamMember) { - this.backendSrv.delete(`/api/teams/${this.$routeParams.id}/members/${teamMember.userId}`).then(this.get); - } - - update() { - if (!this.$scope.teamDetailsForm.$valid) { - return; - } - - this.backendSrv.put('/api/teams/' + this.team.id, { - name: this.team.name, - email: this.team.email, - }); - } - - userPicked(user) { - this.backendSrv.post(`/api/teams/${this.$routeParams.id}/members`, { userId: user.id }).then(() => { - this.$scope.$broadcast('user-picker-reset'); - this.get(); - }); - } -} - -export 
interface Team { - id: number; - name: string; - email: string; -} - -export interface User { - id: number; - name: string; - login: string; - email: string; -} - -export interface TeamMember { - userId: number; - name: string; - login: string; -} - -coreModule.controller('TeamDetailsCtrl', TeamDetailsCtrl); diff --git a/public/app/features/org/teams_ctrl.ts b/public/app/features/org/teams_ctrl.ts deleted file mode 100644 index 29317e73d3b..00000000000 --- a/public/app/features/org/teams_ctrl.ts +++ /dev/null @@ -1,66 +0,0 @@ -import coreModule from 'app/core/core_module'; -import appEvents from 'app/core/app_events'; - -export class TeamsCtrl { - teams: any; - pages = []; - perPage = 50; - page = 1; - totalPages: number; - showPaging = false; - query: any = ''; - navModel: any; - - /** @ngInject */ - constructor(private backendSrv, navModelSrv) { - this.navModel = navModelSrv.getNav('cfg', 'teams', 0); - this.get(); - } - - get() { - this.backendSrv - .get(`/api/teams/search?perpage=${this.perPage}&page=${this.page}&query=${this.query}`) - .then(result => { - this.teams = result.teams; - this.page = result.page; - this.perPage = result.perPage; - this.totalPages = Math.ceil(result.totalCount / result.perPage); - this.showPaging = this.totalPages > 1; - this.pages = []; - - for (var i = 1; i < this.totalPages + 1; i++) { - this.pages.push({ page: i, current: i === this.page }); - } - }); - } - - navigateToPage(page) { - this.page = page.page; - this.get(); - } - - deleteTeam(team) { - appEvents.emit('confirm-modal', { - title: 'Delete', - text: 'Are you sure you want to delete Team ' + team.name + '?', - yesText: 'Delete', - icon: 'fa-warning', - onConfirm: () => { - this.deleteTeamConfirmed(team); - }, - }); - } - - deleteTeamConfirmed(team) { - this.backendSrv.delete('/api/teams/' + team.id).then(this.get.bind(this)); - } - - openTeamModal() { - appEvents.emit('show-modal', { - templateHtml: '', - modalClass: 'modal--narrow', - }); - } -} - -coreModule.controller('TeamsCtrl', TeamsCtrl); diff --git a/public/app/features/panel/metrics_panel_ctrl.ts b/public/app/features/panel/metrics_panel_ctrl.ts index acf46a193e8..3d8a4ed3736 100644 --- a/public/app/features/panel/metrics_panel_ctrl.ts +++ b/public/app/features/panel/metrics_panel_ctrl.ts @@ -1,9 +1,9 @@ -import config from 'app/core/config'; import $ from 'jquery'; import _ from 'lodash'; + +import config from 'app/core/config'; import kbn from 'app/core/utils/kbn'; import { PanelCtrl } from 'app/features/panel/panel_ctrl'; - import * as rangeUtil from 'app/core/utils/rangeutil'; import * as dateMath from 'app/core/utils/datemath'; import { encodePathComponent } from 'app/core/utils/location_util'; @@ -16,6 +16,7 @@ class MetricsPanelCtrl extends PanelCtrl { datasourceName: any; $q: any; $timeout: any; + contextSrv: any; datasourceSrv: any; timeSrv: any; templateSrv: any; @@ -37,6 +38,7 @@ class MetricsPanelCtrl extends PanelCtrl { // make metrics tab the default this.editorTabIndex = 1; this.$q = $injector.get('$q'); + this.contextSrv = $injector.get('contextSrv'); this.datasourceSrv = $injector.get('datasourceSrv'); this.timeSrv = $injector.get('timeSrv'); this.templateSrv = $injector.get('templateSrv'); @@ -312,7 +314,7 @@ class MetricsPanelCtrl extends PanelCtrl { getAdditionalMenuItems() { const items = []; - if (this.datasource.supportsExplore) { + if (config.exploreEnabled && this.contextSrv.isEditor && this.datasource && this.datasource.supportsExplore) { items.push({ text: 'Explore', click: 'ctrl.explore();', @@ -324,8 +326,13 
@@ class MetricsPanelCtrl extends PanelCtrl { } explore() { - const exploreState = encodePathComponent(JSON.stringify(this.datasource.getExploreState(this.panel))); - this.$location.url(`/explore/${exploreState}`); + const range = this.timeSrv.timeRangeForUrl(); + const state = { + ...this.datasource.getExploreState(this.panel), + range, + }; + const exploreState = encodePathComponent(JSON.stringify(state)); + this.$location.url(`/explore?state=${exploreState}`); } addQuery(target) { diff --git a/public/app/features/panel/panel_header.ts b/public/app/features/panel/panel_header.ts index f65be321207..1e2bc241d63 100644 --- a/public/app/features/panel/panel_header.ts +++ b/public/app/features/panel/panel_header.ts @@ -25,7 +25,7 @@ var template = `
  [stripped menu markup: "Remove" item context line]
  • - {{ctrl.timeInfo}} + {{ctrl.timeInfo}} `; function renderMenuItem(item, ctrl) { diff --git a/public/app/features/panel/specs/metrics_panel_ctrl.jest.ts b/public/app/features/panel/specs/metrics_panel_ctrl.jest.ts new file mode 100644 index 00000000000..a28bf92e63b --- /dev/null +++ b/public/app/features/panel/specs/metrics_panel_ctrl.jest.ts @@ -0,0 +1,77 @@ +jest.mock('app/core/core', () => ({})); +jest.mock('app/core/config', () => { + return { + exploreEnabled: true, + panels: { + test: { + id: 'test', + name: 'test', + }, + }, + }; +}); + +import q from 'q'; +import { PanelModel } from 'app/features/dashboard/panel_model'; +import { MetricsPanelCtrl } from '../metrics_panel_ctrl'; + +describe('MetricsPanelCtrl', () => { + let ctrl; + + beforeEach(() => { + ctrl = setupController(); + }); + + describe('when getting additional menu items', () => { + let additionalItems; + + describe('and has no datasource set', () => { + beforeEach(() => { + additionalItems = ctrl.getAdditionalMenuItems(); + }); + + it('should not return any items', () => { + expect(additionalItems.length).toBe(0); + }); + }); + + describe('and has datasource set that supports explore and user has powers', () => { + beforeEach(() => { + ctrl.contextSrv = { isEditor: true }; + ctrl.datasource = { supportsExplore: true }; + additionalItems = ctrl.getAdditionalMenuItems(); + }); + + it('should not return any items', () => { + expect(additionalItems.length).toBe(1); + }); + }); + }); +}); + +function setupController() { + const injectorStub = { + get: type => { + switch (type) { + case '$q': { + return q; + } + default: { + return jest.fn(); + } + } + }, + }; + + const scope = { + panel: { events: [] }, + appEvent: jest.fn(), + onAppEvent: jest.fn(), + $on: jest.fn(), + colors: [], + }; + + MetricsPanelCtrl.prototype.panel = new PanelModel({ type: 'test' }); + + return new MetricsPanelCtrl(scope, injectorStub); +} diff --git a/public/app/features/playlist/specs/playlist_edit_ctrl_specs.ts b/public/app/features/playlist/specs/playlist_edit_ctrl.jest.ts similarity index 70% rename from public/app/features/playlist/specs/playlist_edit_ctrl_specs.ts rename to public/app/features/playlist/specs/playlist_edit_ctrl.jest.ts index 7884dd090e5..f313c6e8e6a 100644 --- a/public/app/features/playlist/specs/playlist_edit_ctrl_specs.ts +++ b/public/app/features/playlist/specs/playlist_edit_ctrl.jest.ts @@ -1,5 +1,4 @@ import '../playlist_edit_ctrl'; -import { describe, beforeEach, it, expect } from 'test/lib/common'; import { PlaylistEditCtrl } from '../playlist_edit_ctrl'; describe('PlaylistEditCtrl', () => { @@ -20,13 +19,13 @@ describe('PlaylistEditCtrl', () => { describe('searchresult returns 2 dashboards, ', () => { it('found dashboard should be 2', () => { - expect(ctx.dashboardresult.length).to.be(2); + expect(ctx.dashboardresult.length).toBe(2); }); it('filtred result should be 2', () => { ctx.filterFoundPlaylistItems(); - expect(ctx.filteredDashboards.length).to.be(2); - expect(ctx.filteredTags.length).to.be(2); + expect(ctx.filteredDashboards.length).toBe(2); + expect(ctx.filteredTags.length).toBe(2); }); describe('adds one dashboard to playlist, ', () => { @@ -37,16 +36,16 @@ describe('PlaylistEditCtrl', () => { }); it('playlistitems should be increased by one', () => { - expect(ctx.playlistItems.length).to.be(2); + expect(ctx.playlistItems.length).toBe(2); }); it('filtred playlistitems should be reduced by one', () => { - expect(ctx.filteredDashboards.length).to.be(1); - expect(ctx.filteredTags.length).to.be(1); + 
expect(ctx.filteredDashboards.length).toBe(1); + expect(ctx.filteredTags.length).toBe(1); }); it('found dashboard should be 2', () => { - expect(ctx.dashboardresult.length).to.be(2); + expect(ctx.dashboardresult.length).toBe(2); }); describe('removes one dashboard from playlist, ', () => { @@ -57,14 +56,14 @@ describe('PlaylistEditCtrl', () => { }); it('playlistitems should be increased by one', () => { - expect(ctx.playlistItems.length).to.be(0); + expect(ctx.playlistItems.length).toBe(0); }); it('found dashboard should be 2', () => { - expect(ctx.dashboardresult.length).to.be(2); - expect(ctx.filteredDashboards.length).to.be(2); - expect(ctx.filteredTags.length).to.be(2); - expect(ctx.tagresult.length).to.be(2); + expect(ctx.dashboardresult.length).toBe(2); + expect(ctx.filteredDashboards.length).toBe(2); + expect(ctx.filteredTags.length).toBe(2); + expect(ctx.tagresult.length).toBe(2); }); }); }); diff --git a/public/app/features/plugins/built_in_plugins.ts b/public/app/features/plugins/built_in_plugins.ts index 6998321dd75..656ce2bfa38 100644 --- a/public/app/features/plugins/built_in_plugins.ts +++ b/public/app/features/plugins/built_in_plugins.ts @@ -9,6 +9,7 @@ import * as mysqlPlugin from 'app/plugins/datasource/mysql/module'; import * as postgresPlugin from 'app/plugins/datasource/postgres/module'; import * as prometheusPlugin from 'app/plugins/datasource/prometheus/module'; import * as mssqlPlugin from 'app/plugins/datasource/mssql/module'; +import * as testDataDSPlugin from 'app/plugins/datasource/testdata/module'; import * as textPanel from 'app/plugins/panel/text/module'; import * as graphPanel from 'app/plugins/panel/graph/module'; @@ -20,9 +21,6 @@ import * as tablePanel from 'app/plugins/panel/table/module'; import * as singlestatPanel from 'app/plugins/panel/singlestat/module'; import * as gettingStartedPanel from 'app/plugins/panel/gettingstarted/module'; -import * as testDataAppPlugin from 'app/plugins/app/testdata/module'; -import * as testDataDSPlugin from 'app/plugins/app/testdata/datasource/module'; - const builtInPlugins = { 'app/plugins/datasource/graphite/module': graphitePlugin, 'app/plugins/datasource/cloudwatch/module': cloudwatchPlugin, @@ -35,8 +33,7 @@ const builtInPlugins = { 'app/plugins/datasource/postgres/module': postgresPlugin, 'app/plugins/datasource/mssql/module': mssqlPlugin, 'app/plugins/datasource/prometheus/module': prometheusPlugin, - 'app/plugins/app/testdata/module': testDataAppPlugin, - 'app/plugins/app/testdata/datasource/module': testDataDSPlugin, + 'app/plugins/datasource/testdata/module': testDataDSPlugin, 'app/plugins/panel/text/module': textPanel, 'app/plugins/panel/graph/module': graphPanel, diff --git a/public/app/features/plugins/datasource_srv.ts b/public/app/features/plugins/datasource_srv.ts index aef43a4760b..df743640062 100644 --- a/public/app/features/plugins/datasource_srv.ts +++ b/public/app/features/plugins/datasource_srv.ts @@ -7,7 +7,7 @@ export class DatasourceSrv { datasources: any; /** @ngInject */ - constructor(private $q, private $injector, $rootScope, private templateSrv) { + constructor(private $q, private $injector, private $rootScope, private templateSrv) { this.init(); } @@ -34,13 +34,13 @@ export class DatasourceSrv { } loadDatasource(name) { - var dsConfig = config.datasources[name]; + const dsConfig = config.datasources[name]; if (!dsConfig) { return this.$q.reject({ message: 'Datasource named ' + name + ' was not found' }); } - var deferred = this.$q.defer(); - var pluginDef = dsConfig.meta; + const deferred 
= this.$q.defer(); + const pluginDef = dsConfig.meta; importPluginModule(pluginDef.module) .then(plugin => { @@ -55,13 +55,13 @@ export class DatasourceSrv { throw new Error('Plugin module is missing Datasource constructor'); } - var instance = this.$injector.instantiate(plugin.Datasource, { instanceSettings: dsConfig }); + const instance = this.$injector.instantiate(plugin.Datasource, { instanceSettings: dsConfig }); instance.meta = pluginDef; instance.name = name; this.datasources[name] = instance; deferred.resolve(instance); }) - .catch(function(err) { + .catch(err => { this.$rootScope.appEvent('alert-error', [dsConfig.name + ' plugin failed', err.toString()]); }); @@ -73,7 +73,7 @@ export class DatasourceSrv { } getAnnotationSources() { - var sources = []; + const sources = []; this.addDataSourceVariables(sources); @@ -86,15 +86,33 @@ export class DatasourceSrv { return sources; } + getExploreSources() { + const { datasources } = config; + const es = Object.keys(datasources) + .map(name => datasources[name]) + .filter(ds => ds.meta && ds.meta.explore); + return _.sortBy(es, ['name']); + } + getMetricSources(options) { var metricSources = []; _.each(config.datasources, function(value, key) { if (value.meta && value.meta.metrics) { - metricSources.push({ value: key, name: key, meta: value.meta }); + let metricSource = { value: key, name: key, meta: value.meta, sort: key }; + + //Make sure grafana and mixed are sorted at the bottom + if (value.meta.id === 'grafana') { + metricSource.sort = String.fromCharCode(253); + } else if (value.meta.id === 'mixed') { + metricSource.sort = String.fromCharCode(254); + } + + metricSources.push(metricSource); if (key === config.defaultDatasource) { - metricSources.push({ value: null, name: 'default', meta: value.meta }); + metricSource = { value: null, name: 'default', meta: value.meta, sort: key }; + metricSources.push(metricSource); } } }); @@ -104,17 +122,10 @@ export class DatasourceSrv { } metricSources.sort(function(a, b) { - // these two should always be at the bottom - if (a.meta.id === 'mixed' || a.meta.id === 'grafana') { + if (a.sort.toLowerCase() > b.sort.toLowerCase()) { return 1; } - if (b.meta.id === 'mixed' || b.meta.id === 'grafana') { - return -1; - } - if (a.name.toLowerCase() > b.name.toLowerCase()) { - return 1; - } - if (a.name.toLowerCase() < b.name.toLowerCase()) { + if (a.sort.toLowerCase() < b.sort.toLowerCase()) { return -1; } return 0; @@ -139,10 +150,12 @@ export class DatasourceSrv { var ds = config.datasources[first]; if (ds) { + const key = `$${variable.name}`; list.push({ - name: '$' + variable.name, - value: '$' + variable.name, + name: key, + value: key, meta: ds.meta, + sort: key, }); } } @@ -150,3 +163,4 @@ export class DatasourceSrv { } coreModule.service('datasourceSrv', DatasourceSrv); +export default DatasourceSrv; diff --git a/public/app/features/plugins/ds_edit_ctrl.ts b/public/app/features/plugins/ds_edit_ctrl.ts index b98f0f48910..542e9cc3648 100644 --- a/public/app/features/plugins/ds_edit_ctrl.ts +++ b/public/app/features/plugins/ds_edit_ctrl.ts @@ -13,6 +13,7 @@ var defaults = { access: 'proxy', jsonData: {}, secureJsonFields: {}, + secureJsonData: {}, }; var datasourceCreated = false; @@ -204,10 +205,18 @@ coreModule.directive('datasourceHttpSettings', function() { scope: { current: '=', suggestUrl: '@', + noDirectAccess: '@', }, templateUrl: 'public/app/features/plugins/partials/ds_http_settings.html', link: { pre: function($scope, elem, attrs) { + // do not show access option if direct access is 
disabled + $scope.showAccessOption = $scope.noDirectAccess !== 'true'; + $scope.showAccessHelp = false; + $scope.toggleAccessHelp = function() { + $scope.showAccessHelp = !$scope.showAccessHelp; + }; + $scope.getSuggestUrls = function() { return [$scope.suggestUrl]; }; diff --git a/public/app/features/plugins/ds_list_ctrl.ts b/public/app/features/plugins/ds_list_ctrl.ts index 577b931551a..89c760ae253 100644 --- a/public/app/features/plugins/ds_list_ctrl.ts +++ b/public/app/features/plugins/ds_list_ctrl.ts @@ -19,6 +19,7 @@ export class DataSourcesCtrl { onQueryUpdated() { let regex = new RegExp(this.searchQuery, 'ig'); this.datasources = _.filter(this.unfiltered, item => { + regex.lastIndex = 0; return regex.test(item.name) || regex.test(item.type); }); } diff --git a/public/app/features/plugins/partials/ds_http_settings.html b/public/app/features/plugins/partials/ds_http_settings.html index b9f5683129c..6d014af567c 100644 --- a/public/app/features/plugins/partials/ds_http_settings.html +++ b/public/app/features/plugins/partials/ds_http_settings.html @@ -22,7 +22,7 @@
    [markup stripped in extraction: both hunks of ds_http_settings.html adjust the "Access" option row, which appears to become conditional on showAccessOption, with a help toggle bound to showAccessHelp/toggleAccessHelp().]
    Access mode controls how requests to the data source will be handled. diff --git a/public/app/features/plugins/plugin_component.ts b/public/app/features/plugins/plugin_component.ts index c9bd0442e1b..1936e57f558 100644 --- a/public/app/features/plugins/plugin_component.ts +++ b/public/app/features/plugins/plugin_component.ts @@ -6,7 +6,6 @@ import coreModule from 'app/core/core_module'; import { importPluginModule } from './plugin_loader'; import { UnknownPanelCtrl } from 'app/plugins/panel/unknown/module'; -import { DashboardRowCtrl } from './row_ctrl'; /** @ngInject **/ function pluginDirectiveLoader($compile, datasourceSrv, $rootScope, $q, $http, $templateCache) { @@ -59,15 +58,6 @@ function pluginDirectiveLoader($compile, datasourceSrv, $rootScope, $q, $http, $ } function loadPanelComponentInfo(scope, attrs) { - if (scope.panel.type === 'row') { - return $q.when({ - name: 'dashboard-row', - bindings: { dashboard: '=', panel: '=' }, - attrs: { dashboard: 'ctrl.dashboard', panel: 'panel' }, - Component: DashboardRowCtrl, - }); - } - var componentInfo: any = { name: 'panel-plugin-' + scope.panel.type, bindings: { dashboard: '=', panel: '=', row: '=' }, @@ -136,24 +126,6 @@ function pluginDirectiveLoader($compile, datasourceSrv, $rootScope, $q, $http, $ }); }); } - // QueryOptionsCtrl - case 'query-options-ctrl': { - return datasourceSrv.get(scope.ctrl.panel.datasource).then(ds => { - return importPluginModule(ds.meta.module).then((dsModule): any => { - if (!dsModule.QueryOptionsCtrl) { - return { notFound: true }; - } - - return { - baseUrl: ds.meta.baseUrl, - name: 'query-options-ctrl-' + ds.meta.id, - bindings: { panelCtrl: '=' }, - attrs: { 'panel-ctrl': 'ctrl.panelCtrl' }, - Component: dsModule.QueryOptionsCtrl, - }; - }); - }); - } // Annotations case 'annotations-query-ctrl': { return importPluginModule(scope.ctrl.currentDatasource.meta.module).then(function(dsModule) { diff --git a/public/app/features/plugins/plugin_loader.ts b/public/app/features/plugins/plugin_loader.ts index 57edfb35885..cce494d0a60 100644 --- a/public/app/features/plugins/plugin_loader.ts +++ b/public/app/features/plugins/plugin_loader.ts @@ -5,6 +5,15 @@ import kbn from 'app/core/utils/kbn'; import moment from 'moment'; import angular from 'angular'; import jquery from 'jquery'; + +// Experimental module exports +import prismjs from 'prismjs'; +import slate from 'slate'; +import slateReact from 'slate-react'; +import slatePlain from 'slate-plain-serializer'; +import react from 'react'; +import reactDom from 'react-dom'; + import config from 'app/core/config'; import TimeSeries from 'app/core/time_series2'; import TableModel from 'app/core/table_model'; @@ -27,6 +36,13 @@ import 'rxjs/add/observable/from'; import 'rxjs/add/operator/map'; import 'rxjs/add/operator/combineAll'; +// add cache busting +const bust = `?_cache=${Date.now()}`; +function locate(load) { + return load.address + bust; +} +System.registry.set('plugin-loader', System.newModule({ locate: locate })); + System.config({ baseURL: 'public', defaultExtension: 'js', @@ -40,23 +56,14 @@ System.config({ css: 'vendor/plugin-css/css.js', }, meta: { - '*': { + '/*': { esModule: true, authorization: true, + loader: 'plugin-loader', }, }, }); -// add cache busting -var systemLocate = System.locate; -System.cacheBust = '?bust=' + Date.now(); -System.locate = function(load) { - var System = this; - return Promise.resolve(systemLocate.call(this, load)).then(function(address) { - return address + System.cacheBust; - }); -}; - function exposeToPlugin(name: 
string, component: any) { System.registerDynamic(name, [], true, function(require, exports, module) { module.exports = component; @@ -71,6 +78,14 @@ exposeToPlugin('d3', d3); exposeToPlugin('rxjs/Subject', Subject); exposeToPlugin('rxjs/Observable', Observable); +// Experimental modules +exposeToPlugin('prismjs', prismjs); +exposeToPlugin('slate', slate); +exposeToPlugin('slate-react', slateReact); +exposeToPlugin('slate-plain-serializer', slatePlain); +exposeToPlugin('react', react); +exposeToPlugin('react-dom', reactDom); + // backward compatible path exposeToPlugin('vendor/npm/rxjs/Rx', { Subject: Subject, @@ -111,6 +126,7 @@ import 'vendor/flot/jquery.flot.stackpercent'; import 'vendor/flot/jquery.flot.fillbelow'; import 'vendor/flot/jquery.flot.crosshair'; import 'vendor/flot/jquery.flot.dashes'; +import 'vendor/flot/jquery.flot.gauge'; const flotDeps = [ 'jquery.flot', @@ -122,6 +138,7 @@ const flotDeps = [ 'jquery.flot.selection', 'jquery.flot.stackpercent', 'jquery.flot.events', + 'jquery.flot.gauge', ]; for (let flotDep of flotDeps) { exposeToPlugin(flotDep, { fakeDep: 1 }); diff --git a/public/app/features/plugins/row_ctrl.ts b/public/app/features/plugins/row_ctrl.ts deleted file mode 100644 index 90fc0d07e38..00000000000 --- a/public/app/features/plugins/row_ctrl.ts +++ /dev/null @@ -1,100 +0,0 @@ -import _ from 'lodash'; - -export class DashboardRowCtrl { - static template = ` -

    [deleted template markup, tags stripped: the DashboardRowCtrl static template rendered a toggle control and a "({{ctrl.panel.hiddenPanels.length}} hidden panels)" indicator.]
    - `; - - dashboard: any; - panel: any; - - constructor() { - this.panel.hiddenPanels = this.panel.hiddenPanels || []; - } - - toggle() { - if (this.panel.collapse) { - let panelIndex = _.indexOf(this.dashboard.panels, this.panel); - - for (let child of this.panel.hiddenPanels) { - this.dashboard.panels.splice(panelIndex + 1, 0, child); - child.y = this.panel.y + 1; - console.log('restoring child', child); - } - - this.panel.hiddenPanels = []; - this.panel.collapse = false; - return; - } - - this.panel.collapse = true; - let foundRow = false; - - for (let i = 0; i < this.dashboard.panels.length; i++) { - let panel = this.dashboard.panels[i]; - - if (panel === this.panel) { - console.log('found row'); - foundRow = true; - continue; - } - - if (!foundRow) { - continue; - } - - if (panel.type === 'row') { - break; - } - - this.panel.hiddenPanels.push(panel); - console.log('hiding child', panel.id); - } - - for (let hiddenPanel of this.panel.hiddenPanels) { - this.dashboard.removePanel(hiddenPanel, false); - } - } - - moveUp() { - // let panelIndex = _.indexOf(this.dashboard.panels, this.panel); - // let rowAbove = null; - // for (let index = panelIndex-1; index > 0; index--) { - // panel = this.dashboard.panels[index]; - // if (panel.type === 'row') { - // rowAbove = panel; - // } - // } - // - // if (rowAbove) { - // this.panel.y = rowAbove.y; - // } - } - - link(scope, elem) { - elem.addClass('dashboard-row'); - - scope.$watch('ctrl.panel.collapse', () => { - elem.toggleClass('dashboard-row--collapse', this.panel.collapse === true); - }); - } -} diff --git a/public/app/features/plugins/specs/datasource_srv.jest.ts b/public/app/features/plugins/specs/datasource_srv.jest.ts new file mode 100644 index 00000000000..b63e8537837 --- /dev/null +++ b/public/app/features/plugins/specs/datasource_srv.jest.ts @@ -0,0 +1,100 @@ +import config from 'app/core/config'; +import 'app/features/plugins/datasource_srv'; +import { DatasourceSrv } from 'app/features/plugins/datasource_srv'; + +// Datasource variable $datasource with current value 'BBB' +const templateSrv = { + variables: [ + { + type: 'datasource', + name: 'datasource', + current: { + value: 'BBB', + }, + }, + ], +}; + +describe('datasource_srv', function() { + let _datasourceSrv = new DatasourceSrv({}, {}, {}, templateSrv); + + describe('when loading explore sources', () => { + beforeEach(() => { + config.datasources = { + explore1: { + name: 'explore1', + meta: { explore: true, metrics: true }, + }, + explore2: { + name: 'explore2', + meta: { explore: true, metrics: false }, + }, + nonExplore: { + name: 'nonExplore', + meta: { explore: false, metrics: true }, + }, + }; + }); + + it('should return list of explore sources', () => { + const exploreSources = _datasourceSrv.getExploreSources(); + expect(exploreSources.length).toBe(2); + expect(exploreSources[0].name).toBe('explore1'); + expect(exploreSources[1].name).toBe('explore2'); + }); + }); + + describe('when loading metric sources', () => { + let metricSources; + let unsortedDatasources = { + mmm: { + type: 'test-db', + meta: { metrics: { m: 1 } }, + }, + '--Grafana--': { + type: 'grafana', + meta: { builtIn: true, metrics: { m: 1 }, id: 'grafana' }, + }, + '--Mixed--': { + type: 'test-db', + meta: { builtIn: true, metrics: { m: 1 }, id: 'mixed' }, + }, + ZZZ: { + type: 'test-db', + meta: { metrics: { m: 1 } }, + }, + aaa: { + type: 'test-db', + meta: { metrics: { m: 1 } }, + }, + BBB: { + type: 'test-db', + meta: { metrics: { m: 1 } }, + }, + }; + beforeEach(() => { + config.datasources 
= unsortedDatasources; + metricSources = _datasourceSrv.getMetricSources({}); + config.defaultDatasource = 'BBB'; + }); + + it('should return a list of sources sorted case insensitively with builtin sources last', () => { + expect(metricSources[1].name).toBe('aaa'); + expect(metricSources[2].name).toBe('BBB'); + expect(metricSources[3].name).toBe('mmm'); + expect(metricSources[4].name).toBe('ZZZ'); + expect(metricSources[5].name).toBe('--Grafana--'); + expect(metricSources[6].name).toBe('--Mixed--'); + }); + + it('should set default data source', () => { + expect(metricSources[3].name).toBe('default'); + expect(metricSources[3].sort).toBe('BBB'); + }); + + it('should set default inject the variable datasources', () => { + expect(metricSources[0].name).toBe('$datasource'); + expect(metricSources[0].sort).toBe('$datasource'); + }); + }); +}); diff --git a/public/app/features/plugins/specs/datasource_srv_specs.ts b/public/app/features/plugins/specs/datasource_srv_specs.ts deleted file mode 100644 index 85a66b59ee7..00000000000 --- a/public/app/features/plugins/specs/datasource_srv_specs.ts +++ /dev/null @@ -1,64 +0,0 @@ -import { describe, beforeEach, it, expect, angularMocks } from 'test/lib/common'; -import config from 'app/core/config'; -import 'app/features/plugins/datasource_srv'; - -describe('datasource_srv', function() { - var _datasourceSrv; - var metricSources; - var templateSrv = {}; - - beforeEach(angularMocks.module('grafana.core')); - beforeEach( - angularMocks.module(function($provide) { - $provide.value('templateSrv', templateSrv); - }) - ); - beforeEach(angularMocks.module('grafana.services')); - beforeEach( - angularMocks.inject(function(datasourceSrv) { - _datasourceSrv = datasourceSrv; - }) - ); - - describe('when loading metric sources', function() { - var unsortedDatasources = { - mmm: { - type: 'test-db', - meta: { metrics: { m: 1 } }, - }, - '--Grafana--': { - type: 'grafana', - meta: { builtIn: true, metrics: { m: 1 }, id: 'grafana' }, - }, - '--Mixed--': { - type: 'test-db', - meta: { builtIn: true, metrics: { m: 1 }, id: 'mixed' }, - }, - ZZZ: { - type: 'test-db', - meta: { metrics: { m: 1 } }, - }, - aaa: { - type: 'test-db', - meta: { metrics: { m: 1 } }, - }, - BBB: { - type: 'test-db', - meta: { metrics: { m: 1 } }, - }, - }; - beforeEach(function() { - config.datasources = unsortedDatasources; - metricSources = _datasourceSrv.getMetricSources({ skipVariables: true }); - }); - - it('should return a list of sources sorted case insensitively with builtin sources last', function() { - expect(metricSources[0].name).to.be('aaa'); - expect(metricSources[1].name).to.be('BBB'); - expect(metricSources[2].name).to.be('mmm'); - expect(metricSources[3].name).to.be('ZZZ'); - expect(metricSources[4].name).to.be('--Grafana--'); - expect(metricSources[5].name).to.be('--Mixed--'); - }); - }); -}); diff --git a/public/app/features/snapshot/snapshot_ctrl.ts b/public/app/features/snapshot/snapshot_ctrl.ts index 1edeafe0f6f..1dde4876cd5 100644 --- a/public/app/features/snapshot/snapshot_ctrl.ts +++ b/public/app/features/snapshot/snapshot_ctrl.ts @@ -15,12 +15,9 @@ export class SnapshotsCtrl { removeSnapshotConfirmed(snapshot) { _.remove(this.snapshots, { key: snapshot.key }); - this.backendSrv.get('/api/snapshots-delete/' + snapshot.deleteKey).then( + this.backendSrv.delete('/api/snapshots/' + snapshot.key).then( + () => {}, () => { - this.$rootScope.appEvent('alert-success', ['Snapshot deleted', '']); - }, - () => { - this.$rootScope.appEvent('alert-error', ['Unable to delete 
snapshot', '']); this.snapshots.push(snapshot); } ); diff --git a/public/app/features/templating/adhoc_variable.ts b/public/app/features/templating/adhoc_variable.ts index babeaf1f34e..9f8bd4c39a7 100644 --- a/public/app/features/templating/adhoc_variable.ts +++ b/public/app/features/templating/adhoc_variable.ts @@ -3,6 +3,7 @@ import { Variable, assignModelProperties, variableTypes } from './variable'; export class AdhocVariable implements Variable { filters: any[]; + skipUrlSync: boolean; defaults = { type: 'adhoc', @@ -11,6 +12,7 @@ export class AdhocVariable implements Variable { hide: 0, datasource: null, filters: [], + skipUrlSync: false, }; /** @ngInject **/ diff --git a/public/app/features/templating/constant_variable.ts b/public/app/features/templating/constant_variable.ts index f2fb4294537..e727c6e98af 100644 --- a/public/app/features/templating/constant_variable.ts +++ b/public/app/features/templating/constant_variable.ts @@ -4,6 +4,7 @@ export class ConstantVariable implements Variable { query: string; options: any[]; current: any; + skipUrlSync: boolean; defaults = { type: 'constant', @@ -13,6 +14,7 @@ export class ConstantVariable implements Variable { query: '', current: {}, options: [], + skipUrlSync: false, }; /** @ngInject **/ diff --git a/public/app/features/templating/custom_variable.ts b/public/app/features/templating/custom_variable.ts index c15178f6644..4490a41a38f 100644 --- a/public/app/features/templating/custom_variable.ts +++ b/public/app/features/templating/custom_variable.ts @@ -7,6 +7,7 @@ export class CustomVariable implements Variable { includeAll: boolean; multi: boolean; current: any; + skipUrlSync: boolean; defaults = { type: 'custom', @@ -19,6 +20,7 @@ export class CustomVariable implements Variable { includeAll: false, multi: false, allValue: null, + skipUrlSync: false, }; /** @ngInject **/ diff --git a/public/app/features/templating/datasource_variable.ts b/public/app/features/templating/datasource_variable.ts index 4c326a94e3b..519ce21e4d4 100644 --- a/public/app/features/templating/datasource_variable.ts +++ b/public/app/features/templating/datasource_variable.ts @@ -7,6 +7,7 @@ export class DatasourceVariable implements Variable { options: any; current: any; refresh: any; + skipUrlSync: boolean; defaults = { type: 'datasource', @@ -18,6 +19,7 @@ export class DatasourceVariable implements Variable { options: [], query: '', refresh: 1, + skipUrlSync: false, }; /** @ngInject **/ diff --git a/public/app/features/templating/interval_variable.ts b/public/app/features/templating/interval_variable.ts index 3faac316f98..b932819a7b7 100644 --- a/public/app/features/templating/interval_variable.ts +++ b/public/app/features/templating/interval_variable.ts @@ -11,6 +11,7 @@ export class IntervalVariable implements Variable { query: string; refresh: number; current: any; + skipUrlSync: boolean; defaults = { type: 'interval', @@ -24,6 +25,7 @@ export class IntervalVariable implements Variable { auto: false, auto_min: '10s', auto_count: 30, + skipUrlSync: false, }; /** @ngInject **/ diff --git a/public/app/features/templating/query_variable.ts b/public/app/features/templating/query_variable.ts index 54bd7bb660c..5ddd6d32864 100644 --- a/public/app/features/templating/query_variable.ts +++ b/public/app/features/templating/query_variable.ts @@ -22,6 +22,7 @@ export class QueryVariable implements Variable { tagsQuery: string; tagValuesQuery: string; tags: any[]; + skipUrlSync: boolean; defaults = { type: 'query', @@ -42,6 +43,7 @@ export class QueryVariable 
diff --git a/public/app/features/templating/specs/template_srv.jest.ts b/public/app/features/templating/specs/template_srv.jest.ts
index 59915776b4f..86b6aa7ec99 100644
--- a/public/app/features/templating/specs/template_srv.jest.ts
+++ b/public/app/features/templating/specs/template_srv.jest.ts
@@ -345,6 +345,49 @@ describe('templateSrv', function() {
     });
   });
 
+  describe('fillVariableValuesForUrl skip url sync', function() {
+    beforeEach(function() {
+      initTemplateSrv([
+        {
+          name: 'test',
+          skipUrlSync: true,
+          current: { value: 'value' },
+          getValueForUrl: function() {
+            return this.current.value;
+          },
+        },
+      ]);
+    });
+
+    it('should not include template variable value in url', function() {
+      var params = {};
+      _templateSrv.fillVariableValuesForUrl(params);
+      expect(params['var-test']).toBe(undefined);
+    });
+  });
+
+  describe('fillVariableValuesForUrl with multi value with skip url sync', function() {
+    beforeEach(function() {
+      initTemplateSrv([
+        {
+          type: 'query',
+          name: 'test',
+          skipUrlSync: true,
+          current: { value: ['val1', 'val2'] },
+          getValueForUrl: function() {
+            return this.current.value;
+          },
+        },
+      ]);
+    });
+
+    it('should not include template variable value in url', function() {
+      var params = {};
+      _templateSrv.fillVariableValuesForUrl(params);
+      expect(params['var-test']).toBe(undefined);
+    });
+  });
+
   describe('fillVariableValuesForUrl with multi value and scopedVars', function() {
     beforeEach(function() {
       initTemplateSrv([{ type: 'query', name: 'test', current: { value: ['val1', 'val2'] } }]);
@@ -359,6 +402,20 @@ describe('templateSrv', function() {
     });
   });
 
+  describe('fillVariableValuesForUrl with multi value, scopedVars and skip url sync', function() {
+    beforeEach(function() {
+      initTemplateSrv([{ type: 'query', name: 'test', current: { value: ['val1', 'val2'] } }]);
+    });
+
+    it('should not set scoped value as url params', function() {
+      var params = {};
+      _templateSrv.fillVariableValuesForUrl(params, {
+        test: { name: 'test', value: 'val1', skipUrlSync: true },
+      });
+      expect(params['var-test']).toBe(undefined);
+    });
+  });
+
   describe('replaceWithText', function() {
     beforeEach(function() {
       initTemplateSrv([
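
The template_srv.jest.ts cases above pin down the contract for the new flag: a variable, or a scoped variable passed via scopedVars, that carries skipUrlSync: true must contribute no var-<name> entry to the URL params. Illustrated with hypothetical variable names (region synced, secret skipped; neither is part of the patch):

    // Hypothetical illustration of the tested contract.
    const params = {};
    templateSrv.fillVariableValuesForUrl(params);
    params['var-region']; // 'us-east' -- serialized via getValueForUrl()
    params['var-secret']; // undefined -- skipUrlSync: true keeps it out
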
diff --git a/public/app/features/templating/specs/variable_srv_init_specs.ts b/public/app/features/templating/specs/variable_srv_init_specs.ts
index cb98d1d7736..11639c6aa8f 100644
--- a/public/app/features/templating/specs/variable_srv_init_specs.ts
+++ b/public/app/features/templating/specs/variable_srv_init_specs.ts
@@ -179,4 +179,38 @@ describe('VariableSrv init', function() {
       expect(variable.options[2].selected).to.be(false);
     });
   });
+
+  describeInitScenario('when template variable is present in url multiple times using key/values', scenario => {
+    scenario.setup(() => {
+      scenario.variables = [
+        {
+          name: 'apps',
+          type: 'query',
+          multi: true,
+          current: { text: 'Val1', value: 'val1' },
+          options: [
+            { text: 'Val1', value: 'val1' },
+            { text: 'Val2', value: 'val2' },
+            { text: 'Val3', value: 'val3', selected: true },
+          ],
+        },
+      ];
+      scenario.urlParams['var-apps'] = ['val2', 'val1'];
+    });
+
+    it('should update current value', function() {
+      var variable = ctx.variableSrv.variables[0];
+      expect(variable.current.value.length).to.be(2);
+      expect(variable.current.value[0]).to.be('val2');
+      expect(variable.current.value[1]).to.be('val1');
+      expect(variable.current.text).to.be('Val2 + Val1');
+      expect(variable.options[0].selected).to.be(true);
+      expect(variable.options[1].selected).to.be(true);
+    });
+
+    it('should set options that are not in value to selected false', function() {
+      var variable = ctx.variableSrv.variables[0];
+      expect(variable.options[2].selected).to.be(false);
+    });
+  });
 });
diff --git a/public/app/features/templating/template_srv.ts b/public/app/features/templating/template_srv.ts
index 99a9f53d547..fc79d12ff9e 100644
--- a/public/app/features/templating/template_srv.ts
+++ b/public/app/features/templating/template_srv.ts
@@ -75,7 +75,7 @@ export class TemplateSrv {
       return luceneEscape(value);
     }
     if (value instanceof Array && value.length === 0) {
-      return '__empty__';
+      return '__empty__';
     }
     var quotedValues = _.map(value, function(val) {
       return '"' + luceneEscape(val) + '"';
@@ -250,8 +250,14 @@
   fillVariableValuesForUrl(params, scopedVars) {
     _.each(this.variables, function(variable) {
       if (scopedVars && scopedVars[variable.name] !== void 0) {
+        if (scopedVars[variable.name].skipUrlSync) {
+          return;
+        }
         params['var-' + variable.name] = scopedVars[variable.name].value;
       } else {
+        if (variable.skipUrlSync) {
+          return;
+        }
         params['var-' + variable.name] = variable.getValueForUrl();
       }
     });
diff --git a/public/app/features/templating/variable_srv.ts b/public/app/features/templating/variable_srv.ts
index fb882516e85..8ad3c2845e2 100644
--- a/public/app/features/templating/variable_srv.ts
+++ b/public/app/features/templating/variable_srv.ts
@@ -38,7 +38,11 @@ export class VariableSrv {
     });
   }
 
-  onDashboardRefresh() {
+  onDashboardRefresh(evt, payload) {
+    if (payload && payload.fromVariableValueUpdated) {
+      return Promise.resolve({});
+    }
+
     var promises = this.variables.filter(variable => variable.refresh === 2).map(variable => {
       var previousOptions = variable.options.slice();
 
@@ -130,7 +134,7 @@
     return this.$q.all(promises).then(() => {
       if (emitChangeEvents) {
         this.$rootScope.$emit('template-variable-value-updated');
-        this.$rootScope.$broadcast('refresh');
+        this.$rootScope.$broadcast('refresh', { fromVariableValueUpdated: true });
       }
     });
   }
@@ -209,7 +213,24 @@
         return op.text === urlValue || op.value === urlValue;
       });
 
-      option = option || { text: urlValue, value: urlValue };
+      let defaultText = urlValue;
+      let defaultValue = urlValue;
+
+      if (!option && _.isArray(urlValue)) {
+        defaultText = [];
+
+        for (let n = 0; n < urlValue.length; n++) {
+          let t = _.find(variable.options, op => {
+            return op.value === urlValue[n];
+          });
+
+          if (t) {
+            defaultText.push(t.text);
+          }
+        }
+      }
+
+      option = option || { text: defaultText, value: defaultValue };
       return variable.setValue(option);
     });
   }
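
Two behavioral changes land in variable_srv.ts above. First, the refresh broadcast that a variable value update emits is now tagged with fromVariableValueUpdated, and onDashboardRefresh returns early for it, so variables with refresh === 2 (refresh on time range change) are not immediately re-queried by the very update they triggered. Second, when a multi-value variable arrives in the URL as an array that matches no single predefined option, the display text is rebuilt by looking each value up in variable.options. A self-contained sketch of that lookup follows; optionFromUrlValue is a hypothetical name (in the patch this logic is inline), and the variable shape is simplified to the fields the hunk touches:

    import _ from 'lodash';

    // Build the { text, value } pair for a value taken from the URL.
    // Array values get their display text recovered option-by-option,
    // silently skipping values with no matching option.
    function optionFromUrlValue(variable: { options: any[] }, urlValue: any) {
      let option = _.find(variable.options, op => op.text === urlValue || op.value === urlValue);

      let defaultText = urlValue;
      if (!option && _.isArray(urlValue)) {
        defaultText = urlValue
          .map(v => _.find(variable.options, op => op.value === v))
          .filter(match => !!match)
          .map(match => match.text);
      }

      return option || { text: defaultText, value: urlValue };
    }

With the options from the init spec above, optionFromUrlValue(variable, ['val2', 'val1']) yields { text: ['Val2', 'Val1'], value: ['val2', 'val1'] }, matching the 'Val2 + Val1' text the scenario expects after setValue().
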
diff --git a/public/app/partials/login.html b/public/app/partials/login.html
index 8680924977f..1919759334b 100644
--- a/public/app/partials/login.html
+++ b/public/app/partials/login.html
@@ -4,70 +4,101 @@
     Grafana
    -