Merge branch 'master' into postgres-query-builder
@@ -5,9 +5,11 @@ aliases:
|
||||
ignore: /.*/
|
||||
tags:
|
||||
only: /^v[0-9]+(\.[0-9]+){2}(-.+|[^-.]*)$/
|
||||
- &filter-not-release
|
||||
- &filter-not-release-or-master
|
||||
tags:
|
||||
ignore: /^v[0-9]+(\.[0-9]+){2}(-.+|[^-.]*)$/
|
||||
branches:
|
||||
ignore: master
|
||||
- &filter-only-master
|
||||
branches:
|
||||
only: master
|
||||
@@ -89,7 +91,7 @@ jobs:
|
||||
name: run linters
|
||||
command: 'gometalinter.v2 --enable-gc --vendor --deadline 10m --disable-all --enable=deadcode --enable=ineffassign --enable=structcheck --enable=unconvert --enable=varcheck ./...'
|
||||
- run:
|
||||
name: run go vet
|
||||
name: run go vet
|
||||
command: 'go vet ./pkg/...'
|
||||
|
||||
test-frontend:
|
||||
@@ -156,8 +158,65 @@ jobs:
|
||||
- dist/grafana*
|
||||
- scripts/*.sh
|
||||
- scripts/publish
|
||||
- store_artifacts:
|
||||
path: dist
|
||||
|
||||
build:
|
||||
docker:
|
||||
- image: grafana/build-container:1.0.0
|
||||
working_directory: /go/src/github.com/grafana/grafana
|
||||
steps:
|
||||
- checkout
|
||||
- run:
|
||||
name: prepare build tools
|
||||
command: '/tmp/bootstrap.sh'
|
||||
- run:
|
||||
name: build and package grafana
|
||||
command: './scripts/build/build.sh'
|
||||
- run:
|
||||
name: sign packages
|
||||
command: './scripts/build/sign_packages.sh'
|
||||
- run:
|
||||
name: sha-sum packages
|
||||
command: 'go run build.go sha-dist'
|
||||
- persist_to_workspace:
|
||||
root: .
|
||||
paths:
|
||||
- dist/grafana*
|
||||
|
||||
grafana-docker-master:
|
||||
docker:
|
||||
- image: docker:stable-git
|
||||
steps:
|
||||
- checkout
|
||||
- attach_workspace:
|
||||
at: .
|
||||
- setup_remote_docker
|
||||
- run: docker info
|
||||
- run: cp dist/grafana-latest.linux-x64.tar.gz packaging/docker
|
||||
- run: cd packaging/docker && ./build-deploy.sh "master-${CIRCLE_SHA1}"
|
||||
|
||||
grafana-docker-pr:
|
||||
docker:
|
||||
- image: docker:stable-git
|
||||
steps:
|
||||
- checkout
|
||||
- attach_workspace:
|
||||
at: .
|
||||
- setup_remote_docker
|
||||
- run: docker info
|
||||
- run: cp dist/grafana-latest.linux-x64.tar.gz packaging/docker
|
||||
- run: cd packaging/docker && ./build.sh "${CIRCLE_SHA1}"
|
||||
|
||||
grafana-docker-release:
|
||||
docker:
|
||||
- image: docker:stable-git
|
||||
steps:
|
||||
- checkout
|
||||
- attach_workspace:
|
||||
at: .
|
||||
- setup_remote_docker
|
||||
- run: docker info
|
||||
- run: cp dist/grafana-latest.linux-x64.tar.gz packaging/docker
|
||||
- run: cd packaging/docker && ./build-deploy.sh "${CIRCLE_TAG}"
|
||||
|
||||
build-enterprise:
|
||||
docker:
|
||||
@@ -213,9 +272,6 @@ jobs:
|
||||
- run:
|
||||
name: Trigger Windows build
|
||||
command: './scripts/trigger_windows_build.sh ${APPVEYOR_TOKEN} ${CIRCLE_SHA1} master'
|
||||
- run:
|
||||
name: Trigger Docker build
|
||||
command: './scripts/trigger_docker_build.sh ${TRIGGER_GRAFANA_PACKER_CIRCLECI_TOKEN} master-$(echo "${CIRCLE_SHA1}" | cut -b1-7)'
|
||||
- run:
|
||||
name: Publish to Grafana.com
|
||||
command: |
|
||||
@@ -237,30 +293,27 @@ jobs:
|
||||
- run:
|
||||
name: Trigger Windows build
|
||||
command: './scripts/trigger_windows_build.sh ${APPVEYOR_TOKEN} ${CIRCLE_SHA1} release'
|
||||
- run:
|
||||
name: Trigger Docker build
|
||||
command: './scripts/trigger_docker_build.sh ${TRIGGER_GRAFANA_PACKER_CIRCLECI_TOKEN} ${CIRCLE_TAG}'
|
||||
|
||||
workflows:
|
||||
version: 2
|
||||
test-and-build:
|
||||
build-master:
|
||||
jobs:
|
||||
- build-all:
|
||||
filters: *filter-only-master
|
||||
- build-enterprise:
|
||||
filters: *filter-only-master
|
||||
- codespell:
|
||||
filters: *filter-not-release
|
||||
filters: *filter-only-master
|
||||
- gometalinter:
|
||||
filters: *filter-not-release
|
||||
filters: *filter-only-master
|
||||
- test-frontend:
|
||||
filters: *filter-not-release
|
||||
filters: *filter-only-master
|
||||
- test-backend:
|
||||
filters: *filter-not-release
|
||||
filters: *filter-only-master
|
||||
- mysql-integration-test:
|
||||
filters: *filter-not-release
|
||||
filters: *filter-only-master
|
||||
- postgres-integration-test:
|
||||
filters: *filter-not-release
|
||||
filters: *filter-only-master
|
||||
- deploy-master:
|
||||
requires:
|
||||
- build-all
|
||||
@@ -270,7 +323,17 @@ workflows:
|
||||
- gometalinter
|
||||
- mysql-integration-test
|
||||
- postgres-integration-test
|
||||
filters: *filter-only-master
|
||||
filters: *filter-only-master
|
||||
- grafana-docker-master:
|
||||
requires:
|
||||
- build-all
|
||||
- test-backend
|
||||
- test-frontend
|
||||
- codespell
|
||||
- gometalinter
|
||||
- mysql-integration-test
|
||||
- postgres-integration-test
|
||||
filters: *filter-only-master
|
||||
- deploy-enterprise-master:
|
||||
requires:
|
||||
- build-all
|
||||
@@ -309,3 +372,40 @@ workflows:
|
||||
- mysql-integration-test
|
||||
- postgres-integration-test
|
||||
filters: *filter-only-release
|
||||
- grafana-docker-release:
|
||||
requires:
|
||||
- build-all
|
||||
- test-backend
|
||||
- test-frontend
|
||||
- codespell
|
||||
- gometalinter
|
||||
- mysql-integration-test
|
||||
- postgres-integration-test
|
||||
filters: *filter-only-release
|
||||
|
||||
build-branches-and-prs:
|
||||
jobs:
|
||||
- build:
|
||||
filters: *filter-not-release-or-master
|
||||
- codespell:
|
||||
filters: *filter-not-release-or-master
|
||||
- gometalinter:
|
||||
filters: *filter-not-release-or-master
|
||||
- test-frontend:
|
||||
filters: *filter-not-release-or-master
|
||||
- test-backend:
|
||||
filters: *filter-not-release-or-master
|
||||
- mysql-integration-test:
|
||||
filters: *filter-not-release-or-master
|
||||
- postgres-integration-test:
|
||||
filters: *filter-not-release-or-master
|
||||
- grafana-docker-pr:
|
||||
requires:
|
||||
- build
|
||||
- test-backend
|
||||
- test-frontend
|
||||
- codespell
|
||||
- gometalinter
|
||||
- mysql-integration-test
|
||||
- postgres-integration-test
|
||||
filters: *filter-not-release-or-master
|
||||
|
||||
.gitignore (vendored, 1 line changed)
@@ -58,6 +58,7 @@ debug.test
/examples/*/dist
/packaging/**/*.rpm
/packaging/**/*.deb
/packaging/**/*.tar.gz

# Ignore OSX indexing
.DS_Store
@@ -16,6 +16,7 @@
* **Prometheus**: Heatmap - fix unhandled error when some points are missing [#12484](https://github.com/grafana/grafana/issues/12484)
* **Prometheus**: Add $interval, $interval_ms, $range, and $range_ms support for dashboard and template queries [#12597](https://github.com/grafana/grafana/issues/12597)
* **Variables**: Skip unneeded extra query request when de-selecting variable values used for repeated panels [#8186](https://github.com/grafana/grafana/issues/8186), thx [@mtanda](https://github.com/mtanda)
* **Postgres/MySQL/MSSQL**: Add previous fill mode to $__timeGroup macro which will fill in previously seen value when point is missing [#12756](https://github.com/grafana/grafana/issues/12756), thx [@svenklemm](https://github.com/svenklemm)
* **Postgres/MySQL/MSSQL**: Use floor rounding in $__timeGroup macro function [#12460](https://github.com/grafana/grafana/issues/12460), thx [@svenklemm](https://github.com/svenklemm)
* **Postgres/MySQL/MSSQL**: Use metric column as prefix when returning multiple value columns [#12727](https://github.com/grafana/grafana/issues/12727), thx [@svenklemm](https://github.com/svenklemm)
* **Postgres/MySQL/MSSQL**: New $__timeGroupAlias macro. Postgres $__timeGroup no longer automatically adds time column alias [#12749](https://github.com/grafana/grafana/issues/12749), thx [@svenklemm](https://github.com/svenklemm)
@@ -37,11 +38,18 @@
* **Units**: Change units to include characters for power of 2 and 3 [#12744](https://github.com/grafana/grafana/pull/12744), thx [@Worty](https://github.com/Worty)
* **Graph**: Option to hide series from tooltip [#3341](https://github.com/grafana/grafana/issues/3341), thx [@mtanda](https://github.com/mtanda)
* **UI**: Fix iOS home screen "app" icon and Windows 10 app experience [#12752](https://github.com/grafana/grafana/issues/12752), thx [@andig](https://github.com/andig)
* **Datasource**: Fix UI issue with secret fields after updating datasource [#11270](https://github.com/grafana/grafana/issues/11270)

### Breaking changes

* Postgres datasource no longer automatically adds a time column alias when using the $__timeGroup macro. However, there is code in place that should make this change backward compatible and shouldn't create any issues.

### New experimental features

These are new features that are still being worked on and are in an experimental phase. We encourage users to try them out and provide feedback in the related issue.

* **Dashboard**: Auto fit dashboard panels to optimize space used for current TV / Monitor [#12768](https://github.com/grafana/grafana/issues/12768)

# 5.2.2 (2018-07-25)

### Minor
Makefile (8 lines changed)
@@ -24,6 +24,12 @@ build-js:

build: build-go build-js

build-docker-dev:
@echo "\033[92mInfo:\033[0m the frontend code is expected to be built already."
go run build.go -goos linux -pkg-arch amd64 ${OPT} build package-only latest
cp dist/grafana-latest.linux-x64.tar.gz packaging/docker
cd packaging/docker && docker build --tag grafana/grafana:dev .

test-go:
go test -v ./pkg/...

@@ -36,4 +42,4 @@ run:
./bin/grafana-server

protoc:
protoc -I pkg/tsdb/models pkg/tsdb/models/*.proto --go_out=plugins=grpc:pkg/tsdb/models/.
protoc -I pkg/tsdb/models pkg/tsdb/models/*.proto --go_out=plugins=grpc:pkg/tsdb/models/.
@@ -74,6 +74,15 @@ bra run

Open Grafana in your browser (default: `http://localhost:3000`) and log in with the admin user (default: `user/pass = admin/admin`).

### Building a docker image (on linux/amd64)

This builds a docker image from your local sources:

1. Build the frontend: `go run build.go build-frontend`
2. Build the docker image: `make build-docker-dev`

The resulting image will be tagged as `grafana/grafana:dev`.

### Dev config

Create a `custom.ini` in the `conf` directory to override default configuration options.
ROADMAP.md (16 lines changed)
@@ -1,9 +1,10 @@
# Roadmap (2018-06-26)
# Roadmap (2018-08-07)

This roadmap is a tentative plan for the core development team. Things change constantly as PRs come in and priorities change.
But it will give you an idea of our current vision and plan.

### Short term (1-2 months)
- PRs & Bugs
- Multi-Stat panel
- Metrics & Log Explore UI

@@ -11,17 +12,16 @@ But it will give you an idea of our current vision and plan.
- React Panels
- Change visualization (panel type) on the fly.
- Templating Query Editor UI Plugin hook
- Backend plugins

### Long term (4 - 8 months)

- Alerting improvements (silence, per series tracking, etc)
- Progress on React migration
- Alerting improvements (silence, per series tracking, etc)
- Progress on React migration

### In a distant future far far away

- Meta queries
- Integrated light weight TSDB
- Web socket & live data sources
- Meta queries
- Integrated light weight TSDB
- Web socket & live data sources

### Outside contributions
We know this is being worked on right now by contributors (and we hope to merge it when it's ready).
@@ -64,7 +64,7 @@
|
||||
"editable": true,
|
||||
"gnetId": null,
|
||||
"graphTooltip": 0,
|
||||
"iteration": 1532949769359,
|
||||
"iteration": 1533713720618,
|
||||
"links": [],
|
||||
"panels": [
|
||||
{
|
||||
@@ -338,8 +338,8 @@
|
||||
"datasource": "gdev-mssql-ds-tests",
|
||||
"fill": 2,
|
||||
"gridPos": {
|
||||
"h": 9,
|
||||
"w": 8,
|
||||
"h": 6,
|
||||
"w": 6,
|
||||
"x": 0,
|
||||
"y": 7
|
||||
},
|
||||
@@ -421,9 +421,9 @@
|
||||
"datasource": "gdev-mssql-ds-tests",
|
||||
"fill": 2,
|
||||
"gridPos": {
|
||||
"h": 9,
|
||||
"w": 8,
|
||||
"x": 8,
|
||||
"h": 6,
|
||||
"w": 6,
|
||||
"x": 6,
|
||||
"y": 7
|
||||
},
|
||||
"id": 9,
|
||||
@@ -504,9 +504,9 @@
|
||||
"datasource": "gdev-mssql-ds-tests",
|
||||
"fill": 2,
|
||||
"gridPos": {
|
||||
"h": 9,
|
||||
"w": 8,
|
||||
"x": 16,
|
||||
"h": 6,
|
||||
"w": 6,
|
||||
"x": 12,
|
||||
"y": 7
|
||||
},
|
||||
"id": 10,
|
||||
@@ -579,6 +579,89 @@
|
||||
"alignLevel": null
|
||||
}
|
||||
},
|
||||
{
|
||||
"aliasColors": {},
|
||||
"bars": false,
|
||||
"dashLength": 10,
|
||||
"dashes": false,
|
||||
"datasource": "gdev-mssql-ds-tests",
|
||||
"fill": 2,
|
||||
"gridPos": {
|
||||
"h": 6,
|
||||
"w": 6,
|
||||
"x": 18,
|
||||
"y": 7
|
||||
},
|
||||
"id": 36,
|
||||
"legend": {
|
||||
"avg": false,
|
||||
"current": false,
|
||||
"max": false,
|
||||
"min": false,
|
||||
"show": true,
|
||||
"total": false,
|
||||
"values": false
|
||||
},
|
||||
"lines": true,
|
||||
"linewidth": 2,
|
||||
"links": [],
|
||||
"nullPointMode": "null as zero",
|
||||
"percentage": false,
|
||||
"pointradius": 3,
|
||||
"points": true,
|
||||
"renderer": "flot",
|
||||
"seriesOverrides": [],
|
||||
"spaceLength": 10,
|
||||
"stack": false,
|
||||
"steppedLine": true,
|
||||
"targets": [
|
||||
{
|
||||
"alias": "",
|
||||
"format": "time_series",
|
||||
"rawSql": "SELECT $__timeGroupAlias(time, '5m', previous), avg(value) as value FROM metric WHERE $__timeFilter(time) GROUP BY $__timeGroup(time, '5m') ORDER BY 1",
|
||||
"refId": "A"
|
||||
}
|
||||
],
|
||||
"thresholds": [],
|
||||
"timeFrom": null,
|
||||
"timeShift": null,
|
||||
"title": "timeGroup macro 5m with fill(previous) and null as zero",
|
||||
"tooltip": {
|
||||
"shared": true,
|
||||
"sort": 0,
|
||||
"value_type": "individual"
|
||||
},
|
||||
"type": "graph",
|
||||
"xaxis": {
|
||||
"buckets": null,
|
||||
"mode": "time",
|
||||
"name": null,
|
||||
"show": true,
|
||||
"values": []
|
||||
},
|
||||
"yaxes": [
|
||||
{
|
||||
"format": "short",
|
||||
"label": null,
|
||||
"logBase": 1,
|
||||
"max": null,
|
||||
"min": "0",
|
||||
"show": true
|
||||
},
|
||||
{
|
||||
"format": "short",
|
||||
"label": null,
|
||||
"logBase": 1,
|
||||
"max": null,
|
||||
"min": null,
|
||||
"show": true
|
||||
}
|
||||
],
|
||||
"yaxis": {
|
||||
"align": false,
|
||||
"alignLevel": null
|
||||
}
|
||||
},
|
||||
{
|
||||
"aliasColors": {},
|
||||
"bars": true,
|
||||
@@ -587,10 +670,10 @@
|
||||
"datasource": "gdev-mssql-ds-tests",
|
||||
"fill": 2,
|
||||
"gridPos": {
|
||||
"h": 9,
|
||||
"w": 8,
|
||||
"h": 6,
|
||||
"w": 6,
|
||||
"x": 0,
|
||||
"y": 16
|
||||
"y": 13
|
||||
},
|
||||
"id": 16,
|
||||
"legend": {
|
||||
@@ -670,10 +753,10 @@
|
||||
"datasource": "gdev-mssql-ds-tests",
|
||||
"fill": 2,
|
||||
"gridPos": {
|
||||
"h": 9,
|
||||
"w": 8,
|
||||
"x": 8,
|
||||
"y": 16
|
||||
"h": 6,
|
||||
"w": 6,
|
||||
"x": 6,
|
||||
"y": 13
|
||||
},
|
||||
"id": 12,
|
||||
"legend": {
|
||||
@@ -753,10 +836,10 @@
|
||||
"datasource": "gdev-mssql-ds-tests",
|
||||
"fill": 2,
|
||||
"gridPos": {
|
||||
"h": 9,
|
||||
"w": 8,
|
||||
"x": 16,
|
||||
"y": 16
|
||||
"h": 6,
|
||||
"w": 6,
|
||||
"x": 12,
|
||||
"y": 13
|
||||
},
|
||||
"id": 13,
|
||||
"legend": {
|
||||
@@ -828,6 +911,89 @@
|
||||
"alignLevel": null
|
||||
}
|
||||
},
|
||||
{
|
||||
"aliasColors": {},
|
||||
"bars": true,
|
||||
"dashLength": 10,
|
||||
"dashes": false,
|
||||
"datasource": "gdev-mssql-ds-tests",
|
||||
"fill": 2,
|
||||
"gridPos": {
|
||||
"h": 6,
|
||||
"w": 6,
|
||||
"x": 18,
|
||||
"y": 13
|
||||
},
|
||||
"id": 37,
|
||||
"legend": {
|
||||
"avg": false,
|
||||
"current": false,
|
||||
"max": false,
|
||||
"min": false,
|
||||
"show": true,
|
||||
"total": false,
|
||||
"values": false
|
||||
},
|
||||
"lines": false,
|
||||
"linewidth": 2,
|
||||
"links": [],
|
||||
"nullPointMode": "null",
|
||||
"percentage": false,
|
||||
"pointradius": 3,
|
||||
"points": false,
|
||||
"renderer": "flot",
|
||||
"seriesOverrides": [],
|
||||
"spaceLength": 10,
|
||||
"stack": false,
|
||||
"steppedLine": true,
|
||||
"targets": [
|
||||
{
|
||||
"alias": "",
|
||||
"format": "time_series",
|
||||
"rawSql": "SELECT $__timeGroupAlias(time, '$summarize', previous), sum(value) as value FROM metric WHERE $__timeFilter(time) GROUP BY $__timeGroup(time, '$summarize') ORDER BY 1",
|
||||
"refId": "A"
|
||||
}
|
||||
],
|
||||
"thresholds": [],
|
||||
"timeFrom": null,
|
||||
"timeShift": null,
|
||||
"title": "Metrics - timeGroup macro $summarize with fill(previous)",
|
||||
"tooltip": {
|
||||
"shared": true,
|
||||
"sort": 0,
|
||||
"value_type": "individual"
|
||||
},
|
||||
"type": "graph",
|
||||
"xaxis": {
|
||||
"buckets": null,
|
||||
"mode": "time",
|
||||
"name": null,
|
||||
"show": true,
|
||||
"values": []
|
||||
},
|
||||
"yaxes": [
|
||||
{
|
||||
"format": "short",
|
||||
"label": null,
|
||||
"logBase": 1,
|
||||
"max": null,
|
||||
"min": null,
|
||||
"show": true
|
||||
},
|
||||
{
|
||||
"format": "short",
|
||||
"label": null,
|
||||
"logBase": 1,
|
||||
"max": null,
|
||||
"min": null,
|
||||
"show": true
|
||||
}
|
||||
],
|
||||
"yaxis": {
|
||||
"align": false,
|
||||
"alignLevel": null
|
||||
}
|
||||
},
|
||||
{
|
||||
"aliasColors": {},
|
||||
"bars": false,
|
||||
@@ -839,7 +1005,7 @@
|
||||
"h": 8,
|
||||
"w": 12,
|
||||
"x": 0,
|
||||
"y": 25
|
||||
"y": 19
|
||||
},
|
||||
"id": 27,
|
||||
"legend": {
|
||||
@@ -926,7 +1092,7 @@
|
||||
"h": 8,
|
||||
"w": 12,
|
||||
"x": 12,
|
||||
"y": 25
|
||||
"y": 19
|
||||
},
|
||||
"id": 5,
|
||||
"legend": {
|
||||
@@ -1029,7 +1195,7 @@
|
||||
"h": 8,
|
||||
"w": 12,
|
||||
"x": 0,
|
||||
"y": 33
|
||||
"y": 27
|
||||
},
|
||||
"id": 4,
|
||||
"legend": {
|
||||
@@ -1116,7 +1282,7 @@
|
||||
"h": 8,
|
||||
"w": 12,
|
||||
"x": 12,
|
||||
"y": 33
|
||||
"y": 27
|
||||
},
|
||||
"id": 28,
|
||||
"legend": {
|
||||
@@ -1201,7 +1367,7 @@
|
||||
"h": 8,
|
||||
"w": 12,
|
||||
"x": 0,
|
||||
"y": 41
|
||||
"y": 35
|
||||
},
|
||||
"id": 19,
|
||||
"legend": {
|
||||
@@ -1288,7 +1454,7 @@
|
||||
"h": 8,
|
||||
"w": 12,
|
||||
"x": 12,
|
||||
"y": 41
|
||||
"y": 35
|
||||
},
|
||||
"id": 18,
|
||||
"legend": {
|
||||
@@ -1373,7 +1539,7 @@
|
||||
"h": 8,
|
||||
"w": 12,
|
||||
"x": 0,
|
||||
"y": 49
|
||||
"y": 43
|
||||
},
|
||||
"id": 17,
|
||||
"legend": {
|
||||
@@ -1460,7 +1626,7 @@
|
||||
"h": 8,
|
||||
"w": 12,
|
||||
"x": 12,
|
||||
"y": 49
|
||||
"y": 43
|
||||
},
|
||||
"id": 20,
|
||||
"legend": {
|
||||
@@ -1545,7 +1711,7 @@
|
||||
"h": 8,
|
||||
"w": 12,
|
||||
"x": 0,
|
||||
"y": 57
|
||||
"y": 51
|
||||
},
|
||||
"id": 29,
|
||||
"legend": {
|
||||
@@ -1632,7 +1798,7 @@
|
||||
"h": 8,
|
||||
"w": 12,
|
||||
"x": 12,
|
||||
"y": 57
|
||||
"y": 51
|
||||
},
|
||||
"id": 30,
|
||||
"legend": {
|
||||
@@ -1719,7 +1885,7 @@
|
||||
"h": 8,
|
||||
"w": 12,
|
||||
"x": 0,
|
||||
"y": 65
|
||||
"y": 59
|
||||
},
|
||||
"id": 14,
|
||||
"legend": {
|
||||
@@ -1807,7 +1973,7 @@
|
||||
"h": 8,
|
||||
"w": 12,
|
||||
"x": 12,
|
||||
"y": 65
|
||||
"y": 59
|
||||
},
|
||||
"id": 15,
|
||||
"legend": {
|
||||
@@ -1894,7 +2060,7 @@
|
||||
"h": 8,
|
||||
"w": 12,
|
||||
"x": 0,
|
||||
"y": 73
|
||||
"y": 67
|
||||
},
|
||||
"id": 25,
|
||||
"legend": {
|
||||
@@ -1982,7 +2148,7 @@
|
||||
"h": 8,
|
||||
"w": 12,
|
||||
"x": 12,
|
||||
"y": 73
|
||||
"y": 67
|
||||
},
|
||||
"id": 22,
|
||||
"legend": {
|
||||
@@ -2069,7 +2235,7 @@
|
||||
"h": 8,
|
||||
"w": 12,
|
||||
"x": 0,
|
||||
"y": 81
|
||||
"y": 75
|
||||
},
|
||||
"id": 21,
|
||||
"legend": {
|
||||
@@ -2157,7 +2323,7 @@
|
||||
"h": 8,
|
||||
"w": 12,
|
||||
"x": 12,
|
||||
"y": 81
|
||||
"y": 75
|
||||
},
|
||||
"id": 26,
|
||||
"legend": {
|
||||
@@ -2244,7 +2410,7 @@
|
||||
"h": 8,
|
||||
"w": 12,
|
||||
"x": 0,
|
||||
"y": 89
|
||||
"y": 83
|
||||
},
|
||||
"id": 23,
|
||||
"legend": {
|
||||
@@ -2332,7 +2498,7 @@
|
||||
"h": 8,
|
||||
"w": 12,
|
||||
"x": 12,
|
||||
"y": 89
|
||||
"y": 83
|
||||
},
|
||||
"id": 24,
|
||||
"legend": {
|
||||
@@ -2542,5 +2708,5 @@
|
||||
"timezone": "",
|
||||
"title": "Datasource tests - MSSQL (unit test)",
|
||||
"uid": "GlAqcPgmz",
|
||||
"version": 3
|
||||
"version": 10
|
||||
}
|
||||
@@ -64,7 +64,7 @@
|
||||
"editable": true,
|
||||
"gnetId": null,
|
||||
"graphTooltip": 0,
|
||||
"iteration": 1532949531280,
|
||||
"iteration": 1533714324007,
|
||||
"links": [],
|
||||
"panels": [
|
||||
{
|
||||
@@ -338,8 +338,8 @@
|
||||
"datasource": "gdev-mysql-ds-tests",
|
||||
"fill": 2,
|
||||
"gridPos": {
|
||||
"h": 9,
|
||||
"w": 8,
|
||||
"h": 6,
|
||||
"w": 6,
|
||||
"x": 0,
|
||||
"y": 7
|
||||
},
|
||||
@@ -421,9 +421,9 @@
|
||||
"datasource": "gdev-mysql-ds-tests",
|
||||
"fill": 2,
|
||||
"gridPos": {
|
||||
"h": 9,
|
||||
"w": 8,
|
||||
"x": 8,
|
||||
"h": 6,
|
||||
"w": 6,
|
||||
"x": 6,
|
||||
"y": 7
|
||||
},
|
||||
"id": 9,
|
||||
@@ -504,9 +504,9 @@
|
||||
"datasource": "gdev-mysql-ds-tests",
|
||||
"fill": 2,
|
||||
"gridPos": {
|
||||
"h": 9,
|
||||
"w": 8,
|
||||
"x": 16,
|
||||
"h": 6,
|
||||
"w": 6,
|
||||
"x": 12,
|
||||
"y": 7
|
||||
},
|
||||
"id": 10,
|
||||
@@ -579,6 +579,89 @@
|
||||
"alignLevel": null
|
||||
}
|
||||
},
|
||||
{
|
||||
"aliasColors": {},
|
||||
"bars": false,
|
||||
"dashLength": 10,
|
||||
"dashes": false,
|
||||
"datasource": "gdev-mysql-ds-tests",
|
||||
"fill": 2,
|
||||
"gridPos": {
|
||||
"h": 6,
|
||||
"w": 6,
|
||||
"x": 18,
|
||||
"y": 7
|
||||
},
|
||||
"id": 36,
|
||||
"legend": {
|
||||
"avg": false,
|
||||
"current": false,
|
||||
"max": false,
|
||||
"min": false,
|
||||
"show": true,
|
||||
"total": false,
|
||||
"values": false
|
||||
},
|
||||
"lines": true,
|
||||
"linewidth": 2,
|
||||
"links": [],
|
||||
"nullPointMode": "null",
|
||||
"percentage": false,
|
||||
"pointradius": 3,
|
||||
"points": true,
|
||||
"renderer": "flot",
|
||||
"seriesOverrides": [],
|
||||
"spaceLength": 10,
|
||||
"stack": false,
|
||||
"steppedLine": true,
|
||||
"targets": [
|
||||
{
|
||||
"alias": "",
|
||||
"format": "time_series",
|
||||
"rawSql": "SELECT $__timeGroupAlias(time, '5m', previous), avg(value) as value FROM metric WHERE $__timeFilter(time) GROUP BY 1 ORDER BY 1",
|
||||
"refId": "A"
|
||||
}
|
||||
],
|
||||
"thresholds": [],
|
||||
"timeFrom": null,
|
||||
"timeShift": null,
|
||||
"title": "timeGroup macro 5m with fill(previous)",
|
||||
"tooltip": {
|
||||
"shared": true,
|
||||
"sort": 0,
|
||||
"value_type": "individual"
|
||||
},
|
||||
"type": "graph",
|
||||
"xaxis": {
|
||||
"buckets": null,
|
||||
"mode": "time",
|
||||
"name": null,
|
||||
"show": true,
|
||||
"values": []
|
||||
},
|
||||
"yaxes": [
|
||||
{
|
||||
"format": "short",
|
||||
"label": null,
|
||||
"logBase": 1,
|
||||
"max": null,
|
||||
"min": "0",
|
||||
"show": true
|
||||
},
|
||||
{
|
||||
"format": "short",
|
||||
"label": null,
|
||||
"logBase": 1,
|
||||
"max": null,
|
||||
"min": null,
|
||||
"show": true
|
||||
}
|
||||
],
|
||||
"yaxis": {
|
||||
"align": false,
|
||||
"alignLevel": null
|
||||
}
|
||||
},
|
||||
{
|
||||
"aliasColors": {},
|
||||
"bars": true,
|
||||
@@ -587,10 +670,10 @@
|
||||
"datasource": "gdev-mysql-ds-tests",
|
||||
"fill": 2,
|
||||
"gridPos": {
|
||||
"h": 9,
|
||||
"w": 8,
|
||||
"h": 6,
|
||||
"w": 6,
|
||||
"x": 0,
|
||||
"y": 16
|
||||
"y": 13
|
||||
},
|
||||
"id": 16,
|
||||
"legend": {
|
||||
@@ -670,10 +753,10 @@
|
||||
"datasource": "gdev-mysql-ds-tests",
|
||||
"fill": 2,
|
||||
"gridPos": {
|
||||
"h": 9,
|
||||
"w": 8,
|
||||
"x": 8,
|
||||
"y": 16
|
||||
"h": 6,
|
||||
"w": 6,
|
||||
"x": 6,
|
||||
"y": 13
|
||||
},
|
||||
"id": 12,
|
||||
"legend": {
|
||||
@@ -753,10 +836,10 @@
|
||||
"datasource": "gdev-mysql-ds-tests",
|
||||
"fill": 2,
|
||||
"gridPos": {
|
||||
"h": 9,
|
||||
"w": 8,
|
||||
"x": 16,
|
||||
"y": 16
|
||||
"h": 6,
|
||||
"w": 6,
|
||||
"x": 12,
|
||||
"y": 13
|
||||
},
|
||||
"id": 13,
|
||||
"legend": {
|
||||
@@ -828,6 +911,89 @@
|
||||
"alignLevel": null
|
||||
}
|
||||
},
|
||||
{
|
||||
"aliasColors": {},
|
||||
"bars": true,
|
||||
"dashLength": 10,
|
||||
"dashes": false,
|
||||
"datasource": "gdev-mysql-ds-tests",
|
||||
"fill": 2,
|
||||
"gridPos": {
|
||||
"h": 6,
|
||||
"w": 6,
|
||||
"x": 18,
|
||||
"y": 13
|
||||
},
|
||||
"id": 37,
|
||||
"legend": {
|
||||
"avg": false,
|
||||
"current": false,
|
||||
"max": false,
|
||||
"min": false,
|
||||
"show": true,
|
||||
"total": false,
|
||||
"values": false
|
||||
},
|
||||
"lines": false,
|
||||
"linewidth": 2,
|
||||
"links": [],
|
||||
"nullPointMode": "null",
|
||||
"percentage": false,
|
||||
"pointradius": 3,
|
||||
"points": false,
|
||||
"renderer": "flot",
|
||||
"seriesOverrides": [],
|
||||
"spaceLength": 10,
|
||||
"stack": false,
|
||||
"steppedLine": true,
|
||||
"targets": [
|
||||
{
|
||||
"alias": "",
|
||||
"format": "time_series",
|
||||
"rawSql": "SELECT $__timeGroupAlias(time, '$summarize', previous), sum(value) as value FROM metric WHERE $__timeFilter(time) GROUP BY 1 ORDER BY 1",
|
||||
"refId": "A"
|
||||
}
|
||||
],
|
||||
"thresholds": [],
|
||||
"timeFrom": null,
|
||||
"timeShift": null,
|
||||
"title": "Metrics - timeGroup macro $summarize with fill(previous)",
|
||||
"tooltip": {
|
||||
"shared": true,
|
||||
"sort": 0,
|
||||
"value_type": "individual"
|
||||
},
|
||||
"type": "graph",
|
||||
"xaxis": {
|
||||
"buckets": null,
|
||||
"mode": "time",
|
||||
"name": null,
|
||||
"show": true,
|
||||
"values": []
|
||||
},
|
||||
"yaxes": [
|
||||
{
|
||||
"format": "short",
|
||||
"label": null,
|
||||
"logBase": 1,
|
||||
"max": null,
|
||||
"min": null,
|
||||
"show": true
|
||||
},
|
||||
{
|
||||
"format": "short",
|
||||
"label": null,
|
||||
"logBase": 1,
|
||||
"max": null,
|
||||
"min": null,
|
||||
"show": true
|
||||
}
|
||||
],
|
||||
"yaxis": {
|
||||
"align": false,
|
||||
"alignLevel": null
|
||||
}
|
||||
},
|
||||
{
|
||||
"aliasColors": {},
|
||||
"bars": false,
|
||||
@@ -839,7 +1005,7 @@
|
||||
"h": 8,
|
||||
"w": 12,
|
||||
"x": 0,
|
||||
"y": 25
|
||||
"y": 19
|
||||
},
|
||||
"id": 27,
|
||||
"legend": {
|
||||
@@ -926,7 +1092,7 @@
|
||||
"h": 8,
|
||||
"w": 12,
|
||||
"x": 12,
|
||||
"y": 25
|
||||
"y": 19
|
||||
},
|
||||
"id": 5,
|
||||
"legend": {
|
||||
@@ -1023,7 +1189,7 @@
|
||||
"h": 8,
|
||||
"w": 12,
|
||||
"x": 0,
|
||||
"y": 33
|
||||
"y": 27
|
||||
},
|
||||
"id": 4,
|
||||
"legend": {
|
||||
@@ -1110,7 +1276,7 @@
|
||||
"h": 8,
|
||||
"w": 12,
|
||||
"x": 12,
|
||||
"y": 33
|
||||
"y": 27
|
||||
},
|
||||
"id": 28,
|
||||
"legend": {
|
||||
@@ -1195,7 +1361,7 @@
|
||||
"h": 8,
|
||||
"w": 12,
|
||||
"x": 0,
|
||||
"y": 41
|
||||
"y": 35
|
||||
},
|
||||
"id": 19,
|
||||
"legend": {
|
||||
@@ -1282,7 +1448,7 @@
|
||||
"h": 8,
|
||||
"w": 12,
|
||||
"x": 12,
|
||||
"y": 41
|
||||
"y": 35
|
||||
},
|
||||
"id": 18,
|
||||
"legend": {
|
||||
@@ -1367,7 +1533,7 @@
|
||||
"h": 8,
|
||||
"w": 12,
|
||||
"x": 0,
|
||||
"y": 49
|
||||
"y": 43
|
||||
},
|
||||
"id": 17,
|
||||
"legend": {
|
||||
@@ -1454,7 +1620,7 @@
|
||||
"h": 8,
|
||||
"w": 12,
|
||||
"x": 12,
|
||||
"y": 49
|
||||
"y": 43
|
||||
},
|
||||
"id": 20,
|
||||
"legend": {
|
||||
@@ -1539,7 +1705,7 @@
|
||||
"h": 8,
|
||||
"w": 12,
|
||||
"x": 0,
|
||||
"y": 57
|
||||
"y": 51
|
||||
},
|
||||
"id": 14,
|
||||
"legend": {
|
||||
@@ -1627,7 +1793,7 @@
|
||||
"h": 8,
|
||||
"w": 12,
|
||||
"x": 12,
|
||||
"y": 57
|
||||
"y": 51
|
||||
},
|
||||
"id": 15,
|
||||
"legend": {
|
||||
@@ -1714,7 +1880,7 @@
|
||||
"h": 8,
|
||||
"w": 12,
|
||||
"x": 0,
|
||||
"y": 65
|
||||
"y": 59
|
||||
},
|
||||
"id": 25,
|
||||
"legend": {
|
||||
@@ -1802,7 +1968,7 @@
|
||||
"h": 8,
|
||||
"w": 12,
|
||||
"x": 12,
|
||||
"y": 65
|
||||
"y": 59
|
||||
},
|
||||
"id": 22,
|
||||
"legend": {
|
||||
@@ -1889,7 +2055,7 @@
|
||||
"h": 8,
|
||||
"w": 12,
|
||||
"x": 0,
|
||||
"y": 73
|
||||
"y": 67
|
||||
},
|
||||
"id": 21,
|
||||
"legend": {
|
||||
@@ -1977,7 +2143,7 @@
|
||||
"h": 8,
|
||||
"w": 12,
|
||||
"x": 12,
|
||||
"y": 73
|
||||
"y": 67
|
||||
},
|
||||
"id": 26,
|
||||
"legend": {
|
||||
@@ -2064,7 +2230,7 @@
|
||||
"h": 8,
|
||||
"w": 12,
|
||||
"x": 0,
|
||||
"y": 81
|
||||
"y": 75
|
||||
},
|
||||
"id": 23,
|
||||
"legend": {
|
||||
@@ -2152,7 +2318,7 @@
|
||||
"h": 8,
|
||||
"w": 12,
|
||||
"x": 12,
|
||||
"y": 81
|
||||
"y": 75
|
||||
},
|
||||
"id": 24,
|
||||
"legend": {
|
||||
@@ -2360,5 +2526,5 @@
|
||||
"timezone": "",
|
||||
"title": "Datasource tests - MySQL (unittest)",
|
||||
"uid": "Hmf8FDkmz",
|
||||
"version": 1
|
||||
"version": 9
|
||||
}
|
||||
@@ -64,7 +64,7 @@
|
||||
"editable": true,
|
||||
"gnetId": null,
|
||||
"graphTooltip": 0,
|
||||
"iteration": 1532951521836,
|
||||
"iteration": 1533714184500,
|
||||
"links": [],
|
||||
"panels": [
|
||||
{
|
||||
@@ -338,8 +338,8 @@
|
||||
"datasource": "gdev-postgres-ds-tests",
|
||||
"fill": 2,
|
||||
"gridPos": {
|
||||
"h": 9,
|
||||
"w": 8,
|
||||
"h": 6,
|
||||
"w": 6,
|
||||
"x": 0,
|
||||
"y": 7
|
||||
},
|
||||
@@ -421,9 +421,9 @@
|
||||
"datasource": "gdev-postgres-ds-tests",
|
||||
"fill": 2,
|
||||
"gridPos": {
|
||||
"h": 9,
|
||||
"w": 8,
|
||||
"x": 8,
|
||||
"h": 6,
|
||||
"w": 6,
|
||||
"x": 6,
|
||||
"y": 7
|
||||
},
|
||||
"id": 9,
|
||||
@@ -504,9 +504,9 @@
|
||||
"datasource": "gdev-postgres-ds-tests",
|
||||
"fill": 2,
|
||||
"gridPos": {
|
||||
"h": 9,
|
||||
"w": 8,
|
||||
"x": 16,
|
||||
"h": 6,
|
||||
"w": 6,
|
||||
"x": 12,
|
||||
"y": 7
|
||||
},
|
||||
"id": 10,
|
||||
@@ -579,6 +579,89 @@
|
||||
"alignLevel": null
|
||||
}
|
||||
},
|
||||
{
|
||||
"aliasColors": {},
|
||||
"bars": false,
|
||||
"dashLength": 10,
|
||||
"dashes": false,
|
||||
"datasource": "gdev-postgres-ds-tests",
|
||||
"fill": 2,
|
||||
"gridPos": {
|
||||
"h": 6,
|
||||
"w": 6,
|
||||
"x": 18,
|
||||
"y": 7
|
||||
},
|
||||
"id": 36,
|
||||
"legend": {
|
||||
"avg": false,
|
||||
"current": false,
|
||||
"max": false,
|
||||
"min": false,
|
||||
"show": true,
|
||||
"total": false,
|
||||
"values": false
|
||||
},
|
||||
"lines": true,
|
||||
"linewidth": 2,
|
||||
"links": [],
|
||||
"nullPointMode": "null",
|
||||
"percentage": false,
|
||||
"pointradius": 3,
|
||||
"points": true,
|
||||
"renderer": "flot",
|
||||
"seriesOverrides": [],
|
||||
"spaceLength": 10,
|
||||
"stack": false,
|
||||
"steppedLine": true,
|
||||
"targets": [
|
||||
{
|
||||
"alias": "",
|
||||
"format": "time_series",
|
||||
"rawSql": "SELECT $__timeGroupAlias(time, '5m', previous), avg(value) as value FROM metric WHERE $__timeFilter(time) GROUP BY 1 ORDER BY 1",
|
||||
"refId": "A"
|
||||
}
|
||||
],
|
||||
"thresholds": [],
|
||||
"timeFrom": null,
|
||||
"timeShift": null,
|
||||
"title": "timeGroup macro 5m with fill(previous)",
|
||||
"tooltip": {
|
||||
"shared": true,
|
||||
"sort": 0,
|
||||
"value_type": "individual"
|
||||
},
|
||||
"type": "graph",
|
||||
"xaxis": {
|
||||
"buckets": null,
|
||||
"mode": "time",
|
||||
"name": null,
|
||||
"show": true,
|
||||
"values": []
|
||||
},
|
||||
"yaxes": [
|
||||
{
|
||||
"format": "short",
|
||||
"label": null,
|
||||
"logBase": 1,
|
||||
"max": null,
|
||||
"min": "0",
|
||||
"show": true
|
||||
},
|
||||
{
|
||||
"format": "short",
|
||||
"label": null,
|
||||
"logBase": 1,
|
||||
"max": null,
|
||||
"min": null,
|
||||
"show": true
|
||||
}
|
||||
],
|
||||
"yaxis": {
|
||||
"align": false,
|
||||
"alignLevel": null
|
||||
}
|
||||
},
|
||||
{
|
||||
"aliasColors": {},
|
||||
"bars": true,
|
||||
@@ -587,10 +670,10 @@
|
||||
"datasource": "gdev-postgres-ds-tests",
|
||||
"fill": 2,
|
||||
"gridPos": {
|
||||
"h": 9,
|
||||
"w": 8,
|
||||
"h": 6,
|
||||
"w": 6,
|
||||
"x": 0,
|
||||
"y": 16
|
||||
"y": 13
|
||||
},
|
||||
"id": 16,
|
||||
"legend": {
|
||||
@@ -670,10 +753,10 @@
|
||||
"datasource": "gdev-postgres-ds-tests",
|
||||
"fill": 2,
|
||||
"gridPos": {
|
||||
"h": 9,
|
||||
"w": 8,
|
||||
"x": 8,
|
||||
"y": 16
|
||||
"h": 6,
|
||||
"w": 6,
|
||||
"x": 6,
|
||||
"y": 13
|
||||
},
|
||||
"id": 12,
|
||||
"legend": {
|
||||
@@ -753,10 +836,10 @@
|
||||
"datasource": "gdev-postgres-ds-tests",
|
||||
"fill": 2,
|
||||
"gridPos": {
|
||||
"h": 9,
|
||||
"w": 8,
|
||||
"x": 16,
|
||||
"y": 16
|
||||
"h": 6,
|
||||
"w": 6,
|
||||
"x": 12,
|
||||
"y": 13
|
||||
},
|
||||
"id": 13,
|
||||
"legend": {
|
||||
@@ -828,6 +911,89 @@
|
||||
"alignLevel": null
|
||||
}
|
||||
},
|
||||
{
|
||||
"aliasColors": {},
|
||||
"bars": true,
|
||||
"dashLength": 10,
|
||||
"dashes": false,
|
||||
"datasource": "gdev-postgres-ds-tests",
|
||||
"fill": 2,
|
||||
"gridPos": {
|
||||
"h": 6,
|
||||
"w": 6,
|
||||
"x": 18,
|
||||
"y": 13
|
||||
},
|
||||
"id": 37,
|
||||
"legend": {
|
||||
"avg": false,
|
||||
"current": false,
|
||||
"max": false,
|
||||
"min": false,
|
||||
"show": true,
|
||||
"total": false,
|
||||
"values": false
|
||||
},
|
||||
"lines": false,
|
||||
"linewidth": 2,
|
||||
"links": [],
|
||||
"nullPointMode": "null",
|
||||
"percentage": false,
|
||||
"pointradius": 3,
|
||||
"points": false,
|
||||
"renderer": "flot",
|
||||
"seriesOverrides": [],
|
||||
"spaceLength": 10,
|
||||
"stack": false,
|
||||
"steppedLine": true,
|
||||
"targets": [
|
||||
{
|
||||
"alias": "",
|
||||
"format": "time_series",
|
||||
"rawSql": "SELECT $__timeGroupAlias(time, '$summarize', previous), sum(value) as value FROM metric WHERE $__timeFilter(time) GROUP BY 1 ORDER BY 1",
|
||||
"refId": "A"
|
||||
}
|
||||
],
|
||||
"thresholds": [],
|
||||
"timeFrom": null,
|
||||
"timeShift": null,
|
||||
"title": "Metrics - timeGroup macro $summarize with fill(previous)",
|
||||
"tooltip": {
|
||||
"shared": true,
|
||||
"sort": 0,
|
||||
"value_type": "individual"
|
||||
},
|
||||
"type": "graph",
|
||||
"xaxis": {
|
||||
"buckets": null,
|
||||
"mode": "time",
|
||||
"name": null,
|
||||
"show": true,
|
||||
"values": []
|
||||
},
|
||||
"yaxes": [
|
||||
{
|
||||
"format": "short",
|
||||
"label": null,
|
||||
"logBase": 1,
|
||||
"max": null,
|
||||
"min": null,
|
||||
"show": true
|
||||
},
|
||||
{
|
||||
"format": "short",
|
||||
"label": null,
|
||||
"logBase": 1,
|
||||
"max": null,
|
||||
"min": null,
|
||||
"show": true
|
||||
}
|
||||
],
|
||||
"yaxis": {
|
||||
"align": false,
|
||||
"alignLevel": null
|
||||
}
|
||||
},
|
||||
{
|
||||
"aliasColors": {},
|
||||
"bars": false,
|
||||
@@ -839,7 +1005,7 @@
|
||||
"h": 8,
|
||||
"w": 12,
|
||||
"x": 0,
|
||||
"y": 25
|
||||
"y": 19
|
||||
},
|
||||
"id": 27,
|
||||
"legend": {
|
||||
@@ -926,7 +1092,7 @@
|
||||
"h": 8,
|
||||
"w": 12,
|
||||
"x": 12,
|
||||
"y": 25
|
||||
"y": 19
|
||||
},
|
||||
"id": 5,
|
||||
"legend": {
|
||||
@@ -1011,7 +1177,7 @@
|
||||
"h": 8,
|
||||
"w": 12,
|
||||
"x": 0,
|
||||
"y": 33
|
||||
"y": 27
|
||||
},
|
||||
"id": 4,
|
||||
"legend": {
|
||||
@@ -1098,7 +1264,7 @@
|
||||
"h": 8,
|
||||
"w": 12,
|
||||
"x": 12,
|
||||
"y": 33
|
||||
"y": 27
|
||||
},
|
||||
"id": 28,
|
||||
"legend": {
|
||||
@@ -1183,7 +1349,7 @@
|
||||
"h": 8,
|
||||
"w": 12,
|
||||
"x": 0,
|
||||
"y": 41
|
||||
"y": 35
|
||||
},
|
||||
"id": 19,
|
||||
"legend": {
|
||||
@@ -1270,7 +1436,7 @@
|
||||
"h": 8,
|
||||
"w": 12,
|
||||
"x": 12,
|
||||
"y": 41
|
||||
"y": 35
|
||||
},
|
||||
"id": 18,
|
||||
"legend": {
|
||||
@@ -1355,7 +1521,7 @@
|
||||
"h": 8,
|
||||
"w": 12,
|
||||
"x": 0,
|
||||
"y": 49
|
||||
"y": 43
|
||||
},
|
||||
"id": 17,
|
||||
"legend": {
|
||||
@@ -1442,7 +1608,7 @@
|
||||
"h": 8,
|
||||
"w": 12,
|
||||
"x": 12,
|
||||
"y": 49
|
||||
"y": 43
|
||||
},
|
||||
"id": 20,
|
||||
"legend": {
|
||||
@@ -1527,7 +1693,7 @@
|
||||
"h": 8,
|
||||
"w": 12,
|
||||
"x": 0,
|
||||
"y": 57
|
||||
"y": 51
|
||||
},
|
||||
"id": 14,
|
||||
"legend": {
|
||||
@@ -1615,7 +1781,7 @@
|
||||
"h": 8,
|
||||
"w": 12,
|
||||
"x": 12,
|
||||
"y": 57
|
||||
"y": 51
|
||||
},
|
||||
"id": 15,
|
||||
"legend": {
|
||||
@@ -1702,7 +1868,7 @@
|
||||
"h": 8,
|
||||
"w": 12,
|
||||
"x": 0,
|
||||
"y": 65
|
||||
"y": 59
|
||||
},
|
||||
"id": 25,
|
||||
"legend": {
|
||||
@@ -1790,7 +1956,7 @@
|
||||
"h": 8,
|
||||
"w": 12,
|
||||
"x": 12,
|
||||
"y": 65
|
||||
"y": 59
|
||||
},
|
||||
"id": 22,
|
||||
"legend": {
|
||||
@@ -1877,7 +2043,7 @@
|
||||
"h": 8,
|
||||
"w": 12,
|
||||
"x": 0,
|
||||
"y": 73
|
||||
"y": 67
|
||||
},
|
||||
"id": 21,
|
||||
"legend": {
|
||||
@@ -1965,7 +2131,7 @@
|
||||
"h": 8,
|
||||
"w": 12,
|
||||
"x": 12,
|
||||
"y": 73
|
||||
"y": 67
|
||||
},
|
||||
"id": 26,
|
||||
"legend": {
|
||||
@@ -2052,7 +2218,7 @@
|
||||
"h": 8,
|
||||
"w": 12,
|
||||
"x": 0,
|
||||
"y": 81
|
||||
"y": 75
|
||||
},
|
||||
"id": 23,
|
||||
"legend": {
|
||||
@@ -2140,7 +2306,7 @@
|
||||
"h": 8,
|
||||
"w": 12,
|
||||
"x": 12,
|
||||
"y": 81
|
||||
"y": 75
|
||||
},
|
||||
"id": 24,
|
||||
"legend": {
|
||||
@@ -2352,6 +2518,5 @@
|
||||
"timezone": "",
|
||||
"title": "Datasource tests - Postgres (unittest)",
|
||||
"uid": "vHQdlVziz",
|
||||
"version": 1
|
||||
}
|
||||
|
||||
"version": 9
|
||||
}
|
||||
@@ -115,7 +115,7 @@ The Elasticsearch data source supports two types of queries you can use in the *

Query | Description
------------ | -------------
*{"find": "fields", "type": "keyword"} | Returns a list of field names with the index type `keyword`.
*{"find": "fields", "type": "keyword"}* | Returns a list of field names with the index type `keyword`.
*{"find": "terms", "field": "@hostname", "size": 1000}* | Returns a list of values for a field using term aggregation. Query will use the current dashboard time range as the time range for the query.
*{"find": "terms", "field": "@hostname", "query": '<lucene query>'}* | Returns a list of values for a field using term aggregation and a specified lucene query filter. Query will use the current dashboard time range as the time range for the query.
@@ -81,7 +81,9 @@ Macro example | Description
*$__timeFrom()* | Will be replaced by the start of the currently active time selection. For example, *'2017-04-21T05:01:17Z'*
*$__timeTo()* | Will be replaced by the end of the currently active time selection. For example, *'2017-04-21T05:06:17Z'*
*$__timeGroup(dateColumn,'5m'[, fillvalue])* | Will be replaced by an expression usable in GROUP BY clause. Providing a *fillValue* of *NULL* or *floating value* will automatically fill empty series in timerange with that value. <br/>For example, *CAST(ROUND(DATEDIFF(second, '1970-01-01', time_column)/300.0, 0) as bigint)\*300*.
*$__timeGroup(dateColumn,'5m', 0)* | Same as above but with a fill parameter so all null values will be converted to the fill value (all null values would be set to zero using this example).
*$__timeGroup(dateColumn,'5m', 0)* | Same as above but with a fill parameter so missing points in that series will be added by grafana and 0 will be used as value.
*$__timeGroup(dateColumn,'5m', NULL)* | Same as above but NULL will be used as value for missing points.
*$__timeGroup(dateColumn,'5m', previous)* | Same as above but the previous value in that series will be used as fill value; if no value has been seen yet, NULL will be used (only available in Grafana 5.3+).
*$__timeGroupAlias(dateColumn,'5m')* | Will be replaced identically to $__timeGroup but with an added column alias (only available in Grafana 5.3+).
*$__unixEpochFilter(dateColumn)* | Will be replaced by a time range filter using the specified column name with times represented as unix timestamp. For example, *dateColumn > 1494410783 AND dateColumn < 1494497183*
*$__unixEpochFrom()* | Will be replaced by the start of the currently active time selection as unix timestamp. For example, *1494410783*
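For a concrete sketch of the new fill mode, the MSSQL test dashboard in this change runs a query along these lines against a `metric` table with `time` and `value` columns (the table and column names are just the ones used by the gdev test data source):

```sql
-- Group values into 5-minute buckets; gaps are filled with the previously seen value.
SELECT
  $__timeGroupAlias(time, '5m', previous),
  avg(value) AS value
FROM metric
WHERE $__timeFilter(time)
GROUP BY $__timeGroup(time, '5m')
ORDER BY 1
```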
@@ -64,7 +64,9 @@ Macro example | Description
*$__timeFrom()* | Will be replaced by the start of the currently active time selection. For example, *'2017-04-21T05:01:17Z'*
*$__timeTo()* | Will be replaced by the end of the currently active time selection. For example, *'2017-04-21T05:06:17Z'*
*$__timeGroup(dateColumn,'5m')* | Will be replaced by an expression usable in GROUP BY clause. For example, *cast(cast(UNIX_TIMESTAMP(dateColumn)/(300) as signed)*300 as signed),*
*$__timeGroup(dateColumn,'5m',0)* | Same as above but with a fill parameter so all null values will be converted to the fill value (all null values would be set to zero using this example).
*$__timeGroup(dateColumn,'5m', 0)* | Same as above but with a fill parameter so missing points in that series will be added by grafana and 0 will be used as value.
*$__timeGroup(dateColumn,'5m', NULL)* | Same as above but NULL will be used as value for missing points.
*$__timeGroup(dateColumn,'5m', previous)* | Same as above but the previous value in that series will be used as fill value; if no value has been seen yet, NULL will be used (only available in Grafana 5.3+).
*$__timeGroupAlias(dateColumn,'5m')* | Will be replaced identically to $__timeGroup but with an added column alias (only available in Grafana 5.3+).
*$__unixEpochFilter(dateColumn)* | Will be replaced by a time range filter using the specified column name with times represented as unix timestamp. For example, *dateColumn > 1494410783 AND dateColumn < 1494497183*
*$__unixEpochFrom()* | Will be replaced by the start of the currently active time selection as unix timestamp. For example, *1494410783*
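The MySQL test dashboard in this change exercises the same fill mode through a template variable; a sketch against the gdev `metric` test table, where `$summarize` is assumed to be a dashboard interval variable (e.g. 1m, 5m, 10m):

```sql
-- '$summarize' is a dashboard template variable; missing buckets are filled
-- with the previously seen value.
SELECT
  $__timeGroupAlias(time, '$summarize', previous),
  sum(value) AS value
FROM metric
WHERE $__timeFilter(time)
GROUP BY 1
ORDER BY 1
```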
@@ -61,7 +61,9 @@ Macro example | Description
*$__timeFrom()* | Will be replaced by the start of the currently active time selection. For example, *'2017-04-21T05:01:17Z'*
*$__timeTo()* | Will be replaced by the end of the currently active time selection. For example, *'2017-04-21T05:06:17Z'*
*$__timeGroup(dateColumn,'5m')* | Will be replaced by an expression usable in GROUP BY clause. For example, *(extract(epoch from dateColumn)/300)::bigint*300*
*$__timeGroup(dateColumn,'5m', 0)* | Same as above but with a fill parameter so all null values will be converted to the fill value (all null values would be set to zero using this example).
*$__timeGroup(dateColumn,'5m', 0)* | Same as above but with a fill parameter so missing points in that series will be added by grafana and 0 will be used as value.
*$__timeGroup(dateColumn,'5m', NULL)* | Same as above but NULL will be used as value for missing points.
*$__timeGroup(dateColumn,'5m', previous)* | Same as above but the previous value in that series will be used as fill value; if no value has been seen yet, NULL will be used (only available in Grafana 5.3+).
*$__timeGroupAlias(dateColumn,'5m')* | Will be replaced identically to $__timeGroup but with an added column alias (only available in Grafana 5.3+).
*$__unixEpochFilter(dateColumn)* | Will be replaced by a time range filter using the specified column name with times represented as unix timestamp. For example, *dateColumn >= 1494410783 AND dateColumn <= 1494497183*
*$__unixEpochFrom()* | Will be replaced by the start of the currently active time selection as unix timestamp. For example, *1494410783*
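Because Postgres $__timeGroup no longer adds a time column alias (see the breaking-change note in the changelog above), the Postgres test dashboard in this change uses $__timeGroupAlias instead; a sketch against the gdev `metric` test table:

```sql
-- $__timeGroupAlias adds the column alias itself; with plain $__timeGroup you would
-- now have to append an alias (e.g. AS "time") manually on Postgres.
SELECT
  $__timeGroupAlias(time, '5m', previous),
  avg(value) AS value
FROM metric
WHERE $__timeFilter(time)
GROUP BY 1
ORDER BY 1
```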
@@ -181,7 +181,7 @@ embedded database (included in the main Grafana binary).

### url

Use either URL or or the other fields below to configure the database
Use either URL or the other fields below to configure the database
Example: `mysql://user:secret@host:port/database`

### type
@@ -195,9 +195,9 @@ will be stored.

### host

Only applicable to MySQL or Postgres. Includes IP or hostname and port.
Only applicable to MySQL or Postgres. Includes IP or hostname and port or in case of unix sockets the path to it.
For example, for MySQL running on the same host as Grafana: `host =
127.0.0.1:3306`
127.0.0.1:3306` or with unix sockets: `host = /var/run/mysqld/mysqld.sock`

### name
@@ -697,9 +697,9 @@ session provider you have configured.

- **file:** session file path, e.g. `data/sessions`
- **mysql:** go-sql-driver/mysql dsn config string, e.g. `user:password@tcp(127.0.0.1:3306)/database_name`
- **postgres:** ex: user=a password=b host=localhost port=5432 dbname=c sslmode=verify-full
- **memcache:** ex: 127.0.0.1:11211
- **redis:** ex: `addr=127.0.0.1:6379,pool_size=100,prefix=grafana`
- **postgres:** ex: `user=a password=b host=localhost port=5432 dbname=c sslmode=verify-full`
- **memcache:** ex: `127.0.0.1:11211`
- **redis:** ex: `addr=127.0.0.1:6379,pool_size=100,prefix=grafana`. For unix socket, use for example: `network=unix,addr=/var/run/redis/redis.sock,pool_size=100,db=grafana`

Valid Postgres `sslmode` values are `disable`, `require`, `verify-ca`, and `verify-full` (default).
@@ -166,6 +166,7 @@
"mousetrap-global-bind": "^1.1.0",
"prismjs": "^1.6.0",
"prop-types": "^15.6.0",
"rc-cascader": "^0.14.0",
"react": "^16.2.0",
"react-dom": "^16.2.0",
"react-grid-layout": "0.16.6",
@@ -187,4 +188,4 @@
"resolutions": {
"caniuse-db": "1.0.30000772"
}
}
}
packaging/docker/Dockerfile (new file, 51 lines)
@@ -0,0 +1,51 @@
|
||||
FROM debian:stretch-slim
|
||||
|
||||
ARG GRAFANA_TGZ="grafana-latest.linux-x64.tar.gz"
|
||||
|
||||
RUN apt-get update && apt-get install -qq -y tar && \
|
||||
apt-get autoremove -y && \
|
||||
rm -rf /var/lib/apt/lists/*
|
||||
|
||||
COPY ${GRAFANA_TGZ} /tmp/grafana.tar.gz
|
||||
|
||||
RUN mkdir /tmp/grafana && tar xfvz /tmp/grafana.tar.gz --strip-components=1 -C /tmp/grafana
|
||||
|
||||
FROM debian:stretch-slim
|
||||
|
||||
ARG GF_UID="472"
|
||||
ARG GF_GID="472"
|
||||
|
||||
ENV PATH=/usr/share/grafana/bin:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin \
|
||||
GF_PATHS_CONFIG="/etc/grafana/grafana.ini" \
|
||||
GF_PATHS_DATA="/var/lib/grafana" \
|
||||
GF_PATHS_HOME="/usr/share/grafana" \
|
||||
GF_PATHS_LOGS="/var/log/grafana" \
|
||||
GF_PATHS_PLUGINS="/var/lib/grafana/plugins" \
|
||||
GF_PATHS_PROVISIONING="/etc/grafana/provisioning"
|
||||
|
||||
RUN apt-get update && apt-get install -qq -y libfontconfig ca-certificates && \
|
||||
apt-get autoremove -y && \
|
||||
rm -rf /var/lib/apt/lists/*
|
||||
|
||||
COPY --from=0 /tmp/grafana "$GF_PATHS_HOME"
|
||||
|
||||
RUN mkdir -p "$GF_PATHS_HOME/.aws" && \
|
||||
groupadd -r -g $GF_GID grafana && \
|
||||
useradd -r -u $GF_UID -g grafana grafana && \
|
||||
mkdir -p "$GF_PATHS_PROVISIONING/datasources" \
|
||||
"$GF_PATHS_PROVISIONING/dashboards" \
|
||||
"$GF_PATHS_LOGS" \
|
||||
"$GF_PATHS_PLUGINS" \
|
||||
"$GF_PATHS_DATA" && \
|
||||
cp "$GF_PATHS_HOME/conf/sample.ini" "$GF_PATHS_CONFIG" && \
|
||||
cp "$GF_PATHS_HOME/conf/ldap.toml" /etc/grafana/ldap.toml && \
|
||||
chown -R grafana:grafana "$GF_PATHS_DATA" "$GF_PATHS_HOME/.aws" "$GF_PATHS_LOGS" "$GF_PATHS_PLUGINS" && \
|
||||
chmod 777 "$GF_PATHS_DATA" "$GF_PATHS_HOME/.aws" "$GF_PATHS_LOGS" "$GF_PATHS_PLUGINS"
|
||||
|
||||
EXPOSE 3000
|
||||
|
||||
COPY ./run.sh /run.sh
|
||||
|
||||
USER grafana
|
||||
WORKDIR /
|
||||
ENTRYPOINT [ "/run.sh" ]
|
||||
packaging/docker/README.md (new file, 45 lines)
@@ -0,0 +1,45 @@
|
||||
# Grafana Docker image
|
||||
|
||||
[](https://circleci.com/gh/grafana/grafana-docker)
|
||||
|
||||
## Running your Grafana container
|
||||
|
||||
Start your container binding the external port `3000`.
|
||||
|
||||
```bash
|
||||
docker run -d --name=grafana -p 3000:3000 grafana/grafana
|
||||
```
|
||||
|
||||
Try it out, default admin user is admin/admin.
|
||||
|
||||
## How to use the container
|
||||
|
||||
Further documentation can be found at http://docs.grafana.org/installation/docker/
|
||||
|
||||
## Changelog
|
||||
|
||||
### v5.1.5, v5.2.0-beta2
|
||||
* Fix: config keys ending with _FILE are not respected [#170](https://github.com/grafana/grafana-docker/issues/170)
|
||||
|
||||
### v5.2.0-beta1
|
||||
* Support for Docker Secrets
|
||||
|
||||
### v5.1.0
|
||||
* Major restructuring of the container
|
||||
* Usage of `chown` removed
|
||||
* File permissions incompatibility with previous versions
|
||||
* user id changed from 104 to 472
|
||||
* group id changed from 107 to 472
|
||||
* Runs as the grafana user by default (instead of root)
|
||||
* All default volumes removed
|
||||
|
||||
### v4.2.0
|
||||
* Plugins are now installed into ${GF_PATHS_PLUGINS}
|
||||
* Building the container now requires a full url to the deb package instead of just version
|
||||
* Fixes bug caused by installing multiple plugins
|
||||
|
||||
### v4.0.0-beta2
|
||||
* Plugins dir (`/var/lib/grafana/plugins`) is no longer a separate volume
|
||||
|
||||
### v3.1.1
|
||||
* Make it possible to install specific plugin version https://github.com/grafana/grafana-docker/issues/59#issuecomment-260584026
|
||||
packaging/docker/build-deploy.sh (new executable file, 13 lines)
@@ -0,0 +1,13 @@
|
||||
#!/bin/sh
|
||||
set -e
|
||||
|
||||
_grafana_version=$1
|
||||
./build.sh "$_grafana_version"
|
||||
docker login -u "$DOCKER_USER" -p "$DOCKER_PASS"
|
||||
|
||||
./push_to_docker_hub.sh "$_grafana_version"
|
||||
|
||||
if echo "$_grafana_version" | grep -q "^master-"; then
|
||||
apk add --no-cache curl
|
||||
./deploy_to_k8s.sh "grafana/grafana-dev:$_grafana_version"
|
||||
fi
|
||||
packaging/docker/build.sh (new executable file, 25 lines)
@@ -0,0 +1,25 @@
|
||||
#!/bin/sh
|
||||
|
||||
_grafana_tag=$1
|
||||
|
||||
# If the tag starts with v, treat this as an official release
|
||||
if echo "$_grafana_tag" | grep -q "^v"; then
|
||||
_grafana_version=$(echo "${_grafana_tag}" | cut -d "v" -f 2)
|
||||
_docker_repo=${2:-grafana/grafana}
|
||||
else
|
||||
_grafana_version=$_grafana_tag
|
||||
_docker_repo=${2:-grafana/grafana-dev}
|
||||
fi
|
||||
|
||||
echo "Building ${_docker_repo}:${_grafana_version}"
|
||||
|
||||
docker build \
|
||||
--tag "${_docker_repo}:${_grafana_version}" \
|
||||
--no-cache=true .
|
||||
|
||||
# Tag as 'latest' for official release; otherwise tag as grafana/grafana:master
|
||||
if echo "$_grafana_tag" | grep -q "^v"; then
|
||||
docker tag "${_docker_repo}:${_grafana_version}" "${_docker_repo}:latest"
|
||||
else
|
||||
docker tag "${_docker_repo}:${_grafana_version}" "grafana/grafana:master"
|
||||
fi
|
||||
packaging/docker/custom/Dockerfile (new file, 16 lines)
@@ -0,0 +1,16 @@
|
||||
ARG GRAFANA_VERSION="latest"
|
||||
|
||||
FROM grafana/grafana:${GRAFANA_VERSION}
|
||||
|
||||
USER grafana
|
||||
|
||||
ARG GF_INSTALL_PLUGINS=""
|
||||
|
||||
RUN if [ ! -z "${GF_INSTALL_PLUGINS}" ]; then \
|
||||
OLDIFS=$IFS; \
|
||||
IFS=','; \
|
||||
for plugin in ${GF_INSTALL_PLUGINS}; do \
|
||||
IFS=$OLDIFS; \
|
||||
grafana-cli --pluginsDir "$GF_PATHS_PLUGINS" plugins install ${plugin}; \
|
||||
done; \
|
||||
fi
|
||||
packaging/docker/deploy_to_k8s.sh (new executable file, 6 lines)
@@ -0,0 +1,6 @@
|
||||
#!/bin/sh
|
||||
|
||||
curl -s --header "Content-Type: application/json" \
|
||||
--data "{\"build_parameters\": {\"CIRCLE_JOB\": \"deploy\", \"IMAGE_NAMES\": \"$1\"}}" \
|
||||
--request POST \
|
||||
https://circleci.com/api/v1.1/project/github/raintank/deployment_tools/tree/master?circle-token=$CIRCLE_TOKEN
|
||||
packaging/docker/push_to_docker_hub.sh (new executable file, 24 lines)
@@ -0,0 +1,24 @@
|
||||
#!/bin/sh
|
||||
set -e
|
||||
|
||||
_grafana_tag=$1
|
||||
|
||||
# If the tag starts with v, treat this as an official release
|
||||
if echo "$_grafana_tag" | grep -q "^v"; then
|
||||
_grafana_version=$(echo "${_grafana_tag}" | cut -d "v" -f 2)
|
||||
_docker_repo=${2:-grafana/grafana}
|
||||
else
|
||||
_grafana_version=$_grafana_tag
|
||||
_docker_repo=${2:-grafana/grafana-dev}
|
||||
fi
|
||||
|
||||
echo "pushing ${_docker_repo}:${_grafana_version}"
|
||||
docker push "${_docker_repo}:${_grafana_version}"
|
||||
|
||||
if echo "$_grafana_tag" | grep -q "^v"; then
|
||||
echo "pushing ${_docker_repo}:latest"
|
||||
docker push "${_docker_repo}:latest"
|
||||
else
|
||||
echo "pushing grafana/grafana:master"
|
||||
docker push grafana/grafana:master
|
||||
fi
|
||||
packaging/docker/run.sh (new executable file, 82 lines)
@@ -0,0 +1,82 @@
|
||||
#!/bin/bash -e
|
||||
|
||||
PERMISSIONS_OK=0
|
||||
|
||||
if [ ! -r "$GF_PATHS_CONFIG" ]; then
|
||||
echo "GF_PATHS_CONFIG='$GF_PATHS_CONFIG' is not readable."
|
||||
PERMISSIONS_OK=1
|
||||
fi
|
||||
|
||||
if [ ! -w "$GF_PATHS_DATA" ]; then
|
||||
echo "GF_PATHS_DATA='$GF_PATHS_DATA' is not writable."
|
||||
PERMISSIONS_OK=1
|
||||
fi
|
||||
|
||||
if [ ! -r "$GF_PATHS_HOME" ]; then
|
||||
echo "GF_PATHS_HOME='$GF_PATHS_HOME' is not readable."
|
||||
PERMISSIONS_OK=1
|
||||
fi
|
||||
|
||||
if [ $PERMISSIONS_OK -eq 1 ]; then
|
||||
echo "You may have issues with file permissions, more information here: http://docs.grafana.org/installation/docker/#migration-from-a-previous-version-of-the-docker-container-to-5-1-or-later"
|
||||
fi
|
||||
|
||||
if [ ! -d "$GF_PATHS_PLUGINS" ]; then
|
||||
mkdir "$GF_PATHS_PLUGINS"
|
||||
fi
|
||||
|
||||
if [ ! -z ${GF_AWS_PROFILES+x} ]; then
|
||||
> "$GF_PATHS_HOME/.aws/credentials"
|
||||
|
||||
for profile in ${GF_AWS_PROFILES}; do
|
||||
access_key_varname="GF_AWS_${profile}_ACCESS_KEY_ID"
|
||||
secret_key_varname="GF_AWS_${profile}_SECRET_ACCESS_KEY"
|
||||
region_varname="GF_AWS_${profile}_REGION"
|
||||
|
||||
if [ ! -z "${!access_key_varname}" -a ! -z "${!secret_key_varname}" ]; then
|
||||
echo "[${profile}]" >> "$GF_PATHS_HOME/.aws/credentials"
|
||||
echo "aws_access_key_id = ${!access_key_varname}" >> "$GF_PATHS_HOME/.aws/credentials"
|
||||
echo "aws_secret_access_key = ${!secret_key_varname}" >> "$GF_PATHS_HOME/.aws/credentials"
|
||||
if [ ! -z "${!region_varname}" ]; then
|
||||
echo "region = ${!region_varname}" >> "$GF_PATHS_HOME/.aws/credentials"
|
||||
fi
|
||||
fi
|
||||
done
|
||||
|
||||
chmod 600 "$GF_PATHS_HOME/.aws/credentials"
|
||||
fi
|
||||
|
||||
# Convert all environment variables with names ending in __FILE into the content of
|
||||
# the file that they point at and use the name without the trailing __FILE.
|
||||
# This can be used to carry in Docker secrets.
|
||||
for VAR_NAME in $(env | grep '^GF_[^=]\+__FILE=.\+' | sed -r "s/([^=]*)__FILE=.*/\1/g"); do
|
||||
VAR_NAME_FILE="$VAR_NAME"__FILE
|
||||
if [ "${!VAR_NAME}" ]; then
|
||||
echo >&2 "ERROR: Both $VAR_NAME and $VAR_NAME_FILE are set (but are exclusive)"
|
||||
exit 1
|
||||
fi
|
||||
echo "Getting secret $VAR_NAME from ${!VAR_NAME_FILE}"
|
||||
export "$VAR_NAME"="$(< "${!VAR_NAME_FILE}")"
|
||||
unset "$VAR_NAME_FILE"
|
||||
done
|
||||
|
||||
export HOME="$GF_PATHS_HOME"
|
||||
|
||||
if [ ! -z "${GF_INSTALL_PLUGINS}" ]; then
|
||||
OLDIFS=$IFS
|
||||
IFS=','
|
||||
for plugin in ${GF_INSTALL_PLUGINS}; do
|
||||
IFS=$OLDIFS
|
||||
grafana-cli --pluginsDir "${GF_PATHS_PLUGINS}" plugins install ${plugin}
|
||||
done
|
||||
fi
|
||||
|
||||
exec grafana-server \
|
||||
--homepath="$GF_PATHS_HOME" \
|
||||
--config="$GF_PATHS_CONFIG" \
|
||||
"$@" \
|
||||
cfg:default.log.mode="console" \
|
||||
cfg:default.paths.data="$GF_PATHS_DATA" \
|
||||
cfg:default.paths.logs="$GF_PATHS_LOGS" \
|
||||
cfg:default.paths.plugins="$GF_PATHS_PLUGINS" \
|
||||
cfg:default.paths.provisioning="$GF_PATHS_PROVISIONING"
|
||||
@@ -158,12 +158,26 @@ func UpdateDataSource(c *m.ReqContext, cmd m.UpdateDataSourceCommand) Response {
|
||||
}
|
||||
return Error(500, "Failed to update datasource", err)
|
||||
}
|
||||
ds := convertModelToDtos(cmd.Result)
|
||||
|
||||
query := m.GetDataSourceByIdQuery{
|
||||
Id: cmd.Id,
|
||||
OrgId: c.OrgId,
|
||||
}
|
||||
|
||||
if err := bus.Dispatch(&query); err != nil {
|
||||
if err == m.ErrDataSourceNotFound {
|
||||
return Error(404, "Data source not found", nil)
|
||||
}
|
||||
return Error(500, "Failed to query datasources", err)
|
||||
}
|
||||
|
||||
dtos := convertModelToDtos(query.Result)
|
||||
|
||||
return JSON(200, util.DynMap{
|
||||
"message": "Datasource updated",
|
||||
"id": cmd.Id,
|
||||
"name": cmd.Name,
|
||||
"datasource": ds,
|
||||
"datasource": dtos,
|
||||
})
|
||||
}
|
||||
|
||||
|
||||
@@ -99,9 +99,13 @@ func (m *msSqlMacroEngine) evaluateMacro(name string, args []string) (string, er
		if len(args) == 3 {
			m.query.Model.Set("fill", true)
			m.query.Model.Set("fillInterval", interval.Seconds())
			if args[2] == "NULL" {
				m.query.Model.Set("fillNull", true)
			} else {
			switch args[2] {
			case "NULL":
				m.query.Model.Set("fillMode", "null")
			case "previous":
				m.query.Model.Set("fillMode", "previous")
			default:
				m.query.Model.Set("fillMode", "value")
				floatVal, err := strconv.ParseFloat(args[2], 64)
				if err != nil {
					return "", fmt.Errorf("error parsing fill value %v", args[2])

@@ -76,12 +76,25 @@ func TestMacroEngine(t *testing.T) {
			_, err := engine.Interpolate(query, timeRange, "GROUP BY $__timeGroup(time_column,'5m', NULL)")

			fill := query.Model.Get("fill").MustBool()
			fillNull := query.Model.Get("fillNull").MustBool()
			fillMode := query.Model.Get("fillMode").MustString()
			fillInterval := query.Model.Get("fillInterval").MustInt()

			So(err, ShouldBeNil)
			So(fill, ShouldBeTrue)
			So(fillNull, ShouldBeTrue)
			So(fillMode, ShouldEqual, "null")
			So(fillInterval, ShouldEqual, 5*time.Minute.Seconds())
		})

		Convey("interpolate __timeGroup function with fill (value = previous)", func() {
			_, err := engine.Interpolate(query, timeRange, "GROUP BY $__timeGroup(time_column,'5m', previous)")

			fill := query.Model.Get("fill").MustBool()
			fillMode := query.Model.Get("fillMode").MustString()
			fillInterval := query.Model.Get("fillInterval").MustInt()

			So(err, ShouldBeNil)
			So(fill, ShouldBeTrue)
			So(fillMode, ShouldEqual, "previous")
			So(fillInterval, ShouldEqual, 5*time.Minute.Seconds())
		})

@@ -94,9 +94,13 @@ func (m *mySqlMacroEngine) evaluateMacro(name string, args []string) (string, er
		if len(args) == 3 {
			m.query.Model.Set("fill", true)
			m.query.Model.Set("fillInterval", interval.Seconds())
			if args[2] == "NULL" {
				m.query.Model.Set("fillNull", true)
			} else {
			switch args[2] {
			case "NULL":
				m.query.Model.Set("fillMode", "null")
			case "previous":
				m.query.Model.Set("fillMode", "previous")
			default:
				m.query.Model.Set("fillMode", "value")
				floatVal, err := strconv.ParseFloat(args[2], 64)
				if err != nil {
					return "", fmt.Errorf("error parsing fill value %v", args[2])

@@ -295,7 +295,7 @@ func TestMySQL(t *testing.T) {

		})

		Convey("When doing a metric query using timeGroup with float fill enabled", func() {
		Convey("When doing a metric query using timeGroup with value fill enabled", func() {
			query := &tsdb.TsdbQuery{
				Queries: []*tsdb.Query{
					{
@@ -320,6 +320,35 @@ func TestMySQL(t *testing.T) {
			points := queryResult.Series[0].Points
			So(points[3][0].Float64, ShouldEqual, 1.5)
		})

		Convey("When doing a metric query using timeGroup with previous fill enabled", func() {
			query := &tsdb.TsdbQuery{
				Queries: []*tsdb.Query{
					{
						Model: simplejson.NewFromAny(map[string]interface{}{
							"rawSql": "SELECT $__timeGroup(time, '5m', previous) as time_sec, avg(value) as value FROM metric GROUP BY 1 ORDER BY 1",
							"format": "time_series",
						}),
						RefId: "A",
					},
				},
				TimeRange: &tsdb.TimeRange{
					From: fmt.Sprintf("%v", fromStart.Unix()*1000),
					To:   fmt.Sprintf("%v", fromStart.Add(34*time.Minute).Unix()*1000),
				},
			}

			resp, err := endpoint.Query(nil, nil, query)
			So(err, ShouldBeNil)
			queryResult := resp.Results["A"]
			So(queryResult.Error, ShouldBeNil)

			points := queryResult.Series[0].Points
			So(points[2][0].Float64, ShouldEqual, 15.0)
			So(points[3][0].Float64, ShouldEqual, 15.0)
			So(points[6][0].Float64, ShouldEqual, 20.0)
		})

	})

	Convey("Given a table with metrics having multiple values and measurements", func() {

@@ -116,9 +116,13 @@ func (m *postgresMacroEngine) evaluateMacro(name string, args []string) (string,
		if len(args) == 3 {
			m.query.Model.Set("fill", true)
			m.query.Model.Set("fillInterval", interval.Seconds())
			if args[2] == "NULL" {
				m.query.Model.Set("fillNull", true)
			} else {
			switch args[2] {
			case "NULL":
				m.query.Model.Set("fillMode", "null")
			case "previous":
				m.query.Model.Set("fillMode", "previous")
			default:
				m.query.Model.Set("fillMode", "value")
				floatVal, err := strconv.ParseFloat(args[2], 64)
				if err != nil {
					return "", fmt.Errorf("error parsing fill value %v", args[2])

@@ -276,7 +276,7 @@ func TestPostgres(t *testing.T) {

		})

		Convey("When doing a metric query using timeGroup with float fill enabled", func() {
		Convey("When doing a metric query using timeGroup with value fill enabled", func() {
			query := &tsdb.TsdbQuery{
				Queries: []*tsdb.Query{
					{
@@ -303,6 +303,34 @@ func TestPostgres(t *testing.T) {
			})
		})

		Convey("When doing a metric query using timeGroup with previous fill enabled", func() {
			query := &tsdb.TsdbQuery{
				Queries: []*tsdb.Query{
					{
						Model: simplejson.NewFromAny(map[string]interface{}{
							"rawSql": "SELECT $__timeGroup(time, '5m', previous), avg(value) as value FROM metric GROUP BY 1 ORDER BY 1",
							"format": "time_series",
						}),
						RefId: "A",
					},
				},
				TimeRange: &tsdb.TimeRange{
					From: fmt.Sprintf("%v", fromStart.Unix()*1000),
					To:   fmt.Sprintf("%v", fromStart.Add(34*time.Minute).Unix()*1000),
				},
			}

			resp, err := endpoint.Query(nil, nil, query)
			So(err, ShouldBeNil)
			queryResult := resp.Results["A"]
			So(queryResult.Error, ShouldBeNil)

			points := queryResult.Series[0].Points
			So(points[2][0].Float64, ShouldEqual, 15.0)
			So(points[3][0].Float64, ShouldEqual, 15.0)
			So(points[6][0].Float64, ShouldEqual, 20.0)
		})

	Convey("Given a table with metrics having multiple values and measurements", func() {
		type metric_values struct {
			Time time.Time

@@ -274,9 +274,15 @@ func (e *sqlQueryEndpoint) transformToTimeSeries(query *Query, rows *core.Rows,
	fillMissing := query.Model.Get("fill").MustBool(false)
	var fillInterval float64
	fillValue := null.Float{}
	fillPrevious := false

	if fillMissing {
		fillInterval = query.Model.Get("fillInterval").MustFloat64() * 1000
		if !query.Model.Get("fillNull").MustBool(false) {
		switch query.Model.Get("fillMode").MustString() {
		case "null":
		case "previous":
			fillPrevious = true
		case "value":
			fillValue.Float64 = query.Model.Get("fillValue").MustFloat64()
			fillValue.Valid = true
		}
@@ -352,6 +358,14 @@ func (e *sqlQueryEndpoint) transformToTimeSeries(query *Query, rows *core.Rows,
				intervalStart = series.Points[len(series.Points)-1][1].Float64 + fillInterval
			}

			if fillPrevious {
				if len(series.Points) > 0 {
					fillValue = series.Points[len(series.Points)-1][0]
				} else {
					fillValue.Valid = false
				}
			}

			// align interval start
			intervalStart = math.Floor(intervalStart/fillInterval) * fillInterval

@@ -377,6 +391,14 @@ func (e *sqlQueryEndpoint) transformToTimeSeries(query *Query, rows *core.Rows,
			intervalStart := series.Points[len(series.Points)-1][1].Float64
			intervalEnd := float64(tsdbQuery.TimeRange.MustGetTo().UnixNano() / 1e6)

			if fillPrevious {
				if len(series.Points) > 0 {
					fillValue = series.Points[len(series.Points)-1][0]
				} else {
					fillValue.Valid = false
				}
			}

			// align interval start
			intervalStart = math.Floor(intervalStart/fillInterval) * fillInterval
			for i := intervalStart + fillInterval; i < intervalEnd; i += fillInterval {

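Read together with the macro changes above, the engines now record a fillMode of "null", "previous", or "value", and the SQL query endpoint uses it when padding empty intervals. A small, self-contained Go sketch of the fill-previous behaviour, under simplified types of my own invention rather than the actual Grafana structures:

package main

import "fmt"

// point is a simplified (value, timestampMs) pair; valid=false stands in for a SQL NULL.
type point struct {
	value float64
	valid bool
	ts    float64
}

// fillPrevious pads gaps larger than intervalMs with the last seen value,
// mirroring the fillMode == "previous" branch sketched in the diff above.
func fillPrevious(points []point, intervalMs float64) []point {
	out := []point{}
	for _, p := range points {
		if n := len(out); n > 0 {
			last := out[n-1]
			for ts := last.ts + intervalMs; ts < p.ts; ts += intervalMs {
				out = append(out, point{value: last.value, valid: last.valid, ts: ts})
			}
		}
		out = append(out, p)
	}
	return out
}

func main() {
	series := []point{{15, true, 0}, {20, true, 1500}}
	fmt.Println(fillPrevious(series, 300)) // the gap between 0 and 1500 repeats the value 15
}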
@@ -10,7 +10,7 @@ type UrlQueryReader struct {
}

func NewUrlQueryReader(urlInfo *url.URL) (*UrlQueryReader, error) {
	u, err := url.ParseQuery(urlInfo.String())
	u, err := url.ParseQuery(urlInfo.RawQuery)
	if err != nil {
		return nil, err
	}

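The one-line change above matters because url.ParseQuery expects only the raw query string; feeding it the full URL folds the scheme and path into the first key. A quick Go illustration of the difference:

package main

import (
	"fmt"
	"net/url"
)

func main() {
	u, _ := url.Parse("http://www.abc.com/foo?bar=baz&bar2=baz2")

	// Parsing the whole URL string makes the first key "http://www.abc.com/foo?bar",
	// so a lookup for "bar" comes back empty.
	broken, _ := url.ParseQuery(u.String())
	fmt.Println(broken.Get("bar")) // ""

	// Parsing only RawQuery yields the expected values.
	fixed, _ := url.ParseQuery(u.RawQuery)
	fmt.Println(fixed.Get("bar"), fixed.Get("bar2")) // "baz baz2"
}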
@@ -4,6 +4,7 @@ import (
	"testing"

	. "github.com/smartystreets/goconvey/convey"
	"net/url"
)

func TestUrl(t *testing.T) {
@@ -43,4 +44,30 @@ func TestUrl(t *testing.T) {

		So(result, ShouldEqual, "http://localhost:8080/api/")
	})

	Convey("When joining two urls where lefthand side has a trailing slash and righthand side has preceding slash", t, func() {
		result := JoinUrlFragments("http://localhost:8080/", "/api/")

		So(result, ShouldEqual, "http://localhost:8080/api/")
	})
}

func TestNewUrlQueryReader(t *testing.T) {
	u, _ := url.Parse("http://www.abc.com/foo?bar=baz&bar2=baz2")
	uqr, _ := NewUrlQueryReader(u)

	Convey("when trying to retrieve the first query value", t, func() {
		result := uqr.Get("bar", "foodef")
		So(result, ShouldEqual, "baz")
	})

	Convey("when trying to retrieve the second query value", t, func() {
		result := uqr.Get("bar2", "foodef")
		So(result, ShouldEqual, "baz2")
	})

	Convey("when trying to retrieve from a non-existent key, the default value is returned", t, func() {
		result := uqr.Get("bar3", "foodef")
		So(result, ShouldEqual, "foodef")
	})
}

pkg/util/validation_test.go (new file, 22 lines)
@@ -0,0 +1,22 @@
package util

import (
	"testing"

	. "github.com/smartystreets/goconvey/convey"
)

func TestIsEmail(t *testing.T) {

	Convey("When validating a string that is a valid email", t, func() {
		result := IsEmail("abc@def.com")

		So(result, ShouldEqual, true)
	})

	Convey("When validating a string that is not a valid email", t, func() {
		result := IsEmail("abcdef.com")

		So(result, ShouldEqual, false)
	})
}

@@ -4,6 +4,7 @@ import Select from 'react-select';

import kbn from 'app/core/utils/kbn';
import colors from 'app/core/utils/colors';
import store from 'app/core/store';
import TimeSeries from 'app/core/time_series2';
import { decodePathComponent } from 'app/core/utils/location_util';
import { parse as parseDate } from 'app/core/utils/datemath';
@@ -16,6 +17,8 @@ import Table from './Table';
import TimePicker, { DEFAULT_RANGE } from './TimePicker';
import { ensureQueries, generateQueryKey, hasQuery } from './utils/query';

const MAX_HISTORY_ITEMS = 100;

function makeTimeSeriesList(dataList, options) {
  return dataList.map((seriesData, index) => {
    const datapoints = seriesData.datapoints || [];
@@ -56,6 +59,7 @@ interface IExploreState {
  datasourceLoading: boolean | null;
  datasourceMissing: boolean;
  graphResult: any;
  history: any[];
  initialDatasource?: string;
  latency: number;
  loading: any;
@@ -86,6 +90,7 @@ export class Explore extends React.Component<any, IExploreState> {
      datasourceMissing: false,
      graphResult: null,
      initialDatasource: datasource,
      history: [],
      latency: 0,
      loading: false,
      logsResult: null,
@@ -138,6 +143,7 @@ export class Explore extends React.Component<any, IExploreState> {
    const supportsGraph = datasource.meta.metrics;
    const supportsLogs = datasource.meta.logs;
    const supportsTable = datasource.meta.metrics;
    const datasourceId = datasource.meta.id;
    let datasourceError = null;

    try {
@@ -147,16 +153,20 @@ export class Explore extends React.Component<any, IExploreState> {
      datasourceError = (error && error.statusText) || error;
    }

    const historyKey = `grafana.explore.history.${datasourceId}`;
    const history = store.getObject(historyKey, []);

    this.setState(
      {
        datasource,
        datasourceError,
        history,
        supportsGraph,
        supportsLogs,
        supportsTable,
        datasourceLoading: false,
      },
      () => datasourceError === null && this.handleSubmit()
      () => datasourceError === null && this.onSubmit()
    );
  }

@@ -164,7 +174,7 @@ export class Explore extends React.Component<any, IExploreState> {
    this.el = el;
  };

  handleAddQueryRow = index => {
  onAddQueryRow = index => {
    const { queries } = this.state;
    const nextQueries = [
      ...queries.slice(0, index + 1),
@@ -174,7 +184,7 @@ export class Explore extends React.Component<any, IExploreState> {
    this.setState({ queries: nextQueries });
  };

  handleChangeDatasource = async option => {
  onChangeDatasource = async option => {
    this.setState({
      datasource: null,
      datasourceError: null,
@@ -187,61 +197,85 @@ export class Explore extends React.Component<any, IExploreState> {
    this.setDatasource(datasource);
  };

  handleChangeQuery = (query, index) => {
  onChangeQuery = (value: string, index: number, override?: boolean) => {
    const { queries } = this.state;
    const prevQuery = queries[index];
    const edited = override ? false : prevQuery.query !== value;
    const nextQuery = {
      ...queries[index],
      query,
      edited,
      query: value,
    };
    const nextQueries = [...queries];
    nextQueries[index] = nextQuery;
    this.setState({ queries: nextQueries });
    this.setState({ queries: nextQueries }, override ? () => this.onSubmit() : undefined);
  };

  handleChangeTime = nextRange => {
  onChangeTime = nextRange => {
    const range = {
      from: nextRange.from,
      to: nextRange.to,
    };
    this.setState({ range }, () => this.handleSubmit());
    this.setState({ range }, () => this.onSubmit());
  };

  handleClickCloseSplit = () => {
  onClickClear = () => {
    this.setState({
      graphResult: null,
      logsResult: null,
      queries: ensureQueries(),
      tableResult: null,
    });
  };

  onClickCloseSplit = () => {
    const { onChangeSplit } = this.props;
    if (onChangeSplit) {
      onChangeSplit(false);
    }
  };

  handleClickGraphButton = () => {
  onClickGraphButton = () => {
    this.setState(state => ({ showingGraph: !state.showingGraph }));
  };

  handleClickLogsButton = () => {
  onClickLogsButton = () => {
    this.setState(state => ({ showingLogs: !state.showingLogs }));
  };

  handleClickSplit = () => {
  onClickSplit = () => {
    const { onChangeSplit } = this.props;
    if (onChangeSplit) {
      onChangeSplit(true, this.state);
    }
  };

  handleClickTableButton = () => {
  onClickTableButton = () => {
    this.setState(state => ({ showingTable: !state.showingTable }));
  };

  handleRemoveQueryRow = index => {
  onClickTableCell = (columnKey: string, rowValue: string) => {
    const { datasource, queries } = this.state;
    if (datasource && datasource.modifyQuery) {
      const nextQueries = queries.map(q => ({
        ...q,
        edited: false,
        query: datasource.modifyQuery(q.query, { addFilter: { key: columnKey, value: rowValue } }),
      }));
      this.setState({ queries: nextQueries }, () => this.onSubmit());
    }
  };

  onRemoveQueryRow = index => {
    const { queries } = this.state;
    if (queries.length <= 1) {
      return;
    }
    const nextQueries = [...queries.slice(0, index), ...queries.slice(index + 1)];
    this.setState({ queries: nextQueries }, () => this.handleSubmit());
    this.setState({ queries: nextQueries }, () => this.onSubmit());
  };

  handleSubmit = () => {
  onSubmit = () => {
    const { showingLogs, showingGraph, showingTable, supportsGraph, supportsLogs, supportsTable } = this.state;
    if (showingTable && supportsTable) {
      this.runTableQuery();
@@ -254,6 +288,27 @@ export class Explore extends React.Component<any, IExploreState> {
    }
  };

  onQuerySuccess(datasourceId: string, queries: any[]): void {
    // save queries to history
    let { datasource, history } = this.state;
    if (datasource.meta.id !== datasourceId) {
      // Navigated away, queries did not matter
      return;
    }
    const ts = Date.now();
    queries.forEach(q => {
      const { query } = q;
      history = [{ query, ts }, ...history];
    });
    if (history.length > MAX_HISTORY_ITEMS) {
      history = history.slice(0, MAX_HISTORY_ITEMS);
    }
    // Combine all queries of a datasource type into one history
    const historyKey = `grafana.explore.history.${datasourceId}`;
    store.setObject(historyKey, history);
    this.setState({ history });
  }

  buildQueryOptions(targetOptions: { format: string; instant?: boolean }) {
    const { datasource, queries, range } = this.state;
    const resolution = this.el.offsetWidth;
@@ -286,6 +341,7 @@ export class Explore extends React.Component<any, IExploreState> {
      const result = makeTimeSeriesList(res.data, options);
      const latency = Date.now() - now;
      this.setState({ latency, loading: false, graphResult: result, requestOptions: options });
      this.onQuerySuccess(datasource.meta.id, queries);
    } catch (response) {
      console.error(response);
      const queryError = response.data ? response.data.error : response;
@@ -309,6 +365,7 @@ export class Explore extends React.Component<any, IExploreState> {
      const tableModel = res.data[0];
      const latency = Date.now() - now;
      this.setState({ latency, loading: false, tableResult: tableModel, requestOptions: options });
      this.onQuerySuccess(datasource.meta.id, queries);
    } catch (response) {
      console.error(response);
      const queryError = response.data ? response.data.error : response;
@@ -332,6 +389,7 @@ export class Explore extends React.Component<any, IExploreState> {
      const logsData = res.data;
      const latency = Date.now() - now;
      this.setState({ latency, loading: false, logsResult: logsData, requestOptions: options });
      this.onQuerySuccess(datasource.meta.id, queries);
    } catch (response) {
      console.error(response);
      const queryError = response.data ? response.data.error : response;
@@ -352,6 +410,7 @@ export class Explore extends React.Component<any, IExploreState> {
      datasourceLoading,
      datasourceMissing,
      graphResult,
      history,
      latency,
      loading,
      logsResult,
@@ -390,18 +449,18 @@ export class Explore extends React.Component<any, IExploreState> {
              </a>
            </div>
          ) : (
            <div className="navbar-buttons explore-first-button">
              <button className="btn navbar-button" onClick={this.handleClickCloseSplit}>
                Close Split
            <div className="navbar-buttons explore-first-button">
              <button className="btn navbar-button" onClick={this.onClickCloseSplit}>
                Close Split
              </button>
            </div>
          )}
        </div>
        )}
        {!datasourceMissing ? (
          <div className="navbar-buttons">
            <Select
              className="datasource-picker"
              clearable={false}
              onChange={this.handleChangeDatasource}
              onChange={this.onChangeDatasource}
              options={datasources}
              placeholder="Loading datasources..."
              value={selectedDatasource}
@@ -411,31 +470,19 @@ export class Explore extends React.Component<any, IExploreState> {
        <div className="navbar__spacer" />
        {position === 'left' && !split ? (
          <div className="navbar-buttons">
            <button className="btn navbar-button" onClick={this.handleClickSplit}>
            <button className="btn navbar-button" onClick={this.onClickSplit}>
              Split
            </button>
          </div>
        ) : null}
        <TimePicker range={range} onChangeTime={this.onChangeTime} />
        <div className="navbar-buttons">
          {supportsGraph ? (
            <button className={`btn navbar-button ${graphButtonActive}`} onClick={this.handleClickGraphButton}>
              Graph
            </button>
          ) : null}
          {supportsTable ? (
            <button className={`btn navbar-button ${tableButtonActive}`} onClick={this.handleClickTableButton}>
              Table
            </button>
          ) : null}
          {supportsLogs ? (
            <button className={`btn navbar-button ${logsButtonActive}`} onClick={this.handleClickLogsButton}>
              Logs
            </button>
          ) : null}
          <button className="btn navbar-button navbar-button--no-icon" onClick={this.onClickClear}>
            Clear All
          </button>
        </div>
        <TimePicker range={range} onChangeTime={this.handleChangeTime} />
        <div className="navbar-buttons relative">
          <button className="btn navbar-button--primary" onClick={this.handleSubmit}>
          <button className="btn navbar-button--primary" onClick={this.onSubmit}>
            Run Query <i className="fa fa-level-down run-icon" />
          </button>
          {loading || latency ? <ElapsedTime time={latency} className="text-info" /> : null}
@@ -455,26 +502,49 @@ export class Explore extends React.Component<any, IExploreState> {
        {datasource && !datasourceError ? (
          <div className="explore-container">
            <QueryRows
              history={history}
              queries={queries}
              request={this.request}
              onAddQueryRow={this.handleAddQueryRow}
              onChangeQuery={this.handleChangeQuery}
              onExecuteQuery={this.handleSubmit}
              onRemoveQueryRow={this.handleRemoveQueryRow}
              onAddQueryRow={this.onAddQueryRow}
              onChangeQuery={this.onChangeQuery}
              onExecuteQuery={this.onSubmit}
              onRemoveQueryRow={this.onRemoveQueryRow}
            />
            {queryError ? <div className="text-warning m-a-2">{queryError}</div> : null}
            {queryError && !loading ? <div className="text-warning m-a-2">{queryError}</div> : null}

            <div className="result-options">
              {supportsGraph ? (
                <button className={`btn navbar-button ${graphButtonActive}`} onClick={this.onClickGraphButton}>
                  Graph
                </button>
              ) : null}
              {supportsTable ? (
                <button className={`btn navbar-button ${tableButtonActive}`} onClick={this.onClickTableButton}>
                  Table
                </button>
              ) : null}
              {supportsLogs ? (
                <button className={`btn navbar-button ${logsButtonActive}`} onClick={this.onClickLogsButton}>
                  Logs
                </button>
              ) : null}
            </div>

            <main className="m-t-2">
              {supportsGraph && showingGraph ? (
                <Graph
                  data={graphResult}
                  height={graphHeight}
                  loading={loading}
                  id={`explore-graph-${position}`}
                  options={requestOptions}
                  height={graphHeight}
                  split={split}
                />
              ) : null}
              {supportsTable && showingTable ? <Table data={tableResult} className="m-t-3" /> : null}
              {supportsLogs && showingLogs ? <Logs data={logsResult} /> : null}
              {supportsTable && showingTable ? (
                <Table className="m-t-3" data={tableResult} loading={loading} onClickCell={this.onClickTableCell} />
              ) : null}
              {supportsLogs && showingLogs ? <Logs data={logsResult} loading={loading} /> : null}
            </main>
          </div>
        ) : null}

@@ -84,7 +84,9 @@ class Graph extends Component<any, any> {

  draw() {
    const { data, options: userOptions } = this.props;
    const $el = $(`#${this.props.id}`);
    if (!data) {
      $el.empty();
      return;
    }
    const series = data.map((ts: TimeSeries) => ({
@@ -93,7 +95,6 @@ class Graph extends Component<any, any> {
      data: ts.getFlotPairs('null'),
    }));

    const $el = $(`#${this.props.id}`);
    const ticks = $el.width() / 100;
    let { from, to } = userOptions.range;
    if (!moment.isMoment(from)) {
@@ -123,7 +124,14 @@ class Graph extends Component<any, any> {
  }

  render() {
    const { data, height } = this.props;
    const { data, height, loading } = this.props;
    if (!loading && data && data.length === 0) {
      return (
        <div className="panel-container">
          <div className="muted m-a-1">The queries returned no time series to graph.</div>
        </div>
      );
    }
    return (
      <div className="panel-container">
        <div id={this.props.id} className="explore-graph" style={{ height }} />

@@ -5,6 +5,7 @@ import { LogsModel, LogRow } from 'app/core/logs_model';
interface LogsProps {
  className?: string;
  data: LogsModel;
  loading: boolean;
}

const EXAMPLE_QUERY = '{job="default/prometheus"}';

@@ -1,11 +1,12 @@
import React from 'react';
import Enzyme, { shallow } from 'enzyme';
import Adapter from 'enzyme-adapter-react-16';

Enzyme.configure({ adapter: new Adapter() });
import Plain from 'slate-plain-serializer';

import PromQueryField from './PromQueryField';

Enzyme.configure({ adapter: new Adapter() });

describe('PromQueryField typeahead handling', () => {
  const defaultProps = {
    request: () => ({ data: { data: [] } }),
@@ -59,20 +60,35 @@ describe('PromQueryField typeahead handling', () => {
  describe('label suggestions', () => {
    it('returns default label suggestions on label context and no metric', () => {
      const instance = shallow(<PromQueryField {...defaultProps} />).instance() as PromQueryField;
      const result = instance.getTypeahead({ text: 'j', prefix: 'j', wrapperClasses: ['context-labels'] });
      const value = Plain.deserialize('{}');
      const range = value.selection.merge({
        anchorOffset: 1,
      });
      const valueWithSelection = value.change().select(range).value;
      const result = instance.getTypeahead({
        text: '',
        prefix: '',
        wrapperClasses: ['context-labels'],
        value: valueWithSelection,
      });
      expect(result.context).toBe('context-labels');
      expect(result.suggestions).toEqual([{ items: [{ label: 'job' }, { label: 'instance' }], label: 'Labels' }]);
    });

    it('returns label suggestions on label context and metric', () => {
      const instance = shallow(
        <PromQueryField {...defaultProps} labelKeys={{ foo: ['bar'] }} />
        <PromQueryField {...defaultProps} labelKeys={{ '{__name__="metric"}': ['bar'] }} />
      ).instance() as PromQueryField;
      const value = Plain.deserialize('metric{}');
      const range = value.selection.merge({
        anchorOffset: 7,
      });
      const valueWithSelection = value.change().select(range).value;
      const result = instance.getTypeahead({
        text: 'job',
        prefix: 'job',
        text: '',
        prefix: '',
        wrapperClasses: ['context-labels'],
        metric: 'foo',
        value: valueWithSelection,
      });
      expect(result.context).toBe('context-labels');
      expect(result.suggestions).toEqual([{ items: [{ label: 'bar' }], label: 'Labels' }]);
@@ -80,13 +96,18 @@ describe('PromQueryField typeahead handling', () => {

    it('returns a refresher on label context and unavailable metric', () => {
      const instance = shallow(
        <PromQueryField {...defaultProps} labelKeys={{ foo: ['bar'] }} />
        <PromQueryField {...defaultProps} labelKeys={{ '{__name__="foo"}': ['bar'] }} />
      ).instance() as PromQueryField;
      const value = Plain.deserialize('metric{}');
      const range = value.selection.merge({
        anchorOffset: 7,
      });
      const valueWithSelection = value.change().select(range).value;
      const result = instance.getTypeahead({
        text: 'job',
        prefix: 'job',
        text: '',
        prefix: '',
        wrapperClasses: ['context-labels'],
        metric: 'xxx',
        value: valueWithSelection,
      });
      expect(result.context).toBeUndefined();
      expect(result.refresher).toBeInstanceOf(Promise);
@@ -95,28 +116,61 @@ describe('PromQueryField typeahead handling', () => {

    it('returns label values on label context when given a metric and a label key', () => {
      const instance = shallow(
        <PromQueryField {...defaultProps} labelKeys={{ foo: ['bar'] }} labelValues={{ foo: { bar: ['baz'] } }} />
        <PromQueryField
          {...defaultProps}
          labelKeys={{ '{__name__="metric"}': ['bar'] }}
          labelValues={{ '{__name__="metric"}': { bar: ['baz'] } }}
        />
      ).instance() as PromQueryField;
      const value = Plain.deserialize('metric{bar=ba}');
      const range = value.selection.merge({
        anchorOffset: 13,
      });
      const valueWithSelection = value.change().select(range).value;
      const result = instance.getTypeahead({
        text: '=ba',
        prefix: 'ba',
        wrapperClasses: ['context-labels'],
        metric: 'foo',
        labelKey: 'bar',
        value: valueWithSelection,
      });
      expect(result.context).toBe('context-label-values');
      expect(result.suggestions).toEqual([{ items: [{ label: 'baz' }], label: 'Label values' }]);
      expect(result.suggestions).toEqual([{ items: [{ label: 'baz' }], label: 'Label values for "bar"' }]);
    });

    it('returns label suggestions on aggregation context and metric', () => {
    it('returns label suggestions on aggregation context and metric w/ selector', () => {
      const instance = shallow(
        <PromQueryField {...defaultProps} labelKeys={{ foo: ['bar'] }} />
        <PromQueryField {...defaultProps} labelKeys={{ '{__name__="metric",foo="xx"}': ['bar'] }} />
      ).instance() as PromQueryField;
      const value = Plain.deserialize('sum(metric{foo="xx"}) by ()');
      const range = value.selection.merge({
        anchorOffset: 26,
      });
      const valueWithSelection = value.change().select(range).value;
      const result = instance.getTypeahead({
        text: 'job',
        prefix: 'job',
        text: '',
        prefix: '',
        wrapperClasses: ['context-aggregation'],
        metric: 'foo',
        value: valueWithSelection,
      });
      expect(result.context).toBe('context-aggregation');
      expect(result.suggestions).toEqual([{ items: [{ label: 'bar' }], label: 'Labels' }]);
    });

    it('returns label suggestions on aggregation context and metric w/o selector', () => {
      const instance = shallow(
        <PromQueryField {...defaultProps} labelKeys={{ '{__name__="metric"}': ['bar'] }} />
      ).instance() as PromQueryField;
      const value = Plain.deserialize('sum(metric) by ()');
      const range = value.selection.merge({
        anchorOffset: 16,
      });
      const valueWithSelection = value.change().select(range).value;
      const result = instance.getTypeahead({
        text: '',
        prefix: '',
        wrapperClasses: ['context-aggregation'],
        value: valueWithSelection,
      });
      expect(result.context).toBe('context-aggregation');
      expect(result.suggestions).toEqual([{ items: [{ label: 'bar' }], label: 'Labels' }]);

@@ -1,12 +1,16 @@
import _ from 'lodash';
import moment from 'moment';
import React from 'react';
import { Value } from 'slate';
import Cascader from 'rc-cascader';

// dom also includes Element polyfills
import { getNextCharacter, getPreviousCousin } from './utils/dom';
import PluginPrism, { setPrismTokens } from './slate-plugins/prism/index';
import PrismPromql, { FUNCTIONS } from './slate-plugins/prism/promql';
import BracesPlugin from './slate-plugins/braces';
import RunnerPlugin from './slate-plugins/runner';
import { processLabels, RATE_RANGES, cleanText } from './utils/prometheus';
import { processLabels, RATE_RANGES, cleanText, getCleanSelector } from './utils/prometheus';

import TypeaheadField, {
  Suggestion,
@@ -16,16 +20,53 @@ import TypeaheadField, {
  TypeaheadOutput,
} from './QueryField';

const EMPTY_METRIC = '';
const DEFAULT_KEYS = ['job', 'instance'];
const EMPTY_SELECTOR = '{}';
const HISTOGRAM_GROUP = '__histograms__';
const HISTOGRAM_SELECTOR = '{le!=""}'; // Returns all timeseries for histograms
const HISTORY_ITEM_COUNT = 5;
const HISTORY_COUNT_CUTOFF = 1000 * 60 * 60 * 24; // 24h
const METRIC_MARK = 'metric';
const PRISM_LANGUAGE = 'promql';

export const wrapLabel = label => ({ label });
export const wrapLabel = (label: string) => ({ label });
export const setFunctionMove = (suggestion: Suggestion): Suggestion => {
  suggestion.move = -1;
  return suggestion;
};

export function addHistoryMetadata(item: Suggestion, history: any[]): Suggestion {
  const cutoffTs = Date.now() - HISTORY_COUNT_CUTOFF;
  const historyForItem = history.filter(h => h.ts > cutoffTs && h.query === item.label);
  const count = historyForItem.length;
  const recent = historyForItem[0];
  let hint = `Queried ${count} times in the last 24h.`;
  if (recent) {
    const lastQueried = moment(recent.ts).fromNow();
    hint = `${hint} Last queried ${lastQueried}.`;
  }
  return {
    ...item,
    documentation: hint,
  };
}

export function groupMetricsByPrefix(metrics: string[], delimiter = '_'): CascaderOption[] {
  return _.chain(metrics)
    .groupBy(metric => metric.split(delimiter)[0])
    .map((metricsForPrefix: string[], prefix: string): CascaderOption => {
      const prefixIsMetric = metricsForPrefix.length === 1 && metricsForPrefix[0] === prefix;
      const children = prefixIsMetric ? [] : metricsForPrefix.sort().map(m => ({ label: m, value: m }));
      return {
        children,
        label: prefix,
        value: prefix,
      };
    })
    .sortBy('label')
    .value();
}

export function willApplySuggestion(
  suggestion: string,
  { typeaheadContext, typeaheadText }: TypeaheadFieldState
@@ -56,58 +97,95 @@ export function willApplySuggestion(
  return suggestion;
}

interface CascaderOption {
  label: string;
  value: string;
  children?: CascaderOption[];
  disabled?: boolean;
}

interface PromQueryFieldProps {
  history?: any[];
  histogramMetrics?: string[];
  initialQuery?: string | null;
  labelKeys?: { [index: string]: string[] }; // metric -> [labelKey,...]
  labelValues?: { [index: string]: { [index: string]: string[] } }; // metric -> labelKey -> [labelValue,...]
  metrics?: string[];
  metricsByPrefix?: CascaderOption[];
  onPressEnter?: () => void;
  onQueryChange?: (value: string) => void;
  onQueryChange?: (value: string, override?: boolean) => void;
  portalPrefix?: string;
  request?: (url: string) => any;
}

interface PromQueryFieldState {
  histogramMetrics: string[];
  labelKeys: { [index: string]: string[] }; // metric -> [labelKey,...]
  labelValues: { [index: string]: { [index: string]: string[] } }; // metric -> labelKey -> [labelValue,...]
  metrics: string[];
  metricsByPrefix: CascaderOption[];
}

interface PromTypeaheadInput {
  text: string;
  prefix: string;
  wrapperClasses: string[];
  metric?: string;
  labelKey?: string;
  value?: Value;
}

class PromQueryField extends React.Component<PromQueryFieldProps, PromQueryFieldState> {
  plugins: any[];

  constructor(props, context) {
  constructor(props: PromQueryFieldProps, context) {
    super(props, context);

    this.plugins = [
      BracesPlugin(),
      RunnerPlugin({ handler: props.onPressEnter }),
      PluginPrism({ definition: PrismPromql, language: PRISM_LANGUAGE }),
    ];

    this.state = {
      histogramMetrics: props.histogramMetrics || [],
      labelKeys: props.labelKeys || {},
      labelValues: props.labelValues || {},
      metrics: props.metrics || [],
      metricsByPrefix: props.metricsByPrefix || [],
    };
  }

  componentDidMount() {
    this.fetchMetricNames();
    this.fetchHistogramMetrics();
  }

  onChangeQuery = value => {
  onChangeMetrics = (values: string[], selectedOptions: CascaderOption[]) => {
    let query;
    if (selectedOptions.length === 1) {
      if (selectedOptions[0].children.length === 0) {
        query = selectedOptions[0].value;
      } else {
        // Ignore click on group
        return;
      }
    } else {
      const prefix = selectedOptions[0].value;
      const metric = selectedOptions[1].value;
      if (prefix === HISTOGRAM_GROUP) {
        query = `histogram_quantile(0.95, sum(rate(${metric}[5m])) by (le))`;
      } else {
        query = metric;
      }
    }
    this.onChangeQuery(query, true);
  };

  onChangeQuery = (value: string, override?: boolean) => {
    // Send text change to parent
    const { onQueryChange } = this.props;
    if (onQueryChange) {
      onQueryChange(value);
      onQueryChange(value, override);
    }
  };

@@ -119,25 +197,23 @@ class PromQueryField extends React.Component<PromQueryFieldProps, PromQueryField
  };

  onTypeahead = (typeahead: TypeaheadInput): TypeaheadOutput => {
    const { editorNode, prefix, text, wrapperNode } = typeahead;
    const { prefix, text, value, wrapperNode } = typeahead;

    // Get DOM-dependent context
    const wrapperClasses = Array.from(wrapperNode.classList);
    // Take first metric as lucky guess
    const metricNode = editorNode.querySelector(`.${METRIC_MARK}`);
    const metric = metricNode && metricNode.textContent;
    const labelKeyNode = getPreviousCousin(wrapperNode, '.attr-name');
    const labelKey = labelKeyNode && labelKeyNode.textContent;
    const nextChar = getNextCharacter();

    const result = this.getTypeahead({ text, prefix, wrapperClasses, metric, labelKey });
    const result = this.getTypeahead({ text, value, prefix, wrapperClasses, labelKey });

    console.log('handleTypeahead', wrapperClasses, text, prefix, result.context);
    console.log('handleTypeahead', wrapperClasses, text, prefix, nextChar, labelKey, result.context);

    return result;
  };

  // Keep this DOM-free for testing
  getTypeahead({ prefix, wrapperClasses, metric, text }: PromTypeaheadInput): TypeaheadOutput {
  getTypeahead({ prefix, wrapperClasses, text }: PromTypeaheadInput): TypeaheadOutput {
    // Determine candidates by CSS context
    if (_.includes(wrapperClasses, 'context-range')) {
      // Suggestions for metric[|]
@@ -145,12 +221,11 @@ class PromQueryField extends React.Component<PromQueryFieldProps, PromQueryField
    } else if (_.includes(wrapperClasses, 'context-labels')) {
      // Suggestions for metric{|} and metric{foo=|}, as well as metric-independent label queries like {|}
      return this.getLabelTypeahead.apply(this, arguments);
    } else if (metric && _.includes(wrapperClasses, 'context-aggregation')) {
    } else if (_.includes(wrapperClasses, 'context-aggregation')) {
      return this.getAggregationTypeahead.apply(this, arguments);
    } else if (
      // Non-empty but not inside known token unless it's a metric
      // Non-empty but not inside known token
      (prefix && !_.includes(wrapperClasses, 'token')) ||
      prefix === metric ||
      (prefix === '' && !text.match(/^[)\s]+$/)) || // Empty context or after ')'
      text.match(/[+\-*/^%]/) // After binary operator
    ) {
@@ -163,17 +238,37 @@ class PromQueryField extends React.Component<PromQueryFieldProps, PromQueryField
  }

  getEmptyTypeahead(): TypeaheadOutput {
    const { history } = this.props;
    const { metrics } = this.state;
    const suggestions: SuggestionGroup[] = [];

    if (history && history.length > 0) {
      const historyItems = _.chain(history)
        .uniqBy('query')
        .take(HISTORY_ITEM_COUNT)
        .map(h => h.query)
        .map(wrapLabel)
        .map(item => addHistoryMetadata(item, history))
        .value();

      suggestions.push({
        prefixMatch: true,
        skipSort: true,
        label: 'History',
        items: historyItems,
      });
    }

    suggestions.push({
      prefixMatch: true,
      label: 'Functions',
      items: FUNCTIONS.map(setFunctionMove),
    });

    if (this.state.metrics) {
    if (metrics) {
      suggestions.push({
        label: 'Metrics',
        items: this.state.metrics.map(wrapLabel),
        items: metrics.map(wrapLabel),
      });
    }
    return { suggestions };
@@ -191,14 +286,27 @@ class PromQueryField extends React.Component<PromQueryFieldProps, PromQueryField
    };
  }

  getAggregationTypeahead({ metric }: PromTypeaheadInput): TypeaheadOutput {
  getAggregationTypeahead({ value }: PromTypeaheadInput): TypeaheadOutput {
    let refresher: Promise<any> = null;
    const suggestions: SuggestionGroup[] = [];
    const labelKeys = this.state.labelKeys[metric];

    // sum(foo{bar="1"}) by (|)
    const line = value.anchorBlock.getText();
    const cursorOffset: number = value.anchorOffset;
    // sum(foo{bar="1"}) by (
    const leftSide = line.slice(0, cursorOffset);
    const openParensAggregationIndex = leftSide.lastIndexOf('(');
    const openParensSelectorIndex = leftSide.slice(0, openParensAggregationIndex).lastIndexOf('(');
    const closeParensSelectorIndex = leftSide.slice(openParensSelectorIndex).indexOf(')') + openParensSelectorIndex;
    // foo{bar="1"}
    const selectorString = leftSide.slice(openParensSelectorIndex + 1, closeParensSelectorIndex);
    const selector = getCleanSelector(selectorString, selectorString.length - 2);

    const labelKeys = this.state.labelKeys[selector];
    if (labelKeys) {
      suggestions.push({ label: 'Labels', items: labelKeys.map(wrapLabel) });
    } else {
      refresher = this.fetchMetricLabels(metric);
      refresher = this.fetchSeriesLabels(selector);
    }

    return {
@@ -208,59 +316,51 @@ class PromQueryField extends React.Component<PromQueryFieldProps, PromQueryField
    };
  }

  getLabelTypeahead({ metric, text, wrapperClasses, labelKey }: PromTypeaheadInput): TypeaheadOutput {
  getLabelTypeahead({ text, wrapperClasses, labelKey, value }: PromTypeaheadInput): TypeaheadOutput {
    let context: string;
    let refresher: Promise<any> = null;
    const suggestions: SuggestionGroup[] = [];
    if (metric) {
      const labelKeys = this.state.labelKeys[metric];
      if (labelKeys) {
        if ((text && text.startsWith('=')) || _.includes(wrapperClasses, 'attr-value')) {
          // Label values
          if (labelKey) {
            const labelValues = this.state.labelValues[metric][labelKey];
            context = 'context-label-values';
            suggestions.push({
              label: 'Label values',
              items: labelValues.map(wrapLabel),
            });
          }
        } else {
          // Label keys
          context = 'context-labels';
          suggestions.push({ label: 'Labels', items: labelKeys.map(wrapLabel) });
        }
      } else {
        refresher = this.fetchMetricLabels(metric);
    const line = value.anchorBlock.getText();
    const cursorOffset: number = value.anchorOffset;

    // Get normalized selector
    let selector;
    try {
      selector = getCleanSelector(line, cursorOffset);
    } catch {
      selector = EMPTY_SELECTOR;
    }
    const containsMetric = selector.indexOf('__name__=') > -1;

    if ((text && text.startsWith('=')) || _.includes(wrapperClasses, 'attr-value')) {
      // Label values
      if (labelKey && this.state.labelValues[selector] && this.state.labelValues[selector][labelKey]) {
        const labelValues = this.state.labelValues[selector][labelKey];
        context = 'context-label-values';
        suggestions.push({
          label: `Label values for "${labelKey}"`,
          items: labelValues.map(wrapLabel),
        });
      }
    } else {
      // Metric-independent label queries
      const defaultKeys = ['job', 'instance'];
      // Munge all keys that we have seen together
      const labelKeys = Object.keys(this.state.labelKeys).reduce((acc, metric) => {
        return acc.concat(this.state.labelKeys[metric].filter(key => acc.indexOf(key) === -1));
      }, defaultKeys);
      if ((text && text.startsWith('=')) || _.includes(wrapperClasses, 'attr-value')) {
        // Label values
        if (labelKey) {
          if (this.state.labelValues[EMPTY_METRIC]) {
            const labelValues = this.state.labelValues[EMPTY_METRIC][labelKey];
            context = 'context-label-values';
            suggestions.push({
              label: 'Label values',
              items: labelValues.map(wrapLabel),
            });
          } else {
            // Can only query label values for now (API to query keys is under development)
            refresher = this.fetchLabelValues(labelKey);
          }
        }
      } else {
        // Label keys
      // Label keys
      const labelKeys = this.state.labelKeys[selector] || (containsMetric ? null : DEFAULT_KEYS);
      if (labelKeys) {
        context = 'context-labels';
        suggestions.push({ label: 'Labels', items: labelKeys.map(wrapLabel) });
        suggestions.push({ label: `Labels`, items: labelKeys.map(wrapLabel) });
      }
    }

    // Query labels for selector
    if (selector && !this.state.labelValues[selector]) {
      if (selector === EMPTY_SELECTOR) {
        // Query label values for default labels
        refresher = Promise.all(DEFAULT_KEYS.map(key => this.fetchLabelValues(key)));
      } else {
        refresher = this.fetchSeriesLabels(selector, !containsMetric);
      }
    }

    return { context, refresher, suggestions };
  }

@@ -271,19 +371,29 @@ class PromQueryField extends React.Component<PromQueryFieldProps, PromQueryField
    return fetch(url);
  };

  async fetchLabelValues(key) {
  fetchHistogramMetrics() {
    this.fetchSeriesLabels(HISTOGRAM_SELECTOR, true, () => {
      const histogramSeries = this.state.labelValues[HISTOGRAM_SELECTOR];
      if (histogramSeries && histogramSeries['__name__']) {
        const histogramMetrics = histogramSeries['__name__'].slice().sort();
        this.setState({ histogramMetrics });
      }
    });
  }

  async fetchLabelValues(key: string) {
    const url = `/api/v1/label/${key}/values`;
    try {
      const res = await this.request(url);
      const body = await (res.data || res.json());
      const pairs = this.state.labelValues[EMPTY_METRIC];
      const exisingValues = this.state.labelValues[EMPTY_SELECTOR];
      const values = {
        ...pairs,
        ...exisingValues,
        [key]: body.data,
      };
      const labelValues = {
        ...this.state.labelValues,
        [EMPTY_METRIC]: values,
        [EMPTY_SELECTOR]: values,
      };
      this.setState({ labelValues });
    } catch (e) {
@@ -291,12 +401,12 @@ class PromQueryField extends React.Component<PromQueryFieldProps, PromQueryField
    }
  }

  async fetchMetricLabels(name) {
  async fetchSeriesLabels(name: string, withName?: boolean, callback?: () => void) {
    const url = `/api/v1/series?match[]=${name}`;
    try {
      const res = await this.request(url);
      const body = await (res.data || res.json());
      const { keys, values } = processLabels(body.data);
      const { keys, values } = processLabels(body.data, withName);
      const labelKeys = {
        ...this.state.labelKeys,
        [name]: keys,
@@ -305,7 +415,7 @@ class PromQueryField extends React.Component<PromQueryFieldProps, PromQueryField
        ...this.state.labelValues,
        [name]: values,
      };
      this.setState({ labelKeys, labelValues });
      this.setState({ labelKeys, labelValues }, callback);
    } catch (e) {
      console.error(e);
    }
@@ -316,23 +426,41 @@ class PromQueryField extends React.Component<PromQueryFieldProps, PromQueryField
    try {
      const res = await this.request(url);
      const body = await (res.data || res.json());
      this.setState({ metrics: body.data }, this.onReceiveMetrics);
      const metrics = body.data;
      const metricsByPrefix = groupMetricsByPrefix(metrics);
      this.setState({ metrics, metricsByPrefix }, this.onReceiveMetrics);
    } catch (error) {
      console.error(error);
    }
  }

  render() {
    const { histogramMetrics, metricsByPrefix } = this.state;
    const histogramOptions = histogramMetrics.map(hm => ({ label: hm, value: hm }));
    const metricsOptions = [
      { label: 'Histograms', value: HISTOGRAM_GROUP, children: histogramOptions },
      ...metricsByPrefix,
    ];

    return (
      <TypeaheadField
        additionalPlugins={this.plugins}
        cleanText={cleanText}
        initialValue={this.props.initialQuery}
        onTypeahead={this.onTypeahead}
        onWillApplySuggestion={willApplySuggestion}
        onValueChanged={this.onChangeQuery}
        placeholder="Enter a PromQL query"
      />
      <div className="prom-query-field">
        <div className="prom-query-field-tools">
          <Cascader options={metricsOptions} onChange={this.onChangeMetrics}>
            <button className="btn navbar-button navbar-button--tight">Metrics</button>
          </Cascader>
        </div>
        <div className="slate-query-field-wrapper">
          <TypeaheadField
            additionalPlugins={this.plugins}
            cleanText={cleanText}
            initialValue={this.props.initialQuery}
            onTypeahead={this.onTypeahead}
            onWillApplySuggestion={willApplySuggestion}
            onValueChanged={this.onChangeQuery}
            placeholder="Enter a PromQL query"
          />
        </div>
      </div>
    );
  }
}

@@ -5,7 +5,6 @@ import { Block, Change, Document, Text, Value } from 'slate';
import { Editor } from 'slate-react';
import Plain from 'slate-plain-serializer';

import BracesPlugin from './slate-plugins/braces';
import ClearPlugin from './slate-plugins/clear';
import NewlinePlugin from './slate-plugins/newline';

@@ -97,6 +96,10 @@ export interface SuggestionGroup {
   * If true, do not filter items in this group based on the search.
   */
  skipFilter?: boolean;
  /**
   * If true, do not sort items.
   */
  skipSort?: boolean;
}

interface TypeaheadFieldProps {
@@ -126,6 +129,7 @@ export interface TypeaheadInput {
  prefix: string;
  selection?: Selection;
  text: string;
  value: Value;
  wrapperNode: Element;
}

@@ -144,7 +148,7 @@ class QueryField extends React.Component<TypeaheadFieldProps, TypeaheadFieldStat
    super(props, context);

    // Base plugins
    this.plugins = [BracesPlugin(), ClearPlugin(), NewlinePlugin(), ...props.additionalPlugins];
    this.plugins = [ClearPlugin(), NewlinePlugin(), ...props.additionalPlugins];

    this.state = {
      suggestions: [],
@@ -199,6 +203,7 @@ class QueryField extends React.Component<TypeaheadFieldProps, TypeaheadFieldStat
  handleTypeahead = _.debounce(async () => {
    const selection = window.getSelection();
    const { cleanText, onTypeahead } = this.props;
    const { value } = this.state;

    if (onTypeahead && selection.anchorNode) {
      const wrapperNode = selection.anchorNode.parentElement;
@@ -221,6 +226,7 @@ class QueryField extends React.Component<TypeaheadFieldProps, TypeaheadFieldStat
        prefix,
        selection,
        text,
        value,
        wrapperNode,
      });

@@ -241,7 +247,9 @@ class QueryField extends React.Component<TypeaheadFieldProps, TypeaheadFieldStat
          group.items = group.items.filter(c => c.insertText || (c.filterText || c.label) !== prefix);
        }

        group.items = _.sortBy(group.items, item => item.sortText || item.label);
        if (!group.skipSort) {
          group.items = _.sortBy(group.items, item => item.sortText || item.label);
        }
      }
      return group;
    })

@@ -2,40 +2,33 @@ import React, { PureComponent } from 'react';

import QueryField from './PromQueryField';

class QueryRow extends PureComponent<any, any> {
  constructor(props) {
    super(props);
    this.state = {
      edited: false,
      query: props.query || '',
    };
  }

  handleChangeQuery = value => {
class QueryRow extends PureComponent<any, {}> {
  onChangeQuery = (value, override?: boolean) => {
    const { index, onChangeQuery } = this.props;
    const { query } = this.state;
    const edited = query !== value;
    this.setState({ edited, query: value });
    if (onChangeQuery) {
      onChangeQuery(value, index);
      onChangeQuery(value, index, override);
    }
  };

  handleClickAddButton = () => {
  onClickAddButton = () => {
    const { index, onAddQueryRow } = this.props;
    if (onAddQueryRow) {
      onAddQueryRow(index);
    }
  };

  handleClickRemoveButton = () => {
  onClickClearButton = () => {
    this.onChangeQuery('', true);
  };

  onClickRemoveButton = () => {
    const { index, onRemoveQueryRow } = this.props;
    if (onRemoveQueryRow) {
      onRemoveQueryRow(index);
    }
  };

  handlePressEnter = () => {
  onPressEnter = () => {
    const { onExecuteQuery } = this.props;
    if (onExecuteQuery) {
      onExecuteQuery();
@@ -43,38 +36,43 @@ class QueryRow extends PureComponent<any, any> {
  };

  render() {
    const { request } = this.props;
    const { edited, query } = this.state;
    const { edited, history, query, request } = this.props;
    return (
      <div className="query-row">
        <div className="query-row-tools">
          <button className="btn navbar-button navbar-button--tight" onClick={this.handleClickAddButton}>
            <i className="fa fa-plus" />
          </button>
          <button className="btn navbar-button navbar-button--tight" onClick={this.handleClickRemoveButton}>
            <i className="fa fa-minus" />
          </button>
        </div>
        <div className="slate-query-field-wrapper">
        <div className="query-row-field">
          <QueryField
            initialQuery={edited ? null : query}
            history={history}
            portalPrefix="explore"
            onPressEnter={this.handlePressEnter}
            onQueryChange={this.handleChangeQuery}
            onPressEnter={this.onPressEnter}
            onQueryChange={this.onChangeQuery}
            request={request}
          />
        </div>
        <div className="query-row-tools">
          <button className="btn navbar-button navbar-button--tight" onClick={this.onClickClearButton}>
            <i className="fa fa-times" />
          </button>
          <button className="btn navbar-button navbar-button--tight" onClick={this.onClickAddButton}>
            <i className="fa fa-plus" />
          </button>
          <button className="btn navbar-button navbar-button--tight" onClick={this.onClickRemoveButton}>
            <i className="fa fa-minus" />
          </button>
        </div>
      </div>
    );
  }
}

export default class QueryRows extends PureComponent<any, any> {
export default class QueryRows extends PureComponent<any, {}> {
  render() {
    const { className = '', queries, ...handlers } = this.props;
    return (
      <div className={className}>
        {queries.map((q, index) => <QueryRow key={q.key} index={index} query={q.query} {...handlers} />)}
        {queries.map((q, index) => (
          <QueryRow key={q.key} index={index} query={q.query} edited={q.edited} {...handlers} />
        ))}
      </div>
    );
  }

@@ -1,22 +1,82 @@
|
||||
import React, { PureComponent } from 'react';
|
||||
// import TableModel from 'app/core/table_model';
|
||||
import TableModel from 'app/core/table_model';
|
||||
|
||||
const EMPTY_TABLE = {
|
||||
columns: [],
|
||||
rows: [],
|
||||
};
|
||||
const EMPTY_TABLE = new TableModel();
|
||||
|
||||
export default class Table extends PureComponent<any, any> {
|
||||
interface TableProps {
|
||||
className?: string;
|
||||
data: TableModel;
|
||||
loading: boolean;
|
||||
onClickCell?: (columnKey: string, rowValue: string) => void;
|
||||
}
|
||||
|
||||
interface SFCCellProps {
|
||||
columnIndex: number;
|
||||
onClickCell?: (columnKey: string, rowValue: string, columnIndex: number, rowIndex: number, table: TableModel) => void;
|
||||
rowIndex: number;
|
||||
table: TableModel;
|
||||
value: string;
|
||||
}
|
||||
|
||||
function Cell(props: SFCCellProps) {
|
||||
const { columnIndex, rowIndex, table, value, onClickCell } = props;
|
||||
const column = table.columns[columnIndex];
|
||||
if (column && column.filterable && onClickCell) {
|
||||
const onClick = event => {
|
||||
event.preventDefault();
|
||||
onClickCell(column.text, value, columnIndex, rowIndex, table);
|
||||
};
|
||||
return (
|
||||
<td>
|
||||
<a className="link" onClick={onClick}>
|
||||
{value}
|
||||
</a>
|
||||
</td>
|
||||
);
|
||||
}
|
||||
return <td>{value}</td>;
|
||||
}
|
||||
|
||||
export default class Table extends PureComponent<TableProps, {}> {
|
||||
render() {
|
||||
const { className = '', data } = this.props;
|
||||
const tableModel = data || EMPTY_TABLE;
|
||||
const { className = '', data, loading, onClickCell } = this.props;
|
||||
let tableModel = data || EMPTY_TABLE;
|
||||
if (!loading && data && data.rows.length === 0) {
|
||||
return (
|
||||
<table className={`${className} filter-table`}>
|
||||
<thead>
|
||||
<tr>
|
||||
<th>Table</th>
|
||||
</tr>
|
||||
</thead>
|
||||
<tbody>
|
||||
<tr>
|
||||
<td className="muted">The queries returned no data for a table.</td>
|
||||
</tr>
|
||||
</tbody>
|
||||
</table>
|
||||
);
|
||||
}
|
||||
return (
|
||||
<table className={`${className} filter-table`}>
|
||||
<thead>
|
||||
<tr>{tableModel.columns.map(col => <th key={col.text}>{col.text}</th>)}</tr>
|
||||
</thead>
|
||||
<tbody>
|
||||
{tableModel.rows.map((row, i) => <tr key={i}>{row.map((content, j) => <td key={j}>{content}</td>)}</tr>)}
|
||||
{tableModel.rows.map((row, i) => (
|
||||
<tr key={i}>
|
||||
{row.map((value, j) => (
|
||||
<Cell
|
||||
key={j}
|
||||
columnIndex={j}
|
||||
rowIndex={i}
|
||||
value={String(value)}
|
||||
table={data}
|
||||
onClickCell={onClickCell}
|
||||
/>
|
||||
))}
|
||||
</tr>
|
||||
))}
|
||||
</tbody>
|
||||
</table>
|
||||
);
|
||||
|
||||
@@ -44,4 +44,13 @@ describe('braces', () => {
handler(event, change);
expect(Plain.serialize(change.value)).toEqual('(foo) (bar)() ugh');
});

it('adds closing braces outside a selector', () => {
const change = Plain.deserialize('sumrate(metric{namespace="dev", cluster="c1"}[2m])').change();
let event;
change.move(3);
event = new window.KeyboardEvent('keydown', { key: '(' });
handler(event, change);
expect(Plain.serialize(change.value)).toEqual('sum(rate(metric{namespace="dev", cluster="c1"}[2m]))');
});
});
@@ -4,6 +4,8 @@ const BRACES = {
'(': ')',
};

const NON_SELECTOR_SPACE_REGEXP = / (?![^}]+})/;

export default function BracesPlugin() {
return {
onKeyDown(event, change) {
@@ -28,8 +30,8 @@ export default function BracesPlugin() {
event.preventDefault();
const text = value.anchorText.text;
const offset = value.anchorOffset;
const space = text.indexOf(' ', offset);
const length = space > 0 ? space : text.length;
const delimiterIndex = text.slice(offset).search(NON_SELECTOR_SPACE_REGEXP);
const length = delimiterIndex > -1 ? delimiterIndex + offset : text.length;
const forward = length - offset;
// Insert matching braces
change

public/app/containers/Explore/utils/prometheus.jest.ts (new file, 33 lines)
@@ -0,0 +1,33 @@
import { getCleanSelector } from './prometheus';

describe('getCleanSelector()', () => {
it('returns a clean selector from an empty selector', () => {
expect(getCleanSelector('{}', 1)).toBe('{}');
});
it('throws if selector is broken', () => {
expect(() => getCleanSelector('{foo')).toThrow();
});
it('returns the selector sorted by label key', () => {
expect(getCleanSelector('{foo="bar"}')).toBe('{foo="bar"}');
expect(getCleanSelector('{foo="bar",baz="xx"}')).toBe('{baz="xx",foo="bar"}');
});
it('returns a clean selector from an incomplete one', () => {
expect(getCleanSelector('{foo}')).toBe('{}');
expect(getCleanSelector('{foo="bar",baz}')).toBe('{foo="bar"}');
expect(getCleanSelector('{foo="bar",baz="}')).toBe('{foo="bar"}');
});
it('throws if not inside a selector', () => {
expect(() => getCleanSelector('foo{}', 0)).toThrow();
expect(() => getCleanSelector('foo{} + bar{}', 5)).toThrow();
});
it('returns the selector nearest to the cursor offset', () => {
expect(() => getCleanSelector('{foo="bar"} + {foo="bar"}', 0)).toThrow();
expect(getCleanSelector('{foo="bar"} + {foo="bar"}', 1)).toBe('{foo="bar"}');
expect(getCleanSelector('{foo="bar"} + {baz="xx"}', 1)).toBe('{foo="bar"}');
expect(getCleanSelector('{baz="xx"} + {foo="bar"}', 16)).toBe('{foo="bar"}');
});
it('returns a selector with metric if metric is given', () => {
expect(getCleanSelector('bar{foo}', 4)).toBe('{__name__="bar"}');
expect(getCleanSelector('baz{foo="bar"}', 12)).toBe('{__name__="baz",foo="bar"}');
});
});

@@ -1,9 +1,16 @@
export const RATE_RANGES = ['1m', '5m', '10m', '30m', '1h'];

export function processLabels(labels) {
export function processLabels(labels, withName = false) {
const values = {};
labels.forEach(l => {
const { __name__, ...rest } = l;
if (withName) {
values['__name__'] = values['__name__'] || [];
if (values['__name__'].indexOf(__name__) === -1) {
values['__name__'].push(__name__);
}
}

Object.keys(rest).forEach(key => {
if (!values[key]) {
values[key] = [];
@@ -18,3 +25,64 @@ export function processLabels(labels) {

// Strip syntax chars
export const cleanText = s => s.replace(/[{}[\]="(),!~+\-*/^%]/g, '').trim();

// const cleanSelectorRegexp = /\{(\w+="[^"\n]*?")(,\w+="[^"\n]*?")*\}/;
const selectorRegexp = /\{[^}]*?\}/;
const labelRegexp = /\b\w+="[^"\n]*?"/g;
export function getCleanSelector(query: string, cursorOffset = 1): string {
if (!query.match(selectorRegexp)) {
// Special matcher for metrics
if (query.match(/^\w+$/)) {
return `{__name__="${query}"}`;
}
throw new Error('Query must contain a selector: ' + query);
}

// Check if inside a selector
const prefix = query.slice(0, cursorOffset);
const prefixOpen = prefix.lastIndexOf('{');
const prefixClose = prefix.lastIndexOf('}');
if (prefixOpen === -1) {
throw new Error('Not inside selector, missing open brace: ' + prefix);
}
if (prefixClose > -1 && prefixClose > prefixOpen) {
throw new Error('Not inside selector, previous selector already closed: ' + prefix);
}
const suffix = query.slice(cursorOffset);
const suffixCloseIndex = suffix.indexOf('}');
const suffixClose = suffixCloseIndex + cursorOffset;
const suffixOpenIndex = suffix.indexOf('{');
const suffixOpen = suffixOpenIndex + cursorOffset;
if (suffixClose === -1) {
throw new Error('Not inside selector, missing closing brace in suffix: ' + suffix);
}
if (suffixOpenIndex > -1 && suffixOpen < suffixClose) {
throw new Error('Not inside selector, next selector opens before this one closed: ' + suffix);
}

// Extract clean labels to form clean selector, incomplete labels are dropped
const selector = query.slice(prefixOpen, suffixClose);
let labels = {};
selector.replace(labelRegexp, match => {
const delimiterIndex = match.indexOf('=');
const key = match.slice(0, delimiterIndex);
const value = match.slice(delimiterIndex + 1, match.length);
labels[key] = value;
return '';
});

// Add metric if there is one before the selector
const metricPrefix = query.slice(0, prefixOpen);
const metricMatch = metricPrefix.match(/\w+$/);
if (metricMatch) {
labels['__name__'] = `"${metricMatch[0]}"`;
}

// Build sorted selector
const cleanSelector = Object.keys(labels)
.sort()
.map(key => `${key}=${labels[key]}`)
.join(',');

return ['{', cleanSelector, '}'].join('');
}

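For reference, a minimal usage sketch of the new helper, written in TypeScript; the results simply mirror the prometheus.jest.ts cases added earlier in this diff, and the import path is inferred from that test file:

import { getCleanSelector } from 'app/containers/Explore/utils/prometheus';

// Labels come back sorted, incomplete labels are dropped, and a metric name
// in front of the selector is folded in as __name__.
getCleanSelector('{foo="bar",baz}');      // '{foo="bar"}'
getCleanSelector('baz{foo="bar"}', 12);   // '{__name__="baz",foo="bar"}'

// Offsets that do not sit inside a selector throw.
getCleanSelector('foo{} + bar{}', 5);     // Error: previous selector already closed
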
@@ -15,7 +15,14 @@ export class KeybindingSrv {
timepickerOpen = false;

/** @ngInject */
constructor(private $rootScope, private $location, private datasourceSrv, private timeSrv, private contextSrv) {
constructor(
private $rootScope,
private $location,
private datasourceSrv,
private timeSrv,
private contextSrv,
private $route
) {
// clear out all shortcuts on route change
$rootScope.$on('$routeChangeSuccess', () => {
Mousetrap.reset();
@@ -259,6 +266,14 @@ export class KeybindingSrv {
this.bind('d v', () => {
appEvents.emit('toggle-view-mode');
});

//Autofit panels
this.bind('d a', () => {
this.$location.search('autofitpanels', this.$location.search().autofitpanels ? null : true);
//Force reload

this.$route.reload();
});
}
}

@@ -32,6 +32,18 @@ describe('store', () => {
expect(store.getBool('key5', false)).toBe(true);
});

it('gets an object', () => {
expect(store.getObject('object1')).toBeUndefined();
expect(store.getObject('object1', [])).toEqual([]);
store.setObject('object1', [1]);
expect(store.getObject('object1')).toEqual([1]);
});

it('sets an object', () => {
expect(store.setObject('object2', { a: 1 })).toBe(true);
expect(store.getObject('object2')).toEqual({ a: 1 });
});

it('key should be deleted', () => {
store.set('key6', '123');
store.delete('key6');

@@ -14,6 +14,38 @@ export class Store {
return window.localStorage[key] === 'true';
}

getObject(key: string, def?: any) {
let ret = def;
if (this.exists(key)) {
const json = window.localStorage[key];
try {
ret = JSON.parse(json);
} catch (error) {
console.error(`Error parsing store object: ${key}. Returning default: ${def}. [${error}]`);
}
}
return ret;
}

// Returns true when successfully stored
setObject(key: string, value: any): boolean {
let json;
try {
json = JSON.stringify(value);
} catch (error) {
console.error(`Could not stringify object: ${key}. [${error}]`);
return false;
}
try {
this.set(key, json);
} catch (error) {
// Likely hitting storage quota
console.error(`Could not save item in localStorage: ${key}. [${error}]`);
return false;
}
return true;
}

exists(key) {
return window.localStorage[key] !== void 0;
}

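A short usage sketch of the new object helpers, matching the store test above; the default-export import path is an assumption and is not shown in this hunk:

import store from 'app/core/store';

store.getObject('prefs');             // undefined when the key is missing
store.getObject('prefs', {});         // falls back to the supplied default
store.setObject('prefs', { a: 1 });   // true on success; false when the value
                                      // cannot be stringified or storage is full
store.getObject('prefs');             // { a: 1 }
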
@@ -1,5 +1,15 @@
interface Column {
text: string;
title?: string;
type?: string;
sort?: boolean;
desc?: boolean;
filterable?: boolean;
unit?: string;
}

export default class TableModel {
columns: any[];
columns: Column[];
rows: any[];
type: string;
columnMap: any;

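A rough sketch of how the typed columns are meant to line up with the Explore Table component earlier in this diff (the filterable flag is what makes Cell render a clickable link; the column and row values below are only illustrative):

import TableModel from 'app/core/table_model';

const model = new TableModel();
model.columns = [{ text: 'job', filterable: true }, { text: 'instance' }];
model.rows = [['node', 'localhost:9100']];

// <Table data={model} loading={false} onClickCell={(key, value) => console.log(key, value)} />
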
@@ -1,17 +0,0 @@
import { describe, it, expect } from 'test/lib/common';

import { AlertTabCtrl } from '../alert_tab_ctrl';

describe('AlertTabCtrl', () => {
var $scope = {
ctrl: {},
};

describe('with null parameters', () => {
it('can be created', () => {
var alertTab = new AlertTabCtrl($scope, null, null, null, null, null);

expect(alertTab).to.not.be(null);
});
});
});

@@ -62,6 +62,8 @@ export class DashboardCtrl implements PanelContainer {
.finally(() => {
this.dashboard = dashboard;
this.dashboard.processRepeats();
this.dashboard.updateSubmenuVisibility();
this.dashboard.autoFitPanels(window.innerHeight);

this.unsavedChangesSrv.init(dashboard, this.$scope);

@@ -70,8 +72,6 @@ export class DashboardCtrl implements PanelContainer {
this.dashboardViewState = this.dashboardViewStateSrv.create(this.$scope);

this.keybindingSrv.setupDashboardBindings(this.$scope, dashboard);

this.dashboard.updateSubmenuVisibility();
this.setWindowTitleAndTheme();

this.$scope.appEvent('dashboard-initialized', dashboard);

@@ -1,7 +1,7 @@
import moment from 'moment';
import _ from 'lodash';

import { GRID_COLUMN_COUNT, REPEAT_DIR_VERTICAL } from 'app/core/constants';
import { GRID_COLUMN_COUNT, REPEAT_DIR_VERTICAL, GRID_CELL_HEIGHT, GRID_CELL_VMARGIN } from 'app/core/constants';
import { DEFAULT_ANNOTATION_COLOR } from 'app/core/utils/colors';
import { Emitter } from 'app/core/utils/emitter';
import { contextSrv } from 'app/core/services/context_srv';
@@ -830,4 +830,32 @@ export class DashboardModel {

return !_.isEqual(updated, this.originalTemplating);
}

autoFitPanels(viewHeight: number) {
if (!this.meta.autofitpanels) {
return;
}

const currentGridHeight = Math.max(
...this.panels.map(panel => {
return panel.gridPos.h + panel.gridPos.y;
})
);

// Consider navbar and submenu controls, padding and margin
let visibleHeight = window.innerHeight - 55 - 20;

// Remove submenu if visible
if (this.meta.submenuEnabled) {
visibleHeight -= 50;
}

const visibleGridHeight = Math.floor(visibleHeight / (GRID_CELL_HEIGHT + GRID_CELL_VMARGIN));
const scaleFactor = currentGridHeight / visibleGridHeight;

this.panels.forEach((panel, i) => {
panel.gridPos.y = Math.round(panel.gridPos.y / scaleFactor) || 1;
panel.gridPos.h = Math.round(panel.gridPos.h / scaleFactor) || 1;
});
}
}

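To make the autofit scaling concrete: assuming GRID_CELL_HEIGHT = 30 and GRID_CELL_VMARGIN = 10 (assumed values for the constants imported above), a 1080px window gives visibleHeight = 1080 - 55 - 20 = 1005px, so visibleGridHeight = floor(1005 / 40) = 25 grid rows. A dashboard whose lowest panel ends at grid row 50 then gets scaleFactor = 50 / 25 = 2, and each panel's gridPos.y and gridPos.h are halved and rounded (with a floor of 1). This is what the 'd a' keybinding added earlier toggles via the autofitpanels URL parameter.
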
@@ -1,15 +0,0 @@
import { describe, beforeEach, expect } from 'test/lib/common';

import { DashboardSrv } from '../dashboard_srv';

describe('dashboardSrv', function() {
var _dashboardSrv;

beforeEach(() => {
_dashboardSrv = new DashboardSrv({}, {}, {});
});

it('should do something', () => {
expect(_dashboardSrv).not.to.be(null);
});
});

@@ -1,53 +1,70 @@
|
||||
import { describe, beforeEach, it, sinon, expect, angularMocks } from 'test/lib/common';
|
||||
|
||||
import '../all';
|
||||
|
||||
import { VariableSrv } from '../variable_srv';
|
||||
import moment from 'moment';
|
||||
import helpers from 'test/specs/helpers';
|
||||
import { Emitter } from 'app/core/core';
|
||||
import $q from 'q';
|
||||
|
||||
describe('VariableSrv', function() {
|
||||
var ctx = new helpers.ControllerTestContext();
|
||||
|
||||
beforeEach(angularMocks.module('grafana.core'));
|
||||
beforeEach(angularMocks.module('grafana.controllers'));
|
||||
beforeEach(angularMocks.module('grafana.services'));
|
||||
|
||||
beforeEach(ctx.providePhase(['datasourceSrv', 'timeSrv', 'templateSrv', '$location']));
|
||||
beforeEach(
|
||||
angularMocks.inject(($rootScope, $q, $location, $injector) => {
|
||||
ctx.$q = $q;
|
||||
ctx.$rootScope = $rootScope;
|
||||
ctx.$location = $location;
|
||||
ctx.variableSrv = $injector.get('variableSrv');
|
||||
ctx.variableSrv.init({
|
||||
templating: { list: [] },
|
||||
events: new Emitter(),
|
||||
updateSubmenuVisibility: sinon.stub(),
|
||||
});
|
||||
ctx.$rootScope.$digest();
|
||||
})
|
||||
);
|
||||
var ctx = <any>{
|
||||
datasourceSrv: {},
|
||||
timeSrv: {
|
||||
timeRange: () => {},
|
||||
},
|
||||
$rootScope: {
|
||||
$on: () => {},
|
||||
},
|
||||
$injector: {
|
||||
instantiate: (ctr, obj) => new ctr(obj.model),
|
||||
},
|
||||
templateSrv: {
|
||||
setGrafanaVariable: jest.fn(),
|
||||
init: vars => {
|
||||
this.variables = vars;
|
||||
},
|
||||
updateTemplateData: () => {},
|
||||
replace: str =>
|
||||
str.replace(this.regex, match => {
|
||||
return match;
|
||||
}),
|
||||
},
|
||||
$location: {
|
||||
search: () => {},
|
||||
},
|
||||
};
|
||||
|
||||
function describeUpdateVariable(desc, fn) {
|
||||
describe(desc, function() {
|
||||
describe(desc, () => {
|
||||
var scenario: any = {};
|
||||
scenario.setup = function(setupFn) {
|
||||
scenario.setupFn = setupFn;
|
||||
};
|
||||
|
||||
beforeEach(function() {
|
||||
beforeEach(async () => {
|
||||
scenario.setupFn();
|
||||
|
||||
var ds: any = {};
|
||||
ds.metricFindQuery = sinon.stub().returns(ctx.$q.when(scenario.queryResult));
|
||||
ctx.datasourceSrv.get = sinon.stub().returns(ctx.$q.when(ds));
|
||||
ctx.datasourceSrv.getMetricSources = sinon.stub().returns(scenario.metricSources);
|
||||
ds.metricFindQuery = () => Promise.resolve(scenario.queryResult);
|
||||
|
||||
ctx.variableSrv = new VariableSrv(ctx.$rootScope, $q, ctx.$location, ctx.$injector, ctx.templateSrv);
|
||||
|
||||
ctx.variableSrv.timeSrv = ctx.timeSrv;
|
||||
ctx.datasourceSrv = {
|
||||
get: () => Promise.resolve(ds),
|
||||
getMetricSources: () => scenario.metricSources,
|
||||
};
|
||||
|
||||
ctx.$injector.instantiate = (ctr, model) => {
|
||||
return getVarMockConstructor(ctr, model, ctx);
|
||||
};
|
||||
|
||||
ctx.variableSrv.init({
|
||||
templating: { list: [] },
|
||||
updateSubmenuVisibility: () => {},
|
||||
});
|
||||
|
||||
scenario.variable = ctx.variableSrv.createVariableFromModel(scenario.variableModel);
|
||||
ctx.variableSrv.addVariable(scenario.variable);
|
||||
|
||||
ctx.variableSrv.updateOptions(scenario.variable);
|
||||
ctx.$rootScope.$digest();
|
||||
await ctx.variableSrv.updateOptions(scenario.variable);
|
||||
});
|
||||
|
||||
fn(scenario);
|
||||
@@ -64,9 +81,9 @@ describe('VariableSrv', function() {
|
||||
});
|
||||
|
||||
it('should update options array', () => {
|
||||
expect(scenario.variable.options.length).to.be(4);
|
||||
expect(scenario.variable.options[0].text).to.be('1s');
|
||||
expect(scenario.variable.options[0].value).to.be('1s');
|
||||
expect(scenario.variable.options.length).toBe(4);
|
||||
expect(scenario.variable.options[0].text).toBe('1s');
|
||||
expect(scenario.variable.options[0].value).toBe('1s');
|
||||
});
|
||||
});
|
||||
|
||||
@@ -90,28 +107,28 @@ describe('VariableSrv', function() {
|
||||
to: new Date(),
|
||||
};
|
||||
|
||||
ctx.timeSrv.timeRange = sinon.stub().returns(range);
|
||||
ctx.templateSrv.setGrafanaVariable = sinon.spy();
|
||||
ctx.timeSrv.timeRange = () => range;
|
||||
// ctx.templateSrv.setGrafanaVariable = jest.fn();
|
||||
});
|
||||
|
||||
it('should update options array', function() {
|
||||
expect(scenario.variable.options.length).to.be(5);
|
||||
expect(scenario.variable.options[0].text).to.be('auto');
|
||||
expect(scenario.variable.options[0].value).to.be('$__auto_interval_test');
|
||||
it('should update options array', () => {
|
||||
expect(scenario.variable.options.length).toBe(5);
|
||||
expect(scenario.variable.options[0].text).toBe('auto');
|
||||
expect(scenario.variable.options[0].value).toBe('$__auto_interval_test');
|
||||
});
|
||||
|
||||
it('should set $__auto_interval_test', function() {
|
||||
var call = ctx.templateSrv.setGrafanaVariable.firstCall;
|
||||
expect(call.args[0]).to.be('$__auto_interval_test');
|
||||
expect(call.args[1]).to.be('12h');
|
||||
it('should set $__auto_interval_test', () => {
|
||||
var call = ctx.templateSrv.setGrafanaVariable.mock.calls[0];
|
||||
expect(call[0]).toBe('$__auto_interval_test');
|
||||
expect(call[1]).toBe('12h');
|
||||
});
|
||||
|
||||
// updateAutoValue() gets called twice: once directly once via VariableSrv.validateVariableSelectionState()
|
||||
// So use lastCall instead of a specific call number
|
||||
it('should set $__auto_interval', function() {
|
||||
var call = ctx.templateSrv.setGrafanaVariable.lastCall;
|
||||
expect(call.args[0]).to.be('$__auto_interval');
|
||||
expect(call.args[1]).to.be('12h');
|
||||
it('should set $__auto_interval', () => {
|
||||
var call = ctx.templateSrv.setGrafanaVariable.mock.calls.pop();
|
||||
expect(call[0]).toBe('$__auto_interval');
|
||||
expect(call[1]).toBe('12h');
|
||||
});
|
||||
});
|
||||
|
||||
@@ -119,7 +136,7 @@ describe('VariableSrv', function() {
|
||||
// Query variable update
|
||||
//
|
||||
describeUpdateVariable('query variable with empty current object and refresh', function(scenario) {
|
||||
scenario.setup(function() {
|
||||
scenario.setup(() => {
|
||||
scenario.variableModel = {
|
||||
type: 'query',
|
||||
query: '',
|
||||
@@ -129,16 +146,16 @@ describe('VariableSrv', function() {
|
||||
scenario.queryResult = [{ text: 'backend1' }, { text: 'backend2' }];
|
||||
});
|
||||
|
||||
it('should set current value to first option', function() {
|
||||
expect(scenario.variable.options.length).to.be(2);
|
||||
expect(scenario.variable.current.value).to.be('backend1');
|
||||
it('should set current value to first option', () => {
|
||||
expect(scenario.variable.options.length).toBe(2);
|
||||
expect(scenario.variable.current.value).toBe('backend1');
|
||||
});
|
||||
});
|
||||
|
||||
describeUpdateVariable(
|
||||
'query variable with multi select and new options does not contain some selected values',
|
||||
function(scenario) {
|
||||
scenario.setup(function() {
|
||||
scenario.setup(() => {
|
||||
scenario.variableModel = {
|
||||
type: 'query',
|
||||
query: '',
|
||||
@@ -151,9 +168,9 @@ describe('VariableSrv', function() {
|
||||
scenario.queryResult = [{ text: 'val2' }, { text: 'val3' }];
|
||||
});
|
||||
|
||||
it('should update current value', function() {
|
||||
expect(scenario.variable.current.value).to.eql(['val2', 'val3']);
|
||||
expect(scenario.variable.current.text).to.eql('val2 + val3');
|
||||
it('should update current value', () => {
|
||||
expect(scenario.variable.current.value).toEqual(['val2', 'val3']);
|
||||
expect(scenario.variable.current.text).toEqual('val2 + val3');
|
||||
});
|
||||
}
|
||||
);
|
||||
@@ -161,7 +178,7 @@ describe('VariableSrv', function() {
|
||||
describeUpdateVariable(
|
||||
'query variable with multi select and new options does not contain any selected values',
|
||||
function(scenario) {
|
||||
scenario.setup(function() {
|
||||
scenario.setup(() => {
|
||||
scenario.variableModel = {
|
||||
type: 'query',
|
||||
query: '',
|
||||
@@ -174,15 +191,15 @@ describe('VariableSrv', function() {
|
||||
scenario.queryResult = [{ text: 'val5' }, { text: 'val6' }];
|
||||
});
|
||||
|
||||
it('should update current value with first one', function() {
|
||||
expect(scenario.variable.current.value).to.eql('val5');
|
||||
expect(scenario.variable.current.text).to.eql('val5');
|
||||
it('should update current value with first one', () => {
|
||||
expect(scenario.variable.current.value).toEqual('val5');
|
||||
expect(scenario.variable.current.text).toEqual('val5');
|
||||
});
|
||||
}
|
||||
);
|
||||
|
||||
describeUpdateVariable('query variable with multi select and $__all selected', function(scenario) {
|
||||
scenario.setup(function() {
|
||||
scenario.setup(() => {
|
||||
scenario.variableModel = {
|
||||
type: 'query',
|
||||
query: '',
|
||||
@@ -196,14 +213,14 @@ describe('VariableSrv', function() {
|
||||
scenario.queryResult = [{ text: 'val5' }, { text: 'val6' }];
|
||||
});
|
||||
|
||||
it('should keep current All value', function() {
|
||||
expect(scenario.variable.current.value).to.eql(['$__all']);
|
||||
expect(scenario.variable.current.text).to.eql('All');
|
||||
it('should keep current All value', () => {
|
||||
expect(scenario.variable.current.value).toEqual(['$__all']);
|
||||
expect(scenario.variable.current.text).toEqual('All');
|
||||
});
|
||||
});
|
||||
|
||||
describeUpdateVariable('query variable with numeric results', function(scenario) {
|
||||
scenario.setup(function() {
|
||||
scenario.setup(() => {
|
||||
scenario.variableModel = {
|
||||
type: 'query',
|
||||
query: '',
|
||||
@@ -213,45 +230,45 @@ describe('VariableSrv', function() {
|
||||
scenario.queryResult = [{ text: 12, value: 12 }];
|
||||
});
|
||||
|
||||
it('should set current value to first option', function() {
|
||||
expect(scenario.variable.current.value).to.be('12');
|
||||
expect(scenario.variable.options[0].value).to.be('12');
|
||||
expect(scenario.variable.options[0].text).to.be('12');
|
||||
it('should set current value to first option', () => {
|
||||
expect(scenario.variable.current.value).toBe('12');
|
||||
expect(scenario.variable.options[0].value).toBe('12');
|
||||
expect(scenario.variable.options[0].text).toBe('12');
|
||||
});
|
||||
});
|
||||
|
||||
describeUpdateVariable('basic query variable', function(scenario) {
|
||||
scenario.setup(function() {
|
||||
scenario.setup(() => {
|
||||
scenario.variableModel = { type: 'query', query: 'apps.*', name: 'test' };
|
||||
scenario.queryResult = [{ text: 'backend1' }, { text: 'backend2' }];
|
||||
});
|
||||
|
||||
it('should update options array', function() {
|
||||
expect(scenario.variable.options.length).to.be(2);
|
||||
expect(scenario.variable.options[0].text).to.be('backend1');
|
||||
expect(scenario.variable.options[0].value).to.be('backend1');
|
||||
expect(scenario.variable.options[1].value).to.be('backend2');
|
||||
it('should update options array', () => {
|
||||
expect(scenario.variable.options.length).toBe(2);
|
||||
expect(scenario.variable.options[0].text).toBe('backend1');
|
||||
expect(scenario.variable.options[0].value).toBe('backend1');
|
||||
expect(scenario.variable.options[1].value).toBe('backend2');
|
||||
});
|
||||
|
||||
it('should select first option as value', function() {
|
||||
expect(scenario.variable.current.value).to.be('backend1');
|
||||
it('should select first option as value', () => {
|
||||
expect(scenario.variable.current.value).toBe('backend1');
|
||||
});
|
||||
});
|
||||
|
||||
describeUpdateVariable('and existing value still exists in options', function(scenario) {
|
||||
scenario.setup(function() {
|
||||
scenario.setup(() => {
|
||||
scenario.variableModel = { type: 'query', query: 'apps.*', name: 'test' };
|
||||
scenario.variableModel.current = { value: 'backend2', text: 'backend2' };
|
||||
scenario.queryResult = [{ text: 'backend1' }, { text: 'backend2' }];
|
||||
});
|
||||
|
||||
it('should keep variable value', function() {
|
||||
expect(scenario.variable.current.text).to.be('backend2');
|
||||
it('should keep variable value', () => {
|
||||
expect(scenario.variable.current.text).toBe('backend2');
|
||||
});
|
||||
});
|
||||
|
||||
describeUpdateVariable('and regex pattern exists', function(scenario) {
|
||||
scenario.setup(function() {
|
||||
scenario.setup(() => {
|
||||
scenario.variableModel = { type: 'query', query: 'apps.*', name: 'test' };
|
||||
scenario.variableModel.regex = '/apps.*(backend_[0-9]+)/';
|
||||
scenario.queryResult = [
|
||||
@@ -260,13 +277,13 @@ describe('VariableSrv', function() {
|
||||
];
|
||||
});
|
||||
|
||||
it('should extract and use match group', function() {
|
||||
expect(scenario.variable.options[0].value).to.be('backend_01');
|
||||
it('should extract and use match group', () => {
|
||||
expect(scenario.variable.options[0].value).toBe('backend_01');
|
||||
});
|
||||
});
|
||||
|
||||
describeUpdateVariable('and regex pattern exists and no match', function(scenario) {
|
||||
scenario.setup(function() {
|
||||
scenario.setup(() => {
|
||||
scenario.variableModel = { type: 'query', query: 'apps.*', name: 'test' };
|
||||
scenario.variableModel.regex = '/apps.*(backendasd[0-9]+)/';
|
||||
scenario.queryResult = [
|
||||
@@ -275,14 +292,14 @@ describe('VariableSrv', function() {
|
||||
];
|
||||
});
|
||||
|
||||
it('should not add non matching items, None option should be added instead', function() {
|
||||
expect(scenario.variable.options.length).to.be(1);
|
||||
expect(scenario.variable.options[0].isNone).to.be(true);
|
||||
it('should not add non matching items, None option should be added instead', () => {
|
||||
expect(scenario.variable.options.length).toBe(1);
|
||||
expect(scenario.variable.options[0].isNone).toBe(true);
|
||||
});
|
||||
});
|
||||
|
||||
describeUpdateVariable('regex pattern without slashes', function(scenario) {
|
||||
scenario.setup(function() {
|
||||
scenario.setup(() => {
|
||||
scenario.variableModel = { type: 'query', query: 'apps.*', name: 'test' };
|
||||
scenario.variableModel.regex = 'backend_01';
|
||||
scenario.queryResult = [
|
||||
@@ -291,13 +308,13 @@ describe('VariableSrv', function() {
|
||||
];
|
||||
});
|
||||
|
||||
it('should return matches options', function() {
|
||||
expect(scenario.variable.options.length).to.be(1);
|
||||
it('should return matches options', () => {
|
||||
expect(scenario.variable.options.length).toBe(1);
|
||||
});
|
||||
});
|
||||
|
||||
describeUpdateVariable('regex pattern remove duplicates', function(scenario) {
|
||||
scenario.setup(function() {
|
||||
scenario.setup(() => {
|
||||
scenario.variableModel = { type: 'query', query: 'apps.*', name: 'test' };
|
||||
scenario.variableModel.regex = '/backend_01/';
|
||||
scenario.queryResult = [
|
||||
@@ -306,13 +323,13 @@ describe('VariableSrv', function() {
|
||||
];
|
||||
});
|
||||
|
||||
it('should return matches options', function() {
|
||||
expect(scenario.variable.options.length).to.be(1);
|
||||
it('should return matches options', () => {
|
||||
expect(scenario.variable.options.length).toBe(1);
|
||||
});
|
||||
});
|
||||
|
||||
describeUpdateVariable('with include All', function(scenario) {
|
||||
scenario.setup(function() {
|
||||
scenario.setup(() => {
|
||||
scenario.variableModel = {
|
||||
type: 'query',
|
||||
query: 'apps.*',
|
||||
@@ -322,14 +339,14 @@ describe('VariableSrv', function() {
|
||||
scenario.queryResult = [{ text: 'backend1' }, { text: 'backend2' }, { text: 'backend3' }];
|
||||
});
|
||||
|
||||
it('should add All option', function() {
|
||||
expect(scenario.variable.options[0].text).to.be('All');
|
||||
expect(scenario.variable.options[0].value).to.be('$__all');
|
||||
it('should add All option', () => {
|
||||
expect(scenario.variable.options[0].text).toBe('All');
|
||||
expect(scenario.variable.options[0].value).toBe('$__all');
|
||||
});
|
||||
});
|
||||
|
||||
describeUpdateVariable('with include all and custom value', function(scenario) {
|
||||
scenario.setup(function() {
|
||||
scenario.setup(() => {
|
||||
scenario.variableModel = {
|
||||
type: 'query',
|
||||
query: 'apps.*',
|
||||
@@ -340,13 +357,13 @@ describe('VariableSrv', function() {
|
||||
scenario.queryResult = [{ text: 'backend1' }, { text: 'backend2' }, { text: 'backend3' }];
|
||||
});
|
||||
|
||||
it('should add All option with custom value', function() {
|
||||
expect(scenario.variable.options[0].value).to.be('$__all');
|
||||
it('should add All option with custom value', () => {
|
||||
expect(scenario.variable.options[0].value).toBe('$__all');
|
||||
});
|
||||
});
|
||||
|
||||
describeUpdateVariable('without sort', function(scenario) {
|
||||
scenario.setup(function() {
|
||||
scenario.setup(() => {
|
||||
scenario.variableModel = {
|
||||
type: 'query',
|
||||
query: 'apps.*',
|
||||
@@ -356,15 +373,15 @@ describe('VariableSrv', function() {
|
||||
scenario.queryResult = [{ text: 'bbb2' }, { text: 'aaa10' }, { text: 'ccc3' }];
|
||||
});
|
||||
|
||||
it('should return options without sort', function() {
|
||||
expect(scenario.variable.options[0].text).to.be('bbb2');
|
||||
expect(scenario.variable.options[1].text).to.be('aaa10');
|
||||
expect(scenario.variable.options[2].text).to.be('ccc3');
|
||||
it('should return options without sort', () => {
|
||||
expect(scenario.variable.options[0].text).toBe('bbb2');
|
||||
expect(scenario.variable.options[1].text).toBe('aaa10');
|
||||
expect(scenario.variable.options[2].text).toBe('ccc3');
|
||||
});
|
||||
});
|
||||
|
||||
describeUpdateVariable('with alphabetical sort (asc)', function(scenario) {
|
||||
scenario.setup(function() {
|
||||
scenario.setup(() => {
|
||||
scenario.variableModel = {
|
||||
type: 'query',
|
||||
query: 'apps.*',
|
||||
@@ -374,15 +391,15 @@ describe('VariableSrv', function() {
|
||||
scenario.queryResult = [{ text: 'bbb2' }, { text: 'aaa10' }, { text: 'ccc3' }];
|
||||
});
|
||||
|
||||
it('should return options with alphabetical sort', function() {
|
||||
expect(scenario.variable.options[0].text).to.be('aaa10');
|
||||
expect(scenario.variable.options[1].text).to.be('bbb2');
|
||||
expect(scenario.variable.options[2].text).to.be('ccc3');
|
||||
it('should return options with alphabetical sort', () => {
|
||||
expect(scenario.variable.options[0].text).toBe('aaa10');
|
||||
expect(scenario.variable.options[1].text).toBe('bbb2');
|
||||
expect(scenario.variable.options[2].text).toBe('ccc3');
|
||||
});
|
||||
});
|
||||
|
||||
describeUpdateVariable('with alphabetical sort (desc)', function(scenario) {
|
||||
scenario.setup(function() {
|
||||
scenario.setup(() => {
|
||||
scenario.variableModel = {
|
||||
type: 'query',
|
||||
query: 'apps.*',
|
||||
@@ -392,15 +409,15 @@ describe('VariableSrv', function() {
|
||||
scenario.queryResult = [{ text: 'bbb2' }, { text: 'aaa10' }, { text: 'ccc3' }];
|
||||
});
|
||||
|
||||
it('should return options with alphabetical sort', function() {
|
||||
expect(scenario.variable.options[0].text).to.be('ccc3');
|
||||
expect(scenario.variable.options[1].text).to.be('bbb2');
|
||||
expect(scenario.variable.options[2].text).to.be('aaa10');
|
||||
it('should return options with alphabetical sort', () => {
|
||||
expect(scenario.variable.options[0].text).toBe('ccc3');
|
||||
expect(scenario.variable.options[1].text).toBe('bbb2');
|
||||
expect(scenario.variable.options[2].text).toBe('aaa10');
|
||||
});
|
||||
});
|
||||
|
||||
describeUpdateVariable('with numerical sort (asc)', function(scenario) {
|
||||
scenario.setup(function() {
|
||||
scenario.setup(() => {
|
||||
scenario.variableModel = {
|
||||
type: 'query',
|
||||
query: 'apps.*',
|
||||
@@ -410,15 +427,15 @@ describe('VariableSrv', function() {
|
||||
scenario.queryResult = [{ text: 'bbb2' }, { text: 'aaa10' }, { text: 'ccc3' }];
|
||||
});
|
||||
|
||||
it('should return options with numerical sort', function() {
|
||||
expect(scenario.variable.options[0].text).to.be('bbb2');
|
||||
expect(scenario.variable.options[1].text).to.be('ccc3');
|
||||
expect(scenario.variable.options[2].text).to.be('aaa10');
|
||||
it('should return options with numerical sort', () => {
|
||||
expect(scenario.variable.options[0].text).toBe('bbb2');
|
||||
expect(scenario.variable.options[1].text).toBe('ccc3');
|
||||
expect(scenario.variable.options[2].text).toBe('aaa10');
|
||||
});
|
||||
});
|
||||
|
||||
describeUpdateVariable('with numerical sort (desc)', function(scenario) {
|
||||
scenario.setup(function() {
|
||||
scenario.setup(() => {
|
||||
scenario.variableModel = {
|
||||
type: 'query',
|
||||
query: 'apps.*',
|
||||
@@ -428,10 +445,10 @@ describe('VariableSrv', function() {
|
||||
scenario.queryResult = [{ text: 'bbb2' }, { text: 'aaa10' }, { text: 'ccc3' }];
|
||||
});
|
||||
|
||||
it('should return options with numerical sort', function() {
|
||||
expect(scenario.variable.options[0].text).to.be('aaa10');
|
||||
expect(scenario.variable.options[1].text).to.be('ccc3');
|
||||
expect(scenario.variable.options[2].text).to.be('bbb2');
|
||||
it('should return options with numerical sort', () => {
|
||||
expect(scenario.variable.options[0].text).toBe('aaa10');
|
||||
expect(scenario.variable.options[1].text).toBe('ccc3');
|
||||
expect(scenario.variable.options[2].text).toBe('bbb2');
|
||||
});
|
||||
});
|
||||
|
||||
@@ -439,7 +456,7 @@ describe('VariableSrv', function() {
|
||||
// datasource variable update
|
||||
//
|
||||
describeUpdateVariable('datasource variable with regex filter', function(scenario) {
|
||||
scenario.setup(function() {
|
||||
scenario.setup(() => {
|
||||
scenario.variableModel = {
|
||||
type: 'datasource',
|
||||
query: 'graphite',
|
||||
@@ -455,14 +472,14 @@ describe('VariableSrv', function() {
|
||||
];
|
||||
});
|
||||
|
||||
it('should set only contain graphite ds and filtered using regex', function() {
|
||||
expect(scenario.variable.options.length).to.be(2);
|
||||
expect(scenario.variable.options[0].value).to.be('backend2_pee');
|
||||
expect(scenario.variable.options[1].value).to.be('backend4_pee');
|
||||
it('should set only contain graphite ds and filtered using regex', () => {
|
||||
expect(scenario.variable.options.length).toBe(2);
|
||||
expect(scenario.variable.options[0].value).toBe('backend2_pee');
|
||||
expect(scenario.variable.options[1].value).toBe('backend4_pee');
|
||||
});
|
||||
|
||||
it('should keep current value if available', function() {
|
||||
expect(scenario.variable.current.value).to.be('backend4_pee');
|
||||
it('should keep current value if available', () => {
|
||||
expect(scenario.variable.current.value).toBe('backend4_pee');
|
||||
});
|
||||
});
|
||||
|
||||
@@ -470,7 +487,7 @@ describe('VariableSrv', function() {
|
||||
// Custom variable update
|
||||
//
|
||||
describeUpdateVariable('update custom variable', function(scenario) {
|
||||
scenario.setup(function() {
|
||||
scenario.setup(() => {
|
||||
scenario.variableModel = {
|
||||
type: 'custom',
|
||||
query: 'hej, hop, asd',
|
||||
@@ -478,25 +495,25 @@ describe('VariableSrv', function() {
|
||||
};
|
||||
});
|
||||
|
||||
it('should update options array', function() {
|
||||
expect(scenario.variable.options.length).to.be(3);
|
||||
expect(scenario.variable.options[0].text).to.be('hej');
|
||||
expect(scenario.variable.options[1].value).to.be('hop');
|
||||
it('should update options array', () => {
|
||||
expect(scenario.variable.options.length).toBe(3);
|
||||
expect(scenario.variable.options[0].text).toBe('hej');
|
||||
expect(scenario.variable.options[1].value).toBe('hop');
|
||||
});
|
||||
});
|
||||
|
||||
describe('multiple interval variables with auto', function() {
|
||||
describe('multiple interval variables with auto', () => {
|
||||
var variable1, variable2;
|
||||
|
||||
beforeEach(function() {
|
||||
beforeEach(() => {
|
||||
var range = {
|
||||
from: moment(new Date())
|
||||
.subtract(7, 'days')
|
||||
.toDate(),
|
||||
to: new Date(),
|
||||
};
|
||||
ctx.timeSrv.timeRange = sinon.stub().returns(range);
|
||||
ctx.templateSrv.setGrafanaVariable = sinon.spy();
|
||||
ctx.timeSrv.timeRange = () => range;
|
||||
ctx.templateSrv.setGrafanaVariable = jest.fn();
|
||||
|
||||
var variableModel1 = {
|
||||
type: 'interval',
|
||||
@@ -520,38 +537,38 @@ describe('VariableSrv', function() {
|
||||
|
||||
ctx.variableSrv.updateOptions(variable1);
|
||||
ctx.variableSrv.updateOptions(variable2);
|
||||
ctx.$rootScope.$digest();
|
||||
// ctx.$rootScope.$digest();
|
||||
});
|
||||
|
||||
it('should update options array', function() {
|
||||
expect(variable1.options.length).to.be(5);
|
||||
expect(variable1.options[0].text).to.be('auto');
|
||||
expect(variable1.options[0].value).to.be('$__auto_interval_variable1');
|
||||
expect(variable2.options.length).to.be(4);
|
||||
expect(variable2.options[0].text).to.be('auto');
|
||||
expect(variable2.options[0].value).to.be('$__auto_interval_variable2');
|
||||
it('should update options array', () => {
|
||||
expect(variable1.options.length).toBe(5);
|
||||
expect(variable1.options[0].text).toBe('auto');
|
||||
expect(variable1.options[0].value).toBe('$__auto_interval_variable1');
|
||||
expect(variable2.options.length).toBe(4);
|
||||
expect(variable2.options[0].text).toBe('auto');
|
||||
expect(variable2.options[0].value).toBe('$__auto_interval_variable2');
|
||||
});
|
||||
|
||||
it('should correctly set $__auto_interval_variableX', function() {
|
||||
it('should correctly set $__auto_interval_variableX', () => {
|
||||
var variable1Set,
|
||||
variable2Set,
|
||||
legacySet,
|
||||
unknownSet = false;
|
||||
// updateAutoValue() gets called repeatedly: once directly once via VariableSrv.validateVariableSelectionState()
|
||||
// So check that all calls are valid rather than expect a specific number and/or ordering of calls
|
||||
for (var i = 0; i < ctx.templateSrv.setGrafanaVariable.callCount; i++) {
|
||||
var call = ctx.templateSrv.setGrafanaVariable.getCall(i);
|
||||
switch (call.args[0]) {
|
||||
for (var i = 0; i < ctx.templateSrv.setGrafanaVariable.mock.calls.length; i++) {
|
||||
var call = ctx.templateSrv.setGrafanaVariable.mock.calls[i];
|
||||
switch (call[0]) {
|
||||
case '$__auto_interval_variable1':
|
||||
expect(call.args[1]).to.be('12h');
|
||||
expect(call[1]).toBe('12h');
|
||||
variable1Set = true;
|
||||
break;
|
||||
case '$__auto_interval_variable2':
|
||||
expect(call.args[1]).to.be('10m');
|
||||
expect(call[1]).toBe('10m');
|
||||
variable2Set = true;
|
||||
break;
|
||||
case '$__auto_interval':
|
||||
expect(call.args[1]).to.match(/^(12h|10m)$/);
|
||||
expect(call[1]).toEqual(expect.stringMatching(/^(12h|10m)$/));
|
||||
legacySet = true;
|
||||
break;
|
||||
default:
|
||||
@@ -559,10 +576,25 @@ describe('VariableSrv', function() {
|
||||
break;
|
||||
}
|
||||
}
|
||||
expect(variable1Set).to.be.equal(true);
|
||||
expect(variable2Set).to.be.equal(true);
|
||||
expect(legacySet).to.be.equal(true);
|
||||
expect(unknownSet).to.be.equal(false);
|
||||
expect(variable1Set).toEqual(true);
|
||||
expect(variable2Set).toEqual(true);
|
||||
expect(legacySet).toEqual(true);
|
||||
expect(unknownSet).toEqual(false);
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
function getVarMockConstructor(variable, model, ctx) {
|
||||
switch (model.model.type) {
|
||||
case 'datasource':
|
||||
return new variable(model.model, ctx.datasourceSrv, ctx.variableSrv, ctx.templateSrv);
|
||||
case 'query':
|
||||
return new variable(model.model, ctx.datasourceSrv, ctx.templateSrv, ctx.variableSrv);
|
||||
case 'interval':
|
||||
return new variable(model.model, ctx.timeSrv, ctx.templateSrv, ctx.variableSrv);
|
||||
case 'custom':
|
||||
return new variable(model.model, ctx.variableSrv);
|
||||
default:
|
||||
return new variable(model.model);
|
||||
}
|
||||
}
|
||||
public/app/plugins/datasource/mssql/img/sql_server_logo.svg (new file, 115 lines, 22 KiB; diff suppressed because one or more lines are too long)
@@ -53,7 +53,9 @@ Macros:
- $__timeEpoch(column) -> DATEDIFF(second, '1970-01-01', column) AS time
- $__timeFilter(column) -> column BETWEEN '2017-04-21T05:01:17Z' AND '2017-04-21T05:01:17Z'
- $__unixEpochFilter(column) -> column >= 1492750877 AND column <= 1492750877
- $__timeGroup(column, '5m'[, fillvalue]) -> CAST(ROUND(DATEDIFF(second, '1970-01-01', column)/300.0, 0) as bigint)*300. Providing a <i>fillValue</i> of <i>NULL</i> or floating value will automatically fill empty series in timerange with that value.
- $__timeGroup(column, '5m'[, fillvalue]) -> CAST(ROUND(DATEDIFF(second, '1970-01-01', column)/300.0, 0) as bigint)*300.
by setting fillvalue grafana will fill in missing values according to the interval
fillvalue can be either a literal value, NULL or previous; previous will fill in the previous seen value or NULL if none has been seen yet
- $__timeGroupAlias(column, '5m'[, fillvalue]) -> CAST(ROUND(DATEDIFF(second, '1970-01-01', column)/300.0, 0) as bigint)*300 AS [time]

Example of group by and order by with $__timeGroup:

@@ -10,8 +10,8 @@
"url": "https://grafana.com"
},
"logos": {
"small": "",
"large": ""
"small": "img/sql_server_logo.svg",
"large": "img/sql_server_logo.svg"
}
},

@@ -53,7 +53,9 @@ Macros:
- $__timeEpoch(column) -> UNIX_TIMESTAMP(column) as time_sec
- $__timeFilter(column) -> column BETWEEN '2017-04-21T05:01:17Z' AND '2017-04-21T05:01:17Z'
- $__unixEpochFilter(column) -> time_unix_epoch > 1492750877 AND time_unix_epoch < 1492750877
- $__timeGroup(column,'5m') -> cast(cast(UNIX_TIMESTAMP(column)/(300) as signed)*300 as signed)
- $__timeGroup(column,'5m'[, fillvalue]) -> cast(cast(UNIX_TIMESTAMP(column)/(300) as signed)*300 as signed)
by setting fillvalue grafana will fill in missing values according to the interval
fillvalue can be either a literal value, NULL or previous; previous will fill in the previous seen value or NULL if none has been seen yet
- $__timeGroupAlias(column,'5m') -> cast(cast(UNIX_TIMESTAMP(column)/(300) as signed)*300 as signed) AS "time"

Example of group by and order by with $__timeGroup:

@@ -1,105 +0,0 @@
|
||||
import { describe, beforeEach, it, expect, angularMocks } from 'test/lib/common';
|
||||
import helpers from 'test/specs/helpers';
|
||||
import OpenTsDatasource from '../datasource';
|
||||
|
||||
describe('opentsdb', function() {
|
||||
var ctx = new helpers.ServiceTestContext();
|
||||
var instanceSettings = { url: '', jsonData: { tsdbVersion: 1 } };
|
||||
|
||||
beforeEach(angularMocks.module('grafana.core'));
|
||||
beforeEach(angularMocks.module('grafana.services'));
|
||||
beforeEach(ctx.providePhase(['backendSrv']));
|
||||
|
||||
beforeEach(
|
||||
angularMocks.inject(function($q, $rootScope, $httpBackend, $injector) {
|
||||
ctx.$q = $q;
|
||||
ctx.$httpBackend = $httpBackend;
|
||||
ctx.$rootScope = $rootScope;
|
||||
ctx.ds = $injector.instantiate(OpenTsDatasource, {
|
||||
instanceSettings: instanceSettings,
|
||||
});
|
||||
$httpBackend.when('GET', /\.html$/).respond('');
|
||||
})
|
||||
);
|
||||
|
||||
describe('When performing metricFindQuery', function() {
|
||||
var results;
|
||||
var requestOptions;
|
||||
|
||||
beforeEach(function() {
|
||||
ctx.backendSrv.datasourceRequest = function(options) {
|
||||
requestOptions = options;
|
||||
return ctx.$q.when({
|
||||
data: [{ target: 'prod1.count', datapoints: [[10, 1], [12, 1]] }],
|
||||
});
|
||||
};
|
||||
});
|
||||
|
||||
it('metrics() should generate api suggest query', function() {
|
||||
ctx.ds.metricFindQuery('metrics(pew)').then(function(data) {
|
||||
results = data;
|
||||
});
|
||||
ctx.$rootScope.$apply();
|
||||
expect(requestOptions.url).to.be('/api/suggest');
|
||||
expect(requestOptions.params.type).to.be('metrics');
|
||||
expect(requestOptions.params.q).to.be('pew');
|
||||
expect(results).not.to.be(null);
|
||||
});
|
||||
|
||||
it('tag_names(cpu) should generate lookup query', function() {
|
||||
ctx.ds.metricFindQuery('tag_names(cpu)').then(function(data) {
|
||||
results = data;
|
||||
});
|
||||
ctx.$rootScope.$apply();
|
||||
expect(requestOptions.url).to.be('/api/search/lookup');
|
||||
expect(requestOptions.params.m).to.be('cpu');
|
||||
});
|
||||
|
||||
it('tag_values(cpu, test) should generate lookup query', function() {
|
||||
ctx.ds.metricFindQuery('tag_values(cpu, hostname)').then(function(data) {
|
||||
results = data;
|
||||
});
|
||||
ctx.$rootScope.$apply();
|
||||
expect(requestOptions.url).to.be('/api/search/lookup');
|
||||
expect(requestOptions.params.m).to.be('cpu{hostname=*}');
|
||||
});
|
||||
|
||||
it('tag_values(cpu, test) should generate lookup query', function() {
|
||||
ctx.ds.metricFindQuery('tag_values(cpu, hostname, env=$env)').then(function(data) {
|
||||
results = data;
|
||||
});
|
||||
ctx.$rootScope.$apply();
|
||||
expect(requestOptions.url).to.be('/api/search/lookup');
|
||||
expect(requestOptions.params.m).to.be('cpu{hostname=*,env=$env}');
|
||||
});
|
||||
|
||||
it('tag_values(cpu, test) should generate lookup query', function() {
|
||||
ctx.ds.metricFindQuery('tag_values(cpu, hostname, env=$env, region=$region)').then(function(data) {
|
||||
results = data;
|
||||
});
|
||||
ctx.$rootScope.$apply();
|
||||
expect(requestOptions.url).to.be('/api/search/lookup');
|
||||
expect(requestOptions.params.m).to.be('cpu{hostname=*,env=$env,region=$region}');
|
||||
});
|
||||
|
||||
it('suggest_tagk() should generate api suggest query', function() {
|
||||
ctx.ds.metricFindQuery('suggest_tagk(foo)').then(function(data) {
|
||||
results = data;
|
||||
});
|
||||
ctx.$rootScope.$apply();
|
||||
expect(requestOptions.url).to.be('/api/suggest');
|
||||
expect(requestOptions.params.type).to.be('tagk');
|
||||
expect(requestOptions.params.q).to.be('foo');
|
||||
});
|
||||
|
||||
it('suggest_tagv() should generate api suggest query', function() {
|
||||
ctx.ds.metricFindQuery('suggest_tagv(bar)').then(function(data) {
|
||||
results = data;
|
||||
});
|
||||
ctx.$rootScope.$apply();
|
||||
expect(requestOptions.url).to.be('/api/suggest');
|
||||
expect(requestOptions.params.type).to.be('tagv');
|
||||
expect(requestOptions.params.q).to.be('bar');
|
||||
});
|
||||
});
|
||||
});
|
||||
@@ -0,0 +1,91 @@
|
||||
import OpenTsDatasource from '../datasource';
|
||||
import $q from 'q';
|
||||
|
||||
describe('opentsdb', () => {
|
||||
let ctx = <any>{
|
||||
backendSrv: {},
|
||||
ds: {},
|
||||
templateSrv: {
|
||||
replace: str => str,
|
||||
},
|
||||
};
|
||||
let instanceSettings = { url: '', jsonData: { tsdbVersion: 1 } };
|
||||
|
||||
beforeEach(() => {
|
||||
ctx.ctrl = new OpenTsDatasource(instanceSettings, $q, ctx.backendSrv, ctx.templateSrv);
|
||||
});
|
||||
|
||||
describe('When performing metricFindQuery', () => {
|
||||
var results;
|
||||
var requestOptions;
|
||||
|
||||
beforeEach(async () => {
|
||||
ctx.backendSrv.datasourceRequest = await function(options) {
|
||||
requestOptions = options;
|
||||
return Promise.resolve({
|
||||
data: [{ target: 'prod1.count', datapoints: [[10, 1], [12, 1]] }],
|
||||
});
|
||||
};
|
||||
});
|
||||
|
||||
it('metrics() should generate api suggest query', () => {
|
||||
ctx.ctrl.metricFindQuery('metrics(pew)').then(function(data) {
|
||||
results = data;
|
||||
});
|
||||
expect(requestOptions.url).toBe('/api/suggest');
|
||||
expect(requestOptions.params.type).toBe('metrics');
|
||||
expect(requestOptions.params.q).toBe('pew');
|
||||
expect(results).not.toBe(null);
|
||||
});
|
||||
|
||||
it('tag_names(cpu) should generate lookup query', () => {
|
||||
ctx.ctrl.metricFindQuery('tag_names(cpu)').then(function(data) {
|
||||
results = data;
|
||||
});
|
||||
expect(requestOptions.url).toBe('/api/search/lookup');
|
||||
expect(requestOptions.params.m).toBe('cpu');
|
||||
});
|
||||
|
||||
it('tag_values(cpu, test) should generate lookup query', () => {
|
||||
ctx.ctrl.metricFindQuery('tag_values(cpu, hostname)').then(function(data) {
|
||||
results = data;
|
||||
});
|
||||
expect(requestOptions.url).toBe('/api/search/lookup');
|
||||
expect(requestOptions.params.m).toBe('cpu{hostname=*}');
|
||||
});
|
||||
|
||||
it('tag_values(cpu, test) should generate lookup query', () => {
|
||||
ctx.ctrl.metricFindQuery('tag_values(cpu, hostname, env=$env)').then(function(data) {
|
||||
results = data;
|
||||
});
|
||||
expect(requestOptions.url).toBe('/api/search/lookup');
|
||||
expect(requestOptions.params.m).toBe('cpu{hostname=*,env=$env}');
|
||||
});
|
||||
|
||||
it('tag_values(cpu, test) should generate lookup query', () => {
|
||||
ctx.ctrl.metricFindQuery('tag_values(cpu, hostname, env=$env, region=$region)').then(function(data) {
|
||||
results = data;
|
||||
});
|
||||
expect(requestOptions.url).toBe('/api/search/lookup');
|
||||
expect(requestOptions.params.m).toBe('cpu{hostname=*,env=$env,region=$region}');
|
||||
});
|
||||
|
||||
it('suggest_tagk() should generate api suggest query', () => {
|
||||
ctx.ctrl.metricFindQuery('suggest_tagk(foo)').then(function(data) {
|
||||
results = data;
|
||||
});
|
||||
expect(requestOptions.url).toBe('/api/suggest');
|
||||
expect(requestOptions.params.type).toBe('tagk');
|
||||
expect(requestOptions.params.q).toBe('foo');
|
||||
});
|
||||
|
||||
it('suggest_tagv() should generate api suggest query', () => {
|
||||
ctx.ctrl.metricFindQuery('suggest_tagv(bar)').then(function(data) {
|
||||
results = data;
|
||||
});
|
||||
expect(requestOptions.url).toBe('/api/suggest');
|
||||
expect(requestOptions.params.type).toBe('tagv');
|
||||
expect(requestOptions.params.q).toBe('bar');
|
||||
});
|
||||
});
|
||||
});
|
||||
@@ -1,113 +0,0 @@
import { describe, beforeEach, it, sinon, expect, angularMocks } from 'test/lib/common';
import helpers from 'test/specs/helpers';
import { OpenTsQueryCtrl } from '../query_ctrl';

describe('OpenTsQueryCtrl', function() {
var ctx = new helpers.ControllerTestContext();

beforeEach(angularMocks.module('grafana.core'));
beforeEach(angularMocks.module('grafana.services'));
beforeEach(
angularMocks.module(function($compileProvider) {
$compileProvider.preAssignBindingsEnabled(true);
})
);

beforeEach(ctx.providePhase(['backendSrv', 'templateSrv']));

beforeEach(ctx.providePhase());
beforeEach(
angularMocks.inject(($rootScope, $controller, $q) => {
ctx.$q = $q;
ctx.scope = $rootScope.$new();
ctx.target = { target: '' };
ctx.panelCtrl = {
panel: {
targets: [ctx.target],
},
};
ctx.panelCtrl.refresh = sinon.spy();
ctx.datasource.getAggregators = sinon.stub().returns(ctx.$q.when([]));
ctx.datasource.getFilterTypes = sinon.stub().returns(ctx.$q.when([]));

ctx.ctrl = $controller(
OpenTsQueryCtrl,
{ $scope: ctx.scope },
{
panelCtrl: ctx.panelCtrl,
datasource: ctx.datasource,
target: ctx.target,
}
);
ctx.scope.$digest();
})
);

describe('init query_ctrl variables', function() {
it('filter types should be initialized', function() {
expect(ctx.ctrl.filterTypes.length).to.be(7);
});

it('aggregators should be initialized', function() {
expect(ctx.ctrl.aggregators.length).to.be(8);
});

it('fill policy options should be initialized', function() {
expect(ctx.ctrl.fillPolicies.length).to.be(4);
});
});

describe('when adding filters and tags', function() {
it('addTagMode should be false when closed', function() {
ctx.ctrl.addTagMode = true;
ctx.ctrl.closeAddTagMode();
expect(ctx.ctrl.addTagMode).to.be(false);
});

it('addFilterMode should be false when closed', function() {
ctx.ctrl.addFilterMode = true;
ctx.ctrl.closeAddFilterMode();
expect(ctx.ctrl.addFilterMode).to.be(false);
});

it('removing a tag from the tags list', function() {
ctx.ctrl.target.tags = { tagk: 'tag_key', tagk2: 'tag_value2' };
ctx.ctrl.removeTag('tagk');
expect(Object.keys(ctx.ctrl.target.tags).length).to.be(1);
});

it('removing a filter from the filters list', function() {
ctx.ctrl.target.filters = [
{
tagk: 'tag_key',
filter: 'tag_value2',
type: 'wildcard',
groupBy: true,
},
];
ctx.ctrl.removeFilter(0);
expect(ctx.ctrl.target.filters.length).to.be(0);
});

it('adding a filter when tags exist should generate error', function() {
ctx.ctrl.target.tags = { tagk: 'tag_key', tagk2: 'tag_value2' };
ctx.ctrl.addFilter();
expect(ctx.ctrl.errors.filters).to.be(
'Please remove tags to use filters, tags and filters are mutually exclusive.'
);
});

it('adding a tag when filters exist should generate error', function() {
ctx.ctrl.target.filters = [
{
tagk: 'tag_key',
filter: 'tag_value2',
type: 'wildcard',
groupBy: true,
},
];
ctx.ctrl.addTag();
expect(ctx.ctrl.errors.tags).to.be('Please remove filters to use tags, tags and filters are mutually exclusive.');
});
});
});
@@ -0,0 +1,93 @@
import { OpenTsQueryCtrl } from '../query_ctrl';

describe('OpenTsQueryCtrl', () => {
var ctx = <any>{
target: { target: '' },
datasource: {
tsdbVersion: '',
getAggregators: () => Promise.resolve([]),
getFilterTypes: () => Promise.resolve([]),
},
};

ctx.panelCtrl = {
panel: {
targets: [ctx.target],
},
refresh: () => {},
};

OpenTsQueryCtrl.prototype = Object.assign(OpenTsQueryCtrl.prototype, ctx);

beforeEach(() => {
ctx.ctrl = new OpenTsQueryCtrl({}, {});
});

describe('init query_ctrl variables', () => {
it('filter types should be initialized', () => {
expect(ctx.ctrl.filterTypes.length).toBe(7);
});

it('aggregators should be initialized', () => {
expect(ctx.ctrl.aggregators.length).toBe(8);
});

it('fill policy options should be initialized', () => {
expect(ctx.ctrl.fillPolicies.length).toBe(4);
});
});

describe('when adding filters and tags', () => {
it('addTagMode should be false when closed', () => {
ctx.ctrl.addTagMode = true;
ctx.ctrl.closeAddTagMode();
expect(ctx.ctrl.addTagMode).toBe(false);
});

it('addFilterMode should be false when closed', () => {
ctx.ctrl.addFilterMode = true;
ctx.ctrl.closeAddFilterMode();
expect(ctx.ctrl.addFilterMode).toBe(false);
});

it('removing a tag from the tags list', () => {
ctx.ctrl.target.tags = { tagk: 'tag_key', tagk2: 'tag_value2' };
ctx.ctrl.removeTag('tagk');
expect(Object.keys(ctx.ctrl.target.tags).length).toBe(1);
});

it('removing a filter from the filters list', () => {
ctx.ctrl.target.filters = [
{
tagk: 'tag_key',
filter: 'tag_value2',
type: 'wildcard',
groupBy: true,
},
];
ctx.ctrl.removeFilter(0);
expect(ctx.ctrl.target.filters.length).toBe(0);
});

it('adding a filter when tags exist should generate error', () => {
ctx.ctrl.target.tags = { tagk: 'tag_key', tagk2: 'tag_value2' };
ctx.ctrl.addFilter();
expect(ctx.ctrl.errors.filters).toBe(
'Please remove tags to use filters, tags and filters are mutually exclusive.'
);
});

it('adding a tag when filters exist should generate error', () => {
ctx.ctrl.target.filters = [
{
tagk: 'tag_key',
filter: 'tag_value2',
type: 'wildcard',
groupBy: true,
},
];
ctx.ctrl.addTag();
expect(ctx.ctrl.errors.tags).toBe('Please remove filters to use tags, tags and filters are mutually exclusive.');
});
});
});
@@ -151,7 +151,9 @@ Macros:
- $__timeEpoch -> extract(epoch from column) as "time"
- $__timeFilter(column) -> column BETWEEN '2017-04-21T05:01:17Z' AND '2017-04-21T05:01:17Z'
- $__unixEpochFilter(column) -> column >= 1492750877 AND column <= 1492750877
- $__timeGroup(column,'5m') -> (extract(epoch from column)/300)::bigint*300
- $__timeGroup(column,'5m'[, fillvalue]) -> (extract(epoch from column)/300)::bigint*300
  By setting fillvalue, Grafana will fill in missing values according to the interval.
  fillvalue can be either a literal value, NULL or previous; previous will fill in the previously seen value, or NULL if none has been seen yet.
- $__timeGroupAlias(column,'5m') -> (extract(epoch from column)/300)::bigint*300 AS "time"

Example of group by and order by with $__timeGroup:
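The hunk cuts off before the example itself, so here is a hedged sketch instead (the metrics table and value column are made up for illustration, not taken from the docs), showing a raw panel query that uses $__timeGroup with a fill value and roughly what it expands to:

// Hypothetical raw SQL a Postgres panel might send; table and column names are illustrative.
const rawSql = `
  SELECT
    $__timeGroup(time_col, '5m', NULL) AS time,
    avg(value) AS value
  FROM metrics
  WHERE $__timeFilter(time_col)
  GROUP BY 1
  ORDER BY 1
`;
// After macro interpolation the datasource runs roughly:
//   SELECT (extract(epoch from time_col)/300)::bigint*300 AS time, avg(value) AS value
//   FROM metrics WHERE time_col BETWEEN '...' AND '...' GROUP BY 1 ORDER BY 1
// and, because a fill value (NULL) was given, Grafana fills missing 5m intervals with NULL.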
@@ -16,6 +16,72 @@ export function alignRange(start, end, step) {
};
}

const keywords = 'by|without|on|ignoring|group_left|group_right';

// Duplicate from mode-prometheus.js, which can't be used in tests due to global ace not being loaded.
const builtInWords = [
keywords,
'count|count_values|min|max|avg|sum|stddev|stdvar|bottomk|topk|quantile',
'true|false|null|__name__|job',
'abs|absent|ceil|changes|clamp_max|clamp_min|count_scalar|day_of_month|day_of_week|days_in_month|delta|deriv',
'drop_common_labels|exp|floor|histogram_quantile|holt_winters|hour|idelta|increase|irate|label_replace|ln|log2',
'log10|minute|month|predict_linear|rate|resets|round|scalar|sort|sort_desc|sqrt|time|vector|year|avg_over_time',
'min_over_time|max_over_time|sum_over_time|count_over_time|quantile_over_time|stddev_over_time|stdvar_over_time',
]
.join('|')
.split('|');

// addLabelToQuery('foo', 'bar', 'baz') => 'foo{bar="baz"}'
export function addLabelToQuery(query: string, key: string, value: string): string {
if (!key || !value) {
throw new Error('Need label to add to query.');
}

// Add empty selector to bare metric name
let previousWord;
query = query.replace(/(\w+)\b(?![\({=",])/g, (match, word, offset) => {
// Check if inside a selector
const nextSelectorStart = query.slice(offset).indexOf('{');
const nextSelectorEnd = query.slice(offset).indexOf('}');
const insideSelector = nextSelectorEnd > -1 && (nextSelectorStart === -1 || nextSelectorStart > nextSelectorEnd);
// Handle "sum by (key) (metric)"
const previousWordIsKeyWord = previousWord && keywords.split('|').indexOf(previousWord) > -1;
previousWord = word;
if (!insideSelector && !previousWordIsKeyWord && builtInWords.indexOf(word) === -1) {
return `${word}{}`;
}
return word;
});

// Adding label to existing selectors
const selectorRegexp = /{([^{]*)}/g;
let match = null;
const parts = [];
let lastIndex = 0;
let suffix = '';
while ((match = selectorRegexp.exec(query))) {
const prefix = query.slice(lastIndex, match.index);
const selectorParts = match[1].split(',');
const labels = selectorParts.reduce((acc, label) => {
const labelParts = label.split('=');
if (labelParts.length === 2) {
acc[labelParts[0]] = labelParts[1];
}
return acc;
}, {});
labels[key] = `"${value}"`;
const selector = Object.keys(labels)
.sort()
.map(key => `${key}=${labels[key]}`)
.join(',');
lastIndex = match.index + match[1].length + 2;
suffix = query.slice(match.index + match[0].length);
parts.push(prefix, '{', selector, '}');
}
parts.push(suffix);
return parts.join('');
}

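A minimal usage sketch of this helper (the import path is assumed; the expected outputs are the same ones asserted in the spec further down in this diff):

// Assumed import path; in Grafana this helper lives in the Prometheus datasource module.
import { addLabelToQuery } from './datasource';

// Bare metric names first get an empty selector, then the label is merged in
// and each selector is re-emitted with its labels sorted alphabetically.
addLabelToQuery('foo + foo', 'bar', 'baz'); // 'foo{bar="baz"} + foo{bar="baz"}'
addLabelToQuery('foo{x="yy"} + metric', 'bar', 'baz'); // 'foo{bar="baz",x="yy"} + metric{bar="baz"}'
addLabelToQuery('sum by (xx) (foo)', 'bar', 'baz'); // 'sum by (xx) (foo{bar="baz"})'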
export function prometheusRegularEscape(value) {
if (typeof value === 'string') {
return value.replace(/'/g, "\\\\'");
@@ -384,6 +450,14 @@ export class PrometheusDatasource {
return state;
}

modifyQuery(query: string, options: any): string {
const { addFilter } = options;
if (addFilter) {
return addLabelToQuery(query, addFilter.key, addFilter.value);
}
return query;
}

getPrometheusTime(date, roundUp) {
if (_.isString(date)) {
date = dateMath.parse(date, roundUp);

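A hedged sketch of how a caller might drive the new modifyQuery hook; the way the datasource instance is obtained here is illustrative only:

// 'ds' stands in for an already-constructed PrometheusDatasource instance.
declare const ds: PrometheusDatasource;

// With an addFilter option the query gains the label via addLabelToQuery...
const next = ds.modifyQuery('foo{x="yy"}', { addFilter: { key: 'bar', value: 'baz' } });
// next === 'foo{bar="baz",x="yy"}'

// ...and without it the query is returned unchanged.
const same = ds.modifyQuery('foo{x="yy"}', {});
// same === 'foo{x="yy"}'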
@@ -86,7 +86,7 @@ export class ResultTransformer {
table.columns.push({ text: 'Time', type: 'time' });
_.each(sortedLabels, function(label, labelIndex) {
metricLabels[label] = labelIndex + 1;
table.columns.push({ text: label });
table.columns.push({ text: label, filterable: !label.startsWith('__') });
});
let valueText = resultCount > 1 ? `Value #${refId}` : 'Value';
table.columns.push({ text: valueText });

@@ -1,7 +1,14 @@
import _ from 'lodash';
import moment from 'moment';
import q from 'q';
import { alignRange, PrometheusDatasource, prometheusSpecialRegexEscape, prometheusRegularEscape } from '../datasource';
import {
alignRange,
PrometheusDatasource,
prometheusSpecialRegexEscape,
prometheusRegularEscape,
addLabelToQuery,
} from '../datasource';

jest.mock('../metric_find_query');

describe('PrometheusDatasource', () => {
@@ -245,6 +252,24 @@ describe('PrometheusDatasource', () => {
expect(intervalMs).toEqual({ text: 15000, value: 15000 });
});
});

describe('addLabelToQuery()', () => {
expect(() => {
addLabelToQuery('foo', '', '');
}).toThrow();
expect(addLabelToQuery('foo + foo', 'bar', 'baz')).toBe('foo{bar="baz"} + foo{bar="baz"}');
expect(addLabelToQuery('foo{}', 'bar', 'baz')).toBe('foo{bar="baz"}');
expect(addLabelToQuery('foo{x="yy"}', 'bar', 'baz')).toBe('foo{bar="baz",x="yy"}');
expect(addLabelToQuery('foo{x="yy"} + metric', 'bar', 'baz')).toBe('foo{bar="baz",x="yy"} + metric{bar="baz"}');
expect(addLabelToQuery('avg(foo) + sum(xx_yy)', 'bar', 'baz')).toBe('avg(foo{bar="baz"}) + sum(xx_yy{bar="baz"})');
expect(addLabelToQuery('foo{x="yy"} * metric{y="zz",a="bb"} * metric2', 'bar', 'baz')).toBe(
'foo{bar="baz",x="yy"} * metric{a="bb",bar="baz",y="zz"} * metric2{bar="baz"}'
);
expect(addLabelToQuery('sum by (xx) (foo)', 'bar', 'baz')).toBe('sum by (xx) (foo{bar="baz"})');
expect(addLabelToQuery('foo{instance="my-host.com:9100"}', 'bar', 'baz')).toBe(
'foo{bar="baz",instance="my-host.com:9100"}'
);
});
});

const SECOND = 1000;

@@ -39,7 +39,7 @@ describe('Prometheus Result Transformer', () => {
[1443454528000, 'test', '', 'testjob', 3846],
[1443454529000, 'test', 'localhost:8080', 'otherjob', 3847],
]);
expect(table.columns).toEqual([
expect(table.columns).toMatchObject([
{ text: 'Time', type: 'time' },
{ text: '__name__' },
{ text: 'instance' },
@@ -51,7 +51,7 @@ describe('Prometheus Result Transformer', () => {
it('should column title include refId if response count is more than 2', () => {
var table = ctx.resultTransformer.transformMetricDataToTable(response.data.result, 2, 'B');
expect(table.type).toBe('table');
expect(table.columns).toEqual([
expect(table.columns).toMatchObject([
{ text: 'Time', type: 'time' },
{ text: '__name__' },
{ text: 'instance' },
@@ -79,7 +79,7 @@ describe('Prometheus Result Transformer', () => {
var table = ctx.resultTransformer.transformMetricDataToTable(response.data.result);
expect(table.type).toBe('table');
expect(table.rows).toEqual([[1443454528000, 'test', 'testjob', 3846]]);
expect(table.columns).toEqual([
expect(table.columns).toMatchObject([
{ text: 'Time', type: 'time' },
{ text: '__name__' },
{ text: 'job' },

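These assertions switch from toEqual to toMatchObject because the transformer now adds a filterable flag to label columns; toMatchObject only checks the listed properties, so the extra flag no longer fails the test. A tiny illustration of the difference:

// toEqual demands an exact structural match; toMatchObject accepts a subset.
const column = { text: 'instance', filterable: true };

expect(column).toMatchObject({ text: 'instance' }); // passes
// expect(column).toEqual({ text: 'instance' });    // would fail because of the extra 'filterable' key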
@@ -38,9 +38,10 @@ export class LoadDashboardCtrl {
}
}

if ($routeParams.keepRows) {
result.meta.keepRows = true;
if ($routeParams.autofitpanels) {
result.meta.autofitpanels = true;
}

$scope.initDashboard(result, $scope);
});
}

@@ -1,6 +1,7 @@
// vendor
@import '../vendor/css/timepicker.css';
@import '../vendor/css/spectrum.css';
@import '../vendor/css/rc-cascader.scss';

// MIXINS
@import 'mixins/mixins';

@@ -1,5 +1,5 @@
.dashboard-container {
padding: $dashboard-padding;
padding: $dashboard-padding $dashboard-padding 0 $dashboard-padding;
width: 100%;
min-height: 100%;
}

@@ -47,6 +47,14 @@
background-color: $btn-active-bg;
}

.navbar-button--no-icon {
line-height: 18px;
}

.result-options {
margin-top: 2 * $panel-margin;
}

.elapsed-time {
position: absolute;
left: 0;
@@ -80,6 +88,10 @@
.relative {
position: relative;
}

.link {
text-decoration: underline;
}
}

.explore + .explore {
@@ -95,7 +107,12 @@
}

.query-row-tools {
width: 4rem;
width: 6rem;
}

.query-row-field {
margin-right: 3px;
width: 100%;
}

.explore {
@@ -134,3 +151,11 @@
}
}
}

// Prometheus-specifics, to be extracted to datasource soon

.explore {
.prom-query-field {
display: flex;
}
}

@@ -1,14 +0,0 @@
import {describe, it, expect} from 'test/lib/common';

import {GrafanaApp} from 'app/app';

describe('GrafanaApp', () => {

var app = new GrafanaApp();

it('can call inits', () => {
expect(app).to.not.be(null);
});
});

160 public/vendor/css/rc-cascader.scss vendored Normal file
@@ -0,0 +1,160 @@
.rc-cascader {
font-size: 12px;
}
.rc-cascader-menus {
font-size: 12px;
overflow: hidden;
background: $panel-bg;
position: absolute;
border: $panel-border;
border-radius: $border-radius;
box-shadow: $typeahead-shadow;
white-space: nowrap;
}
.rc-cascader-menus-hidden {
display: none;
}
.rc-cascader-menus.slide-up-enter,
.rc-cascader-menus.slide-up-appear {
animation-duration: .3s;
animation-fill-mode: both;
transform-origin: 0 0;
opacity: 0;
animation-timing-function: cubic-bezier(0.08, 0.82, 0.17, 1);
animation-play-state: paused;
}
.rc-cascader-menus.slide-up-leave {
animation-duration: .3s;
animation-fill-mode: both;
transform-origin: 0 0;
opacity: 1;
animation-timing-function: cubic-bezier(0.6, 0.04, 0.98, 0.34);
animation-play-state: paused;
}
.rc-cascader-menus.slide-up-enter.slide-up-enter-active.rc-cascader-menus-placement-bottomLeft,
.rc-cascader-menus.slide-up-appear.slide-up-appear-active.rc-cascader-menus-placement-bottomLeft {
animation-name: SlideUpIn;
animation-play-state: running;
}
.rc-cascader-menus.slide-up-enter.slide-up-enter-active.rc-cascader-menus-placement-topLeft,
.rc-cascader-menus.slide-up-appear.slide-up-appear-active.rc-cascader-menus-placement-topLeft {
animation-name: SlideDownIn;
animation-play-state: running;
}
.rc-cascader-menus.slide-up-leave.slide-up-leave-active.rc-cascader-menus-placement-bottomLeft {
animation-name: SlideUpOut;
animation-play-state: running;
}
.rc-cascader-menus.slide-up-leave.slide-up-leave-active.rc-cascader-menus-placement-topLeft {
animation-name: SlideDownOut;
animation-play-state: running;
}
.rc-cascader-menu {
display: inline-block;
/* width: 100px; */
max-width: 50vw;
height: 192px;
list-style: none;
margin: 0;
padding: 0;
border-right: $panel-border;
overflow: auto;
}
.rc-cascader-menu:last-child {
border-right: 0;
}
.rc-cascader-menu-item {
height: 32px;
line-height: 32px;
padding: 0 16px;
cursor: pointer;
white-space: nowrap;
overflow: hidden;
text-overflow: ellipsis;
transition: all 0.3s ease;
position: relative;
}
.rc-cascader-menu-item:hover {
background: $typeahead-selected-bg;
}
.rc-cascader-menu-item-disabled {
cursor: not-allowed;
color: $text-color-weak;
}
.rc-cascader-menu-item-disabled:hover {
background: transparent;
}
.rc-cascader-menu-item-loading:after {
position: absolute;
right: 12px;
content: 'loading';
color: $text-color-weak;
font-style: italic;
}
.rc-cascader-menu-item-active {
color: $typeahead-selected-color;
background: $typeahead-selected-bg;
}
.rc-cascader-menu-item-active:hover {
color: $typeahead-selected-color;
background: $typeahead-selected-bg;
}
.rc-cascader-menu-item-expand {
position: relative;
}
.rc-cascader-menu-item-expand:after {
content: '>';
font-size: 12px;
color: $text-color-weak;
position: absolute;
right: 16px;
line-height: 32px;
}
@keyframes SlideUpIn {
0% {
opacity: 0;
transform-origin: 0% 0%;
transform: scaleY(0.8);
}
100% {
opacity: 1;
transform-origin: 0% 0%;
transform: scaleY(1);
}
}
@keyframes SlideUpOut {
0% {
opacity: 1;
transform-origin: 0% 0%;
transform: scaleY(1);
}
100% {
opacity: 0;
transform-origin: 0% 0%;
transform: scaleY(0.8);
}
}
@keyframes SlideDownIn {
0% {
opacity: 0;
transform-origin: 0% 100%;
transform: scaleY(0.8);
}
100% {
opacity: 1;
transform-origin: 0% 100%;
transform: scaleY(1);
}
}
@keyframes SlideDownOut {
0% {
opacity: 1;
transform-origin: 0% 100%;
transform: scaleY(1);
}
100% {
opacity: 0;
transform-origin: 0% 100%;
transform: scaleY(0.8);
}
}

@@ -14,12 +14,14 @@ echo "current dir: $(pwd)"

if [ "$CIRCLE_TAG" != "" ]; then
echo "Building releases from tag $CIRCLE_TAG"
CC=${CCX64} go run build.go -includeBuildNumber=false build
OPT="-includeBuildNumber=false"
else
echo "Building incremental build for $CIRCLE_BRANCH"
CC=${CCX64} go run build.go -buildNumber=${CIRCLE_BUILD_NUM} build
OPT="-buildNumber=${CIRCLE_BUILD_NUM}"
fi

CC=${CCX64} go run build.go ${OPT} build

yarn install --pure-lockfile --no-progress

echo "current dir: $(pwd)"
@@ -28,14 +30,8 @@ if [ -d "dist" ]; then
rm -rf dist
fi

if [ "$CIRCLE_TAG" != "" ]; then
echo "Building frontend from tag $CIRCLE_TAG"
go run build.go -includeBuildNumber=false build-frontend
echo "Packaging a release from tag $CIRCLE_TAG"
go run build.go -goos linux -pkg-arch amd64 -includeBuildNumber=false package-only latest
else
echo "Building frontend for $CIRCLE_BRANCH"
go run build.go -buildNumber=${CIRCLE_BUILD_NUM} build-frontend
echo "Packaging incremental build for $CIRCLE_BRANCH"
go run build.go -goos linux -pkg-arch amd64 -buildNumber=${CIRCLE_BUILD_NUM} package-only latest
fi
echo "Building frontend"
go run build.go ${OPT} build-frontend

echo "Packaging"
go run build.go -goos linux -pkg-arch amd64 ${OPT} package-only latest

123 yarn.lock
@@ -478,6 +478,12 @@ acorn@~2.6.4:
version "2.6.4"
resolved "https://registry.yarnpkg.com/acorn/-/acorn-2.6.4.tgz#eb1f45b4a43fa31d03701a5ec46f3b52673e90ee"

add-dom-event-listener@1.x:
version "1.0.2"
resolved "https://registry.yarnpkg.com/add-dom-event-listener/-/add-dom-event-listener-1.0.2.tgz#8faed2c41008721cf111da1d30d995b85be42bed"
dependencies:
object-assign "4.x"

after@0.8.2:
version "0.8.2"
resolved "https://registry.yarnpkg.com/after/-/after-0.8.2.tgz#fedb394f9f0e02aa9768e702bda23b505fae7e1f"
@@ -771,6 +777,10 @@ array-slice@^0.2.3:
version "0.2.3"
resolved "https://registry.yarnpkg.com/array-slice/-/array-slice-0.2.3.tgz#dd3cfb80ed7973a75117cdac69b0b99ec86186f5"

array-tree-filter@^1.0.0:
version "1.0.1"
resolved "https://registry.yarnpkg.com/array-tree-filter/-/array-tree-filter-1.0.1.tgz#0a8ad1eefd38ce88858632f9cc0423d7634e4d5d"

array-union@^1.0.1:
version "1.0.2"
resolved "https://registry.yarnpkg.com/array-union/-/array-union-1.0.2.tgz#9a34410e4f4e3da23dea375be5be70f24778ec39"
@@ -1514,7 +1524,7 @@ babel-register@^6.26.0, babel-register@^6.9.0:
mkdirp "^0.5.1"
source-map-support "^0.4.15"

babel-runtime@^6.0.0, babel-runtime@^6.18.0, babel-runtime@^6.22.0, babel-runtime@^6.26.0, babel-runtime@^6.9.2:
babel-runtime@6.x, babel-runtime@^6.0.0, babel-runtime@^6.18.0, babel-runtime@^6.22.0, babel-runtime@^6.26.0, babel-runtime@^6.9.2:
version "6.26.0"
resolved "https://registry.yarnpkg.com/babel-runtime/-/babel-runtime-6.26.0.tgz#965c7058668e82b55d7bfe04ff2337bc8b5647fe"
dependencies:
@@ -2246,6 +2256,10 @@ classnames@2.x, classnames@^2.2.4, classnames@^2.2.5:
version "2.2.5"
resolved "https://registry.yarnpkg.com/classnames/-/classnames-2.2.5.tgz#fb3801d453467649ef3603c7d61a02bd129bde6d"

classnames@^2.2.6:
version "2.2.6"
resolved "https://registry.yarnpkg.com/classnames/-/classnames-2.2.6.tgz#43935bffdd291f326dad0a205309b38d00f650ce"

clean-css@3.4.x, clean-css@~3.4.2:
version "3.4.28"
resolved "https://registry.yarnpkg.com/clean-css/-/clean-css-3.4.28.tgz#bf1945e82fc808f55695e6ddeaec01400efd03ff"
@@ -2553,6 +2567,12 @@ component-bind@1.0.0:
version "1.0.0"
resolved "https://registry.yarnpkg.com/component-bind/-/component-bind-1.0.0.tgz#00c608ab7dcd93897c0009651b1d3a8e1e73bbd1"

component-classes@^1.2.5:
version "1.2.6"
resolved "https://registry.yarnpkg.com/component-classes/-/component-classes-1.2.6.tgz#c642394c3618a4d8b0b8919efccbbd930e5cd691"
dependencies:
component-indexof "0.0.3"

component-emitter@1.1.2:
version "1.1.2"
resolved "https://registry.yarnpkg.com/component-emitter/-/component-emitter-1.1.2.tgz#296594f2753daa63996d2af08d15a95116c9aec3"
@@ -2561,6 +2581,10 @@ component-emitter@1.2.1, component-emitter@^1.2.1:
version "1.2.1"
resolved "https://registry.yarnpkg.com/component-emitter/-/component-emitter-1.2.1.tgz#137918d6d78283f7df7a6b7c5a63e140e69425e6"

component-indexof@0.0.3:
version "0.0.3"
resolved "https://registry.yarnpkg.com/component-indexof/-/component-indexof-0.0.3.tgz#11d091312239eb8f32c8f25ae9cb002ffe8d3c24"

component-inherit@0.0.3:
version "0.0.3"
resolved "https://registry.yarnpkg.com/component-inherit/-/component-inherit-0.0.3.tgz#645fc4adf58b72b649d5cae65135619db26ff143"
@@ -2841,6 +2865,13 @@ crypto-random-string@^1.0.0:
version "1.0.0"
resolved "https://registry.yarnpkg.com/crypto-random-string/-/crypto-random-string-1.0.0.tgz#a230f64f568310e1498009940790ec99545bca7e"

css-animation@^1.3.2:
version "1.4.1"
resolved "https://registry.yarnpkg.com/css-animation/-/css-animation-1.4.1.tgz#5b8813125de0fbbbb0bbe1b472ae84221469b7a8"
dependencies:
babel-runtime "6.x"
component-classes "^1.2.5"

css-color-names@0.0.4:
version "0.0.4"
resolved "https://registry.yarnpkg.com/css-color-names/-/css-color-names-0.0.4.tgz#808adc2e79cf84738069b646cb20ec27beb629e0"
@@ -3515,6 +3546,10 @@ doctrine@^1.2.2:
esutils "^2.0.2"
isarray "^1.0.0"

dom-align@^1.7.0:
version "1.8.0"
resolved "https://registry.yarnpkg.com/dom-align/-/dom-align-1.8.0.tgz#c0e89b5b674c6e836cd248c52c2992135f093654"

dom-converter@~0.1:
version "0.1.4"
resolved "https://registry.yarnpkg.com/dom-converter/-/dom-converter-0.1.4.tgz#a45ef5727b890c9bffe6d7c876e7b19cb0e17f3b"
@@ -7354,6 +7389,10 @@ lodash._createset@~4.0.0:
version "4.0.3"
resolved "https://registry.yarnpkg.com/lodash._createset/-/lodash._createset-4.0.3.tgz#0f4659fbb09d75194fa9e2b88a6644d363c9fe26"

lodash._getnative@^3.0.0:
version "3.9.1"
resolved "https://registry.yarnpkg.com/lodash._getnative/-/lodash._getnative-3.9.1.tgz#570bc7dede46d61cdcde687d65d3eecbaa3aaff5"

lodash._root@~3.0.0:
version "3.0.1"
resolved "https://registry.yarnpkg.com/lodash._root/-/lodash._root-3.0.1.tgz#fba1c4524c19ee9a5f8136b4609f017cf4ded692"
@@ -7386,6 +7425,14 @@ lodash.flattendeep@^4.4.0:
version "4.4.0"
resolved "https://registry.yarnpkg.com/lodash.flattendeep/-/lodash.flattendeep-4.4.0.tgz#fb030917f86a3134e5bc9bec0d69e0013ddfedb2"

lodash.isarguments@^3.0.0:
version "3.1.0"
resolved "https://registry.yarnpkg.com/lodash.isarguments/-/lodash.isarguments-3.1.0.tgz#2f573d85c6a24289ff00663b491c1d338ff3458a"

lodash.isarray@^3.0.0:
version "3.0.4"
resolved "https://registry.yarnpkg.com/lodash.isarray/-/lodash.isarray-3.0.4.tgz#79e4eb88c36a8122af86f844aa9bcd851b5fbb55"

lodash.isequal@^4.0.0:
version "4.5.0"
resolved "https://registry.yarnpkg.com/lodash.isequal/-/lodash.isequal-4.5.0.tgz#415c4478f2bcc30120c22ce10ed3226f7d3e18e0"
@@ -7406,6 +7453,14 @@ lodash.kebabcase@^4.0.0:
version "4.1.1"
resolved "https://registry.yarnpkg.com/lodash.kebabcase/-/lodash.kebabcase-4.1.1.tgz#8489b1cb0d29ff88195cceca448ff6d6cc295c36"

lodash.keys@^3.1.2:
version "3.1.2"
resolved "https://registry.yarnpkg.com/lodash.keys/-/lodash.keys-3.1.2.tgz#4dbc0472b156be50a0b286855d1bd0b0c656098a"
dependencies:
lodash._getnative "^3.0.0"
lodash.isarguments "^3.0.0"
lodash.isarray "^3.0.0"

lodash.memoize@^4.1.2:
version "4.1.2"
resolved "https://registry.yarnpkg.com/lodash.memoize/-/lodash.memoize-4.1.2.tgz#bcc6c49a42a2840ed997f323eada5ecd182e0bfe"
@@ -8651,7 +8706,7 @@ object-assign@4.1.0:
version "4.1.0"
resolved "https://registry.yarnpkg.com/object-assign/-/object-assign-4.1.0.tgz#7a3b3d0e98063d43f4c03f2e8ae6cd51a86883a0"

object-assign@^4.0.1, object-assign@^4.1.0, object-assign@^4.1.1:
object-assign@4.x, object-assign@^4.0.1, object-assign@^4.1.0, object-assign@^4.1.1:
version "4.1.1"
resolved "https://registry.yarnpkg.com/object-assign/-/object-assign-4.1.1.tgz#2109adc7965887cfc05cbbd442cac8bfbb360863"
@@ -9981,6 +10036,54 @@ raw-body@2.3.3:
iconv-lite "0.4.23"
unpipe "1.0.0"

rc-align@^2.4.0:
version "2.4.3"
resolved "https://registry.yarnpkg.com/rc-align/-/rc-align-2.4.3.tgz#b9b3c2a6d68adae71a8e1d041cd5e3b2a655f99a"
dependencies:
babel-runtime "^6.26.0"
dom-align "^1.7.0"
prop-types "^15.5.8"
rc-util "^4.0.4"

rc-animate@2.x:
version "2.4.4"
resolved "https://registry.yarnpkg.com/rc-animate/-/rc-animate-2.4.4.tgz#a05a784c747beef140d99ff52b6117711bef4b1e"
dependencies:
babel-runtime "6.x"
css-animation "^1.3.2"
prop-types "15.x"

rc-cascader@^0.14.0:
version "0.14.0"
resolved "https://registry.yarnpkg.com/rc-cascader/-/rc-cascader-0.14.0.tgz#a956c99896f10883bf63d46fb894d0cb326842a4"
dependencies:
array-tree-filter "^1.0.0"
prop-types "^15.5.8"
rc-trigger "^2.2.0"
rc-util "^4.0.4"
shallow-equal "^1.0.0"
warning "^4.0.1"

rc-trigger@^2.2.0:
version "2.5.4"
resolved "https://registry.yarnpkg.com/rc-trigger/-/rc-trigger-2.5.4.tgz#9088a24ba5a811b254f742f004e38a9e2f8843fb"
dependencies:
babel-runtime "6.x"
classnames "^2.2.6"
prop-types "15.x"
rc-align "^2.4.0"
rc-animate "2.x"
rc-util "^4.4.0"

rc-util@^4.0.4, rc-util@^4.4.0:
version "4.5.1"
resolved "https://registry.yarnpkg.com/rc-util/-/rc-util-4.5.1.tgz#0e435057174c024901c7600ba8903dd03da3ab39"
dependencies:
add-dom-event-listener "1.x"
babel-runtime "6.x"
prop-types "^15.5.10"
shallowequal "^0.2.2"

rc@^1.0.1, rc@^1.1.6, rc@^1.1.7:
version "1.2.8"
resolved "https://registry.yarnpkg.com/rc/-/rc-1.2.8.tgz#cd924bf5200a075b83c188cd6b9e211b7fc0d3ed"
@@ -10980,6 +11083,16 @@ shallow-clone@^1.0.0:
kind-of "^5.0.0"
mixin-object "^2.0.1"

shallow-equal@^1.0.0:
version "1.0.0"
resolved "https://registry.yarnpkg.com/shallow-equal/-/shallow-equal-1.0.0.tgz#508d1838b3de590ab8757b011b25e430900945f7"

shallowequal@^0.2.2:
version "0.2.2"
resolved "https://registry.yarnpkg.com/shallowequal/-/shallowequal-0.2.2.tgz#1e32fd5bcab6ad688a4812cb0cc04efc75c7014e"
dependencies:
lodash.keys "^3.1.2"

shallowequal@^1.0.2:
version "1.0.2"
resolved "https://registry.yarnpkg.com/shallowequal/-/shallowequal-1.0.2.tgz#1561dbdefb8c01408100319085764da3fcf83f8f"
@@ -12555,6 +12668,12 @@ walker@~1.0.5:
dependencies:
makeerror "1.0.x"

warning@^4.0.1:
version "4.0.1"
resolved "https://registry.yarnpkg.com/warning/-/warning-4.0.1.tgz#66ce376b7fbfe8a887c22bdf0e7349d73d397745"
dependencies:
loose-envify "^1.0.0"

watch@~0.18.0:
version "0.18.0"
resolved "https://registry.yarnpkg.com/watch/-/watch-0.18.0.tgz#28095476c6df7c90c963138990c0a5423eb4b986"