commit bdfb2825a0
Merge remote-tracking branch 'grafana/master' into chore/drone-ci
@@ -56,7 +56,7 @@ commands:
   - run:
       name: "Install Grafana build pipeline tool"
      command: |
-        VERSION=0.4.17
+        VERSION=0.4.19
        curl -fLO https://grafana-downloads.storage.googleapis.com/grafana-build-pipeline/v${VERSION}/grabpl
        chmod +x grabpl
        mv grabpl /tmp
@@ -667,8 +667,7 @@ jobs:
            # This is a release
            /tmp/grabpl publish-docker --jobs 4 --edition << parameters.edition >> --ubuntu=<< parameters.ubuntu >>
          else
-           # TODO: Don't ignore errors, temporary workaround until we fix #22955
-           /tmp/grabpl publish-docker --jobs 4 --edition << parameters.edition >> --ubuntu=<< parameters.ubuntu >> || echo Publishing failed!
+           /tmp/grabpl publish-docker --jobs 4 --edition << parameters.edition >> --ubuntu=<< parameters.ubuntu >>
          fi
   - run:
       name: CI job failed
@@ -813,9 +812,9 @@ jobs:
       name: Install Go linters
      command: |
        pushd /tmp
-        VERSION=1.27.0
+        VERSION=1.28.0
        curl -fLO https://github.com/golangci/golangci-lint/releases/download/v${VERSION}/golangci-lint-${VERSION}-linux-amd64.tar.gz
-        echo 8d345e4e88520e21c113d81978e89ad77fc5b13bfdf20e5bca86b83fc4261272 \
+        echo 179d34edf4baf6454a7081fbaaf74dc99397a3be8e1a535dee04d835a977bf76 \
          golangci-lint-${VERSION}-linux-amd64.tar.gz | sha256sum --check --strict --status
        tar -xf golangci-lint-${VERSION}-linux-amd64.tar.gz
        sudo mv golangci-lint-${VERSION}-linux-amd64/golangci-lint /usr/local/bin/
@@ -826,7 +825,7 @@ jobs:
      command: |
        # To save memory, run in two batches
        golangci-lint run -v -j 4 --config scripts/go/configs/ci/.golangci.toml -E deadcode -E depguard -E dogsled \
-          -E errcheck -E goconst -E golint -E gosec -E gosimple -E govet -E scopelint ./pkg/...
+          -E errcheck -E goconst -E golint -E gosec -E gosimple -E govet -E exportloopref -E whitespace ./pkg/...
        golangci-lint run -v -j 4 --config scripts/go/configs/ci/.golangci.toml -E ineffassign \
          -E rowserrcheck -E staticcheck -E structcheck -E typecheck -E unconvert -E unused -E varcheck ./pkg/...
        ./scripts/go/bin/revive -formatter stylish -config ./scripts/go/configs/revive.toml ./pkg/...
@@ -1,3 +1,12 @@
+# 7.1.0-beta 2 (2020-07-02)
+
+### Features / Enhancements
+* **Loki**: Allow aliasing Loki queries in dashboard. [#25706](https://github.com/grafana/grafana/pull/25706), [@bastjan](https://github.com/bastjan)
+
+### Bug Fixes
+* **Explore**: Fix href when jumping from Explore to Add data source. [#25991](https://github.com/grafana/grafana/pull/25991), [@ivanahuckova](https://github.com/ivanahuckova)
+* **Fix**: Built-in plugins failed to load in Windows. [#25982](https://github.com/grafana/grafana/pull/25982), [@papagian](https://github.com/papagian)
+
 # 7.1.0-beta 1 (2020-07-01)
 
 ### Features / Enhancements
@@ -125,7 +125,7 @@ yarn e2e:dev
 
 ## Configure Grafana for development
 
-The default configuration, `grafana.ini`, is located in the `conf` directory.
+The default configuration, `defaults.ini`, is located in the `conf` directory.
 
 To override the default configuration, create a `custom.ini` file in the `conf` directory. You only need to add the options you wish to override.
@@ -18,6 +18,32 @@ For all items not covered in this guide, refer to the Microsoft Style Guide
 
 The [codespell](https://github.com/codespell-project/codespell) tool is run for every change to catch common misspellings.
 
+## Inclusive language
+
+This section provides guidelines on how to avoid using charged language in documentation.
+
+### Allowing and blocking
+
+Don't use "whitelist" or "blacklist" when referring to allowing or blocking content or traffic.
+
+* When used as a noun, use "allowlist" or "blocklist".
+* When used as a verb, use "allow" or "block".
+
+Example: _To **allow** outgoing traffic, add the IP to the **allowlist**._
+
+### Leader and follower
+
+Don't use "master" or "slave" to describe relationships between nodes or processes.
+
+* Use "leader," "main," or "primary" instead of "master."
+* Use "follower" or "secondary" instead of "slave."
+
+### Exceptions
+
+When referring to a configuration or setting used by third-party libraries and technologies outside the Grafana project, prefer the original name to avoid confusion.
+
+For example, use "master" when referring to the default Git branch.
+
 ## Grafana-specific style
 
 The following sections provide general guidelines on topics specific to Grafana documentation. Note that for the most part, these are *guidelines*, not rigid rules. If you have questions, ask in the #docs channel of Grafana Slack.
@@ -31,7 +57,7 @@ The following sections provide general guidelines on topics specific to Grafana
 
 * Write in present tense.
   - Not: The panel will open.
   - Use: The panel opens. Grafana opens the panel.
 * Do not use an ampersand (&) as an abbreviation for "and."
+  - **Exceptions:** If an ampersand is used in the Grafana UI, then match the UI.
 * Avoid using internal slang and jargon in technical documentation.
@@ -156,7 +182,7 @@ One word, not two.
 
 #### open source, open-source
 
 Do not hyphenate when used as an adjective unless the lack of hyphen would cause confusion. For example: _Open source software design is the most open open-source system I can imagine._
 
 Do not hyphenate when it is used as a noun. For example: _Open source is the best way to develop software._
devenv/local-npm/conf/nginx/Dockerfile (new file)
@@ -0,0 +1,3 @@
+FROM tutum/nginx
+RUN rm /etc/nginx/sites-enabled/default
+ADD sites-enabled /etc/nginx/sites-enabled
devenv/local-npm/conf/nginx/sites-enabled/verdaccio-conf (new file)
@@ -0,0 +1,14 @@
+server {
+    listen 80 default_server;
+    access_log /var/log/nginx/verdaccio.log;
+    charset utf-8;
+    location / {
+        proxy_pass http://grafana-npm.local:4873/;
+        proxy_set_header Host $host;
+        proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
+        proxy_set_header X-NginX-Proxy true;
+        proxy_ssl_session_reuse off;
+        proxy_set_header Host $http_host;
+        proxy_redirect off;
+    }
+}
devenv/local-npm/docker-compose.yaml (new file)
@@ -0,0 +1,26 @@
+version: '2'
+
+services:
+  verdaccio:
+    image: verdaccio/verdaccio:4
+    container_name: verdaccio_root_path
+    ports:
+      - "4873:4873"
+    volumes:
+      - verdaccio:/verdaccio
+
+  nginx:
+    restart: always
+    build: conf/nginx
+    ports:
+      - "80:80"
+    volumes:
+      - /www/public
+    volumes_from:
+      - verdaccio
+    links:
+      - verdaccio:verdaccio
+
+volumes:
+  verdaccio:
+    driver: local
@@ -1179,7 +1179,7 @@ Optional URL to send to users in notifications.
 
 ### key_file
 
-Path to JSON key file associated with a Google service account to authenticate and authorize.
+Optional path to JSON key file associated with a Google service account to authenticate and authorize. If no value is provided it tries to use the [application default credentials](https://cloud.google.com/docs/authentication/production#finding_credentials_automatically).
 Service Account keys can be created and downloaded from https://console.developers.google.com/permissions/serviceaccounts.
 
 Service Account should have "Storage Object Writer" role. The access control model of the bucket needs to be "Set object-level and bucket-level permissions". Grafana itself will make the images public readable.
@@ -327,11 +327,11 @@ providers:
     updateIntervalSeconds: 30
     options:
       path: /etc/dashboards
-      foldersFromFileStructure: true
+      foldersFromFilesStructure: true
 ```
 `server` and `application` will become new folders in the Grafana menu.
 
-> **Note.** `folder` and `folderUid` options should be empty or missing to make `foldersFromFileStructure` works.
+> **Note.** `folder` and `folderUid` options should be empty or missing to make `foldersFromFilesStructure` work.
 
 ## Alert Notification Channels
@@ -22,18 +22,17 @@ To enable the Azure AD OAuth2 you must register your application with Azure AD.
 
 1. Log in to [Azure Portal](https://portal.azure.com) and click **Azure Active Directory** in the side menu.
 
-1. Click **App Registrations** and add a new application registration:
-   - Name: Grafana
-   - Application type: Web app / API
-   - Sign-on URL: `https://<grafana domain>/login/azuread`
+1. Click **App Registrations** and add a new application registration with a fitting name.
 
-1. Click the name of the new application to open the application details page.
+1. Click on **Authentication** then **Add a platform** and pick **Web App**.
 
-1. Click **Endpoints**.
+1. Add the redirect URL `https://<grafana domain>/login/azuread`.
+
+1. Click **Overview** and then **Endpoints**.
    - Note down the **OAuth 2.0 authorization endpoint (v2)**, this will be the auth URL.
    - Note down the **OAuth 2.0 token endpoint (v2)**, this will be the token URL.
 
-1. Close the Endpoints page to come back to the application details page.
+1. Close the Endpoints page to come back to the application overview.
 
 1. Note down the "Application ID", this will be the OAuth client id.
@@ -88,9 +87,9 @@ To enable the Azure AD OAuth2 you must register your application with Azure AD.
    ],
    ```
 
-1. Click Overview and then on **Managed application in local directory** to show the Enterprise Application details.
+1. Go to **Azure Active Directory** and then to **Enterprise Applications**. Search for your application and click on it.
 
-1. Click on **Users and groups** and add Users/Groups to the Grafana roles by using **Add User**.
+1. Click on **Users and Groups** and add Users/Groups to the Grafana roles by using **Add User**.
 
 ## Enable Azure AD OAuth in Grafana
@@ -12,8 +12,13 @@ weight = 3
 
 # Generic OAuth Authentication
 
-You can configure many different OAuth2 authentication services with Grafana using the generic OAuth2 feature. Below you
-can find examples using Okta, BitBucket, OneLogin and Azure.
+You can configure many different OAuth2 authentication services with Grafana using the generic OAuth2 feature. Examples:
+
+- [Auth0](#set-up-oauth2-with-auth0)
+- [Azure AD]({{< relref "azuread.md" >}})
+- [BitBucket](#set-up-oauth2-with-bitbucket)
+- [Centrify](#set-up-oauth2-with-centrify)
+- [Okta]({{< relref "okta.md" >}})
+- [OneLogin](#set-up-oauth2-with-onelogin)
 
 This callback URL must match the full HTTP address that you use in your browser to access Grafana, but with the prefix path of `/login/generic_oauth`.
@@ -54,6 +59,32 @@ Check for the presence of a role using JMESPath
 
 See [JMESPath examples](#jmespath-examples) for more information.
 
+## Set up OAuth2 with Auth0
+
+1. Create a new Client in Auth0
+   - Name: Grafana
+   - Type: Regular Web Application
+
+2. Go to the Settings tab and set:
+   - Allowed Callback URLs: `https://<grafana domain>/login/generic_oauth`
+
+3. Click Save Changes, then use the values at the top of the page to configure Grafana:
+
+   ```bash
+   [auth.generic_oauth]
+   enabled = true
+   allow_sign_up = true
+   team_ids =
+   allowed_organizations =
+   name = Auth0
+   client_id = <client id>
+   client_secret = <client secret>
+   scopes = openid profile email
+   auth_url = https://<domain>/authorize
+   token_url = https://<domain>/oauth/token
+   api_url = https://<domain>/userinfo
+   ```
+
 ## Set up OAuth2 with Bitbucket
 
 ```bash
@@ -71,6 +102,37 @@ team_ids =
 allowed_organizations =
 ```
 
+## Set up OAuth2 with Centrify
+
+1. Create a new Custom OpenID Connect application configuration in the Centrify dashboard.
+
+2. Create a memorable unique Application ID, e.g. "grafana", "grafana_aws", etc.
+
+3. Put in other basic configuration (name, description, logo, category).
+
+4. On the Trust tab, generate a long password and put it into the OpenID Connect Client Secret field.
+
+5. Put the URL to the front page of your Grafana instance into the "Resource Application URL" field.
+
+6. Add an authorized Redirect URI like https://your-grafana-server/login/generic_oauth
+
+7. Set up permissions, policies, etc. just like any other Centrify app.
+
+8. Configure Grafana as follows:
+
+   ```bash
+   [auth.generic_oauth]
+   name = Centrify
+   enabled = true
+   allow_sign_up = true
+   client_id = <OpenID Connect Client ID from Centrify>
+   client_secret = <your generated OpenID Connect Client Secret>
+   scopes = openid profile email
+   auth_url = https://<your domain>.my.centrify.com/OAuth2/Authorize/<Application ID>
+   token_url = https://<your domain>.my.centrify.com/OAuth2/Token/<Application ID>
+   api_url = https://<your domain>.my.centrify.com/OAuth2/UserInfo/<Application ID>
+   ```
+
 ## Set up OAuth2 with OneLogin
 
 1. Create a new Custom Connector with the following settings:
@@ -106,63 +168,6 @@ allowed_organizations =
 allowed_organizations =
 ```
 
-## Set up OAuth2 with Auth0
-
-1. Create a new Client in Auth0
-   - Name: Grafana
-   - Type: Regular Web Application
-
-2. Go to the Settings tab and set:
-   - Allowed Callback URLs: `https://<grafana domain>/login/generic_oauth`
-
-3. Click Save Changes, then use the values at the top of the page to configure Grafana:
-
-   ```bash
-   [auth.generic_oauth]
-   enabled = true
-   allow_sign_up = true
-   team_ids =
-   allowed_organizations =
-   name = Auth0
-   client_id = <client id>
-   client_secret = <client secret>
-   scopes = openid profile email
-   auth_url = https://<domain>/authorize
-   token_url = https://<domain>/oauth/token
-   api_url = https://<domain>/userinfo
-   ```
-
-## Set up OAuth2 with Centrify
-
-1. Create a new Custom OpenID Connect application configuration in the Centrify dashboard.
-
-2. Create a memorable unique Application ID, e.g. "grafana", "grafana_aws", etc.
-
-3. Put in other basic configuration (name, description, logo, category)
-
-4. On the Trust tab, generate a long password and put it into the OpenID Connect Client Secret field.
-
-5. Put the URL to the front page of your Grafana instance into the "Resource Application URL" field.
-
-6. Add an authorized Redirect URI like https://your-grafana-server/login/generic_oauth
-
-7. Set up permissions, policies, etc. just like any other Centrify app
-
-8. Configure Grafana as follows:
-
-   ```bash
-   [auth.generic_oauth]
-   name = Centrify
-   enabled = true
-   allow_sign_up = true
-   client_id = <OpenID Connect Client ID from Centrify>
-   client_secret = <your generated OpenID Connect Client Secret"
-   scopes = openid profile email
-   auth_url = https://<your domain>.my.centrify.com/OAuth2/Authorize/<Application ID>
-   token_url = https://<your domain>.my.centrify.com/OAuth2/Token/<Application ID>
-   api_url = https://<your domain>.my.centrify.com/OAuth2/UserInfo/<Application ID>
-   ```
-
 ## JMESPath examples
 
 To ease configuration of a proper JMESPath expression, you can test/evaluate expressions with custom payloads at http://jmespath.org/.
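You can also evaluate an expression programmatically. The following is a minimal sketch using the `jmespath` npm package; the payload shape and the `'admin'` role are hypothetical, and the expression mirrors the role-check pattern referenced earlier:

```typescript
import * as jmespath from 'jmespath';

// Hypothetical userinfo payload returned by an OAuth provider.
const payload = { info: { roles: ['admin', 'editor'] } };

// Map a provider role to a Grafana role, JMESPath-style.
const expression = "contains(info.roles[*], 'admin') && 'Admin' || 'Viewer'";

console.log(jmespath.search(payload, expression)); // prints: Admin
```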
@@ -1,5 +1,5 @@
 +++
-title = "Build a plugin"
+title = "Build a plugin."
 type = "docs"
 +++
@@ -32,6 +32,7 @@ If you're looking to build your first plugin, check out these introductory tutorials:
 
 - [Build a panel plugin]({{< relref "../../../../../tutorials/build-a-panel-plugin.md" >}})
 - [Build a data source plugin]({{< relref "../../../../../tutorials/build-a-data-source-plugin.md" >}})
+- [Build a data source backend plugin]({{< relref "../../../../../tutorials/build-a-data-source-backend-plugin.md" >}})
 
 Ready to learn more? Check out our other tutorials:
@@ -13,9 +13,7 @@ weight = 100
 
 Grafana Enterprise is a commercial edition of Grafana that includes additional features not found in the open source version.
 
-Building on everything you already know and love about Grafana, Grafana Enterprise adds enterprise data sources, advanced authentication options, more permission controls, 24x7x365 support, and training from the core Grafana team.
-
-Grafana Enterprise includes all of the features found in the open source edition and more.
+Building on everything you already know and love about Grafana, Grafana Enterprise includes [exclusive datasource plugins]({{< relref "#enterprise-plugins">}}) and [additional features]({{< relref "#enterprise-features">}}). On top of that you get 24x7x365 support and training from the core Grafana team.
 
 [Learn more about Grafana Enterprise.](https://grafana.com/enterprise)
@@ -69,7 +69,7 @@ Hourly reports are generated once per hour. All fields are required.
 
 Daily reports are generated once per day. All fields are required.
 
-* **Time -** Time of day in 24 hours format when the report should be sent.
+* **Time -** Time the report is sent, in 24-hour format.
 * **Time zone -** Time zone for the **Time** field.
 
 #### Weekly
@@ -77,7 +77,17 @@ Daily reports are generated once per day. All fields are required.
 
 Weekly reports are generated once per week. All fields are required.
 
 * **Day -** Weekday which the report should be sent on.
-* **Time -** Time of day in 24 hours format when the report should be sent.
+* **Time -** Time the report is sent, in 24-hour format.
 * **Time zone -** Time zone for the **Time** field.
 
+#### Monthly
+
+> Only available in Grafana Enterprise v7.1+.
+
+Monthly reports are generated once per month. All fields are required.
+
+* **Day in month -** Day of the month when the report should be sent. You can select `last` for reports that should go out on the last day of the month.
+* **Time -** Time the report is sent, in 24-hour format.
+* **Time zone -** Time zone for the **Time** field.
+
 #### Never
@@ -21,9 +21,9 @@ The Azure Monitor data source supports multiple services in the Azure cloud:
 
 - **[Azure Monitor]({{< relref "#querying-the-azure-monitor-service" >}})** is the platform service that provides a single source for monitoring Azure resources.
 - **[Application Insights]({{< relref "#querying-the-application-insights-service" >}})** is an extensible Application Performance Management (APM) service for web developers on multiple platforms and can be used to monitor your live web application - it will automatically detect performance anomalies.
 - **[Azure Log Analytics]({{< relref "#querying-the-azure-log-analytics-service" >}})** (or Azure Logs) gives you access to log data collected by Azure Monitor.
-- **[Application Insights Analytics]({{< relref "#writing-analytics-queries-for-the-application-insights-service" >}})** allows you to query [Application Insights data](https://docs.microsoft.com/en-us/azure/azure-monitor/app/analytics) using the same query language used for Azure Log Analytics.
+- **[Application Insights Analytics]({{< relref "#query-the-application-insights-analytics-service" >}})** allows you to query [Application Insights data](https://docs.microsoft.com/en-us/azure/azure-monitor/app/analytics) using the same query language used for Azure Log Analytics.
 
-## Adding the data source
+## Add the data source
 
 The data source can access metrics from four different services. You can configure access to the services that you use. It is also possible to use the same credentials for multiple services if that is how you have set it up in Azure AD.
@@ -76,10 +76,13 @@ In the query editor for a panel, after choosing your Azure Monitor data source,
 
 - `Azure Monitor`
 - `Application Insights`
 - `Azure Log Analytics`
+- `Insights Analytics`
 
-The query editor will change depending on which one you pick. Azure Monitor is the default.
+The query editor changes depending on which one you pick. Azure Monitor is the default.
 
-## Querying the Azure Monitor service
+Starting in Grafana 7.1, Insights Analytics replaced the former edit mode from within Application Insights.
+
+## Query the Azure Monitor service
 
 The Azure Monitor service provides metrics for all the Azure services that you have running. It helps you understand how your applications on Azure are performing and to proactively find issues affecting your applications.
@@ -93,29 +96,34 @@ Examples of metrics that you can get from the service are:
 
 {{< docs-imagebox img="/img/docs/v60/azuremonitor-service-query-editor.png" class="docs-image--no-shadow" caption="Azure Monitor Query Editor" >}}
 
+As of Grafana 7.1, the query editor allows you to query multiple dimensions for metrics that support them. Metrics that support multiple dimensions are those listed in the [Azure Monitor supported Metrics List](https://docs.microsoft.com/en-us/azure/azure-monitor/platform/metrics-supported) that have one or more values listed in the "Dimension" column for the metric.
+
-### Formatting legend keys with aliases for Azure Monitor
+### Format legend keys with aliases for Azure Monitor
 
 The default legend formatting for the Azure Monitor API is:
 
-`resourceName{dimensionValue=dimensionName}.metricName`
+`metricName{dimensionName=dimensionValue,dimensionTwoName=DimensionTwoValue}`
 
-These can be quite long but this formatting can be changed using aliases. In the Legend Format field, the aliases which are defined below can be combined any way you want.
+> **Note:** Before Grafana 7.1, the formatting included the resource name in the default: `resourceName{dimensionName=dimensionValue}.metricName`. As of Grafana 7.1, the resource name has been removed from the default legend.
 
-Azure Monitor Examples:
+These can be quite long, but this formatting can be changed by using aliases. In the **Legend Format** field, you can combine the aliases defined below any way you want.
 
-- `dimension: {{dimensionvalue}}`
-- `{{resourcegroup}} - {{resourcename}}`
+Azure Monitor examples:
+
+- `Blob Type: {{ blobtype }}`
+- `{{ resourcegroup }} - {{ resourcename }}`
 
 ### Alias patterns for Azure Monitor
 
-- `{{resourcegroup}}` = replaced with the value of the Resource Group
-- `{{namespace}}` = replaced with the value of the Namespace (e.g. Microsoft.Compute/virtualMachines)
-- `{{resourcename}}` = replaced with the value of the Resource Name
-- `{{metric}}` = replaced with metric name (e.g. Percentage CPU)
-- `{{dimensionname}}` = replaced with dimension key/label (e.g. blobtype)
-- `{{dimensionvalue}}` = replaced with dimension value (e.g. BlockBlob)
+- `{{ resourcegroup }}` = replaced with the value of the Resource Group
+- `{{ namespace }}` = replaced with the value of the Namespace (e.g. Microsoft.Compute/virtualMachines)
+- `{{ resourcename }}` = replaced with the value of the Resource Name
+- `{{ metric }}` = replaced with metric name (e.g. Percentage CPU)
+- `{{ dimensionname }}` = *Legacy as of 7.1+ (for backwards compatibility)* replaced with the first dimension's key/label (as sorted by the key/label) (e.g. blobtype)
+- `{{ dimensionvalue }}` = *Legacy as of 7.1+ (for backwards compatibility)* replaced with first dimension's value (as sorted by the key/label) (e.g. BlockBlob)
+- `{{ arbitraryDim }}` = *Available in 7.1+* replaced with the value of the corresponding dimension. (e.g. `{{ blobtype }}` becomes BlockBlob)
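To make the substitution rule concrete, here is an illustrative TypeScript sketch of how such alias tokens could be expanded. This is not Grafana's actual implementation; `labels` is a hypothetical map of pattern names to values:

```typescript
// Expand {{ token }} placeholders against a map of label values,
// leaving unknown tokens untouched.
function formatLegend(alias: string, labels: Record<string, string>): string {
  return alias.replace(/\{\{\s*([^}\s]+)\s*\}\}/g, (match, key) => labels[key] ?? match);
}

// Example: prints "Blob Type: BlockBlob"
console.log(formatLegend('Blob Type: {{ blobtype }}', { blobtype: 'BlockBlob' }));
```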
-### Templating with variables for Azure Monitor
+### Create template variables for Azure Monitor
 
 Instead of hard-coding things like server, application and sensor name in your metric queries you can use variables in their place. Variables are shown as dropdown select boxes at the top of the dashboard. These dropdowns make it easy to change the data being displayed in your dashboard.
@@ -159,29 +167,31 @@ Grafana alerting is supported for the Azure Monitor service.
 
 {{< docs-imagebox img="/img/docs/v60/azuremonitor-alerting.png" class="docs-image--no-shadow" caption="Azure Monitor Alerting" >}}
 
-## Querying the Application Insights Service
+## Query the Application Insights Service
 
-{{< docs-imagebox img="/img/docs/v60/appinsights-service-query-editor.png" class="docs-image--no-shadow" caption="Application Insights Query Editor" >}}
+{{< docs-imagebox img="/img/docs/azuremonitor/insights_metrics_multi-dim.png" class="docs-image--no-shadow" caption="Application Insights Query Editor" >}}
+
+As of Grafana 7.1, you can select more than one group by dimension.
 
 ### Formatting legend keys with aliases for Application Insights
 
 The default legend formatting is:
 
-`metric/name{group/by="groupbyvalue"}`
+`metricName{dimensionName=dimensionValue,dimensionTwoName=DimensionTwoValue}`
 
 In the Legend Format field, the aliases which are defined below can be combined any way you want.
 
-Application Insights Examples:
+Application Insights examples:
 
-- `server: {{groupbyvalue}}`
-- `city: {{groupbyvalue}}`
-- `{{groupbyname}}: {{groupbyvalue}}`
+- `city: {{ client/city }}`
+- `{{ metric }} [Location: {{ client/countryOrRegion }}, {{ client/city }}]`
 
 ### Alias patterns for Application Insights
 
-- `{{groupbyvalue}}` = replaced with the value of the group by
-- `{{groupbyname}}` = replaced with the name/label of the group by
-- `{{metric}}` = replaced with metric name (e.g. requests/count)
+- `{{ groupbyvalue }}` = *Legacy as of 7.1+ (for backwards compatibility)* replaced with the first dimension's key/label (as sorted by the key/label)
+- `{{ groupbyname }}` = *Legacy as of 7.1+ (for backwards compatibility)* replaced with first dimension's value (as sorted by the key/label) (e.g. BlockBlob)
+- `{{ metric }}` = replaced with metric name (e.g. requests/count)
+- `{{ arbitraryDim }}` = *Available in 7.1+* replaced with the value of the corresponding dimension. (e.g. `{{ client/city }}` becomes Chicago)
 
 ### Filter expressions for Application Insights
@@ -222,30 +232,55 @@ Grafana alerting is supported for Application Insights.
 
 ## Querying the Azure Log Analytics service
 
-Queries are written in the new [Azure Log Analytics (or KustoDB) Query Language](https://docs.loganalytics.io/index). A Log Analytics Query can be formatted as Time Series data or as Table data.
+Queries are written in the new [Azure Log Analytics (or KustoDB) Query Language](https://docs.loganalytics.io/index). A Log Analytics query can be formatted as time series data or as table data.
 
-Time Series queries are for the Graph Panel (and other panels like the Single Stat panel) and must contain a datetime column, a metric name column and a value column. Here is an example query that returns the aggregated count grouped by the Category column and grouped by hour:
+If your credentials give you access to multiple subscriptions, then choose the appropriate subscription before entering queries.
 
-```
-AzureActivity
-| where $__timeFilter(TimeGenerated)
-| summarize count() by Category, bin(TimeGenerated, 1h)
-```
+### Time series queries
+
+Time series queries are for the Graph panel and other panels like the SingleStat panel. Each query must contain at least a datetime column and a numeric value column. The result must also be sorted in ascending order by the datetime column.
+
+Here is an example query that returns the aggregated count grouped by hour:
+
+```kusto
+Perf
+| where $__timeFilter(TimeGenerated)
+| summarize count() by bin(TimeGenerated, 1h)
+| order by TimeGenerated asc
+```
+
+A query can also have one or more non-numeric/non-datetime columns, and those columns are considered dimensions and become labels in the response. For example, a query that returns the aggregated count grouped by hour, Computer, and the CounterName:
+
+```kusto
+Perf
+| where $__timeFilter(TimeGenerated)
+| summarize count() by bin(TimeGenerated, 1h), Computer, CounterName
+| order by TimeGenerated asc
+```
+
+You can also select additional number value columns (with, or without multiple dimensions). For example, getting a count and average value by hour, Computer, CounterName, and InstanceName:
+
+```kusto
+Perf
+| where $__timeFilter(TimeGenerated)
+| summarize Samples=count(), AvgValue=avg(CounterValue)
+    by bin(TimeGenerated, $__interval), Computer, CounterName, InstanceName
+| order by TimeGenerated asc
+```
+
+{{< docs-imagebox img="/img/docs/azuremonitor/logs_multi-value_multi-dim.png" class="docs-image--no-shadow" caption="Azure Logs query with multiple values and multiple dimensions" >}}
+
+### Table queries
 
-Table queries are mainly used in the Table panel and row a list of columns and rows. This example query returns rows with the 6 specified columns:
+Table queries are mainly used in the Table panel and show a list of columns and rows. This example query returns rows with the six specified columns:
 
-```
+```kusto
 AzureActivity
 | where $__timeFilter()
 | project TimeGenerated, ResourceGroup, Category, OperationName, ActivityStatus, Caller
 | order by TimeGenerated desc
 ```
 
-If your credentials give you access to multiple subscriptions then choose the appropriate subscription first.
-
 {{< docs-imagebox img="/img/docs/v60/azureloganalytics-service-query-editor.png" class="docs-image--no-shadow" caption="Azure Log Analytics Query Editor" >}}
 
 ### Azure Log Analytics macros
 
 To make writing queries easier there are several Grafana macros that can be used in the where clause of a query:
@@ -304,7 +339,7 @@ Example variable queries:
 
 Example of a time series query using variables:
 
-```
+```kusto
 Perf
 | where ObjectName == "$object" and CounterName == "$metric"
 | where TimeGenerated >= $__timeFrom() and TimeGenerated <= $__timeTo()
@@ -331,21 +366,11 @@ If you're not currently logged in to the Azure Portal
 
 Grafana alerting is supported for Application Insights. This is not Azure Alerts support. Read more about how alerting in Grafana works in [Alerting rules]({{< relref "../../alerting/alerts-overview.md" >}}).
 
-### Writing analytics queries For the Application Insights service
+## Query the Application Insights Analytics service
 
-If you change the service type to "Application Insights", the menu icon to the right adds another option, "Toggle Edit Mode". Once clicked, the query edit mode changes to give you a full text area in which to write log analytics queries. (This is identical to how the InfluxDB data source lets you write raw queries.)
+If you change the service type to **Insights Analytics**, then a similar editor to the Log Analytics service is available. This service also uses the Kusto language, so the instructions for querying data are identical to [querying the log analytics service]({{< relref "#querying-the-azure-log-analytics-service" >}}), except that you query Application Insights Analytics data instead.
 
-Once a query is written, the column names are automatically parsed out of the response data. You can then select them in the "X-axis", "Y-axis", and "Split On" dropdown menus, or just type them out.
-
-There are some important caveats to remember:
-
-- You'll want to order your y-axis in the query, eg. `order by timestamp asc`. The graph may come out looking bizarre otherwise. It's better to have Microsoft sort it on their side where it's faster, than to implement this in the plugin.
-
-- If you copy a log analytics query, typically they'll end with a render instruction, like `render barchart`. This is unnecessary, but harmless.
-
-- Currently, four default dashboard variables are supported: `$__timeFilter()`, `$__from`, `$__to`, and `$__interval`. If you're searching in timestamped data, replace the beginning of your where clause to `where $__timeFilter()`. Dashboard changes by time region are handled as you'd expect, as long as you leave the name of the `timestamp` column alone. Likewise, `$__interval` will automatically change based on the dashboard's time region _and_ the width of the chart being displayed. Use it in bins, so `bin(timestamp,$__interval)` changes into something like `bin(timestamp,1s)`. Use `$__from` and `$__to` if you just want the formatted dates to be inserted.
-
-- Templated dashboard variables are not yet supported! They will come in a future version.
+{{< docs-imagebox img="/img/docs/azuremonitor/insights_analytics_multi-dim.png" class="docs-image--no-shadow" caption="Azure Application Insights Analytics query with multiple dimensions" >}}
 
 ## Configure the data source with provisioning
@@ -34,6 +34,7 @@ build dashboards or use Explore with CloudWatch metrics and CloudWatch Logs.
 
 | _Auth Provider_ | Specify the provider to get credentials. |
 | _Credentials_ profile name | Specify the name of the profile to use (if you use `~/.aws/credentials` file), leave blank for default. |
 | _Assume Role Arn_ | Specify the ARN of the role to assume. |
+| _External ID_ | If you are assuming a role in another account that was created with an external ID, specify the external ID here. |
 
 ## Authentication
docs/sources/getting-started/strategies.md (new file)
@@ -0,0 +1,54 @@
++++
+title = "Monitoring strategies"
+description = "Common monitoring strategies"
+keywords = ["grafana", "intro", "guide", "concepts", "methods"]
+type = "docs"
+[menu.docs]
+weight = 500
++++
+
+# Common observability strategies
+
+When you have a lot to monitor, like a server farm, you need a strategy to decide what is important enough to monitor. This page describes several common methods for choosing what to monitor.
+
+A logical strategy allows you to make uniform dashboards and scale your observability platform more easily.
+
+## Guidelines for usage
+
+- The USE method tells you how happy your machines are; the RED method tells you how happy your users are.
+- USE reports on causes of issues.
+- RED reports on user experience and is more likely to report symptoms of problems.
+- The best practice of alerting is to alert on symptoms rather than causes, so alerting should be done on RED dashboards.
+
+## USE method
+
+USE stands for:
+
+- **Utilization -** Percent time the resource is busy, such as node CPU usage
+- **Saturation -** Amount of work a resource has to do, often queue length or node load
+- **Errors -** Count of error events
+
+This method is best for hardware resources in infrastructure, such as CPU, memory, and network devices. For more information, refer to [The USE Method](http://www.brendangregg.com/usemethod.html).
+
+## RED method
+
+RED stands for:
+
+- **Rate -** Requests per second
+- **Errors -** Number of requests that are failing
+- **Duration -** Amount of time these requests take, distribution of latency measurements
+
+This method is most applicable to services, especially a microservices environment. For each of your services, instrument the code to expose these metrics for each component. RED dashboards are good for alerting and SLAs. A well-designed RED dashboard is a proxy for user experience.
+
+For more information, refer to Tom Wilkie's blog post [The RED method: How to instrument your services](https://grafana.com/blog/2018/08/02/the-red-method-how-to-instrument-your-services).
+
+## The Four Golden Signals
+
+According to the [Google SRE handbook](https://landing.google.com/sre/sre-book/chapters/monitoring-distributed-systems/#xref_monitoring_golden-signals), if you can only measure four metrics of your user-facing system, focus on these four.
+
+This method is similar to the RED method, but it includes saturation.
+
+- **Latency -** Time taken to serve a request
+- **Traffic -** How much demand is placed on your system
+- **Errors -** Rate of requests that are failing
+- **Saturation -** How "full" your system is
@@ -3,8 +3,8 @@ title = "Time series"
 description = "Introduction to time series"
 keywords = ["grafana", "intro", "guide", "concepts", "timeseries"]
 type = "docs"
-[menu.docs]
+aliases = ["/docs/grafana/latest/guides/timeseries"]
+[menu.docs]
 name = "Time series"
 identifier = "time_series"
 parent = "guides"
@@ -17,7 +17,7 @@ Currently you can authenticate via an `API Token` or via a `Session cookie`
 
 ## Basic Auth
 
-If basic auth is enabled (it is enabled by default) you can authenticate your HTTP request via
+If basic auth is enabled (it is enabled by default), then you can authenticate your HTTP request via
 standard basic auth. Basic auth will also authenticate LDAP users.
 
 curl example:
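The hunk ends before the example itself. As an illustration only, here is a TypeScript sketch of an equivalent basic-auth request; the `admin:admin` credentials and `localhost:3000` address are assumed defaults, not values from this diff:

```typescript
// Roughly equivalent to `curl -u admin:admin http://localhost:3000/api/org`.
async function getCurrentOrg(): Promise<unknown> {
  const auth = Buffer.from('admin:admin').toString('base64');
  const res = await fetch('http://localhost:3000/api/org', {
    headers: { Authorization: `Basic ${auth}` },
  });
  return res.json();
}
```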
@@ -9,6 +9,8 @@
     link: /getting-started/timeseries/
   - name: Intro to histograms
     link: /getting-started/intro-histograms/
+  - name: Observability strategies
+    link: /getting-started/strategies/
   - name: Glossary
     link: /getting-started/glossary/
   - name: Installation
@@ -16,8 +16,9 @@ The request function can be used to perform a remote call by specifying a [BackendSrvRequest]
 
 <b>Signature</b>
 
 ```typescript
 export interface BackendSrv
 ```
 
 <b>Import</b>
 
 ```typescript
@@ -26,19 +27,21 @@ import { BackendSrv } from '@grafana/runtime';
 
 ## Remarks
 
-By default Grafana will display an error message alert if the remote call fails. If you want to prevent this from happening you need to catch the error thrown by the BackendSrv and set the `isHandled = true` on the incoming error.
+By default Grafana will display an error message alert if the remote call fails. If you want to prevent this from happening you need to set `showErrorAlert = false` on the request options object.
+
+> In versions prior to v7.2 you disable the notification alert by setting `isHandled = true` on the caught error.
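A minimal sketch of both approaches, assuming a request issued through `getBackendSrv()` from `@grafana/runtime`:

```typescript
import { getBackendSrv } from '@grafana/runtime';

async function fetchQuietly(url: string): Promise<unknown | null> {
  try {
    // v7.2+: opt out of the error alert through the request options.
    return await getBackendSrv().request({ url, method: 'GET', showErrorAlert: false });
  } catch (err: any) {
    // Before v7.2: mark the caught error as handled instead.
    err.isHandled = true;
    return null;
  }
}
```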
 <b>Methods</b>
 
 | Method | Description |
 | --- | --- |
 | [datasourceRequest(options)](#datasourcerequest-method) | Special function used to communicate with datasources that will emit core events that the Grafana QueryInspector and QueryEditor is listening for to be able to display datasource query information. Can be skipped by adding <code>option.silent</code> when initializing the request. |
 | [delete(url)](#delete-method) | |
 | [get(url, params, requestId)](#get-method) | |
 | [patch(url, data)](#patch-method) | |
 | [post(url, data)](#post-method) | |
 | [put(url, data)](#put-method) | |
 | [request(options)](#request-method) | |
 ### datasourceRequest method
 
@@ -49,11 +52,12 @@ Special function used to communicate with datasources that will emit core events
 
 ```typescript
 datasourceRequest(options: BackendSrvRequest): Promise<any>;
 ```
 
 <b>Parameters</b>
 
 | Parameter | Type | Description |
 | --- | --- | --- |
 | options | <code>BackendSrvRequest</code> | |
 
 <b>Returns:</b>
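A hedged usage sketch; the proxy route and payload below are hypothetical, and `silent: true` corresponds to the `option.silent` escape hatch described in the table above:

```typescript
import { getBackendSrv } from '@grafana/runtime';

// Issue a datasource call without emitting the core events that the
// Query Inspector listens for.
async function runThroughProxy() {
  return getBackendSrv().datasourceRequest({
    url: '/api/datasources/proxy/1/query',
    method: 'POST',
    data: { queries: [] },
    silent: true,
  });
}
```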
@@ -66,11 +70,12 @@ datasourceRequest(options: BackendSrvRequest): Promise<any>;
 
 ```typescript
 delete(url: string): Promise<any>;
 ```
 
 <b>Parameters</b>
 
 | Parameter | Type | Description |
 | --- | --- | --- |
 | url | <code>string</code> | |
 
 <b>Returns:</b>
 
@@ -83,13 +88,14 @@ delete(url: string): Promise<any>;
 
 ```typescript
 get(url: string, params?: any, requestId?: string): Promise<any>;
 ```
 
 <b>Parameters</b>
 
 | Parameter | Type | Description |
 | --- | --- | --- |
 | url | <code>string</code> | |
 | params | <code>any</code> | |
 | requestId | <code>string</code> | |
 
 <b>Returns:</b>
 
@@ -102,12 +108,13 @@ get(url: string, params?: any, requestId?: string): Promise<any>;
 
 ```typescript
 patch(url: string, data?: any): Promise<any>;
 ```
 
 <b>Parameters</b>
 
 | Parameter | Type | Description |
 | --- | --- | --- |
 | url | <code>string</code> | |
 | data | <code>any</code> | |
 
 <b>Returns:</b>
 
@@ -120,12 +127,13 @@ patch(url: string, data?: any): Promise<any>;
 
 ```typescript
 post(url: string, data?: any): Promise<any>;
 ```
 
 <b>Parameters</b>
 
 | Parameter | Type | Description |
 | --- | --- | --- |
 | url | <code>string</code> | |
 | data | <code>any</code> | |
 
 <b>Returns:</b>
 
@@ -138,12 +146,13 @@ post(url: string, data?: any): Promise<any>;
 
 ```typescript
 put(url: string, data?: any): Promise<any>;
 ```
 
 <b>Parameters</b>
 
 | Parameter | Type | Description |
 | --- | --- | --- |
 | url | <code>string</code> | |
 | data | <code>any</code> | |
 
 <b>Returns:</b>
 
@@ -156,13 +165,13 @@ put(url: string, data?: any): Promise<any>;
 
 ```typescript
 request(options: BackendSrvRequest): Promise<any>;
 ```
 
 <b>Parameters</b>
 
 | Parameter | Type | Description |
 | --- | --- | --- |
 | options | <code>BackendSrvRequest</code> | |
 
 <b>Returns:</b>
 
 `Promise<any>`
@@ -11,7 +11,7 @@ weight = 300
 
 This page explains what transformations in Grafana are and how to use them.
 
-> **Note:** This documentation refers to a Grafana 7.0 beta feature. This documentation will be frequently updated to reflect updates to the feature, and it will probably be broken into smaller sections when the feature moves out of beta.
+> **Note:** This documentation refers to a Grafana 7.0 feature. This documentation will be frequently updated to reflect updates to the feature, and it will probably be broken into smaller sections when the feature moves out of beta.
 
 Transformations process the result set before it’s passed to the visualization. You access transformations in the Transform tab of the Grafana panel editor.
@@ -74,6 +74,7 @@ Grafana comes with the following transformations:
 
 - [Join by field (outer join)](#join-by-field-outer-join)
 - [Add field from calculation](#add-field-from-calculation)
 - [Labels to fields](#labels-to-fields)
+- [Series to rows](#series-to-rows)
 - [Debug transformations](#debug-transformations)
 
 Keep reading for detailed descriptions of each type of transformation and the options available for each, as well as suggestions on how to use them.
@@ -96,25 +97,33 @@ After I apply the transformation
 
 ### Merge
 
+> **Note:** This documentation refers to a Grafana 7.1 feature.
+
-Use this transformation to combine the result from multiple queries into one single result based on the time field. This is helpful when using the table panel visualization.
+Use this transformation to combine the result from multiple queries into one single result. This is helpful when using the table panel visualization. Values that can be merged are combined into the same row. Values are mergeable if the shared fields contain the same data.
 
-In the example below, we are visualizing multiple queries returning table data before applying the transformation.
+In the example below, we have two queries returning table data. It is visualized as two separate tables before applying the transformation.
 
-{{< docs-imagebox img="/img/docs/transformations/table-data-before-merge-7-1.png" class="docs-image--no-shadow" max-width= "1100px" >}}
+Query A:
+
+| Time                | Job     | Uptime    |
+|---------------------|---------|-----------|
+| 2020-07-07 11:34:20 | node    | 25260122  |
+| 2020-07-07 11:24:20 | postgre | 123001233 |
 
-Here is the same example after applying the merge transformation.
+Query B:
+
+| Time                | Job     | Errors |
+|---------------------|---------|--------|
+| 2020-07-07 11:34:20 | node    | 15     |
+| 2020-07-07 11:24:20 | postgre | 5      |
 
-{{< docs-imagebox img="/img/docs/transformations/table-data-after-merge-7-1.png" class="docs-image--no-shadow" max-width= "1100px" >}}
-
-If any of the queries return time series data, then a `Metric` column containing the name of the query is added. You can be customized this value by defining `Label` on the source query.
-
-In the example below, we are visualizing multiple queries returning time series data before applying the transformation.
-
-{{< docs-imagebox img="/img/docs/transformations/time-series-before-merge-7-1.png" class="docs-image--no-shadow" max-width= "1100px" >}}
-
-Here is the same example after applying the merge transformation.
-
-{{< docs-imagebox img="/img/docs/transformations/time-series-after-merge-7-1.png" class="docs-image--no-shadow" max-width= "1100px" >}}
+Here is the result after applying the `Merge` transformation.
+
+| Time                | Job     | Errors | Uptime    |
+|---------------------|---------|--------|-----------|
+| 2020-07-07 11:34:20 | node    | 15     | 25260122  |
+| 2020-07-07 11:24:20 | postgre | 5      | 123001233 |
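To make the pairing rule concrete, here is an illustrative TypeScript sketch of merging rows on their shared fields. It is not Grafana's implementation, just the idea under the hood:

```typescript
type Row = Record<string, string | number>;

// Rows from different queries are merged when they agree on every
// shared field (in the example above: Time and Job).
function mergeRows(a: Row[], b: Row[], shared: string[]): Row[] {
  const key = (r: Row) => shared.map((f) => r[f]).join('|');
  const merged = new Map<string, Row>();
  for (const r of [...a, ...b]) {
    merged.set(key(r), { ...merged.get(key(r)), ...r });
  }
  return [...merged.values()];
}

// Usage: mergeRows(queryA, queryB, ['Time', 'Job'])
```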
 ### Filter by name
 
@@ -213,6 +222,43 @@ After I apply the transformation, my labels appear in the table as fields.
 
 {{< docs-imagebox img="/img/docs/transformations/labels-to-fields-after-7-0.png" class="docs-image--no-shadow" max-width= "1100px" >}}
 
+## Series to rows
+
+> **Note:** This documentation refers to a Grafana 7.1 feature.
+
+Use this transformation to combine the result from multiple time series data queries into one single result. This is helpful when using the table panel visualization.
+
+The result from this transformation will contain three columns: `Time`, `Metric`, and `Value`. The `Metric` column is added so you can easily see which query the metric originates from. Customize this value by defining `Label` on the source query.
+
+In the example below, we have two queries returning time series data. It is visualized as two separate tables before applying the transformation.
+
+Query A:
+
+| Time                | Temperature |
+|---------------------|-------------|
+| 2020-07-07 11:34:20 | 25          |
+| 2020-07-07 10:31:22 | 22          |
+| 2020-07-07 09:30:05 | 19          |
+
+Query B:
+
+| Time                | Humidity |
+|---------------------|----------|
+| 2020-07-07 11:34:20 | 24       |
+| 2020-07-07 10:32:20 | 29       |
+| 2020-07-07 09:30:57 | 33       |
+
+Here is the result after applying the `Series to rows` transformation.
+
+| Time                | Metric      | Value |
+|---------------------|-------------|-------|
+| 2020-07-07 11:34:20 | Temperature | 25    |
+| 2020-07-07 11:34:20 | Humidity    | 24    |
+| 2020-07-07 10:32:20 | Humidity    | 29    |
+| 2020-07-07 10:31:22 | Temperature | 22    |
+| 2020-07-07 09:30:57 | Humidity    | 33    |
+| 2020-07-07 09:30:05 | Temperature | 19    |
+
 ## Debug transformations
 
 To see the input and the output result sets of the transformation, click the bug icon on the right side of the transformation row.
@ -32,6 +32,17 @@ Use these settings to refine your visualization.
|
||||
- **Points -** Display points for values.
|
||||
- **Point radius -** Controls how large the points are.
|
||||
|
||||
### Stacking and null value
|
||||
|
||||
- **Stack -** Each series is stacked on top of another.
|
||||
- **Percent -** Available when **Stack** is selected. Each series is drawn as a percentage of the total of all series.
|
||||
- **Null value -** How null values are displayed. _This is a very important setting._ See note below.
|
||||
- **connected -** If there is a gap in the series, meaning a null value or values, then the line will skip the gap and connect to the next non-null value.
|
||||
- **null -** (default) If there is a gap in the series, meaning a null value, then the line in the graph will be broken and show the gap.
|
||||
- **null as zero -** If there is a gap in the series, meaning a null value, then it will be displayed as a zero value in the graph panel.
|
||||
|
||||
> **Note:** If you are monitoring a server's CPU load and the load reaches 100%, then the server will lock up and the agent sending statistics will not be able to collect the load statistic. This leads to a gap in the metrics and having the default as _null_ means Grafana will show the gaps and indicate that something is wrong. If this is set to _connected_, then it would be easy to miss this signal.
|
||||
|
||||
### Hover tooltip
|
||||
|
||||
Use these settings to change the appearance of the tooltip that appears when you hover your cursor over the graph visualization.
|
||||
@ -44,17 +55,6 @@ Use these settings to change the appearance of the tooltip that appears when you
|
||||
- **Increasing -** The series in the hover tooltip are sorted by value and in increasing order, with the lowest value at the top of the list.
|
||||
- **Decreasing -** The series in the hover tooltip are sorted by value and in decreasing order, with the highest value at the top of the list.
|
||||
|
||||
### Stacking and null value
|
||||
|
||||
- **Stack -** Each series is stacked on top of another.
|
||||
- **Percent -** Available when **Stack** is selected. Each series is drawn as a percentage of the total of all series.
|
||||
- **Null value -** How null values are displayed. _This is a very important setting._ See note below.
|
||||
- **connected -** If there is a gap in the series, meaning a null value or values, then the line will skip the gap and connect to the next non-null value.
|
||||
- **null -** (default) If there is a gap in the series, meaning a null value, then the line in the graph will be broken and show the gap.
|
||||
- **null as zero -** If there is a gap in the series, meaning a null value, then it will be displayed as a zero value in the graph panel.
|
||||
|
||||
> **Note:** If you are monitoring a server's CPU load and the load reaches 100%, then the server will lock up and the agent sending statistics will not be able to collect the load statistic. This leads to a gap in the metrics and having the default as _null_ means Grafana will show the gaps and indicate that something is wrong. If this is set to _connected_, then it would be easy to miss this signal.
|
||||
|
||||
## Series overrides
|
||||
|
||||
Series overrides allow a series in a graph panel to be rendered differently from the others. You can customize display options on a per-series bases or by using regex rules. For example, one series can have a thicker line width to make it stand out or be moved to the right Y-axis.
@ -9,7 +9,7 @@ Plugin signature verification (signing) is a security measure to make sure plugi

## How it works

For Grafana to be able to verify the digital signature of a plugin, the plugin must include a signed manifest file, _MANIFEST.txt_. The signed manifest file contains two sections:
For Grafana to verify the digital signature of a plugin, the plugin must include a signed manifest file, _MANIFEST.txt_. The signed manifest file contains two sections:

- **Signed message -** The signed message contains plugin metadata and plugin files with their respective checksums (SHA256).
- **Digital signature -** The digital signature is created by encrypting the signed message using a private key. Grafana has a public key built in that can be used to verify that the digital signature has been encrypted using the expected private key.
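A hedged sketch of the kind of information the signed message carries; the field names below are illustrative rather than the exact manifest schema:

```ts
// Hedged sketch: data carried by a plugin's signed manifest (names illustrative).
interface PluginManifestMessage {
  plugin: string;                 // plugin identifier
  version: string;                // plugin version being signed
  files: Record<string, string>;  // relative file path -> SHA256 checksum
}
```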
@ -74,7 +74,7 @@ const expectDrawerTabsAndContent = () => {
    e2e.components.PanelInspector.Stats.content().should('not.be.visible');
    e2e.components.PanelInspector.Query.content().should('not.be.visible');

    e2e().wait(250);
    e2e().wait(350);
    /* Monaco Editor specific wait that fixes error below https://github.com/microsoft/monaco-editor/issues/354
      TypeError: Cannot read property 'getText' of null
        at Object.getFoldingRanges (http://localhost:3001/public/build/json.worker.js:18829:102)
go.mod

@ -25,12 +25,13 @@ require (
	github.com/go-sql-driver/mysql v1.5.0
	github.com/go-stack/stack v1.8.0
	github.com/gobwas/glob v0.2.3
	github.com/golang/mock v1.4.3
	github.com/golang/protobuf v1.4.0
	github.com/google/go-cmp v0.4.0
	github.com/gorilla/websocket v1.4.1
	github.com/gosimple/slug v1.4.2
	github.com/grafana/grafana-plugin-model v0.0.0-20190930120109-1fc953a61fb4
	github.com/grafana/grafana-plugin-sdk-go v0.70.0
	github.com/grafana/grafana-plugin-sdk-go v0.71.0
	github.com/hashicorp/go-hclog v0.0.0-20180709165350-ff2cf002a8dd
	github.com/hashicorp/go-plugin v1.2.2
	github.com/hashicorp/go-version v1.1.0
go.sum

@ -110,7 +110,10 @@ github.com/gogo/protobuf v1.2.0/go.mod h1:r8qH/GZQm5c6nD/R0oafs1akxWv10x8SbQlK7a
github.com/gogo/protobuf v1.2.1/go.mod h1:hp+jE20tsWTFYpLwKvXlhS1hjn+gTNwPg2I6zVXpSg4=
github.com/golang/glog v0.0.0-20160126235308-23def4e6c14b/go.mod h1:SBH7ygxi8pfUlaOkMMuAQtPIUF8ecWP5IEl/CR7VP2Q=
github.com/golang/mock v1.1.1/go.mod h1:oTYuIxOrZwtPieC+H1uAHpcLFnEyAGVDL/k47Jfbm0A=
github.com/golang/mock v1.2.0 h1:28o5sBqPkBsMGnC6b4MvE2TzSr5/AT4c/1fLqVGIwlk=
github.com/golang/mock v1.2.0/go.mod h1:oTYuIxOrZwtPieC+H1uAHpcLFnEyAGVDL/k47Jfbm0A=
github.com/golang/mock v1.4.3 h1:GV+pQPG/EUUbkh47niozDcADz6go/dUwhVzdUQHIVRw=
github.com/golang/mock v1.4.3/go.mod h1:UOMv5ysSaYNkG+OFQykRIcU/QvvxJf3p21QfJ2Bt3cw=
github.com/golang/protobuf v1.2.0/go.mod h1:6lQm79b+lXiMfvg/cZm0SGofjICqVBUtrP5yJMmIC1U=
github.com/golang/protobuf v1.3.1/go.mod h1:6lQm79b+lXiMfvg/cZm0SGofjICqVBUtrP5yJMmIC1U=
github.com/golang/protobuf v1.3.2/go.mod h1:6lQm79b+lXiMfvg/cZm0SGofjICqVBUtrP5yJMmIC1U=
@ -150,8 +153,8 @@ github.com/gosimple/slug v1.4.2 h1:jDmprx3q/9Lfk4FkGZtvzDQ9Cj9eAmsjzeQGp24PeiQ=
github.com/gosimple/slug v1.4.2/go.mod h1:ER78kgg1Mv0NQGlXiDe57DpCyfbNywXXZ9mIorhxAf0=
github.com/grafana/grafana-plugin-model v0.0.0-20190930120109-1fc953a61fb4 h1:SPdxCL9BChFTlyi0Khv64vdCW4TMna8+sxL7+Chx+Ag=
github.com/grafana/grafana-plugin-model v0.0.0-20190930120109-1fc953a61fb4/go.mod h1:nc0XxBzjeGcrMltCDw269LoWF9S8ibhgxolCdA1R8To=
github.com/grafana/grafana-plugin-sdk-go v0.70.0 h1:tbwf0KMp8QEQQYF3bDBOOv/npegD6YP8T90OWbLr7n4=
github.com/grafana/grafana-plugin-sdk-go v0.70.0/go.mod h1:NvxLzGkVhnoBKwzkst6CFfpMFKwAdIUZ1q8ssuLeF60=
github.com/grafana/grafana-plugin-sdk-go v0.71.0 h1:dF2H1O03aTekFujss+iU/dcrvdDMsk16URbyExNJxqY=
github.com/grafana/grafana-plugin-sdk-go v0.71.0/go.mod h1:NvxLzGkVhnoBKwzkst6CFfpMFKwAdIUZ1q8ssuLeF60=
github.com/grpc-ecosystem/go-grpc-middleware v1.2.0 h1:0IKlLyQ3Hs9nDaiK5cSHAGmcQEIC8l2Ts1u6x5Dfrqg=
github.com/grpc-ecosystem/go-grpc-middleware v1.2.0/go.mod h1:mJzapYve32yjrKlk9GbyCZHuPgZsrbyIbyKhSzOpg6s=
github.com/grpc-ecosystem/go-grpc-prometheus v1.2.0 h1:Ovs26xHkKqVztRpIrF/92BcuyuQ/YW4NSIpoGtfXNho=
@ -441,6 +444,7 @@ golang.org/x/sys v0.0.0-20200116001909-b77594299b42/go.mod h1:h1NjWce9XRLGQEsW7w
golang.org/x/sys v0.0.0-20200223170610-d5e6a3e2c0ae/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20200323222414-85ca7c5b95cd h1:xhmwyvizuTgC2qz7ZlMluP20uW+C3Rm0FD/WLDX8884=
golang.org/x/sys v0.0.0-20200323222414-85ca7c5b95cd/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/text v0.0.0-20170915032832-14c0d48ead0c/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ=
golang.org/x/text v0.3.0/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ=
golang.org/x/text v0.3.1-0.20180807135948-17ff2d5776d2/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ=
golang.org/x/text v0.3.2 h1:tW2bmiBqwgJj/UpqtC8EpXEZVYOwU0yG4iWbprSVAcs=
@ -454,6 +458,7 @@ golang.org/x/tools v0.0.0-20190226205152-f727befe758c/go.mod h1:9Yl7xja0Znq3iFh3
golang.org/x/tools v0.0.0-20190311212946-11955173bddd/go.mod h1:LCzVGOaR6xXOjkQ3onu1FJEFr0SW1gC7cKk1uF8kGRs=
golang.org/x/tools v0.0.0-20190312170243-e65039ee4138/go.mod h1:LCzVGOaR6xXOjkQ3onu1FJEFr0SW1gC7cKk1uF8kGRs=
golang.org/x/tools v0.0.0-20190328211700-ab21143f2384/go.mod h1:LCzVGOaR6xXOjkQ3onu1FJEFr0SW1gC7cKk1uF8kGRs=
golang.org/x/tools v0.0.0-20190425150028-36563e24a262/go.mod h1:RgjU9mgBXZiqYHBnxXauZ1Gv1EHHAz9KjViQ78xBX0Q=
golang.org/x/tools v0.0.0-20190524140312-2c0ae7006135/go.mod h1:RgjU9mgBXZiqYHBnxXauZ1Gv1EHHAz9KjViQ78xBX0Q=
golang.org/x/tools v0.0.0-20190606124116-d0a3d012864b/go.mod h1:/rFqwRUd4F7ZHNgwSSTFct+R/Kf4OFW1sUzUTQQTgfc=
golang.org/x/tools v0.0.0-20190802220118-1d1727260058/go.mod h1:jcCCGcm9btYwXyDqrUWc6MKQKKGJCWEQ3AfLSRIbEuI=
@ -530,6 +535,8 @@ honnef.co/go/tools v0.0.0-20180728063816-88497007e858/go.mod h1:rf3lG4BRIbNafJWh
honnef.co/go/tools v0.0.0-20190102054323-c2f93a96b099/go.mod h1:rf3lG4BRIbNafJWhAfAdb/ePZxsR/4RtNHQocxwk9r4=
honnef.co/go/tools v0.0.0-20190106161140-3f1c8253044a/go.mod h1:rf3lG4BRIbNafJWhAfAdb/ePZxsR/4RtNHQocxwk9r4=
honnef.co/go/tools v0.0.0-20190523083050-ea95bdfd59fc/go.mod h1:rf3lG4BRIbNafJWhAfAdb/ePZxsR/4RtNHQocxwk9r4=
rsc.io/quote/v3 v3.1.0/go.mod h1:yEA65RcK8LyAZtP9Kv3t0HmxON59tX3rD+tICJqUlj0=
rsc.io/sampler v1.3.0/go.mod h1:T1hPZKmBbMNahiBKFy5HrXp6adAjACjK9JXDnKaTXpA=
xorm.io/builder v0.3.6 h1:ha28mQ2M+TFx96Hxo+iq6tQgnkC9IZkM6D8w9sKHHF8=
xorm.io/builder v0.3.6/go.mod h1:LEFAPISnRzG+zxaxj2vPicRwz67BdhFreKg8yv8/TgU=
xorm.io/core v0.7.2/go.mod h1:jJfd0UAEzZ4t87nbQYtVjmqpIODugN6PD2D9E+dJvdM=
@ -1,4 +1,4 @@
{
  "stable": "7.0.5",
  "testing": "7.1.0-beta1"
  "testing": "7.1.0-beta2"
}
@ -25,6 +25,7 @@
    "packages:publishCanary": "lerna publish from-package --contents dist --dist-tag canary --yes",
    "packages:publishLatest": "lerna publish from-package --contents dist --yes",
    "packages:publishNext": "lerna publish from-package --contents dist --dist-tag next --yes",
    "packages:publishDev": "lerna publish from-package --contents dist --dist-tag dev --yes --registry http://grafana-npm.local:4873 --force-publish=*",
    "packages:typecheck": "lerna run typecheck",
    "precommit": "grafana-toolkit precommit",
    "prettier:check": "prettier --list-different \"**/*.{ts,tsx,scss}\"",
@ -267,7 +268,7 @@
    "regenerator-runtime": "0.13.3",
    "reselect": "4.0.0",
    "rst2html": "github:thoward/rst2html#990cb89",
    "rxjs": "6.5.5",
    "rxjs": "6.6.0",
    "search-query-parser": "1.5.4",
    "slate": "0.47.8",
    "slate-plain-serializer": "0.7.10",
@ -52,3 +52,42 @@ To build individual packages, run:

```
grafana-toolkit package:build --scope=<ui|toolkit|runtime|data>
```

### Setting up @grafana/* packages for local development

A known issue with @grafana/* packages is that problems are often discovered on the canary channel (see [versioning overview](#Versioning)) when the version has already been pushed to npm.

We can easily avoid that by setting up a local package registry and testing the packages before actually publishing to npm.

In this guide you will set up a local [Verdaccio](https://verdaccio.org/) registry to act as a fake npm registry. This enables testing @grafana/* packages without the need to push to master.

#### Setting up local npm registry

From your terminal:
1. Modify the `/etc/hosts` file and add the following entry: `127.0.0.1 grafana-npm.local`
2. Navigate to the `devenv/local-npm` directory.
3. Run `docker-compose up`. This starts your local npm registry, available at http://grafana-npm.local:4873/
4. Run `npm login --registry=http://grafana-npm.local:4873 --scope=@grafana`. This allows you to publish any @grafana/* package to the local registry.
5. Run `npm config set @grafana:registry http://grafana-npm.local:4873`. This configures npm to install @grafana scoped packages from your local registry.
#### Publishing packages to local npm registry

You need to follow the [manual packages release procedure](#manual-release). The only difference is that you need to run the `yarn packages:publishDev` task in order to publish to your local registry.

From your terminal:
1. Run `yarn packages:prepare`.
2. Commit the changes in the package.json and lerna.json files.
3. Build the packages: `yarn packages:build`.
4. Run `yarn packages:publishDev`.
5. Navigate to http://grafana-npm.local:4873 and verify that the version was published.

Locally published packages will be published under the `dev` channel, so in your plugin's package.json file you can use that channel. For example:
```
// plugin's package.json

{
  ...
  "@grafana/data": "dev"
}
```
@ -26,7 +26,7 @@
    "@braintree/sanitize-url": "4.0.0",
    "apache-arrow": "0.16.0",
    "lodash": "4.17.15",
    "rxjs": "6.5.5",
    "rxjs": "6.6.0",
    "xss": "1.0.6"
  },
  "devDependencies": {
@ -21,7 +21,7 @@ const buildCjsPackage = ({ env }) => {
          globals: {},
        },
      ],
      external: ['lodash', 'apache-arrow'], // Use Lodash & arrow from grafana
      external: ['lodash', 'rxjs', 'apache-arrow'], // Use Lodash, rxjs & arrow from grafana
      plugins: [
        json({
          include: ['../../node_modules/moment-timezone/data/packed/latest.json'],
@ -53,15 +53,15 @@ export class DataFrameView<T = any> extends FunctionalVector<T> {
   * Helper function to return the {@link DisplayProcessor} for a given field column.
   * @param colIndex - the field column index for the data frame.
   */
  getFieldDisplayProcessor(colIndex: number): DisplayProcessor | null {
  getFieldDisplayProcessor(colIndex: number): DisplayProcessor | undefined {
    if (!this.dataFrame || !this.dataFrame.fields) {
      return null;
      return undefined;
    }

    const field = this.dataFrame.fields[colIndex];

    if (!field || !field.display) {
      return null;
      return undefined;
    }

    return field.display;
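A hedged caller-side sketch of the new contract; the import paths assume a module next to `DataFrameView.ts`, and a plain truthiness check covers both the old `null` and the new `undefined`:

```ts
import { toDataFrame } from './processDataFrame'; // assumed sibling module; adjust to your layout
import { FieldType } from '../types/dataFrame';
import { DataFrameView } from './DataFrameView';

const frame = toDataFrame({
  fields: [
    { name: 'Time', type: FieldType.time, values: [100] },
    { name: 'Value', type: FieldType.number, values: [1] },
  ],
});

const view = new DataFrameView(frame);
const processor = view.getFieldDisplayProcessor(1);

// Truthiness check handles both null (old) and undefined (new) return values.
if (processor) {
  const displayValue = processor(frame.fields[1].values.get(0));
}
```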
packages/grafana-data/src/dataframe/utils.ts (new file)

@ -0,0 +1,12 @@
import { DataFrame, FieldType } from '../types/dataFrame';

export const isTimeSerie = (frame: DataFrame): boolean => {
  if (frame.fields.length > 2) {
    return false;
  }
  return !!frame.fields.find(field => field.type === FieldType.time);
};

export const isTimeSeries = (data: DataFrame[]): boolean => {
  return !data.find(frame => !isTimeSerie(frame));
};
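A hedged usage sketch for these helpers; the import paths assume a module sitting next to `utils.ts`:

```ts
import { FieldType } from '../types/dataFrame';
import { toDataFrame } from './processDataFrame'; // assumed sibling module; adjust to your layout
import { isTimeSerie, isTimeSeries } from './utils';

// A frame with at most two fields, one of them a time field, qualifies.
const frame = toDataFrame({
  fields: [
    { name: 'Time', type: FieldType.time, values: [100, 200] },
    { name: 'Value', type: FieldType.number, values: [1, 2] },
  ],
});

isTimeSerie(frame);    // true: two fields, one of them a time field
isTimeSeries([frame]); // true: every frame in the array is a time series
```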
@ -8,10 +8,11 @@ import { filterFramesByRefIdTransformer } from './transformers/filterByRefId';
import { orderFieldsTransformer } from './transformers/order';
import { organizeFieldsTransformer } from './transformers/organize';
import { seriesToColumnsTransformer } from './transformers/seriesToColumns';
import { seriesToRowsTransformer } from './transformers/seriesToRows';
import { renameFieldsTransformer } from './transformers/rename';
import { labelsToFieldsTransformer } from './transformers/labelsToFields';
import { ensureColumnsTransformer } from './transformers/ensureColumns';
import { mergeTransformer } from './transformers/merge/merge';
import { mergeTransformer } from './transformers/merge';

export const standardTransformers = {
  noopTransformer,
@ -25,6 +26,7 @@ export const standardTransformers = {
  reduceTransformer,
  calculateFieldTransformer,
  seriesToColumnsTransformer,
  seriesToRowsTransformer,
  renameFieldsTransformer,
  labelsToFieldsTransformer,
  ensureColumnsTransformer,
@ -8,6 +8,7 @@ export enum DataTransformerID {
  rename = 'rename',
  calculateField = 'calculateField',
  seriesToColumns = 'seriesToColumns',
  seriesToRows = 'seriesToRows',
  merge = 'merge',
  labelsToFields = 'labelsToFields',
  filterFields = 'filterFields',
@ -1,9 +1,9 @@
import { mockTransformationsRegistry } from '../../../utils/tests/mockTransformationsRegistry';
import { DataTransformerConfig, Field, FieldType } from '../../../types';
import { DataTransformerID } from '../ids';
import { toDataFrame } from '../../../dataframe';
import { transformDataFrame } from '../../transformDataFrame';
import { ArrayVector } from '../../../vector';
import { mockTransformationsRegistry } from '../../utils/tests/mockTransformationsRegistry';
import { DataTransformerConfig, Field, FieldType } from '../../types';
import { DataTransformerID } from './ids';
import { toDataFrame } from '../../dataframe';
import { transformDataFrame } from '../transformDataFrame';
import { ArrayVector } from '../../vector';
import { mergeTransformer, MergeTransformerOptions } from './merge';

describe('Merge multipe to single', () => {
@ -35,12 +35,11 @@ describe('Merge multipe to single', () => {

    const result = transformDataFrame([cfg], [seriesA, seriesB]);
    const expected: Field[] = [
      createField('Time', FieldType.time, [1000, 2000]),
      createField('Metric', FieldType.string, ['A', 'B']),
      createField('Value', FieldType.number, [1, -1]),
      createField('Time', FieldType.time, [2000, 1000]),
      createField('Temp', FieldType.number, [-1, 1]),
    ];

    expect(result[0].fields).toMatchObject(expected);
    expect(unwrap(result[0].fields)).toEqual(expected);
  });

  it('combine two series with multiple values into one', () => {
@ -67,12 +66,11 @@ describe('Merge multipe to single', () => {

    const result = transformDataFrame([cfg], [seriesA, seriesB]);
    const expected: Field[] = [
      createField('Time', FieldType.time, [100, 100, 125, 126, 150, 200]),
      createField('Metric', FieldType.string, ['A', 'B', 'B', 'B', 'A', 'A']),
      createField('Value', FieldType.number, [1, -1, 2, 3, 4, 5]),
      createField('Time', FieldType.time, [200, 150, 126, 125, 100, 100]),
      createField('Temp', FieldType.number, [5, 4, 3, 2, 1, -1]),
    ];

    expect(result[0].fields).toMatchObject(expected);
    expect(unwrap(result[0].fields)).toEqual(expected);
  });

  it('combine three series into one', () => {
@ -107,12 +105,11 @@ describe('Merge multipe to single', () => {

    const result = transformDataFrame([cfg], [seriesA, seriesB, seriesC]);
    const expected: Field[] = [
      createField('Time', FieldType.time, [500, 1000, 2000]),
      createField('Metric', FieldType.string, ['C', 'A', 'B']),
      createField('Value', FieldType.number, [2, 1, -1]),
      createField('Time', FieldType.time, [2000, 1000, 500]),
      createField('Temp', FieldType.number, [-1, 1, 2]),
    ];

    expect(result[0].fields).toMatchObject(expected);
    expect(unwrap(result[0].fields)).toEqual(expected);
  });

  it('combine one serie and two tables into one table', () => {
@ -149,13 +146,12 @@ describe('Merge multipe to single', () => {

    const result = transformDataFrame([cfg], [tableA, seriesB, tableB]);
    const expected: Field[] = [
      createField('Time', FieldType.time, [500, 1000, 1000]),
      createField('Metric', FieldType.string, ['C', 'A', 'B']),
      createField('Temp', FieldType.number, [2, 1, -1]),
      createField('Humidity', FieldType.number, [5, 10, null]),
      createField('Time', FieldType.time, [1000, 1000, 500]),
      createField('Temp', FieldType.number, [1, -1, 2]),
      createField('Humidity', FieldType.number, [10, null, 5]),
    ];

    expect(result[0].fields).toMatchObject(expected);
    expect(unwrap(result[0].fields)).toEqual(expected);
  });

  it('combine one serie and two tables with ISO dates into one table', () => {
@ -192,13 +188,12 @@ describe('Merge multipe to single', () => {

    const result = transformDataFrame([cfg], [tableA, seriesB, tableC]);
    const expected: Field[] = [
      createField('Time', FieldType.time, ['2019-09-01T11:10:23Z', '2019-10-01T11:10:23Z', '2019-11-01T11:10:23Z']),
      createField('Metric', FieldType.string, ['B', 'A', 'C']),
      createField('Temp', FieldType.number, [-1, 1, 2]),
      createField('Humidity', FieldType.number, [null, 10, 5]),
      createField('Time', FieldType.time, ['2019-11-01T11:10:23Z', '2019-10-01T11:10:23Z', '2019-09-01T11:10:23Z']),
      createField('Temp', FieldType.number, [2, 1, -1]),
      createField('Humidity', FieldType.number, [5, 10, null]),
    ];

    expect(result[0].fields).toMatchObject(expected);
    expect(unwrap(result[0].fields)).toEqual(expected);
  });

  it('combine three tables with multiple values into one', () => {
@ -235,14 +230,15 @@ describe('Merge multipe to single', () => {
    });

    const result = transformDataFrame([cfg], [tableA, tableB, tableC]);

    const expected: Field[] = [
      createField('Time', FieldType.time, [100, 100, 100, 124, 125, 126, 149, 150, 200]),
      createField('Temp', FieldType.number, [1, -1, 1, 4, 2, 3, 5, 4, 5]),
      createField('Humidity', FieldType.number, [10, null, 22, 25, null, null, 30, 14, 55]),
      createField('Enabled', FieldType.boolean, [null, true, null, null, false, true, null, null, null]),
      createField('Time', FieldType.time, [200, 150, 149, 126, 125, 124, 100, 100, 100]),
      createField('Temp', FieldType.number, [5, 4, 5, 3, 2, 4, 1, -1, 1]),
      createField('Humidity', FieldType.number, [55, 14, 30, null, null, 25, 10, null, 22]),
      createField('Enabled', FieldType.boolean, [null, null, null, true, false, null, null, true, null]),
    ];

    expect(result[0].fields).toMatchObject(expected);
    expect(unwrap(result[0].fields)).toEqual(expected);
  });

  it('combine two time series, where first serie fields has displayName, into one', () => {
@ -269,13 +265,14 @@ describe('Merge multipe to single', () => {

    const result = transformDataFrame([cfg], [serieA, serieB]);
    const expected: Field[] = [
      createField('Time', FieldType.time, [100, 100, 125, 126, 150, 200]),
      createField('Metric', FieldType.string, ['A', 'B', 'B', 'B', 'A', 'A']),
      createField('Value', FieldType.number, [1, -1, 2, 3, 4, 5]),
      createField('Time', FieldType.time, [200, 150, 126, 125, 100, 100]),
      createField('Temp', FieldType.number, [5, 4, 3, 2, 1, -1]),
    ];

    expect(result[0].fields[2].config).toEqual({});
    expect(result[0].fields).toMatchObject(expected);
    const fields = unwrap(result[0].fields);

    expect(fields[1].config).toEqual({});
    expect(fields).toEqual(expected);
  });

  it('combine two time series, where first serie fields has units, into one', () => {
@ -302,13 +299,14 @@ describe('Merge multipe to single', () => {

    const result = transformDataFrame([cfg], [serieA, serieB]);
    const expected: Field[] = [
      createField('Time', FieldType.time, [100, 100, 125, 126, 150, 200]),
      createField('Metric', FieldType.string, ['A', 'B', 'B', 'B', 'A', 'A']),
      createField('Value', FieldType.number, [1, -1, 2, 3, 4, 5], { units: 'celsius' }),
      createField('Time', FieldType.time, [200, 150, 126, 125, 100, 100]),
      createField('Temp', FieldType.number, [5, 4, 3, 2, 1, -1], { units: 'celsius' }),
    ];

    expect(result[0].fields[2].config).toEqual({ units: 'celsius' });
    expect(result[0].fields).toMatchObject(expected);
    const fields = unwrap(result[0].fields);

    expect(fields[1].config).toEqual({ units: 'celsius' });
    expect(fields).toEqual(expected);
  });

  it('combine two time series, where second serie fields has units, into one', () => {
@ -335,16 +333,28 @@ describe('Merge multipe to single', () => {

    const result = transformDataFrame([cfg], [serieA, serieB]);
    const expected: Field[] = [
      createField('Time', FieldType.time, [100, 100, 125, 126, 150, 200]),
      createField('Metric', FieldType.string, ['A', 'B', 'B', 'B', 'A', 'A']),
      createField('Value', FieldType.number, [1, -1, 2, 3, 4, 5]),
      createField('Time', FieldType.time, [200, 150, 126, 125, 100, 100]),
      createField('Temp', FieldType.number, [5, 4, 3, 2, 1, -1]),
    ];

    expect(result[0].fields[2].config).toEqual({});
    expect(result[0].fields).toMatchObject(expected);
    const fields = unwrap(result[0].fields);

    expect(fields[1].config).toEqual({});
    expect(fields).toEqual(expected);
  });
});

const createField = (name: string, type: FieldType, values: any[], config = {}): Field => {
  return { name, type, values: new ArrayVector(values), config, labels: undefined };
};

const unwrap = (fields: Field[]): Field[] => {
  return fields.map(field =>
    createField(
      field.name,
      field.type,
      field.values.toArray().map((value: any) => value),
      field.config
    )
  );
};
packages/grafana-data/src/transformations/transformers/merge.ts (new file)

@ -0,0 +1,216 @@
import { DataTransformerID } from './ids';
import { DataTransformerInfo } from '../../types/transformations';
import { DataFrame, Field, FieldType } from '../../types/dataFrame';
import { omit } from 'lodash';
import { ArrayVector } from '../../vector/ArrayVector';
import { MutableDataFrame, sortDataFrame } from '../../dataframe';

type MergeDetailsKeyFactory = (existing: Record<string, any>, value: Record<string, any>) => string;

export interface MergeTransformerOptions {}

export const mergeTransformer: DataTransformerInfo<MergeTransformerOptions> = {
  id: DataTransformerID.merge,
  name: 'Merge series/tables',
  description: 'Merges multiple series/tables into a single serie/table',
  defaultOptions: {},
  transformer: (options: MergeTransformerOptions) => {
    return (data: DataFrame[]) => {
      if (!Array.isArray(data) || data.length <= 1) {
        return data;
      }

      const fieldByName = new Set<string>();
      const fieldIndexByName: Record<string, Record<number, number>> = {};
      const fieldNamesForKey: string[] = [];
      const dataFrame = new MutableDataFrame();

      for (let frameIndex = 0; frameIndex < data.length; frameIndex++) {
        const frame = data[frameIndex];

        for (let fieldIndex = 0; fieldIndex < frame.fields.length; fieldIndex++) {
          const field = frame.fields[fieldIndex];

          if (!fieldByName.has(field.name)) {
            dataFrame.addField(copyFieldStructure(field));
            fieldByName.add(field.name);
          }

          fieldIndexByName[field.name] = fieldIndexByName[field.name] || {};
          fieldIndexByName[field.name][frameIndex] = fieldIndex;

          if (data.length - 1 !== frameIndex) {
            continue;
          }

          if (Object.keys(fieldIndexByName[field.name]).length === data.length) {
            fieldNamesForKey.push(field.name);
          }
        }
      }

      if (fieldNamesForKey.length === 0) {
        return data;
      }

      const dataFrameIndexByKey: Record<string, number> = {};
      const keyFactory = createKeyFactory(data, fieldIndexByName, fieldNamesForKey);
      const detailsKeyFactory = createDetailsKeyFactory(fieldByName, fieldNamesForKey);
      const valueMapper = createValueMapper(data, fieldByName, fieldIndexByName);

      for (let frameIndex = 0; frameIndex < data.length; frameIndex++) {
        const frame = data[frameIndex];

        for (let valueIndex = 0; valueIndex < frame.length; valueIndex++) {
          const key = keyFactory(frameIndex, valueIndex);
          const value = valueMapper(frameIndex, valueIndex);
          mergeOrAdd(key, value, dataFrame, dataFrameIndexByKey, detailsKeyFactory);
        }
      }

      const timeIndex = dataFrame.fields.findIndex(field => field.type === FieldType.time);
      if (typeof timeIndex === 'number') {
        return [sortDataFrame(dataFrame, timeIndex, true)];
      }
      return [dataFrame];
    };
  },
};

const copyFieldStructure = (field: Field): Field => {
  return {
    ...omit(field, ['values', 'state', 'labels', 'config']),
    values: new ArrayVector(),
    config: {
      ...omit(field.config, 'displayName'),
    },
  };
};

const createKeyFactory = (
  data: DataFrame[],
  fieldPointerByName: Record<string, Record<string, number>>,
  keyFieldNames: string[]
) => {
  const factoryIndex = keyFieldNames.reduce((index: Record<string, number[]>, fieldName) => {
    return Object.keys(fieldPointerByName[fieldName]).reduce((index: Record<string, number[]>, frameIndex) => {
      index[frameIndex] = index[frameIndex] || [];
      index[frameIndex].push(fieldPointerByName[fieldName][frameIndex]);
      return index;
    }, index);
  }, {});

  return (frameIndex: number, valueIndex: number): string => {
    return factoryIndex[frameIndex].reduce((key: string, fieldIndex: number) => {
      return key + data[frameIndex].fields[fieldIndex].values.get(valueIndex);
    }, '');
  };
};

const createDetailsKeyFactory = (fieldByName: Set<string>, fieldNamesForKey: string[]): MergeDetailsKeyFactory => {
  const fieldNamesToExclude = fieldNamesForKey.reduce((exclude: Record<string, boolean>, fieldName: string) => {
    exclude[fieldName] = true;
    return exclude;
  }, {});

  const checkOrder = Array.from(fieldByName).filter(fieldName => !fieldNamesToExclude[fieldName]);

  return (existing: Record<string, any>, value: Record<string, any>) => {
    return checkOrder.reduce((key: string, fieldName: string) => {
      if (typeof existing[fieldName] === 'undefined') {
        return key;
      }
      if (typeof value[fieldName] === 'undefined') {
        return key;
      }
      if (existing[fieldName] === value[fieldName]) {
        return key;
      }
      return key + value[fieldName];
    }, '');
  };
};

const createValueMapper = (
  data: DataFrame[],
  fieldByName: Set<string>,
  fieldIndexByName: Record<string, Record<number, number>>
) => {
  return (frameIndex: number, valueIndex: number) => {
    const value: Record<string, any> = {};
    const fieldNames = Array.from(fieldByName);

    for (const fieldName of fieldNames) {
      const fieldIndexByFrameIndex = fieldIndexByName[fieldName];
      if (!fieldIndexByFrameIndex) {
        continue;
      }

      const fieldIndex = fieldIndexByFrameIndex[frameIndex];
      if (typeof fieldIndex !== 'number') {
        continue;
      }

      const frame = data[frameIndex];
      if (!frame || !frame.fields) {
        continue;
      }

      const field = frame.fields[fieldIndex];
      if (!field || !field.values) {
        continue;
      }

      value[fieldName] = field.values.get(valueIndex);
    }

    return value;
  };
};

const isMergable = (existing: Record<string, any>, value: Record<string, any>): boolean => {
  let mergable = true;

  for (const prop in value) {
    if (typeof existing[prop] === 'undefined') {
      continue;
    }

    if (existing[prop] === null) {
      continue;
    }

    if (existing[prop] !== value[prop]) {
      mergable = false;
      break;
    }
  }

  return mergable;
};

const mergeOrAdd = (
  key: string,
  value: Record<string, any>,
  dataFrame: MutableDataFrame,
  dataFrameIndexByKey: Record<string, number>,
  detailsKeyFactory: MergeDetailsKeyFactory
) => {
  if (typeof dataFrameIndexByKey[key] === 'undefined') {
    dataFrame.add(value);
    dataFrameIndexByKey[key] = dataFrame.length - 1;
    return;
  }

  const dataFrameIndex = dataFrameIndexByKey[key];
  const existing = dataFrame.get(dataFrameIndex);

  if (isMergable(existing, value)) {
    const merged = { ...existing, ...value };
    dataFrame.set(dataFrameIndex, merged);
    return;
  }

  const nextKey = key + detailsKeyFactory(existing, value);
  mergeOrAdd(nextKey, value, dataFrame, dataFrameIndexByKey, detailsKeyFactory);
};
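A hedged usage sketch for the new transformer, mirroring the merge tests above; it assumes the transformer has been registered, as the tests do via `mockTransformationsRegistry`:

```ts
import { toDataFrame } from '../../dataframe';
import { transformDataFrame } from '../transformDataFrame';
import { DataTransformerConfig, FieldType } from '../../types';
import { DataTransformerID } from './ids';
import { MergeTransformerOptions } from './merge';

// Two frames sharing the Time/Temp field names, as in the tests above.
const seriesA = toDataFrame({
  name: 'A',
  fields: [
    { name: 'Time', type: FieldType.time, values: [1000] },
    { name: 'Temp', type: FieldType.number, values: [1] },
  ],
});
const seriesB = toDataFrame({
  name: 'B',
  fields: [
    { name: 'Time', type: FieldType.time, values: [2000] },
    { name: 'Temp', type: FieldType.number, values: [-1] },
  ],
});

const cfg: DataTransformerConfig<MergeTransformerOptions> = {
  id: DataTransformerID.merge,
  options: {},
};

// One merged frame, sorted by the time field in descending order.
const merged = transformDataFrame([cfg], [seriesA, seriesB]);
```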
@ -1,135 +0,0 @@
import { MutableDataFrame } from '../../../dataframe';
import {
  DataFrame,
  FieldType,
  Field,
  TIME_SERIES_TIME_FIELD_NAME,
  TIME_SERIES_VALUE_FIELD_NAME,
} from '../../../types/dataFrame';
import { ArrayVector } from '../../../vector';
import { omit } from 'lodash';
import { getFrameDisplayName } from '../../../field';

interface DataFrameBuilderResult {
  dataFrame: MutableDataFrame;
  valueMapper: ValueMapper;
}

type ValueMapper = (frame: DataFrame, valueIndex: number, timeIndex: number) => Record<string, any>;

const TIME_SERIES_METRIC_FIELD_NAME = 'Metric';

export class DataFrameBuilder {
  private isOnlyTimeSeries: boolean;
  private displayMetricField: boolean;
  private valueFields: Record<string, Field>;
  private timeField: Field | null;

  constructor() {
    this.isOnlyTimeSeries = true;
    this.displayMetricField = false;
    this.valueFields = {};
    this.timeField = null;
  }

  addFields(frame: DataFrame, timeIndex: number): void {
    if (frame.fields.length > 2) {
      this.isOnlyTimeSeries = false;
    }

    if (frame.fields.length === 2) {
      this.displayMetricField = true;
    }

    for (let index = 0; index < frame.fields.length; index++) {
      const field = frame.fields[index];

      if (index === timeIndex) {
        if (!this.timeField) {
          this.timeField = this.copyStructure(field, TIME_SERIES_TIME_FIELD_NAME);
        }
        continue;
      }

      if (!this.valueFields[field.name]) {
        this.valueFields[field.name] = this.copyStructure(field, field.name);
      }
    }
  }

  build(): DataFrameBuilderResult {
    return {
      dataFrame: this.createDataFrame(),
      valueMapper: this.createValueMapper(),
    };
  }

  private createValueMapper(): ValueMapper {
    return (frame: DataFrame, valueIndex: number, timeIndex: number) => {
      return frame.fields.reduce((values: Record<string, any>, field, index) => {
        const value = field.values.get(valueIndex);

        if (index === timeIndex) {
          values[TIME_SERIES_TIME_FIELD_NAME] = value;

          if (this.displayMetricField) {
            values[TIME_SERIES_METRIC_FIELD_NAME] = getFrameDisplayName(frame);
          }
          return values;
        }

        if (this.isOnlyTimeSeries) {
          values[TIME_SERIES_VALUE_FIELD_NAME] = value;
          return values;
        }

        values[field.name] = value;
        return values;
      }, {});
    };
  }

  private createDataFrame(): MutableDataFrame {
    const dataFrame = new MutableDataFrame();

    if (this.timeField) {
      dataFrame.addField(this.timeField);

      if (this.displayMetricField) {
        dataFrame.addField({
          name: TIME_SERIES_METRIC_FIELD_NAME,
          type: FieldType.string,
        });
      }
    }

    const valueFields = Object.values(this.valueFields);

    if (this.isOnlyTimeSeries) {
      if (valueFields.length > 0) {
        dataFrame.addField({
          ...valueFields[0],
          name: TIME_SERIES_VALUE_FIELD_NAME,
        });
      }
      return dataFrame;
    }

    for (const field of valueFields) {
      dataFrame.addField(field);
    }

    return dataFrame;
  }

  private copyStructure(field: Field, name: string): Field {
    return {
      ...omit(field, ['values', 'name', 'state', 'labels', 'config']),
      name,
      values: new ArrayVector(),
      config: {
        ...omit(field.config, 'displayName'),
      },
    };
  }
}
@ -1,74 +0,0 @@
import { DataFrame } from '../../../types/dataFrame';
import { timeComparer } from '../../../field/fieldComparers';
import { sortDataFrame } from '../../../dataframe';
import { TimeFieldsByFrame } from './TimeFieldsByFrame';

interface DataFrameStackValue {
  valueIndex: number;
  timeIndex: number;
  frame: DataFrame;
}
export class DataFramesStackedByTime {
  private valuesPointerByFrame: Record<number, number>;
  private dataFrames: DataFrame[];
  private isSorted: boolean;

  constructor(private timeFields: TimeFieldsByFrame) {
    this.valuesPointerByFrame = {};
    this.dataFrames = [];
    this.isSorted = false;
  }

  push(frame: DataFrame): number {
    const index = this.dataFrames.length;
    this.valuesPointerByFrame[index] = 0;
    this.dataFrames.push(frame);
    return index;
  }

  pop(): DataFrameStackValue {
    if (!this.isSorted) {
      this.sortByTime();
      this.isSorted = true;
    }

    const frameIndex = this.dataFrames.reduce((champion, frame, index) => {
      const championTime = this.peekTimeValueForFrame(champion);
      const contenderTime = this.peekTimeValueForFrame(index);
      return timeComparer(contenderTime, championTime) >= 0 ? champion : index;
    }, 0);

    const previousPointer = this.movePointerForward(frameIndex);

    return {
      frame: this.dataFrames[frameIndex],
      valueIndex: previousPointer,
      timeIndex: this.timeFields.getFieldIndex(frameIndex),
    };
  }

  getLength(): number {
    const frames = Object.values(this.dataFrames);
    return frames.reduce((length: number, frame) => (length += frame.length), 0);
  }

  private peekTimeValueForFrame(frameIndex: number): any {
    const timeField = this.timeFields.getField(frameIndex);
    const valuePointer = this.valuesPointerByFrame[frameIndex];
    return timeField.values.get(valuePointer);
  }

  private movePointerForward(frameIndex: number): number {
    const currentPointer = this.valuesPointerByFrame[frameIndex];
    this.valuesPointerByFrame[frameIndex] = currentPointer + 1;

    return currentPointer;
  }

  private sortByTime() {
    this.dataFrames = this.dataFrames.map((frame, index) => {
      const timeFieldIndex = this.timeFields.getFieldIndex(index);
      return sortDataFrame(frame, timeFieldIndex);
    });
  }
}
@ -1,39 +0,0 @@
import { isNumber } from 'lodash';
import { Field, DataFrame } from '../../../types/dataFrame';
import { getTimeField } from '../../../dataframe';

export class TimeFieldsByFrame {
  private timeIndexByFrameIndex: Record<number, number>;
  private timeFieldByFrameIndex: Record<number, Field>;

  constructor() {
    this.timeIndexByFrameIndex = {};
    this.timeFieldByFrameIndex = {};
  }

  add(frameIndex: number, frame: DataFrame): void {
    const fieldDescription = getTimeField(frame);
    const timeIndex = fieldDescription?.timeIndex;
    const timeField = fieldDescription?.timeField;

    if (isNumber(timeIndex)) {
      this.timeIndexByFrameIndex[frameIndex] = timeIndex;
    }

    if (timeField) {
      this.timeFieldByFrameIndex[frameIndex] = timeField;
    }
  }

  getField(frameIndex: number): Field {
    return this.timeFieldByFrameIndex[frameIndex];
  }

  getFieldIndex(frameIndex: number): number {
    return this.timeIndexByFrameIndex[frameIndex];
  }

  getLength() {
    return Object.keys(this.timeIndexByFrameIndex).length;
  }
}
@ -1,47 +0,0 @@
import { DataTransformerID } from '../ids';
import { DataTransformerInfo } from '../../../types/transformations';
import { DataFrame } from '../../../types/dataFrame';
import { DataFrameBuilder } from './DataFrameBuilder';
import { TimeFieldsByFrame } from './TimeFieldsByFrame';
import { DataFramesStackedByTime } from './DataFramesStackedByTime';

export interface MergeTransformerOptions {}

export const mergeTransformer: DataTransformerInfo<MergeTransformerOptions> = {
  id: DataTransformerID.merge,
  name: 'Merge series/tables',
  description: 'Merges multiple series/tables by time into a single serie/table',
  defaultOptions: {},
  transformer: (options: MergeTransformerOptions) => {
    return (data: DataFrame[]) => {
      if (!Array.isArray(data) || data.length <= 1) {
        return data;
      }

      const timeFields = new TimeFieldsByFrame();
      const framesStack = new DataFramesStackedByTime(timeFields);
      const dataFrameBuilder = new DataFrameBuilder();

      for (const frame of data) {
        const frameIndex = framesStack.push(frame);
        timeFields.add(frameIndex, frame);

        const timeIndex = timeFields.getFieldIndex(frameIndex);
        dataFrameBuilder.addFields(frame, timeIndex);
      }

      if (data.length !== timeFields.getLength()) {
        return data;
      }

      const { dataFrame, valueMapper } = dataFrameBuilder.build();

      for (let index = 0; index < framesStack.getLength(); index++) {
        const { frame, valueIndex, timeIndex } = framesStack.pop();
        dataFrame.add(valueMapper(frame, valueIndex, timeIndex));
      }

      return [dataFrame];
    };
  },
};
@ -0,0 +1,237 @@
import { mockTransformationsRegistry } from '../../utils/tests/mockTransformationsRegistry';
import { DataTransformerConfig, Field, FieldType } from '../../types';
import { DataTransformerID } from './ids';
import { toDataFrame } from '../../dataframe';
import { transformDataFrame } from '../transformDataFrame';
import { ArrayVector } from '../../vector';
import { seriesToRowsTransformer, SeriesToRowsTransformerOptions } from './seriesToRows';

describe('Series to rows', () => {
  beforeAll(() => {
    mockTransformationsRegistry([seriesToRowsTransformer]);
  });

  it('combine two series into one', () => {
    const cfg: DataTransformerConfig<SeriesToRowsTransformerOptions> = {
      id: DataTransformerID.seriesToRows,
      options: {},
    };

    const seriesA = toDataFrame({
      name: 'A',
      fields: [
        { name: 'Time', type: FieldType.time, values: [1000] },
        { name: 'Temp', type: FieldType.number, values: [1] },
      ],
    });

    const seriesB = toDataFrame({
      name: 'B',
      fields: [
        { name: 'Time', type: FieldType.time, values: [2000] },
        { name: 'Temp', type: FieldType.number, values: [-1] },
      ],
    });

    const result = transformDataFrame([cfg], [seriesA, seriesB]);
    const expected: Field[] = [
      createField('Time', FieldType.time, [2000, 1000]),
      createField('Metric', FieldType.string, ['B', 'A']),
      createField('Value', FieldType.number, [-1, 1]),
    ];

    expect(unwrap(result[0].fields)).toEqual(expected);
  });

  it('combine two series with multiple values into one', () => {
    const cfg: DataTransformerConfig<SeriesToRowsTransformerOptions> = {
      id: DataTransformerID.seriesToRows,
      options: {},
    };

    const seriesA = toDataFrame({
      name: 'A',
      fields: [
        { name: 'Time', type: FieldType.time, values: [100, 150, 200] },
        { name: 'Temp', type: FieldType.number, values: [1, 4, 5] },
      ],
    });

    const seriesB = toDataFrame({
      name: 'B',
      fields: [
        { name: 'Time', type: FieldType.time, values: [100, 125, 126] },
        { name: 'Temp', type: FieldType.number, values: [-1, 2, 3] },
      ],
    });

    const result = transformDataFrame([cfg], [seriesA, seriesB]);
    const expected: Field[] = [
      createField('Time', FieldType.time, [200, 150, 126, 125, 100, 100]),
      createField('Metric', FieldType.string, ['A', 'A', 'B', 'B', 'A', 'B']),
      createField('Value', FieldType.number, [5, 4, 3, 2, 1, -1]),
    ];

    expect(unwrap(result[0].fields)).toEqual(expected);
  });

  it('combine three series into one', () => {
    const cfg: DataTransformerConfig<SeriesToRowsTransformerOptions> = {
      id: DataTransformerID.seriesToRows,
      options: {},
    };

    const seriesA = toDataFrame({
      name: 'A',
      fields: [
        { name: 'Time', type: FieldType.time, values: [1000] },
        { name: 'Temp', type: FieldType.number, values: [1] },
      ],
    });

    const seriesB = toDataFrame({
      name: 'B',
      fields: [
        { name: 'Time', type: FieldType.time, values: [2000] },
        { name: 'Temp', type: FieldType.number, values: [-1] },
      ],
    });

    const seriesC = toDataFrame({
      name: 'C',
      fields: [
        { name: 'Time', type: FieldType.time, values: [500] },
        { name: 'Temp', type: FieldType.number, values: [2] },
      ],
    });

    const result = transformDataFrame([cfg], [seriesA, seriesB, seriesC]);
    const expected: Field[] = [
      createField('Time', FieldType.time, [2000, 1000, 500]),
      createField('Metric', FieldType.string, ['B', 'A', 'C']),
      createField('Value', FieldType.number, [-1, 1, 2]),
    ];

    expect(unwrap(result[0].fields)).toEqual(expected);
  });

  it('combine two time series, where first serie fields has displayName, into one', () => {
    const cfg: DataTransformerConfig<SeriesToRowsTransformerOptions> = {
      id: DataTransformerID.seriesToRows,
      options: {},
    };

    const serieA = toDataFrame({
      name: 'A',
      fields: [
        { name: 'Time', type: FieldType.time, values: [100, 150, 200], config: { displayName: 'Random time' } },
        { name: 'Temp', type: FieldType.number, values: [1, 4, 5], config: { displayName: 'Temp' } },
      ],
    });

    const serieB = toDataFrame({
      name: 'B',
      fields: [
        { name: 'Time', type: FieldType.time, values: [100, 125, 126] },
        { name: 'Temp', type: FieldType.number, values: [-1, 2, 3] },
      ],
    });

    const result = transformDataFrame([cfg], [serieA, serieB]);
    const expected: Field[] = [
      createField('Time', FieldType.time, [200, 150, 126, 125, 100, 100]),
      createField('Metric', FieldType.string, ['A', 'A', 'B', 'B', 'A', 'B']),
      createField('Value', FieldType.number, [5, 4, 3, 2, 1, -1]),
    ];

    const fields = unwrap(result[0].fields);

    expect(fields[2].config).toEqual({});
    expect(fields).toEqual(expected);
  });

  it('combine two time series, where first serie fields has units, into one', () => {
    const cfg: DataTransformerConfig<SeriesToRowsTransformerOptions> = {
      id: DataTransformerID.seriesToRows,
      options: {},
    };

    const serieA = toDataFrame({
      name: 'A',
      fields: [
        { name: 'Time', type: FieldType.time, values: [100, 150, 200] },
        { name: 'Temp', type: FieldType.number, values: [1, 4, 5], config: { units: 'celsius' } },
      ],
    });

    const serieB = toDataFrame({
      name: 'B',
      fields: [
        { name: 'Time', type: FieldType.time, values: [100, 125, 126] },
        { name: 'Temp', type: FieldType.number, values: [-1, 2, 3] },
      ],
    });

    const result = transformDataFrame([cfg], [serieA, serieB]);
    const expected: Field[] = [
      createField('Time', FieldType.time, [200, 150, 126, 125, 100, 100]),
      createField('Metric', FieldType.string, ['A', 'A', 'B', 'B', 'A', 'B']),
      createField('Value', FieldType.number, [5, 4, 3, 2, 1, -1], { units: 'celsius' }),
    ];

    const fields = unwrap(result[0].fields);

    expect(fields[2].config).toEqual({ units: 'celsius' });
    expect(fields).toEqual(expected);
  });

  it('combine two time series, where second serie fields has units, into one', () => {
    const cfg: DataTransformerConfig<SeriesToRowsTransformerOptions> = {
      id: DataTransformerID.seriesToRows,
      options: {},
    };

    const serieA = toDataFrame({
      name: 'A',
      fields: [
        { name: 'Time', type: FieldType.time, values: [100, 150, 200] },
        { name: 'Temp', type: FieldType.number, values: [1, 4, 5] },
      ],
    });

    const serieB = toDataFrame({
      name: 'B',
      fields: [
        { name: 'Time', type: FieldType.time, values: [100, 125, 126] },
        { name: 'Temp', type: FieldType.number, values: [-1, 2, 3], config: { units: 'celsius' } },
      ],
    });

    const result = transformDataFrame([cfg], [serieA, serieB]);
    const expected: Field[] = [
      createField('Time', FieldType.time, [200, 150, 126, 125, 100, 100]),
      createField('Metric', FieldType.string, ['A', 'A', 'B', 'B', 'A', 'B']),
      createField('Value', FieldType.number, [5, 4, 3, 2, 1, -1]),
    ];

    const fields = unwrap(result[0].fields);

    expect(fields[2].config).toEqual({});
    expect(fields).toEqual(expected);
  });
});

const createField = (name: string, type: FieldType, values: any[], config = {}): Field => {
  return { name, type, values: new ArrayVector(values), config, labels: undefined };
};

const unwrap = (fields: Field[]): Field[] => {
  return fields.map(field =>
    createField(
      field.name,
      field.type,
      field.values.toArray().map((value: any) => value),
      field.config
    )
  );
};
@ -0,0 +1,97 @@
import { omit } from 'lodash';
import { DataTransformerID } from './ids';
import { DataTransformerInfo } from '../../types/transformations';
import {
  DataFrame,
  Field,
  FieldType,
  TIME_SERIES_TIME_FIELD_NAME,
  TIME_SERIES_VALUE_FIELD_NAME,
  TIME_SERIES_METRIC_FIELD_NAME,
} from '../../types/dataFrame';
import { isTimeSeries } from '../../dataframe/utils';
import { MutableDataFrame, sortDataFrame } from '../../dataframe';
import { ArrayVector } from '../../vector';
import { getFrameDisplayName } from '../../field/fieldState';

export interface SeriesToRowsTransformerOptions {}

export const seriesToRowsTransformer: DataTransformerInfo<SeriesToRowsTransformerOptions> = {
  id: DataTransformerID.seriesToRows,
  name: 'Series to rows',
  description: 'Combines multiple series into a single serie and appends a column with metric name per value.',
  defaultOptions: {},
  transformer: (options: SeriesToRowsTransformerOptions) => {
    return (data: DataFrame[]) => {
      if (!Array.isArray(data) || data.length <= 1) {
        return data;
      }

      if (!isTimeSeries(data)) {
        return data;
      }

      const timeFieldByIndex: Record<number, number> = {};
      const targetFields = new Set<string>();
      const dataFrame = new MutableDataFrame();
      const metricField: Field = {
        name: TIME_SERIES_METRIC_FIELD_NAME,
        values: new ArrayVector(),
        config: {},
        type: FieldType.string,
      };

      for (let frameIndex = 0; frameIndex < data.length; frameIndex++) {
        const frame = data[frameIndex];

        for (let fieldIndex = 0; fieldIndex < frame.fields.length; fieldIndex++) {
          const field = frame.fields[fieldIndex];

          if (field.type === FieldType.time) {
            timeFieldByIndex[frameIndex] = fieldIndex;

            if (!targetFields.has(TIME_SERIES_TIME_FIELD_NAME)) {
              dataFrame.addField(copyFieldStructure(field, TIME_SERIES_TIME_FIELD_NAME));
              dataFrame.addField(metricField);
              targetFields.add(TIME_SERIES_TIME_FIELD_NAME);
            }
            continue;
          }

          if (!targetFields.has(TIME_SERIES_VALUE_FIELD_NAME)) {
            dataFrame.addField(copyFieldStructure(field, TIME_SERIES_VALUE_FIELD_NAME));
            targetFields.add(TIME_SERIES_VALUE_FIELD_NAME);
          }
        }
      }

      for (let frameIndex = 0; frameIndex < data.length; frameIndex++) {
        const frame = data[frameIndex];

        for (let valueIndex = 0; valueIndex < frame.length; valueIndex++) {
          const timeFieldIndex = timeFieldByIndex[frameIndex];
          const valueFieldIndex = timeFieldIndex === 0 ? 1 : 0;

          dataFrame.add({
            [TIME_SERIES_TIME_FIELD_NAME]: frame.fields[timeFieldIndex].values.get(valueIndex),
            [TIME_SERIES_METRIC_FIELD_NAME]: getFrameDisplayName(frame),
            [TIME_SERIES_VALUE_FIELD_NAME]: frame.fields[valueFieldIndex].values.get(valueIndex),
          });
        }
      }

      return [sortDataFrame(dataFrame, 0, true)];
    };
  },
};

const copyFieldStructure = (field: Field, name: string): Field => {
  return {
    ...omit(field, ['values', 'state', 'labels', 'config', 'name']),
    name: name,
    values: new ArrayVector(),
    config: {
      ...omit(field.config, 'displayName'),
    },
  };
};
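And a matching hedged usage sketch for this transformer, again assuming registration as in the test file's `beforeAll`:

```ts
import { toDataFrame } from '../../dataframe';
import { transformDataFrame } from '../transformDataFrame';
import { DataTransformerConfig, FieldType } from '../../types';
import { DataTransformerID } from './ids';
import { SeriesToRowsTransformerOptions } from './seriesToRows';

const cfg: DataTransformerConfig<SeriesToRowsTransformerOptions> = {
  id: DataTransformerID.seriesToRows,
  options: {},
};

// Two single-value time series, as in the tests above.
const frames = ['A', 'B'].map((name, i) =>
  toDataFrame({
    name,
    fields: [
      { name: 'Time', type: FieldType.time, values: [1000 * (i + 1)] },
      { name: 'Temp', type: FieldType.number, values: [i] },
    ],
  })
);

// Result: one frame with Time/Metric/Value columns, sorted by Time descending;
// each row's Metric column carries the source frame's display name.
const rows = transformDataFrame([cfg], frames);
```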
@ -150,3 +150,4 @@ export interface FieldCalcs extends Record<string, any> {}

export const TIME_SERIES_VALUE_FIELD_NAME = 'Value';
export const TIME_SERIES_TIME_FIELD_NAME = 'Time';
export const TIME_SERIES_METRIC_FIELD_NAME = 'Metric';
@ -15,20 +15,10 @@ export interface DataSourcePluginOptionsEditorProps<JSONData = DataSourceJsonDat
}

// Utility type to extract the query type TQuery from a class extending DataSourceApi<TQuery, TOptions>
export type DataSourceQueryType<DSType extends DataSourceApi<any, any>> = DSType extends DataSourceApi<
  infer TQuery,
  infer _TOptions
>
  ? TQuery
  : never;
export type DataSourceQueryType<DSType> = DSType extends DataSourceApi<infer TQuery, any> ? TQuery : never;

// Utility type to extract the options type TOptions from a class extending DataSourceApi<TQuery, TOptions>
export type DataSourceOptionsType<DSType extends DataSourceApi<any, any>> = DSType extends DataSourceApi<
  infer _TQuery,
  infer TOptions
>
  ? TOptions
  : never;
export type DataSourceOptionsType<DSType> = DSType extends DataSourceApi<any, infer TOptions> ? TOptions : never;
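A hedged usage sketch of the simplified utility types; `MyQuery` and `MyDataSource` are hypothetical names introduced only for illustration:

```ts
// Hedged sketch: extracting the generics from a concrete datasource class.
interface MyQuery extends DataQuery {
  expr: string; // hypothetical query field
}

declare class MyDataSource extends DataSourceApi<MyQuery, DataSourceJsonData> {}

type ExtractedQuery = DataSourceQueryType<MyDataSource>;     // resolves to MyQuery
type ExtractedOptions = DataSourceOptionsType<MyDataSource>; // resolves to DataSourceJsonData
```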
export class DataSourcePlugin<
  DSType extends DataSourceApi<TQuery, TOptions>,
@ -339,7 +329,7 @@ export interface ExploreQueryFieldProps<
}

export interface ExploreStartPageProps {
  datasource?: DataSourceApi;
  datasource: DataSourceApi;
  exploreMode: ExploreMode;
  onClickExample: (query: DataQuery) => void;
  exploreId?: any;
@ -453,7 +443,6 @@ export interface DataQueryTimings {
}

export interface QueryFix {
  type: string;
  label: string;
  action?: QueryFixAction;
}
@ -472,6 +461,7 @@ export interface QueryHint {

export interface MetricFindValue {
  text: string;
  expandable?: boolean;
}

export interface DataSourceJsonData {
@ -500,7 +490,7 @@ export interface DataSourceSettings<T extends DataSourceJsonData = DataSourceJso
  isDefault: boolean;
  jsonData: T;
  secureJsonData?: S;
  secureJsonFields?: KeyValue<boolean>;
  secureJsonFields: KeyValue<boolean>;
  readOnly: boolean;
  withCredentials: boolean;
  version?: number;
@ -68,6 +68,8 @@ export const addPanel = (config?: Partial<AddPanelConfig>): any =>
      .click();
    closeOptionsGroup('type');

    closeOptions();

    queriesForm(fullConfig);

    e2e().wait('@chartData');
@ -77,8 +79,6 @@ export const addPanel = (config?: Partial<AddPanelConfig>): any =>
    //e2e.components.Panels.Panel.containerByTitle(panelTitle).find('.panel-content').contains('No data');
    //e2e.components.QueryEditorRow.actionButton('Disable/enable query').click();

    closeOptions();

    e2e()
      .get('button[title="Apply changes and go back to dashboard"]')
      .click();
@ -1,10 +1,16 @@
import { Observable } from 'rxjs';

/**
 * Used to initiate a remote call via the {@link BackendSrv}
 *
 * @public
 */
export type BackendSrvRequest = {
  /**
   * Request URL
   */
  url: string;

  /**
   * Number of times to retry the remote call if it fails.
   */
@ -15,7 +21,7 @@ export type BackendSrvRequest = {
   * Please have a look at {@link https://developer.mozilla.org/en-US/docs/Web/API/Fetch_API | Fetch API}
   * for supported headers.
   */
  headers?: any;
  headers?: Record<string, any>;

  /**
   * HTTP verb to perform in the remote call GET, POST, PUT etc.
@ -23,11 +29,15 @@ export type BackendSrvRequest = {
  method?: string;

  /**
   * If set to true an alert with the response message will be displayed
   * upon successful remote call
   * Set to false to prevent a success alert box from being shown for successful PUT, DELETE, and POST requests
   */
  showSuccessAlert?: boolean;

  /**
   * Set to false to not show an application alert box for request errors
   */
  showErrorAlert?: boolean;

  /**
   * Provided by the initiator to identify a particular remote call. An example
   * of this is when a datasource plugin triggers a query. If the request id already
@ -35,9 +45,71 @@ export type BackendSrvRequest = {
   * new one.
   */
  requestId?: string;
  [key: string]: any;

  /**
   * Set to true to not include the call in the query inspector
   */
  silent?: boolean;

  /**
   * The data to send
   */
  data?: any;

  /**
   * Query params
   */
  params?: Record<string, any>;

  /**
   * Indicates whether or not cross-site Access-Control requests should be made using credentials such as cookies, authorization headers or TLS client certificates. Setting withCredentials has no effect on same-site requests.
   * In addition, this flag is also used to indicate when cookies are to be ignored in the response.
   */
  withCredentials?: boolean;
};

/**
 * Response for fetch function in {@link BackendSrv}
 *
 * @public
 */
export interface FetchResponse<T = any> {
  data: T;
  readonly status: number;
  readonly statusText: string;
  readonly ok: boolean;
  readonly headers: Headers;
  readonly redirected: boolean;
  readonly type: ResponseType;
  readonly url: string;
  readonly config: BackendSrvRequest;
}

/**
 * Error type for fetch function in {@link BackendSrv}
 *
 * @public
 */
export interface FetchErrorDataProps {
  message?: string;
  status?: string;
  error?: string | any;
}

/**
 * Error type for fetch function in {@link BackendSrv}
 *
 * @public
 */
export interface FetchError<T extends FetchErrorDataProps = any> {
  status: number;
  statusText?: string;
  data: T | string;
  cancelled?: boolean;
  isHandled?: boolean;
  config: BackendSrvRequest;
}
|
||||
|
||||
/**
|
||||
* Used to communicate via http(s) to a remote backend such as the Grafana backend,
|
||||
* a datasource etc. The BackendSrv is using the {@link https://developer.mozilla.org/en-US/docs/Web/API/Fetch_API | Fetch API}
|
||||
@ -48,9 +120,8 @@ export type BackendSrvRequest = {
|
||||
* use default values executing the request.
|
||||
*
|
||||
* @remarks
|
||||
* By default Grafana will display an error message alert if the remote call fails. If you want
|
||||
* to prevent this from happending you need to catch the error thrown by the BackendSrv and
|
||||
* set the `isHandled = true` on the incoming error.
|
||||
* By default, Grafana displays an error message alert if the remote call fails. To prevent this from
|
||||
* happening `showErrorAlert = true` on the options object.
|
||||
*
|
||||
* @public
|
||||
*/
|
||||
@ -60,15 +131,26 @@ export interface BackendSrv {
|
||||
post(url: string, data?: any): Promise<any>;
|
||||
patch(url: string, data?: any): Promise<any>;
|
||||
put(url: string, data?: any): Promise<any>;
|
||||
|
||||
/**
|
||||
* @deprecated Use the fetch function instead. If you prefer to work with a promise
|
||||
* call the toPromise() function on the Observable returned by fetch.
|
||||
*/
|
||||
request(options: BackendSrvRequest): Promise<any>;
|
||||
|
||||
/**
|
||||
* @deprecated Use the fetch function instead
|
||||
* Special function used to communicate with datasources that will emit core
|
||||
* events that the Grafana QueryInspector and QueryEditor is listening for to be able
|
||||
* to display datasource query information. Can be skipped by adding `option.silent`
|
||||
* when initializing the request.
|
||||
*/
|
||||
datasourceRequest(options: BackendSrvRequest): Promise<any>;
|
||||
|
||||
/**
|
||||
* Observable http request interface
|
||||
*/
|
||||
fetch<T>(options: BackendSrvRequest): Observable<FetchResponse<T>>;
|
||||
}
|
||||
|
||||
let singletonInstance: BackendSrv;
|
||||
|
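To illustrate the observable interface above, a minimal sketch of consuming `fetch` directly and, for callers migrating off the deprecated `request`, converting the result with `toPromise()` as the deprecation note suggests (the endpoint URL and payload type here are assumptions):

```typescript
import { getBackendSrv } from '@grafana/runtime';

interface HealthPayload {
  status: string;
}

// Observable-style consumption of the new fetch API.
getBackendSrv()
  .fetch<HealthPayload>({ url: '/api/health', method: 'GET', showErrorAlert: false })
  .subscribe(response => {
    console.log(response.status, response.data.status);
  });

// Promise-style consumption for code that still expects a promise.
const asPromise = getBackendSrv()
  .fetch<HealthPayload>({ url: '/api/health' })
  .toPromise();
```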
@ -16,7 +16,7 @@ export interface TemplateSrv {
  /**
   * Replace the values within the target string. See also {@link InterpolateFunction}
   */
  replace(target: string, scopedVars?: ScopedVars, format?: string | Function): string;
  replace(target?: string, scopedVars?: ScopedVars, format?: string | Function): string;
}

let singletonInstance: TemplateSrv;
@ -129,6 +129,11 @@ export class DataSourceWithBackend<

  /**
   * Optionally augment the response before returning the results to the caller.
   *
   * NOTE: this was added in 7.1 for azure, and will be removed in 7.2
   * when the entire response pipeline is Observable
   *
   * @internal
   */
  processResponse?(res: DataQueryResponse): Promise<DataQueryResponse>;

@ -171,12 +176,11 @@ export class DataSourceWithBackend<
   */
  async callHealthCheck(): Promise<HealthCheckResult> {
    return getBackendSrv()
      .get(`/api/datasources/${this.id}/health`)
      .request({ method: 'GET', url: `/api/datasources/${this.id}/health`, showErrorAlert: false })
      .then(v => {
        return v as HealthCheckResult;
      })
      .catch(err => {
        err.isHandled = true; // Avoid extra popup warning
        return err.data as HealthCheckResult;
      });
  }
@ -1,5 +1,5 @@
FROM alpine
USER root
FROM alpine:3.12
USER root
ADD scripts scripts
ADD install /usr/local
WORKDIR scripts
@ -1,6 +1,6 @@
# Using this docker image

Currently tagged and uploaded to dockerhub as srclosson/integrations-ci-build
Uploaded to dockerhub as grafana/grafana-plugin-ci:latest-alpine

Based on `circleci/node:12-browsers`

@ -15,7 +15,7 @@ The home directory will be `/home/circleci`

## Go
- Go 1.14 is installed in `/usr/local/bin/go`
- golangci-lint 1.23.7 is installed in `/usr/local/bin/golangci-lint`
- golangci-lint is installed in `/usr/local/bin/golangci-lint`
- mage is installed in `/home/circleci/go/bin/mage`

All of the above directories are in the path, so there is no need to specify fully qualified paths.
@ -58,4 +58,4 @@ cd test
```

You will be in /home/circleci/test with the buildscripts installed to the local directory.
Do your edits/run tests. When saving, your edits will be available in the container immediately.
@ -4,4 +4,4 @@
## Common variable declarations
##

DOCKER_IMAGE_NAME="srclosson/grafana-plugin-ci-alpine"
DOCKER_IMAGE_NAME="grafana/grafana-plugin-ci:latest-alpine"
@ -10,12 +10,12 @@ rm /bin/cp
mv /usr/local/bin/cp /bin/cp

sed -i -e 's/v[[:digit:]]\..*\//edge\//g' /etc/apk/repositories
apk add nodejs npm yarn build-base openssh git-lfs perl-utils
apk add --no-cache nodejs npm yarn build-base openssh git-lfs perl-utils

#
# Only relevant for testing, but cypress does not work with musl/alpine.
#
# apk add xvfb glib nss nspr gdk-pixbuf "gtk+3.0" pango atk cairo dbus-libs libxcomposite libxrender libxi libxtst libxrandr libxscrnsaver alsa-lib at-spi2-atk at-spi2-core cups-libs gcompat libc6-compat
# apk add --no-cache xvfb glib nss nspr gdk-pixbuf "gtk+3.0" pango atk cairo dbus-libs libxcomposite libxrender libxi libxtst libxrandr libxscrnsaver alsa-lib at-spi2-atk at-spi2-core cups-libs gcompat libc6-compat

# Install Go
filename="go1.14.linux-amd64.tar.gz"
@ -23,10 +23,11 @@ get_file "https://dl.google.com/go/$filename" "/tmp/$filename" "08df79b46b0adf49
untar_file "/tmp/$filename"

# Install golangci-lint
filename="golangci-lint-1.26.0-linux-amd64"
get_file "https://github.com/golangci/golangci-lint/releases/download/v1.26.0/$filename.tar.gz" \
GOLANGCILINT_VERSION=1.28.0
filename="golangci-lint-${GOLANGCILINT_VERSION}-linux-amd64"
get_file "https://github.com/golangci/golangci-lint/releases/download/v${GOLANGCILINT_VERSION}/$filename.tar.gz" \
  "/tmp/$filename.tar.gz" \
  "59b0e49a4578fea574648a2fd5174ed61644c667ea1a1b54b8082fde15ef94fd"
  "179d34edf4baf6454a7081fbaaf74dc99397a3be8e1a535dee04d835a977bf76"
untar_file "/tmp/$filename.tar.gz"
ln -s /usr/local/${filename}/golangci-lint /usr/local/bin/golangci-lint
ln -s /usr/local/go/bin/go /usr/local/bin/go
@ -34,7 +35,7 @@ ln -s /usr/local/go/bin/gofmt /usr/local/bin/gofmt
chmod 755 /usr/local/bin/golangci-lint

# Install dependencies
apk add fontconfig zip jq
apk add --no-cache fontconfig zip jq

# Install code climate
get_file "https://codeclimate.com/downloads/test-reporter/test-reporter-latest-linux-amd64" \
@ -45,7 +46,7 @@ chmod +x /usr/local/bin/cc-test-reporter
wget -O /usr/local/bin/grabpl "https://grafana-downloads.storage.googleapis.com/grafana-build-pipeline/v0.4.4/grabpl"
chmod +x /usr/local/bin/grabpl

apk add git
apk add --no-cache git
# Install Mage
mkdir -pv /tmp/mage $HOME/go/bin
git clone https://github.com/magefile/mage.git /tmp/mage
@ -66,6 +67,5 @@ cd /usr/local/grafana-toolkit && yarn install && cd $current_dir
ln -s /usr/local/grafana-toolkit/bin/grafana-toolkit.js /usr/local/bin/grafana-toolkit

# Cleanup after yourself
/bin/rm -rf /tmp/mage
/bin/rm -rf $HOME/go
/bin/rm -rf /var/cache/apk/*
@ -9,7 +9,7 @@ services:
      - ${HOME}/.ssh:/root/.ssh
      - ../../..:/home/circleci/grafana-toolkit
  cibuilt:
    image: "srclosson/grafana-plugin-ci-alpine"
    image: "grafana/grafana-plugin-ci:latest-alpine"
    user: root
    volumes:
      - ../scripts:/home/circleci/scripts
5 packages/grafana-ui/src/components/Badge/Badge.mdx Normal file
@ -0,0 +1,5 @@
<Meta title="MDX|Badge" component={Badge} />

# Badge

The badge component adds meta information to other content, for example about release status or new elements. You can add any `Icon` component or use the badge without an icon.
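A usage sketch for the documented component; the `text`, `color`, and `icon` prop names are assumptions, not taken from this diff:

```typescript
import React from 'react';
import { Badge } from '@grafana/ui';

// A badge flagging release status; the icon is optional.
export const BetaBadge = () => <Badge text="Beta" color="blue" icon="rocket" />;
```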
@ -55,6 +55,12 @@ export interface Props extends Themeable {
  justifyMode?: BigValueJustifyMode;
  alignmentFactors?: DisplayValueAlignmentFactors;
  textMode?: BigValueTextMode;

  /**
   * If part of a series of stat panes, this is the total number.
   * Used by BigValueTextMode.Auto text mode.
   */
  count?: number;
}

export class BigValue extends PureComponent<Props> {
@ -463,12 +463,18 @@ export interface BigValueTextValues extends DisplayValue {
}

function getTextValues(props: Props): BigValueTextValues {
  const { textMode: nameAndValue, value, alignmentFactors } = props;
  const { value, alignmentFactors, count } = props;
  let { textMode } = props;

  const titleToAlignTo = alignmentFactors ? alignmentFactors.title : value.title;
  const valueToAlignTo = formattedValueToString(alignmentFactors ? alignmentFactors : value);

  switch (nameAndValue) {
  // In the auto case we only show title if this big value is part of more panes (count > 1)
  if (textMode === BigValueTextMode.Auto && (count ?? 1) === 1) {
    textMode = BigValueTextMode.Value;
  }

  switch (textMode) {
    case BigValueTextMode.Name:
      return {
        ...value,
@ -498,6 +504,7 @@ function getTextValues(props: Props): BigValueTextValues {
        valueToAlignTo: '1',
        tooltip: `Name: ${value.title}\nValue: ${formattedValueToString(value)}`,
      };
    case BigValueTextMode.ValueAndName:
    default:
      return {
        ...value,
@ -0,0 +1,11 @@
<Meta title="MDX|ConfirmButton" component={ConfirmButton} />

# ConfirmButton

The ConfirmButton is an interactive component that adds a double-confirm option to a clickable action. When clicked, the action is replaced by an inline confirmation with the option to cancel. In Grafana, this is used for example for editing values in settings tables.

## Variants

There are four variants of the `ConfirmButton`: primary, secondary, destructive, and link. The primary and secondary variants include a primary or secondary `Button` component and should be used to confirm actions like saving or adding data. The destructive variant includes a destructive `Button` component and should be used to double-confirm a deletion or removal of an element. The link variant doesn't include any button and double-confirms as links instead.

Apart from the button variant, you can also modify the button size and the button text.
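A usage sketch under the caveat that the `confirmText` and `onConfirm` prop names are assumptions rather than part of this diff:

```typescript
import React from 'react';
import { ConfirmButton } from '@grafana/ui';

// An action that asks for an inline confirmation before firing.
export const RemoveMemberButton = ({ onRemove }: { onRemove: () => void }) => (
  <ConfirmButton confirmText="Confirm removal" onConfirm={onRemove}>
    Remove
  </ConfirmButton>
);
```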
@ -28,6 +28,7 @@ const setup = (propOverrides?: object) => {
      secureJsonData: {
        password: true,
      },
      secureJsonFields: {},
      readOnly: true,
    },
    onChange: jest.fn(),

@ -28,6 +28,7 @@ const settingsMock: DataSourceSettings<any, any> = {
  secureJsonData: {
    password: true,
  },
  secureJsonFields: {},
  readOnly: true,
};
@ -2,6 +2,8 @@ import React from 'react';
import { withCenteredStory } from '../../utils/storybook/withCenteredStory';
import { FileUpload } from './FileUpload';
import mdx from './FileUpload.mdx';
import { useSize } from '../../utils/storybook/useSize';
import { ComponentSize } from '../../types/size';

export default {
  title: 'Forms/FileUpload',
@ -15,8 +17,10 @@ export default {
};

export const single = () => {
  const size = useSize();
  return (
    <FileUpload
      size={size as ComponentSize}
      onFileUpload={({ currentTarget }) => console.log('file', currentTarget?.files && currentTarget.files[0])}
    />
  );
@ -3,12 +3,14 @@ import { GrafanaTheme } from '@grafana/data';
import { css, cx } from 'emotion';
import { getFormStyles, Icon } from '../index';
import { stylesFactory, useTheme } from '../../themes';
import { ComponentSize } from '../../types/size';

export interface Props {
  onFileUpload: (event: FormEvent<HTMLInputElement>) => void;
  /** Accepted file extensions */
  accept?: string;
  className?: string;
  size?: ComponentSize;
}

function trimFileName(fileName: string) {
@ -24,9 +26,15 @@ function trimFileName(fileName: string) {
  return `${file.substring(0, nameLength)}...${extension}`;
}

export const FileUpload: FC<Props> = ({ onFileUpload, className, children = 'Upload file', accept = '*' }) => {
export const FileUpload: FC<Props> = ({
  onFileUpload,
  className,
  children = 'Upload file',
  accept = '*',
  size = 'md',
}) => {
  const theme = useTheme();
  const style = getStyles(theme);
  const style = getStyles(theme, size);
  const [fileName, setFileName] = useState('');

  const onChange = useCallback((event: FormEvent<HTMLInputElement>) => {
@ -60,8 +68,8 @@ export const FileUpload: FC<Props> = ({ onFileUpload, className, children = 'Upl
  );
};

const getStyles = stylesFactory((theme: GrafanaTheme) => {
  const buttonFormStyle = getFormStyles(theme, { variant: 'primary', invalid: false, size: 'md' }).button.button;
const getStyles = stylesFactory((theme: GrafanaTheme, size: ComponentSize) => {
  const buttonFormStyle = getFormStyles(theme, { variant: 'primary', invalid: false, size }).button.button;
  return {
    fileUpload: css`
      display: none;
@ -34,10 +34,12 @@ export const FormLabel: FunctionComponent<Props> = ({
      {tooltip && (
        <Tooltip placement="top" content={tooltip} theme={'info'}>
          <div className="gf-form-help-icon gf-form-help-icon--right-normal">
            <Icon name="info-circle" size="xs" style={{ marginLeft: '10px' }} />
            <Icon name="info-circle" size="sm" style={{ marginLeft: '10px' }} />
          </div>
        </Tooltip>
      )}
    </label>
  );
};

export const InlineFormLabel = FormLabel;
@ -2,6 +2,7 @@ import React, { PureComponent } from 'react';
import uniqueId from 'lodash/uniqueId';
import { Tooltip } from '../../../Tooltip/Tooltip';
import * as PopperJS from 'popper.js';
import { Icon } from '../../..';

export interface Props {
  label: string;
@ -54,7 +55,7 @@ export class Switch extends PureComponent<Props, State> {
      {tooltip && (
        <Tooltip placement={tooltipPlacement ? tooltipPlacement : 'auto'} content={tooltip} theme={'info'}>
          <div className="gf-form-help-icon gf-form-help-icon--right-normal">
            <i className="fa fa-info-circle" />
            <Icon name="info-circle" size="sm" style={{ marginLeft: '10px' }} />
          </div>
        </Tooltip>
      )}
23 packages/grafana-ui/src/components/Forms/Legend.mdx Normal file
@ -0,0 +1,23 @@
import { Story, Preview, Props } from '@storybook/addon-docs/blocks';
import { Legend } from './Legend';

<Meta title="MDX|Legend" component={Legend} />

# Legend

### When to use

Legend should be used to add a caption to a group of related form elements that have been grouped together into a `FieldSet`.

### Usage

```jsx
import { Forms } from '@grafana/ui';

<Legend>{label}</Legend>
```

### Props

<Props of={Legend} />
@ -2,6 +2,7 @@ import React from 'react';
import { text } from '@storybook/addon-knobs';

import { Legend } from './Legend';
import mdx from './Legend.mdx';

const getKnobs = () => {
  return {
@ -12,6 +13,11 @@ const getKnobs = () => {
export default {
  title: 'Forms/Legend',
  component: Legend,
  parameters: {
    docs: {
      page: mdx,
    },
  },
};

export const basic = () => {
@ -0,0 +1,38 @@
import { findInsertIndex } from './suggestions';

describe('Check suggestion index', () => {
  it('find last $ sign', () => {
    const line = ' hello $123';
    const { index, prefix } = findInsertIndex(line);
    expect(index).toEqual(line.indexOf('$'));
    expect(prefix).toEqual('$123');
  });

  it('insert into empty line', () => {
    const line = '';
    const { index, prefix } = findInsertIndex(line);
    expect(index).toEqual(0);
    expect(prefix).toEqual('');
  });

  it('insert new word', () => {
    const line = 'this is a new ';
    const { index, prefix } = findInsertIndex(line);
    expect(index).toEqual(line.length);
    expect(prefix).toEqual('');
  });

  it('complete a simple word', () => {
    const line = 'SELECT * FROM tab';
    const { index, prefix } = findInsertIndex(line);
    expect(index).toEqual(line.lastIndexOf(' ') + 1);
    expect(prefix).toEqual('tab');
  });

  it('complete a quoted word', () => {
    const line = 'SELECT "hello", "wo';
    const { index, prefix } = findInsertIndex(line);
    expect(index).toEqual(line.lastIndexOf('"') + 1);
    expect(prefix).toEqual('wo');
  });
});
@ -2,6 +2,33 @@ import * as monaco from 'monaco-editor/esm/vs/editor/editor.api';

import { CodeEditorSuggestionItem, CodeEditorSuggestionItemKind, CodeEditorSuggestionProvider } from './types';

/**
 * @internal -- only exported for tests
 */
export function findInsertIndex(line: string): { index: number; prefix: string } {
  for (let i = line.length - 1; i > 0; i--) {
    const ch = line.charAt(i);
    if (ch === '$') {
      return {
        index: i,
        prefix: line.substring(i),
      };
    }

    // Keep these separators
    if (ch === ' ' || ch === '\t' || ch === '"' || ch === "'") {
      return {
        index: i + 1,
        prefix: line.substring(i + 1),
      };
    }
  }
  return {
    index: 0,
    prefix: line,
  };
}

function getCompletionItems(
  prefix: string,
  suggestions: CodeEditorSuggestionItem[],
@ -53,51 +80,39 @@ export function registerSuggestions(
    triggerCharacters: ['$'],

    provideCompletionItems: (model, position, context) => {
      const range = {
        startLineNumber: position.lineNumber,
        endLineNumber: position.lineNumber,
        startColumn: position.column,
        endColumn: position.column,
      };

      // Simple check if this was triggered by pressing `$`
      if (context.triggerCharacter === '$') {
        const range = {
          startLineNumber: position.lineNumber,
          endLineNumber: position.lineNumber,
          startColumn: position.column - 1,
          endColumn: position.column,
        };
        range.startColumn = position.column - 1;
        return {
          suggestions: getCompletionItems('$', getSuggestions(), range),
        };
      }

      // find out if we are completing a property in the 'dependencies' object.
      const lineText = model.getValueInRange({
      // Find the replacement region
      const currentLine = model.getValueInRange({
        startLineNumber: position.lineNumber,
        startColumn: 1,
        endLineNumber: position.lineNumber,
        endColumn: position.column,
      });

      const idx = lineText.lastIndexOf('$');
      if (idx >= 0) {
        const range = {
          startLineNumber: position.lineNumber,
          endLineNumber: position.lineNumber,
          startColumn: idx, // the last $ we found
          endColumn: position.column,
        };
        return {
          suggestions: getCompletionItems(lineText.substr(idx), getSuggestions(), range),
        };
      const { index, prefix } = findInsertIndex(currentLine);
      range.startColumn = index + 1;

      const suggestions = getCompletionItems(prefix, getSuggestions(), range);
      if (suggestions.length) {
        // NOTE, this will replace any language provided suggestions
        return { suggestions };
      }

      // Empty line that asked for suggestion
      if (lineText.trim().length < 1) {
        return {
          suggestions: getCompletionItems('', getSuggestions(), {
            startLineNumber: position.lineNumber,
            endLineNumber: position.lineNumber,
            startColumn: position.column,
            endColumn: position.column,
          }),
        };
      }
      // console.log('complete?', lineText, context);
      // Default language suggestions
      return undefined;
    },
  });
@ -26,7 +26,7 @@ export interface QueryFieldProps {
  // We have both value and local state. This is usually an antipattern, but we need to keep local state
  // for perf reasons, and we also take an outside value (for example from Explore's redux state) that is mutable
  // from logs, creating a two-way binding.
  query: string | null;
  query?: string | null;
  onRunQuery?: () => void;
  onBlur?: () => void;
  onChange?: (value: string) => void;
12 packages/grafana-ui/src/components/Slider/Slider.mdx Normal file
@ -0,0 +1,12 @@
import { Meta, Props } from '@storybook/addon-docs/blocks';
import { Slider } from './Slider';

<Meta title="MDX|Slider" />

# Slider

The `Slider` component is an input element where users can manipulate one or two values on a one-dimensional axis.

`Slider` can be implemented in horizontal or vertical orientation. You can set the default starting value(s) for the slider with the `value` prop.

<Props of={Slider} />
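A usage sketch, assuming `min`/`max` props and the array-valued `value` prop implied by the "value(s)" wording above:

```typescript
import React from 'react';
import { Slider } from '@grafana/ui';

// Single-value slider; value takes an array so ranges can reuse the same prop.
export const OpacitySlider = () => <Slider min={0} max={100} value={[40]} />;
```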
@ -3,6 +3,7 @@ import { css, cx } from 'emotion';
import { TableCellProps } from './types';
import { Tooltip } from '../Tooltip/Tooltip';
import { JSONFormatter } from '../JSONFormatter/JSONFormatter';
import { isString } from 'lodash';

export const JSONViewCell: FC<TableCellProps> = props => {
  const { field, cell, tableStyles } = props;
@ -16,8 +17,16 @@ export const JSONViewCell: FC<TableCellProps> = props => {
    font-family: monospace;
  `;

  const displayValue = JSON.stringify(cell.value);
  const content = <JSONTooltip value={cell.value} />;
  let value = cell.value;
  let displayValue = value;
  if (isString(value)) {
    try {
      value = JSON.parse(value);
    } catch {} // ignore errors
  } else {
    displayValue = JSON.stringify(value);
  }
  const content = <JSONTooltip value={value} />;
  return (
    <div className={cx(txt, tableStyles.tableCell)}>
      <Tooltip placement="auto" content={content} theme={'info'}>
@ -4,4 +4,26 @@
# TextArea
Use for multi-line inputs like descriptions.

### Usage

```jsx
<TextArea invalid={invalid} placeholder={placeholder} cols={cols} disabled={disabled} />
```

### Usage in forms with Field

`TextArea` should be used with the `Field` component to get labels and descriptions. It should also be used for validation. See the `Field` component for more information.

```jsx
<Field label="Important information" description="This information is very important, so you really need to fill it in">
  <TextArea name="importantTextarea" required />
</Field>
```

<Preview>
  <Field label="Important information" description="This information is very important, so you really need to fill it in">
    <TextArea name="importantTextarea" required />
  </Field>
</Preview>

<Props of={TextArea} />
@ -163,7 +163,7 @@ export { FileUpload } from './FileUpload/FileUpload';
// Legacy forms

// Export this until we've figured out a good approach to inline form styles.
export { FormLabel as InlineFormLabel } from './FormLabel/FormLabel';
export { InlineFormLabel } from './FormLabel/FormLabel';

// Select
import { Select, AsyncSelect } from './Forms/Legacy/Select/Select';
27 packages/grafana-ui/src/themes/ThemeContext.test.tsx Normal file
@ -0,0 +1,27 @@
import React from 'react';
import { config } from '@grafana/runtime';
import { css } from 'emotion';
import { mount } from 'enzyme';
import { useStyles } from './ThemeContext';

describe('useStyles', () => {
  it('passes in theme and returns style object', () => {
    const Dummy: React.FC = function() {
      const styles = useStyles(theme => {
        expect(theme).toEqual(config.theme);

        return {
          someStyle: css`
            color: ${theme?.palette.critical};
          `,
        };
      });

      expect(typeof styles.someStyle).toBe('string');

      return <div>dummy</div>;
    };

    mount(<Dummy />);
  });
});
@ -38,12 +38,11 @@ export const withTheme = <P extends Themeable, S extends {} = {}>(Component: Rea
export function useTheme(): GrafanaTheme {
  return useContext(ThemeContextMock || ThemeContext);
}

/** Hook for using memoized styles with access to the theme. */
export const useStyles = (getStyles: (theme?: GrafanaTheme) => any) => {
  const currentTheme = useTheme();
  const callback = stylesFactory(stylesTheme => getStyles(stylesTheme));
  return callback(currentTheme);
};
export function useStyles<T>(getStyles: (theme: GrafanaTheme) => T) {
  return stylesFactory(getStyles)(useTheme());
}

/**
 * Enables theme context mocking
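A minimal sketch of the new hook signature in use, assuming `useStyles` is re-exported from `@grafana/ui`:

```typescript
import React from 'react';
import { css } from 'emotion';
import { GrafanaTheme } from '@grafana/data';
import { useStyles } from '@grafana/ui';

// getStyles runs through stylesFactory, so results are memoized per theme.
const getStyles = (theme: GrafanaTheme) => ({
  note: css`
    color: ${theme.colors.text};
  `,
});

export const Note: React.FC = ({ children }) => {
  const styles = useStyles(getStyles);
  return <div className={styles.note}>{children}</div>;
};
```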
7 packages/grafana-ui/src/utils/storybook/useSize.ts Normal file
@ -0,0 +1,7 @@
import { select } from '@storybook/addon-knobs';
import { ComponentSize } from '../../types/size';

export function useSize(size: ComponentSize = 'md') {
  const sizes = ['xs', 'sm', 'md', 'lg'];
  return select('Size', sizes, size);
}
@ -49,6 +49,7 @@ RUN mkdir -p "$GF_PATHS_HOME/.aws" && \
    mkdir -p "$GF_PATHS_PROVISIONING/datasources" \
             "$GF_PATHS_PROVISIONING/dashboards" \
             "$GF_PATHS_PROVISIONING/notifiers" \
             "$GF_PATHS_PROVISIONING/plugins" \
             "$GF_PATHS_LOGS" \
             "$GF_PATHS_PLUGINS" \
             "$GF_PATHS_DATA" && \

@ -39,6 +39,7 @@ RUN mkdir -p "$GF_PATHS_HOME/.aws" && \
    mkdir -p "$GF_PATHS_PROVISIONING/datasources" \
             "$GF_PATHS_PROVISIONING/dashboards" \
             "$GF_PATHS_PROVISIONING/notifiers" \
             "$GF_PATHS_PROVISIONING/plugins" \
             "$GF_PATHS_LOGS" \
             "$GF_PATHS_PLUGINS" \
             "$GF_PATHS_DATA" && \
@ -39,7 +39,6 @@ func AdminGetSettings(c *models.ReqContext) {
}

func AdminGetStats(c *models.ReqContext) {

  statsQuery := models.GetAdminStatsQuery{}

  if err := bus.Dispatch(&statsQuery); err != nil {

@ -21,7 +21,6 @@ const (
func TestAdminApiEndpoint(t *testing.T) {
  role := models.ROLE_ADMIN
  Convey("Given a server admin attempts to remove themself as an admin", t, func() {

    updateCmd := dtos.AdminUpdateUserPermissionsForm{
      IsGrafanaAdmin: false,
    }
@ -281,6 +281,11 @@ func CreateAlertNotification(c *models.ReqContext, cmd models.CreateAlertNotific
func UpdateAlertNotification(c *models.ReqContext, cmd models.UpdateAlertNotificationCommand) Response {
  cmd.OrgId = c.OrgId

  err := fillWithSecureSettingsData(&cmd)
  if err != nil {
    return Error(500, "Failed to update alert notification", err)
  }

  if err := bus.Dispatch(&cmd); err != nil {
    return Error(500, "Failed to update alert notification", err)
  }
@ -289,13 +294,27 @@ func UpdateAlertNotification(c *models.ReqContext, cmd models.UpdateAlertNotific
    return Error(404, "Alert notification not found", nil)
  }

  return JSON(200, dtos.NewAlertNotification(cmd.Result))
  query := models.GetAlertNotificationsQuery{
    OrgId: c.OrgId,
    Id:    cmd.Id,
  }

  if err := bus.Dispatch(&query); err != nil {
    return Error(500, "Failed to get alert notification", err)
  }

  return JSON(200, dtos.NewAlertNotification(query.Result))
}

func UpdateAlertNotificationByUID(c *models.ReqContext, cmd models.UpdateAlertNotificationWithUidCommand) Response {
  cmd.OrgId = c.OrgId
  cmd.Uid = c.Params("uid")

  err := fillWithSecureSettingsDataByUID(&cmd)
  if err != nil {
    return Error(500, "Failed to update alert notification", err)
  }

  if err := bus.Dispatch(&cmd); err != nil {
    return Error(500, "Failed to update alert notification", err)
  }
@ -304,7 +323,64 @@ func UpdateAlertNotificationByUID(c *models.ReqContext, cmd models.UpdateAlertNo
    return Error(404, "Alert notification not found", nil)
  }

  return JSON(200, dtos.NewAlertNotification(cmd.Result))
  query := models.GetAlertNotificationsWithUidQuery{
    OrgId: cmd.OrgId,
    Uid:   cmd.Uid,
  }

  if err := bus.Dispatch(&query); err != nil {
    return Error(500, "Failed to get alert notification", err)
  }

  return JSON(200, dtos.NewAlertNotification(query.Result))
}

func fillWithSecureSettingsData(cmd *models.UpdateAlertNotificationCommand) error {
  if len(cmd.SecureSettings) == 0 {
    return nil
  }

  query := &models.GetAlertNotificationsQuery{
    OrgId: cmd.OrgId,
    Id:    cmd.Id,
  }

  if err := bus.Dispatch(query); err != nil {
    return err
  }

  secureSettings := query.Result.SecureSettings.Decrypt()
  for k, v := range secureSettings {
    if _, ok := cmd.SecureSettings[k]; !ok {
      cmd.SecureSettings[k] = v
    }
  }

  return nil
}

func fillWithSecureSettingsDataByUID(cmd *models.UpdateAlertNotificationWithUidCommand) error {
  if len(cmd.SecureSettings) == 0 {
    return nil
  }

  query := &models.GetAlertNotificationsWithUidQuery{
    OrgId: cmd.OrgId,
    Uid:   cmd.Uid,
  }

  if err := bus.Dispatch(query); err != nil {
    return err
  }

  secureSettings := query.Result.SecureSettings.Decrypt()
  for k, v := range secureSettings {
    if _, ok := cmd.SecureSettings[k]; !ok {
      cmd.SecureSettings[k] = v
    }
  }

  return nil
}

func DeleteAlertNotification(c *models.ReqContext) Response {
@ -336,9 +412,12 @@ func DeleteAlertNotificationByUID(c *models.ReqContext) Response {
//POST /api/alert-notifications/test
func NotificationTest(c *models.ReqContext, dto dtos.NotificationTestCommand) Response {
  cmd := &alerting.NotificationTestCommand{
    Name:     dto.Name,
    Type:     dto.Type,
    Settings: dto.Settings,
    OrgID:          c.OrgId,
    ID:             dto.ID,
    Name:           dto.Name,
    Type:           dto.Type,
    Settings:       dto.Settings,
    SecureSettings: dto.SecureSettings,
  }

  if err := bus.Dispatch(cmd); err != nil {
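Since the test command now carries `id` and `secureSettings` (see the DTO change further down), a sketch of exercising the endpoint from the frontend; the notifier values are hypothetical:

```typescript
import { getBackendSrv } from '@grafana/runtime';

// Field names follow the NotificationTestCommand json tags in this diff.
getBackendSrv().post('/api/alert-notifications/test', {
  id: 5, // hypothetical existing notifier id
  name: 'ops-slack', // hypothetical
  type: 'slack',
  settings: {},
  secureSettings: { url: 'https://example.com/hypothetical-webhook' },
});
```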
@ -13,7 +13,6 @@ import (

func TestAlertingApiEndpoint(t *testing.T) {
  Convey("Given an alert in a dashboard with an acl", t, func() {

    singleAlert := &models.Alert{Id: 1, DashboardId: 1, Name: "singlealert"}

    bus.AddHandler("test", func(query *models.GetAlertByIdQuery) error {

@ -11,7 +11,6 @@ import (
)

func GetAnnotations(c *models.ReqContext) Response {

  query := &annotations.ItemQuery{
    From: c.QueryInt64("from"),
    To:   c.QueryInt64("to"),
@ -112,7 +112,6 @@ func (hs *HTTPServer) registerRoutes() {

  // authed api
  r.Group("/api", func(apiRoute routing.RouteRegister) {

    // user (signed in)
    apiRoute.Group("/user", func(userRoute routing.RouteRegister) {
      userRoute.Get("/", Wrap(GetSignedInUser))
@ -383,7 +382,6 @@ func (hs *HTTPServer) registerRoutes() {

    // error test
    r.Get("/metrics/error", Wrap(GenerateError))

  }, reqSignedIn)

  // admin api
@ -31,7 +31,6 @@ type NormalResponse struct {
}

func Wrap(action interface{}) macaron.Handler {

  return func(c *models.ReqContext) {
    var res Response
    val, err := c.Invoke(action)
@ -48,7 +47,6 @@ func Wrap(action interface{}) macaron.Handler {
func (r *NormalResponse) WriteTo(ctx *models.ReqContext) {
  if r.err != nil {
    ctx.Logger.Error(r.errMessage, "error", r.err, "remote_addr", ctx.RemoteAddr())

  }

  header := ctx.Resp.Header()
@ -458,7 +458,6 @@ func GetDashboardVersion(c *models.ReqContext) Response {

// POST /api/dashboards/calculate-diff performs diffs on two dashboards
func CalculateDashboardDiff(c *models.ReqContext, apiOptions dtos.CalculateDiffOptions) Response {

  guardianBase := guardian.New(apiOptions.Base.DashboardId, c.OrgId, c.SignedInUser)
  if canSave, err := guardianBase.CanSave(); err != nil || !canSave {
    return dashboardGuardianResponse(err)

@ -716,7 +716,6 @@ func TestDashboardApiEndpoint(t *testing.T) {
  })

  Convey("Post dashboard response tests", t, func() {

    // This tests that a valid request returns correct response

    Convey("Given a correct request for creating a dashboard", func() {
@ -953,7 +952,6 @@ func TestDashboardApiEndpoint(t *testing.T) {
  })

  Convey("Given provisioned dashboard", t, func() {

    bus.AddHandler("test", func(query *models.GetDashboardsBySlugQuery) error {
      query.Result = []*models.Dashboard{{}}
      return nil
@ -1062,7 +1060,6 @@ func GetDashboardShouldReturn200(sc *scenarioContext) dtos.DashboardFullWithMeta
}

func CallGetDashboard(sc *scenarioContext, hs *HTTPServer) {

  sc.handlerFunc = hs.GetDashboard
  sc.fakeReqWithParams("GET", sc.url, map[string]string{}).exec()
}
@ -225,7 +225,6 @@ func fillWithSecureJSONData(cmd *models.UpdateDataSourceCommand) error {

  secureJSONData := ds.SecureJsonData.Decrypt()
  for k, v := range secureJSONData {

    if _, ok := cmd.SecureJsonData[k]; !ok {
      cmd.SecureJsonData[k] = v
    }

@ -19,7 +19,6 @@ const (
func TestDataSourcesProxy(t *testing.T) {
  Convey("Given a user is logged in", t, func() {
    loggedInUserScenario("When calling GET on", "/api/datasources/", func(sc *scenarioContext) {

      // Stubs the database query
      bus.AddHandler("test", func(query *models.GetDataSourcesQuery) error {
        So(query.OrgId, ShouldEqual, TestOrgID)
@ -48,7 +48,7 @@ func formatShort(interval time.Duration) string {
}

func NewAlertNotification(notification *models.AlertNotification) *AlertNotification {
  return &AlertNotification{
  dto := &AlertNotification{
    Id:   notification.Id,
    Uid:  notification.Uid,
    Name: notification.Name,
@ -60,7 +60,16 @@ func NewAlertNotification(notification *models.AlertNotifica
    SendReminder:          notification.SendReminder,
    DisableResolveMessage: notification.DisableResolveMessage,
    Settings:              notification.Settings,
    SecureFields:          map[string]bool{},
  }

  if notification.SecureSettings != nil {
    for k := range notification.SecureSettings {
      dto.SecureFields[k] = true
    }
  }

  return dto
}

type AlertNotification struct {
@ -75,6 +84,7 @@ type AlertNotification struct {
  Created      time.Time        `json:"created"`
  Updated      time.Time        `json:"updated"`
  Settings     *simplejson.Json `json:"settings"`
  SecureFields map[string]bool  `json:"secureFields"`
}

func NewAlertNotificationLookup(notification *models.AlertNotification) *AlertNotificationLookup {
@ -122,12 +132,14 @@ type EvalMatch struct {
}

type NotificationTestCommand struct {
  Name                  string           `json:"name"`
  Type                  string           `json:"type"`
  SendReminder          bool             `json:"sendReminder"`
  DisableResolveMessage bool             `json:"disableResolveMessage"`
  Frequency             string           `json:"frequency"`
  Settings              *simplejson.Json `json:"settings"`
  ID                    int64             `json:"id,omitempty"`
  Name                  string            `json:"name"`
  Type                  string            `json:"type"`
  SendReminder          bool              `json:"sendReminder"`
  DisableResolveMessage bool              `json:"disableResolveMessage"`
  Frequency             string            `json:"frequency"`
  Settings              *simplejson.Json  `json:"settings"`
  SecureSettings        map[string]string `json:"secureSettings"`
}

type PauseAlertCommand struct {
@ -93,7 +93,6 @@ func (c *connection) handleMessage(message []byte) {
  case "unsubscribe":
    c.hub.subChannel <- &streamSubscription{name: streamName, conn: c, remove: true}
  }

}

func (c *connection) write(mt int, payload []byte) error {

@ -88,7 +88,6 @@ func NewStream(name string) *Stream {
}

func (s *Stream) Push(packet *models.StreamPacket) {

  messageBytes, _ := simplejson.NewFromAny(packet).Encode()

  for _, sub := range s.subscribers {

@ -30,7 +30,6 @@ import (
)

func TestDSRouteRule(t *testing.T) {

  Convey("DataSourceProxy", t, func() {
    Convey("Plugin with routes", func() {
      plugin := &plugins.DataSourcePlugin{

@ -60,7 +60,6 @@ func NewApiPluginProxy(ctx *models.ReqContext, proxyPath string, route *plugins.
  targetURL, _ := url.Parse(route.URL)

  director := func(req *http.Request) {

    req.URL.Scheme = targetURL.Scheme
    req.URL.Host = targetURL.Host
    req.Host = targetURL.Host

@ -13,7 +13,6 @@ import (
)

func TestPluginProxy(t *testing.T) {

  Convey("When getting proxy headers", t, func() {
    route := &plugins.AppPluginRoute{
      Headers: []plugins.AppPluginRouteHeader{
@ -129,7 +128,6 @@ func TestPluginProxy(t *testing.T) {
      So(route.URL, ShouldEqual, "{{.JsonData.dynamicUrl}}")
    })
  })

}

// getPluginProxiedRequest is a helper for easier setup of tests based on global config and ReqContext.

@ -8,7 +8,6 @@ import (

// POST /api/preferences/set-home-dash
func SetHomeDashboard(c *models.ReqContext, cmd models.SavePreferencesCommand) Response {

  cmd.UserId = c.UserId
  cmd.OrgId = c.OrgId

@ -72,10 +72,8 @@ type routeRegister struct {
}

func (rr *routeRegister) Insert(pattern string, fn func(RouteRegister), handlers ...macaron.Handler) {

  //loop over all groups at current level
  for _, g := range rr.groups {

    // apply routes if the prefix matches the pattern
    if g.prefix == pattern {
      g.Group("", fn)

@ -108,7 +108,6 @@ func TestRouteGroupedRegister(t *testing.T) {
  user.Group("/admin", func(admin RouteRegister) {
    admin.Delete("", emptyHandler("3"))
    admin.Get("/all", emptyHandler("3"), emptyHandler("4"), emptyHandler("5"))

  }, emptyHandler("3"))
})

@ -235,7 +234,6 @@ func TestNamedMiddlewareRouteRegister(t *testing.T) {
  user.Group("/admin", func(admin RouteRegister) {
    admin.Delete("", emptyHandler("3"))
    admin.Get("/all", emptyHandler("3"), emptyHandler("4"), emptyHandler("5"))

  }, emptyHandler("3"))
})

@ -24,7 +24,6 @@ func StarDashboard(c *models.ReqContext) Response {
}

func UnstarDashboard(c *models.ReqContext) Response {

  cmd := models.UnstarDashboardCommand{UserId: c.UserId, DashboardId: c.ParamsInt64(":id")}

  if cmd.DashboardId <= 0 {
Some files were not shown because too many files have changed in this diff.