Merge branch 'master' into data-source-settings-to-react

This commit is contained in:
Peter Holmberg 2018-10-24 14:50:33 +02:00
commit 8c742a9530
52 changed files with 2375 additions and 1669 deletions

View File

@ -242,7 +242,7 @@ jobs:
command: '/tmp/bootstrap.sh'
- run:
name: checkout enterprise
command: './scripts/build/prepare_enterprise.sh'
command: './scripts/build/prepare-enterprise.sh'
- run:
name: test enterprise
command: 'go test ./pkg/extensions/...'
@ -274,7 +274,7 @@ jobs:
command: '/tmp/bootstrap.sh'
- run:
name: checkout enterprise
command: './scripts/build/prepare_enterprise.sh'
command: './scripts/build/prepare-enterprise.sh'
- restore_cache:
key: phantomjs-binaries-{{ checksum "scripts/build/download-phantomjs.sh" }}
- run:
@ -323,18 +323,18 @@ jobs:
name: deploy to s3
command: 'aws s3 sync ./enterprise-dist s3://$ENTERPRISE_BUCKET_NAME/master'
deploy-enterprise-release:
docker:
- image: circleci/python:2.7-stretch
steps:
- attach_workspace:
at: .
- run:
name: install awscli
command: 'sudo pip install awscli'
- run:
name: deploy to s3
command: 'aws s3 sync ./enterprise-dist s3://$ENTERPRISE_BUCKET_NAME/release'
deploy-enterprise-release:
docker:
- image: circleci/python:2.7-stretch
steps:
- attach_workspace:
at: .
- run:
name: install awscli
command: 'sudo pip install awscli'
- run:
name: deploy to s3
command: 'aws s3 sync ./enterprise-dist s3://$ENTERPRISE_BUCKET_NAME/release'
deploy-master:
docker:

View File

@ -6,6 +6,7 @@
* **Postgres/MySQL/MSSQL**: Adds support for configuration of max open/idle connections and connection max lifetime. Also, panels with multiple SQL queries will now be executed concurrently [#11711](https://github.com/grafana/grafana/issues/11711), thx [@connection-reset](https://github.com/connection-reset)
* **MSSQL**: Add encrypt setting to allow configuration of how data sent between client and server are encrypted [#13629](https://github.com/grafana/grafana/issues/13629), thx [@ramiro](https://github.com/ramiro)
* **MySQL**: Support connecting through a Unix socket for the MySQL datasource [#12342](https://github.com/grafana/grafana/issues/12342), thx [@Yukinoshita-Yukino](https://github.com/Yukinoshita-Yukino)
* **Stackdriver**: Add support for authenticating via the GCE metadata server [#13669](https://github.com/grafana/grafana/issues/13669)
### Minor
@ -17,13 +18,23 @@
* Postgres/MySQL/MSSQL datasources now use `max open connections` = `unlimited` (previously 10), `max idle connections` = `2` (previously 10) and `connection max lifetime` = `4` hours (previously unlimited) by default
# 5.3.2 (unreleased)
# 5.3.2 (2018-10-24)
* **InfluxDB/Graphite/Postgres**: Prevent cross site scripting (XSS) in query editor [#13667](https://github.com/grafana/grafana/issues/13667), thx [@svenklemm](https://github.com/svenklemm)
* **Postgres**: Fix template variables error [#13692](https://github.com/grafana/grafana/issues/13692), thx [@svenklemm](https://github.com/svenklemm)
* **Cloudwatch**: Fix service panic because of race conditions [#13674](https://github.com/grafana/grafana/issues/13674), thx [@mtanda](https://github.com/mtanda)
* **Cloudwatch**: Fix check for invalid percentile statistics [#13633](https://github.com/grafana/grafana/issues/13633), thx [@apalaniuk](https://github.com/apalaniuk)
* **Stackdriver/Cloudwatch**: Allow user to change unit in graph panel if cloudwatch/stackdriver datasource response doesn't include unit [#13718](https://github.com/grafana/grafana/issues/13718), thx [@mtanda](https://github.com/mtanda)
* **Stackdriver**: Fix duplicated responses for user metrics when multiple resource types are returned [#13691](https://github.com/grafana/grafana/issues/13691)
* **Variables**: Fix text box template variable doesn't work properly without a default value [#13666](https://github.com/grafana/grafana/issues/13666)
* **Variables**: Fix variable dependency check when using `${var}` format [#13600](https://github.com/grafana/grafana/issues/13600)
* **Dashboard**: Fix kiosk=1 url parameter should put dashboard in kiosk mode [#13764](https://github.com/grafana/grafana/pull/13764)
* **LDAP**: Fix super admins can also be admins of orgs [#13710](https://github.com/grafana/grafana/issues/13710), thx [@adrien-f](https://github.com/adrien-f)
* **Provisioning**: Fix deleting provisioned dashboard folder should cleanup provisioning meta data [#13280](https://github.com/grafana/grafana/issues/13280)
### Minor
* **Docker**: adds curl back into the docker image for utility. [#13794](https://github.com/grafana/grafana/pull/13794)
# 5.3.1 (2018-10-16)

View File

@ -35,7 +35,9 @@ Grafana ships with built-in support for Google Stackdriver. Just add it as a dat
## Authentication
### Service Account Credentials - Private Key File
There are two ways to authenticate the Stackdriver plugin: either by uploading a Google JWT file, or by automatically retrieving credentials from the Google metadata server. The latter option is only available when running Grafana on a GCE virtual machine.
### Using a Google Service Account Key File
To authenticate with the Stackdriver API, you need to create a Google Cloud Platform (GCP) Service Account for the Project you want to show data for. A Grafana datasource integrates with one GCP Project. If you want to visualize data from multiple GCP Projects then you need to create one datasource per GCP Project.
@ -74,6 +76,16 @@ Click on the links above and click the `Enable` button:
{{< docs-imagebox img="/img/docs/v53/stackdriver_grafana_key_uploaded.png" class="docs-image--no-shadow" caption="Service key file is uploaded to Grafana" >}}
### Using GCE Default Service Account
If Grafana is running on a Google Compute Engine (GCE) virtual machine, it is possible for Grafana to automatically retrieve default credentials from the metadata server. This has the advantage that you do not need to generate a private key file for the service account or upload the file to Grafana. However, for this to work, a few preconditions need to be met.
1. First of all, you need to create a Service Account that can be used by the GCE virtual machine. See detailed instructions on how to do that [here](https://cloud.google.com/compute/docs/access/create-enable-service-accounts-for-instances#createanewserviceaccount).
2. Make sure the GCE virtual machine instance is being run as the service account that you just created. See instructions [here](https://cloud.google.com/compute/docs/access/create-enable-service-accounts-for-instances#using).
3. Allow access to the `Stackdriver Monitoring API` scope. See instructions [here](https://cloud.google.com/compute/docs/access/create-enable-service-accounts-for-instances#changeserviceaccountandscopes).
Read more about creating and enabling service accounts for GCE VM instances [here](https://cloud.google.com/compute/docs/access/create-enable-service-accounts-for-instances).
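For reference, here is a minimal Go sketch (not part of this change) showing how default credentials can be fetched from the GCE metadata server with `golang.org/x/oauth2/google`, the same package the data source proxy in this commit relies on; the monitoring scope URL mirrors the one used by the Stackdriver backend:
```go
package main

import (
	"context"
	"fmt"
	"log"

	"golang.org/x/oauth2/google"
)

func main() {
	ctx := context.Background()

	// On a GCE VM, DefaultTokenSource resolves credentials from the metadata
	// server; elsewhere it falls back to GOOGLE_APPLICATION_CREDENTIALS.
	tokenSrc, err := google.DefaultTokenSource(ctx, "https://www.googleapis.com/auth/monitoring.read")
	if err != nil {
		log.Fatalf("failed to get default token source: %v", err)
	}

	token, err := tokenSrc.Token()
	if err != nil {
		log.Fatalf("failed to get access token: %v", err)
	}

	// The resulting token is sent as a Bearer token on requests to the Stackdriver API.
	fmt.Println("access token acquired, expires at", token.Expiry)
}
```
Grafana performs this step for you when the data source's `authenticationType` is set to `gce`; the sketch only illustrates what happens behind the proxy.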
## Metric Query Editor
{{< docs-imagebox img="/img/docs/v53/stackdriver_query_editor.png" max-width= "400px" class="docs-image--right" >}}
@ -144,6 +156,16 @@ Example Alias By: `{{metric.type}} - {{metric.labels.instance_name}}`
Example Result: `compute.googleapis.com/instance/cpu/usage_time - server1-prod`
It is also possible to resolve the name of the Monitored Resource Type.
| Alias Pattern Format | Description | Example Result |
| ------------------------ | ------------------------------------------------| ---------------- |
| `{{resource.type}}` | returns the name of the monitored resource type | `gce_instance` |
Example Alias By: `{{resource.type}} - {{metric.type}}`
Example Result: `gce_instance - compute.googleapis.com/instance/cpu/usage_time`
## Templating
Instead of hard-coding things like server, application and sensor name in your metric queries, you can use variables in their place.
@ -194,7 +216,7 @@ Example Result: `monitoring.googleapis.com/uptime_check/http_status has this val
It's now possible to configure datasources using config files with Grafana's provisioning system. You can read more about how it works and all the settings you can set for datasources on the [provisioning docs page](/administration/provisioning/#datasources).
Here is a provisioning example for this datasource.
Here is a provisioning example using the JWT (Service Account key file) authentication type.
```yaml
apiVersion: 1
@ -206,6 +228,7 @@ datasources:
jsonData:
tokenUri: https://oauth2.googleapis.com/token
clientEmail: stackdriver@myproject.iam.gserviceaccount.com
authenticationType: jwt
defaultProject: my-project-name
secureJsonData:
privateKey: |
@ -215,3 +238,16 @@ datasources:
yA+23427282348234=
-----END PRIVATE KEY-----
```
Here is a provisioning example using GCE Default Service Account authentication.
```yaml
apiVersion: 1
datasources:
- name: Stackdriver
type: stackdriver
access: proxy
jsonData:
authenticationType: gce
```

View File

@ -28,7 +28,7 @@ installation.
```bash
wget <debian package url>
sudo apt-get install -y adduser libfontconfig
sudo dpkg -i grafana_5.1.4_amd64.deb
sudo dpkg -i grafana_<version>_amd64.deb
```
Example:

View File

@ -1,5 +1,6 @@
+++
title = "Tutorials"
type = "docs"
[menu.docs]
identifier = "tutorials"
weight = 6
@ -11,7 +12,11 @@ This section of the docs contains a series of tutorials and stack setup guides.
## Articles
- [How to integrate Hubot with Grafana](hubot_howto.md)
- [Running Grafana behind a reverse proxy]({{< relref "behind_proxy.md" >}})
- [API Tutorial: How To Create API Tokens And Dashboards For A Specific Organization]({{< relref "api_org_token_howto.md" >}})
- [How to Use IIS with URL Rewrite as a Reverse Proxy for Grafana on Windows]({{< relref "iis.md" >}})
- [How to integrate Hubot with Grafana]({{< relref "hubot_howto.md" >}})
- [How to setup Grafana for high availability]({{< relref "ha_setup.md" >}})
## External links

View File

@ -1,4 +1,4 @@
{
"stable": "5.3.1",
"testing": "5.3.1"
"stable": "5.3.2",
"testing": "5.3.2"
}

View File

@ -25,7 +25,7 @@ ENV PATH=/usr/share/grafana/bin:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bi
WORKDIR $GF_PATHS_HOME
RUN apt-get update && apt-get install -qq -y libfontconfig ca-certificates && \
RUN apt-get update && apt-get install -qq -y libfontconfig ca-certificates curl && \
apt-get autoremove -y && \
rm -rf /var/lib/apt/lists/*

View File

@ -12,6 +12,7 @@ import (
m "github.com/grafana/grafana/pkg/models"
"github.com/grafana/grafana/pkg/plugins"
"github.com/grafana/grafana/pkg/util"
"golang.org/x/oauth2/google"
)
//ApplyRoute should use the plugin route data to set auth headers and custom headers
@ -54,15 +55,30 @@ func ApplyRoute(ctx context.Context, req *http.Request, proxyPath string, route
}
}
if route.JwtTokenAuth != nil {
authenticationType := ds.JsonData.Get("authenticationType").MustString("jwt")
if route.JwtTokenAuth != nil && authenticationType == "jwt" {
if token, err := tokenProvider.getJwtAccessToken(ctx, data); err != nil {
logger.Error("Failed to get access token", "error", err)
} else {
req.Header.Add("Authorization", fmt.Sprintf("Bearer %s", token))
}
}
logger.Info("Requesting", "url", req.URL.String())
if authenticationType == "gce" {
tokenSrc, err := google.DefaultTokenSource(ctx, route.JwtTokenAuth.Scopes...)
if err != nil {
logger.Error("Failed to get default token from meta data server", "error", err)
} else {
token, err := tokenSrc.Token()
if err != nil {
logger.Error("Failed to get default access token from meta data server", "error", err)
} else {
req.Header.Add("Authorization", fmt.Sprintf("Bearer %s", token.AccessToken))
}
}
}
logger.Info("Requesting", "url", req.URL.String())
}
func interpolateString(text string, data templateData) (string, error) {

View File

@ -40,7 +40,7 @@ var New = func(dashId int64, orgId int64, user *m.SignedInUser) DashboardGuardia
user: user,
dashId: dashId,
orgId: orgId,
log: log.New("guardians.dashboard"),
log: log.New("dashboard.permissions"),
}
}
@ -66,15 +66,30 @@ func (g *dashboardGuardianImpl) CanAdmin() (bool, error) {
func (g *dashboardGuardianImpl) HasPermission(permission m.PermissionType) (bool, error) {
if g.user.OrgRole == m.ROLE_ADMIN {
return true, nil
return g.logHasPermissionResult(permission, true, nil)
}
acl, err := g.GetAcl()
if err != nil {
return false, err
return g.logHasPermissionResult(permission, false, err)
}
return g.checkAcl(permission, acl)
result, err := g.checkAcl(permission, acl)
return g.logHasPermissionResult(permission, result, err)
}
func (g *dashboardGuardianImpl) logHasPermissionResult(permission m.PermissionType, hasPermission bool, err error) (bool, error) {
if err != nil {
return hasPermission, err
}
if hasPermission {
g.log.Debug("User granted access to execute action", "userId", g.user.UserId, "orgId", g.orgId, "uname", g.user.Login, "dashId", g.dashId, "action", permission)
} else {
g.log.Debug("User denied access to execute action", "userId", g.user.UserId, "orgId", g.orgId, "uname", g.user.Login, "dashId", g.dashId, "action", permission)
}
return hasPermission, err
}
func (g *dashboardGuardianImpl) checkAcl(permission m.PermissionType, acl []*m.DashboardAclInfoDTO) (bool, error) {

View File

@ -320,13 +320,18 @@ func DeleteDashboard(cmd *m.DeleteDashboardCommand) error {
"DELETE FROM dashboard WHERE id = ?",
"DELETE FROM playlist_item WHERE type = 'dashboard_by_id' AND value = ?",
"DELETE FROM dashboard_version WHERE dashboard_id = ?",
"DELETE FROM dashboard WHERE folder_id = ?",
"DELETE FROM annotation WHERE dashboard_id = ?",
"DELETE FROM dashboard_provisioning WHERE dashboard_id = ?",
}
if dashboard.IsFolder {
deletes = append(deletes, "DELETE FROM dashboard_provisioning WHERE dashboard_id in (select id from dashboard where folder_id = ?)")
deletes = append(deletes, "DELETE FROM dashboard WHERE folder_id = ?")
}
for _, sql := range deletes {
_, err := sess.Exec(sql, dashboard.Id)
if err != nil {
return err
}

View File

@ -13,17 +13,30 @@ func TestDashboardProvisioningTest(t *testing.T) {
Convey("Testing Dashboard provisioning", t, func() {
InitTestDB(t)
saveDashboardCmd := &models.SaveDashboardCommand{
folderCmd := &models.SaveDashboardCommand{
OrgId: 1,
FolderId: 0,
IsFolder: false,
IsFolder: true,
Dashboard: simplejson.NewFromAny(map[string]interface{}{
"id": nil,
"title": "test dashboard",
}),
}
Convey("Saving dashboards with extras", func() {
err := SaveDashboard(folderCmd)
So(err, ShouldBeNil)
saveDashboardCmd := &models.SaveDashboardCommand{
OrgId: 1,
IsFolder: false,
FolderId: folderCmd.Result.Id,
Dashboard: simplejson.NewFromAny(map[string]interface{}{
"id": nil,
"title": "test dashboard",
}),
}
Convey("Saving dashboards with provisioning meta data", func() {
now := time.Now()
cmd := &models.SaveProvisionedDashboardCommand{
@ -67,6 +80,21 @@ func TestDashboardProvisioningTest(t *testing.T) {
So(err, ShouldBeNil)
So(query.Result, ShouldBeFalse)
})
Convey("Deleteing folder should delete provision meta data", func() {
deleteCmd := &models.DeleteDashboardCommand{
Id: folderCmd.Result.Id,
OrgId: 1,
}
So(DeleteDashboard(deleteCmd), ShouldBeNil)
query := &models.IsDashboardProvisionedQuery{DashboardId: cmd.Result.Id}
err = GetProvisionedDataByDashboardId(query)
So(err, ShouldBeNil)
So(query.Result, ShouldBeFalse)
})
})
})
}

View File

@ -0,0 +1,24 @@
package stackdriver
import (
"context"
"github.com/grafana/grafana/pkg/components/simplejson"
"github.com/grafana/grafana/pkg/tsdb"
)
func (e *StackdriverExecutor) ensureDefaultProject(ctx context.Context, tsdbQuery *tsdb.TsdbQuery) (*tsdb.Response, error) {
queryResult := &tsdb.QueryResult{Meta: simplejson.New(), RefId: tsdbQuery.Queries[0].RefId}
result := &tsdb.Response{
Results: make(map[string]*tsdb.QueryResult),
}
defaultProject, err := e.getDefaultProject(ctx)
if err != nil {
return nil, err
}
e.dsInfo.JsonData.Set("defaultProject", defaultProject)
queryResult.Meta.Set("defaultProject", defaultProject)
result.Results[tsdbQuery.Queries[0].RefId] = queryResult
return result, nil
}

View File

@ -16,6 +16,7 @@ import (
"time"
"golang.org/x/net/context/ctxhttp"
"golang.org/x/oauth2/google"
"github.com/grafana/grafana/pkg/api/pluginproxy"
"github.com/grafana/grafana/pkg/components/null"
@ -34,6 +35,11 @@ var (
metricNameFormat *regexp.Regexp
)
const (
gceAuthentication string = "gce"
jwtAuthentication string = "jwt"
)
// StackdriverExecutor executes queries for the Stackdriver datasource
type StackdriverExecutor struct {
httpClient *http.Client
@ -71,6 +77,8 @@ func (e *StackdriverExecutor) Query(ctx context.Context, dsInfo *models.DataSour
switch queryType {
case "annotationQuery":
result, err = e.executeAnnotationQuery(ctx, tsdbQuery)
case "ensureDefaultProjectQuery":
result, err = e.ensureDefaultProject(ctx, tsdbQuery)
case "timeSeriesQuery":
fallthrough
default:
@ -85,6 +93,16 @@ func (e *StackdriverExecutor) executeTimeSeriesQuery(ctx context.Context, tsdbQu
Results: make(map[string]*tsdb.QueryResult),
}
authenticationType := e.dsInfo.JsonData.Get("authenticationType").MustString(jwtAuthentication)
if authenticationType == gceAuthentication {
defaultProject, err := e.getDefaultProject(ctx)
if err != nil {
return nil, fmt.Errorf("Failed to retrieve default project from GCE metadata server. error: %v", err)
}
e.dsInfo.JsonData.Set("defaultProject", defaultProject)
}
queries, err := e.buildQueries(tsdbQuery)
if err != nil {
return nil, err
@ -337,11 +355,21 @@ func (e *StackdriverExecutor) unmarshalResponse(res *http.Response) (Stackdriver
func (e *StackdriverExecutor) parseResponse(queryRes *tsdb.QueryResult, data StackdriverResponse, query *StackdriverQuery) error {
metricLabels := make(map[string][]string)
resourceLabels := make(map[string][]string)
var resourceTypes []string
for _, series := range data.TimeSeries {
if !containsLabel(resourceTypes, series.Resource.Type) {
resourceTypes = append(resourceTypes, series.Resource.Type)
}
}
for _, series := range data.TimeSeries {
points := make([]tsdb.TimePoint, 0)
defaultMetricName := series.Metric.Type
if len(resourceTypes) > 1 {
defaultMetricName += " " + series.Resource.Type
}
for key, value := range series.Metric.Labels {
if !containsLabel(metricLabels[key], value) {
@ -385,7 +413,7 @@ func (e *StackdriverExecutor) parseResponse(queryRes *tsdb.QueryResult, data Sta
points = append(points, tsdb.NewTimePoint(null.FloatFrom(value), float64((point.Interval.EndTime).Unix())*1000))
}
metricName := formatLegendKeys(series.Metric.Type, defaultMetricName, series.Metric.Labels, series.Resource.Labels, make(map[string]string), query)
metricName := formatLegendKeys(series.Metric.Type, defaultMetricName, series.Resource.Type, series.Metric.Labels, series.Resource.Labels, make(map[string]string), query)
queryRes.Series = append(queryRes.Series, &tsdb.TimeSeries{
Name: metricName,
@ -411,7 +439,7 @@ func (e *StackdriverExecutor) parseResponse(queryRes *tsdb.QueryResult, data Sta
bucketBound := calcBucketBound(point.Value.DistributionValue.BucketOptions, i)
additionalLabels := map[string]string{"bucket": bucketBound}
buckets[i] = &tsdb.TimeSeries{
Name: formatLegendKeys(series.Metric.Type, defaultMetricName, series.Metric.Labels, series.Resource.Labels, additionalLabels, query),
Name: formatLegendKeys(series.Metric.Type, defaultMetricName, series.Resource.Type, series.Metric.Labels, series.Resource.Labels, additionalLabels, query),
Points: make([]tsdb.TimePoint, 0),
}
if maxKey < i {
@ -427,7 +455,7 @@ func (e *StackdriverExecutor) parseResponse(queryRes *tsdb.QueryResult, data Sta
bucketBound := calcBucketBound(point.Value.DistributionValue.BucketOptions, i)
additionalLabels := map[string]string{"bucket": bucketBound}
buckets[i] = &tsdb.TimeSeries{
Name: formatLegendKeys(series.Metric.Type, defaultMetricName, series.Metric.Labels, series.Resource.Labels, additionalLabels, query),
Name: formatLegendKeys(series.Metric.Type, defaultMetricName, series.Resource.Type, series.Metric.Labels, series.Resource.Labels, additionalLabels, query),
Points: make([]tsdb.TimePoint, 0),
}
}
@ -442,6 +470,7 @@ func (e *StackdriverExecutor) parseResponse(queryRes *tsdb.QueryResult, data Sta
queryRes.Meta.Set("resourceLabels", resourceLabels)
queryRes.Meta.Set("metricLabels", metricLabels)
queryRes.Meta.Set("groupBys", query.GroupBys)
queryRes.Meta.Set("resourceTypes", resourceTypes)
return nil
}
@ -455,7 +484,7 @@ func containsLabel(labels []string, newLabel string) bool {
return false
}
func formatLegendKeys(metricType string, defaultMetricName string, metricLabels map[string]string, resourceLabels map[string]string, additionalLabels map[string]string, query *StackdriverQuery) string {
func formatLegendKeys(metricType string, defaultMetricName string, resourceType string, metricLabels map[string]string, resourceLabels map[string]string, additionalLabels map[string]string, query *StackdriverQuery) string {
if query.AliasBy == "" {
return defaultMetricName
}
@ -469,6 +498,10 @@ func formatLegendKeys(metricType string, defaultMetricName string, metricLabels
return []byte(metricType)
}
if metaPartName == "resource.type" && resourceType != "" {
return []byte(resourceType)
}
metricPart := replaceWithMetricPart(metaPartName, metricType)
if metricPart != nil {
@ -550,8 +583,6 @@ func (e *StackdriverExecutor) createRequest(ctx context.Context, dsInfo *models.
if !ok {
return nil, errors.New("Unable to find datasource plugin Stackdriver")
}
projectName := dsInfo.JsonData.Get("defaultProject").MustString()
proxyPass := fmt.Sprintf("stackdriver%s", "v3/projects/"+projectName+"/timeSeries")
var stackdriverRoute *plugins.AppPluginRoute
for _, route := range plugin.Routes {
@ -561,7 +592,22 @@ func (e *StackdriverExecutor) createRequest(ctx context.Context, dsInfo *models.
}
}
projectName := dsInfo.JsonData.Get("defaultProject").MustString()
proxyPass := fmt.Sprintf("stackdriver%s", "v3/projects/"+projectName+"/timeSeries")
pluginproxy.ApplyRoute(ctx, req, proxyPass, stackdriverRoute, dsInfo)
return req, nil
}
func (e *StackdriverExecutor) getDefaultProject(ctx context.Context) (string, error) {
authenticationType := e.dsInfo.JsonData.Get("authenticationType").MustString(jwtAuthentication)
if authenticationType == gceAuthentication {
defaultCredentials, err := google.FindDefaultCredentials(ctx, "https://www.googleapis.com/auth/monitoring.read")
if err != nil {
return "", fmt.Errorf("Failed to retrieve default project from GCE metadata server. error: %v", err)
}
return defaultCredentials.ProjectID, nil
}
return e.dsInfo.JsonData.Get("defaultProject").MustString(), nil
}

View File

@ -34,6 +34,7 @@ export class HelpCtrl {
{ keys: ['p', 's'], description: 'Open Panel Share Modal' },
{ keys: ['p', 'd'], description: 'Duplicate Panel' },
{ keys: ['p', 'r'], description: 'Remove Panel' },
{ keys: ['p', 'l'], description: 'Toggle panel legend' },
],
'Time Range': [
{ keys: ['t', 'z'], description: 'Zoom out time range' },

View File

@ -242,6 +242,18 @@ export class KeybindingSrv {
}
});
// toggle panel legend
this.bind('p l', () => {
if (dashboard.meta.focusPanelId) {
const panelInfo = dashboard.getPanelInfoById(dashboard.meta.focusPanelId);
if (panelInfo.panel.legend) {
const panelRef = dashboard.getPanelById(dashboard.meta.focusPanelId);
panelRef.legend.show = !panelRef.legend.show;
panelRef.refresh();
}
}
});
// collapse all rows
this.bind('d shift+c', () => {
dashboard.collapseRows();

View File

@ -1,4 +1,4 @@
import TableModel from 'app/core/table_model';
import TableModel, { mergeTablesIntoModel } from 'app/core/table_model';
describe('when sorting table desc', () => {
let table;
@ -79,3 +79,118 @@ describe('when sorting with nulls', () => {
expect(values).toEqual([null, null, 'd', 'c', 'b', 'a', '', '']);
});
});
describe('mergeTables', () => {
const time = new Date().getTime();
const singleTable = new TableModel({
type: 'table',
columns: [{ text: 'Time' }, { text: 'Label Key 1' }, { text: 'Value' }],
rows: [[time, 'Label Value 1', 42]],
});
const multipleTablesSameColumns = [
new TableModel({
type: 'table',
columns: [{ text: 'Time' }, { text: 'Label Key 1' }, { text: 'Label Key 2' }, { text: 'Value #A' }],
rows: [[time, 'Label Value 1', 'Label Value 2', 42]],
}),
new TableModel({
type: 'table',
columns: [{ text: 'Time' }, { text: 'Label Key 1' }, { text: 'Label Key 2' }, { text: 'Value #B' }],
rows: [[time, 'Label Value 1', 'Label Value 2', 13]],
}),
new TableModel({
type: 'table',
columns: [{ text: 'Time' }, { text: 'Label Key 1' }, { text: 'Label Key 2' }, { text: 'Value #C' }],
rows: [[time, 'Label Value 1', 'Label Value 2', 4]],
}),
new TableModel({
type: 'table',
columns: [{ text: 'Time' }, { text: 'Label Key 1' }, { text: 'Label Key 2' }, { text: 'Value #C' }],
rows: [[time, 'Label Value 1', 'Label Value 2', 7]],
}),
];
const multipleTablesDifferentColumns = [
new TableModel({
type: 'table',
columns: [{ text: 'Time' }, { text: 'Label Key 1' }, { text: 'Value #A' }],
rows: [[time, 'Label Value 1', 42]],
}),
new TableModel({
type: 'table',
columns: [{ text: 'Time' }, { text: 'Label Key 2' }, { text: 'Value #B' }],
rows: [[time, 'Label Value 2', 13]],
}),
new TableModel({
type: 'table',
columns: [{ text: 'Time' }, { text: 'Label Key 1' }, { text: 'Value #C' }],
rows: [[time, 'Label Value 3', 7]],
}),
];
it('should return the single table as is', () => {
const table = mergeTablesIntoModel(new TableModel(), singleTable);
expect(table.columns.length).toBe(3);
expect(table.columns[0].text).toBe('Time');
expect(table.columns[1].text).toBe('Label Key 1');
expect(table.columns[2].text).toBe('Value');
});
it('should return the union of columns for multiple tables', () => {
const table = mergeTablesIntoModel(new TableModel(), ...multipleTablesSameColumns);
expect(table.columns.length).toBe(6);
expect(table.columns[0].text).toBe('Time');
expect(table.columns[1].text).toBe('Label Key 1');
expect(table.columns[2].text).toBe('Label Key 2');
expect(table.columns[3].text).toBe('Value #A');
expect(table.columns[4].text).toBe('Value #B');
expect(table.columns[5].text).toBe('Value #C');
});
it('should return 1 row for a single table', () => {
const table = mergeTablesIntoModel(new TableModel(), singleTable);
expect(table.rows.length).toBe(1);
expect(table.rows[0][0]).toBe(time);
expect(table.rows[0][1]).toBe('Label Value 1');
expect(table.rows[0][2]).toBe(42);
});
it('should return 2 rows for multiple tables with same column values plus one extra row', () => {
const table = mergeTablesIntoModel(new TableModel(), ...multipleTablesSameColumns);
expect(table.rows.length).toBe(2);
expect(table.rows[0][0]).toBe(time);
expect(table.rows[0][1]).toBe('Label Value 1');
expect(table.rows[0][2]).toBe('Label Value 2');
expect(table.rows[0][3]).toBe(42);
expect(table.rows[0][4]).toBe(13);
expect(table.rows[0][5]).toBe(4);
expect(table.rows[1][0]).toBe(time);
expect(table.rows[1][1]).toBe('Label Value 1');
expect(table.rows[1][2]).toBe('Label Value 2');
expect(table.rows[1][3]).toBeUndefined();
expect(table.rows[1][4]).toBeUndefined();
expect(table.rows[1][5]).toBe(7);
});
it('should return 2 rows for multiple tables with different column values', () => {
const table = mergeTablesIntoModel(new TableModel(), ...multipleTablesDifferentColumns);
expect(table.rows.length).toBe(2);
expect(table.columns.length).toBe(6);
expect(table.rows[0][0]).toBe(time);
expect(table.rows[0][1]).toBe('Label Value 1');
expect(table.rows[0][2]).toBe(42);
expect(table.rows[0][3]).toBe('Label Value 2');
expect(table.rows[0][4]).toBe(13);
expect(table.rows[0][5]).toBeUndefined();
expect(table.rows[1][0]).toBe(time);
expect(table.rows[1][1]).toBe('Label Value 3');
expect(table.rows[1][2]).toBeUndefined();
expect(table.rows[1][3]).toBeUndefined();
expect(table.rows[1][4]).toBeUndefined();
expect(table.rows[1][5]).toBe(7);
});
});

View File

@ -1,3 +1,5 @@
import _ from 'lodash';
interface Column {
text: string;
title?: string;
@ -14,11 +16,20 @@ export default class TableModel {
type: string;
columnMap: any;
constructor() {
constructor(table?: any) {
this.columns = [];
this.columnMap = {};
this.rows = [];
this.type = 'table';
if (table) {
if (table.columns) {
table.columns.forEach(col => this.addColumn(col));
}
if (table.rows) {
table.rows.forEach(row => this.addRow(row));
}
}
}
sort(options) {
@ -52,3 +63,100 @@ export default class TableModel {
this.rows.push(row);
}
}
// Returns true if both rows have matching non-empty fields as well as matching
// indexes where one field is empty and the other is not
function areRowsMatching(columns, row, otherRow) {
let foundFieldToMatch = false;
for (let columnIndex = 0; columnIndex < columns.length; columnIndex++) {
if (row[columnIndex] !== undefined && otherRow[columnIndex] !== undefined) {
if (row[columnIndex] !== otherRow[columnIndex]) {
return false;
}
} else if (row[columnIndex] === undefined || otherRow[columnIndex] === undefined) {
foundFieldToMatch = true;
}
}
return foundFieldToMatch;
}
export function mergeTablesIntoModel(dst?: TableModel, ...tables: TableModel[]): TableModel {
const model = dst || new TableModel();
// Single query returns data columns and rows as is
if (arguments.length === 2) {
model.columns = [...tables[0].columns];
model.rows = [...tables[0].rows];
return model;
}
// Track column indexes of union: name -> index
const columnNames = {};
// Union of all non-value columns
const columnsUnion = tables.slice().reduce((acc, series) => {
series.columns.forEach(col => {
const { text } = col;
if (columnNames[text] === undefined) {
columnNames[text] = acc.length;
acc.push(col);
}
});
return acc;
}, []);
// Map old column index to union index per series, e.g.,
// given columnNames {A: 0, B: 1} and
// data [{columns: [{ text: 'A' }]}, {columns: [{ text: 'B' }]}] => [[0], [1]]
const columnIndexMapper = tables.map(series => series.columns.map(col => columnNames[col.text]));
// Flatten rows of all series and adjust new column indexes
const flattenedRows = tables.reduce((acc, series, seriesIndex) => {
const mapper = columnIndexMapper[seriesIndex];
series.rows.forEach(row => {
const alteredRow = [];
// Shifting entries according to index mapper
mapper.forEach((to, from) => {
alteredRow[to] = row[from];
});
acc.push(alteredRow);
});
return acc;
}, []);
// Merge rows that have same values for columns
const mergedRows = {};
const compactedRows = flattenedRows.reduce((acc, row, rowIndex) => {
if (!mergedRows[rowIndex]) {
// Look from current row onwards
let offset = rowIndex + 1;
// More than one row can be merged into current row
while (offset < flattenedRows.length) {
// Find next row that could be merged
const match = _.findIndex(flattenedRows, otherRow => areRowsMatching(columnsUnion, row, otherRow), offset);
if (match > -1) {
const matchedRow = flattenedRows[match];
// Merge values from match into current row if there is a gap in the current row
for (let columnIndex = 0; columnIndex < columnsUnion.length; columnIndex++) {
if (row[columnIndex] === undefined && matchedRow[columnIndex] !== undefined) {
row[columnIndex] = matchedRow[columnIndex];
}
}
// Don't visit this row again
mergedRows[match] = matchedRow;
// Keep looking for more rows to merge
offset = match + 1;
} else {
// No match found, stop looking
break;
}
}
acc.push(row);
}
return acc;
}, []);
model.columns = columnsUnion;
model.rows = compactedRows;
return model;
}

View File

@ -8,23 +8,17 @@ const DEFAULT_EXPLORE_STATE: ExploreState = {
datasourceMissing: false,
datasourceName: '',
exploreDatasources: [],
graphResult: null,
graphRange: DEFAULT_RANGE,
history: [],
latency: 0,
loading: false,
logsResult: null,
queries: [],
queryErrors: [],
queryHints: [],
queryTransactions: [],
range: DEFAULT_RANGE,
requestOptions: null,
showingGraph: true,
showingLogs: true,
showingTable: true,
supportsGraph: null,
supportsLogs: null,
supportsTable: null,
tableResult: null,
};
describe('state functions', () => {

View File

@ -1,8 +1,17 @@
import React from 'react';
import { hot } from 'react-hot-loader';
import Select from 'react-select';
import _ from 'lodash';
import { ExploreState, ExploreUrlState, Query } from 'app/types/explore';
import {
ExploreState,
ExploreUrlState,
HistoryItem,
Query,
QueryTransaction,
Range,
ResultType,
} from 'app/types/explore';
import kbn from 'app/core/utils/kbn';
import colors from 'app/core/utils/colors';
import store from 'app/core/store';
@ -13,8 +22,8 @@ import ResetStyles from 'app/core/components/Picker/ResetStyles';
import PickerOption from 'app/core/components/Picker/PickerOption';
import IndicatorsContainer from 'app/core/components/Picker/IndicatorsContainer';
import NoOptionsMessage from 'app/core/components/Picker/NoOptionsMessage';
import TableModel, { mergeTablesIntoModel } from 'app/core/table_model';
import ElapsedTime from './ElapsedTime';
import QueryRows from './QueryRows';
import Graph from './Graph';
import Logs from './Logs';
@ -24,16 +33,6 @@ import { ensureQueries, generateQueryKey, hasQuery } from './utils/query';
const MAX_HISTORY_ITEMS = 100;
function makeHints(hints) {
const hintsByIndex = [];
hints.forEach(hint => {
if (hint) {
hintsByIndex[hint.index] = hint;
}
});
return hintsByIndex;
}
function makeTimeSeriesList(dataList, options) {
return dataList.map((seriesData, index) => {
const datapoints = seriesData.datapoints || [];
@ -52,6 +51,25 @@ function makeTimeSeriesList(dataList, options) {
});
}
/**
* Update the query history. Side-effect: store history in local storage
*/
function updateHistory(history: HistoryItem[], datasourceId: string, queries: string[]): HistoryItem[] {
const ts = Date.now();
queries.forEach(query => {
history = [{ query, ts }, ...history];
});
if (history.length > MAX_HISTORY_ITEMS) {
history = history.slice(0, MAX_HISTORY_ITEMS);
}
// Combine all queries of a datasource type into one history
const historyKey = `grafana.explore.history.${datasourceId}`;
store.setObject(historyKey, history);
return history;
}
interface ExploreProps {
datasourceSrv: any;
onChangeSplit: (split: boolean, state?: ExploreState) => void;
@ -82,6 +100,7 @@ export class Explore extends React.PureComponent<ExploreProps, ExploreState> {
} else {
const { datasource, queries, range } = props.urlState as ExploreUrlState;
initialQueries = ensureQueries(queries);
const initialRange = range || { ...DEFAULT_RANGE };
this.state = {
datasource: null,
datasourceError: null,
@ -89,23 +108,17 @@ export class Explore extends React.PureComponent<ExploreProps, ExploreState> {
datasourceMissing: false,
datasourceName: datasource,
exploreDatasources: [],
graphResult: null,
graphRange: initialRange,
history: [],
latency: 0,
loading: false,
logsResult: null,
queries: initialQueries,
queryErrors: [],
queryHints: [],
range: range || { ...DEFAULT_RANGE },
requestOptions: null,
queryTransactions: [],
range: initialRange,
showingGraph: true,
showingLogs: true,
showingTable: true,
supportsGraph: null,
supportsLogs: null,
supportsTable: null,
tableResult: null,
};
}
this.queryExpressions = initialQueries.map(q => q.query);
@ -199,14 +212,32 @@ export class Explore extends React.PureComponent<ExploreProps, ExploreState> {
};
onAddQueryRow = index => {
const { queries } = this.state;
// Local cache
this.queryExpressions[index + 1] = '';
const nextQueries = [
...queries.slice(0, index + 1),
{ query: '', key: generateQueryKey() },
...queries.slice(index + 1),
];
this.setState({ queries: nextQueries });
this.setState(state => {
const { queries, queryTransactions } = state;
// Add row by generating new react key
const nextQueries = [
...queries.slice(0, index + 1),
{ query: '', key: generateQueryKey() },
...queries.slice(index + 1),
];
// Ongoing transactions need to update their row indices
const nextQueryTransactions = queryTransactions.map(qt => {
if (qt.rowIndex > index) {
return {
...qt,
rowIndex: qt.rowIndex + 1,
};
}
return qt;
});
return { queries: nextQueries, queryTransactions: nextQueryTransactions };
});
};
onChangeDatasource = async option => {
@ -214,12 +245,7 @@ export class Explore extends React.PureComponent<ExploreProps, ExploreState> {
datasource: null,
datasourceError: null,
datasourceLoading: true,
graphResult: null,
latency: 0,
logsResult: null,
queryErrors: [],
queryHints: [],
tableResult: null,
queryTransactions: [],
});
const datasourceName = option.value;
const datasource = await this.props.datasourceSrv.get(datasourceName);
@ -230,24 +256,25 @@ export class Explore extends React.PureComponent<ExploreProps, ExploreState> {
// Keep current value in local cache
this.queryExpressions[index] = value;
// Replace query row on override
if (override) {
const { queries } = this.state;
const nextQuery: Query = {
key: generateQueryKey(index),
query: value,
};
const nextQueries = [...queries];
nextQueries[index] = nextQuery;
this.setState(state => {
// Replace query row
const { queries, queryTransactions } = state;
const nextQuery: Query = {
key: generateQueryKey(index),
query: value,
};
const nextQueries = [...queries];
nextQueries[index] = nextQuery;
this.setState(
{
queryErrors: [],
queryHints: [],
// Discard ongoing transaction related to row query
const nextQueryTransactions = queryTransactions.filter(qt => qt.rowIndex !== index);
return {
queries: nextQueries,
},
this.onSubmit
);
queryTransactions: nextQueryTransactions,
};
}, this.onSubmit);
}
};
@ -263,13 +290,8 @@ export class Explore extends React.PureComponent<ExploreProps, ExploreState> {
this.queryExpressions = [''];
this.setState(
{
graphResult: null,
logsResult: null,
latency: 0,
queries: ensureQueries(),
queryErrors: [],
queryHints: [],
tableResult: null,
queryTransactions: [],
},
this.saveState
);
@ -283,11 +305,41 @@ export class Explore extends React.PureComponent<ExploreProps, ExploreState> {
};
onClickGraphButton = () => {
this.setState(state => ({ showingGraph: !state.showingGraph }));
this.setState(
state => {
const showingGraph = !state.showingGraph;
let nextQueryTransactions = state.queryTransactions;
if (!showingGraph) {
// Discard transactions related to Graph query
nextQueryTransactions = state.queryTransactions.filter(qt => qt.resultType !== 'Graph');
}
return { queryTransactions: nextQueryTransactions, showingGraph };
},
() => {
if (this.state.showingGraph) {
this.onSubmit();
}
}
);
};
onClickLogsButton = () => {
this.setState(state => ({ showingLogs: !state.showingLogs }));
this.setState(
state => {
const showingLogs = !state.showingLogs;
let nextQueryTransactions = state.queryTransactions;
if (!showingLogs) {
// Discard transactions related to Logs query
nextQueryTransactions = state.queryTransactions.filter(qt => qt.resultType !== 'Logs');
}
return { queryTransactions: nextQueryTransactions, showingLogs };
},
() => {
if (this.state.showingLogs) {
this.onSubmit();
}
}
);
};
onClickSplit = () => {
@ -299,7 +351,22 @@ export class Explore extends React.PureComponent<ExploreProps, ExploreState> {
};
onClickTableButton = () => {
this.setState(state => ({ showingTable: !state.showingTable }));
this.setState(
state => {
const showingTable = !state.showingTable;
let nextQueryTransactions = state.queryTransactions;
if (!showingTable) {
// Discard transactions related to Table query
nextQueryTransactions = state.queryTransactions.filter(qt => qt.resultType !== 'Table');
}
return { queryTransactions: nextQueryTransactions, showingTable };
},
() => {
if (this.state.showingTable) {
this.onSubmit();
}
}
);
};
onClickTableCell = (columnKey: string, rowValue: string) => {
@ -307,39 +374,68 @@ export class Explore extends React.PureComponent<ExploreProps, ExploreState> {
};
onModifyQueries = (action: object, index?: number) => {
const { datasource, queries } = this.state;
const { datasource } = this.state;
if (datasource && datasource.modifyQuery) {
let nextQueries;
if (index === undefined) {
// Modify all queries
nextQueries = queries.map((q, i) => ({
key: generateQueryKey(i),
query: datasource.modifyQuery(this.queryExpressions[i], action),
}));
} else {
// Modify query only at index
nextQueries = [
...queries.slice(0, index),
{
key: generateQueryKey(index),
query: datasource.modifyQuery(this.queryExpressions[index], action),
},
...queries.slice(index + 1),
];
}
this.queryExpressions = nextQueries.map(q => q.query);
this.setState({ queries: nextQueries }, () => this.onSubmit());
this.setState(
state => {
const { queries, queryTransactions } = state;
let nextQueries;
let nextQueryTransactions;
if (index === undefined) {
// Modify all queries
nextQueries = queries.map((q, i) => ({
key: generateQueryKey(i),
query: datasource.modifyQuery(this.queryExpressions[i], action),
}));
// Discard all ongoing transactions
nextQueryTransactions = [];
} else {
// Modify query only at index
nextQueries = [
...queries.slice(0, index),
{
key: generateQueryKey(index),
query: datasource.modifyQuery(this.queryExpressions[index], action),
},
...queries.slice(index + 1),
];
// Discard transactions related to row query
nextQueryTransactions = queryTransactions.filter(qt => qt.rowIndex !== index);
}
this.queryExpressions = nextQueries.map(q => q.query);
return {
queries: nextQueries,
queryTransactions: nextQueryTransactions,
};
},
() => this.onSubmit()
);
}
};
onRemoveQueryRow = index => {
const { queries } = this.state;
if (queries.length <= 1) {
return;
}
const nextQueries = [...queries.slice(0, index), ...queries.slice(index + 1)];
this.queryExpressions = nextQueries.map(q => q.query);
this.setState({ queries: nextQueries }, () => this.onSubmit());
// Remove from local cache
this.queryExpressions = [...this.queryExpressions.slice(0, index), ...this.queryExpressions.slice(index + 1)];
this.setState(
state => {
const { queries, queryTransactions } = state;
if (queries.length <= 1) {
return null;
}
// Remove row from react state
const nextQueries = [...queries.slice(0, index), ...queries.slice(index + 1)];
// Discard transactions related to row query
const nextQueryTransactions = queryTransactions.filter(qt => qt.rowIndex !== index);
return {
queries: nextQueries,
queryTransactions: nextQueryTransactions,
};
},
() => this.onSubmit()
);
};
onSubmit = () => {
@ -348,7 +444,7 @@ export class Explore extends React.PureComponent<ExploreProps, ExploreState> {
this.runTableQuery();
}
if (showingGraph && supportsGraph) {
this.runGraphQuery();
this.runGraphQueries();
}
if (showingLogs && supportsLogs) {
this.runLogsQuery();
@ -356,32 +452,11 @@ export class Explore extends React.PureComponent<ExploreProps, ExploreState> {
this.saveState();
};
onQuerySuccess(datasourceId: string, queries: string[]): void {
// save queries to history
let { history } = this.state;
const { datasource } = this.state;
if (datasource.meta.id !== datasourceId) {
// Navigated away, queries did not matter
return;
}
const ts = Date.now();
queries.forEach(query => {
history = [{ query, ts }, ...history];
});
if (history.length > MAX_HISTORY_ITEMS) {
history = history.slice(0, MAX_HISTORY_ITEMS);
}
// Combine all queries of a datasource type into one history
const historyKey = `grafana.explore.history.${datasourceId}`;
store.setObject(historyKey, history);
this.setState({ history });
}
buildQueryOptions(targetOptions: { format: string; hinting?: boolean; instant?: boolean }) {
buildQueryOptions(
query: string,
rowIndex: number,
targetOptions: { format: string; hinting?: boolean; instant?: boolean }
) {
const { datasource, range } = this.state;
const resolution = this.el.offsetWidth;
const absoluteRange = {
@ -389,88 +464,235 @@ export class Explore extends React.PureComponent<ExploreProps, ExploreState> {
to: parseDate(range.to, true),
};
const { interval } = kbn.calculateInterval(absoluteRange, resolution, datasource.interval);
const targets = this.queryExpressions.map(q => ({
...targetOptions,
expr: q,
}));
const targets = [
{
...targetOptions,
// Target identifier is needed for table transformations
refId: rowIndex + 1,
expr: query,
},
];
// Clone range for query request
const queryRange: Range = { ...range };
return {
interval,
range,
targets,
range: queryRange,
};
}
async runGraphQuery() {
startQueryTransaction(query: string, rowIndex: number, resultType: ResultType, options: any): QueryTransaction {
const queryOptions = this.buildQueryOptions(query, rowIndex, options);
const transaction: QueryTransaction = {
query,
resultType,
rowIndex,
id: generateQueryKey(),
done: false,
latency: 0,
options: queryOptions,
};
// Using updater style because we might be modifying queryTransactions in quick succession
this.setState(state => {
const { queryTransactions } = state;
// Discarding existing transactions of same type
const remainingTransactions = queryTransactions.filter(
qt => !(qt.resultType === resultType && qt.rowIndex === rowIndex)
);
// Append new transaction
const nextQueryTransactions = [...remainingTransactions, transaction];
return {
queryTransactions: nextQueryTransactions,
};
});
return transaction;
}
completeQueryTransaction(
transactionId: string,
result: any,
latency: number,
queries: string[],
datasourceId: string
) {
const { datasource } = this.state;
if (datasource.meta.id !== datasourceId) {
// Navigated away, queries did not matter
return;
}
this.setState(state => {
const { history, queryTransactions } = state;
// Transaction might have been discarded
const transaction = queryTransactions.find(qt => qt.id === transactionId);
if (!transaction) {
return null;
}
// Get query hints
let hints;
if (datasource.getQueryHints) {
hints = datasource.getQueryHints(transaction.query, result);
}
// Mark transactions as complete
const nextQueryTransactions = queryTransactions.map(qt => {
if (qt.id === transactionId) {
return {
...qt,
hints,
latency,
result,
done: true,
};
}
return qt;
});
const nextHistory = updateHistory(history, datasourceId, queries);
return {
history: nextHistory,
queryTransactions: nextQueryTransactions,
};
});
}
discardTransactions(rowIndex: number) {
this.setState(state => {
const remainingTransactions = state.queryTransactions.filter(qt => qt.rowIndex !== rowIndex);
return { queryTransactions: remainingTransactions };
});
}
failQueryTransaction(transactionId: string, error: string, datasourceId: string) {
const { datasource } = this.state;
if (datasource.meta.id !== datasourceId) {
// Navigated away, queries did not matter
return;
}
this.setState(state => {
// Transaction might have been discarded
if (!state.queryTransactions.find(qt => qt.id === transactionId)) {
return null;
}
// Mark transactions as complete
const nextQueryTransactions = state.queryTransactions.map(qt => {
if (qt.id === transactionId) {
return {
...qt,
error,
done: true,
};
}
return qt;
});
return {
queryTransactions: nextQueryTransactions,
};
});
}
async runGraphQueries() {
const queries = [...this.queryExpressions];
if (!hasQuery(queries)) {
return;
}
this.setState({ latency: 0, loading: true, graphResult: null, queryErrors: [], queryHints: [] });
const now = Date.now();
const options = this.buildQueryOptions({ format: 'time_series', instant: false, hinting: true });
try {
const res = await datasource.query(options);
const result = makeTimeSeriesList(res.data, options);
const queryHints = res.hints ? makeHints(res.hints) : [];
const latency = Date.now() - now;
this.setState({ latency, loading: false, graphResult: result, queryHints, requestOptions: options });
this.onQuerySuccess(datasource.meta.id, queries);
} catch (response) {
console.error(response);
const queryError = response.data ? response.data.error : response;
this.setState({ loading: false, queryErrors: [queryError] });
}
const { datasource } = this.state;
const datasourceId = datasource.meta.id;
// Run all queries concurrently
queries.forEach(async (query, rowIndex) => {
if (query) {
const transaction = this.startQueryTransaction(query, rowIndex, 'Graph', {
format: 'time_series',
instant: false,
});
try {
const now = Date.now();
const res = await datasource.query(transaction.options);
const latency = Date.now() - now;
const results = makeTimeSeriesList(res.data, transaction.options);
this.completeQueryTransaction(transaction.id, results, latency, queries, datasourceId);
this.setState({ graphRange: transaction.options.range });
} catch (response) {
console.error(response);
const queryError = response.data ? response.data.error : response;
this.failQueryTransaction(transaction.id, queryError, datasourceId);
}
} else {
this.discardTransactions(rowIndex);
}
});
}
async runTableQuery() {
const queries = [...this.queryExpressions];
const { datasource } = this.state;
if (!hasQuery(queries)) {
return;
}
this.setState({ latency: 0, loading: true, queryErrors: [], queryHints: [], tableResult: null });
const now = Date.now();
const options = this.buildQueryOptions({
format: 'table',
instant: true,
const { datasource } = this.state;
const datasourceId = datasource.meta.id;
// Run all queries concurrently
queries.forEach(async (query, rowIndex) => {
if (query) {
const transaction = this.startQueryTransaction(query, rowIndex, 'Table', {
format: 'table',
instant: true,
valueWithRefId: true,
});
try {
const now = Date.now();
const res = await datasource.query(transaction.options);
const latency = Date.now() - now;
const results = res.data[0];
this.completeQueryTransaction(transaction.id, results, latency, queries, datasourceId);
} catch (response) {
console.error(response);
const queryError = response.data ? response.data.error : response;
this.failQueryTransaction(transaction.id, queryError, datasourceId);
}
} else {
this.discardTransactions(rowIndex);
}
});
try {
const res = await datasource.query(options);
const tableModel = res.data[0];
const latency = Date.now() - now;
this.setState({ latency, loading: false, tableResult: tableModel, requestOptions: options });
this.onQuerySuccess(datasource.meta.id, queries);
} catch (response) {
console.error(response);
const queryError = response.data ? response.data.error : response;
this.setState({ loading: false, queryErrors: [queryError] });
}
}
async runLogsQuery() {
const queries = [...this.queryExpressions];
const { datasource } = this.state;
if (!hasQuery(queries)) {
return;
}
this.setState({ latency: 0, loading: true, queryErrors: [], queryHints: [], logsResult: null });
const now = Date.now();
const options = this.buildQueryOptions({
format: 'logs',
const { datasource } = this.state;
const datasourceId = datasource.meta.id;
// Run all queries concurrently
queries.forEach(async (query, rowIndex) => {
if (query) {
const transaction = this.startQueryTransaction(query, rowIndex, 'Logs', { format: 'logs' });
try {
const now = Date.now();
const res = await datasource.query(transaction.options);
const latency = Date.now() - now;
const results = res.data;
this.completeQueryTransaction(transaction.id, results, latency, queries, datasourceId);
} catch (response) {
console.error(response);
const queryError = response.data ? response.data.error : response;
this.failQueryTransaction(transaction.id, queryError, datasourceId);
}
} else {
this.discardTransactions(rowIndex);
}
});
try {
const res = await datasource.query(options);
const logsData = res.data;
const latency = Date.now() - now;
this.setState({ latency, loading: false, logsResult: logsData, requestOptions: options });
this.onQuerySuccess(datasource.meta.id, queries);
} catch (response) {
console.error(response);
const queryError = response.data ? response.data.error : response;
this.setState({ loading: false, queryErrors: [queryError] });
}
}
request = url => {
@ -482,6 +704,7 @@ export class Explore extends React.PureComponent<ExploreProps, ExploreState> {
// Copy state, but copy queries including modifications
return {
...this.state,
queryTransactions: [],
queries: ensureQueries(this.queryExpressions.map(query => ({ query }))),
};
}
@ -499,23 +722,17 @@ export class Explore extends React.PureComponent<ExploreProps, ExploreState> {
datasourceLoading,
datasourceMissing,
exploreDatasources,
graphResult,
graphRange,
history,
latency,
loading,
logsResult,
queries,
queryErrors,
queryHints,
queryTransactions,
range,
requestOptions,
showingGraph,
showingLogs,
showingTable,
supportsGraph,
supportsLogs,
supportsTable,
tableResult,
} = this.state;
const showingBoth = showingGraph && showingTable;
const graphHeight = showingBoth ? '200px' : '400px';
@ -524,6 +741,20 @@ export class Explore extends React.PureComponent<ExploreProps, ExploreState> {
const tableButtonActive = showingBoth || showingTable ? 'active' : '';
const exploreClass = split ? 'explore explore-split' : 'explore';
const selectedDatasource = datasource ? exploreDatasources.find(d => d.label === datasource.name) : undefined;
const graphLoading = queryTransactions.some(qt => qt.resultType === 'Graph' && !qt.done);
const tableLoading = queryTransactions.some(qt => qt.resultType === 'Table' && !qt.done);
const logsLoading = queryTransactions.some(qt => qt.resultType === 'Logs' && !qt.done);
const graphResult = _.flatten(
queryTransactions.filter(qt => qt.resultType === 'Graph' && qt.done && qt.result).map(qt => qt.result)
);
const tableResult = mergeTablesIntoModel(
new TableModel(),
...queryTransactions.filter(qt => qt.resultType === 'Table' && qt.done).map(qt => qt.result)
);
const logsResult = _.flatten(
queryTransactions.filter(qt => qt.resultType === 'Logs' && qt.done).map(qt => qt.result)
);
const loading = queryTransactions.some(qt => !qt.done);
return (
<div className={exploreClass} ref={this.getRef}>
@ -581,9 +812,9 @@ export class Explore extends React.PureComponent<ExploreProps, ExploreState> {
</div>
<div className="navbar-buttons relative">
<button className="btn navbar-button--primary" onClick={this.onSubmit}>
Run Query <i className="fa fa-level-down run-icon" />
Run Query{' '}
{loading ? <i className="fa fa-spinner fa-spin run-icon" /> : <i className="fa fa-level-down run-icon" />}
</button>
{loading || latency ? <ElapsedTime time={latency} className="text-info" /> : null}
</div>
</div>
@ -602,8 +833,6 @@ export class Explore extends React.PureComponent<ExploreProps, ExploreState> {
<QueryRows
history={history}
queries={queries}
queryErrors={queryErrors}
queryHints={queryHints}
request={this.request}
onAddQueryRow={this.onAddQueryRow}
onChangeQuery={this.onChangeQuery}
@ -611,6 +840,7 @@ export class Explore extends React.PureComponent<ExploreProps, ExploreState> {
onExecuteQuery={this.onSubmit}
onRemoveQueryRow={this.onRemoveQueryRow}
supportsLogs={supportsLogs}
transactions={queryTransactions}
/>
<div className="result-options">
{supportsGraph ? (
@ -632,23 +862,22 @@ export class Explore extends React.PureComponent<ExploreProps, ExploreState> {
<main className="m-t-2">
{supportsGraph &&
showingGraph &&
graphResult && (
showingGraph && (
<Graph
data={graphResult}
height={graphHeight}
loading={loading}
loading={graphLoading}
id={`explore-graph-${position}`}
options={requestOptions}
range={graphRange}
split={split}
/>
)}
{supportsTable && showingTable ? (
<div className="panel-container">
<Table data={tableResult} loading={loading} onClickCell={this.onClickTableCell} />
<div className="panel-container m-t-2">
<Table data={tableResult} loading={tableLoading} onClickCell={this.onClickTableCell} />
</div>
) : null}
{supportsLogs && showingLogs ? <Logs data={logsResult} loading={loading} /> : null}
{supportsLogs && showingLogs ? <Logs data={logsResult} loading={logsLoading} /> : null}
</main>
</div>
) : null}

View File

@ -4,24 +4,11 @@ import { Graph } from './Graph';
import { mockData } from './__mocks__/mockData';
const setup = (propOverrides?: object) => {
const props = Object.assign(
{
data: mockData().slice(0, 19),
options: {
interval: '20s',
range: { from: 'now-6h', to: 'now' },
targets: [
{
format: 'time_series',
instant: false,
hinting: true,
expr: 'prometheus_http_request_duration_seconds_bucket',
},
],
},
},
propOverrides
);
const props = {
data: mockData().slice(0, 19),
range: { from: 'now-6h', to: 'now' },
...propOverrides,
};
// Enzyme.shallow did not work well with jquery.flop. Mocking the draw function.
Graph.prototype.draw = jest.fn();

View File

@ -5,6 +5,8 @@ import { withSize } from 'react-sizeme';
import 'vendor/flot/jquery.flot';
import 'vendor/flot/jquery.flot.time';
import { Range } from 'app/types/explore';
import * as dateMath from 'app/core/utils/datemath';
import TimeSeries from 'app/core/time_series2';
@ -74,7 +76,7 @@ interface GraphProps {
height?: string; // e.g., '200px'
id?: string;
loading?: boolean;
options: any;
range: Range;
split?: boolean;
size?: { width: number; height: number };
}
@ -101,7 +103,7 @@ export class Graph extends PureComponent<GraphProps, GraphState> {
componentDidUpdate(prevProps: GraphProps) {
if (
prevProps.data !== this.props.data ||
prevProps.options !== this.props.options ||
prevProps.range !== this.props.range ||
prevProps.split !== this.props.split ||
prevProps.height !== this.props.height ||
(prevProps.size && prevProps.size.width !== this.props.size.width)
@ -120,22 +122,22 @@ export class Graph extends PureComponent<GraphProps, GraphState> {
};
draw() {
const { options: userOptions, size } = this.props;
const { range, size } = this.props;
const data = this.getGraphData();
const $el = $(`#${this.props.id}`);
if (!data) {
$el.empty();
return;
let series = [{ data: [[0, 0]] }];
if (data && data.length > 0) {
series = data.map((ts: TimeSeries) => ({
color: ts.color,
label: ts.label,
data: ts.getFlotPairs('null'),
}));
}
const series = data.map((ts: TimeSeries) => ({
color: ts.color,
label: ts.label,
data: ts.getFlotPairs('null'),
}));
const ticks = (size.width || 0) / 100;
let { from, to } = userOptions.range;
let { from, to } = range;
if (!moment.isMoment(from)) {
from = dateMath.parse(from, false);
}
@ -157,7 +159,6 @@ export class Graph extends PureComponent<GraphProps, GraphState> {
const options = {
...FLOT_OPTIONS,
...dynamicOptions,
...userOptions,
};
$.plot($el, series, options);
}
@ -166,16 +167,11 @@ export class Graph extends PureComponent<GraphProps, GraphState> {
const { height = '100px', id = 'graph', loading = false } = this.props;
const data = this.getGraphData();
if (!loading && data.length === 0) {
return (
<div className="panel-container">
<div className="muted m-a-1">The queries returned no time series to graph.</div>
</div>
);
}
return (
<div>
{this.props.data.length > MAX_NUMBER_OF_TIME_SERIES &&
<div className="panel-container">
{loading && <div className="explore-graph__loader" />}
{this.props.data &&
this.props.data.length > MAX_NUMBER_OF_TIME_SERIES &&
!this.state.showAllTimeSeries && (
<div className="time-series-disclaimer">
<i className="fa fa-fw fa-warning disclaimer-icon" />
@ -185,10 +181,8 @@ export class Graph extends PureComponent<GraphProps, GraphState> {
}`}</span>
</div>
)}
<div className="panel-container">
<div id={id} className="explore-graph" style={{ height }} />
<Legend data={data} />
</div>
<div id={id} className="explore-graph" style={{ height }} />
<Legend data={data} />
</div>
);
}

View File

@ -198,7 +198,7 @@ class QueryField extends React.PureComponent<TypeaheadFieldProps, TypeaheadField
if (textChanged && value.selection.isCollapsed) {
// Need one paint to allow DOM-based typeahead rules to work
window.requestAnimationFrame(this.handleTypeahead);
} else {
} else if (!this.resetTimer) {
this.resetTypeahead();
}
};
@ -402,6 +402,7 @@ class QueryField extends React.PureComponent<TypeaheadFieldProps, TypeaheadField
typeaheadPrefix: '',
typeaheadContext: null,
});
this.resetTimer = null;
};
handleBlur = () => {

View File

@ -1,7 +1,18 @@
import React, { PureComponent } from 'react';
import { QueryTransaction } from 'app/types/explore';
// TODO make this datasource-plugin-dependent
import QueryField from './PromQueryField';
import QueryTransactions from './QueryTransactions';
function getFirstHintFromTransactions(transactions: QueryTransaction[]) {
const transaction = transactions.find(qt => qt.hints && qt.hints.length > 0);
if (transaction) {
return transaction.hints[0];
}
return undefined;
}
class QueryRow extends PureComponent<any, {}> {
onChangeQuery = (value, override?: boolean) => {
@ -44,13 +55,19 @@ class QueryRow extends PureComponent<any, {}> {
};
render() {
const { history, query, queryError, queryHint, request, supportsLogs } = this.props;
const { history, query, request, supportsLogs, transactions } = this.props;
const transactionWithError = transactions.find(t => t.error);
const hint = getFirstHintFromTransactions(transactions);
const queryError = transactionWithError ? transactionWithError.error : null;
return (
<div className="query-row">
<div className="query-row-status">
<QueryTransactions transactions={transactions} />
</div>
<div className="query-row-field">
<QueryField
error={queryError}
hint={queryHint}
hint={hint}
initialQuery={query}
history={history}
onClickHintFix={this.onClickHintFix}
@ -78,7 +95,7 @@ class QueryRow extends PureComponent<any, {}> {
export default class QueryRows extends PureComponent<any, {}> {
render() {
const { className = '', queries, queryErrors, queryHints, ...handlers } = this.props;
const { className = '', queries, queryHints, transactions, ...handlers } = this.props;
return (
<div className={className}>
{queries.map((q, index) => (
@ -86,8 +103,7 @@ export default class QueryRows extends PureComponent<any, {}> {
key={q.key}
index={index}
query={q.query}
queryError={queryErrors[index]}
queryHint={queryHints[index]}
transactions={transactions.filter(t => t.rowIndex === index)}
{...handlers}
/>
))}

View File

@ -0,0 +1,42 @@
import React, { PureComponent } from 'react';
import { QueryTransaction as QueryTransactionModel } from 'app/types/explore';
import ElapsedTime from './ElapsedTime';
function formatLatency(value) {
return `${(value / 1000).toFixed(1)}s`;
}
interface QueryTransactionProps {
transaction: QueryTransactionModel;
}
class QueryTransaction extends PureComponent<QueryTransactionProps> {
render() {
const { transaction } = this.props;
const className = transaction.done ? 'query-transaction' : 'query-transaction query-transaction--loading';
return (
<div className={className}>
<div className="query-transaction__type">{transaction.resultType}:</div>
<div className="query-transaction__duration">
{transaction.done ? formatLatency(transaction.latency) : <ElapsedTime />}
</div>
</div>
);
}
}
interface QueryTransactionsProps {
transactions: QueryTransactionModel[];
}
export default class QueryTransactions extends PureComponent<QueryTransactionsProps> {
render() {
const { transactions } = this.props;
return (
<div className="query-transactions">
{transactions.map((t, i) => <QueryTransaction key={`${t.query}:${t.resultType}`} transaction={t} />)}
</div>
);
}
}
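
For reference, the duration shown for a finished transaction comes from formatLatency above, which renders a latency given in milliseconds as seconds with one decimal. A minimal sketch of its behavior (type annotations added here for clarity):

function formatLatency(value: number): string {
  return `${(value / 1000).toFixed(1)}s`;
}

formatLatency(1234); // '1.2s'
formatLatency(800);  // '0.8s'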

View File

@ -5,6 +5,8 @@ import ReactTable from 'react-table';
import TableModel from 'app/core/table_model';
const EMPTY_TABLE = new TableModel();
// Identify columns that contain values
const VALUE_REGEX = /^[Vv]alue #\d+/;
interface TableProps {
data: TableModel;
@ -34,6 +36,7 @@ export default class Table extends PureComponent<TableProps> {
const columns = tableModel.columns.map(({ filterable, text }) => ({
Header: text,
accessor: text,
className: VALUE_REGEX.test(text) ? 'text-right' : '',
show: text !== 'Time',
Cell: row => <span className={filterable ? 'link' : ''}>{row.value}</span>,
}));
@ -48,7 +51,7 @@ export default class Table extends PureComponent<TableProps> {
minRows={0}
noDataText={noDataText}
resolveData={data => prepareRows(data, columnNames)}
showPagination={data}
showPagination={Boolean(data)}
/>
);
}

File diff suppressed because it is too large

View File

@ -115,7 +115,7 @@
<div class="gf-form">
<span class="gf-form-label width-9">Values</span>
<input type="text" class="gf-form-input" placeholder="name" ng-model='current.query' placeholder="1m,10m,1h,6h,1d,7d" ng-model-onblur ng-change="runQuery()" required></input>
<input type="text" class="gf-form-input" ng-model='current.query' placeholder="1m,10m,1h,6h,1d,7d" ng-model-onblur ng-change="runQuery()" required></input>
</div>
<div class="gf-form-inline">

View File

@ -429,6 +429,11 @@ describe('templateSrv', () => {
name: 'period',
current: { value: '$__auto_interval_interval', text: 'auto' },
},
{
type: 'textbox',
name: 'empty_on_init',
current: { value: '', text: '' },
},
]);
_templateSrv.setGrafanaVariable('$__auto_interval_interval', '13m');
_templateSrv.updateTemplateData();
@ -438,6 +443,11 @@ describe('templateSrv', () => {
const target = _templateSrv.replaceWithText('Server: $server, period: $period');
expect(target).toBe('Server: All, period: 13m');
});
it('should replace empty string-values with an empty string', () => {
const target = _templateSrv.replaceWithText('Hello $empty_on_init');
expect(target).toBe('Hello ');
});
});
describe('built in interval variables', () => {

View File

@ -22,6 +22,11 @@ describe('containsVariable', () => {
expect(contains).toBe(true);
});
it('should find it with [[var:option]] syntax', () => {
const contains = containsVariable('this.[[test:csv]].filters', 'test');
expect(contains).toBe(true);
});
it('should find it when part of segment', () => {
const contains = containsVariable('metrics.$env.$group-*', 'group');
expect(contains).toBe(true);
@ -36,6 +41,16 @@ describe('containsVariable', () => {
const contains = containsVariable('asd', 'asd2.$env', 'env');
expect(contains).toBe(true);
});
it('should find it with ${var} syntax', () => {
const contains = containsVariable('this.${test}.filters', 'test');
expect(contains).toBe(true);
});
it('should find it with ${var:option} syntax', () => {
const contains = containsVariable('this.${test:csv}.filters', 'test');
expect(contains).toBe(true);
});
});
});

View File

@ -1,5 +1,6 @@
import kbn from 'app/core/utils/kbn';
import _ from 'lodash';
import { variableRegex } from 'app/features/templating/variable';
function luceneEscape(value) {
return value.replace(/([\!\*\+\-\=<>\s\&\|\(\)\[\]\{\}\^\~\?\:\\/"])/g, '\\$1');
@ -8,13 +9,7 @@ function luceneEscape(value) {
export class TemplateSrv {
variables: any[];
/*
* This regex matches 3 types of variable reference with an optional format specifier
* \$(\w+) $var1
* \[\[([\s\S]+?)(?::(\w+))?\]\] [[var2]] or [[var2:fmt2]]
* \${(\w+)(?::(\w+))?} ${var3} or ${var3:fmt3}
*/
private regex = /\$(\w+)|\[\[([\s\S]+?)(?::(\w+))?\]\]|\${(\w+)(?::(\w+))?}/g;
private regex = variableRegex;
private index = {};
private grafanaVariables = {};
private builtIns = {};
@ -30,17 +25,14 @@ export class TemplateSrv {
}
updateTemplateData() {
this.index = {};
const existsOrEmpty = value => value || value === '';
for (let i = 0; i < this.variables.length; i++) {
const variable = this.variables[i];
if (!variable.current || (!variable.current.isNone && !variable.current.value)) {
continue;
this.index = this.variables.reduce((acc, currentValue) => {
if (currentValue.current && !currentValue.current.isNone && existsOrEmpty(currentValue.current.value)) {
acc[currentValue.name] = currentValue;
}
this.index[variable.name] = variable;
}
return acc;
}, {});
}
variableInitialized(variable) {
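
The behavioral change in updateTemplateData is that a variable whose current value is an empty string is now kept in the index (existsOrEmpty treats '' as a valid value), which is what the new 'Hello $empty_on_init' test earlier in this diff relies on. In short:

const existsOrEmpty = value => value || value === '';

existsOrEmpty('13m');     // true
existsOrEmpty('');        // true, an empty string now counts as an existing value
existsOrEmpty(undefined); // false, the variable is skipped as before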

View File

@ -1,6 +1,19 @@
import kbn from 'app/core/utils/kbn';
import { assignModelProperties } from 'app/core/utils/model_utils';
/*
* This regex matches 3 types of variable reference with an optional format specifier
* \$(\w+) $var1
* \[\[([\s\S]+?)(?::(\w+))?\]\] [[var2]] or [[var2:fmt2]]
* \${(\w+)(?::(\w+))?} ${var3} or ${var3:fmt3}
*/
export const variableRegex = /\$(\w+)|\[\[([\s\S]+?)(?::(\w+))?\]\]|\${(\w+)(?::(\w+))?}/g;
// Helper function since lastIndex is not reset
export const variableRegexExec = (variableString: string) => {
variableRegex.lastIndex = 0;
return variableRegex.exec(variableString);
};
export interface Variable {
setValue(option);
updateOptions();
@ -14,15 +27,16 @@ export let variableTypes = {};
export { assignModelProperties };
export function containsVariable(...args: any[]) {
let variableName = args[args.length - 1];
let str = args[0] || '';
const variableName = args[args.length - 1];
const variableString = args.slice(0, -1).join(' ');
const matches = variableString.match(variableRegex);
const isMatchingVariable =
matches !== null
? matches.find(match => {
const varMatch = variableRegexExec(match);
return varMatch !== null && varMatch.indexOf(variableName) > -1;
})
: false;
for (let i = 1; i < args.length - 1; i++) {
str += ' ' + args[i] || '';
}
variableName = kbn.regexEscape(variableName);
const findVarRegex = new RegExp('\\$(' + variableName + ')(?:\\W|$)|\\[\\[(' + variableName + ')\\]\\]', 'g');
const match = findVarRegex.exec(str);
return match !== null;
return !!isMatchingVariable;
}
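
The rewritten containsVariable now drives everything off the shared variableRegex, so all three reference syntaxes are recognized. A short usage sketch, with inputs taken from the tests earlier in this diff:

import { containsVariable, variableRegexExec } from 'app/features/templating/variable';

containsVariable('this.[[test:csv]].filters', 'test'); // true, [[var:option]] syntax
containsVariable('this.${test:csv}.filters', 'test');  // true, ${var:option} syntax
containsVariable('metrics.$env.$group-*', 'group');    // true, $var syntax

// variableRegexExec resets lastIndex before calling exec, so the shared
// global ('g') regex behaves deterministically across calls:
const match = variableRegexExec('${test:csv}');
// match[4] === 'test', match[5] === 'csv'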

View File

@ -176,7 +176,6 @@ export class PrometheusDatasource {
return this.$q.all(allQueryPromise).then(responseList => {
let result = [];
let hints = [];
_.each(responseList, (response, index) => {
if (response.status === 'error') {
@ -196,19 +195,14 @@ export class PrometheusDatasource {
end: queries[index].end,
query: queries[index].expr,
responseListLength: responseList.length,
responseIndex: index,
refId: activeTargets[index].refId,
valueWithRefId: activeTargets[index].valueWithRefId,
};
const series = this.resultTransformer.transform(response, transformerOptions);
result = [...result, ...series];
if (queries[index].hinting) {
const queryHints = getQueryHints(series, this);
hints = [...hints, ...queryHints];
}
});
return { data: result, hints };
return { data: result };
});
}
@ -437,6 +431,10 @@ export class PrometheusDatasource {
return state;
}
getQueryHints(query: string, result: any[]) {
return getQueryHints(query, result, this);
}
loadRules() {
this.metadataRequest('/api/v1/rules')
.then(res => res.data || res.json())

View File

@ -1,100 +1,92 @@
import _ from 'lodash';
export function getQueryHints(series: any[], datasource?: any): any[] {
const hints = series.map((s, i) => {
const query: string = s.query;
const index: number = s.responseIndex;
if (query === undefined || index === undefined) {
return null;
}
export function getQueryHints(query: string, series?: any[], datasource?: any): any[] {
const hints = [];
// ..._bucket metric needs a histogram_quantile()
const histogramMetric = query.trim().match(/^\w+_bucket$/);
if (histogramMetric) {
const label = 'Time series has buckets, you probably wanted a histogram.';
return {
index,
// ..._bucket metric needs a histogram_quantile()
const histogramMetric = query.trim().match(/^\w+_bucket$/);
if (histogramMetric) {
const label = 'Time series has buckets, you probably wanted a histogram.';
hints.push({
type: 'HISTOGRAM_QUANTILE',
label,
fix: {
label: 'Fix by adding histogram_quantile().',
action: {
type: 'ADD_HISTOGRAM_QUANTILE',
query,
},
},
});
}
// Check for monotony on series (table results are being ignored here)
if (series && series.length > 0) {
series.forEach(s => {
const datapoints: number[][] = s.datapoints;
if (query.indexOf('rate(') === -1 && datapoints.length > 1) {
let increasing = false;
const nonNullData = datapoints.filter(dp => dp[0] !== null);
const monotonic = nonNullData.every((dp, index) => {
if (index === 0) {
return true;
}
increasing = increasing || dp[0] > nonNullData[index - 1][0];
// monotonic?
return dp[0] >= nonNullData[index - 1][0];
});
if (increasing && monotonic) {
const simpleMetric = query.trim().match(/^\w+$/);
let label = 'Time series is monotonously increasing.';
let fix;
if (simpleMetric) {
fix = {
label: 'Fix by adding rate().',
action: {
type: 'ADD_RATE',
query,
},
};
} else {
label = `${label} Try applying a rate() function.`;
}
hints.push({
type: 'APPLY_RATE',
label,
fix,
});
}
}
});
}
// Check for recording rules expansion
if (datasource && datasource.ruleMappings) {
const mapping = datasource.ruleMappings;
const mappingForQuery = Object.keys(mapping).reduce((acc, ruleName) => {
if (query.search(ruleName) > -1) {
return {
...acc,
[ruleName]: mapping[ruleName],
};
}
return acc;
}, {});
if (_.size(mappingForQuery) > 0) {
const label = 'Query contains recording rules.';
hints.push({
type: 'EXPAND_RULES',
label,
fix: {
label: 'Fix by adding histogram_quantile().',
label: 'Expand rules',
action: {
type: 'ADD_HISTOGRAM_QUANTILE',
type: 'EXPAND_RULES',
query,
index,
mapping: mappingForQuery,
},
},
};
}
// Check for monotony
const datapoints: number[][] = s.datapoints;
if (query.indexOf('rate(') === -1 && datapoints.length > 1) {
let increasing = false;
const nonNullData = datapoints.filter(dp => dp[0] !== null);
const monotonic = nonNullData.every((dp, index) => {
if (index === 0) {
return true;
}
increasing = increasing || dp[0] > nonNullData[index - 1][0];
// monotonic?
return dp[0] >= nonNullData[index - 1][0];
});
if (increasing && monotonic) {
const simpleMetric = query.trim().match(/^\w+$/);
let label = 'Time series is monotonously increasing.';
let fix;
if (simpleMetric) {
fix = {
label: 'Fix by adding rate().',
action: {
type: 'ADD_RATE',
query,
index,
},
};
} else {
label = `${label} Try applying a rate() function.`;
}
return {
label,
index,
fix,
};
}
}
// Check for recording rules expansion
if (datasource && datasource.ruleMappings) {
const mapping = datasource.ruleMappings;
const mappingForQuery = Object.keys(mapping).reduce((acc, ruleName) => {
if (query.search(ruleName) > -1) {
return {
...acc,
[ruleName]: mapping[ruleName],
};
}
return acc;
}, {});
if (_.size(mappingForQuery) > 0) {
const label = 'Query contains recording rules.';
return {
label,
index,
fix: {
label: 'Expand rules',
action: {
type: 'EXPAND_RULES',
query,
index,
mapping: mappingForQuery,
},
},
};
}
}
// No hint found
return null;
});
return hints;
}
return hints.length > 0 ? hints : null;
}
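
The signature changes from getQueryHints(series) to getQueryHints(query, series?, datasource?), and the result is now a single flat array of hints (or null) instead of one entry per series. Mirroring the bucket-metric test further down:

const hints = getQueryHints('metric_bucket', [{ datapoints: [[23, 1000]] }]);
// => [{
//      type: 'HISTOGRAM_QUANTILE',
//      label: 'Time series has buckets, you probably wanted a histogram.',
//      fix: {
//        label: 'Fix by adding histogram_quantile().',
//        action: { type: 'ADD_HISTOGRAM_QUANTILE', query: 'metric_bucket' },
//      },
//    }]

getQueryHints('metric', [{ datapoints: [] }]); // => null, no hints found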

View File

@ -8,7 +8,14 @@ export class ResultTransformer {
const prometheusResult = response.data.data.result;
if (options.format === 'table') {
return [this.transformMetricDataToTable(prometheusResult, options.responseListLength, options.refId)];
return [
this.transformMetricDataToTable(
prometheusResult,
options.responseListLength,
options.refId,
options.valueWithRefId
),
];
} else if (options.format === 'heatmap') {
let seriesList = [];
prometheusResult.sort(sortSeriesByLabel);
@ -66,12 +73,11 @@ export class ResultTransformer {
return {
datapoints: dps,
query: options.query,
responseIndex: options.responseIndex,
target: metricLabel,
};
}
transformMetricDataToTable(md, resultCount: number, refId: string) {
transformMetricDataToTable(md, resultCount: number, refId: string, valueWithRefId?: boolean) {
const table = new TableModel();
let i, j;
const metricLabels = {};
@ -96,7 +102,7 @@ export class ResultTransformer {
metricLabels[label] = labelIndex + 1;
table.columns.push({ text: label, filterable: !label.startsWith('__') });
});
const valueText = resultCount > 1 ? `Value #${refId}` : 'Value';
const valueText = resultCount > 1 || valueWithRefId ? `Value #${refId}` : 'Value';
table.columns.push({ text: valueText });
// Populate rows, set value to empty string when label not present.
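
The new valueWithRefId flag forces per-refId value column names even when only a single query returned data; otherwise the plain 'Value' header is kept for the single-query case:

// resultCount > 1 || valueWithRefId  ->  `Value #${refId}`, e.g. 'Value #A'
// otherwise                          ->  'Value'
const valueText = resultCount > 1 || valueWithRefId ? `Value #${refId}` : 'Value';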

View File

@ -2,34 +2,31 @@ import { getQueryHints } from '../query_hints';
describe('getQueryHints()', () => {
it('returns no hints for no series', () => {
expect(getQueryHints([])).toEqual([]);
expect(getQueryHints('', [])).toEqual(null);
});
it('returns no hints for empty series', () => {
expect(getQueryHints([{ datapoints: [], query: '' }])).toEqual([null]);
expect(getQueryHints('', [{ datapoints: [] }])).toEqual(null);
});
it('returns no hint for a monotonously decreasing series', () => {
const series = [{ datapoints: [[23, 1000], [22, 1001]], query: 'metric', responseIndex: 0 }];
const hints = getQueryHints(series);
expect(hints).toEqual([null]);
const series = [{ datapoints: [[23, 1000], [22, 1001]] }];
const hints = getQueryHints('metric', series);
expect(hints).toEqual(null);
});
it('returns no hint for a flat series', () => {
const series = [
{ datapoints: [[null, 1000], [23, 1001], [null, 1002], [23, 1003]], query: 'metric', responseIndex: 0 },
];
const hints = getQueryHints(series);
expect(hints).toEqual([null]);
const series = [{ datapoints: [[null, 1000], [23, 1001], [null, 1002], [23, 1003]] }];
const hints = getQueryHints('metric', series);
expect(hints).toEqual(null);
});
it('returns a rate hint for a monotonously increasing series', () => {
const series = [{ datapoints: [[23, 1000], [24, 1001]], query: 'metric', responseIndex: 0 }];
const hints = getQueryHints(series);
const series = [{ datapoints: [[23, 1000], [24, 1001]] }];
const hints = getQueryHints('metric', series);
expect(hints.length).toBe(1);
expect(hints[0]).toMatchObject({
label: 'Time series is monotonously increasing.',
index: 0,
fix: {
action: {
type: 'ADD_RATE',
@ -40,26 +37,25 @@ describe('getQueryHints()', () => {
});
it('returns no rate hint for a monotonously increasing series that already has a rate', () => {
const series = [{ datapoints: [[23, 1000], [24, 1001]], query: 'rate(metric[1m])', responseIndex: 0 }];
const hints = getQueryHints(series);
expect(hints).toEqual([null]);
const series = [{ datapoints: [[23, 1000], [24, 1001]] }];
const hints = getQueryHints('rate(metric[1m])', series);
expect(hints).toEqual(null);
});
it('returns a rate hint w/o action for a complex monotonously increasing series', () => {
const series = [{ datapoints: [[23, 1000], [24, 1001]], query: 'sum(metric)', responseIndex: 0 }];
const hints = getQueryHints(series);
const series = [{ datapoints: [[23, 1000], [24, 1001]] }];
const hints = getQueryHints('sum(metric)', series);
expect(hints.length).toBe(1);
expect(hints[0].label).toContain('rate()');
expect(hints[0].fix).toBeUndefined();
});
it('returns a rate hint for a monotonously increasing series with missing data', () => {
const series = [{ datapoints: [[23, 1000], [null, 1001], [24, 1002]], query: 'metric', responseIndex: 0 }];
const hints = getQueryHints(series);
const series = [{ datapoints: [[23, 1000], [null, 1001], [24, 1002]] }];
const hints = getQueryHints('metric', series);
expect(hints.length).toBe(1);
expect(hints[0]).toMatchObject({
label: 'Time series is monotonously increasing.',
index: 0,
fix: {
action: {
type: 'ADD_RATE',
@ -70,12 +66,11 @@ describe('getQueryHints()', () => {
});
it('returns a histogram hint for a bucket series', () => {
const series = [{ datapoints: [[23, 1000]], query: 'metric_bucket', responseIndex: 0 }];
const hints = getQueryHints(series);
const series = [{ datapoints: [[23, 1000]] }];
const hints = getQueryHints('metric_bucket', series);
expect(hints.length).toBe(1);
expect(hints[0]).toMatchObject({
label: 'Time series has buckets, you probably wanted a histogram.',
index: 0,
fix: {
action: {
type: 'ADD_HISTOGRAM_QUANTILE',

View File

@ -5,13 +5,23 @@ export class StackdriverConfigCtrl {
jsonText: string;
validationErrors: string[] = [];
inputDataValid: boolean;
authenticationTypes: any[];
defaultAuthenticationType: string;
/** @ngInject */
constructor(datasourceSrv) {
this.defaultAuthenticationType = 'jwt';
this.datasourceSrv = datasourceSrv;
this.current.jsonData = this.current.jsonData || {};
this.current.jsonData.authenticationType = this.current.jsonData.authenticationType
? this.current.jsonData.authenticationType
: this.defaultAuthenticationType;
this.current.secureJsonData = this.current.secureJsonData || {};
this.current.secureJsonFields = this.current.secureJsonFields || {};
this.authenticationTypes = [
{ key: this.defaultAuthenticationType, value: 'Google JWT File' },
{ key: 'gce', value: 'GCE Default Service Account' },
];
}
save(jwt) {
@ -35,6 +45,10 @@ export class StackdriverConfigCtrl {
this.validationErrors.push('Client Email field missing in JWT file.');
}
if (!jwt.project_id || jwt.project_id.length === 0) {
this.validationErrors.push('Project Id field missing in JWT file.');
}
if (this.validationErrors.length === 0) {
this.inputDataValid = true;
return true;
@ -67,7 +81,7 @@ export class StackdriverConfigCtrl {
this.inputDataValid = false;
this.jsonText = '';
this.current.jsonData = {};
this.current.jsonData = Object.assign({}, { authenticationType: this.current.jsonData.authenticationType });
this.current.secureJsonData = {};
this.current.secureJsonFields = {};
}

View File

@ -1,11 +1,14 @@
import { stackdriverUnitMappings } from './constants';
import appEvents from 'app/core/app_events';
import _ from 'lodash';
export default class StackdriverDatasource {
id: number;
url: string;
baseUrl: string;
projectName: string;
authenticationType: string;
queryPromise: Promise<any>;
/** @ngInject */
constructor(instanceSettings, private backendSrv, private templateSrv, private timeSrv) {
@ -14,6 +17,7 @@ export default class StackdriverDatasource {
this.doRequest = this.doRequest;
this.id = instanceSettings.id;
this.projectName = instanceSettings.jsonData.defaultProject || '';
this.authenticationType = instanceSettings.jsonData.authenticationType || 'jwt';
}
async getTimeSeries(options) {
@ -46,16 +50,20 @@ export default class StackdriverDatasource {
};
});
const { data } = await this.backendSrv.datasourceRequest({
url: '/api/tsdb/query',
method: 'POST',
data: {
from: options.range.from.valueOf().toString(),
to: options.range.to.valueOf().toString(),
queries,
},
});
return data;
if (queries.length > 0) {
const { data } = await this.backendSrv.datasourceRequest({
url: '/api/tsdb/query',
method: 'POST',
data: {
from: options.range.from.valueOf().toString(),
to: options.range.to.valueOf().toString(),
queries,
},
});
return data;
} else {
return { results: [] };
}
}
async getLabels(metricType, refId) {
@ -106,7 +114,7 @@ export default class StackdriverDatasource {
if (!queryRes.series) {
return;
}
this.projectName = queryRes.meta.defaultProject;
const unit = this.resolvePanelUnitFromTargets(options.targets);
queryRes.series.forEach(series => {
let timeSerie: any = {
@ -121,9 +129,10 @@ export default class StackdriverDatasource {
result.push(timeSerie);
});
});
return { data: result };
} else {
return { data: [] };
}
return { data: result };
}
async annotationQuery(options) {
@ -173,76 +182,84 @@ export default class StackdriverDatasource {
throw new Error('Template variables support is not yet implemented');
}
testDatasource() {
const path = `v3/projects/${this.projectName}/metricDescriptors`;
return this.doRequest(`${this.baseUrl}${path}`)
.then(response => {
if (response.status === 200) {
return {
status: 'success',
message: 'Successfully queried the Stackdriver API.',
title: 'Success',
};
}
return {
status: 'error',
message: 'Returned http status code ' + response.status,
};
})
.catch(error => {
let message = 'Stackdriver: ';
message += error.statusText ? error.statusText + ': ' : '';
async testDatasource() {
let status, message;
const defaultErrorMessage = 'Cannot connect to Stackdriver API';
try {
const projectName = await this.getDefaultProject();
const path = `v3/projects/${projectName}/metricDescriptors`;
const response = await this.doRequest(`${this.baseUrl}${path}`);
if (response.status === 200) {
status = 'success';
message = 'Successfully queried the Stackdriver API.';
} else {
status = 'error';
message = response.statusText ? response.statusText : defaultErrorMessage;
}
} catch (error) {
status = 'error';
if (_.isString(error)) {
message = error;
} else {
message = 'Stackdriver: ';
message += error.statusText ? error.statusText : defaultErrorMessage;
if (error.data && error.data.error && error.data.error.code) {
// 400, 401
message += error.data.error.code + '. ' + error.data.error.message;
} else {
message += 'Cannot connect to Stackdriver API';
message += ': ' + error.data.error.code + '. ' + error.data.error.message;
}
return {
status: 'error',
message: message,
};
});
}
} finally {
return {
status,
message,
};
}
}
async getProjects() {
const response = await this.doRequest(`/cloudresourcemanager/v1/projects`);
return response.data.projects.map(p => ({ id: p.projectId, name: p.name }));
formatStackdriverError(error) {
let message = 'Stackdriver: ';
message += error.statusText ? error.statusText + ': ' : '';
if (error.data && error.data.error) {
try {
const res = JSON.parse(error.data.error);
message += res.error.code + '. ' + res.error.message;
} catch (err) {
message += error.data.error;
}
} else {
message += 'Cannot connect to Stackdriver API';
}
return message;
}
async getDefaultProject() {
try {
const projects = await this.getProjects();
if (projects && projects.length > 0) {
const test = projects.filter(p => p.id === this.projectName)[0];
return test;
if (this.authenticationType === 'gce' || !this.projectName) {
const { data } = await this.backendSrv.datasourceRequest({
url: '/api/tsdb/query',
method: 'POST',
data: {
queries: [
{
refId: 'ensureDefaultProjectQuery',
type: 'ensureDefaultProjectQuery',
datasourceId: this.id,
},
],
},
});
this.projectName = data.results.ensureDefaultProjectQuery.meta.defaultProject;
return this.projectName;
} else {
throw new Error('No projects found');
return this.projectName;
}
} catch (error) {
let message = 'Projects cannot be fetched: ';
message += error.statusText ? error.statusText + ': ' : '';
if (error && error.data && error.data.error && error.data.error.message) {
if (error.data.error.code === 403) {
message += `
A list of projects could not be fetched from the Google Cloud Resource Manager API.
You might need to enable it first:
https://console.developers.google.com/apis/library/cloudresourcemanager.googleapis.com`;
} else {
message += error.data.error.code + '. ' + error.data.error.message;
}
} else {
message += 'Cannot connect to Stackdriver API';
}
appEvents.emit('ds-request-error', message);
throw this.formatStackdriverError(error);
}
}
async getMetricTypes(projectId: string) {
async getMetricTypes(projectName: string) {
try {
const metricsApiPath = `v3/projects/${projectId}/metricDescriptors`;
const metricsApiPath = `v3/projects/${projectName}/metricDescriptors`;
const { data } = await this.doRequest(`${this.baseUrl}${metricsApiPath}`);
const metrics = data.metricDescriptors.map(m => {
@ -256,7 +273,8 @@ export default class StackdriverDatasource {
return metrics;
} catch (error) {
console.log(error);
appEvents.emit('ds-request-error', this.formatStackdriverError(error));
return [];
}
}
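
With GCE authentication, or when no default project is stored yet, getDefaultProject above resolves the project through the Grafana backend instead of the Cloud Resource Manager API. The request shape it sends (the datasource id here is a placeholder for this.id):

// POST /api/tsdb/query
const body = {
  queries: [
    {
      refId: 'ensureDefaultProjectQuery',
      type: 'ensureDefaultProjectQuery',
      datasourceId: 42, // placeholder
    },
  ],
};
// The resolved default project is then read from
// data.results.ensureDefaultProjectQuery.meta.defaultProject.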

View File

@ -44,7 +44,7 @@ export class FilterSegments {
this.removeSegment.value = DefaultRemoveFilterValue;
return Promise.resolve([this.removeSegment]);
} else {
return this.getFilterKeysFunc();
return this.getFilterKeysFunc(segment, DefaultRemoveFilterValue);
}
}

View File

@ -1,37 +1,54 @@
<div class="gf-form-group">
<div class="grafana-info-box">
<h5>GCP Service Account</h5>
<h4>Stackdriver Authentication</h4>
<p>There are two ways to authenticate the Stackdriver plugin - either by uploading a Service Account key file, or by
automatically retrieving credentials from the Google metadata server. The latter option is only available
when running Grafana on a GCE virtual machine.</p>
<h5>Uploading a Service Account Key File</h5>
<p>
To authenticate with the Stackdriver API, you need to create a Google Cloud Platform (GCP) Service Account for
First you need to create a Google Cloud Platform (GCP) Service Account for
the Project you want to show data for. A Grafana datasource integrates with one GCP Project. If you want to
visualize data from multiple GCP Projects then you need to create one datasource per GCP Project.
</p>
<p>
The <strong>Monitoring Viewer</strong> role provides all the permissions that Grafana needs.
The <strong>Monitoring Viewer</strong> role provides all the permissions that Grafana needs. The following API
needs to be enabled on GCP for the datasource to work: <a class="external-link" target="_blank" href="https://console.cloud.google.com/apis/library/monitoring.googleapis.com">Monitoring
API</a>
</p>
<h5>GCE Default Service Account</h5>
<p>
The following APIs need to be enabled on GCP for the datasource to work:
<ul>
<li><a class="external-link" target="_blank" href="https://console.cloud.google.com/apis/library/monitoring.googleapis.com">Monitoring
API</a></li>
<li><a class="external-link" target="_blank" href="https://console.cloud.google.com/apis/library/cloudresourcemanager.googleapis.com">Resource
Manager API</a></li>
</ul>
If Grafana is running on a Google Compute Engine (GCE) virtual machine, it is possible for Grafana to
automatically retrieve the default project id and authentication token from the metadata server. In order for this to
work, you need to make sure that you have a service account that is set up as the default account for the virtual
machine and that the service account has been given read access to the Stackdriver Monitoring API.
</p>
<p>Detailed instructions on how to create a Service Account can be found <a class="external-link" target="_blank"
href="http://docs.grafana.org/datasources/stackdriver/">in
the documentation.</a></p>
the documentation.</a>
</p>
</div>
</div>
<div class="gf-form-group">
<div class="gf-form">
<h3>Service Account Authentication</h3>
<h3>Authentication</h3>
<info-popover mode="header">Upload your Service Account key file or paste in the contents of the file. The file
contents will be encrypted and saved in the Grafana database.</info-popover>
</div>
<div ng-if="!ctrl.current.jsonData.clientEmail && !ctrl.inputDataValid">
<div class="gf-form-inline">
<div class="gf-form max-width-30">
<span class="gf-form-label width-10">Authentication Type</span>
<div class="gf-form-select-wrapper max-width-24">
<select class="gf-form-input" ng-model="ctrl.current.jsonData.authenticationType" ng-options="f.key as f.value for f in ctrl.authenticationTypes"></select>
</div>
</div>
</div>
<div ng-if="ctrl.current.jsonData.authenticationType === ctrl.defaultAuthenticationType && !ctrl.current.jsonData.clientEmail && !ctrl.inputDataValid">
<div class="gf-form-group" ng-if="!ctrl.inputDataValid">
<div class="gf-form">
<form>
@ -52,23 +69,23 @@
</div>
</div>
<div class="gf-form-group" ng-if="ctrl.inputDataValid || ctrl.current.jsonData.clientEmail">
<div class="gf-form-group" ng-if="ctrl.current.jsonData.authenticationType === ctrl.defaultAuthenticationType && (ctrl.inputDataValid || ctrl.current.jsonData.clientEmail)">
<h6>Uploaded Key Details</h6>
<div class="gf-form">
<span class="gf-form-label width-9">Project</span>
<span class="gf-form-label width-10">Project</span>
<input class="gf-form-input width-40" disabled type="text" ng-model="ctrl.current.jsonData.defaultProject" />
</div>
<div class="gf-form">
<span class="gf-form-label width-9">Client Email</span>
<input class="gf-form-input width-40" disabled type="text" ng-model="ctrl.current.jsonData.clientEmail" />
</div>
<span class="gf-form-label width-10">Client Email</span>
<input class="gf-form-input width-40" disabled type="text" ng-model="ctrl.current.jsonData.clientEmail" />
</div>
<div class="gf-form">
<span class="gf-form-label width-9">Token URI</span>
<span class="gf-form-label width-10">Token URI</span>
<input class="gf-form-input width-40" disabled type="text" ng-model='ctrl.current.jsonData.tokenUri' />
</div>
<div class="gf-form" ng-if="ctrl.current.secureJsonFields.privateKey">
<span class="gf-form-label width-9">Private Key</span>
<span class="gf-form-label width-10">Private Key</span>
<input type="text" class="gf-form-input max-width-12" disabled="disabled" value="configured">
</div>
@ -81,6 +98,8 @@
</div>
</div>
<div class="grafana-info-box" ng-hide="ctrl.current.secureJsonFields.privateKey">
Do not forget to save your changes after uploading a file.
</div>
<p class="gf-form-label" ng-hide="ctrl.current.secureJsonFields.privateKey || ctrl.current.jsonData.authenticationType !== ctrl.defaultAuthenticationType"><i
class="fa fa-save"></i> Do not forget to save your changes after uploading a file.</p>
<p class="gf-form-label" ng-show="ctrl.current.jsonData.authenticationType !== ctrl.defaultAuthenticationType"><i class="fa fa-save"></i>
Verify GCE default service account by clicking Save & Test</p>
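
Taken together, the settings produced by this panel end up with roughly the following shape; field names come from the controller and partial above, values are placeholders:

const jsonData = {
  authenticationType: 'jwt', // or 'gce' for the GCE default service account
  defaultProject: 'my-project',
  clientEmail: 'grafana@my-project.iam.gserviceaccount.com',
  tokenUri: 'https://oauth2.googleapis.com/token',
};
const secureJsonData = {
  privateKey: '...', // encrypted and saved in the Grafana database; only used for 'jwt'
};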

View File

@ -15,8 +15,7 @@
<div class="gf-form-inline">
<div class="gf-form">
<span class="gf-form-label width-9">Project</span>
<input class="gf-form-input" disabled type="text" ng-model='ctrl.target.project.name' get-options="ctrl.getProjects()"
css-class="min-width-12" />
<input class="gf-form-input" disabled type="text" ng-model='ctrl.target.defaultProject' css-class="min-width-12" />
</div>
<div class="gf-form">
<label class="gf-form-label query-keyword" ng-click="ctrl.showHelp = !ctrl.showHelp">
@ -40,8 +39,8 @@
<div class="gf-form" ng-show="ctrl.showLastQuery">
<pre class="gf-form-pre">{{ctrl.lastQueryMeta.rawQueryString}}</pre>
</div>
<div class="grafana-info-box m-t-2 markdown-html" ng-show="ctrl.showHelp">
<h5>Alias Patterns</h5>
<div class="gf-form grafana-info-box" style="padding: 0" ng-show="ctrl.showHelp">
<pre class="gf-form-pre alert alert-info" style="margin-right: 0"><h5>Alias Patterns</h5>Format the legend keys any way you want by using alias patterns.
Format the legend keys any way you want by using alias patterns.<br /> <br />

View File

@ -28,7 +28,7 @@
<div class="gf-form">
<span class="gf-form-label query-keyword width-9">Group By</span>
<div class="gf-form" ng-repeat="segment in ctrl.groupBySegments">
<metric-segment segment="segment" get-options="ctrl.getGroupBys(segment, $index)" on-change="ctrl.groupByChanged(segment, $index)"></metric-segment>
<metric-segment segment="segment" get-options="ctrl.getGroupBys(segment)" on-change="ctrl.groupByChanged(segment, $index)"></metric-segment>
</div>
</div>
<div class="gf-form gf-form--grow">

View File

@ -28,10 +28,7 @@
"method": "GET",
"url": "https://content-monitoring.googleapis.com",
"jwtTokenAuth": {
"scopes": [
"https://www.googleapis.com/auth/monitoring.read",
"https://www.googleapis.com/auth/cloudplatformprojects.readonly"
],
"scopes": ["https://www.googleapis.com/auth/monitoring.read"],
"params": {
"token_uri": "{{.JsonData.tokenUri}}",
"client_email": "{{.JsonData.clientEmail}}",

View File

@ -14,10 +14,7 @@ export interface QueryMeta {
export class StackdriverQueryCtrl extends QueryCtrl {
static templateUrl = 'partials/query.editor.html';
target: {
project: {
id: string;
name: string;
};
defaultProject: string;
unit: string;
metricType: string;
service: string;
@ -38,10 +35,7 @@ export class StackdriverQueryCtrl extends QueryCtrl {
defaultServiceValue = 'All Services';
defaults = {
project: {
id: 'default',
name: 'loading project...',
},
defaultProject: 'loading project...',
metricType: this.defaultDropdownValue,
service: this.defaultServiceValue,
metric: '',
@ -101,6 +95,5 @@ export class StackdriverQueryCtrl extends QueryCtrl {
this.lastQueryError = jsonBody.error.message;
}
}
console.error(err);
}
}

View File

@ -1,6 +1,6 @@
import coreModule from 'app/core/core_module';
import _ from 'lodash';
import { FilterSegments, DefaultRemoveFilterValue } from './filter_segments';
import { FilterSegments } from './filter_segments';
import appEvents from 'app/core/app_events';
export class StackdriverFilter {
@ -26,8 +26,10 @@ export class StackdriverFilter {
export class StackdriverFilterCtrl {
metricLabels: { [key: string]: string[] };
resourceLabels: { [key: string]: string[] };
resourceTypes: string[];
defaultRemoveGroupByValue = '-- remove group by --';
resourceTypeValue = 'resource.type';
loadLabelsPromise: Promise<any>;
service: string;
@ -72,19 +74,29 @@ export class StackdriverFilterCtrl {
this.filterSegments = new FilterSegments(
this.uiSegmentSrv,
this.target,
this.getGroupBys.bind(this, null, null, DefaultRemoveFilterValue, false),
this.getFilterKeys.bind(this),
this.getFilterValues.bind(this)
);
this.filterSegments.buildSegmentModel();
}
async getCurrentProject() {
this.target.project = await this.datasource.getDefaultProject();
return new Promise(async (resolve, reject) => {
try {
if (!this.target.defaultProject || this.target.defaultProject === 'loading project...') {
this.target.defaultProject = await this.datasource.getDefaultProject();
}
resolve(this.target.defaultProject);
} catch (error) {
appEvents.emit('ds-request-error', error);
reject();
}
});
}
async loadMetricDescriptors() {
if (this.target.project.id !== 'default') {
this.metricDescriptors = await this.datasource.getMetricTypes(this.target.project.id);
if (this.target.defaultProject !== 'loading project...') {
this.metricDescriptors = await this.datasource.getMetricTypes(this.target.defaultProject);
this.services = this.getServicesList();
this.metrics = this.getMetricsList();
return this.metricDescriptors;
@ -141,6 +153,7 @@ export class StackdriverFilterCtrl {
const data = await this.datasource.getLabels(this.target.metricType, this.target.refId);
this.metricLabels = data.results[this.target.refId].meta.metricLabels;
this.resourceLabels = data.results[this.target.refId].meta.resourceLabels;
this.resourceTypes = data.results[this.target.refId].meta.resourceTypes;
resolve();
} catch (error) {
if (error.data && error.data.message) {
@ -181,45 +194,66 @@ export class StackdriverFilterCtrl {
this.$rootScope.$broadcast('metricTypeChanged');
}
async getGroupBys(segment, index, removeText?: string, removeUsed = true) {
async createLabelKeyElements() {
await this.loadLabelsPromise;
const metricLabels = Object.keys(this.metricLabels || {})
.filter(ml => {
if (!removeUsed) {
return true;
}
return this.target.aggregation.groupBys.indexOf('metric.label.' + ml) === -1;
})
.map(l => {
return this.uiSegmentSrv.newSegment({
value: `metric.label.${l}`,
expandable: false,
});
let elements = Object.keys(this.metricLabels || {}).map(l => {
return this.uiSegmentSrv.newSegment({
value: `metric.label.${l}`,
expandable: false,
});
});
const resourceLabels = Object.keys(this.resourceLabels || {})
.filter(ml => {
if (!removeUsed) {
return true;
}
return this.target.aggregation.groupBys.indexOf('resource.label.' + ml) === -1;
})
.map(l => {
elements = [
...elements,
...Object.keys(this.resourceLabels || {}).map(l => {
return this.uiSegmentSrv.newSegment({
value: `resource.label.${l}`,
expandable: false,
});
});
}),
];
const noValueOrPlusButton = !segment || segment.type === 'plus-button';
if (noValueOrPlusButton && metricLabels.length === 0 && resourceLabels.length === 0) {
return Promise.resolve([]);
if (this.resourceTypes && this.resourceTypes.length > 0) {
elements = [
...elements,
this.uiSegmentSrv.newSegment({
value: this.resourceTypeValue,
expandable: false,
}),
];
}
this.removeSegment.value = removeText || this.defaultRemoveGroupByValue;
return Promise.resolve([...metricLabels, ...resourceLabels, this.removeSegment]);
return elements;
}
async getFilterKeys(segment, removeText?: string) {
let elements = await this.createLabelKeyElements();
if (this.target.filters.indexOf(this.resourceTypeValue) !== -1) {
elements = elements.filter(e => e.value !== this.resourceTypeValue);
}
const noValueOrPlusButton = !segment || segment.type === 'plus-button';
if (noValueOrPlusButton && elements.length === 0) {
return [];
}
this.removeSegment.value = removeText;
return [...elements, this.removeSegment];
}
async getGroupBys(segment) {
let elements = await this.createLabelKeyElements();
elements = elements.filter(e => this.target.aggregation.groupBys.indexOf(e.value) === -1);
const noValueOrPlusButton = !segment || segment.type === 'plus-button';
if (noValueOrPlusButton && elements.length === 0) {
return [];
}
this.removeSegment.value = this.defaultRemoveGroupByValue;
return [...elements, this.removeSegment];
}
groupByChanged(segment, index) {
@ -263,6 +297,10 @@ export class StackdriverFilterCtrl {
return this.resourceLabels[shortKey];
}
if (filterKey === this.resourceTypeValue) {
return this.resourceTypes;
}
return [];
}

View File

@ -6,7 +6,7 @@ import { TemplateSrvStub } from 'test/specs/helpers';
describe('StackdriverDataSource', () => {
const instanceSettings = {
jsonData: {
projectName: 'testproject',
defaultProject: 'testproject',
},
};
const templateSrv = new TemplateSrvStub();
@ -53,7 +53,9 @@ describe('StackdriverDataSource', () => {
datasourceRequest: async () =>
Promise.reject({
statusText: 'Bad Request',
data: { error: { code: 400, message: 'Field interval.endTime had an invalid value' } },
data: {
error: { code: 400, message: 'Field interval.endTime had an invalid value' },
},
}),
};
ds = new StackdriverDataSource(instanceSettings, backendSrv, templateSrv, timeSrv);
@ -67,43 +69,6 @@ describe('StackdriverDataSource', () => {
});
});
describe('when performing getProjects', () => {
describe('and call to resource manager api succeeds', () => {
let ds;
let result;
beforeEach(async () => {
const response = {
projects: [
{
projectNumber: '853996325002',
projectId: 'test-project',
lifecycleState: 'ACTIVE',
name: 'Test Project',
createTime: '2015-06-02T14:16:08.520Z',
parent: {
type: 'organization',
id: '853996325002',
},
},
],
};
const backendSrv = {
async datasourceRequest() {
return Promise.resolve({ status: 200, data: response });
},
};
ds = new StackdriverDataSource(instanceSettings, backendSrv, templateSrv, timeSrv);
result = await ds.getProjects();
});
it('should return successfully', () => {
expect(result.length).toBe(1);
expect(result[0].id).toBe('test-project');
expect(result[0].name).toBe('Test Project');
});
});
});
describe('When performing query', () => {
const options = {
range: {

View File

@ -3,7 +3,8 @@ import $ from 'jquery';
import baron from 'baron';
import coreModule from 'app/core/core_module';
coreModule.directive('graphLegend', (popoverSrv, $timeout) => {
/** @ngInject */
function graphLegendDirective(popoverSrv, $timeout) {
return {
link: (scope, elem) => {
let firstRender = true;
@ -300,4 +301,6 @@ coreModule.directive('graphLegend', (popoverSrv, $timeout) => {
}
},
};
});
}
coreModule.directive('graphLegend', graphLegendDirective);

View File

@ -147,7 +147,7 @@ class GraphCtrl extends MetricsPanelCtrl {
onInitPanelActions(actions) {
actions.push({ text: 'Export CSV', click: 'ctrl.exportCsv()' });
actions.push({ text: 'Toggle legend', click: 'ctrl.toggleLegend()' });
actions.push({ text: 'Toggle legend', click: 'ctrl.toggleLegend()', shortcut: 'p l' });
}
issueQueries(datasource) {

View File

@ -143,24 +143,6 @@ describe('when transforming time series table', () => {
},
];
const multipleQueriesDataDifferentLabels = [
{
type: 'table',
columns: [{ text: 'Time' }, { text: 'Label Key 1' }, { text: 'Value #A' }],
rows: [[time, 'Label Value 1', 42]],
},
{
type: 'table',
columns: [{ text: 'Time' }, { text: 'Label Key 2' }, { text: 'Value #B' }],
rows: [[time, 'Label Value 2', 13]],
},
{
type: 'table',
columns: [{ text: 'Time' }, { text: 'Label Key 1' }, { text: 'Value #C' }],
rows: [[time, 'Label Value 3', 7]],
},
];
describe('getColumns', () => {
it('should return data columns given a single query', () => {
const columns = transformers[transform].getColumns(singleQueryData);
@ -177,16 +159,6 @@ describe('when transforming time series table', () => {
expect(columns[3].text).toBe('Value #A');
expect(columns[4].text).toBe('Value #B');
});
it('should return the union of data columns given a multiple queries with different labels', () => {
const columns = transformers[transform].getColumns(multipleQueriesDataDifferentLabels);
expect(columns[0].text).toBe('Time');
expect(columns[1].text).toBe('Label Key 1');
expect(columns[2].text).toBe('Value #A');
expect(columns[3].text).toBe('Label Key 2');
expect(columns[4].text).toBe('Value #B');
expect(columns[5].text).toBe('Value #C');
});
});
describe('transform', () => {
@ -237,26 +209,6 @@ describe('when transforming time series table', () => {
expect(table.rows[1][4]).toBeUndefined();
expect(table.rows[1][5]).toBe(7);
});
it('should return 2 rows for multiple queries with different label values', () => {
table = transformDataToTable(multipleQueriesDataDifferentLabels, panel);
expect(table.rows.length).toBe(2);
expect(table.columns.length).toBe(6);
expect(table.rows[0][0]).toBe(time);
expect(table.rows[0][1]).toBe('Label Value 1');
expect(table.rows[0][2]).toBe(42);
expect(table.rows[0][3]).toBe('Label Value 2');
expect(table.rows[0][4]).toBe(13);
expect(table.rows[0][5]).toBeUndefined();
expect(table.rows[1][0]).toBe(time);
expect(table.rows[1][1]).toBe('Label Value 3');
expect(table.rows[1][2]).toBeUndefined();
expect(table.rows[1][3]).toBeUndefined();
expect(table.rows[1][4]).toBeUndefined();
expect(table.rows[1][5]).toBe(7);
});
});
});
});

View File

@ -1,7 +1,7 @@
import _ from 'lodash';
import flatten from '../../../core/utils/flatten';
import TimeSeries from '../../../core/time_series2';
import TableModel from '../../../core/table_model';
import flatten from 'app/core/utils/flatten';
import TimeSeries from 'app/core/time_series2';
import TableModel, { mergeTablesIntoModel } from 'app/core/table_model';
const transformers = {};
@ -168,97 +168,7 @@ transformers['table'] = {
};
}
// Single query returns data columns and rows as is
if (data.length === 1) {
model.columns = [...data[0].columns];
model.rows = [...data[0].rows];
return;
}
// Track column indexes of union: name -> index
const columnNames = {};
// Union of all non-value columns
const columnsUnion = data.reduce((acc, series) => {
series.columns.forEach(col => {
const { text } = col;
if (columnNames[text] === undefined) {
columnNames[text] = acc.length;
acc.push(col);
}
});
return acc;
}, []);
// Map old column index to union index per series, e.g.,
// given columnNames {A: 0, B: 1} and
// data [{columns: [{ text: 'A' }]}, {columns: [{ text: 'B' }]}] => [[0], [1]]
const columnIndexMapper = data.map(series => series.columns.map(col => columnNames[col.text]));
// Flatten rows of all series and adjust new column indexes
const flattenedRows = data.reduce((acc, series, seriesIndex) => {
const mapper = columnIndexMapper[seriesIndex];
series.rows.forEach(row => {
const alteredRow = [];
// Shifting entries according to index mapper
mapper.forEach((to, from) => {
alteredRow[to] = row[from];
});
acc.push(alteredRow);
});
return acc;
}, []);
// Returns true if both rows have matching non-empty fields as well as matching
// indexes where one field is empty and the other is not
function areRowsMatching(columns, row, otherRow) {
let foundFieldToMatch = false;
for (let columnIndex = 0; columnIndex < columns.length; columnIndex++) {
if (row[columnIndex] !== undefined && otherRow[columnIndex] !== undefined) {
if (row[columnIndex] !== otherRow[columnIndex]) {
return false;
}
} else if (row[columnIndex] === undefined || otherRow[columnIndex] === undefined) {
foundFieldToMatch = true;
}
}
return foundFieldToMatch;
}
// Merge rows that have same values for columns
const mergedRows = {};
const compactedRows = flattenedRows.reduce((acc, row, rowIndex) => {
if (!mergedRows[rowIndex]) {
// Look from current row onwards
let offset = rowIndex + 1;
// More than one row can be merged into current row
while (offset < flattenedRows.length) {
// Find next row that could be merged
const match = _.findIndex(flattenedRows, otherRow => areRowsMatching(columnsUnion, row, otherRow), offset);
if (match > -1) {
const matchedRow = flattenedRows[match];
// Merge values from match into current row if there is a gap in the current row
for (let columnIndex = 0; columnIndex < columnsUnion.length; columnIndex++) {
if (row[columnIndex] === undefined && matchedRow[columnIndex] !== undefined) {
row[columnIndex] = matchedRow[columnIndex];
}
}
// Don't visit this row again
mergedRows[match] = matchedRow;
// Keep looking for more rows to merge
offset = match + 1;
} else {
// No match found, stop looking
break;
}
}
acc.push(row);
}
return acc;
}, []);
model.columns = columnsUnion;
model.rows = compactedRows;
mergeTablesIntoModel(model, ...data);
},
};
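
The column-union and row-merging logic removed here now lives in mergeTablesIntoModel, exported from app/core/table_model and called with the per-query table results. A call-shape sketch with made-up values (column layout borrowed from the removed test data earlier in this diff):

import TableModel, { mergeTablesIntoModel } from 'app/core/table_model';

const data: any[] = [
  { columns: [{ text: 'Time' }, { text: 'Label Key 1' }, { text: 'Value #A' }], rows: [[1510000000000, 'Label Value 1', 42]] },
  { columns: [{ text: 'Time' }, { text: 'Label Key 2' }, { text: 'Value #B' }], rows: [[1510000000000, 'Label Value 2', 13]] },
];
const model = new TableModel();
mergeTablesIntoModel(model, ...data);
// model.columns becomes the union of the input columns; rows are flattened and,
// where their non-value columns line up, merged (mirroring the logic removed above).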

View File

@ -88,7 +88,7 @@ function setViewModeBodyClass(body, mode, sidemenuOpen: boolean) {
break;
}
// 1 & true for legacy states
case 1:
case '1':
case true: {
body.removeClass('sidemenu-open');
body.addClass('view-mode--kiosk');
@ -176,16 +176,16 @@ export function grafanaAppDirective(playlistSrv, contextSrv, $timeout, $rootScop
const search = $location.search();
if (options && options.exit) {
search.kiosk = 1;
search.kiosk = '1';
}
switch (search.kiosk) {
case 'tv': {
search.kiosk = 1;
search.kiosk = true;
appEvents.emit('alert-success', ['Press ESC to exit Kiosk mode']);
break;
}
case 1:
case '1':
case true: {
delete search.kiosk;
break;

View File

@ -3,6 +3,11 @@ interface ExploreDatasource {
label: string;
}
export interface HistoryItem {
ts: number;
query: string;
}
export interface Range {
from: string;
to: string;
@ -13,6 +18,19 @@ export interface Query {
key?: string;
}
export interface QueryTransaction {
id: string;
done: boolean;
error?: string;
hints?: any[];
latency: number;
options: any;
query: string;
result?: any; // Table model / Timeseries[] / Logs
resultType: ResultType;
rowIndex: number;
}
export interface TextMatch {
text: string;
start: number;
@ -27,34 +45,25 @@ export interface ExploreState {
datasourceMissing: boolean;
datasourceName?: string;
exploreDatasources: ExploreDatasource[];
graphResult: any;
history: any[];
latency: number;
loading: any;
logsResult: any;
graphRange: Range;
history: HistoryItem[];
/**
* Initial rows of queries to push down the tree.
* Modifications do not end up here, but in `this.queryExpressions`.
* The only way to reset a query is to change its `key`.
*/
queries: Query[];
/**
* Errors caused by the running the query row.
*/
queryErrors: any[];
/**
* Hints gathered for the query row.
*/
queryHints: any[];
queryTransactions: QueryTransaction[];
range: Range;
requestOptions: any;
showingGraph: boolean;
showingLogs: boolean;
showingTable: boolean;
supportsGraph: boolean | null;
supportsLogs: boolean | null;
supportsTable: boolean | null;
tableResult: any;
}
export interface ExploreUrlState {
@ -62,3 +71,5 @@ export interface ExploreUrlState {
queries: Query[];
range: Range;
}
export type ResultType = 'Graph' | 'Logs' | 'Table';
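
QueryTransaction is the new unit of per-row query state in Explore; an illustrative instance (all values made up):

import { QueryTransaction } from 'app/types/explore';

const tx: QueryTransaction = {
  id: '1',              // made-up id
  done: false,          // flips to true once the result arrives
  latency: 0,           // filled in once done; formatLatency treats it as milliseconds
  options: {},          // datasource request options
  query: 'up',          // made-up query expression
  resultType: 'Graph',  // 'Graph' | 'Logs' | 'Table'
  rowIndex: 0,          // which query row produced this transaction
};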

View File

@ -74,7 +74,7 @@
}
}
.elapsed-time {
.navbar .elapsed-time {
position: absolute;
left: 0;
right: 0;
@ -87,6 +87,37 @@
flex-wrap: wrap;
}
.explore-graph__loader {
height: 2px;
position: relative;
overflow: hidden;
background: $text-color-faint;
margin: $panel-margin / 2;
}
.explore-graph__loader:after {
content: ' ';
display: block;
width: 25%;
top: 0;
top: -50%;
height: 250%;
position: absolute;
animation: loader 2s cubic-bezier(0.17, 0.67, 0.83, 0.67);
animation-iteration-count: 100;
z-index: 2;
background: $blue;
}
@keyframes loader {
from {
left: -25%;
}
to {
left: 100%;
}
}
.datasource-picker {
min-width: 200px;
}
@ -119,6 +150,7 @@
.query-row {
display: flex;
position: relative;
& + & {
margin-top: 0.5rem;
@ -129,11 +161,53 @@
white-space: nowrap;
}
.query-row-status {
position: absolute;
top: 0;
right: 90px;
z-index: 1024;
display: flex;
flex-direction: column;
justify-content: center;
height: 34px;
}
.query-row-field {
margin-right: 3px;
width: 100%;
}
.query-transactions {
display: table;
}
.query-transaction {
display: table-row;
color: $text-color-faint;
line-height: 1.44;
}
.query-transaction--loading {
animation: query-loading-color-change 1s alternate 100;
}
@keyframes query-loading-color-change {
from {
color: $text-color-faint;
}
to {
color: $blue;
}
}
.query-transaction__type,
.query-transaction__duration {
display: table-cell;
font-size: $font-size-xs;
text-align: right;
padding-right: 0.25em;
}
.explore {
.logs {
.logs-entries {