diff --git a/CHANGELOG.md b/CHANGELOG.md index 7164f5d99a9..4bddf5e0f32 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -2,6 +2,11 @@ ### Minor * **Pushover**: Adds support for images in pushover notifier [#10780](https://github.com/grafana/grafana/issues/10780), thx [@jpenalbae](https://github.com/jpenalbae) +* **Stackdriver**: Template variables in filters using globbing format [#15182](https://github.com/grafana/grafana/issues/15182) +* **Cloudwatch**: Add `resource_arns` template variable query function [#8207](https://github.com/grafana/grafana/issues/8207), thx [@jeroenvollenbrock](https://github.com/jeroenvollenbrock) +* **Cloudwatch**: Add AWS/Neptune metrics [#14231](https://github.com/grafana/grafana/issues/14231), thx [@tcpatterson](https://github.com/tcpatterson) +* **Cloudwatch**: Add AWS RDS ServerlessDatabaseCapacity metric [#15265](https://github.com/grafana/grafana/pull/15265), thx [@larsjoergensen](https://github.com/larsjoergensen) +* **Annotations**: Support PATCH verb in annotations http api [#12546](https://github.com/grafana/grafana/issues/12546), thx [@SamuelToh](https://github.com/SamuelToh) # 6.0.0-beta1 (2019-01-30) diff --git a/Gopkg.lock b/Gopkg.lock index d7795cbd6ba..dca36f1b3d0 100644 --- a/Gopkg.lock +++ b/Gopkg.lock @@ -37,6 +37,7 @@ "aws/credentials", "aws/credentials/ec2rolecreds", "aws/credentials/endpointcreds", + "aws/credentials/processcreds", "aws/credentials/stscreds", "aws/csm", "aws/defaults", @@ -45,13 +46,18 @@ "aws/request", "aws/session", "aws/signer/v4", + "internal/ini", + "internal/s3err", "internal/sdkio", "internal/sdkrand", + "internal/sdkuri", "internal/shareddefaults", "private/protocol", "private/protocol/ec2query", "private/protocol/eventstream", "private/protocol/eventstream/eventstreamapi", + "private/protocol/json/jsonutil", + "private/protocol/jsonrpc", "private/protocol/query", "private/protocol/query/queryutil", "private/protocol/rest", @@ -60,11 +66,13 @@ "service/cloudwatch", "service/ec2", "service/ec2/ec2iface", + "service/resourcegroupstaggingapi", + "service/resourcegroupstaggingapi/resourcegroupstaggingapiiface", "service/s3", "service/sts" ] - revision = "fde4ded7becdeae4d26bf1212916aabba79349b4" - version = "v1.14.12" + revision = "62936e15518acb527a1a9cb4a39d96d94d0fd9a2" + version = "v1.16.15" [[projects]] branch = "master" diff --git a/devenv/docker/blocks/loki/config.yaml b/devenv/docker/blocks/loki/config.yaml new file mode 100644 index 00000000000..9451b6ba79b --- /dev/null +++ b/devenv/docker/blocks/loki/config.yaml @@ -0,0 +1,27 @@ +server: + http_listen_port: 9080 + grpc_listen_port: 0 + +positions: + filename: /tmp/positions.yaml + +client: + url: http://loki:3100/api/prom/push + +scrape_configs: +- job_name: system + entry_parser: raw + static_configs: + - targets: + - localhost + labels: + job: varlogs + __path__: /var/log/*log +- job_name: grafana + entry_parser: raw + static_configs: + - targets: + - localhost + labels: + job: grafana + __path__: /var/log/grafana/*log diff --git a/devenv/docker/blocks/loki/docker-compose.yaml b/devenv/docker/blocks/loki/docker-compose.yaml index d6cf21f7856..0ac5d439354 100644 --- a/devenv/docker/blocks/loki/docker-compose.yaml +++ b/devenv/docker/blocks/loki/docker-compose.yaml @@ -1,22 +1,14 @@ -version: "3" - -networks: - loki: - -services: loki: image: grafana/loki:master ports: - "3100:3100" command: -config.file=/etc/loki/local-config.yaml - networks: - - loki promtail: image: grafana/promtail:master volumes: + - 
./docker/blocks/loki/config.yaml:/etc/promtail/docker-config.yaml - /var/log:/var/log + - ../data/log:/var/log/grafana command: -config.file=/etc/promtail/docker-config.yaml - networks: - - loki diff --git a/docs/sources/features/datasources/cloudwatch.md b/docs/sources/features/datasources/cloudwatch.md index 22f9f38c854..6ca10b2f8e8 100644 --- a/docs/sources/features/datasources/cloudwatch.md +++ b/docs/sources/features/datasources/cloudwatch.md @@ -74,6 +74,12 @@ Here is a minimal policy example: "ec2:DescribeRegions" ], "Resource": "*" + }, + { + "Sid": "AllowReadingResourcesForTags", + "Effect" : "Allow", + "Action" : "tag:GetResources", + "Resource" : "*" } ] } @@ -128,6 +134,7 @@ Name | Description *dimension_values(region, namespace, metric, dimension_key, [filters])* | Returns a list of dimension values matching the specified `region`, `namespace`, `metric`, `dimension_key` or you can use dimension `filters` to get more specific result as well. *ebs_volume_ids(region, instance_id)* | Returns a list of volume ids matching the specified `region`, `instance_id`. *ec2_instance_attribute(region, attribute_name, filters)* | Returns a list of attributes matching the specified `region`, `attribute_name`, `filters`. +*resource_arns(region, resource_type, tags)* | Returns a list of ARNs matching the specified `region`, `resource_type` and `tags`. For details about the metrics CloudWatch provides, please refer to the [CloudWatch documentation](https://docs.aws.amazon.com/AmazonCloudWatch/latest/DeveloperGuide/CW_Support_For_AWS.html). @@ -143,6 +150,8 @@ Query | Service *dimension_values(us-east-1,AWS/RDS,CPUUtilization,DBInstanceIdentifier)* | RDS *dimension_values(us-east-1,AWS/S3,BucketSizeBytes,BucketName)* | S3 *dimension_values(us-east-1,CWAgent,disk_used_percent,device,{"InstanceId":"$instance_id"})* | CloudWatch Agent +*resource_arns(eu-west-1,elasticloadbalancing:loadbalancer,{"elasticbeanstalk:environment-name":["myApp-dev","myApp-prod"]})* | ELB +*resource_arns(eu-west-1,ec2:instance,{"elasticbeanstalk:environment-name":["myApp-dev","myApp-prod"]})* | EC2 ## ec2_instance_attribute examples @@ -205,6 +214,16 @@ Example `ec2_instance_attribute()` query ec2_instance_attribute(us-east-1, Tags.Name, { "tag:Team": [ "sysops" ] }) ``` +## Using JSON format template variables + +Some queries accept a filter in JSON format. Grafana supports interpolating a template variable into a JSON-formatted string, which can then be used as the filter. + +If `env = 'production', 'staging'`, the following query returns the ARNs of EC2 instances whose `Environment` tag is `production` or `staging`. + +``` +resource_arns(us-east-1, ec2:instance, {"Environment":${env:json}}) +``` + ## Cost Amazon provides 1 million CloudWatch API requests each month at no additional charge. Past this, diff --git a/docs/sources/http_api/annotations.md b/docs/sources/http_api/annotations.md index 6633714d77b..e1d2876f48a 100644 --- a/docs/sources/http_api/annotations.md +++ b/docs/sources/http_api/annotations.md @@ -97,7 +97,7 @@ Creates an annotation in the Grafana database. The `dashboardId` and `panelId` f **Example Request**: -```json +```http POST /api/annotations HTTP/1.1 Accept: application/json Content-Type: application/json @@ -115,7 +115,7 @@ Content-Type: application/json **Example Response**: -```json +```http HTTP/1.1 200 Content-Type: application/json @@ -135,7 +135,7 @@ format (string with multiple tags being separated by a space). 
**Example Request**: -```json +```http POST /api/annotations/graphite HTTP/1.1 Accept: application/json Content-Type: application/json @@ -150,7 +150,7 @@ Content-Type: application/json **Example Response**: -```json +```http HTTP/1.1 200 Content-Type: application/json @@ -164,11 +164,14 @@ Content-Type: application/json `PUT /api/annotations/:id` +Updates all properties of an annotation that matches the specified id. To only update certain property, consider using the [Patch Annotation](#patch-annotation) operation. + **Example Request**: -```json +```http PUT /api/annotations/1141 HTTP/1.1 Accept: application/json +Authorization: Bearer eyJrIjoiT0tTcG1pUlY2RnVKZTFVaDFsNFZXdE9ZWmNrMkZYbk Content-Type: application/json { @@ -180,6 +183,50 @@ Content-Type: application/json } ``` +**Example Response**: + +```http +HTTP/1.1 200 +Content-Type: application/json + +{ + "message":"Annotation updated" +} +``` + +## Patch Annotation + +`PATCH /api/annotations/:id` + +Updates one or more properties of an annotation that matches the specified id. + +This operation currently supports updating of the `text`, `tags`, `time` and `timeEnd` properties. It does not handle updating of the `isRegion` and `regionId` properties. To make an annotation regional or vice versa, consider using the [Update Annotation](#update-annotation) operation. + +**Example Request**: + +```http +PATCH /api/annotations/1145 HTTP/1.1 +Accept: application/json +Authorization: Bearer eyJrIjoiT0tTcG1pUlY2RnVKZTFVaDFsNFZXdE9ZWmNrMkZYbk +Content-Type: application/json + +{ + "text":"New Annotation Description", + "tags":["tag6","tag7","tag8"] +} +``` + +**Example Response**: + +```http +HTTP/1.1 200 +Content-Type: application/json + +{ + "message":"Annotation patched" +} +``` + ## Delete Annotation By Id `DELETE /api/annotations/:id` @@ -201,7 +248,9 @@ Authorization: Bearer eyJrIjoiT0tTcG1pUlY2RnVKZTFVaDFsNFZXdE9ZWmNrMkZYbk HTTP/1.1 200 Content-Type: application/json -{"message":"Annotation deleted"} +{ + "message":"Annotation deleted" +} ``` ## Delete Annotation By RegionId @@ -225,5 +274,7 @@ Authorization: Bearer eyJrIjoiT0tTcG1pUlY2RnVKZTFVaDFsNFZXdE9ZWmNrMkZYbk HTTP/1.1 200 Content-Type: application/json -{"message":"Annotation region deleted"} +{ + "message":"Annotation region deleted" +} ``` diff --git a/docs/sources/installation/configuration.md b/docs/sources/installation/configuration.md index 46bab83654e..ac3dc6ebfd0 100644 --- a/docs/sources/installation/configuration.md +++ b/docs/sources/installation/configuration.md @@ -393,9 +393,7 @@ Analytics ID here. By default this feature is disabled. ### check_for_updates -Set to false to disable all checks to https://grafana.com for new versions of Grafana and installed plugins. Check is used -in some UI views to notify that a Grafana or plugin update exists. This option does not cause any auto updates, nor -send any sensitive information. +Set to false to disable all checks to https://grafana.com for new versions of installed plugins and to the Grafana GitHub repository to check for a newer version of Grafana. The version information is used in some UI views to notify that a new Grafana update or a plugin update exists. This option does not cause any auto updates, nor send any sensitive information. The check is run every 10 minutes.
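For reference, here is a minimal Go sketch of how a client could exercise the new `PATCH /api/annotations/:id` endpoint documented above. The base URL, API token, and annotation id below are placeholder assumptions for illustration, not values taken from this change.

```go
package main

import (
	"bytes"
	"encoding/json"
	"fmt"
	"net/http"
)

// patchAnnotation updates only the supplied fields of an annotation,
// matching the documented PATCH semantics (text, tags, time, timeEnd).
func patchAnnotation(baseURL, token string, id int64, payload map[string]interface{}) error {
	body, err := json.Marshal(payload)
	if err != nil {
		return err
	}

	req, err := http.NewRequest(http.MethodPatch, fmt.Sprintf("%s/api/annotations/%d", baseURL, id), bytes.NewReader(body))
	if err != nil {
		return err
	}
	req.Header.Set("Content-Type", "application/json")
	req.Header.Set("Authorization", "Bearer "+token)

	resp, err := http.DefaultClient.Do(req)
	if err != nil {
		return err
	}
	defer resp.Body.Close()

	if resp.StatusCode != http.StatusOK {
		return fmt.Errorf("unexpected status: %s", resp.Status)
	}
	return nil
}

func main() {
	// Placeholder base URL, token, and annotation id -- adjust for a real instance.
	err := patchAnnotation("http://localhost:3000", "<api-token>", 1145, map[string]interface{}{
		"text": "New Annotation Description",
		"tags": []string{"tag6", "tag7", "tag8"},
	})
	if err != nil {
		fmt.Println("patch failed:", err)
	}
}
```

Only the fields present in the payload are sent, which is the point of PATCH over PUT here: properties omitted from the request keep their stored values.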
diff --git a/docs/sources/reference/templating.md b/docs/sources/reference/templating.md index 7426877654b..bf3fbd6a229 100644 --- a/docs/sources/reference/templating.md +++ b/docs/sources/reference/templating.md @@ -50,6 +50,7 @@ Filter Option | Example | Raw | Interpolated | Description `regex` | ${servers:regex} | `'test.', 'test2'` | (test\.|test2) | Formats multi-value variable into a regex string `pipe` | ${servers:pipe} | `'test.', 'test2'` | test.|test2 | Formats multi-value variable into a pipe-separated string `csv`| ${servers:csv} | `'test1', 'test2'` | `test1,test2` | Formats multi-value variable as a comma-separated string +`json`| ${servers:json} | `'test1', 'test2'` | `["test1","test2"]` | Formats multi-value variable as a JSON string `distributed`| ${servers:distributed} | `'test1', 'test2'` | `test1,servers=test2` | Formats multi-value variable in custom format for OpenTSDB. `lucene`| ${servers:lucene} | `'test', 'test2'` | `("test" OR "test2")` | Formats multi-value variable as a lucene expression. `percentencode` | ${servers:percentencode} | `'foo()bar BAZ', 'test2'` | `{foo%28%29bar%20BAZ%2Ctest2}` | Formats multi-value variable into a glob, percent-encoded. diff --git a/packages/grafana-ui/src/components/Select/SelectOptionGroup.tsx b/packages/grafana-ui/src/components/Select/SelectOptionGroup.tsx index efc5e4516fc..9a787a84819 100644 --- a/packages/grafana-ui/src/components/Select/SelectOptionGroup.tsx +++ b/packages/grafana-ui/src/components/Select/SelectOptionGroup.tsx @@ -49,7 +49,7 @@ export default class SelectOptionGroup extends PureComponent
- {label} + {label} {' '}
{expanded && children} diff --git a/packages/grafana-ui/src/components/ValueMappingsEditor/ValueMappingsEditor.story.tsx b/packages/grafana-ui/src/components/ValueMappingsEditor/ValueMappingsEditor.story.tsx new file mode 100644 index 00000000000..85504f6cd09 --- /dev/null +++ b/packages/grafana-ui/src/components/ValueMappingsEditor/ValueMappingsEditor.story.tsx @@ -0,0 +1,10 @@ +import React from 'react'; +import { storiesOf } from '@storybook/react'; +import { action } from '@storybook/addon-actions'; +import { ValueMappingsEditor } from './ValueMappingsEditor'; + +const ValueMappingsEditorStories = storiesOf('UI/ValueMappingsEditor', module); + +ValueMappingsEditorStories.add('default', () => { + return ; +}); diff --git a/packages/grafana-ui/src/types/plugin.ts b/packages/grafana-ui/src/types/plugin.ts index 00735827825..c8f156c08dc 100644 --- a/packages/grafana-ui/src/types/plugin.ts +++ b/packages/grafana-ui/src/types/plugin.ts @@ -1,6 +1,6 @@ import { ComponentClass } from 'react'; import { PanelProps, PanelOptionsProps } from './panel'; -import { DataQueryOptions, DataQuery, DataQueryResponse, QueryHint } from './datasource'; +import { DataQueryOptions, DataQuery, DataQueryResponse, QueryHint, QueryFixAction } from './datasource'; export interface DataSourceApi { /** @@ -41,6 +41,12 @@ export interface DataSourceApi { pluginExports?: PluginExports; } +export interface ExploreDataSourceApi extends DataSourceApi { + modifyQuery?(query: TQuery, action: QueryFixAction): TQuery; + getHighlighterExpression?(query: TQuery): string; + languageProvider?: any; +} + export interface QueryEditorProps { datasource: DSType; query: TQuery; @@ -48,15 +54,30 @@ export interface QueryEditorProps void; } +export interface ExploreQueryFieldProps { + datasource: DSType; + query: TQuery; + error?: string | JSX.Element; + hint?: QueryHint; + history: any[]; + onExecuteQuery?: () => void; + onQueryChange?: (value: TQuery) => void; + onExecuteHint?: (action: QueryFixAction) => void; +} + +export interface ExploreStartPageProps { + onClickExample: (query: DataQuery) => void; +} + export interface PluginExports { Datasource?: DataSourceApi; QueryCtrl?: any; - QueryEditor?: ComponentClass>; + QueryEditor?: ComponentClass>; ConfigCtrl?: any; AnnotationsQueryCtrl?: any; VariableQueryEditor?: any; - ExploreQueryField?: any; - ExploreStartPage?: any; + ExploreQueryField?: ComponentClass>; + ExploreStartPage?: ComponentClass; // Panel plugin PanelCtrl?: any; @@ -114,5 +135,3 @@ export interface PluginMetaInfo { updated: string; version: string; } - - diff --git a/pkg/api/annotations.go b/pkg/api/annotations.go index 242b5531f51..de9d2517caa 100644 --- a/pkg/api/annotations.go +++ b/pkg/api/annotations.go @@ -210,6 +210,65 @@ func UpdateAnnotation(c *m.ReqContext, cmd dtos.UpdateAnnotationsCmd) Response { return Success("Annotation updated") } +func PatchAnnotation(c *m.ReqContext, cmd dtos.PatchAnnotationsCmd) Response { + annotationID := c.ParamsInt64(":annotationId") + + repo := annotations.GetRepository() + + if resp := canSave(c, repo, annotationID); resp != nil { + return resp + } + + items, err := repo.Find(&annotations.ItemQuery{AnnotationId: annotationID, OrgId: c.OrgId}) + + if err != nil || len(items) == 0 { + return Error(404, "Could not find annotation to update", err) + } + + existing := annotations.Item{ + OrgId: c.OrgId, + UserId: c.UserId, + Id: annotationID, + Epoch: items[0].Time, + Text: items[0].Text, + Tags: items[0].Tags, + RegionId: items[0].RegionId, + } + + if cmd.Tags != nil { + 
existing.Tags = cmd.Tags + } + + if cmd.Text != "" && cmd.Text != existing.Text { + existing.Text = cmd.Text + } + + if cmd.Time > 0 && cmd.Time != existing.Epoch { + existing.Epoch = cmd.Time + } + + if err := repo.Update(&existing); err != nil { + return Error(500, "Failed to update annotation", err) + } + + // Update region end time if provided + if existing.RegionId != 0 && cmd.TimeEnd > 0 { + itemRight := existing + itemRight.RegionId = existing.Id + itemRight.Epoch = cmd.TimeEnd + + // We don't know id of region right event, so set it to 0 and find then using query like + // ... WHERE region_id = AND id != ... + itemRight.Id = 0 + + if err := repo.Update(&itemRight); err != nil { + return Error(500, "Failed to update annotation for region end time", err) + } + } + + return Success("Annotation patched") +} + func DeleteAnnotations(c *m.ReqContext, cmd dtos.DeleteAnnotationsCmd) Response { repo := annotations.GetRepository() diff --git a/pkg/api/annotations_test.go b/pkg/api/annotations_test.go index 08f3018c694..ebdd867a031 100644 --- a/pkg/api/annotations_test.go +++ b/pkg/api/annotations_test.go @@ -27,6 +27,12 @@ func TestAnnotationsApiEndpoint(t *testing.T) { IsRegion: false, } + patchCmd := dtos.PatchAnnotationsCmd{ + Time: 1000, + Text: "annotation text", + Tags: []string{"tag1", "tag2"}, + } + Convey("When user is an Org Viewer", func() { role := m.ROLE_VIEWER Convey("Should not be allowed to save an annotation", func() { @@ -40,6 +46,11 @@ func TestAnnotationsApiEndpoint(t *testing.T) { So(sc.resp.Code, ShouldEqual, 403) }) + patchAnnotationScenario("When calling PATCH on", "/api/annotations/1", "/api/annotations/:annotationId", role, patchCmd, func(sc *scenarioContext) { + sc.fakeReqWithParams("PATCH", sc.url, map[string]string{}).exec() + So(sc.resp.Code, ShouldEqual, 403) + }) + loggedInUserScenarioWithRole("When calling DELETE on", "DELETE", "/api/annotations/1", "/api/annotations/:annotationId", role, func(sc *scenarioContext) { sc.handlerFunc = DeleteAnnotationByID sc.fakeReqWithParams("DELETE", sc.url, map[string]string{}).exec() @@ -67,6 +78,11 @@ func TestAnnotationsApiEndpoint(t *testing.T) { So(sc.resp.Code, ShouldEqual, 200) }) + patchAnnotationScenario("When calling PATCH on", "/api/annotations/1", "/api/annotations/:annotationId", role, patchCmd, func(sc *scenarioContext) { + sc.fakeReqWithParams("PATCH", sc.url, map[string]string{}).exec() + So(sc.resp.Code, ShouldEqual, 200) + }) + loggedInUserScenarioWithRole("When calling DELETE on", "DELETE", "/api/annotations/1", "/api/annotations/:annotationId", role, func(sc *scenarioContext) { sc.handlerFunc = DeleteAnnotationByID sc.fakeReqWithParams("DELETE", sc.url, map[string]string{}).exec() @@ -100,6 +116,13 @@ func TestAnnotationsApiEndpoint(t *testing.T) { Id: 1, } + patchCmd := dtos.PatchAnnotationsCmd{ + Time: 8000, + Text: "annotation text 50", + Tags: []string{"foo", "bar"}, + Id: 1, + } + deleteCmd := dtos.DeleteAnnotationsCmd{ DashboardId: 1, PanelId: 1, @@ -136,6 +159,11 @@ func TestAnnotationsApiEndpoint(t *testing.T) { So(sc.resp.Code, ShouldEqual, 403) }) + patchAnnotationScenario("When calling PATCH on", "/api/annotations/1", "/api/annotations/:annotationId", role, patchCmd, func(sc *scenarioContext) { + sc.fakeReqWithParams("PATCH", sc.url, map[string]string{}).exec() + So(sc.resp.Code, ShouldEqual, 403) + }) + loggedInUserScenarioWithRole("When calling DELETE on", "DELETE", "/api/annotations/1", "/api/annotations/:annotationId", role, func(sc *scenarioContext) { sc.handlerFunc = DeleteAnnotationByID 
sc.fakeReqWithParams("DELETE", sc.url, map[string]string{}).exec() @@ -163,6 +191,11 @@ func TestAnnotationsApiEndpoint(t *testing.T) { So(sc.resp.Code, ShouldEqual, 200) }) + patchAnnotationScenario("When calling PATCH on", "/api/annotations/1", "/api/annotations/:annotationId", role, patchCmd, func(sc *scenarioContext) { + sc.fakeReqWithParams("PATCH", sc.url, map[string]string{}).exec() + So(sc.resp.Code, ShouldEqual, 200) + }) + loggedInUserScenarioWithRole("When calling DELETE on", "DELETE", "/api/annotations/1", "/api/annotations/:annotationId", role, func(sc *scenarioContext) { sc.handlerFunc = DeleteAnnotationByID sc.fakeReqWithParams("DELETE", sc.url, map[string]string{}).exec() @@ -189,6 +222,12 @@ func TestAnnotationsApiEndpoint(t *testing.T) { sc.fakeReqWithParams("PUT", sc.url, map[string]string{}).exec() So(sc.resp.Code, ShouldEqual, 200) }) + + patchAnnotationScenario("When calling PATCH on", "/api/annotations/1", "/api/annotations/:annotationId", role, patchCmd, func(sc *scenarioContext) { + sc.fakeReqWithParams("PATCH", sc.url, map[string]string{}).exec() + So(sc.resp.Code, ShouldEqual, 200) + }) + deleteAnnotationsScenario("When calling POST on", "/api/annotations/mass-delete", "/api/annotations/mass-delete", role, deleteCmd, func(sc *scenarioContext) { sc.fakeReqWithParams("POST", sc.url, map[string]string{}).exec() So(sc.resp.Code, ShouldEqual, 200) @@ -264,6 +303,29 @@ func putAnnotationScenario(desc string, url string, routePattern string, role m. }) } +func patchAnnotationScenario(desc string, url string, routePattern string, role m.RoleType, cmd dtos.PatchAnnotationsCmd, fn scenarioFunc) { + Convey(desc+" "+url, func() { + defer bus.ClearBusHandlers() + + sc := setupScenarioContext(url) + sc.defaultHandler = Wrap(func(c *m.ReqContext) Response { + sc.context = c + sc.context.UserId = TestUserID + sc.context.OrgId = TestOrgID + sc.context.OrgRole = role + + return PatchAnnotation(c, cmd) + }) + + fakeAnnoRepo = &fakeAnnotationsRepo{} + annotations.SetRepository(fakeAnnoRepo) + + sc.m.Patch(routePattern, sc.defaultHandler) + + fn(sc) + }) +} + func deleteAnnotationsScenario(desc string, url string, routePattern string, role m.RoleType, cmd dtos.DeleteAnnotationsCmd, fn scenarioFunc) { Convey(desc+" "+url, func() { defer bus.ClearBusHandlers() diff --git a/pkg/api/api.go b/pkg/api/api.go index 07cb712f794..0685ef3814d 100644 --- a/pkg/api/api.go +++ b/pkg/api/api.go @@ -108,8 +108,8 @@ func (hs *HTTPServer) registerRoutes() { r.Get("/api/snapshots-delete/:deleteKey", Wrap(DeleteDashboardSnapshotByDeleteKey)) r.Delete("/api/snapshots/:key", reqEditorRole, Wrap(DeleteDashboardSnapshot)) - // api renew session based on remember cookie - r.Get("/api/login/ping", quota("session"), hs.LoginAPIPing) + // api renew session based on cookie + r.Get("/api/login/ping", quota("session"), Wrap(hs.LoginAPIPing)) // authed api r.Group("/api", func(apiRoute routing.RouteRegister) { @@ -354,6 +354,7 @@ func (hs *HTTPServer) registerRoutes() { annotationsRoute.Post("/", bind(dtos.PostAnnotationsCmd{}), Wrap(PostAnnotation)) annotationsRoute.Delete("/:annotationId", Wrap(DeleteAnnotationByID)) annotationsRoute.Put("/:annotationId", bind(dtos.UpdateAnnotationsCmd{}), Wrap(UpdateAnnotation)) + annotationsRoute.Patch("/:annotationId", bind(dtos.PatchAnnotationsCmd{}), Wrap(PatchAnnotation)) annotationsRoute.Delete("/region/:regionId", Wrap(DeleteAnnotationRegion)) annotationsRoute.Post("/graphite", reqEditorRole, bind(dtos.PostGraphiteAnnotationsCmd{}), Wrap(PostGraphiteAnnotation)) }) diff 
--git a/pkg/api/common_test.go b/pkg/api/common_test.go index eb1f89e3f22..fe02c94e277 100644 --- a/pkg/api/common_test.go +++ b/pkg/api/common_test.go @@ -149,4 +149,4 @@ func (s *fakeUserAuthTokenService) UserAuthenticatedHook(user *m.User, c *m.ReqC return nil } -func (s *fakeUserAuthTokenService) UserSignedOutHook(c *m.ReqContext) {} +func (s *fakeUserAuthTokenService) SignOutUser(c *m.ReqContext) error { return nil } diff --git a/pkg/api/dtos/annotations.go b/pkg/api/dtos/annotations.go index c917b0d9feb..bdee8599fea 100644 --- a/pkg/api/dtos/annotations.go +++ b/pkg/api/dtos/annotations.go @@ -22,6 +22,14 @@ type UpdateAnnotationsCmd struct { TimeEnd int64 `json:"timeEnd"` } +type PatchAnnotationsCmd struct { + Id int64 `json:"id"` + Time int64 `json:"time"` + Text string `json:"text"` + Tags []string `json:"tags"` + TimeEnd int64 `json:"timeEnd"` +} + type DeleteAnnotationsCmd struct { AlertId int64 `json:"alertId"` DashboardId int64 `json:"dashboardId"` diff --git a/pkg/api/login.go b/pkg/api/login.go index 50c62e0835a..49da147724e 100644 --- a/pkg/api/login.go +++ b/pkg/api/login.go @@ -136,7 +136,7 @@ func (hs *HTTPServer) loginUserWithUser(user *m.User, c *m.ReqContext) { } func (hs *HTTPServer) Logout(c *m.ReqContext) { - hs.AuthTokenService.UserSignedOutHook(c) + hs.AuthTokenService.SignOutUser(c) if setting.SignoutRedirectUrl != "" { c.Redirect(setting.SignoutRedirectUrl) diff --git a/pkg/middleware/middleware_test.go b/pkg/middleware/middleware_test.go index 11740574d0b..4679c449853 100644 --- a/pkg/middleware/middleware_test.go +++ b/pkg/middleware/middleware_test.go @@ -602,4 +602,4 @@ func (s *fakeUserAuthTokenService) UserAuthenticatedHook(user *m.User, c *m.ReqC return nil } -func (s *fakeUserAuthTokenService) UserSignedOutHook(c *m.ReqContext) {} +func (s *fakeUserAuthTokenService) SignOutUser(c *m.ReqContext) error { return nil } diff --git a/pkg/services/auth/auth_token.go b/pkg/services/auth/auth_token.go index 98687f2013d..13b9ef607f5 100644 --- a/pkg/services/auth/auth_token.go +++ b/pkg/services/auth/auth_token.go @@ -3,6 +3,7 @@ package auth import ( "crypto/sha256" "encoding/hex" + "errors" "net/http" "net/url" "time" @@ -31,7 +32,7 @@ var ( type UserAuthTokenService interface { InitContextWithToken(ctx *models.ReqContext, orgID int64) bool UserAuthenticatedHook(user *models.User, c *models.ReqContext) error - UserSignedOutHook(c *models.ReqContext) + SignOutUser(c *models.ReqContext) error } type UserAuthTokenServiceImpl struct { @@ -85,7 +86,7 @@ func (s *UserAuthTokenServiceImpl) InitContextWithToken(ctx *models.ReqContext, func (s *UserAuthTokenServiceImpl) writeSessionCookie(ctx *models.ReqContext, value string, maxAge int) { if setting.Env == setting.DEV { - ctx.Logger.Info("new token", "unhashed token", value) + ctx.Logger.Debug("new token", "unhashed token", value) } ctx.Resp.Header().Del("Set-Cookie") @@ -112,8 +113,19 @@ func (s *UserAuthTokenServiceImpl) UserAuthenticatedHook(user *models.User, c *m return nil } -func (s *UserAuthTokenServiceImpl) UserSignedOutHook(c *models.ReqContext) { +func (s *UserAuthTokenServiceImpl) SignOutUser(c *models.ReqContext) error { + unhashedToken := c.GetCookie(s.Cfg.LoginCookieName) + if unhashedToken == "" { + return errors.New("cannot logout without session token") + } + + hashedToken := hashToken(unhashedToken) + + sql := `DELETE FROM user_auth_token WHERE auth_token = ?` + _, err := s.SQLStore.NewSession().Exec(sql, hashedToken) + s.writeSessionCookie(c, "", -1) + return err } func (s *UserAuthTokenServiceImpl) 
CreateToken(userId int64, clientIP, userAgent string) (*userAuthToken, error) { diff --git a/pkg/services/auth/auth_token_test.go b/pkg/services/auth/auth_token_test.go index 2f75c660d9d..312e53a3970 100644 --- a/pkg/services/auth/auth_token_test.go +++ b/pkg/services/auth/auth_token_test.go @@ -1,10 +1,15 @@ package auth import ( + "fmt" + "net/http" + "net/http/httptest" "testing" "time" + "github.com/grafana/grafana/pkg/models" "github.com/grafana/grafana/pkg/setting" + macaron "gopkg.in/macaron.v1" "github.com/grafana/grafana/pkg/log" "github.com/grafana/grafana/pkg/services/sqlstore" @@ -46,6 +51,40 @@ func TestUserAuthToken(t *testing.T) { So(err, ShouldEqual, ErrAuthTokenNotFound) So(LookupToken, ShouldBeNil) }) + + Convey("signing out should delete token and cookie if present", func() { + httpreq := &http.Request{Header: make(http.Header)} + httpreq.AddCookie(&http.Cookie{Name: userAuthTokenService.Cfg.LoginCookieName, Value: token.UnhashedToken}) + + ctx := &models.ReqContext{Context: &macaron.Context{ + Req: macaron.Request{Request: httpreq}, + Resp: macaron.NewResponseWriter("POST", httptest.NewRecorder()), + }, + Logger: log.New("fakelogger"), + } + + err = userAuthTokenService.SignOutUser(ctx) + So(err, ShouldBeNil) + + // makes sure we tell the browser to overwrite the cookie + cookieHeader := fmt.Sprintf("%s=; Path=/; Max-Age=0; HttpOnly", userAuthTokenService.Cfg.LoginCookieName) + So(ctx.Resp.Header().Get("Set-Cookie"), ShouldEqual, cookieHeader) + }) + + Convey("signing out an none existing session should return an error", func() { + httpreq := &http.Request{Header: make(http.Header)} + httpreq.AddCookie(&http.Cookie{Name: userAuthTokenService.Cfg.LoginCookieName, Value: ""}) + + ctx := &models.ReqContext{Context: &macaron.Context{ + Req: macaron.Request{Request: httpreq}, + Resp: macaron.NewResponseWriter("POST", httptest.NewRecorder()), + }, + Logger: log.New("fakelogger"), + } + + err = userAuthTokenService.SignOutUser(ctx) + So(err, ShouldNotBeNil) + }) }) Convey("expires correctly", func() { diff --git a/pkg/services/sqlstore/sqlstore.go b/pkg/services/sqlstore/sqlstore.go index fb0f0938573..6debaca89a1 100644 --- a/pkg/services/sqlstore/sqlstore.go +++ b/pkg/services/sqlstore/sqlstore.go @@ -242,10 +242,7 @@ func (ss *SqlStore) buildConnectionString() (string, error) { cnnstr += ss.buildExtraConnectionString('&') case migrator.POSTGRES: - host, port, err := util.SplitIPPort(ss.dbCfg.Host, "5432") - if err != nil { - return "", err - } + host, port := util.SplitHostPortDefault(ss.dbCfg.Host, "127.0.0.1", "5432") if ss.dbCfg.Pwd == "" { ss.dbCfg.Pwd = "''" } diff --git a/pkg/tsdb/cloudwatch/cloudwatch.go b/pkg/tsdb/cloudwatch/cloudwatch.go index 8bb1ab6c928..8d67fe7db8c 100644 --- a/pkg/tsdb/cloudwatch/cloudwatch.go +++ b/pkg/tsdb/cloudwatch/cloudwatch.go @@ -21,6 +21,7 @@ import ( "github.com/aws/aws-sdk-go/aws/request" "github.com/aws/aws-sdk-go/service/cloudwatch" "github.com/aws/aws-sdk-go/service/ec2/ec2iface" + "github.com/aws/aws-sdk-go/service/resourcegroupstaggingapi/resourcegroupstaggingapiiface" "github.com/grafana/grafana/pkg/components/null" "github.com/grafana/grafana/pkg/components/simplejson" "github.com/grafana/grafana/pkg/metrics" @@ -28,7 +29,8 @@ import ( type CloudWatchExecutor struct { *models.DataSource - ec2Svc ec2iface.EC2API + ec2Svc ec2iface.EC2API + rgtaSvc resourcegroupstaggingapiiface.ResourceGroupsTaggingAPIAPI } type DatasourceInfo struct { diff --git a/pkg/tsdb/cloudwatch/metric_find_query.go b/pkg/tsdb/cloudwatch/metric_find_query.go 
index dfa03d2dfa9..34181d19673 100644 --- a/pkg/tsdb/cloudwatch/metric_find_query.go +++ b/pkg/tsdb/cloudwatch/metric_find_query.go @@ -15,6 +15,7 @@ import ( "github.com/aws/aws-sdk-go/aws/session" "github.com/aws/aws-sdk-go/service/cloudwatch" "github.com/aws/aws-sdk-go/service/ec2" + "github.com/aws/aws-sdk-go/service/resourcegroupstaggingapi" "github.com/grafana/grafana/pkg/components/simplejson" "github.com/grafana/grafana/pkg/metrics" "github.com/grafana/grafana/pkg/tsdb" @@ -95,10 +96,11 @@ func init() { "AWS/Logs": {"IncomingBytes", "IncomingLogEvents", "ForwardedBytes", "ForwardedLogEvents", "DeliveryErrors", "DeliveryThrottling"}, "AWS/ML": {"PredictCount", "PredictFailureCount"}, "AWS/NATGateway": {"PacketsOutToDestination", "PacketsOutToSource", "PacketsInFromSource", "PacketsInFromDestination", "BytesOutToDestination", "BytesOutToSource", "BytesInFromSource", "BytesInFromDestination", "ErrorPortAllocation", "ActiveConnectionCount", "ConnectionAttemptCount", "ConnectionEstablishedCount", "IdleTimeoutCount", "PacketsDropCount"}, + "AWS/Neptune": {"CPUUtilization", "ClusterReplicaLag", "ClusterReplicaLagMaximum", "ClusterReplicaLagMinimum", "EngineUptime", "FreeableMemory", "FreeLocalStorage", "GremlinHttp1xx", "GremlinHttp2xx", "GremlinHttp4xx", "GremlinHttp5xx", "GremlinErrors", "GremlinRequests", "GremlinRequestsPerSec", "GremlinWebSocketSuccess", "GremlinWebSocketClientErrors", "GremlinWebSocketServerErrors", "GremlinWebSocketAvailableConnections", "Http1xx", "Http2xx", "Http4xx", "Http5xx", "Http100", "Http101", "Http200", "Http400", "Http403", "Http405", "Http413", "Http429", "Http500", "Http501", "LoaderErrors", "LoaderRequests", "NetworkReceiveThroughput", "NetworkThroughput", "NetworkTransmitThroughput", "SparqlHttp1xx", "SparqlHttp2xx", "SparqlHttp4xx", "SparqlHttp5xx", "SparqlErrors", "SparqlRequests", "SparqlRequestsPerSec", "StatusErrors", "StatusRequests", "VolumeBytesUsed", "VolumeReadIOPs", "VolumeWriteIOPs"}, "AWS/NetworkELB": {"ActiveFlowCount", "ConsumedLCUs", "HealthyHostCount", "NewFlowCount", "ProcessedBytes", "TCP_Client_Reset_Count", "TCP_ELB_Reset_Count", "TCP_Target_Reset_Count", "UnHealthyHostCount"}, "AWS/OpsWorks": {"cpu_idle", "cpu_nice", "cpu_system", "cpu_user", "cpu_waitio", "load_1", "load_5", "load_15", "memory_buffers", "memory_cached", "memory_free", "memory_swap", "memory_total", "memory_used", "procs"}, "AWS/Redshift": {"CPUUtilization", "DatabaseConnections", "HealthStatus", "MaintenanceMode", "NetworkReceiveThroughput", "NetworkTransmitThroughput", "PercentageDiskSpaceUsed", "QueriesCompletedPerSecond", "QueryDuration", "QueryRuntimeBreakdown", "ReadIOPS", "ReadLatency", "ReadThroughput", "WLMQueriesCompletedPerSecond", "WLMQueryDuration", "WLMQueueLength", "WriteIOPS", "WriteLatency", "WriteThroughput"}, - "AWS/RDS": {"ActiveTransactions", "AuroraBinlogReplicaLag", "AuroraReplicaLag", "AuroraReplicaLagMaximum", "AuroraReplicaLagMinimum", "BinLogDiskUsage", "BlockedTransactions", "BufferCacheHitRatio", "BurstBalance", "CommitLatency", "CommitThroughput", "BinLogDiskUsage", "CPUCreditBalance", "CPUCreditUsage", "CPUUtilization", "DatabaseConnections", "DDLLatency", "DDLThroughput", "Deadlocks", "DeleteLatency", "DeleteThroughput", "DiskQueueDepth", "DMLLatency", "DMLThroughput", "EngineUptime", "FailedSqlStatements", "FreeableMemory", "FreeLocalStorage", "FreeStorageSpace", "InsertLatency", "InsertThroughput", "LoginFailures", "NetworkReceiveThroughput", "NetworkTransmitThroughput", "NetworkThroughput", "Queries", "ReadIOPS", "ReadLatency", 
"ReadThroughput", "ReplicaLag", "ResultSetCacheHitRatio", "SelectLatency", "SelectThroughput", "SwapUsage", "TotalConnections", "UpdateLatency", "UpdateThroughput", "VolumeBytesUsed", "VolumeReadIOPS", "VolumeWriteIOPS", "WriteIOPS", "WriteLatency", "WriteThroughput"}, + "AWS/RDS": {"ActiveTransactions", "AuroraBinlogReplicaLag", "AuroraReplicaLag", "AuroraReplicaLagMaximum", "AuroraReplicaLagMinimum", "BinLogDiskUsage", "BlockedTransactions", "BufferCacheHitRatio", "BurstBalance", "CommitLatency", "CommitThroughput", "BinLogDiskUsage", "CPUCreditBalance", "CPUCreditUsage", "CPUUtilization", "DatabaseConnections", "DDLLatency", "DDLThroughput", "Deadlocks", "DeleteLatency", "DeleteThroughput", "DiskQueueDepth", "DMLLatency", "DMLThroughput", "EngineUptime", "FailedSqlStatements", "FreeableMemory", "FreeLocalStorage", "FreeStorageSpace", "InsertLatency", "InsertThroughput", "LoginFailures", "NetworkReceiveThroughput", "NetworkTransmitThroughput", "NetworkThroughput", "Queries", "ReadIOPS", "ReadLatency", "ReadThroughput", "ReplicaLag", "ResultSetCacheHitRatio", "SelectLatency", "SelectThroughput", "ServerlessDatabaseCapacity", "SwapUsage", "TotalConnections", "UpdateLatency", "UpdateThroughput", "VolumeBytesUsed", "VolumeReadIOPS", "VolumeWriteIOPS", "WriteIOPS", "WriteLatency", "WriteThroughput"}, "AWS/Route53": {"ChildHealthCheckHealthyCount", "HealthCheckStatus", "HealthCheckPercentageHealthy", "ConnectionTime", "SSLHandshakeTime", "TimeToFirstByte"}, "AWS/S3": {"BucketSizeBytes", "NumberOfObjects", "AllRequests", "GetRequests", "PutRequests", "DeleteRequests", "HeadRequests", "PostRequests", "ListRequests", "BytesDownloaded", "BytesUploaded", "4xxErrors", "5xxErrors", "FirstByteLatency", "TotalRequestLatency"}, "AWS/SES": {"Bounce", "Complaint", "Delivery", "Reject", "Send", "Reputation.BounceRate", "Reputation.ComplaintRate"}, @@ -149,6 +151,7 @@ func init() { "AWS/Logs": {"LogGroupName", "DestinationType", "FilterName"}, "AWS/ML": {"MLModelId", "RequestMode"}, "AWS/NATGateway": {"NatGatewayId"}, + "AWS/Neptune": {"DBClusterIdentifier", "Role", "DatabaseClass", "EngineName"}, "AWS/NetworkELB": {"LoadBalancer", "TargetGroup", "AvailabilityZone"}, "AWS/OpsWorks": {"StackId", "LayerId", "InstanceId"}, "AWS/Redshift": {"NodeID", "ClusterIdentifier", "latency", "service class", "wmlid"}, @@ -198,6 +201,8 @@ func (e *CloudWatchExecutor) executeMetricFindQuery(ctx context.Context, queryCo data, err = e.handleGetEbsVolumeIds(ctx, parameters, queryContext) case "ec2_instance_attribute": data, err = e.handleGetEc2InstanceAttribute(ctx, parameters, queryContext) + case "resource_arns": + data, err = e.handleGetResourceArns(ctx, parameters, queryContext) } transformToTable(data, queryResult) @@ -534,6 +539,65 @@ func (e *CloudWatchExecutor) handleGetEc2InstanceAttribute(ctx context.Context, return result, nil } +func (e *CloudWatchExecutor) ensureRGTAClientSession(region string) error { + if e.rgtaSvc == nil { + dsInfo := e.getDsInfo(region) + cfg, err := e.getAwsConfig(dsInfo) + if err != nil { + return fmt.Errorf("Failed to call ec2:getAwsConfig, %v", err) + } + sess, err := session.NewSession(cfg) + if err != nil { + return fmt.Errorf("Failed to call ec2:NewSession, %v", err) + } + e.rgtaSvc = resourcegroupstaggingapi.New(sess, cfg) + } + return nil +} + +func (e *CloudWatchExecutor) handleGetResourceArns(ctx context.Context, parameters *simplejson.Json, queryContext *tsdb.TsdbQuery) ([]suggestData, error) { + region := parameters.Get("region").MustString() + resourceType := 
parameters.Get("resourceType").MustString() + filterJson := parameters.Get("tags").MustMap() + + err := e.ensureRGTAClientSession(region) + if err != nil { + return nil, err + } + + var filters []*resourcegroupstaggingapi.TagFilter + for k, v := range filterJson { + if vv, ok := v.([]interface{}); ok { + var vvvvv []*string + for _, vvv := range vv { + if vvvv, ok := vvv.(string); ok { + vvvvv = append(vvvvv, &vvvv) + } + } + filters = append(filters, &resourcegroupstaggingapi.TagFilter{ + Key: aws.String(k), + Values: vvvvv, + }) + } + } + + var resourceTypes []*string + resourceTypes = append(resourceTypes, &resourceType) + + resources, err := e.resourceGroupsGetResources(region, filters, resourceTypes) + if err != nil { + return nil, err + } + + result := make([]suggestData, 0) + for _, resource := range resources.ResourceTagMappingList { + data := *resource.ResourceARN + result = append(result, suggestData{Text: data, Value: data}) + } + + return result, nil +} + func (e *CloudWatchExecutor) cloudwatchListMetrics(region string, namespace string, metricName string, dimensions []*cloudwatch.DimensionFilter) (*cloudwatch.ListMetricsOutput, error) { svc, err := e.getClient(region) if err != nil { @@ -585,6 +649,28 @@ func (e *CloudWatchExecutor) ec2DescribeInstances(region string, filters []*ec2. return &resp, nil } +func (e *CloudWatchExecutor) resourceGroupsGetResources(region string, filters []*resourcegroupstaggingapi.TagFilter, resourceTypes []*string) (*resourcegroupstaggingapi.GetResourcesOutput, error) { + params := &resourcegroupstaggingapi.GetResourcesInput{ + ResourceTypeFilters: resourceTypes, + TagFilters: filters, + } + + var resp resourcegroupstaggingapi.GetResourcesOutput + err := e.rgtaSvc.GetResourcesPages(params, + func(page *resourcegroupstaggingapi.GetResourcesOutput, lastPage bool) bool { + resources, _ := awsutil.ValuesAtPath(page, "ResourceTagMappingList") + for _, resource := range resources { + resp.ResourceTagMappingList = append(resp.ResourceTagMappingList, resource.(*resourcegroupstaggingapi.ResourceTagMapping)) + } + return !lastPage + }) + if err != nil { + return nil, errors.New("Failed to call tags:GetResources") + } + + return &resp, nil +} + func getAllMetrics(cwData *DatasourceInfo) (cloudwatch.ListMetricsOutput, error) { creds, err := GetCredentials(cwData) if err != nil { diff --git a/pkg/tsdb/cloudwatch/metric_find_query_test.go b/pkg/tsdb/cloudwatch/metric_find_query_test.go index 34c3379b4df..bc6c8b163a0 100644 --- a/pkg/tsdb/cloudwatch/metric_find_query_test.go +++ b/pkg/tsdb/cloudwatch/metric_find_query_test.go @@ -8,6 +8,8 @@ import ( "github.com/aws/aws-sdk-go/service/cloudwatch" "github.com/aws/aws-sdk-go/service/ec2" "github.com/aws/aws-sdk-go/service/ec2/ec2iface" + "github.com/aws/aws-sdk-go/service/resourcegroupstaggingapi" + "github.com/aws/aws-sdk-go/service/resourcegroupstaggingapi/resourcegroupstaggingapiiface" "github.com/bmizerany/assert" "github.com/grafana/grafana/pkg/components/securejsondata" "github.com/grafana/grafana/pkg/components/simplejson" @@ -22,6 +24,11 @@ type mockedEc2 struct { RespRegions ec2.DescribeRegionsOutput } +type mockedRGTA struct { + resourcegroupstaggingapiiface.ResourceGroupsTaggingAPIAPI + Resp resourcegroupstaggingapi.GetResourcesOutput +} + func (m mockedEc2) DescribeInstancesPages(in *ec2.DescribeInstancesInput, fn func(*ec2.DescribeInstancesOutput, bool) bool) error { fn(&m.Resp, true) return nil @@ -30,6 +37,11 @@ func (m mockedEc2) DescribeRegions(in *ec2.DescribeRegionsInput) (*ec2.DescribeR return 
&m.RespRegions, nil } +func (m mockedRGTA) GetResourcesPages(in *resourcegroupstaggingapi.GetResourcesInput, fn func(*resourcegroupstaggingapi.GetResourcesOutput, bool) bool) error { + fn(&m.Resp, true) + return nil +} + func TestCloudWatchMetrics(t *testing.T) { Convey("When calling getMetricsForCustomMetrics", t, func() { @@ -209,6 +221,51 @@ func TestCloudWatchMetrics(t *testing.T) { So(result[7].Text, ShouldEqual, "vol-4-2") }) }) + + Convey("When calling handleGetResourceArns", t, func() { + executor := &CloudWatchExecutor{ + rgtaSvc: mockedRGTA{ + Resp: resourcegroupstaggingapi.GetResourcesOutput{ + ResourceTagMappingList: []*resourcegroupstaggingapi.ResourceTagMapping{ + { + ResourceARN: aws.String("arn:aws:ec2:us-east-1:123456789012:instance/i-12345678901234567"), + Tags: []*resourcegroupstaggingapi.Tag{ + { + Key: aws.String("Environment"), + Value: aws.String("production"), + }, + }, + }, + { + ResourceARN: aws.String("arn:aws:ec2:us-east-1:123456789012:instance/i-76543210987654321"), + Tags: []*resourcegroupstaggingapi.Tag{ + { + Key: aws.String("Environment"), + Value: aws.String("production"), + }, + }, + }, + }, + }, + }, + } + + json := simplejson.New() + json.Set("region", "us-east-1") + json.Set("resourceType", "ec2:instance") + tags := make(map[string]interface{}) + tags["Environment"] = []string{"production"} + json.Set("tags", tags) + result, _ := executor.handleGetResourceArns(context.Background(), json, &tsdb.TsdbQuery{}) + + Convey("Should return all two instances", func() { + So(result[0].Text, ShouldEqual, "arn:aws:ec2:us-east-1:123456789012:instance/i-12345678901234567") + So(result[0].Value, ShouldEqual, "arn:aws:ec2:us-east-1:123456789012:instance/i-12345678901234567") + So(result[1].Text, ShouldEqual, "arn:aws:ec2:us-east-1:123456789012:instance/i-76543210987654321") + So(result[1].Value, ShouldEqual, "arn:aws:ec2:us-east-1:123456789012:instance/i-76543210987654321") + + }) + }) } func TestParseMultiSelectValue(t *testing.T) { diff --git a/pkg/tsdb/mssql/mssql.go b/pkg/tsdb/mssql/mssql.go index bd4510f6cf3..12f2b6c03c9 100644 --- a/pkg/tsdb/mssql/mssql.go +++ b/pkg/tsdb/mssql/mssql.go @@ -49,10 +49,7 @@ func generateConnectionString(datasource *models.DataSource) (string, error) { } } - server, port, err := util.SplitIPPort(datasource.Url, "1433") - if err != nil { - return "", err - } + server, port := util.SplitHostPortDefault(datasource.Url, "localhost", "1433") encrypt := datasource.JsonData.Get("encrypt").MustString("false") connStr := fmt.Sprintf("server=%s;port=%s;database=%s;user id=%s;password=%s;", diff --git a/pkg/util/ip.go b/pkg/util/ip.go deleted file mode 100644 index d3809318191..00000000000 --- a/pkg/util/ip.go +++ /dev/null @@ -1,25 +0,0 @@ -package util - -import ( - "net" -) - -// SplitIPPort splits the ip string and port. 
-func SplitIPPort(ipStr string, portDefault string) (ip string, port string, err error) { - ipAddr := net.ParseIP(ipStr) - - if ipAddr == nil { - // Port was included - ip, port, err = net.SplitHostPort(ipStr) - - if err != nil { - return "", "", err - } - } else { - // No port was included - ip = ipAddr.String() - port = portDefault - } - - return ip, port, nil -} diff --git a/pkg/util/ip_address.go b/pkg/util/ip_address.go index d8d95ef3acd..b5ffb361e0b 100644 --- a/pkg/util/ip_address.go +++ b/pkg/util/ip_address.go @@ -7,23 +7,48 @@ import ( // ParseIPAddress parses an IP address and removes port and/or IPV6 format func ParseIPAddress(input string) string { - s := input - lastIndex := strings.LastIndex(input, ":") + host, _ := SplitHostPort(input) - if lastIndex != -1 { - if lastIndex > 0 && input[lastIndex-1:lastIndex] != ":" { - s = input[:lastIndex] - } + ip := net.ParseIP(host) + + if ip == nil { + return host } - s = strings.Replace(s, "[", "", -1) - s = strings.Replace(s, "]", "", -1) - - ip := net.ParseIP(s) - if ip.IsLoopback() { return "127.0.0.1" } return ip.String() } + +// SplitHostPortDefault splits ip address/hostname string by host and port. Defaults used if no match found +func SplitHostPortDefault(input, defaultHost, defaultPort string) (host string, port string) { + port = defaultPort + s := input + lastIndex := strings.LastIndex(input, ":") + + if lastIndex != -1 { + if lastIndex > 0 && input[lastIndex-1:lastIndex] != ":" { + s = input[:lastIndex] + port = input[lastIndex+1:] + } else if lastIndex == 0 { + s = defaultHost + port = input[lastIndex+1:] + } + } else { + port = defaultPort + } + + s = strings.Replace(s, "[", "", -1) + s = strings.Replace(s, "]", "", -1) + port = strings.Replace(port, "[", "", -1) + port = strings.Replace(port, "]", "", -1) + + return s, port +} + +// SplitHostPort splits ip address/hostname string by host and port +func SplitHostPort(input string) (host string, port string) { + return SplitHostPortDefault(input, "", "") +} diff --git a/pkg/util/ip_address_test.go b/pkg/util/ip_address_test.go index fd3e3ea8587..b926de1a36b 100644 --- a/pkg/util/ip_address_test.go +++ b/pkg/util/ip_address_test.go @@ -9,8 +9,90 @@ import ( func TestParseIPAddress(t *testing.T) { Convey("Test parse ip address", t, func() { So(ParseIPAddress("192.168.0.140:456"), ShouldEqual, "192.168.0.140") + So(ParseIPAddress("192.168.0.140"), ShouldEqual, "192.168.0.140") So(ParseIPAddress("[::1:456]"), ShouldEqual, "127.0.0.1") So(ParseIPAddress("[::1]"), ShouldEqual, "127.0.0.1") - So(ParseIPAddress("192.168.0.140"), ShouldEqual, "192.168.0.140") + So(ParseIPAddress("::1"), ShouldEqual, "127.0.0.1") + So(ParseIPAddress("::1:123"), ShouldEqual, "127.0.0.1") + }) +} + +func TestSplitHostPortDefault(t *testing.T) { + Convey("Test split ip address to host and port", t, func() { + host, port := SplitHostPortDefault("192.168.0.140:456", "", "") + So(host, ShouldEqual, "192.168.0.140") + So(port, ShouldEqual, "456") + + host, port = SplitHostPortDefault("192.168.0.140", "", "123") + So(host, ShouldEqual, "192.168.0.140") + So(port, ShouldEqual, "123") + + host, port = SplitHostPortDefault("[::1:456]", "", "") + So(host, ShouldEqual, "::1") + So(port, ShouldEqual, "456") + + host, port = SplitHostPortDefault("[::1]", "", "123") + So(host, ShouldEqual, "::1") + So(port, ShouldEqual, "123") + + host, port = SplitHostPortDefault("::1:123", "", "") + So(host, ShouldEqual, "::1") + So(port, ShouldEqual, "123") + + host, port = SplitHostPortDefault("::1", "", "123") + So(host, 
ShouldEqual, "::1") + So(port, ShouldEqual, "123") + + host, port = SplitHostPortDefault(":456", "1.2.3.4", "") + So(host, ShouldEqual, "1.2.3.4") + So(port, ShouldEqual, "456") + + host, port = SplitHostPortDefault("xyz.rds.amazonaws.com", "", "123") + So(host, ShouldEqual, "xyz.rds.amazonaws.com") + So(port, ShouldEqual, "123") + + host, port = SplitHostPortDefault("xyz.rds.amazonaws.com:123", "", "") + So(host, ShouldEqual, "xyz.rds.amazonaws.com") + So(port, ShouldEqual, "123") + }) +} + +func TestSplitHostPort(t *testing.T) { + Convey("Test split ip address to host and port", t, func() { + host, port := SplitHostPort("192.168.0.140:456") + So(host, ShouldEqual, "192.168.0.140") + So(port, ShouldEqual, "456") + + host, port = SplitHostPort("192.168.0.140") + So(host, ShouldEqual, "192.168.0.140") + So(port, ShouldEqual, "") + + host, port = SplitHostPort("[::1:456]") + So(host, ShouldEqual, "::1") + So(port, ShouldEqual, "456") + + host, port = SplitHostPort("[::1]") + So(host, ShouldEqual, "::1") + So(port, ShouldEqual, "") + + host, port = SplitHostPort("::1:123") + So(host, ShouldEqual, "::1") + So(port, ShouldEqual, "123") + + host, port = SplitHostPort("::1") + So(host, ShouldEqual, "::1") + So(port, ShouldEqual, "") + + host, port = SplitHostPort(":456") + So(host, ShouldEqual, "") + So(port, ShouldEqual, "456") + + host, port = SplitHostPort("xyz.rds.amazonaws.com") + So(host, ShouldEqual, "xyz.rds.amazonaws.com") + So(port, ShouldEqual, "") + + host, port = SplitHostPort("xyz.rds.amazonaws.com:123") + So(host, ShouldEqual, "xyz.rds.amazonaws.com") + So(port, ShouldEqual, "123") }) } diff --git a/pkg/util/ip_test.go b/pkg/util/ip_test.go deleted file mode 100644 index 3a62a080e26..00000000000 --- a/pkg/util/ip_test.go +++ /dev/null @@ -1,43 +0,0 @@ -package util - -import ( - "testing" - - . 
"github.com/smartystreets/goconvey/convey" -) - -func TestSplitIPPort(t *testing.T) { - - Convey("When parsing an IPv4 without explicit port", t, func() { - ip, port, err := SplitIPPort("1.2.3.4", "5678") - - So(err, ShouldEqual, nil) - So(ip, ShouldEqual, "1.2.3.4") - So(port, ShouldEqual, "5678") - }) - - Convey("When parsing an IPv6 without explicit port", t, func() { - ip, port, err := SplitIPPort("::1", "5678") - - So(err, ShouldEqual, nil) - So(ip, ShouldEqual, "::1") - So(port, ShouldEqual, "5678") - }) - - Convey("When parsing an IPv4 with explicit port", t, func() { - ip, port, err := SplitIPPort("1.2.3.4:56", "78") - - So(err, ShouldEqual, nil) - So(ip, ShouldEqual, "1.2.3.4") - So(port, ShouldEqual, "56") - }) - - Convey("When parsing an IPv6 with explicit port", t, func() { - ip, port, err := SplitIPPort("[::1]:56", "78") - - So(err, ShouldEqual, nil) - So(ip, ShouldEqual, "::1") - So(port, ShouldEqual, "56") - }) - -} diff --git a/public/app/core/services/backend_srv.ts b/public/app/core/services/backend_srv.ts index 38d7f2b76cb..c73cc7661f5 100644 --- a/public/app/core/services/backend_srv.ts +++ b/public/app/core/services/backend_srv.ts @@ -1,6 +1,7 @@ import _ from 'lodash'; import coreModule from 'app/core/core_module'; import appEvents from 'app/core/app_events'; +import config from 'app/core/config'; import { DashboardModel } from 'app/features/dashboard/state/DashboardModel'; export class BackendSrv { @@ -103,10 +104,17 @@ export class BackendSrv { err => { // handle unauthorized if (err.status === 401 && this.contextSrv.user.isSignedIn && firstAttempt) { - return this.loginPing().then(() => { - options.retry = 1; - return this.request(options); - }); + return this.loginPing() + .then(() => { + options.retry = 1; + return this.request(options); + }) + .catch(err => { + if (err.status === 401) { + window.location.href = config.appSubUrl + '/logout'; + throw err; + } + }); } this.$timeout(this.requestErrorHandler.bind(this, err), 50); @@ -184,13 +192,20 @@ export class BackendSrv { // handle unauthorized for backend requests if (requestIsLocal && firstAttempt && err.status === 401) { - return this.loginPing().then(() => { - options.retry = 1; - if (canceler) { - canceler.resolve(); - } - return this.datasourceRequest(options); - }); + return this.loginPing() + .then(() => { + options.retry = 1; + if (canceler) { + canceler.resolve(); + } + return this.datasourceRequest(options); + }) + .catch(err => { + if (err.status === 401) { + window.location.href = config.appSubUrl + '/logout'; + throw err; + } + }); } // populate error obj on Internal Error diff --git a/public/app/core/utils/explore.test.ts b/public/app/core/utils/explore.test.ts index 32135eab90a..1c00142c3b8 100644 --- a/public/app/core/utils/explore.test.ts +++ b/public/app/core/utils/explore.test.ts @@ -13,6 +13,11 @@ const DEFAULT_EXPLORE_STATE: ExploreUrlState = { datasource: null, queries: [], range: DEFAULT_RANGE, + ui: { + showingGraph: true, + showingTable: true, + showingLogs: true, + } }; describe('state functions', () => { @@ -69,9 +74,11 @@ describe('state functions', () => { to: 'now', }, }; + expect(serializeStateToUrlParam(state)).toBe( '{"datasource":"foo","queries":[{"expr":"metric{test=\\"a/b\\"}"},' + - '{"expr":"super{foo=\\"x/z\\"}"}],"range":{"from":"now-5h","to":"now"}}' + '{"expr":"super{foo=\\"x/z\\"}"}],"range":{"from":"now-5h","to":"now"},' + + '"ui":{"showingGraph":true,"showingTable":true,"showingLogs":true}}' ); }); @@ -93,7 +100,7 @@ describe('state functions', () => { }, }; 
expect(serializeStateToUrlParam(state, true)).toBe( - '["now-5h","now","foo",{"expr":"metric{test=\\"a/b\\"}"},{"expr":"super{foo=\\"x/z\\"}"}]' + '["now-5h","now","foo",{"expr":"metric{test=\\"a/b\\"}"},{"expr":"super{foo=\\"x/z\\"}"},{"ui":[true,true,true]}]' ); }); }); @@ -118,7 +125,28 @@ describe('state functions', () => { }; const serialized = serializeStateToUrlParam(state); const parsed = parseUrlState(serialized); + expect(state).toMatchObject(parsed); + }); + it('can parse the compact serialized state into the original state', () => { + const state = { + ...DEFAULT_EXPLORE_STATE, + datasource: 'foo', + queries: [ + { + expr: 'metric{test="a/b"}', + }, + { + expr: 'super{foo="x/z"}', + }, + ], + range: { + from: 'now - 5h', + to: 'now', + }, + }; + const serialized = serializeStateToUrlParam(state, true); + const parsed = parseUrlState(serialized); expect(state).toMatchObject(parsed); }); }); diff --git a/public/app/core/utils/explore.ts b/public/app/core/utils/explore.ts index 7a9f54a0cae..107f411353c 100644 --- a/public/app/core/utils/explore.ts +++ b/public/app/core/utils/explore.ts @@ -11,7 +11,7 @@ import { colors } from '@grafana/ui'; import TableModel, { mergeTablesIntoModel } from 'app/core/table_model'; // Types -import { RawTimeRange, IntervalValues, DataQuery } from '@grafana/ui/src/types'; +import { RawTimeRange, IntervalValues, DataQuery, DataSourceApi } from '@grafana/ui/src/types'; import TimeSeries from 'app/core/time_series2'; import { ExploreUrlState, @@ -27,6 +27,12 @@ export const DEFAULT_RANGE = { to: 'now', }; +export const DEFAULT_UI_STATE = { + showingTable: true, + showingGraph: true, + showingLogs: true, +}; + const MAX_HISTORY_ITEMS = 100; export const LAST_USED_DATASOURCE_KEY = 'grafana.explore.datasource'; @@ -147,7 +153,12 @@ export function buildQueryTransaction( export const clearQueryKeys: ((query: DataQuery) => object) = ({ key, refId, ...rest }) => rest; +const isMetricSegment = (segment: { [key: string]: string }) => segment.hasOwnProperty('expr'); +const isUISegment = (segment: { [key: string]: string }) => segment.hasOwnProperty('ui'); + export function parseUrlState(initial: string | undefined): ExploreUrlState { + let uiState = DEFAULT_UI_STATE; + if (initial) { try { const parsed = JSON.parse(decodeURI(initial)); @@ -160,20 +171,41 @@ export function parseUrlState(initial: string | undefined): ExploreUrlState { to: parsed[1], }; const datasource = parsed[2]; - const queries = parsed.slice(3); - return { datasource, queries, range }; + let queries = []; + + parsed.slice(3).forEach(segment => { + if (isMetricSegment(segment)) { + queries = [...queries, segment]; + } + + if (isUISegment(segment)) { + uiState = { + showingGraph: segment.ui[0], + showingLogs: segment.ui[1], + showingTable: segment.ui[2], + }; + } + }); + + return { datasource, queries, range, ui: uiState }; } return parsed; } catch (e) { console.error(e); } } - return { datasource: null, queries: [], range: DEFAULT_RANGE }; + return { datasource: null, queries: [], range: DEFAULT_RANGE, ui: uiState }; } export function serializeStateToUrlParam(urlState: ExploreUrlState, compact?: boolean): string { if (compact) { - return JSON.stringify([urlState.range.from, urlState.range.to, urlState.datasource, ...urlState.queries]); + return JSON.stringify([ + urlState.range.from, + urlState.range.to, + urlState.datasource, + ...urlState.queries, + { ui: [!!urlState.ui.showingGraph, !!urlState.ui.showingLogs, !!urlState.ui.showingTable] }, + ]); } return JSON.stringify(urlState); } @@ -304,3 
+336,12 @@ export function clearHistory(datasourceId: string) {
   const historyKey = `grafana.explore.history.${datasourceId}`;
   store.delete(historyKey);
 }
+
+export const getQueryKeys = (queries: DataQuery[], datasourceInstance: DataSourceApi): string[] => {
+  const queryKeys = queries.reduce((newQueryKeys, query, index) => {
+    const primaryKey = datasourceInstance && datasourceInstance.name ? datasourceInstance.name : query.key;
+    return newQueryKeys.concat(`${primaryKey}-${index}`);
+  }, []);
+
+  return queryKeys;
+};
diff --git a/public/app/features/dashboard/components/AddPanelWidget/AddPanelWidget.test.tsx b/public/app/features/dashboard/components/AddPanelWidget/AddPanelWidget.test.tsx
new file mode 100644
index 00000000000..91da066e4cc
--- /dev/null
+++ b/public/app/features/dashboard/components/AddPanelWidget/AddPanelWidget.test.tsx
@@ -0,0 +1,23 @@
+import React from 'react';
+import { shallow } from 'enzyme';
+import { AddPanelWidget, Props } from './AddPanelWidget';
+import { DashboardModel, PanelModel } from '../../state';
+
+const setup = (propOverrides?: object) => {
+  const props: Props = {
+    dashboard: {} as DashboardModel,
+    panel: {} as PanelModel,
+  };
+
+  Object.assign(props, propOverrides);
+
+  return shallow(<AddPanelWidget {...props} />);
+};
+
+describe('Render', () => {
+  it('should render component', () => {
+    const wrapper = setup();
+
+    expect(wrapper).toMatchSnapshot();
+  });
+});
diff --git a/public/app/features/dashboard/components/AddPanelWidget/AddPanelWidget.tsx b/public/app/features/dashboard/components/AddPanelWidget/AddPanelWidget.tsx
index 8c1ab93cec1..135b04a8ac5 100644
--- a/public/app/features/dashboard/components/AddPanelWidget/AddPanelWidget.tsx
+++ b/public/app/features/dashboard/components/AddPanelWidget/AddPanelWidget.tsx
@@ -1,12 +1,20 @@
+// Libraries
 import React from 'react';
 import _ from 'lodash';
+
+// Utils
 import config from 'app/core/config';
-import { PanelModel } from '../../state/PanelModel';
-import { DashboardModel } from '../../state/DashboardModel';
 import store from 'app/core/store';
-import { LS_PANEL_COPY_KEY } from 'app/core/constants';
-import { updateLocation } from 'app/core/actions';
+
+// Store
 import { store as reduxStore } from 'app/store/store';
+import { updateLocation } from 'app/core/actions';
+
+// Types
+import { PanelModel } from '../../state';
+import { DashboardModel } from '../../state';
+import { LS_PANEL_COPY_KEY } from 'app/core/constants';
+import { LocationUpdate } from 'app/types';
 
 export interface Props {
   panel: PanelModel;
@@ -46,6 +54,7 @@ export class AddPanelWidget extends React.Component {
       copiedPanels.push(pluginCopy);
       }
     }
+
     return _.sortBy(copiedPanels, 'sort');
   }
 
@@ -54,28 +63,7 @@ export class AddPanelWidget extends React.Component {
     this.props.dashboard.removePanel(this.props.dashboard.panels[0]);
   }
 
-  copyButton(panel) {
-    return (
-
-    );
-  }
-
-  moveToEdit(panel) {
-    reduxStore.dispatch(
-      updateLocation({
-        query: {
-          panelId: panel.id,
-          edit: true,
-          fullscreen: true,
-        },
-        partial: true,
-      })
-    );
-  }
-
-  onCreateNewPanel = () => {
+  onCreateNewPanel = (tab = 'queries') => {
     const dashboard = this.props.dashboard;
     const { gridPos } = this.props.panel;
@@ -88,7 +76,21 @@ export class AddPanelWidget extends React.Component {
     dashboard.addPanel(newPanel);
     dashboard.removePanel(this.props.panel);
 
-    this.moveToEdit(newPanel);
+    const location: LocationUpdate = {
+      query: {
+        panelId: newPanel.id,
+        edit: true,
+        fullscreen: true,
+      },
+      partial: true,
+    };
+
+    if (tab === 'visualization') {
+      location.query.tab = 'visualization';
+      location.query.openVizPicker = true;
+    }
+
+    reduxStore.dispatch(updateLocation(location));
   };
 
   onPasteCopiedPanel = panelPluginInfo => {
@@ -125,30 +127,50 @@ export class AddPanelWidget extends React.Component {
     dashboard.removePanel(this.props.panel);
   };
 
-  render() {
-    let addCopyButton;
+  renderOptionLink = (icon, text, onClick) => {
+    return (
+
+    );
+  };
 
-    if (this.state.copiedPanelPlugins.length === 1) {
-      addCopyButton = this.copyButton(this.state.copiedPanelPlugins[0]);
-    }
+  render() {
+    const { copiedPanelPlugins } = this.state;
 
     return (
+ New Panel
-
-          {addCopyButton}
-
+            {this.renderOptionLink('queries', 'Add Query', this.onCreateNewPanel)}
+            {this.renderOptionLink('visualization', 'Choose Visualization', () =>
+              this.onCreateNewPanel('visualization')
+            )}
+
+
+
+            {copiedPanelPlugins.length === 1 && (
+            )}
diff --git a/public/app/features/dashboard/components/AddPanelWidget/_AddPanelWidget.scss b/public/app/features/dashboard/components/AddPanelWidget/_AddPanelWidget.scss index 5a1cbee4b44..288b2e7a410 100644 --- a/public/app/features/dashboard/components/AddPanelWidget/_AddPanelWidget.scss +++ b/public/app/features/dashboard/components/AddPanelWidget/_AddPanelWidget.scss @@ -14,6 +14,9 @@ align-items: center; width: 100%; cursor: move; + background: $page-header-bg; + box-shadow: $page-header-shadow; + border-bottom: 1px solid $page-header-border-color; .gicon { font-size: 30px; @@ -26,6 +29,29 @@ } } +.add-panel-widget__title { + font-size: $font-size-md; + font-weight: $font-weight-semi-bold; + margin-right: $spacer*2; +} + +.add-panel-widget__link { + margin: 0 8px; + width: 154px; +} + +.add-panel-widget__icon { + margin-bottom: 8px; + + .gicon { + color: white; + height: 44px; + width: 53px; + position: relative; + left: 5px; + } +} + .add-panel-widget__close { margin-left: auto; background-color: transparent; @@ -34,14 +60,25 @@ margin-right: -10px; } +.add-panel-widget__create { + display: inherit; + margin-bottom: 24px; + // this is to have the big button appear centered + margin-top: 55px; +} + +.add-panel-widget__actions { + display: inherit; +} + +.add-panel-widget__action { + margin: 0 4px; +} + .add-panel-widget__btn-container { + height: 100%; display: flex; justify-content: center; align-items: center; - height: 100%; flex-direction: column; - - .btn { - margin-bottom: 10px; - } } diff --git a/public/app/features/dashboard/components/AddPanelWidget/__snapshots__/AddPanelWidget.test.tsx.snap b/public/app/features/dashboard/components/AddPanelWidget/__snapshots__/AddPanelWidget.test.tsx.snap new file mode 100644 index 00000000000..00faf48d8df --- /dev/null +++ b/public/app/features/dashboard/components/AddPanelWidget/__snapshots__/AddPanelWidget.test.tsx.snap @@ -0,0 +1,86 @@ +// Jest Snapshot v1, https://goo.gl/fbAQLP + +exports[`Render should render component 1`] = ` +
+
+
+ + + New Panel + + +
+
+ +
+ +
+
+
+
+`; diff --git a/public/app/features/dashboard/components/SaveModals/index.ts b/public/app/features/dashboard/components/SaveModals/index.ts index afab0796d28..6f55cc2ce06 100644 --- a/public/app/features/dashboard/components/SaveModals/index.ts +++ b/public/app/features/dashboard/components/SaveModals/index.ts @@ -1,2 +1,3 @@ export { SaveDashboardAsModalCtrl } from './SaveDashboardAsModalCtrl'; export { SaveDashboardModalCtrl } from './SaveDashboardModalCtrl'; +export { SaveProvisionedDashboardModalCtrl } from './SaveProvisionedDashboardModalCtrl'; diff --git a/public/app/features/dashboard/panel_editor/PanelEditor.tsx b/public/app/features/dashboard/panel_editor/PanelEditor.tsx index d7aafb89e55..bfdc13bc8f2 100644 --- a/public/app/features/dashboard/panel_editor/PanelEditor.tsx +++ b/public/app/features/dashboard/panel_editor/PanelEditor.tsx @@ -2,7 +2,7 @@ import React, { PureComponent } from 'react'; import classNames from 'classnames'; import { QueriesTab } from './QueriesTab'; -import { VisualizationTab } from './VisualizationTab'; +import VisualizationTab from './VisualizationTab'; import { GeneralTab } from './GeneralTab'; import { AlertTab } from '../../alerting/AlertTab'; @@ -38,7 +38,7 @@ export class PanelEditor extends PureComponent { onChangeTab = (tab: PanelEditorTab) => { store.dispatch( updateLocation({ - query: { tab: tab.id }, + query: { tab: tab.id, openVizPicker: null }, partial: true, }) ); diff --git a/public/app/features/dashboard/panel_editor/QueriesTab.tsx b/public/app/features/dashboard/panel_editor/QueriesTab.tsx index 491f255d761..d46ff020906 100644 --- a/public/app/features/dashboard/panel_editor/QueriesTab.tsx +++ b/public/app/features/dashboard/panel_editor/QueriesTab.tsx @@ -133,7 +133,7 @@ export class QueriesTab extends PureComponent { return ( <> -
+
{!isAddingMixed && (
-
+
void; -} - -export default class PromStart extends PureComponent { +export default class PromStart extends PureComponent { render() { return (
diff --git a/public/app/plugins/datasource/stackdriver/components/QueryEditor.tsx b/public/app/plugins/datasource/stackdriver/components/QueryEditor.tsx index 94521041416..c3bd9212b21 100644 --- a/public/app/plugins/datasource/stackdriver/components/QueryEditor.tsx +++ b/public/app/plugins/datasource/stackdriver/components/QueryEditor.tsx @@ -10,21 +10,21 @@ import { Alignments } from './Alignments'; import { AlignmentPeriods } from './AlignmentPeriods'; import { AliasBy } from './AliasBy'; import { Help } from './Help'; -import { Target, MetricDescriptor } from '../types'; +import { StackdriverQuery, MetricDescriptor } from '../types'; import { getAlignmentPickerData } from '../functions'; import StackdriverDatasource from '../datasource'; import { SelectOptionItem } from '@grafana/ui'; export interface Props { - onQueryChange: (target: Target) => void; + onQueryChange: (target: StackdriverQuery) => void; onExecuteQuery: () => void; - target: Target; + target: StackdriverQuery; events: any; datasource: StackdriverDatasource; templateSrv: TemplateSrv; } -interface State extends Target { +interface State extends StackdriverQuery { alignOptions: SelectOptionItem[]; lastQuery: string; lastQueryError: string; diff --git a/public/app/plugins/datasource/stackdriver/datasource.ts b/public/app/plugins/datasource/stackdriver/datasource.ts index 025955105a7..4c1f07e0a06 100644 --- a/public/app/plugins/datasource/stackdriver/datasource.ts +++ b/public/app/plugins/datasource/stackdriver/datasource.ts @@ -2,9 +2,10 @@ import { stackdriverUnitMappings } from './constants'; import appEvents from 'app/core/app_events'; import _ from 'lodash'; import StackdriverMetricFindQuery from './StackdriverMetricFindQuery'; -import { MetricDescriptor } from './types'; +import { StackdriverQuery, MetricDescriptor } from './types'; +import { DataSourceApi, DataQueryOptions } from '@grafana/ui/src/types'; -export default class StackdriverDatasource { +export default class StackdriverDatasource implements DataSourceApi { id: number; url: string; baseUrl: string; @@ -39,9 +40,7 @@ export default class StackdriverDatasource { alignmentPeriod: this.templateSrv.replace(t.alignmentPeriod, options.scopedVars || {}), groupBys: this.interpolateGroupBys(t.groupBys, options.scopedVars), view: t.view || 'FULL', - filters: (t.filters || []).map(f => { - return this.templateSrv.replace(f, options.scopedVars || {}); - }), + filters: this.interpolateFilters(t.filters, options.scopedVars), aliasBy: this.templateSrv.replace(t.aliasBy, options.scopedVars || {}), type: 'timeSeriesQuery', }; @@ -63,7 +62,13 @@ export default class StackdriverDatasource { } } - async getLabels(metricType, refId) { + interpolateFilters(filters: string[], scopedVars: object) { + return (filters || []).map(f => { + return this.templateSrv.replace(f, scopedVars || {}, 'regex'); + }); + } + + async getLabels(metricType: string, refId: string) { const response = await this.getTimeSeries({ targets: [ { @@ -103,7 +108,7 @@ export default class StackdriverDatasource { return unit; } - async query(options) { + async query(options: DataQueryOptions) { const result = []; const data = await this.getTimeSeries(options); if (data.results) { diff --git a/public/app/plugins/datasource/stackdriver/img/stackdriver_logo.png b/public/app/plugins/datasource/stackdriver/img/stackdriver_logo.png deleted file mode 100644 index cd52e773deb..00000000000 Binary files a/public/app/plugins/datasource/stackdriver/img/stackdriver_logo.png and /dev/null differ diff --git 
a/public/app/plugins/datasource/stackdriver/img/stackdriver_logo.svg b/public/app/plugins/datasource/stackdriver/img/stackdriver_logo.svg new file mode 100644 index 00000000000..93878f20a06 --- /dev/null +++ b/public/app/plugins/datasource/stackdriver/img/stackdriver_logo.svg @@ -0,0 +1 @@ + \ No newline at end of file diff --git a/public/app/plugins/datasource/stackdriver/plugin.json b/public/app/plugins/datasource/stackdriver/plugin.json index e68100c0e59..1ee3d57e9b1 100644 --- a/public/app/plugins/datasource/stackdriver/plugin.json +++ b/public/app/plugins/datasource/stackdriver/plugin.json @@ -14,8 +14,8 @@ "description": "Google Stackdriver Datasource for Grafana", "version": "1.0.0", "logos": { - "small": "img/stackdriver_logo.png", - "large": "img/stackdriver_logo.png" + "small": "img/stackdriver_logo.svg", + "large": "img/stackdriver_logo.svg" }, "author": { "name": "Grafana Project", diff --git a/public/app/plugins/datasource/stackdriver/query_ctrl.ts b/public/app/plugins/datasource/stackdriver/query_ctrl.ts index c6a8a4d9782..3a2d0bb970a 100644 --- a/public/app/plugins/datasource/stackdriver/query_ctrl.ts +++ b/public/app/plugins/datasource/stackdriver/query_ctrl.ts @@ -1,7 +1,7 @@ import _ from 'lodash'; import { QueryCtrl } from 'app/plugins/sdk'; -import { Target } from './types'; +import { StackdriverQuery } from './types'; import { TemplateSrv } from 'app/features/templating/template_srv'; export class StackdriverQueryCtrl extends QueryCtrl { @@ -16,7 +16,7 @@ export class StackdriverQueryCtrl extends QueryCtrl { this.onExecuteQuery = this.onExecuteQuery.bind(this); } - onQueryChange(target: Target) { + onQueryChange(target: StackdriverQuery) { Object.assign(this.target, target); } diff --git a/public/app/plugins/datasource/stackdriver/specs/datasource.test.ts b/public/app/plugins/datasource/stackdriver/specs/datasource.test.ts index 46cdd77b7a9..032f10d8ca5 100644 --- a/public/app/plugins/datasource/stackdriver/specs/datasource.test.ts +++ b/public/app/plugins/datasource/stackdriver/specs/datasource.test.ts @@ -1,7 +1,8 @@ import StackdriverDataSource from '../datasource'; import { metricDescriptors } from './testData'; import moment from 'moment'; -import { TemplateSrvStub } from 'test/specs/helpers'; +import { TemplateSrv } from 'app/features/templating/template_srv'; +import { CustomVariable } from 'app/features/templating/all'; describe('StackdriverDataSource', () => { const instanceSettings = { @@ -9,7 +10,7 @@ describe('StackdriverDataSource', () => { defaultProject: 'testproject', }, }; - const templateSrv = new TemplateSrvStub(); + const templateSrv = new TemplateSrv(); const timeSrv = {}; describe('when performing testDataSource', () => { @@ -154,15 +155,41 @@ describe('StackdriverDataSource', () => { }); }); + describe('when interpolating a template variable for the filter', () => { + let interpolated; + describe('and is single value variable', () => { + beforeEach(() => { + const filterTemplateSrv = initTemplateSrv('filtervalue1'); + const ds = new StackdriverDataSource(instanceSettings, {}, filterTemplateSrv, timeSrv); + interpolated = ds.interpolateFilters(['resource.label.zone', '=~', '${test}'], {}); + }); + + it('should replace the variable with the value', () => { + expect(interpolated.length).toBe(3); + expect(interpolated[2]).toBe('filtervalue1'); + }); + }); + + describe('and is multi value variable', () => { + beforeEach(() => { + const filterTemplateSrv = initTemplateSrv(['filtervalue1', 'filtervalue2'], true); + const ds = new 
StackdriverDataSource(instanceSettings, {}, filterTemplateSrv, timeSrv); + interpolated = ds.interpolateFilters(['resource.label.zone', '=~', '[[test]]'], {}); + }); + + it('should replace the variable with a regex expression', () => { + expect(interpolated[2]).toBe('(filtervalue1|filtervalue2)'); + }); + }); + }); + describe('when interpolating a template variable for group bys', () => { let interpolated; describe('and is single value variable', () => { beforeEach(() => { - templateSrv.data = { - test: 'groupby1', - }; - const ds = new StackdriverDataSource(instanceSettings, {}, templateSrv, timeSrv); + const groupByTemplateSrv = initTemplateSrv('groupby1'); + const ds = new StackdriverDataSource(instanceSettings, {}, groupByTemplateSrv, timeSrv); interpolated = ds.interpolateGroupBys(['[[test]]'], {}); }); @@ -174,10 +201,8 @@ describe('StackdriverDataSource', () => { describe('and is multi value variable', () => { beforeEach(() => { - templateSrv.data = { - test: 'groupby1,groupby2', - }; - const ds = new StackdriverDataSource(instanceSettings, {}, templateSrv, timeSrv); + const groupByTemplateSrv = initTemplateSrv(['groupby1', 'groupby2'], true); + const ds = new StackdriverDataSource(instanceSettings, {}, groupByTemplateSrv, timeSrv); interpolated = ds.interpolateGroupBys(['[[test]]'], {}); }); @@ -241,3 +266,19 @@ describe('StackdriverDataSource', () => { }); }); }); +function initTemplateSrv(values: any, multi = false) { + const templateSrv = new TemplateSrv(); + templateSrv.init([ + new CustomVariable( + { + name: 'test', + current: { + value: values, + }, + multi: multi, + }, + {} + ), + ]); + return templateSrv; +} diff --git a/public/app/plugins/datasource/stackdriver/types.ts b/public/app/plugins/datasource/stackdriver/types.ts index 29b12b4289d..b9a6893d4bd 100644 --- a/public/app/plugins/datasource/stackdriver/types.ts +++ b/public/app/plugins/datasource/stackdriver/types.ts @@ -1,3 +1,5 @@ +import { DataQuery } from '@grafana/ui/src/types'; + export enum MetricFindQueryTypes { Services = 'services', MetricTypes = 'metricTypes', @@ -20,20 +22,22 @@ export interface VariableQueryData { services: Array<{ value: string; name: string }>; } -export interface Target { - defaultProject: string; - unit: string; +export interface StackdriverQuery extends DataQuery { + defaultProject?: string; + unit?: string; metricType: string; - service: string; + service?: string; refId: string; crossSeriesReducer: string; - alignmentPeriod: string; + alignmentPeriod?: string; perSeriesAligner: string; - groupBys: string[]; - filters: string[]; - aliasBy: string; + groupBys?: string[]; + filters?: string[]; + aliasBy?: string; metricKind: string; valueType: string; + datasourceId?: number; + view?: string; } export interface AnnotationTarget { diff --git a/public/app/routes/GrafanaCtrl.ts b/public/app/routes/GrafanaCtrl.ts index 70bdf49e5e4..c6945f26d08 100644 --- a/public/app/routes/GrafanaCtrl.ts +++ b/public/app/routes/GrafanaCtrl.ts @@ -280,6 +280,28 @@ export function grafanaAppDirective(playlistSrv, contextSrv, $timeout, $rootScop if (popover.length > 0 && target.parents('.graph-legend').length === 0) { popover.hide(); } + + // hide time picker + const timePickerDropDownIsOpen = elem.find('.gf-timepicker-dropdown').length > 0; + if (timePickerDropDownIsOpen) { + const targetIsInTimePickerDropDown = target.parents('.gf-timepicker-dropdown').length > 0; + const targetIsInTimePickerNav = target.parents('.gf-timepicker-nav').length > 0; + const targetIsDatePickerRowBtn = 
target.parents('td[id^="datepicker-"]').length > 0; + const targetIsDatePickerHeaderBtn = target.parents('button[id^="datepicker-"]').length > 0; + + if ( + targetIsInTimePickerNav || + targetIsInTimePickerDropDown || + targetIsDatePickerRowBtn || + targetIsDatePickerHeaderBtn + ) { + return; + } + + scope.$apply(() => { + scope.appEvent('closeTimepicker'); + }); + } }); }, }; diff --git a/public/app/store/configureStore.ts b/public/app/store/configureStore.ts index dc9a478adf3..570a387cd74 100644 --- a/public/app/store/configureStore.ts +++ b/public/app/store/configureStore.ts @@ -1,6 +1,6 @@ import { createStore, applyMiddleware, compose, combineReducers } from 'redux'; import thunk from 'redux-thunk'; -// import { createLogger } from 'redux-logger'; +import { createLogger } from 'redux-logger'; import sharedReducers from 'app/core/reducers'; import alertingReducers from 'app/features/alerting/state/reducers'; import teamsReducers from 'app/features/teams/state/reducers'; @@ -39,7 +39,7 @@ export function configureStore() { if (process.env.NODE_ENV !== 'production') { // DEV builds we had the logger middleware - setStore(createStore(rootReducer, {}, composeEnhancers(applyMiddleware(thunk)))); + setStore(createStore(rootReducer, {}, composeEnhancers(applyMiddleware(thunk, createLogger())))); } else { setStore(createStore(rootReducer, {}, composeEnhancers(applyMiddleware(thunk)))); } diff --git a/public/app/types/explore.ts b/public/app/types/explore.ts index 34b7ff08c99..9c8d977c3ad 100644 --- a/public/app/types/explore.ts +++ b/public/app/types/explore.ts @@ -1,5 +1,14 @@ +import { ComponentClass } from 'react'; import { Value } from 'slate'; -import { RawTimeRange, TimeRange, DataQuery, DataSourceSelectItem, DataSourceApi, QueryHint } from '@grafana/ui'; +import { + RawTimeRange, + TimeRange, + DataQuery, + DataSourceSelectItem, + DataSourceApi, + QueryHint, + ExploreStartPageProps, +} from '@grafana/ui'; import { Emitter } from 'app/core/core'; import { LogsModel } from 'app/core/logs_model'; @@ -102,7 +111,7 @@ export interface ExploreItemState { /** * React component to be shown when no queries have been run yet, e.g., for a query language cheat sheet. */ - StartPage?: any; + StartPage?: ComponentClass; /** * Width used for calculating the graph interval (can't have more datapoints than pixels) */ @@ -144,10 +153,10 @@ export interface ExploreItemState { */ history: HistoryItem[]; /** - * Initial queries for this Explore, e.g., set via URL. Each query will be - * converted to a query row. Query edits should be tracked in `modifiedQueries` though. + * Queries for this Explore, e.g., set via URL. Each query will be + * converted to a query row. */ - initialQueries: DataQuery[]; + queries: DataQuery[]; /** * True if this Explore area has been initialized. * Used to distinguish URL state injection versus split view state injection. @@ -162,12 +171,6 @@ export interface ExploreItemState { * Log query result to be displayed in the logs result viewer. */ logsResult?: LogsModel; - /** - * Copy of `initialQueries` that tracks user edits. - * Don't connect this property to a react component as it is updated on every query change. - * Used when running queries. Needs to be reset to `initialQueries` when those are reset as well. - */ - modifiedQueries: DataQuery[]; /** * Query intervals for graph queries to determine how many datapoints to return. * Needs to be updated when `datasourceInstance` or `containerWidth` is changed. 
@@ -229,12 +232,24 @@ export interface ExploreItemState { * Table model that combines all query table results into a single table. */ tableResult?: TableModel; + + /** + * React keys for rendering of QueryRows + */ + queryKeys: string[]; +} + +export interface ExploreUIState { + showingTable: boolean; + showingGraph: boolean; + showingLogs: boolean; } export interface ExploreUrlState { datasource: string; queries: any[]; // Should be a DataQuery, but we're going to strip refIds, so typing makes less sense range: RawTimeRange; + ui: ExploreUIState; } export interface HistoryItem { diff --git a/public/img/icons_dark_theme/icon_advanced.svg b/public/img/icons_dark_theme/icon_advanced.svg index 5fd18a86dd5..dea3ddff685 100644 --- a/public/img/icons_dark_theme/icon_advanced.svg +++ b/public/img/icons_dark_theme/icon_advanced.svg @@ -4,7 +4,7 @@ diff --git a/public/img/icons_dark_theme/icon_advanced_active.svg b/public/img/icons_dark_theme/icon_advanced_active.svg index 80672a2595b..1227ddc868c 100644 --- a/public/img/icons_dark_theme/icon_advanced_active.svg +++ b/public/img/icons_dark_theme/icon_advanced_active.svg @@ -5,7 +5,7 @@ width="121px" height="100px" viewBox="0 0 121 100" style="enable-background:new 0 0 121 100;" xml:space="preserve">