Datasource/Cloudwatch: Adds support for Cloudwatch Logs (#23566)

* Datasource/Cloudwatch: Adds support for Cloudwatch Logs

* Fix rebase leftover

* Use jsurl for AWS url serialization

* WIP: Temporary workaround for CLIQ metrics

* Only allow up to 20 log groups to be selected

* WIP additional changes

* More changes based on feedback

* More changes based on PR feedback

* Fix strict null errors
kay delaney 2020-04-25 21:48:20 +01:00 committed by GitHub
parent af00fa7214
commit f48ba11d4c
78 changed files with 3914 additions and 399 deletions
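
The "Use jsurl for AWS url serialization" change listed above pulls the jsurl package into package.json (see the dependency additions further down). As a rough sketch of the kind of encoding it provides, assuming the standard stringify/parse API of the jsurl npm package rather than code from this commit:

import * as JSURL from 'jsurl';

// jsurl encodes plain objects into a compact, URL-safe string, which is useful when
// deep-linking a CloudWatch Logs Insights query into an AWS console URL.
const queryState = { timeType: 'RELATIVE', unit: 'seconds', start: -3600, end: 0, editorString: 'fields @timestamp' };

const encoded = JSURL.stringify(queryState); // URL-safe string such as "~(timeType~'RELATIVE~...)"
const roundTripped = JSURL.parse(encoded);   // back to the original object

console.log(encoded, roundTripped);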

go.mod

@ -3,10 +3,9 @@ module github.com/grafana/grafana
go 1.14
require (
cloud.google.com/go v0.38.0 // indirect
github.com/BurntSushi/toml v0.3.1
github.com/VividCortex/mysqlerr v0.0.0-20170204212430-6c6b55f8796f
github.com/aws/aws-sdk-go v1.25.48
github.com/aws/aws-sdk-go v1.29.20
github.com/beevik/etree v1.1.0 // indirect
github.com/benbjohnson/clock v0.0.0-20161215174838-7dc76406b6d3
github.com/bradfitz/gomemcache v0.0.0-20190329173943-551aad21a668
@ -23,7 +22,7 @@ require (
github.com/go-macaron/binding v0.0.0-20190806013118-0b4f37bab25b
github.com/go-macaron/gzip v0.0.0-20160222043647-cad1c6580a07
github.com/go-macaron/session v0.0.0-20190805070824-1a3cdc6f5659
github.com/go-sql-driver/mysql v1.4.1
github.com/go-sql-driver/mysql v1.5.0
github.com/go-stack/stack v1.8.0
github.com/gobwas/glob v0.2.3
github.com/golang/protobuf v1.3.4
@ -48,7 +47,7 @@ require (
github.com/mattn/go-sqlite3 v1.11.0
github.com/opentracing/opentracing-go v1.1.0
github.com/patrickmn/go-cache v2.1.0+incompatible
github.com/pkg/errors v0.8.1
github.com/pkg/errors v0.9.1
github.com/prometheus/client_golang v1.3.0
github.com/prometheus/client_model v0.1.0
github.com/prometheus/common v0.7.0
@ -72,7 +71,7 @@ require (
go.uber.org/atomic v1.5.1 // indirect
golang.org/x/crypto v0.0.0-20200406173513-056763e48d71
golang.org/x/lint v0.0.0-20191125180803-fdd1cda4f05f // indirect
golang.org/x/net v0.0.0-20190923162816-aa69164e4478
golang.org/x/net v0.0.0-20200202094626-16171245cfb2
golang.org/x/oauth2 v0.0.0-20190604053449-0f29369cfe45
golang.org/x/sync v0.0.0-20190911185100-cd5d95a43a6e
golang.org/x/tools v0.0.0-20191213221258-04c2e8eff935 // indirect

go.sum

@ -1,6 +1,7 @@
cloud.google.com/go v0.26.0/go.mod h1:aQUYkXzVsufM+DwF1aE+0xfcU+56JwCaLick0ClmMTw=
cloud.google.com/go v0.34.0 h1:eOI3/cP2VTU6uZLDYAoic+eyzzB9YyGmJ7eIjl8rOPg=
cloud.google.com/go v0.34.0/go.mod h1:aQUYkXzVsufM+DwF1aE+0xfcU+56JwCaLick0ClmMTw=
cloud.google.com/go v0.37.4 h1:glPeL3BQJsbF6aIIYfZizMwc5LTYz250bDMjttbBGAU=
cloud.google.com/go v0.37.4/go.mod h1:NHPJ89PdicEuT9hdPXMROBD91xc5uRDxsMtSB16k7hw=
cloud.google.com/go v0.38.0 h1:ROfEUZz+Gh5pa62DJWXSaonyu3StP6EA6lPEXPI6mCo=
cloud.google.com/go v0.38.0/go.mod h1:990N+gfupTy94rShfmMCWGDn0LpTmnzTp2qbd1dvSRU=
@ -19,6 +20,9 @@ github.com/apache/arrow/go/arrow v0.0.0-20200403134915-89ce1cadb678/go.mod h1:QN
github.com/apache/thrift v0.12.0/go.mod h1:cp2SuWMxlEZw2r+iP2GNCdIi4C1qmUzdZFSVb+bacwQ=
github.com/aws/aws-sdk-go v1.25.48 h1:J82DYDGZHOKHdhx6hD24Tm30c2C3GchYGfN0mf9iKUk=
github.com/aws/aws-sdk-go v1.25.48/go.mod h1:KmX6BPdI08NWTb3/sm4ZGu5ShLoqVDhKgpiN924inxo=
github.com/aws/aws-sdk-go v1.29.20 h1:vAHJhARpdbdeJstTVaugeHgvVj5lBnfz3blbbD24gfo=
github.com/aws/aws-sdk-go v1.29.20/go.mod h1:1KvfttTE3SPKMpo8g2c6jL3ZKfXtFvKscTgahTma5Xg=
github.com/aws/aws-sdk-go v1.29.32 h1:o4I8Qc+h9ht8NXvTHeXZH3EmtSUZ/PC0bg9Wawr+aTA=
github.com/beevik/etree v1.0.1/go.mod h1:r8Aw8JqVegEf0w2fDnATrX9VpkMcyFeM0FhwO62wh+A=
github.com/beevik/etree v1.1.0 h1:T0xke/WvNtMoCqgzPhkX2r4rjY3GDZFi+FjpRZY2Jbs=
github.com/beevik/etree v1.1.0/go.mod h1:r8Aw8JqVegEf0w2fDnATrX9VpkMcyFeM0FhwO62wh+A=
@ -90,6 +94,8 @@ github.com/go-macaron/session v0.0.0-20190805070824-1a3cdc6f5659 h1:YXDFNK98PgKe
github.com/go-macaron/session v0.0.0-20190805070824-1a3cdc6f5659/go.mod h1:tLd0QEudXocQckwcpCq5pCuTCuYc24I0bRJDuRe9OuQ=
github.com/go-sql-driver/mysql v1.4.1 h1:g24URVg0OFbNUTx9qqY1IRZ9D9z3iPyi5zKhQZpNwpA=
github.com/go-sql-driver/mysql v1.4.1/go.mod h1:zAC/RDZ24gD3HViQzih4MyKcchzm+sOG5ZlKdlhCg5w=
github.com/go-sql-driver/mysql v1.5.0 h1:ozyZYNQW3x3HtqT1jira07DN2PArx2v7/mN66gGcHOs=
github.com/go-sql-driver/mysql v1.5.0/go.mod h1:DCzpHaOWr8IXmIStZouvnhqoel9Qv2LBy8hT2VhHyBg=
github.com/go-stack/stack v1.8.0 h1:5SgMzNM5HxrEjV0ww2lTmX6E2Izsfxas4+YHWRs3Lsk=
github.com/go-stack/stack v1.8.0/go.mod h1:v0f6uXyyMGvRgIKkXu+yp6POWl0qKG85gN/melR3HDY=
github.com/go-xorm/sqlfiddle v0.0.0-20180821085327-62ce714f951a h1:9wScpmSP5A3Bk8V3XHWUcJmYTh+ZnlHVyc+A4oZYS3Y=
@ -116,12 +122,14 @@ github.com/google/btree v0.0.0-20180813153112-4030bb1f1f0c/go.mod h1:lNA+9X1NB3Z
github.com/google/flatbuffers v1.11.0 h1:O7CEyB8Cb3/DmtxODGtLHcEvpr81Jm5qLg/hsHnxA2A=
github.com/google/flatbuffers v1.11.0/go.mod h1:1AeVuKshWv4vARoZatz6mlQ0JxURH0Kv5+zNeJKJCa8=
github.com/google/go-cmp v0.2.0/go.mod h1:oXzfMopK8JAjlY9xF4vHSVASa0yLyX7SntLO5aqRK0M=
github.com/google/go-cmp v0.3.0/go.mod h1:8QqcDgzrUqlUb/G2PQTWiueGozuR1884gddMywk6iLU=
github.com/google/go-cmp v0.3.1 h1:Xye71clBPdm5HgqGwUkwhbynsUJZhDbS20FvLhQ2izg=
github.com/google/go-cmp v0.3.1/go.mod h1:8QqcDgzrUqlUb/G2PQTWiueGozuR1884gddMywk6iLU=
github.com/google/gofuzz v1.0.0/go.mod h1:dBl0BpW6vV/+mYPU4Po3pmUjxk6FQPldtuIdl/M65Eg=
github.com/google/martian v2.1.0+incompatible/go.mod h1:9I4somxYTbIHy5NJKHRl3wXiIaQGbYVAs8BPL6v8lEs=
github.com/google/pprof v0.0.0-20181206194817-3ea8567a2e57/go.mod h1:zfwlbNMJ+OItoe0UupaVj+oy1omPYYDuagoSzA8v9mc=
github.com/googleapis/gax-go/v2 v2.0.4/go.mod h1:0Wqv26UfaUD9n4G6kQubkQ+KchISgw+vpHVxEJEs9eg=
github.com/googleapis/gax-go/v2 v2.0.5/go.mod h1:DWXyrwAJ9X0FpwwEdw+IPEYBICEFu5mhpdKc/us6bOk=
github.com/gopherjs/gopherjs v0.0.0-20181017120253-0766667cb4d1/go.mod h1:wJfORRmW1u3UXTncJ5qlYoELFm8eSnnEO6hX4iZ3EWY=
github.com/gopherjs/gopherjs v0.0.0-20181103185306-d547d1d9531e/go.mod h1:wJfORRmW1u3UXTncJ5qlYoELFm8eSnnEO6hX4iZ3EWY=
github.com/gopherjs/gopherjs v0.0.0-20190430165422-3e4dfb77656c h1:7lF+Vz0LqiRidnzC1Oq86fpX1q/iEv2KJdrCtttYjT4=
@ -148,6 +156,7 @@ github.com/hashicorp/go-plugin v1.2.2/go.mod h1:F9eH4LrE/ZsRdbwhfjs9k9HoDUwAHnYt
github.com/hashicorp/go-version v1.1.0 h1:bPIoEKD27tNdebFGGxxYwcL4nepeY4j1QP23PFRGzg0=
github.com/hashicorp/go-version v1.1.0/go.mod h1:fltr4n8CU8Ke44wwGCBoEymUuxUHl09ZGVZPK5anwXA=
github.com/hashicorp/golang-lru v0.5.0/go.mod h1:/m3WP610KZHVQ1SGc6re/UDhFvYD7pJ4Ao+sR/qLZy8=
github.com/hashicorp/golang-lru v0.5.1/go.mod h1:/m3WP610KZHVQ1SGc6re/UDhFvYD7pJ4Ao+sR/qLZy8=
github.com/hashicorp/yamux v0.0.0-20180604194846-3520598351bb/go.mod h1:+NfK9FKeTrX5uv1uIXGdwYDTeHna2qgaIlx54MXqjAM=
github.com/hashicorp/yamux v0.0.0-20181012175058-2f1d1f20f75d h1:kJCB4vdITiW1eC1vq2e6IsrXKrZit1bv/TDYFGMp4BQ=
github.com/hashicorp/yamux v0.0.0-20181012175058-2f1d1f20f75d/go.mod h1:+NfK9FKeTrX5uv1uIXGdwYDTeHna2qgaIlx54MXqjAM=
@ -240,6 +249,8 @@ github.com/pierrec/lz4 v2.0.5+incompatible/go.mod h1:pdkljMzZIN41W+lC3N2tnIh5sFi
github.com/pkg/errors v0.8.0/go.mod h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINEl0=
github.com/pkg/errors v0.8.1 h1:iURUrRGxPUNPdy5/HRSm+Yj6okJ6UtLINN0Q9M4+h3I=
github.com/pkg/errors v0.8.1/go.mod h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINEl0=
github.com/pkg/errors v0.9.1 h1:FEBLx1zS214owpjy7qsBeixbURkuhQAwrK5UwLGTwt4=
github.com/pkg/errors v0.9.1/go.mod h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINEl0=
github.com/pmezard/go-difflib v1.0.0 h1:4DBwDE0NGyQoBHbLQYPwSUPoCMWR5BEzIk/f1lZbAQM=
github.com/pmezard/go-difflib v1.0.0/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4=
github.com/prometheus/client_golang v0.9.1/go.mod h1:7SWBe2y4D6OKWSNQJUaRYU/AaXPKyh/dDVn+NZz0KFw=
@ -348,6 +359,7 @@ golang.org/x/lint v0.0.0-20181026193005-c67002cb31c3/go.mod h1:UVdnD1Gm6xHRNCYTk
golang.org/x/lint v0.0.0-20190227174305-5b3e6a55c961/go.mod h1:wehouNa3lNwaWXcvxsM5YxQ5yQlVC4a0KAMCusXpPoU=
golang.org/x/lint v0.0.0-20190301231843-5614ed5bae6f/go.mod h1:UVdnD1Gm6xHRNCYTkRU2/jEulfH38KcIWyp/GAMgvoE=
golang.org/x/lint v0.0.0-20190313153728-d0100b6bd8b3/go.mod h1:6SW0HCj/g11FgYtHlgUYUwCkIfeOF89ocIRzGO/8vkc=
golang.org/x/lint v0.0.0-20190409202823-959b441ac422/go.mod h1:6SW0HCj/g11FgYtHlgUYUwCkIfeOF89ocIRzGO/8vkc=
golang.org/x/lint v0.0.0-20190930215403-16217165b5de/go.mod h1:6SW0HCj/g11FgYtHlgUYUwCkIfeOF89ocIRzGO/8vkc=
golang.org/x/lint v0.0.0-20191125180803-fdd1cda4f05f h1:J5lckAjkw6qYlOZNj90mLYNTEKDvWeuc1yieZ8qUzUE=
golang.org/x/lint v0.0.0-20191125180803-fdd1cda4f05f/go.mod h1:5qLYkcX4OjUUV8bRuDixDT3tpyyb+LUpUlRWLxfhWrs=
@ -362,12 +374,15 @@ golang.org/x/net v0.0.0-20190125091013-d26f9f9a57f3/go.mod h1:mL1N/T3taQHkDXs73r
golang.org/x/net v0.0.0-20190213061140-3a22650c66bd/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4=
golang.org/x/net v0.0.0-20190311183353-d8887717615a/go.mod h1:t9HGtf8HONx5eT2rtn7q6eTqICYqUVnKs3thJo3Qplg=
golang.org/x/net v0.0.0-20190404232315-eb5bcb51f2a3/go.mod h1:t9HGtf8HONx5eT2rtn7q6eTqICYqUVnKs3thJo3Qplg=
golang.org/x/net v0.0.0-20190503192946-f4e77d36d62c/go.mod h1:t9HGtf8HONx5eT2rtn7q6eTqICYqUVnKs3thJo3Qplg=
golang.org/x/net v0.0.0-20190603091049-60506f45cf65/go.mod h1:HSz+uSET+XFnRR8LxR5pz3Of3rY3CfYBVs4xY44aLks=
golang.org/x/net v0.0.0-20190613194153-d28f0bde5980/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s=
golang.org/x/net v0.0.0-20190620200207-3b0461eec859/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s=
golang.org/x/net v0.0.0-20190724013045-ca1201d0de80/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s=
golang.org/x/net v0.0.0-20190923162816-aa69164e4478 h1:l5EDrHhldLYb3ZRHDUhXF7Om7MvYXnkV9/iQNo1lX6g=
golang.org/x/net v0.0.0-20190923162816-aa69164e4478/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s=
golang.org/x/net v0.0.0-20200202094626-16171245cfb2 h1:CCH4IOTTfewWjGOlSp+zGcjutRKlBEZQ6wTn8ozI/nI=
golang.org/x/net v0.0.0-20200202094626-16171245cfb2/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s=
golang.org/x/oauth2 v0.0.0-20180821212333-d2e6202438be/go.mod h1:N/0e6XlmueqKjAGxoOufVs8QHGRruUQn6yWY3a++T0U=
golang.org/x/oauth2 v0.0.0-20190226205417-e64efc72b421/go.mod h1:gOpvHmFTYa4IltrdGE7lF6nIHvwfUNPOp7c8zoXwtLw=
golang.org/x/oauth2 v0.0.0-20190604053449-0f29369cfe45 h1:SVwTIAaPC2U/AvvLNZ2a7OVsmBpC8L5BlwK1whH3hm0=
@ -388,6 +403,7 @@ golang.org/x/sys v0.0.0-20190129075346-302c3dd5f1cc/go.mod h1:STP8DvDyc/dI5b8T5h
golang.org/x/sys v0.0.0-20190215142949-d0b11bdaac8a/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
golang.org/x/sys v0.0.0-20190412213103-97732733099d/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20190422165155-953cdadca894/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20190507160741-ecd444e8653b/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20190606165138-5da285871e9c/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20190804053845-51ab0e2deafa/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20190922100055-0a153f010e69/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
@ -408,6 +424,7 @@ golang.org/x/tools v0.0.0-20190226205152-f727befe758c/go.mod h1:9Yl7xja0Znq3iFh3
golang.org/x/tools v0.0.0-20190311212946-11955173bddd/go.mod h1:LCzVGOaR6xXOjkQ3onu1FJEFr0SW1gC7cKk1uF8kGRs=
golang.org/x/tools v0.0.0-20190312170243-e65039ee4138/go.mod h1:LCzVGOaR6xXOjkQ3onu1FJEFr0SW1gC7cKk1uF8kGRs=
golang.org/x/tools v0.0.0-20190328211700-ab21143f2384/go.mod h1:LCzVGOaR6xXOjkQ3onu1FJEFr0SW1gC7cKk1uF8kGRs=
golang.org/x/tools v0.0.0-20190506145303-2d16b83fe98c/go.mod h1:RgjU9mgBXZiqYHBnxXauZ1Gv1EHHAz9KjViQ78xBX0Q=
golang.org/x/tools v0.0.0-20190524140312-2c0ae7006135/go.mod h1:RgjU9mgBXZiqYHBnxXauZ1Gv1EHHAz9KjViQ78xBX0Q=
golang.org/x/tools v0.0.0-20190606124116-d0a3d012864b/go.mod h1:/rFqwRUd4F7ZHNgwSSTFct+R/Kf4OFW1sUzUTQQTgfc=
golang.org/x/tools v0.0.0-20190802220118-1d1727260058/go.mod h1:jcCCGcm9btYwXyDqrUWc6MKQKKGJCWEQ3AfLSRIbEuI=
@ -423,8 +440,10 @@ golang.org/x/xerrors v0.0.0-20191204190536-9bdfabe68543 h1:E7g+9GITq07hpfrRu66IV
golang.org/x/xerrors v0.0.0-20191204190536-9bdfabe68543/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0=
google.golang.org/api v0.3.1/go.mod h1:6wY9I6uQWHQ8EM57III9mq/AjF+i8G65rmVagqKMtkk=
google.golang.org/api v0.4.0/go.mod h1:8k5glujaEP+g9n7WNsDg8QP6cUVNI86fCNMcbazEtwE=
google.golang.org/api v0.14.0/go.mod h1:iLdEw5Ide6rF15KTC1Kkl0iskquN2gFfn9o9XIsbkAI=
google.golang.org/appengine v1.1.0/go.mod h1:EbEs0AVv82hx2wNQdGPgUI5lhzA/G0D9YwlJXL52JkM=
google.golang.org/appengine v1.4.0/go.mod h1:xpcJRLb0r/rnEns0DIKYYv+WjYCduHsrkT7/EB5XEv4=
google.golang.org/appengine v1.5.0/go.mod h1:xpcJRLb0r/rnEns0DIKYYv+WjYCduHsrkT7/EB5XEv4=
google.golang.org/appengine v1.6.0/go.mod h1:xpcJRLb0r/rnEns0DIKYYv+WjYCduHsrkT7/EB5XEv4=
google.golang.org/appengine v1.6.1 h1:QzqyMA1tlu6CgqCDUtU9V+ZKhLFT2dkJuANu5QaxI3I=
google.golang.org/appengine v1.6.1/go.mod h1:i06prIuMbXzDqacNJfV5OdTW448YApPu5ww/cMBSeb0=
@ -433,12 +452,14 @@ google.golang.org/genproto v0.0.0-20180817151627-c66870c02cf8/go.mod h1:JiN7NxoA
google.golang.org/genproto v0.0.0-20190307195333-5fe7a883aa19/go.mod h1:VzzqZJRnGkLBvHegQrXjBqPurQTc5/KpmUdxsrq26oE=
google.golang.org/genproto v0.0.0-20190404172233-64821d5d2107/go.mod h1:VzzqZJRnGkLBvHegQrXjBqPurQTc5/KpmUdxsrq26oE=
google.golang.org/genproto v0.0.0-20190418145605-e7d98fc518a7/go.mod h1:VzzqZJRnGkLBvHegQrXjBqPurQTc5/KpmUdxsrq26oE=
google.golang.org/genproto v0.0.0-20190502173448-54afdca5d873/go.mod h1:VzzqZJRnGkLBvHegQrXjBqPurQTc5/KpmUdxsrq26oE=
google.golang.org/genproto v0.0.0-20190819201941-24fa4b261c55 h1:gSJIx1SDwno+2ElGhA4+qG2zF97qiUzTM+rQ0klBOcE=
google.golang.org/genproto v0.0.0-20190819201941-24fa4b261c55/go.mod h1:DMBHOl98Agz4BDEuKkezgsaosCRResVns1a3J2ZsMNc=
google.golang.org/grpc v1.8.0/go.mod h1:yo6s7OP7yaDglbqo1J04qKzAhqBH6lvTonzMVmEdcZw=
google.golang.org/grpc v1.14.0/go.mod h1:yo6s7OP7yaDglbqo1J04qKzAhqBH6lvTonzMVmEdcZw=
google.golang.org/grpc v1.17.0/go.mod h1:6QZJwpn2B+Zp71q/5VxRsJ6NXXVCE5NRUHRo+f3cWCs=
google.golang.org/grpc v1.19.0/go.mod h1:mqu4LbDTu4XGKhr4mRzUsmM4RtVoemTSY81AxZiDr8c=
google.golang.org/grpc v1.20.1/go.mod h1:10oTOabMzJvdu6/UiuZezV6QK5dSlG84ov/aaiqXj38=
google.golang.org/grpc v1.23.0/go.mod h1:Y5yQAOtifL1yxbo5wqy6BxZv8vAUGQwXBOALyacEbxg=
google.golang.org/grpc v1.27.1 h1:zvIju4sqAGvwKspUQOhwnpcqSbzi7/H6QomNNjTL4sk=
google.golang.org/grpc v1.27.1/go.mod h1:qbnxyOmOxrQa7FizSgH+ReBfzJrCY1pSN7KXBS8abTk=
@ -477,6 +498,7 @@ gopkg.in/yaml.v2 v2.2.5/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI=
honnef.co/go/tools v0.0.0-20180728063816-88497007e858/go.mod h1:rf3lG4BRIbNafJWhAfAdb/ePZxsR/4RtNHQocxwk9r4=
honnef.co/go/tools v0.0.0-20190102054323-c2f93a96b099/go.mod h1:rf3lG4BRIbNafJWhAfAdb/ePZxsR/4RtNHQocxwk9r4=
honnef.co/go/tools v0.0.0-20190106161140-3f1c8253044a/go.mod h1:rf3lG4BRIbNafJWhAfAdb/ePZxsR/4RtNHQocxwk9r4=
honnef.co/go/tools v0.0.0-20190418001031-e561f6794a2a/go.mod h1:rf3lG4BRIbNafJWhAfAdb/ePZxsR/4RtNHQocxwk9r4=
honnef.co/go/tools v0.0.0-20190523083050-ea95bdfd59fc/go.mod h1:rf3lG4BRIbNafJWhAfAdb/ePZxsR/4RtNHQocxwk9r4=
xorm.io/builder v0.3.6 h1:ha28mQ2M+TFx96Hxo+iq6tQgnkC9IZkM6D8w9sKHHF8=
xorm.io/builder v0.3.6/go.mod h1:LEFAPISnRzG+zxaxj2vPicRwz67BdhFreKg8yv8/TgU=


@ -205,6 +205,8 @@
"@reduxjs/toolkit": "1.3.4",
"@torkelo/react-select": "3.0.8",
"@types/braintree__sanitize-url": "4.0.0",
"@types/common-tags": "^1.8.0",
"@types/jsurl": "^1.2.28",
"@types/md5": "^2.1.33",
"@types/react-loadable": "5.5.2",
"@types/react-virtualized-auto-sizer": "1.0.0",
@ -220,6 +222,7 @@
"calculate-size": "1.1.1",
"classnames": "2.2.6",
"clipboard": "2.0.4",
"common-tags": "^1.8.0",
"core-js": "3.6.4",
"d3": "5.15.0",
"d3-scale-chromatic": "1.5.0",
@ -231,6 +234,7 @@
"immutable": "3.8.2",
"is-hotkey": "0.1.6",
"jquery": "3.4.1",
"jsurl": "^0.1.5",
"lodash": "4.17.15",
"lru-cache": "^5.1.1",
"marked": "0.6.2",


@ -1,4 +1,5 @@
import { DataFrame, FieldType, Field, Vector } from '../types';
import {
Table,
ArrowType,
@ -162,15 +163,18 @@ export function grafanaDataFrameToArrowTable(data: DataFrame): Table {
}
export function resultsToDataFrames(rsp: any): DataFrame[] {
const frames: DataFrame[] = [];
for (const res of Object.values(rsp.results)) {
const r = res as any;
if (r.dataframes) {
for (const b of r.dataframes) {
const t = base64StringToArrowTable(b as string);
frames.push(arrowTableToDataFrame(t));
}
}
if (rsp === undefined || rsp.results === undefined) {
return [];
}
const results = rsp.results as Array<{ dataframes: string[] }>;
const frames: DataFrame[] = Object.values(results).flatMap(res => {
if (!res.dataframes) {
return [];
}
return res.dataframes.map((b: string) => arrowTableToDataFrame(base64StringToArrowTable(b)));
});
return frames;
}


@ -58,11 +58,9 @@ export class FieldCache {
}
getFirstFieldOfType(type: FieldType): FieldWithIndex | undefined {
const arr = this.fieldByType[type];
if (arr && arr.length > 0) {
return arr[0];
}
return undefined;
const fields = this.fieldByType[type];
const firstField = fields.find(field => !(field.config.custom && field.config.custom['Hidden']));
return firstField;
}
hasFieldNamed(name: string): boolean {


@ -178,6 +178,10 @@ export function guessFieldTypeFromNameAndValue(name: string, v: any): FieldType
* TODO: better Date/Time support! Look for standard date strings?
*/
export function guessFieldTypeFromValue(v: any): FieldType {
if (v instanceof Date || isDateTime(v)) {
return FieldType.time;
}
if (isNumber(v)) {
return FieldType.number;
}
@ -198,10 +202,6 @@ export function guessFieldTypeFromValue(v: any): FieldType {
return FieldType.boolean;
}
if (v instanceof Date || isDateTime(v)) {
return FieldType.time;
}
return FieldType.other;
}
@ -230,17 +230,19 @@ export function guessFieldTypeForField(field: Field): FieldType | undefined {
}
/**
* @returns a copy of the series with the best guess for each field type
* If the series already has field types defined, they will be used
* @returns A copy of the series with the best guess for each field type.
* If the series already has field types defined, they will be used, unless `guessDefined` is true.
* @param series The DataFrame whose field's types should be guessed
* @param guessDefined Whether to guess types of fields with already defined types
*/
export const guessFieldTypes = (series: DataFrame): DataFrame => {
for (let i = 0; i < series.fields.length; i++) {
if (!series.fields[i].type) {
export const guessFieldTypes = (series: DataFrame, guessDefined = false): DataFrame => {
for (const field of series.fields) {
if (!field.type || field.type === FieldType.other || guessDefined) {
// Something is missing a type, return a modified copy
return {
...series,
fields: series.fields.map(field => {
if (field.type && field.type !== FieldType.other) {
if (field.type && field.type !== FieldType.other && !guessDefined) {
return field;
}
// Calculate a reasonable schema value
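
To make the new guessDefined flag concrete, a small usage sketch of guessFieldTypes (the toDataFrame helper and the sample values are illustrative and not part of this diff):

import { toDataFrame, guessFieldTypes } from '@grafana/data';

// A frame whose fields carry string values and no reliable type information.
const frame = toDataFrame({
  fields: [
    { name: 'count', values: ['1', '2', '3'] },
    { name: 'level', values: ['info', 'warn', 'error'] },
  ],
});

// Default call: only fields with a missing type (or FieldType.other) get a guessed type.
const guessed = guessFieldTypes(frame);

// With guessDefined = true every field is re-guessed, even if it already declares a type,
// which helps when an upstream source has labelled everything as strings.
const reGuessed = guessFieldTypes(frame, true);

console.log(reGuessed.fields.map(f => f.type));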


@ -38,6 +38,7 @@ export interface FeatureToggles {
* Available only in Grafana Enterprise
*/
meta: boolean;
cloudwatchLogs: boolean;
newVariables: boolean;
}


@ -274,6 +274,8 @@ export abstract class DataSourceApi<
getVersion?(optionalOptions?: any): Promise<string>;
showContextToggle?(row?: LogRowModel): boolean;
/**
* Can be optionally implemented to allow datasource to be a source of annotations for dashboard. To be visible
* in the annotation editor `annotations` capability also needs to be enabled in plugin.json.
@ -307,6 +309,8 @@ export interface QueryEditorProps<
* Contains query response filtered by refId of QueryResultBase and possible query error
*/
data?: PanelData;
exploreMode?: ExploreMode;
exploreId?: any;
}
export enum DataSourceStatus {
@ -329,6 +333,7 @@ export interface ExploreQueryFieldProps<
onBlur?: () => void;
absoluteRange?: AbsoluteTimeRange;
exploreMode?: ExploreMode;
exploreId?: any;
}
export interface ExploreStartPageProps {


@ -52,6 +52,7 @@ export class GrafanaBootConfig implements GrafanaConfig {
expressions: false,
newEdit: false,
meta: false,
cloudwatchLogs: false,
newVariables: true,
};
licenseInfo: LicenseInfo = {} as LicenseInfo;
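
The new cloudwatchLogs toggle defaults to false here. A minimal sketch of how frontend code would typically gate on it, assuming the usual config export from @grafana/runtime (the helper function itself is hypothetical):

import { config } from '@grafana/runtime';

// Only expose the CloudWatch Logs query mode when the feature toggle is enabled.
export function isCloudWatchLogsEnabled(): boolean {
  return Boolean(config.featureToggles.cloudwatchLogs);
}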


@ -10,7 +10,7 @@ interface Props {
isFocused?: boolean;
isInvalid?: boolean;
tooltip?: PopoverContent;
width?: number;
width?: number | 'auto';
}
export const FormLabel: FunctionComponent<Props> = ({
@ -23,7 +23,7 @@ export const FormLabel: FunctionComponent<Props> = ({
width,
...rest
}) => {
const classes = classNames(`gf-form-label width-${width ? width : '10'}`, className, {
const classes = classNames(className, `gf-form-label width-${width ? width : '10'}`, {
'gf-form-label--is-focused': isFocused,
'gf-form-label--is-invalid': isInvalid,
});


@ -8,6 +8,7 @@ import {
LogRowContextQueryErrors,
HasMoreContextRows,
LogRowContextProvider,
RowContextOptions,
} from './LogRowContextProvider';
import { Themeable } from '../../types/theme';
import { withTheme } from '../../themes/index';
@ -33,8 +34,9 @@ interface Props extends Themeable {
onClickFilterLabel?: (key: string, value: string) => void;
onClickFilterOutLabel?: (key: string, value: string) => void;
onContextClick?: () => void;
getRowContext: (row: LogRowModel, options?: any) => Promise<DataQueryResponse>;
getRowContext: (row: LogRowModel, options?: RowContextOptions) => Promise<DataQueryResponse>;
getFieldLinks?: (field: Field, rowIndex: number) => Array<LinkModel<Field>>;
showContextToggle?: (row?: LogRowModel) => boolean;
}
interface State {
@ -122,6 +124,7 @@ class UnThemedLogRow extends PureComponent<Props, State> {
row,
showDuplicates,
timeZone,
showContextToggle,
showLabels,
showTime,
wrapLogMessage,
@ -176,7 +179,8 @@ class UnThemedLogRow extends PureComponent<Props, State> {
hasMoreContextRows={hasMoreContextRows}
updateLimit={updateLimit}
context={context}
showContext={showContext}
contextIsOpen={showContext}
showContextToggle={showContextToggle}
wrapLogMessage={wrapLogMessage}
onToggleContext={this.toggleContext}
/>


@ -5,6 +5,11 @@ import useAsync from 'react-use/lib/useAsync';
import { DataQueryResponse, DataQueryError } from '@grafana/data';
export interface RowContextOptions {
direction?: 'BACKWARD' | 'FORWARD';
limit?: number;
}
export interface LogRowContextRows {
before?: string[];
after?: string[];
@ -26,7 +31,7 @@ interface ResultType {
interface LogRowContextProviderProps {
row: LogRowModel;
getRowContext: (row: LogRowModel, options?: any) => Promise<DataQueryResponse>;
getRowContext: (row: LogRowModel, options?: RowContextOptions) => Promise<DataQueryResponse>;
children: (props: {
result: LogRowContextRows;
errors: LogRowContextQueryErrors;
@ -36,7 +41,7 @@ interface LogRowContextProviderProps {
}
export const getRowContexts = async (
getRowContext: (row: LogRowModel, options?: any) => Promise<DataQueryResponse>,
getRowContext: (row: LogRowModel, options?: RowContextOptions) => Promise<DataQueryResponse>,
row: LogRowModel,
limit: number
) => {
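
The RowContextOptions interface added above replaces the previously untyped options argument of getRowContext. A sketch of a datasource-side implementation that consumes it (the import path and the query logic are illustrative assumptions):

import { DataQueryResponse, LogRowModel } from '@grafana/data';
import { RowContextOptions } from '@grafana/ui/src/components/Logs/LogRowContextProvider';

// Illustrative only: fetch up to `limit` rows before or after the given log row.
async function getRowContext(row: LogRowModel, options?: RowContextOptions): Promise<DataQueryResponse> {
  const direction = options?.direction ?? 'BACKWARD';
  const limit = options?.limit ?? 10;
  // A real implementation would issue a range query anchored at the row's timestamp
  // in the requested direction and return the resulting frames.
  console.log(direction, limit);
  return { data: [] };
}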


@ -20,10 +20,11 @@ import { LogMessageAnsi } from './LogMessageAnsi';
interface Props extends Themeable {
row: LogRowModel;
hasMoreContextRows?: HasMoreContextRows;
showContext: boolean;
contextIsOpen: boolean;
wrapLogMessage: boolean;
errors?: LogRowContextQueryErrors;
context?: LogRowContextRows;
showContextToggle?: (row?: LogRowModel) => boolean;
highlighterExpressions?: string[];
getRows: () => LogRowModel[];
onToggleContext: () => void;
@ -74,7 +75,8 @@ class UnThemedLogRowMessage extends PureComponent<Props> {
hasMoreContextRows,
updateLimit,
context,
showContext,
contextIsOpen,
showContextToggle,
wrapLogMessage,
onToggleContext,
} = this.props;
@ -97,7 +99,7 @@ class UnThemedLogRowMessage extends PureComponent<Props> {
return (
<td className={style.logsRowMessage}>
<div className={cx(styles.positionRelative, { [styles.horizontalScroll]: !wrapLogMessage })}>
{showContext && context && (
{contextIsOpen && context && (
<LogRowContext
row={row}
context={context}
@ -111,7 +113,7 @@ class UnThemedLogRowMessage extends PureComponent<Props> {
}}
/>
)}
<span className={cx(styles.positionRelative, { [styles.rowWithContext]: showContext })}>
<span className={cx(styles.positionRelative, { [styles.rowWithContext]: contextIsOpen })}>
{needsHighlighter ? (
<Highlighter
style={whiteSpacePreWrap}
@ -126,9 +128,9 @@ class UnThemedLogRowMessage extends PureComponent<Props> {
entry
)}
</span>
{row.searchWords && row.searchWords.length > 0 && (
{showContextToggle?.(row) && (
<span onClick={this.onContextToggle} className={cx(style.context)}>
{showContext ? 'Hide' : 'Show'} context
{contextIsOpen ? 'Hide' : 'Show'} context
</span>
)}
</div>


@ -8,6 +8,7 @@ import { getLogRowStyles } from './getLogRowStyles';
//Components
import { LogRow } from './LogRow';
import { RowContextOptions } from './LogRowContextProvider';
export const PREVIEW_LIMIT = 100;
export const RENDER_LIMIT = 500;
@ -17,6 +18,7 @@ export interface Props extends Themeable {
deduplicatedRows?: LogRowModel[];
dedupStrategy: LogsDedupStrategy;
highlighterExpressions?: string[];
showContextToggle?: (row?: LogRowModel) => boolean;
showLabels: boolean;
showTime: boolean;
wrapLogMessage: boolean;
@ -26,7 +28,7 @@ export interface Props extends Themeable {
previewLimit?: number;
onClickFilterLabel?: (key: string, value: string) => void;
onClickFilterOutLabel?: (key: string, value: string) => void;
getRowContext?: (row: LogRowModel, options?: any) => Promise<any>;
getRowContext?: (row: LogRowModel, options?: RowContextOptions) => Promise<any>;
getFieldLinks?: (field: Field, rowIndex: number) => Array<LinkModel<Field>>;
}
@ -72,6 +74,7 @@ class UnThemedLogRows extends PureComponent<Props, State> {
render() {
const {
dedupStrategy,
showContextToggle,
showLabels,
showTime,
wrapLogMessage,
@ -119,6 +122,7 @@ class UnThemedLogRows extends PureComponent<Props, State> {
getRowContext={getRowContext}
highlighterExpressions={highlighterExpressions}
row={row}
showContextToggle={showContextToggle}
showDuplicates={showDuplicates}
showLabels={showLabels}
showTime={showTime}
@ -138,6 +142,7 @@ class UnThemedLogRows extends PureComponent<Props, State> {
getRows={getRows}
getRowContext={getRowContext}
row={row}
showContextToggle={showContextToggle}
showDuplicates={showDuplicates}
showLabels={showLabels}
showTime={showTime}


@ -29,6 +29,7 @@ export interface QueryFieldProps {
onRunQuery?: () => void;
onBlur?: () => void;
onChange?: (value: string) => void;
onClick?: (event: Event, editor: CoreEditor, next: () => any) => any;
onTypeahead?: (typeahead: TypeaheadInput) => Promise<TypeaheadOutput>;
onWillApplySuggestion?: (suggestion: string, state: SuggestionsState) => string;
placeholder?: string;
@ -167,6 +168,7 @@ export class QueryField extends React.PureComponent<QueryFieldProps, QueryFieldS
*/
handleBlur = (event: Event, editor: CoreEditor, next: Function) => {
const { onBlur } = this.props;
if (onBlur) {
onBlur();
} else {
@ -196,6 +198,7 @@ export class QueryField extends React.PureComponent<QueryFieldProps, QueryFieldS
autoCorrect={false}
readOnly={this.props.disabled}
onBlur={this.handleBlur}
onClick={this.props.onClick}
// onKeyDown={this.onKeyDown}
onChange={(change: { value: Value }) => {
this.onChange(change.value, false);


@ -91,6 +91,7 @@ export function SelectBase<T>({
allowCustomValue = false,
autoFocus = false,
backspaceRemovesValue = true,
cacheOptions,
className,
closeMenuOnSelect = true,
components,
@ -106,6 +107,7 @@ export function SelectBase<T>({
isLoading = false,
isMulti = false,
isOpen,
isOptionDisabled,
isSearchable = true,
loadOptions,
loadingMessage = 'Loading options...',
@ -183,6 +185,7 @@ export function SelectBase<T>({
isDisabled: disabled,
isLoading,
isMulti,
isOptionDisabled,
isSearchable,
maxMenuHeight,
maxVisibleValues,
@ -217,6 +220,7 @@ export function SelectBase<T>({
ReactSelectComponent = allowCustomValue ? AsyncCreatable : ReactAsyncSelect;
asyncSelectProps = {
loadOptions,
cacheOptions,
defaultOptions,
};
}
@ -337,6 +341,10 @@ export function SelectBase<T>({
position: 'relative',
width: width ? `${8 * width}px` : '100%',
}),
option: (provided: any, state: any) => ({
...provided,
opacity: state.isDisabled ? 0.5 : 1,
}),
}}
className={className}
{...commonSelectProps}


@ -19,6 +19,7 @@ export interface SelectCommonProps<T> {
getOptionLabel?: (item: SelectableValue<T>) => string;
getOptionValue?: (item: SelectableValue<T>) => string;
inputValue?: string;
invalid?: boolean;
isClearable?: boolean;
isLoading?: boolean;
isMulti?: boolean;
@ -51,6 +52,7 @@ export interface SelectCommonProps<T> {
value?: SelectValue<T>;
/** Sets the width to a multiple of 8px. Should only be used with inline forms. Setting width of the container is preferred in other cases.*/
width?: number;
isOptionDisabled?: () => boolean;
}
export interface SelectAsyncProps<T> {
@ -58,6 +60,8 @@ export interface SelectAsyncProps<T> {
defaultOptions?: boolean | Array<SelectableValue<T>>;
/** Asynchronously load select options */
loadOptions?: (query: string) => Promise<Array<SelectableValue<T>>>;
/** If cacheOptions is true, then the loaded data will be cached. The cache will remain until cacheOptions changes value. */
cacheOptions?: boolean;
/** Message to display when options are loading */
loadingMessage?: string;
}
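
The cacheOptions async prop documented above, together with the new isOptionDisabled prop, is passed straight through to react-select in SelectBase. A usage sketch, assuming these props are exposed on @grafana/ui's AsyncSelect; the loader, the endpoint and the cap wiring are illustrative (the cap itself comes from the "Only allow up to 20 log groups to be selected" commit message):

import React from 'react';
import { SelectableValue } from '@grafana/data';
import { AsyncSelect } from '@grafana/ui';

// Hypothetical loader; the real CloudWatch editor goes through the 'DescribeLogGroups' log action.
const loadLogGroups = async (prefix: string): Promise<Array<SelectableValue<string>>> => {
  const names: string[] = await fetch(`/api/log-groups?prefix=${encodeURIComponent(prefix)}`).then(r => r.json());
  return names.map(name => ({ label: name, value: name }));
};

interface Props {
  selected: Array<SelectableValue<string>>;
  onChange: (value: SelectableValue<string>) => void;
}

export const LogGroupPicker = ({ selected, onChange }: Props) => (
  <AsyncSelect
    isMulti
    // cacheOptions keeps previously loaded options until the prop changes value;
    // defaultOptions triggers an initial load when the select mounts.
    cacheOptions
    defaultOptions
    loadOptions={loadLogGroups}
    // Greying out further options once 20 groups are picked is one way the
    // 20-log-group limit could be enforced.
    isOptionDisabled={() => selected.length >= 20}
    value={selected}
    onChange={onChange}
  />
);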


@ -8,6 +8,7 @@ const getStyles = (theme: GrafanaTheme, height: number, visible: boolean) => {
return {
typeaheadItem: css`
label: type-ahead-item;
z-index: 11;
padding: ${theme.spacing.sm} ${theme.spacing.sm} ${theme.spacing.sm} ${theme.spacing.md};
border-radius: ${theme.border.radius.md};
border: ${selectThemeVariant(


@ -28,7 +28,7 @@ const getStyles = (theme: GrafanaTheme) => ({
font-size: ${theme.typography.size.sm};
text-overflow: ellipsis;
overflow: hidden;
z-index: 1;
z-index: 11;
display: block;
white-space: nowrap;
cursor: pointer;


@ -5,5 +5,5 @@ export { IndentationPlugin } from './indentation';
export { NewlinePlugin } from './newline';
export { RunnerPlugin } from './runner';
export { SelectionShortcutsPlugin } from './selection_shortcuts';
export { SlatePrism } from './slate-prism';
export { SlatePrism, Token } from './slate-prism';
export { SuggestionsPlugin } from './suggestions';


@ -4,6 +4,18 @@ import { Plugin } from '@grafana/slate-react';
import Options, { OptionsFormat } from './options';
import TOKEN_MARK from './TOKEN_MARK';
export interface Token {
content: string;
offsets?: {
start: number;
end: number;
};
types: string[];
aliases: string[];
prev?: Token | null;
next?: Token | null;
}
/**
* A Slate plugin to highlight code syntax.
*/
@ -15,7 +27,25 @@ export function SlatePrism(optsParam: OptionsFormat = {}): Plugin {
if (!opts.onlyIn(node)) {
return next();
}
return decorateNode(opts, Block.create(node as Block));
const block = Block.create(node as Block);
const grammarName = opts.getSyntax(block);
const grammar = Prism.languages[grammarName];
if (!grammar) {
// Grammar not loaded
return [];
}
// Tokenize the whole block text
const texts = block.getTexts();
const blockText = texts.map(text => text && text.getText()).join('\n');
const tokens = Prism.tokenize(blockText, grammar);
const flattened = flattenTokens(tokens);
// @ts-ignore
editor.setData({ tokens: flattened });
return decorateNode(opts, tokens, block);
},
renderDecoration: (props, editor, next) =>
@ -33,18 +63,8 @@ export function SlatePrism(optsParam: OptionsFormat = {}): Plugin {
/**
* Returns the decoration for a node
*/
function decorateNode(opts: Options, block: Block) {
const grammarName = opts.getSyntax(block);
const grammar = Prism.languages[grammarName];
if (!grammar) {
// Grammar not loaded
return [];
}
// Tokenize the whole block text
function decorateNode(opts: Options, tokens: Array<string | Prism.Token>, block: Block) {
const texts = block.getTexts();
const blockText = texts.map(text => text && text.getText()).join('\n');
const tokens = Prism.tokenize(blockText, grammar);
// The list of decorations to return
const decorations: Decoration[] = [];
@ -67,13 +87,17 @@ function decorateNode(opts: Options, block: Block) {
className: `prism-token token ${accu}`,
block,
});
if (decoration) {
decorations.push(decoration);
}
}
offset += token.length;
} else {
accu = `${accu} ${token.type} ${token.alias || ''}`;
accu = `${accu} ${token.type}`;
if (token.alias) {
accu += ' ' + token.alias;
}
if (typeof token.content === 'string') {
const decoration = createDecoration({
@ -85,6 +109,7 @@ function decorateNode(opts: Options, block: Block) {
className: `prism-token token ${accu}`,
block,
});
if (decoration) {
decorations.push(decoration);
}
@ -158,3 +183,71 @@ function createDecoration({
return myDec;
}
function flattenToken(token: string | Prism.Token | Array<string | Prism.Token>): Token[] {
if (typeof token === 'string') {
return [
{
content: token,
types: [],
aliases: [],
},
];
} else if (Array.isArray(token)) {
return token.flatMap(t => flattenToken(t));
} else if (token instanceof Prism.Token) {
return flattenToken(token.content).flatMap(t => {
let aliases: string[] = [];
if (typeof token.alias === 'string') {
aliases = [token.alias];
} else {
aliases = token.alias ?? [];
}
return {
content: t.content,
types: [token.type, ...t.types],
aliases: [...aliases, ...t.aliases],
};
});
}
return [];
}
export function flattenTokens(token: string | Prism.Token | Array<string | Prism.Token>) {
const tokens = flattenToken(token);
if (!tokens.length) {
return [];
}
const firstToken = tokens[0];
firstToken.prev = null;
firstToken.next = tokens.length >= 2 ? tokens[1] : null;
firstToken.offsets = {
start: 0,
end: firstToken.content.length,
};
for (let i = 1; i < tokens.length - 1; i++) {
tokens[i].prev = tokens[i - 1];
tokens[i].next = tokens[i + 1];
tokens[i].offsets = {
start: tokens[i - 1].offsets!.end,
end: tokens[i - 1].offsets!.end + tokens[i].content.length,
};
}
const lastToken = tokens[tokens.length - 1];
lastToken.prev = tokens.length >= 2 ? tokens[tokens.length - 2] : null;
lastToken.next = null;
lastToken.offsets = {
start: tokens.length >= 2 ? tokens[tokens.length - 2].offsets!.end : 0,
end:
tokens.length >= 2 ? tokens[tokens.length - 2].offsets!.end + lastToken.content.length : lastToken.content.length,
};
return tokens;
}
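
To show the shape of the flattened token list, a small sketch that drives flattenTokens with Prism's built-in JavaScript grammar (the relative import path is illustrative):

import Prism from 'prismjs';
import { flattenTokens } from './slate-prism';

const raw = Prism.tokenize('const limit = 20;', Prism.languages.javascript);
const tokens = flattenTokens(raw);

// Each flattened token carries its text, the chain of Prism types and aliases it was
// nested under, absolute start/end offsets within the block, and prev/next links.
for (const token of tokens) {
  console.log(token.content, token.types, token.offsets);
}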


@ -217,14 +217,16 @@ const handleTypeahead = async (
// Get decorations associated with the current line
const parentBlock = value.document.getClosestBlock(value.focusBlock.key);
const myOffset = value.selection.start.offset - 1;
const selectionStartOffset = value.selection.start.offset - 1;
const decorations = parentBlock && parentBlock.getDecorations(editor as any);
const filteredDecorations = decorations
? decorations
.filter(
decoration =>
decoration!.start.offset <= myOffset && decoration!.end.offset > myOffset && decoration!.type === TOKEN_MARK
decoration!.start.offset <= selectionStartOffset &&
decoration!.end.offset > selectionStartOffset &&
decoration!.type === TOKEN_MARK
)
.toArray()
: [];
@ -235,7 +237,7 @@ const handleTypeahead = async (
decorations
.filter(
decoration =>
decoration!.end.offset <= myOffset &&
decoration!.end.offset <= selectionStartOffset &&
decoration!.type === TOKEN_MARK &&
decoration!.data.get('className').includes('label-key')
)
@ -272,6 +274,7 @@ const handleTypeahead = async (
value,
wrapperClasses,
labelKey: labelKey || undefined,
editor,
});
const filteredSuggestions = suggestions
@ -280,28 +283,29 @@ const handleTypeahead = async (
return group;
}
let newGroup = { ...group };
if (prefix) {
// Filter groups based on prefix
if (!group.skipFilter) {
group.items = group.items.filter(c => (c.filterText || c.label).length >= prefix.length);
newGroup.items = newGroup.items.filter(c => (c.filterText || c.label).length >= prefix.length);
if (group.prefixMatch) {
group.items = group.items.filter(c => (c.filterText || c.label).startsWith(prefix));
newGroup.items = newGroup.items.filter(c => (c.filterText || c.label).startsWith(prefix));
} else {
group.items = group.items.filter(c => (c.filterText || c.label).includes(prefix));
newGroup.items = newGroup.items.filter(c => (c.filterText || c.label).includes(prefix));
}
}
// Filter out the already typed value (prefix) unless it inserts custom text
group.items = group.items.filter(c => c.insertText || (c.filterText || c.label) !== prefix);
newGroup.items = newGroup.items.filter(c => c.insertText || (c.filterText || c.label) !== prefix);
}
if (!group.skipSort) {
group.items = sortBy(group.items, (item: CompletionItem) => item.sortText || item.label);
newGroup.items = sortBy(newGroup.items, (item: CompletionItem) => item.sortText || item.label);
}
return group;
return newGroup;
})
.filter(group => group.items && group.items.length); // Filter out empty groups
.filter(gr => gr.items && gr.items.length); // Filter out empty groups
onStateChange({
groupedItems: filteredSuggestions,


@ -1,5 +1,4 @@
import { Value } from 'slate';
import { Editor } from '@grafana/slate-react';
import { Value, Editor as CoreEditor } from 'slate';
export interface CompletionItemGroup {
/**
@ -98,7 +97,7 @@ export interface TypeaheadInput {
wrapperClasses: string[];
labelKey?: string;
value?: Value;
editor?: Editor;
editor?: CoreEditor;
}
export interface SuggestionsState {


@ -52,7 +52,7 @@ func (c *QueryCondition) Eval(context *alerting.EvalContext) (*alerting.Conditio
return nil, err
}
emptySerieCount := 0
emptySeriesCount := 0
evalMatchCount := 0
var matches []*alerting.EvalMatch
@ -61,7 +61,7 @@ func (c *QueryCondition) Eval(context *alerting.EvalContext) (*alerting.Conditio
evalMatch := c.Evaluator.Eval(reducedValue)
if !reducedValue.Valid {
emptySerieCount++
emptySeriesCount++
}
if context.IsTestRun {
@ -100,7 +100,7 @@ func (c *QueryCondition) Eval(context *alerting.EvalContext) (*alerting.Conditio
return &alerting.ConditionResult{
Firing: evalMatchCount > 0,
NoDataFound: emptySerieCount == len(seriesList),
NoDataFound: emptySeriesCount == len(seriesList),
Operator: c.Operator,
EvalMatches: matches,
}, nil
@ -224,6 +224,9 @@ func (c *QueryCondition) getRequestForAlertRule(datasource *models.DataSource, t
DataSource: datasource,
},
},
Headers: map[string]string{
"FromAlert": "true",
},
Debug: debug,
}


@ -2,10 +2,17 @@ package cloudwatch
import (
"context"
"fmt"
"regexp"
"strconv"
"sync"
"time"
"github.com/aws/aws-sdk-go/service/cloudwatchlogs"
"github.com/aws/aws-sdk-go/service/ec2/ec2iface"
"github.com/aws/aws-sdk-go/service/resourcegroupstaggingapi/resourcegroupstaggingapiiface"
"github.com/grafana/grafana-plugin-sdk-go/data"
"github.com/grafana/grafana/pkg/components/simplejson"
"github.com/grafana/grafana/pkg/infra/log"
"github.com/grafana/grafana/pkg/models"
"github.com/grafana/grafana/pkg/tsdb"
@ -15,6 +22,9 @@ type CloudWatchExecutor struct {
*models.DataSource
ec2Svc ec2iface.EC2API
rgtaSvc resourcegroupstaggingapiiface.ResourceGroupsTaggingAPIAPI
logsClientsByRegion map[string](*cloudwatchlogs.CloudWatchLogs)
mux sync.Mutex
}
type DatasourceInfo struct {
@ -28,8 +38,43 @@ type DatasourceInfo struct {
SecretKey string
}
func NewCloudWatchExecutor(dsInfo *models.DataSource) (tsdb.TsdbQueryEndpoint, error) {
return &CloudWatchExecutor{}, nil
const CLOUDWATCH_TS_FORMAT = "2006-01-02 15:04:05.000"
func (e *CloudWatchExecutor) getLogsClient(region string) (*cloudwatchlogs.CloudWatchLogs, error) {
e.mux.Lock()
defer e.mux.Unlock()
if logsClient, ok := e.logsClientsByRegion[region]; ok {
return logsClient, nil
}
dsInfo := retrieveDsInfo(e.DataSource, region)
newLogsClient, err := retrieveLogsClient(dsInfo)
if err != nil {
return nil, err
}
e.logsClientsByRegion[region] = newLogsClient
return newLogsClient, nil
}
func NewCloudWatchExecutor(datasource *models.DataSource) (tsdb.TsdbQueryEndpoint, error) {
dsInfo := retrieveDsInfo(datasource, "default")
defaultLogsClient, err := retrieveLogsClient(dsInfo)
if err != nil {
return nil, err
}
logsClientsByRegion := make(map[string](*cloudwatchlogs.CloudWatchLogs))
logsClientsByRegion[dsInfo.Region] = defaultLogsClient
logsClientsByRegion["default"] = defaultLogsClient
return &CloudWatchExecutor{
logsClientsByRegion: logsClientsByRegion,
}, nil
}
var (
@ -43,10 +88,60 @@ func init() {
aliasFormat = regexp.MustCompile(`\{\{\s*(.+?)\s*\}\}`)
}
func (e *CloudWatchExecutor) alertQuery(ctx context.Context, logsClient *cloudwatchlogs.CloudWatchLogs, queryContext *tsdb.TsdbQuery) (*cloudwatchlogs.GetQueryResultsOutput, error) {
const maxAttempts = 8
const pollPeriod = 1000 * time.Millisecond
queryParams := queryContext.Queries[0].Model
startQueryOutput, err := e.executeStartQuery(ctx, logsClient, queryParams, queryContext.TimeRange)
if err != nil {
return nil, err
}
requestParams := simplejson.NewFromAny(map[string]interface{}{
"region": queryParams.Get("region").MustString(""),
"queryId": *startQueryOutput.QueryId,
})
ticker := time.NewTicker(pollPeriod)
defer ticker.Stop()
attemptCount := 1
for range ticker.C {
if res, err := e.executeGetQueryResults(ctx, logsClient, requestParams); err != nil {
return nil, err
} else if isTerminated(*res.Status) {
return res, err
} else if attemptCount >= maxAttempts {
return res, fmt.Errorf("fetching of query results exceeded max number of attempts")
}
attemptCount++
}
return nil, nil
}
func (e *CloudWatchExecutor) Query(ctx context.Context, dsInfo *models.DataSource, queryContext *tsdb.TsdbQuery) (*tsdb.Response, error) {
var result *tsdb.Response
e.DataSource = dsInfo
queryType := queryContext.Queries[0].Model.Get("type").MustString("")
/*
Unlike many other data sources, with Cloudwatch Logs query requests don't receive the results as the response to the query, but rather
an ID is first returned. Following this, a client is expected to send requests along with the ID until the status of the query is complete,
receiving (possibly partial) results each time. For queries made via dashboards and Explore, the logic of making these repeated queries is handled on
the frontend, but because alerts are executed on the backend the logic needs to be reimplemented here.
*/
queryParams := queryContext.Queries[0].Model
_, fromAlert := queryContext.Headers["FromAlert"]
isLogAlertQuery := fromAlert && queryParams.Get("mode").MustString("") == "Logs"
if isLogAlertQuery {
return e.executeLogAlertQuery(ctx, queryContext)
}
queryType := queryParams.Get("type").MustString("")
var err error
switch queryType {
@ -54,6 +149,8 @@ func (e *CloudWatchExecutor) Query(ctx context.Context, dsInfo *models.DataSourc
result, err = e.executeMetricFindQuery(ctx, queryContext)
case "annotationQuery":
result, err = e.executeAnnotationQuery(ctx, queryContext)
case "logAction":
result, err = e.executeLogActions(ctx, queryContext)
case "timeSeriesQuery":
fallthrough
default:
@ -62,3 +159,108 @@ func (e *CloudWatchExecutor) Query(ctx context.Context, dsInfo *models.DataSourc
return result, err
}
func (e *CloudWatchExecutor) executeLogAlertQuery(ctx context.Context, queryContext *tsdb.TsdbQuery) (*tsdb.Response, error) {
queryParams := queryContext.Queries[0].Model
queryParams.Set("subtype", "StartQuery")
queryParams.Set("queryString", queryParams.Get("expression").MustString(""))
region := queryParams.Get("region").MustString("default")
if region == "default" {
region = e.DataSource.JsonData.Get("defaultRegion").MustString()
queryParams.Set("region", region)
}
logsClient, err := e.getLogsClient(region)
if err != nil {
return nil, err
}
result, err := e.executeStartQuery(ctx, logsClient, queryParams, queryContext.TimeRange)
if err != nil {
return nil, err
}
queryParams.Set("queryId", *result.QueryId)
// Get Query Results
getQueryResultsOutput, err := e.alertQuery(ctx, logsClient, queryContext)
if err != nil {
return nil, err
}
dataframe, err := queryResultsToDataframe(getQueryResultsOutput)
if err != nil {
return nil, err
}
dataframeEnc, err := dataframe.MarshalArrow()
if err != nil {
return nil, err
}
response := &tsdb.Response{
Results: make(map[string]*tsdb.QueryResult),
}
response.Results["A"] = &tsdb.QueryResult{
RefId: "A",
Dataframes: [][]byte{dataframeEnc},
}
return response, nil
}
func queryResultsToDataframe(results *cloudwatchlogs.GetQueryResultsOutput) (*data.Frame, error) {
rowCount := len(results.Results)
fieldValues := make(map[string]interface{})
for i, row := range results.Results {
for _, resultField := range row {
// Strip @ptr field from results as it's not needed
if *resultField.Field == "@ptr" {
continue
}
if _, exists := fieldValues[*resultField.Field]; !exists {
if _, err := time.Parse(CLOUDWATCH_TS_FORMAT, *resultField.Value); err == nil {
fieldValues[*resultField.Field] = make([]*time.Time, rowCount)
} else if _, err := strconv.ParseFloat(*resultField.Value, 64); err == nil {
fieldValues[*resultField.Field] = make([]*float64, rowCount)
} else {
continue
}
}
if timeField, ok := fieldValues[*resultField.Field].([]*time.Time); ok {
parsedTime, err := time.Parse(CLOUDWATCH_TS_FORMAT, *resultField.Value)
if err != nil {
return nil, err
}
timeField[i] = &parsedTime
} else if numericField, ok := fieldValues[*resultField.Field].([]*float64); ok {
parsedFloat, err := strconv.ParseFloat(*resultField.Value, 64)
if err != nil {
return nil, err
}
numericField[i] = &parsedFloat
}
}
}
newFields := make([]*data.Field, 0)
for fieldName, vals := range fieldValues {
newFields = append(newFields, data.NewField(fieldName, nil, vals))
if fieldName == "@timestamp" {
newFields[len(newFields)-1].SetConfig(&data.FieldConfig{Title: "Time"})
}
}
frame := data.NewFrame("CloudWatchLogsResponse", newFields...)
return frame, nil
}
func isTerminated(queryStatus string) bool {
return queryStatus == "Complete" || queryStatus == "Cancelled" || queryStatus == "Failed" || queryStatus == "Timeout"
}
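
The comment in Query above describes the asynchronous CloudWatch Logs flow: StartQuery only returns a query ID, and results are fetched by polling GetQueryResults until the status is terminal. On the backend that loop is alertQuery; on the frontend it looks roughly like the following sketch (runLogAction is a hypothetical helper standing in for whatever issues the 'logAction' requests this commit adds; the attempt count, poll interval and terminal statuses mirror the Go code above):

// Hypothetical helper that sends a { type: 'logAction', subtype, ...params } query to the datasource.
declare function runLogAction<T>(subtype: string, params: Record<string, unknown>): Promise<T>;

const TERMINAL_STATUSES = ['Complete', 'Cancelled', 'Failed', 'Timeout'];

async function runLogsQuery(queryString: string, logGroupNames: string[], region: string): Promise<unknown[]> {
  // 1. Kick off the query; only an ID comes back.
  const { queryId } = await runLogAction<{ queryId: string }>('StartQuery', { queryString, logGroupNames, region });

  // 2. Poll GetQueryResults until CloudWatch reports a terminal status.
  for (let attempt = 0; attempt < 8; attempt++) {
    const result = await runLogAction<{ status: string; rows: unknown[] }>('GetQueryResults', { queryId, region });
    if (TERMINAL_STATUSES.includes(result.status)) {
      return result.rows;
    }
    await new Promise(resolve => setTimeout(resolve, 1000));
  }

  throw new Error('fetching of query results exceeded max number of attempts');
}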


@ -16,7 +16,9 @@ import (
"github.com/aws/aws-sdk-go/aws/request"
"github.com/aws/aws-sdk-go/aws/session"
"github.com/aws/aws-sdk-go/service/cloudwatch"
"github.com/aws/aws-sdk-go/service/cloudwatchlogs"
"github.com/aws/aws-sdk-go/service/sts"
"github.com/grafana/grafana/pkg/models"
"github.com/grafana/grafana/pkg/setting"
)
@ -41,7 +43,7 @@ func GetCredentials(dsInfo *DatasourceInfo) (*credentials.Credentials, error) {
}
credentialCacheLock.RUnlock()
accessKeyId := ""
accessKeyID := ""
secretAccessKey := ""
sessionToken := ""
var expiration *time.Time = nil
@ -78,7 +80,7 @@ func GetCredentials(dsInfo *DatasourceInfo) (*credentials.Credentials, error) {
return nil, err
}
if resp.Credentials != nil {
accessKeyId = *resp.Credentials.AccessKeyId
accessKeyID = *resp.Credentials.AccessKeyId
secretAccessKey = *resp.Credentials.SecretAccessKey
sessionToken = *resp.Credentials.SessionToken
expiration = resp.Credentials.Expiration
@ -96,7 +98,7 @@ func GetCredentials(dsInfo *DatasourceInfo) (*credentials.Credentials, error) {
creds := credentials.NewChainCredentials(
[]credentials.Provider{
&credentials.StaticProvider{Value: credentials.Value{
AccessKeyID: accessKeyId,
AccessKeyID: accessKeyID,
SecretAccessKey: secretAccessKey,
SessionToken: sessionToken,
}},
@ -154,20 +156,24 @@ func ec2RoleProvider(sess *session.Session) credentials.Provider {
}
func (e *CloudWatchExecutor) getDsInfo(region string) *DatasourceInfo {
defaultRegion := e.DataSource.JsonData.Get("defaultRegion").MustString()
return retrieveDsInfo(e.DataSource, region)
}
func retrieveDsInfo(datasource *models.DataSource, region string) *DatasourceInfo {
defaultRegion := datasource.JsonData.Get("defaultRegion").MustString()
if region == "default" {
region = defaultRegion
}
authType := e.DataSource.JsonData.Get("authType").MustString()
assumeRoleArn := e.DataSource.JsonData.Get("assumeRoleArn").MustString()
decrypted := e.DataSource.DecryptedValues()
authType := datasource.JsonData.Get("authType").MustString()
assumeRoleArn := datasource.JsonData.Get("assumeRoleArn").MustString()
decrypted := datasource.DecryptedValues()
accessKey := decrypted["accessKey"]
secretKey := decrypted["secretKey"]
datasourceInfo := &DatasourceInfo{
Region: region,
Profile: e.DataSource.Database,
Profile: datasource.Database,
AuthType: authType,
AssumeRoleArn: assumeRoleArn,
AccessKey: accessKey,
@ -177,7 +183,7 @@ func (e *CloudWatchExecutor) getDsInfo(region string) *DatasourceInfo {
return datasourceInfo
}
func (e *CloudWatchExecutor) getAwsConfig(dsInfo *DatasourceInfo) (*aws.Config, error) {
func getAwsConfig(dsInfo *DatasourceInfo) (*aws.Config, error) {
creds, err := GetCredentials(dsInfo)
if err != nil {
return nil, err
@ -193,7 +199,7 @@ func (e *CloudWatchExecutor) getAwsConfig(dsInfo *DatasourceInfo) (*aws.Config,
func (e *CloudWatchExecutor) getClient(region string) (*cloudwatch.CloudWatch, error) {
datasourceInfo := e.getDsInfo(region)
cfg, err := e.getAwsConfig(datasourceInfo)
cfg, err := getAwsConfig(datasourceInfo)
if err != nil {
return nil, err
}
@ -211,3 +217,23 @@ func (e *CloudWatchExecutor) getClient(region string) (*cloudwatch.CloudWatch, e
return client, nil
}
func retrieveLogsClient(datasourceInfo *DatasourceInfo) (*cloudwatchlogs.CloudWatchLogs, error) {
cfg, err := getAwsConfig(datasourceInfo)
if err != nil {
return nil, err
}
sess, err := session.NewSession(cfg)
if err != nil {
return nil, err
}
client := cloudwatchlogs.New(sess, cfg)
client.Handlers.Send.PushFront(func(r *request.Request) {
r.HTTPRequest.Header.Set("User-Agent", fmt.Sprintf("Grafana/%s", setting.BuildVersion))
})
return client, nil
}


@ -0,0 +1,307 @@
package cloudwatch
import (
"context"
"fmt"
"sort"
"github.com/aws/aws-sdk-go/aws"
"github.com/aws/aws-sdk-go/aws/awserr"
"github.com/aws/aws-sdk-go/service/cloudwatchlogs"
"github.com/aws/aws-sdk-go/service/cloudwatchlogs/cloudwatchlogsiface"
"github.com/grafana/grafana-plugin-sdk-go/data"
"github.com/grafana/grafana/pkg/components/simplejson"
"github.com/grafana/grafana/pkg/tsdb"
"github.com/grafana/grafana/pkg/util/errutil"
"golang.org/x/sync/errgroup"
)
func (e *CloudWatchExecutor) executeLogActions(ctx context.Context, queryContext *tsdb.TsdbQuery) (*tsdb.Response, error) {
resultChan := make(chan *tsdb.QueryResult, len(queryContext.Queries))
eg, ectx := errgroup.WithContext(ctx)
for _, query := range queryContext.Queries {
query := query
eg.Go(func() error {
dataframe, err := e.executeLogAction(ectx, queryContext, query)
if err != nil {
return err
}
dataframeEnc, err := dataframe.MarshalArrow()
if err != nil {
return err
}
resultChan <- &tsdb.QueryResult{RefId: query.RefId, Dataframes: [][]byte{dataframeEnc}}
return nil
})
}
if err := eg.Wait(); err != nil {
return nil, err
}
close(resultChan)
response := &tsdb.Response{
Results: make(map[string]*tsdb.QueryResult),
}
for result := range resultChan {
response.Results[result.RefId] = result
}
return response, nil
}
func (e *CloudWatchExecutor) executeLogAction(ctx context.Context, queryContext *tsdb.TsdbQuery, query *tsdb.Query) (*data.Frame, error) {
parameters := query.Model
subType := query.Model.Get("subtype").MustString()
defaultRegion := e.DataSource.JsonData.Get("defaultRegion").MustString()
region := parameters.Get("region").MustString(defaultRegion)
logsClient, err := e.getLogsClient(region)
if err != nil {
return nil, err
}
var data *data.Frame = nil
switch subType {
case "DescribeLogGroups":
data, err = e.handleDescribeLogGroups(ctx, logsClient, parameters)
case "GetLogGroupFields":
data, err = e.handleGetLogGroupFields(ctx, logsClient, parameters, query.RefId)
case "StartQuery":
data, err = e.handleStartQuery(ctx, logsClient, parameters, queryContext.TimeRange, query.RefId)
case "StopQuery":
data, err = e.handleStopQuery(ctx, logsClient, parameters)
case "GetQueryResults":
data, err = e.handleGetQueryResults(ctx, logsClient, parameters, query.RefId)
case "GetLogEvents":
data, err = e.handleGetLogEvents(ctx, logsClient, parameters)
}
if err != nil {
return nil, err
}
return data, nil
}
func (e *CloudWatchExecutor) handleGetLogEvents(ctx context.Context, logsClient cloudwatchlogsiface.CloudWatchLogsAPI, parameters *simplejson.Json) (*data.Frame, error) {
queryRequest := &cloudwatchlogs.GetLogEventsInput{
Limit: aws.Int64(parameters.Get("limit").MustInt64(10)),
StartFromHead: aws.Bool(parameters.Get("startFromHead").MustBool(false)),
}
logGroupName, err := parameters.Get("logGroupName").String()
if err != nil {
return nil, fmt.Errorf("Error: Parameter 'logGroupName' is required")
}
queryRequest.SetLogGroupName(logGroupName)
logStreamName, err := parameters.Get("logStreamName").String()
if err != nil {
return nil, fmt.Errorf("Error: Parameter 'logStream' is required")
}
queryRequest.SetLogStreamName(logStreamName)
if startTime, err := parameters.Get("startTime").Int64(); err == nil {
queryRequest.SetStartTime(startTime)
}
if endTime, err := parameters.Get("endTime").Int64(); err == nil {
queryRequest.SetEndTime(endTime)
}
logEvents, err := logsClient.GetLogEventsWithContext(ctx, queryRequest)
if err != nil {
return nil, errutil.Wrap(err.(awserr.Error).Message(), err)
}
messages := make([]*string, 0)
timestamps := make([]*int64, 0)
sort.Slice(logEvents.Events, func(i, j int) bool {
return *(logEvents.Events[i].Timestamp) > *(logEvents.Events[j].Timestamp)
})
for _, event := range logEvents.Events {
messages = append(messages, event.Message)
timestamps = append(timestamps, event.Timestamp)
}
timestampField := data.NewField("ts", nil, timestamps)
timestampField.SetConfig(&data.FieldConfig{Title: "Time"})
messageField := data.NewField("line", nil, messages)
return data.NewFrame("logEvents", timestampField, messageField), nil
}
func (e *CloudWatchExecutor) handleDescribeLogGroups(ctx context.Context, logsClient cloudwatchlogsiface.CloudWatchLogsAPI, parameters *simplejson.Json) (*data.Frame, error) {
logGroupNamePrefix := parameters.Get("logGroupNamePrefix").MustString("")
var response *cloudwatchlogs.DescribeLogGroupsOutput = nil
var err error
if len(logGroupNamePrefix) < 1 {
response, err = logsClient.DescribeLogGroupsWithContext(ctx, &cloudwatchlogs.DescribeLogGroupsInput{
Limit: aws.Int64(parameters.Get("limit").MustInt64(50)),
})
} else {
response, err = logsClient.DescribeLogGroupsWithContext(ctx, &cloudwatchlogs.DescribeLogGroupsInput{
Limit: aws.Int64(parameters.Get("limit").MustInt64(50)),
LogGroupNamePrefix: aws.String(logGroupNamePrefix),
})
}
if err != nil || response == nil {
return nil, errutil.Wrap(err.(awserr.Error).Message(), err)
}
logGroupNames := make([]*string, 0)
for _, logGroup := range response.LogGroups {
logGroupNames = append(logGroupNames, logGroup.LogGroupName)
}
groupNamesField := data.NewField("logGroupName", nil, logGroupNames)
frame := data.NewFrame("logGroups", groupNamesField)
return frame, nil
}
func (e *CloudWatchExecutor) executeStartQuery(ctx context.Context, logsClient cloudwatchlogsiface.CloudWatchLogsAPI, parameters *simplejson.Json, timeRange *tsdb.TimeRange) (*cloudwatchlogs.StartQueryOutput, error) {
startTime, err := timeRange.ParseFrom()
if err != nil {
return nil, err
}
endTime, err := timeRange.ParseTo()
if err != nil {
return nil, err
}
if !startTime.Before(endTime) {
return nil, fmt.Errorf("invalid time range: Start time must be before end time")
}
startQueryInput := &cloudwatchlogs.StartQueryInput{
StartTime: aws.Int64(startTime.Unix()),
EndTime: aws.Int64(endTime.Unix()),
Limit: aws.Int64(parameters.Get("limit").MustInt64(1000)),
LogGroupNames: aws.StringSlice(parameters.Get("logGroupNames").MustStringArray()),
QueryString: aws.String("fields @timestamp,@log,@logStream|" + parameters.Get("queryString").MustString("")),
}
return logsClient.StartQueryWithContext(ctx, startQueryInput)
}
func (e *CloudWatchExecutor) handleStartQuery(ctx context.Context, logsClient cloudwatchlogsiface.CloudWatchLogsAPI, parameters *simplejson.Json, timeRange *tsdb.TimeRange, refID string) (*data.Frame, error) {
startQueryResponse, err := e.executeStartQuery(ctx, logsClient, parameters, timeRange)
if err != nil {
return nil, errutil.Wrap(err.(awserr.Error).Message(), err)
}
dataFrame := data.NewFrame(refID, data.NewField("queryId", nil, []string{*startQueryResponse.QueryId}))
dataFrame.RefID = refID
clientRegion := parameters.Get("region").MustString("default")
dataFrame.Meta = &data.FrameMeta{
Custom: map[string]interface{}{
"Region": clientRegion,
},
}
return dataFrame, nil
}
func (e *CloudWatchExecutor) executeStopQuery(ctx context.Context, logsClient cloudwatchlogsiface.CloudWatchLogsAPI, parameters *simplejson.Json) (*cloudwatchlogs.StopQueryOutput, error) {
queryInput := &cloudwatchlogs.StopQueryInput{
QueryId: aws.String(parameters.Get("queryId").MustString()),
}
response, err := logsClient.StopQueryWithContext(ctx, queryInput)
if err != nil {
awsErr, ok := err.(awserr.Error)
// If the query has already stopped by the time CloudWatch receives the stop query request,
// an "InvalidParameterException" error is returned. For our purposes, though, the query has
// been stopped, so we ignore the error.
if ok && awsErr.Code() == "InvalidParameterException" {
response = &cloudwatchlogs.StopQueryOutput{Success: aws.Bool(false)}
err = nil
} else if ok {
err = errutil.Wrap(awsErr.Message(), err)
}
}
return response, err
}
func (e *CloudWatchExecutor) handleStopQuery(ctx context.Context, logsClient cloudwatchlogsiface.CloudWatchLogsAPI, parameters *simplejson.Json) (*data.Frame, error) {
response, err := e.executeStopQuery(ctx, logsClient, parameters)
if err != nil {
// executeStopQuery already wraps AWS errors, so return the error as-is rather than
// re-asserting it as an awserr.Error (which would panic on the wrapped value).
return nil, err
}
dataFrame := data.NewFrame("StopQueryResponse", data.NewField("success", nil, []bool{*response.Success}))
return dataFrame, nil
}
func (e *CloudWatchExecutor) executeGetQueryResults(ctx context.Context, logsClient cloudwatchlogsiface.CloudWatchLogsAPI, parameters *simplejson.Json) (*cloudwatchlogs.GetQueryResultsOutput, error) {
queryInput := &cloudwatchlogs.GetQueryResultsInput{
QueryId: aws.String(parameters.Get("queryId").MustString()),
}
return logsClient.GetQueryResultsWithContext(ctx, queryInput)
}
func (e *CloudWatchExecutor) handleGetQueryResults(ctx context.Context, logsClient cloudwatchlogsiface.CloudWatchLogsAPI, parameters *simplejson.Json, refID string) (*data.Frame, error) {
getQueryResultsOutput, err := e.executeGetQueryResults(ctx, logsClient, parameters)
if err != nil {
if awsErr, ok := err.(awserr.Error); ok {
return nil, errutil.Wrap(awsErr.Message(), err)
}
return nil, err
}
dataFrame, err := logsResultsToDataframes(getQueryResultsOutput)
if err != nil {
return nil, err
}
dataFrame.Name = refID
dataFrame.RefID = refID
return dataFrame, nil
}
func (e *CloudWatchExecutor) handleGetLogGroupFields(ctx context.Context, logsClient cloudwatchlogsiface.CloudWatchLogsAPI, parameters *simplejson.Json, refID string) (*data.Frame, error) {
queryInput := &cloudwatchlogs.GetLogGroupFieldsInput{
LogGroupName: aws.String(parameters.Get("logGroupName").MustString()),
Time: aws.Int64(parameters.Get("time").MustInt64()),
}
getLogGroupFieldsOutput, err := logsClient.GetLogGroupFieldsWithContext(ctx, queryInput)
if err != nil {
if awsErr, ok := err.(awserr.Error); ok {
return nil, errutil.Wrap(awsErr.Message(), err)
}
return nil, err
}
fieldNames := make([]*string, 0)
fieldPercentages := make([]*int64, 0)
for _, logGroupField := range getLogGroupFieldsOutput.LogGroupFields {
fieldNames = append(fieldNames, logGroupField.Name)
fieldPercentages = append(fieldPercentages, logGroupField.Percent)
}
dataFrame := data.NewFrame(
refID,
data.NewField("name", nil, fieldNames),
data.NewField("percent", nil, fieldPercentages),
)
dataFrame.RefID = refID
return dataFrame, nil
}

View File

@ -0,0 +1,218 @@
package cloudwatch
import (
"context"
"fmt"
"testing"
"time"
"github.com/aws/aws-sdk-go/aws"
"github.com/aws/aws-sdk-go/service/cloudwatchlogs"
"github.com/grafana/grafana-plugin-sdk-go/data"
"github.com/grafana/grafana/pkg/components/simplejson"
"github.com/grafana/grafana/pkg/tsdb"
"github.com/stretchr/testify/assert"
)
//***
// LogActions Tests
//***
func TestHandleDescribeLogGroups_WhenLogGroupNamePrefixIsEmpty(t *testing.T) {
executor := &CloudWatchExecutor{}
logsClient := &FakeLogsClient{
Config: aws.Config{
Region: aws.String("default"),
},
}
params := simplejson.NewFromAny(map[string]interface{}{
"limit": 50,
})
frame, err := executor.handleDescribeLogGroups(context.Background(), logsClient, params)
expectedField := data.NewField("logGroupName", nil, []*string{aws.String("group_a"), aws.String("group_b"), aws.String("group_c")})
expectedFrame := data.NewFrame("logGroups", expectedField)
assert.Equal(t, nil, err)
assert.Equal(t, expectedFrame, frame)
}
func TestHandleDescribeLogGroups_WhenLogGroupNamePrefixIsNotEmpty(t *testing.T) {
executor := &CloudWatchExecutor{}
logsClient := &FakeLogsClient{
Config: aws.Config{
Region: aws.String("default"),
},
}
params := simplejson.NewFromAny(map[string]interface{}{
"logGroupNamePrefix": "g",
})
frame, err := executor.handleDescribeLogGroups(context.Background(), logsClient, params)
expectedField := data.NewField("logGroupName", nil, []*string{aws.String("group_a"), aws.String("group_b"), aws.String("group_c")})
expectedFrame := data.NewFrame("logGroups", expectedField)
assert.Equal(t, nil, err)
assert.Equal(t, expectedFrame, frame)
}
func TestHandleGetLogGroupFields_WhenLogGroupNamePrefixIsNotEmpty(t *testing.T) {
executor := &CloudWatchExecutor{}
logsClient := &FakeLogsClient{
Config: aws.Config{
Region: aws.String("default"),
},
}
params := simplejson.NewFromAny(map[string]interface{}{
"logGroupName": "group_a",
"limit": 50,
})
frame, err := executor.handleGetLogGroupFields(context.Background(), logsClient, params, "A")
expectedNameField := data.NewField("name", nil, []*string{aws.String("field_a"), aws.String("field_b"), aws.String("field_c")})
expectedPercentField := data.NewField("percent", nil, []*int64{aws.Int64(100), aws.Int64(30), aws.Int64(55)})
expectedFrame := data.NewFrame("A", expectedNameField, expectedPercentField)
expectedFrame.RefID = "A"
assert.Equal(t, nil, err)
assert.Equal(t, expectedFrame, frame)
}
func TestExecuteStartQuery(t *testing.T) {
executor := &CloudWatchExecutor{}
logsClient := &FakeLogsClient{
Config: aws.Config{
Region: aws.String("default"),
},
}
timeRange := &tsdb.TimeRange{
From: "1584873443000",
To: "1584700643000",
}
params := simplejson.NewFromAny(map[string]interface{}{
"region": "default",
"limit": 50,
"queryString": "fields @message",
})
response, err := executor.executeStartQuery(context.Background(), logsClient, params, timeRange)
var expectedResponse *cloudwatchlogs.StartQueryOutput = nil
assert.Equal(t, expectedResponse, response)
assert.Equal(t, fmt.Errorf("invalid time range: Start time must be before end time"), err)
}
func TestHandleStartQuery(t *testing.T) {
executor := &CloudWatchExecutor{}
logsClient := &FakeLogsClient{
Config: aws.Config{
Region: aws.String("default"),
},
}
timeRange := &tsdb.TimeRange{
From: "1584700643000",
To: "1584873443000",
}
params := simplejson.NewFromAny(map[string]interface{}{
"region": "default",
"limit": 50,
"queryString": "fields @message",
})
frame, err := executor.handleStartQuery(context.Background(), logsClient, params, timeRange, "A")
expectedField := data.NewField("queryId", nil, []string{"abcd-efgh-ijkl-mnop"})
expectedFrame := data.NewFrame("A", expectedField)
expectedFrame.RefID = "A"
expectedFrame.Meta = &data.FrameMeta{
Custom: map[string]interface{}{
"Region": "default",
},
}
assert.Equal(t, nil, err)
assert.Equal(t, expectedFrame, frame)
}
func TestHandleStopQuery(t *testing.T) {
executor := &CloudWatchExecutor{}
logsClient := &FakeLogsClient{
Config: aws.Config{
Region: aws.String("default"),
},
}
params := simplejson.NewFromAny(map[string]interface{}{
"queryId": "abcd-efgh-ijkl-mnop",
})
frame, err := executor.handleStopQuery(context.Background(), logsClient, params)
expectedField := data.NewField("success", nil, []bool{true})
expectedFrame := data.NewFrame("StopQueryResponse", expectedField)
assert.Equal(t, nil, err)
assert.Equal(t, expectedFrame, frame)
}
func TestHandleGetQueryResults(t *testing.T) {
executor := &CloudWatchExecutor{}
logsClient := &FakeLogsClient{
Config: aws.Config{
Region: aws.String("default"),
},
}
params := simplejson.NewFromAny(map[string]interface{}{
"queryId": "abcd-efgh-ijkl-mnop",
})
frame, err := executor.handleGetQueryResults(context.Background(), logsClient, params, "A")
timeA, _ := time.Parse("2006-01-02 15:04:05.000", "2020-03-20 10:37:23.000")
timeB, _ := time.Parse("2006-01-02 15:04:05.000", "2020-03-20 10:40:43.000")
expectedTimeField := data.NewField("@timestamp", nil, []*time.Time{
aws.Time(timeA), aws.Time(timeB),
})
expectedTimeField.SetConfig(&data.FieldConfig{Title: "Time"})
expectedFieldB := data.NewField("field_b", nil, []*string{
aws.String("b_1"), aws.String("b_2"),
})
expectedFrame := data.NewFrame("A", expectedTimeField, expectedFieldB)
expectedFrame.RefID = "A"
expectedFrame.Meta = &data.FrameMeta{
Custom: map[string]interface{}{
"Status": "Complete",
"Statistics": cloudwatchlogs.QueryStatistics{
BytesScanned: aws.Float64(512),
RecordsMatched: aws.Float64(256),
RecordsScanned: aws.Float64(1024),
},
},
}
assert.Equal(t, nil, err)
assert.ElementsMatch(t, expectedFrame.Fields, frame.Fields)
assert.Equal(t, expectedFrame.Meta, frame.Meta)
}

View File

@ -0,0 +1,81 @@
package cloudwatch
import (
"time"
"github.com/aws/aws-sdk-go/service/cloudwatchlogs"
"github.com/grafana/grafana-plugin-sdk-go/data"
)
func logsResultsToDataframes(response *cloudwatchlogs.GetQueryResultsOutput) (*data.Frame, error) {
rowCount := len(response.Results)
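// fieldValues collects one column per returned field name; each column is either a
// []*time.Time or a []*string sized to the number of result rows.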
fieldValues := make(map[string]interface{})
for i, row := range response.Results {
for _, resultField := range row {
// Strip @ptr field from results as it's not needed
if *resultField.Field == "@ptr" {
continue
}
if *resultField.Field == "@timestamp" {
if _, exists := fieldValues[*resultField.Field]; !exists {
fieldValues[*resultField.Field] = make([]*time.Time, rowCount)
}
parsedTime, err := time.Parse(CLOUDWATCH_TS_FORMAT, *resultField.Value)
if err != nil {
return nil, err
}
fieldValues[*resultField.Field].([]*time.Time)[i] = &parsedTime
} else {
if _, exists := fieldValues[*resultField.Field]; !exists {
// Sniff the column type from the first value seen: if it parses as a CloudWatch timestamp,
// treat the whole field as a time column.
if _, err := time.Parse(CLOUDWATCH_TS_FORMAT, *resultField.Value); err == nil {
fieldValues[*resultField.Field] = make([]*time.Time, rowCount)
} else {
fieldValues[*resultField.Field] = make([]*string, rowCount)
}
}
if timeField, ok := fieldValues[*resultField.Field].([]*time.Time); ok {
parsedTime, err := time.Parse(CLOUDWATCH_TS_FORMAT, *resultField.Value)
if err != nil {
return nil, err
}
timeField[i] = &parsedTime
} else {
fieldValues[*resultField.Field].([]*string)[i] = resultField.Value
}
}
}
}
newFields := make([]*data.Field, 0)
for fieldName, vals := range fieldValues {
newFields = append(newFields, data.NewField(fieldName, nil, vals))
if fieldName == "@timestamp" {
newFields[len(newFields)-1].SetConfig(&data.FieldConfig{Title: "Time"})
} else if fieldName == "@logStream" || fieldName == "@log" {
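// Keep @log and @logStream in the frame but flag them as hidden so they are not
// rendered as regular columns.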
newFields[len(newFields)-1].SetConfig(
&data.FieldConfig{
Custom: map[string]interface{}{
"Hidden": true,
},
},
)
}
}
frame := data.NewFrame("CloudWatchLogsResponse", newFields...)
frame.Meta = &data.FrameMeta{
Custom: map[string]interface{}{
"Status": *response.Status,
"Statistics": *response.Statistics,
},
}
return frame, nil
}

View File

@ -0,0 +1,161 @@
package cloudwatch
import (
"testing"
"time"
"github.com/aws/aws-sdk-go/aws"
"github.com/aws/aws-sdk-go/service/cloudwatchlogs"
"github.com/grafana/grafana-plugin-sdk-go/data"
"github.com/stretchr/testify/assert"
)
//***
// LogQuery tests
//***
func TestLogsResultsToDataframes(t *testing.T) {
fakeCloudwatchResponse := &cloudwatchlogs.GetQueryResultsOutput{
Results: [][]*cloudwatchlogs.ResultField{
{
&cloudwatchlogs.ResultField{
Field: aws.String("@ptr"),
Value: aws.String("fake ptr"),
},
&cloudwatchlogs.ResultField{
Field: aws.String("@timestamp"),
Value: aws.String("2020-03-02 15:04:05.000"),
},
&cloudwatchlogs.ResultField{
Field: aws.String("line"),
Value: aws.String("test message 1"),
},
&cloudwatchlogs.ResultField{
Field: aws.String("@logStream"),
Value: aws.String("fakelogstream"),
},
&cloudwatchlogs.ResultField{
Field: aws.String("@log"),
Value: aws.String("fakelog"),
},
},
{
&cloudwatchlogs.ResultField{
Field: aws.String("@ptr"),
Value: aws.String("fake ptr"),
},
&cloudwatchlogs.ResultField{
Field: aws.String("@timestamp"),
Value: aws.String("2020-03-02 16:04:05.000"),
},
&cloudwatchlogs.ResultField{
Field: aws.String("line"),
Value: aws.String("test message 2"),
},
&cloudwatchlogs.ResultField{
Field: aws.String("@logStream"),
Value: aws.String("fakelogstream"),
},
&cloudwatchlogs.ResultField{
Field: aws.String("@log"),
Value: aws.String("fakelog"),
},
},
{
&cloudwatchlogs.ResultField{
Field: aws.String("@ptr"),
Value: aws.String("fake ptr"),
},
&cloudwatchlogs.ResultField{
Field: aws.String("@timestamp"),
Value: aws.String("2020-03-02 17:04:05.000"),
},
&cloudwatchlogs.ResultField{
Field: aws.String("line"),
Value: aws.String("test message 3"),
},
&cloudwatchlogs.ResultField{
Field: aws.String("@logStream"),
Value: aws.String("fakelogstream"),
},
&cloudwatchlogs.ResultField{
Field: aws.String("@log"),
Value: aws.String("fakelog"),
},
},
},
Status: aws.String("ok"),
Statistics: &cloudwatchlogs.QueryStatistics{
BytesScanned: aws.Float64(2000),
RecordsMatched: aws.Float64(3),
RecordsScanned: aws.Float64(5000),
},
}
dataframes, _ := logsResultsToDataframes(fakeCloudwatchResponse)
timeA, _ := time.Parse("2006-01-02 15:04:05.000", "2020-03-02 15:04:05.000")
timeB, _ := time.Parse("2006-01-02 15:04:05.000", "2020-03-02 16:04:05.000")
timeC, _ := time.Parse("2006-01-02 15:04:05.000", "2020-03-02 17:04:05.000")
timeVals := []*time.Time{
&timeA, &timeB, &timeC,
}
timeField := data.NewField("@timestamp", nil, timeVals)
timeField.SetConfig(&data.FieldConfig{Title: "Time"})
lineField := data.NewField("line", nil, []*string{
aws.String("test message 1"),
aws.String("test message 2"),
aws.String("test message 3"),
})
logStreamField := data.NewField("@logStream", nil, []*string{
aws.String("fakelogstream"),
aws.String("fakelogstream"),
aws.String("fakelogstream"),
})
logStreamField.SetConfig(&data.FieldConfig{
Custom: map[string]interface{}{
"Hidden": true,
},
})
logField := data.NewField("@log", nil, []*string{
aws.String("fakelog"),
aws.String("fakelog"),
aws.String("fakelog"),
})
logField.SetConfig(&data.FieldConfig{
Custom: map[string]interface{}{
"Hidden": true,
},
})
expectedDataframe := &data.Frame{
Name: "CloudWatchLogsResponse",
Fields: []*data.Field{
timeField,
lineField,
logStreamField,
logField,
},
RefID: "",
Meta: &data.FrameMeta{
Custom: map[string]interface{}{
"Status": "ok",
"Statistics": cloudwatchlogs.QueryStatistics{
BytesScanned: aws.Float64(2000),
RecordsMatched: aws.Float64(3),
RecordsScanned: aws.Float64(5000),
},
},
},
}
// Splitting these assertions up so it's clearer what's wrong should the test
// fail in the future
assert.Equal(t, expectedDataframe.Name, dataframes.Name)
assert.Equal(t, expectedDataframe.RefID, dataframes.RefID)
assert.Equal(t, expectedDataframe.Meta, dataframes.Meta)
assert.ElementsMatch(t, expectedDataframe.Fields, dataframes.Fields)
}

View File

@ -0,0 +1,106 @@
package cloudwatch
import (
"context"
"github.com/aws/aws-sdk-go/aws"
"github.com/aws/aws-sdk-go/aws/request"
"github.com/aws/aws-sdk-go/service/cloudwatchlogs"
"github.com/aws/aws-sdk-go/service/cloudwatchlogs/cloudwatchlogsiface"
)
type FakeLogsClient struct {
cloudwatchlogsiface.CloudWatchLogsAPI
Config aws.Config
}
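// FakeLogsClient embeds the CloudWatchLogsAPI interface, so only the methods exercised
// by the tests need concrete implementations here.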
func (f FakeLogsClient) DescribeLogGroupsWithContext(ctx context.Context, input *cloudwatchlogs.DescribeLogGroupsInput, option ...request.Option) (*cloudwatchlogs.DescribeLogGroupsOutput, error) {
return &cloudwatchlogs.DescribeLogGroupsOutput{
LogGroups: []*cloudwatchlogs.LogGroup{
{
LogGroupName: aws.String("group_a"),
},
{
LogGroupName: aws.String("group_b"),
},
{
LogGroupName: aws.String("group_c"),
},
},
}, nil
}
func (f FakeLogsClient) GetLogGroupFieldsWithContext(ctx context.Context, input *cloudwatchlogs.GetLogGroupFieldsInput, option ...request.Option) (*cloudwatchlogs.GetLogGroupFieldsOutput, error) {
return &cloudwatchlogs.GetLogGroupFieldsOutput{
LogGroupFields: []*cloudwatchlogs.LogGroupField{
{
Name: aws.String("field_a"),
Percent: aws.Int64(100),
},
{
Name: aws.String("field_b"),
Percent: aws.Int64(30),
},
{
Name: aws.String("field_c"),
Percent: aws.Int64(55),
},
},
}, nil
}
func (f FakeLogsClient) StartQueryWithContext(ctx context.Context, input *cloudwatchlogs.StartQueryInput, option ...request.Option) (*cloudwatchlogs.StartQueryOutput, error) {
return &cloudwatchlogs.StartQueryOutput{
QueryId: aws.String("abcd-efgh-ijkl-mnop"),
}, nil
}
func (f FakeLogsClient) StopQueryWithContext(ctx context.Context, input *cloudwatchlogs.StopQueryInput, option ...request.Option) (*cloudwatchlogs.StopQueryOutput, error) {
return &cloudwatchlogs.StopQueryOutput{
Success: aws.Bool(true),
}, nil
}
func (f FakeLogsClient) GetQueryResultsWithContext(ctx context.Context, input *cloudwatchlogs.GetQueryResultsInput, option ...request.Option) (*cloudwatchlogs.GetQueryResultsOutput, error) {
return &cloudwatchlogs.GetQueryResultsOutput{
Results: [][]*cloudwatchlogs.ResultField{
{
{
Field: aws.String("@timestamp"),
Value: aws.String("2020-03-20 10:37:23.000"),
},
{
Field: aws.String("field_b"),
Value: aws.String("b_1"),
},
{
Field: aws.String("@ptr"),
Value: aws.String("abcdefg"),
},
},
{
{
Field: aws.String("@timestamp"),
Value: aws.String("2020-03-20 10:40:43.000"),
},
{
Field: aws.String("field_b"),
Value: aws.String("b_2"),
},
{
Field: aws.String("@ptr"),
Value: aws.String("hijklmnop"),
},
},
},
Statistics: &cloudwatchlogs.QueryStatistics{
BytesScanned: aws.Float64(512),
RecordsMatched: aws.Float64(256),
RecordsScanned: aws.Float64(1024),
},
Status: aws.String("Complete"),
}, nil
}

View File

@ -469,7 +469,7 @@ func (e *CloudWatchExecutor) handleGetDimensionValues(ctx context.Context, param
func (e *CloudWatchExecutor) ensureClientSession(region string) error {
if e.ec2Svc == nil {
dsInfo := e.getDsInfo(region)
cfg, err := e.getAwsConfig(dsInfo)
cfg, err := getAwsConfig(dsInfo)
if err != nil {
return fmt.Errorf("Failed to call ec2:getAwsConfig, %v", err)
}
@ -595,7 +595,7 @@ func (e *CloudWatchExecutor) handleGetEc2InstanceAttribute(ctx context.Context,
func (e *CloudWatchExecutor) ensureRGTAClientSession(region string) error {
if e.rgtaSvc == nil {
dsInfo := e.getDsInfo(region)
cfg, err := e.getAwsConfig(dsInfo)
cfg, err := getAwsConfig(dsInfo)
if err != nil {
return fmt.Errorf("Failed to call ec2:getAwsConfig, %v", err)
}

View File

@ -14,7 +14,7 @@ import (
"github.com/grafana/grafana/pkg/tsdb"
)
// Parses the json queries and returns a requestQuery. The requstQuery has a 1 to 1 mapping to a query editor row
// Parses the json queries and returns a requestQuery. The requestQuery has a 1 to 1 mapping to a query editor row
func (e *CloudWatchExecutor) parseQueries(queryContext *tsdb.TsdbQuery, startTime time.Time, endTime time.Time) (map[string][]*requestQuery, error) {
requestQueries := make(map[string][]*requestQuery)

View File

@ -10,6 +10,7 @@ import (
type TsdbQuery struct {
TimeRange *TimeRange
Queries []*Query
Headers map[string]string
Debug bool
User *models.SignedInUser
}

View File

@ -16,7 +16,6 @@ import {
LogsMetaKind,
LogsDedupStrategy,
GraphSeriesXY,
dateTime,
toUtc,
NullValueMode,
toDataFrame,
@ -26,6 +25,7 @@ import {
TimeZone,
getDisplayProcessor,
textUtil,
dateTime,
} from '@grafana/data';
import { getThemeColor } from 'app/core/utils/colors';
@ -232,17 +232,19 @@ export function dataFrameToLogsModel(
};
}
function separateLogsAndMetrics(dataFrame: DataFrame[]) {
function separateLogsAndMetrics(dataFrames: DataFrame[]) {
const metricSeries: DataFrame[] = [];
const logSeries: DataFrame[] = [];
for (const series of dataFrame) {
if (isLogsData(series)) {
logSeries.push(series);
for (const dataFrame of dataFrames) {
if (isLogsData(dataFrame)) {
logSeries.push(dataFrame);
continue;
}
metricSeries.push(series);
if (dataFrame.length > 0) {
metricSeries.push(dataFrame);
}
}
return { logSeries, metricSeries };
@ -273,9 +275,9 @@ export function logSeriesToLogsModel(logSeries: DataFrame[]): LogsModel | undefi
const allSeries: LogFields[] = logSeries.map(series => {
const fieldCache = new FieldCache(series);
// Assume the first string field in the dataFrame is the message. This was right so far but probably needs some
// more explicit checks.
const stringField = fieldCache.getFirstFieldOfType(FieldType.string);
const stringField = fieldCache.hasFieldNamed('line')
? fieldCache.getFieldByName('line')
: fieldCache.getFirstFieldOfType(FieldType.string);
if (stringField?.labels) {
allLabels.push(stringField.labels);
}

View File

@ -239,8 +239,9 @@ export function parseUrlState(initial: string | undefined): ExploreUrlState {
};
const datasource = parsed[ParseUrlStateIndex.Datasource];
const parsedSegments = parsed.slice(ParseUrlStateIndex.SegmentsStart);
const metricProperties = ['expr', 'target', 'datasource', 'query'];
const metricProperties = ['expr', 'expression', 'target', 'datasource', 'query'];
const queries = parsedSegments.filter(segment => isSegment(segment, ...metricProperties));
const modeObj = parsedSegments.filter(segment => isSegment(segment, 'mode'))[0];
const mode = modeObj ? modeObj.mode : ExploreMode.Metrics;

View File

@ -4,6 +4,11 @@ import { AlertRuleDTO, AlertRulesState } from 'app/types';
import { reducerTester } from '../../../../test/core/redux/reducerTester';
describe('Alert rules', () => {
const realDateNow = Date.now.bind(global.Date);
const anchorUnix = dateTime('2019-09-04T10:01:01+02:00').valueOf();
const dateNowStub = jest.fn(() => anchorUnix);
global.Date.now = dateNowStub;
const newStateDate = dateTime().subtract(1, 'y');
const newStateDateFormatted = newStateDate.format('YYYY-MM-DD');
const newStateDateAge = newStateDate.fromNow(true);
@ -82,6 +87,10 @@ describe('Alert rules', () => {
},
];
afterAll(() => {
global.Date.now = realDateNow;
});
describe('when loadAlertRules is dispatched', () => {
it('then state should be correct', () => {
reducerTester<AlertRulesState>()

View File

@ -8,6 +8,7 @@ import memoizeOne from 'memoize-one';
// Services & Utils
import store from 'app/core/store';
import config from 'app/core/config';
// Components
import { ErrorBoundaryAlert, stylesFactory, withTheme } from '@grafana/ui';
@ -310,6 +311,10 @@ export class Explore extends React.PureComponent<ExploreProps, ExploreState> {
const StartPage = datasourceInstance?.components?.ExploreStartPage;
const showStartPage = !queryResponse || queryResponse.state === LoadingState.NotStarted;
// TEMP: Remove for 7.0
const cloudwatchLogsDisabled =
datasourceInstance?.meta?.id === 'cloudwatch' && !config.featureToggles.cloudwatchLogs;
// gets an error without a refID, so non-query-row-related error, like a connection error
const queryErrors = queryResponse.error ? [queryResponse.error] : undefined;
const queryError = getFirstNonQueryRowSpecificError(queryErrors);
@ -372,7 +377,7 @@ export class Explore extends React.PureComponent<ExploreProps, ExploreState> {
{mode === ExploreMode.Metrics && (
<TableContainer width={width} exploreId={exploreId} onClickCell={this.onClickFilterLabel} />
)}
{mode === ExploreMode.Logs && (
{mode === ExploreMode.Logs && !cloudwatchLogsDisabled && (
<LogsContainer
width={width}
exploreId={exploreId}

View File

@ -8,7 +8,15 @@ import { css } from 'emotion';
import { ExploreId, ExploreItemState } from 'app/types/explore';
import { ToggleButtonGroup, ToggleButton, Tooltip, LegacyForms, SetInterval, Icon, IconButton } from '@grafana/ui';
const { ButtonSelect } = LegacyForms;
import { RawTimeRange, TimeZone, TimeRange, DataQuery, ExploreMode } from '@grafana/data';
import {
RawTimeRange,
TimeZone,
TimeRange,
DataQuery,
ExploreMode,
DataSourceApi,
DataSourceJsonData,
} from '@grafana/data';
import { DataSourcePicker } from 'app/core/components/Select/DataSourcePicker';
import { StoreState } from 'app/types/store';
import {
@ -33,6 +41,7 @@ import { RunButton } from './RunButton';
import { LiveTailControls } from './useLiveTailControls';
import { getExploreDatasources } from './state/selectors';
import { setDashboardQueriesToUpdateOnLoad } from '../dashboard/state/reducers';
import { config } from '@grafana/runtime';
const getStyles = memoizeOne(() => {
return {
@ -68,6 +77,7 @@ interface StateProps {
datasourceLoading?: boolean;
containerWidth: number;
datasourceName?: string;
datasourceInstance: DataSourceApi<DataQuery, DataSourceJsonData>;
}
interface DispatchProps {
@ -178,6 +188,7 @@ export class UnConnectedExploreToolbar extends PureComponent<Props> {
isPaused,
originPanelId,
datasourceLoading,
datasourceInstance,
containerWidth,
} = this.props;
@ -191,6 +202,12 @@ export class UnConnectedExploreToolbar extends PureComponent<Props> {
const showSmallDataSourcePicker = (splitted ? containerWidth < 700 : containerWidth < 800) || false;
const showSmallTimePicker = splitted || containerWidth < 1210;
// TEMP: Remove for 7.0
const cloudwatchLogsDisabled =
datasourceInstance?.meta?.id === 'cloudwatch' && !config.featureToggles.cloudwatchLogs;
const showModeToggle = supportedModes.length > 1 && !cloudwatchLogsDisabled;
return (
<div className={splitted ? 'explore-toolbar splitted' : 'explore-toolbar'}>
<div className="explore-toolbar-item">
@ -233,7 +250,7 @@ export class UnConnectedExploreToolbar extends PureComponent<Props> {
hideTextValue={showSmallDataSourcePicker}
/>
</div>
{supportedModes.length > 1 ? (
{showModeToggle ? (
<div className="query-type-toggle">
<ToggleButtonGroup label="" transparent={true}>
<ToggleButton
@ -378,6 +395,7 @@ const mapStateToProps = (state: StoreState, { exploreId }: OwnProps): StateProps
return {
datasourceMissing,
datasourceName: datasourceInstance?.name,
datasourceInstance,
loading,
range,
timeZone: getTimeZone(state.user),

View File

@ -21,6 +21,7 @@ import store from 'app/core/store';
import { ExploreGraphPanel } from './ExploreGraphPanel';
import { MetaInfoText } from './MetaInfoText';
import { RowContextOptions } from '@grafana/ui/src/components/Logs/LogRowContextProvider';
const SETTINGS_KEYS = {
showLabels: 'grafana.explore.logs.showLabels',
@ -53,6 +54,7 @@ interface Props {
scanning?: boolean;
scanRange?: RawTimeRange;
dedupStrategy: LogsDedupStrategy;
showContextToggle?: (row?: LogRowModel) => boolean;
onChangeTime: (range: AbsoluteTimeRange) => void;
onClickFilterLabel?: (key: string, value: string) => void;
onClickFilterOutLabel?: (key: string, value: string) => void;
@ -60,7 +62,7 @@ interface Props {
onStopScanning?: () => void;
onDedupStrategyChange: (dedupStrategy: LogsDedupStrategy) => void;
onToggleLogLevel: (hiddenLogLevels: LogLevel[]) => void;
getRowContext?: (row: LogRowModel, options?: any) => Promise<any>;
getRowContext?: (row: LogRowModel, options?: RowContextOptions) => Promise<any>;
getFieldLinks: (field: Field, rowIndex: number) => Array<LinkModel<Field>>;
}
@ -149,6 +151,7 @@ export class Logs extends PureComponent<Props, State> {
timeZone,
scanning,
scanRange,
showContextToggle,
width,
dedupedRows,
absoluteRange,
@ -240,6 +243,7 @@ export class Logs extends PureComponent<Props, State> {
rowLimit={logRows ? logRows.length : undefined}
onClickFilterLabel={onClickFilterLabel}
onClickFilterOutLabel={onClickFilterOutLabel}
showContextToggle={showContextToggle}
showLabels={showLabels}
showTime={showTime}
wrapLogMessage={wrapLogMessage}

View File

@ -153,6 +153,7 @@ export class LogsContainer extends PureComponent<LogsContainerProps> {
timeZone={timeZone}
scanning={scanning}
scanRange={range.raw}
showContextToggle={this.props.datasourceInstance?.showContextToggle}
width={width}
getRowContext={this.getLogRowContext}
getFieldLinks={this.getFieldLinks}

View File

@ -121,6 +121,7 @@ export class QueryRow extends PureComponent<QueryRowProps, QueryRowState> {
queryResponse,
mode,
latency,
exploreId,
} = this.props;
const canToggleEditorModes =
@ -152,6 +153,7 @@ export class QueryRow extends PureComponent<QueryRowProps, QueryRowState> {
data={queryResponse}
absoluteRange={absoluteRange}
exploreMode={mode}
exploreId={exploreId}
/>
) : (
<QueryEditor

View File

@ -9,7 +9,7 @@ export enum IconSide {
interface Props extends React.HTMLAttributes<HTMLDivElement> {
splitted: boolean;
title: string;
onClick: () => void;
onClick?: () => void;
buttonClassName?: string;
icon?: IconName;
iconClassName?: string;
@ -21,7 +21,7 @@ function formatBtnTitle(title: string, iconSide?: string): string {
return iconSide === IconSide.left ? '\xA0' + title : iconSide === IconSide.right ? title + '\xA0' : title;
}
export const ResponsiveButton = forwardRef<HTMLDivElement, Props>((props, ref) => {
export const ResponsiveButton = forwardRef<HTMLButtonElement, Props>((props, ref) => {
const defaultProps = {
iconSide: IconSide.left,
};
@ -40,10 +40,11 @@ export const ResponsiveButton = forwardRef<HTMLDivElement, Props>((props, ref) =
} = props;
return (
<div ref={ref} {...divElementProps}>
<div {...divElementProps}>
<button
ref={ref}
className={`btn navbar-button ${buttonClassName ? buttonClassName : ''}`}
onClick={onClick}
onClick={onClick ?? undefined}
disabled={disabled || false}
>
{icon && iconSide === IconSide.left ? <Icon name={icon} className={iconClassName} size="lg" /> : null}

View File

@ -239,6 +239,7 @@ export const itemReducer = (state: ExploreItemState = makeExploreItemState(), ac
if (cancelQueriesAction.match(action)) {
stopQueryState(state.querySubscription);
return {
...state,
loading: false,

View File

@ -1,17 +1,24 @@
jest.mock('@grafana/data/src/datetime/moment_wrapper', () => ({
dateTime: (ts: any) => {
return {
valueOf: () => ts,
fromNow: () => 'fromNow() jest mocked',
format: (fmt: string) => 'format() jest mocked',
};
},
toUtc: (ts: any) => {
return {
format: (fmt: string) => 'format() jest mocked',
};
},
}));
const realMomentWrapper = jest.requireActual('@grafana/data/src/datetime/moment_wrapper');
jest.mock('@grafana/data/src/datetime/moment_wrapper', () => {
const momentMock = {
dateTime: (ts: any) => {
return {
valueOf: () => ts,
fromNow: () => 'fromNow() jest mocked',
format: (fmt: string) => 'format() jest mocked',
};
},
toUtc: null as any,
isDateTime: realMomentWrapper.isDateTime,
};
momentMock.toUtc = (ts: any) => ({
format: (fmt: string) => 'format() jest mocked',
local: () => momentMock.dateTime(ts),
});
return momentMock;
});
import { ResultProcessor } from './ResultProcessor';
import { ExploreItemState } from 'app/types/explore';
@ -104,26 +111,24 @@ describe('ResultProcessor', () => {
const valueField = dataFrames[0].fields[1];
const theResult = resultProcessor.getGraphResult();
expect(theResult).toEqual([
{
label: 'A-series',
color: '#7EB26D',
data: [
[100, 4],
[200, 5],
[300, 6],
],
info: [],
isVisible: true,
yAxis: {
index: 1,
},
seriesIndex: 0,
timeField,
valueField,
timeStep: 100,
expect(theResult[0]).toEqual({
label: 'A-series',
color: '#7EB26D',
data: [
[100, 4],
[200, 5],
[300, 6],
],
info: [],
isVisible: true,
yAxis: {
index: 1,
},
]);
seriesIndex: 0,
timeField,
valueField,
timeStep: 100,
});
});
});
@ -131,6 +136,7 @@ describe('ResultProcessor', () => {
it('then it should return correct table result', () => {
const { resultProcessor } = testContext();
let theResult = resultProcessor.getTableResult();
expect(theResult?.fields[0].name).toEqual('value');
expect(theResult?.fields[1].name).toEqual('time');
expect(theResult?.fields[2].name).toEqual('message');

View File

@ -28,7 +28,7 @@ export class ResultProcessor {
return null;
}
const onlyTimeSeries = this.dataFrames.filter(isTimeSeries);
const onlyTimeSeries = this.dataFrames.filter(frame => isTimeSeries(frame, this.state.datasourceInstance?.meta.id));
if (onlyTimeSeries.length === 0) {
return null;
@ -112,7 +112,12 @@ export class ResultProcessor {
}
}
export function isTimeSeries(frame: DataFrame): boolean {
export function isTimeSeries(frame: DataFrame, datasource?: string): boolean {
// TEMP: Temporary hack. Remove when logs/metrics unification is done
if (datasource && datasource === 'cloudwatch') {
return isTimeSeriesCloudWatch(frame);
}
if (frame.fields.length === 2) {
if (frame.fields[0].type === FieldType.time) {
return true;
@ -121,3 +126,11 @@ export function isTimeSeries(frame: DataFrame): boolean {
return false;
}
// TEMP: Temporary hack. Remove when logs/metrics unification is done
export function isTimeSeriesCloudWatch(frame: DataFrame): boolean {
return (
frame.fields.some(field => field.type === FieldType.time) &&
frame.fields.some(field => field.type === FieldType.number)
);
}

View File

@ -0,0 +1,18 @@
const JSURL = require('jsurl');
export interface AwsUrl {
end: string;
start: string;
timeType?: 'ABSOLUTE' | 'RELATIVE';
tz?: 'local' | 'UTC';
unit?: string;
editorString: string;
isLiveTail: boolean;
source: string[];
}
export function encodeUrl(obj: AwsUrl, region: string): string {
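// The console escapes characters in the URL's hash fragment with '$' in place of '%',
// so the '$3F' and '$3D' below stand for '?' and '='.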
return `https://${region}.console.aws.amazon.com/cloudwatch/home?region=${region}#logsV2:logs-insights$3FqueryDetail$3D${JSURL.stringify(
obj
)}`;
}
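A minimal usage sketch (the log group names, query string and timestamps below are illustrative, not taken from the datasource): encodeUrl serializes the query state with jsurl and embeds it in a Logs Insights deep link for the given region.
// Illustrative only: build a console link for a query against two example log groups.
const exampleUrl = encodeUrl(
{
start: '2020-03-20T10:00:00.000Z',
end: '2020-03-20T11:00:00.000Z',
timeType: 'ABSOLUTE',
tz: 'UTC',
editorString: 'fields @timestamp, @message | sort @timestamp desc | limit 25',
isLiveTail: false,
source: ['/aws/lambda/example-function', '/ecs/example-service'],
},
'us-east-1'
);
// exampleUrl now points at the us-east-1 Logs Insights console with the query pre-filled.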

View File

@ -2,9 +2,9 @@ import React, { ChangeEvent } from 'react';
import { LegacyForms } from '@grafana/ui';
const { Switch } = LegacyForms;
import { PanelData } from '@grafana/data';
import { CloudWatchQuery, AnnotationQuery } from '../types';
import CloudWatchDatasource from '../datasource';
import { QueryField, QueryFieldsEditor } from './';
import { AnnotationQuery } from '../types';
import { CloudWatchDatasource } from '../datasource';
import { QueryField, PanelQueryEditor } from './';
export type Props = {
query: AnnotationQuery;
@ -17,11 +17,12 @@ export function AnnotationQueryEditor(props: React.PropsWithChildren<Props>) {
const { query, onChange } = props;
return (
<>
<QueryFieldsEditor
<PanelQueryEditor
{...props}
onChange={(editorQuery: CloudWatchQuery) => onChange({ ...query, ...editorQuery })}
hideWilcard
></QueryFieldsEditor>
onChange={(editorQuery: AnnotationQuery) => onChange({ ...query, ...editorQuery })}
onRunQuery={() => {}}
history={[]}
></PanelQueryEditor>
<div className="gf-form-inline">
<Switch
label="Enable Prefix Matching"

View File

@ -0,0 +1,61 @@
import _ from 'lodash';
import React, { Component } from 'react';
import { CloudWatchLogsQuery } from '../types';
import { PanelData } from '@grafana/data';
import { encodeUrl, AwsUrl } from '../aws_url';
import { CloudWatchDatasource } from '../datasource';
interface Props {
query: CloudWatchLogsQuery;
panelData: PanelData;
datasource: CloudWatchDatasource;
}
interface State {
href: string;
}
export default class CloudWatchLink extends Component<Props, State> {
state: State = { href: '' };
async componentDidUpdate(prevProps: Props) {
if (prevProps.panelData !== this.props.panelData && this.props.panelData.request) {
const href = this.getExternalLink();
this.setState({ href });
}
}
getExternalLink(): string {
const { query, panelData, datasource } = this.props;
const range = panelData?.request?.range;
if (!range) {
return '';
}
const start = range.from.toISOString();
const end = range.to.toISOString();
const urlProps: AwsUrl = {
end,
start,
timeType: 'ABSOLUTE',
tz: 'UTC',
editorString: query.expression,
isLiveTail: false,
source: query.logGroupNames,
};
return encodeUrl(urlProps, datasource.getActualRegion(query.region));
}
render() {
const { href } = this.state;
return (
<a href={href} target="_blank" rel="noopener">
<i className="fa fa-share-square-o" /> CloudWatch Logs Insights
</a>
);
}
}

View File

@ -0,0 +1,48 @@
import React, { PureComponent } from 'react';
import { ExploreQueryFieldProps } from '@grafana/data';
import { RadioButtonGroup } from '@grafana/ui';
import { CloudWatchQuery } from '../types';
import { CloudWatchDatasource } from '../datasource';
import LogsQueryEditor from './LogsQueryEditor';
import { MetricsQueryEditor } from './MetricsQueryEditor';
import { cx, css } from 'emotion';
export type Props = ExploreQueryFieldProps<CloudWatchDatasource, CloudWatchQuery>;
export class CombinedMetricsEditor extends PureComponent<Props> {
renderMetricsEditor() {
return <MetricsQueryEditor {...this.props} />;
}
renderLogsEditor() {
return <LogsQueryEditor {...this.props} />;
}
render() {
const { query } = this.props;
const apiMode = query.apiMode ?? query.queryMode ?? 'Metrics';
return (
<>
<div
className={cx(
css`
margin-bottom: 4px;
`
)}
>
<RadioButtonGroup
options={[
{ label: 'Metrics API', value: 'Metrics' },
{ label: 'Logs API', value: 'Logs' },
]}
value={apiMode}
onChange={(v: 'Metrics' | 'Logs') => this.props.onChange({ ...query, apiMode: v })}
/>
</div>
{apiMode === 'Metrics' ? this.renderMetricsEditor() : this.renderLogsEditor()}
</>
);
}
}

View File

@ -11,7 +11,7 @@ import {
} from '@grafana/data';
import { SelectableValue } from '@grafana/data';
import { getDatasourceSrv } from 'app/features/plugins/datasource_srv';
import CloudWatchDatasource from '../datasource';
import { CloudWatchDatasource } from '../datasource';
import { CloudWatchJsonData, CloudWatchSecureJsonData } from '../types';
import { CancelablePromise, makePromiseCancelable } from 'app/core/utils/CancelablePromise';
@ -36,7 +36,7 @@ export class ConfigEditor extends PureComponent<Props, State> {
};
}
loadRegionsPromise: CancelablePromise<any> = null;
loadRegionsPromise: CancelablePromise<any> | null = null;
componentDidMount() {
this.loadRegionsPromise = makePromiseCancelable(this.loadRegions());
@ -56,9 +56,7 @@ export class ConfigEditor extends PureComponent<Props, State> {
async loadRegions() {
await getDatasourceSrv()
.loadDatasource(this.props.options.name)
.then((ds: CloudWatchDatasource) => {
return ds.getRegions();
})
.then((ds: CloudWatchDatasource) => ds.getRegions())
.then(
(regions: any) => {
this.setState({
@ -100,12 +98,10 @@ export class ConfigEditor extends PureComponent<Props, State> {
];
this.setState({
regions: regions.map((region: string) => {
return {
value: region,
label: region,
};
}),
regions: regions.map((region: string) => ({
value: region,
label: region,
})),
});
// expected to fail when creating new datasource
@ -162,7 +158,7 @@ export class ConfigEditor extends PureComponent<Props, State> {
)}
{options.jsonData.authType === 'keys' && (
<div>
{options.secureJsonFields.accessKey ? (
{options.secureJsonFields?.accessKey ? (
<div className="gf-form-inline">
<div className="gf-form">
<InlineFormLabel className="width-14">Access Key ID</InlineFormLabel>
@ -194,7 +190,7 @@ export class ConfigEditor extends PureComponent<Props, State> {
</div>
</div>
)}
{options.secureJsonFields.secretKey ? (
{options.secureJsonFields?.secretKey ? (
<div className="gf-form-inline">
<div className="gf-form">
<InlineFormLabel className="width-14">Secret Access Key</InlineFormLabel>

View File

@ -0,0 +1,138 @@
import React, { PureComponent } from 'react';
import { stripIndent, stripIndents } from 'common-tags';
import { ExploreStartPageProps, DataQuery, ExploreMode } from '@grafana/data';
import Prism from 'prismjs';
import tokenizer from '../syntax';
import { flattenTokens } from '@grafana/ui/src/slate-plugins/slate-prism';
const CLIQ_EXAMPLES = [
{
title: 'View latency statistics for 5-minute intervals',
expr: stripIndents`filter @type = "REPORT" |
stats avg(@duration), max(@duration), min(@duration) by bin(5m)`,
},
{
title: 'Determine the amount of overprovisioned memory',
expr: stripIndent`
filter @type = "REPORT" |
stats max(@memorySize / 1024 / 1024) as provisonedMemoryMB,
min(@maxMemoryUsed / 1024 / 1024) as smallestMemoryRequestMB,
avg(@maxMemoryUsed / 1024 / 1024) as avgMemoryUsedMB,
max(@maxMemoryUsed / 1024 / 1024) as maxMemoryUsedMB,
provisonedMemoryMB - maxMemoryUsedMB as overProvisionedMB`,
},
{
title: 'Find the most expensive requests',
expr: stripIndents`filter @type = "REPORT" |
fields @requestId, @billedDuration | sort by @billedDuration desc`,
},
{
title: 'Average, min, and max byte transfers by source and destination IP addresses',
expr: `stats avg(bytes), min(bytes), max(bytes) by srcAddr, dstAddr`,
},
{
title: 'IP addresses using UDP transfer protocol',
expr: 'filter protocol=17 | stats count(*) by srcAddr',
},
{
title: 'Top 10 byte transfers by source and destination IP addresses',
expr: stripIndents`stats sum(bytes) as bytesTransferred by srcAddr, dstAddr |
sort bytesTransferred desc |
limit 10`,
},
{
title: 'Top 20 source IP addresses with highest number of rejected requests',
expr: stripIndents`filter action="REJECT" |
stats count(*) as numRejections by srcAddr |
sort numRejections desc |
limit 20`,
},
{
title: 'Number of log entries by service, event type, and region',
expr: 'stats count(*) by eventSource, eventName, awsRegion',
},
{
title: 'Number of log entries by region and EC2 event type',
expr: stripIndents`filter eventSource="ec2.amazonaws.com" |
stats count(*) as eventCount by eventName, awsRegion |
sort eventCount desc`,
},
{
title: 'Regions, usernames, and ARNs of newly created IAM users',
expr: stripIndents`filter eventName="CreateUser" |
fields awsRegion, requestParameters.userName, responseElements.user.arn`,
},
{
title: '25 most recently added log events',
expr: stripIndents`fields @timestamp, @message |
sort @timestamp desc |
limit 25`,
},
{
title: 'Number of exceptions logged every 5 minutes',
expr: stripIndents`filter @message like /Exception/ |
stats count(*) as exceptionCount by bin(5m) |
sort exceptionCount desc`,
},
{
title: 'List of log events that are not exceptions',
expr: 'fields @message | filter @message not like /Exception/',
},
];
function renderHighlightedMarkup(code: string, keyPrefix: string) {
const grammar = Prism.languages['cloudwatch'] ?? tokenizer;
const tokens = flattenTokens(Prism.tokenize(code, grammar));
const spans = tokens
.filter(token => typeof token !== 'string')
.map((token, i) => {
return (
<span
className={`prism-token token ${token.types.join(' ')} ${token.aliases.join(' ')}`}
key={`${keyPrefix}-token-${i}`}
>
{token.content}
</span>
);
});
return <div className="slate-query-field">{spans}</div>;
}
export default class LogsCheatSheet extends PureComponent<ExploreStartPageProps, { userExamples: string[] }> {
renderExpression(expr: string, keyPrefix: string) {
const { onClickExample } = this.props;
return (
<div
className="cheat-sheet-item__example"
key={expr}
onClick={e => onClickExample({ refId: 'A', expression: expr } as DataQuery)}
>
<pre>{renderHighlightedMarkup(expr, keyPrefix)}</pre>
</div>
);
}
renderLogsCheatSheet() {
return (
<div>
<h2>CloudWatch Logs Cheat Sheet</h2>
{CLIQ_EXAMPLES.map((item, i) => (
<div className="cheat-sheet-item" key={`item-${i}`}>
<div className="cheat-sheet-item__title">{item.title}</div>
{this.renderExpression(item.expr, `item-${i}`)}
</div>
))}
</div>
);
}
render() {
const { exploreMode } = this.props;
return exploreMode === ExploreMode.Logs && this.renderLogsCheatSheet();
}
}

View File

@ -0,0 +1,67 @@
// Libraries
import React, { memo } from 'react';
// Types
import { AbsoluteTimeRange, QueryEditorProps } from '@grafana/data';
import { FormLabel } from '@grafana/ui/src/components/FormLabel/FormLabel';
import { CloudWatchDatasource } from '../datasource';
import { CloudWatchLogsQuery, CloudWatchQuery } from '../types';
import { CloudWatchLogsQueryField } from './LogsQueryField';
import { useCloudWatchSyntax } from '../useCloudwatchSyntax';
import { CloudWatchLanguageProvider } from '../language_provider';
import CloudWatchLink from './CloudWatchLink';
import { css } from 'emotion';
type Props = QueryEditorProps<CloudWatchDatasource, CloudWatchQuery>;
const labelClass = css`
margin-left: 3px;
flex-grow: 0;
`;
export const CloudWatchLogsQueryEditor = memo(function CloudWatchLogsQueryEditor(props: Props) {
const { query, data, datasource, onRunQuery, onChange, exploreId, exploreMode } = props;
let absolute: AbsoluteTimeRange;
if (data?.request?.range?.from) {
const { range } = data.request;
absolute = {
from: range.from.valueOf(),
to: range.to.valueOf(),
};
} else {
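// No request range available yet (e.g. before the first query run); fall back to the last 10 seconds.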
absolute = {
from: Date.now() - 10000,
to: Date.now(),
};
}
const { isSyntaxReady, syntax } = useCloudWatchSyntax(
datasource.languageProvider as CloudWatchLanguageProvider,
absolute
);
return (
<CloudWatchLogsQueryField
exploreId={exploreId}
exploreMode={exploreMode}
datasource={datasource}
query={query}
onBlur={() => {}}
onChange={(val: CloudWatchLogsQuery) => onChange({ ...val, queryMode: 'Logs' })}
onRunQuery={onRunQuery}
history={[]}
data={data}
absoluteRange={absolute}
syntaxLoaded={isSyntaxReady}
syntax={syntax}
ExtraFieldElement={
<FormLabel className={`gf-form-label--btn ${labelClass}`} width="auto" tooltip="Link to Graph in AWS">
<CloudWatchLink query={query as CloudWatchLogsQuery} panelData={data} datasource={datasource} />
</FormLabel>
}
/>
);
});
export default CloudWatchLogsQueryEditor;

View File

@ -0,0 +1,377 @@
// Libraries
import React, { ReactNode } from 'react';
import intersection from 'lodash/intersection';
import {
QueryField,
SlatePrism,
LegacyForms,
TypeaheadInput,
TypeaheadOutput,
BracesPlugin,
Select,
MultiSelect,
Token,
} from '@grafana/ui';
// Utils & Services
// dom also includes Element polyfills
import { Plugin, Node, Editor } from 'slate';
import syntax from '../syntax';
// Types
import { ExploreQueryFieldProps, AbsoluteTimeRange, SelectableValue, ExploreMode, AppEvents } from '@grafana/data';
import { CloudWatchQuery, CloudWatchLogsQuery } from '../types';
import { CloudWatchDatasource } from '../datasource';
import Prism, { Grammar } from 'prismjs';
import { CloudWatchLanguageProvider } from '../language_provider';
import { css } from 'emotion';
import { ExploreId } from 'app/types';
import { dispatch } from 'app/store/store';
import { changeModeAction } from 'app/features/explore/state/actionTypes';
import { appEvents } from 'app/core/core';
export interface CloudWatchLogsQueryFieldProps extends ExploreQueryFieldProps<CloudWatchDatasource, CloudWatchQuery> {
absoluteRange: AbsoluteTimeRange;
onLabelsRefresh?: () => void;
ExtraFieldElement?: ReactNode;
syntaxLoaded: boolean;
syntax: Grammar;
exploreId: ExploreId;
}
const containerClass = css`
flex-grow: 1;
min-height: 35px;
`;
const rowGap = css`
gap: 3px;
`;
interface State {
selectedLogGroups: Array<SelectableValue<string>>;
availableLogGroups: Array<SelectableValue<string>>;
loadingLogGroups: boolean;
regions: Array<SelectableValue<string>>;
selectedRegion: SelectableValue<string>;
invalidLogGroups: boolean;
hint:
| {
message: string;
fix: {
label: string;
action: () => void;
};
}
| undefined;
}
export class CloudWatchLogsQueryField extends React.PureComponent<CloudWatchLogsQueryFieldProps, State> {
state: State = {
selectedLogGroups:
(this.props.query as CloudWatchLogsQuery).logGroupNames?.map(logGroup => ({
value: logGroup,
label: logGroup,
})) ?? [],
availableLogGroups: [],
regions: [],
invalidLogGroups: false,
selectedRegion: (this.props.query as CloudWatchLogsQuery).region
? {
label: (this.props.query as CloudWatchLogsQuery).region,
value: (this.props.query as CloudWatchLogsQuery).region,
text: (this.props.query as CloudWatchLogsQuery).region,
}
: { label: 'default', value: 'default', text: 'default' },
loadingLogGroups: false,
hint: undefined,
};
plugins: Plugin[];
constructor(props: CloudWatchLogsQueryFieldProps, context: React.Context<any>) {
super(props, context);
Prism.languages['cloudwatch'] = syntax;
this.plugins = [
BracesPlugin(),
SlatePrism({
onlyIn: (node: Node) => node.object === 'block' && node.type === 'code_block',
getSyntax: (node: Node) => 'cloudwatch',
}),
];
}
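// Fetch the log groups available in the given region and map them to select options;
// failures surface as application-level alerts.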
fetchLogGroupOptions = async (region: string) => {
try {
const logGroups: string[] = await this.props.datasource.describeLogGroups({
refId: this.props.query.refId,
region,
});
return logGroups.map(logGroup => ({
value: logGroup,
label: logGroup,
}));
} catch (err) {
appEvents.emit(AppEvents.alertError, [err]);
return [];
}
};
componentWillMount = () => {
const { datasource, query } = this.props;
this.setState({
loadingLogGroups: true,
});
this.fetchLogGroupOptions(query.region).then(logGroups => {
this.setState({
loadingLogGroups: false,
availableLogGroups: logGroups,
});
});
datasource.getRegions().then(regions => {
this.setState({
regions,
});
});
};
onChangeQuery = (value: string, override?: boolean) => {
// Send text change to parent
const { query, onChange, onRunQuery } = this.props;
const { selectedLogGroups, selectedRegion } = this.state;
if (onChange) {
const nextQuery = {
...query,
expression: value,
logGroupNames: selectedLogGroups?.map(logGroupName => logGroupName.value) ?? [],
region: selectedRegion.value,
};
onChange(nextQuery);
if (override && onRunQuery) {
onRunQuery();
}
}
};
setSelectedLogGroups = (v: Array<SelectableValue<string>>) => {
this.setState({
selectedLogGroups: v,
});
const { onChange, query } = this.props;
if (onChange) {
const nextQuery = {
...query,
logGroupNames: v.map(logGroupName => logGroupName.value) ?? [],
};
onChange(nextQuery);
}
};
setSelectedRegion = async (v: SelectableValue<string>) => {
this.setState({
selectedRegion: v,
loadingLogGroups: true,
});
const logGroups = await this.fetchLogGroupOptions(v.value!);
this.setState(state => ({
availableLogGroups: logGroups,
selectedLogGroups: intersection(state.selectedLogGroups, logGroups),
loadingLogGroups: false,
}));
const { onChange, query } = this.props;
if (onChange) {
const nextQuery = {
...query,
region: v.value,
};
onChange(nextQuery);
}
};
onTypeahead = async (typeahead: TypeaheadInput): Promise<TypeaheadOutput> => {
const { datasource, exploreMode } = this.props;
const { selectedLogGroups } = this.state;
if (!datasource.languageProvider) {
return { suggestions: [] };
}
const cloudwatchLanguageProvider = datasource.languageProvider as CloudWatchLanguageProvider;
const { history, absoluteRange } = this.props;
const { prefix, text, value, wrapperClasses, labelKey, editor } = typeahead;
const result = await cloudwatchLanguageProvider.provideCompletionItems(
{ text, value, prefix, wrapperClasses, labelKey, editor },
{ history, absoluteRange, logGroupNames: selectedLogGroups.map(logGroup => logGroup.value!) }
);
const tokens = editor?.value.data.get('tokens');
const queryUsesStatsCommand = tokens.find(
(token: Token) => token.types.includes('query-command') && token.content.toLowerCase() === 'stats'
);
// TEMP: Remove when logs/metrics unification is complete
if (queryUsesStatsCommand && exploreMode === ExploreMode.Logs) {
this.setState({
hint: {
message: 'You are trying to run a stats query in Logs mode. ',
fix: {
label: 'Switch to Metrics mode.',
action: this.switchToMetrics,
},
},
});
}
return result;
};
switchToMetrics = () => {
const { query, onChange, exploreId } = this.props;
if (onChange) {
const nextQuery: CloudWatchLogsQuery = {
...(query as CloudWatchLogsQuery),
apiMode: 'Logs',
};
onChange(nextQuery);
}
dispatch(changeModeAction({ exploreId, mode: ExploreMode.Metrics }));
};
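// Clicking the query field while it is disabled (no log groups selected yet) marks the
// log group picker as invalid to nudge the user towards selecting groups first.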
onQueryFieldClick = (_event: Event, _editor: Editor, next: () => any) => {
const { selectedLogGroups, loadingLogGroups } = this.state;
const queryFieldDisabled = loadingLogGroups || selectedLogGroups.length === 0;
if (queryFieldDisabled) {
this.setState({
invalidLogGroups: true,
});
}
next();
};
onOpenLogGroupMenu = () => {
this.setState({
invalidLogGroups: false,
});
};
render() {
const { ExtraFieldElement, data, query, syntaxLoaded, datasource } = this.props;
const {
selectedLogGroups,
availableLogGroups,
regions,
selectedRegion,
loadingLogGroups,
hint,
invalidLogGroups,
} = this.state;
const showError = data && data.error && data.error.refId === query.refId;
const cleanText = datasource.languageProvider ? datasource.languageProvider.cleanText : undefined;
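// Logs Insights limits a query to 20 log groups, so disable further options once that many are selected.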
const MAX_LOG_GROUPS = 20;
return (
<>
<div className={`gf-form gf-form--grow flex-grow-1 ${rowGap}`}>
<LegacyForms.FormField
label="Region"
labelWidth={4}
inputEl={
<Select
options={regions}
value={selectedRegion}
onChange={v => this.setSelectedRegion(v)}
width={18}
placeholder="Choose Region"
menuPlacement="bottom"
maxMenuHeight={500}
/>
}
/>
<LegacyForms.FormField
label="Log Groups"
labelWidth={6}
className="flex-grow-1"
inputEl={
<MultiSelect
options={availableLogGroups}
value={selectedLogGroups}
onChange={v => {
this.setSelectedLogGroups(v);
}}
className={containerClass}
closeMenuOnSelect={false}
isClearable={true}
invalid={invalidLogGroups}
isOptionDisabled={() => selectedLogGroups.length >= MAX_LOG_GROUPS}
placeholder="Choose Log Groups"
maxVisibleValues={4}
menuPlacement="bottom"
noOptionsMessage="No log groups available"
isLoading={loadingLogGroups}
onOpenMenu={this.onOpenLogGroupMenu}
/>
}
/>
</div>
<div className="gf-form-inline gf-form-inline--nowrap flex-grow-1">
<div className="gf-form gf-form--grow flex-shrink-1">
<QueryField
additionalPlugins={this.plugins}
query={query.expression}
onChange={this.onChangeQuery}
onBlur={this.props.onBlur}
onClick={this.onQueryFieldClick}
onRunQuery={this.props.onRunQuery}
onTypeahead={this.onTypeahead}
cleanText={cleanText}
placeholder="Enter a CloudWatch Logs Insights query"
portalOrigin="cloudwatch"
syntaxLoaded={syntaxLoaded}
disabled={loadingLogGroups || selectedLogGroups.length === 0}
/>
</div>
{ExtraFieldElement}
</div>
{hint && (
<div className="query-row-break">
<div className="text-warning">
{hint.message}
<a className="text-link muted" onClick={hint.fix.action}>
{hint.fix.label}
</a>
</div>
</div>
)}
{showError ? (
<div className="query-row-break">
<div className="prom-query-field-info text-error">{data?.error?.message}</div>
</div>
) : null}
</>
);
}
}

View File

@ -5,8 +5,8 @@ import { act } from 'react-dom/test-utils';
import { DataSourceInstanceSettings } from '@grafana/data';
import { TemplateSrv } from 'app/features/templating/template_srv';
import { CustomVariable } from 'app/features/templating/all';
import { Props, QueryEditor, normalizeQuery } from './QueryEditor';
import CloudWatchDatasource from '../datasource';
import { MetricsQueryEditor, Props, normalizeQuery } from './MetricsQueryEditor';
import { CloudWatchDatasource } from '../datasource';
const setup = () => {
const instanceSettings = {
@ -37,6 +37,8 @@ const setup = () => {
const props: Props = {
query: {
queryMode: 'Metrics',
apiMode: 'Metrics',
refId: '',
id: '',
region: 'us-east-1',
@ -63,7 +65,7 @@ describe('QueryEditor', () => {
const { act } = renderer;
await act(async () => {
const props = setup();
const tree = renderer.create(<QueryEditor {...props} />).toJSON();
const tree = renderer.create(<MetricsQueryEditor {...props} />).toJSON();
expect(tree).toMatchSnapshot();
});
});
@ -74,7 +76,7 @@ describe('QueryEditor', () => {
await act(async () => {
const props = setup();
props.query.region = (null as unknown) as string;
const wrapper = mount(<QueryEditor {...props} />);
const wrapper = mount(<MetricsQueryEditor {...props} />);
expect(
wrapper
.find('.gf-form-inline')

View File

@ -1,11 +1,12 @@
import React, { PureComponent, ChangeEvent } from 'react';
import isEmpty from 'lodash/isEmpty';
import { ExploreQueryFieldProps } from '@grafana/data';
import { LegacyForms, ValidationEvents, EventsWithValidation, Icon } from '@grafana/ui';
const { Input, Switch } = LegacyForms;
import isEmpty from 'lodash/isEmpty';
import { CloudWatchQuery } from '../types';
import CloudWatchDatasource from '../datasource';
import { QueryField, Alias, QueryFieldsEditor } from './';
import { CloudWatchQuery, CloudWatchMetricsQuery } from '../types';
import { CloudWatchDatasource } from '../datasource';
import { QueryField, Alias, MetricsQueryFieldsEditor } from './';
export type Props = ExploreQueryFieldProps<CloudWatchDatasource, CloudWatchQuery>;
@ -33,7 +34,7 @@ export const normalizeQuery = ({
statistics,
period,
...rest
}: CloudWatchQuery): CloudWatchQuery => {
}: CloudWatchMetricsQuery): CloudWatchMetricsQuery => {
const normalizedQuery = {
namespace: namespace || '',
metricName: metricName || '',
@ -49,10 +50,10 @@ export const normalizeQuery = ({
return !rest.hasOwnProperty('matchExact') ? { ...normalizedQuery, matchExact: true } : normalizedQuery;
};
export class QueryEditor extends PureComponent<Props, State> {
export class MetricsQueryEditor extends PureComponent<Props, State> {
state: State = { showMeta: false };
onChange(query: CloudWatchQuery) {
onChange(query: CloudWatchMetricsQuery) {
const { onChange, onRunQuery } = this.props;
onChange(query);
onRunQuery();
@ -60,12 +61,14 @@ export class QueryEditor extends PureComponent<Props, State> {
render() {
const { data, onRunQuery } = this.props;
const metricsQuery = this.props.query as CloudWatchMetricsQuery;
const { showMeta } = this.state;
const query = normalizeQuery(this.props.query);
const query = normalizeQuery(metricsQuery);
const metaDataExist = data && Object.values(data).length && data.state === 'Done';
return (
<>
<QueryFieldsEditor {...{ ...this.props, query }}></QueryFieldsEditor>
<MetricsQueryFieldsEditor {...{ ...this.props, query }}></MetricsQueryFieldsEditor>
{query.statistics.length <= 1 && (
<div className="gf-form-inline">
<div className="gf-form">
@ -77,7 +80,7 @@ export class QueryEditor extends PureComponent<Props, State> {
className="gf-form-input width-8"
onBlur={onRunQuery}
onChange={(event: ChangeEvent<HTMLInputElement>) =>
this.onChange({ ...query, id: event.target.value })
this.onChange({ ...metricsQuery, id: event.target.value })
}
validationEvents={idValidationEvents}
value={query.id}
@ -93,9 +96,9 @@ export class QueryEditor extends PureComponent<Props, State> {
<Input
className="gf-form-input"
onBlur={onRunQuery}
value={query.expression}
value={query.expression || ''}
onChange={(event: ChangeEvent<HTMLInputElement>) =>
this.onChange({ ...query, expression: event.target.value })
this.onChange({ ...metricsQuery, expression: event.target.value })
}
/>
</QueryField>
@ -107,11 +110,11 @@ export class QueryEditor extends PureComponent<Props, State> {
<QueryField label="Period" tooltip="Minimum interval between points in seconds">
<Input
className="gf-form-input width-8"
value={query.period}
value={query.period || ''}
placeholder="auto"
onBlur={onRunQuery}
onChange={(event: ChangeEvent<HTMLInputElement>) =>
this.onChange({ ...query, period: event.target.value })
this.onChange({ ...metricsQuery, period: event.target.value })
}
/>
</QueryField>
@ -121,14 +124,22 @@ export class QueryEditor extends PureComponent<Props, State> {
label="Alias"
tooltip="Alias replacement variables: {{metric}}, {{stat}}, {{namespace}}, {{region}}, {{period}}, {{label}}, {{YOUR_DIMENSION_NAME}}"
>
<Alias value={query.alias} onChange={(value: string) => this.onChange({ ...query, alias: value })} />
<Alias
value={metricsQuery.alias}
onChange={(value: string) => this.onChange({ ...metricsQuery, alias: value })}
/>
</QueryField>
<Switch
label="Match Exact"
labelClass="query-keyword"
tooltip="Only show metrics that exactly match all defined dimension names."
checked={query.matchExact}
onChange={() => this.onChange({ ...query, matchExact: !query.matchExact })}
checked={metricsQuery.matchExact}
onChange={() =>
this.onChange({
...metricsQuery,
matchExact: !metricsQuery.matchExact,
})
}
/>
<label className="gf-form-label">
<a
@ -157,7 +168,7 @@ export class QueryEditor extends PureComponent<Props, State> {
</tr>
</thead>
<tbody>
{data?.series[0]?.meta?.gmdMeta.map(({ ID, Expression, Period }: any) => (
{data?.series?.[0]?.meta?.gmdMeta?.map(({ ID, Expression, Period }: any) => (
<tr key={ID}>
<td>{ID}</td>
<td>{Expression}</td>
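For reference, here is a minimal sketch of what normalizeQuery produces for a sparse query, based only on the defaulting visible above (empty strings for namespace/metricName, and matchExact added when absent). The input object and the loose double assertion are illustrative only and not part of the commit:

// Hypothetical sparse query, cast loosely purely for illustration.
const sparse = ({ refId: 'A', queryMode: 'Metrics', region: 'us-east-1' } as unknown) as CloudWatchMetricsQuery;
const normalized = normalizeQuery(sparse);
// normalized.namespace === ''      (namespace || '')
// normalized.metricName === ''     (metricName || '')
// normalized.matchExact === true   (added when the property is absent)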

View File

@ -1,16 +1,15 @@
import React, { useState, useEffect } from 'react';
import { SelectableValue } from '@grafana/data';
import { Segment, SegmentAsync } from '@grafana/ui';
import { CloudWatchQuery, SelectableStrings } from '../types';
import CloudWatchDatasource from '../datasource';
import { Stats, Dimensions, QueryInlineField } from './';
import { CloudWatchQuery, SelectableStrings, CloudWatchMetricsQuery } from '../types';
import { CloudWatchDatasource } from '../datasource';
import { Stats, Dimensions, QueryInlineField } from '.';
export type Props = {
query: CloudWatchQuery;
datasource: CloudWatchDatasource;
onRunQuery?: () => void;
onChange: (value: CloudWatchQuery) => void;
hideWilcard?: boolean;
};
interface State {
@ -21,13 +20,14 @@ interface State {
showMeta: boolean;
}
export function QueryFieldsEditor({
export function MetricsQueryFieldsEditor({
query,
datasource,
onChange,
onRunQuery = () => {},
hideWilcard = false,
}: React.PropsWithChildren<Props>) {
const metricsQuery = query as CloudWatchMetricsQuery;
const [state, setState] = useState<State>({
regions: [],
namespaces: [],
@ -74,13 +74,13 @@ export function QueryFieldsEditor({
// Load dimension values based on the currently selected dimensions.
// Remove the new dimension key and all dimensions that have a wildcard as the selected value
const loadDimensionValues = (newKey: string) => {
const { [newKey]: value, ...dim } = query.dimensions;
const { [newKey]: value, ...dim } = metricsQuery.dimensions;
const newDimensions = Object.entries(dim).reduce(
(result, [key, value]) => (value === '*' ? result : { ...result, [key]: value }),
{}
);
return datasource
.getDimensionValues(query.region, query.namespace, query.metricName, newKey, newDimensions)
.getDimensionValues(query.region, query.namespace, metricsQuery.metricName, newKey, newDimensions)
.then(values => (values.length ? [{ value: '*', text: '*', label: '*' }, ...values] : values))
.then(appendTemplateVariables);
};
@ -112,27 +112,27 @@ export function QueryFieldsEditor({
<QueryInlineField label="Metric Name">
<SegmentAsync
value={query.metricName}
value={metricsQuery.metricName}
placeholder="Select metric name"
allowCustomValue
loadOptions={loadMetricNames}
onChange={({ value: metricName }) => onQueryChange({ ...query, metricName })}
onChange={({ value: metricName }) => onQueryChange({ ...metricsQuery, metricName })}
/>
</QueryInlineField>
<QueryInlineField label="Stats">
<Stats
stats={datasource.standardStatistics.map(toOption)}
values={query.statistics}
onChange={statistics => onQueryChange({ ...query, statistics })}
values={metricsQuery.statistics}
onChange={statistics => onQueryChange({ ...metricsQuery, statistics })}
variableOptionGroup={variableOptionGroup}
/>
</QueryInlineField>
<QueryInlineField label="Dimensions">
<Dimensions
dimensions={query.dimensions}
onChange={dimensions => onQueryChange({ ...query, dimensions })}
dimensions={metricsQuery.dimensions}
onChange={dimensions => onQueryChange({ ...metricsQuery, dimensions })}
loadKeys={() => datasource.getDimensionKeys(query.namespace, query.region).then(appendTemplateVariables)}
loadValues={loadDimensionValues}
/>
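A standalone sketch of the wildcard filtering performed in loadDimensionValues above: the key currently being edited is dropped, and any dimension whose selected value is the '*' wildcard is excluded before dimension values are requested. The dimension names and values here are made up:

const dimensions: Record<string, string> = {
  InstanceId: '*',
  InstanceType: 't3.micro',
  AvailabilityZone: 'us-east-1a',
};
const newKey = 'AvailabilityZone'; // the key being edited
const { [newKey]: _removed, ...rest } = dimensions;
const filtered = Object.entries(rest).reduce(
  (result, [key, value]) => (value === '*' ? result : { ...result, [key]: value }),
  {} as Record<string, string>
);
// filtered => { InstanceType: 't3.micro' }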

View File

@ -0,0 +1,48 @@
import React, { PureComponent } from 'react';
import { ExploreQueryFieldProps, ExploreMode } from '@grafana/data';
import { Segment } from '@grafana/ui';
import { CloudWatchQuery } from '../types';
import { CloudWatchDatasource } from '../datasource';
import { QueryInlineField } from './';
import { MetricsQueryEditor } from './MetricsQueryEditor';
import LogsQueryEditor from './LogsQueryEditor';
import { config } from '@grafana/runtime';
export type Props = ExploreQueryFieldProps<CloudWatchDatasource, CloudWatchQuery>;
interface State {
queryMode: ExploreMode;
}
export class PanelQueryEditor extends PureComponent<Props, State> {
state: State = { queryMode: (this.props.query.queryMode as ExploreMode) ?? ExploreMode.Metrics };
onQueryModeChange(mode: ExploreMode) {
this.setState({
queryMode: mode,
});
}
render() {
const { queryMode } = this.state;
const cloudwatchLogsDisabled = !config.featureToggles.cloudwatchLogs;
return (
<>
{!cloudwatchLogsDisabled && (
<QueryInlineField label="Query Mode">
<Segment
value={queryMode}
options={[
{ label: 'Metrics', value: ExploreMode.Metrics },
{ label: 'Logs', value: ExploreMode.Logs },
]}
onChange={({ value }) => this.onQueryModeChange(value ?? ExploreMode.Metrics)}
/>
</QueryInlineField>
)}
{queryMode === ExploreMode.Logs ? <LogsQueryEditor {...this.props} /> : <MetricsQueryEditor {...this.props} />}
</>
);
}
}

View File

@ -2,4 +2,6 @@ export { Stats } from './Stats';
export { Dimensions } from './Dimensions';
export { QueryInlineField, QueryField } from './Forms';
export { Alias } from './Alias';
export { QueryFieldsEditor } from './QueryFieldsEditor';
export { MetricsQueryFieldsEditor } from './MetricsQueryFieldsEditor';
export { PanelQueryEditor } from './PanelQueryEditor';
export { CloudWatchLogsQueryEditor } from './LogsQueryEditor';

View File

@ -13,15 +13,41 @@ import {
dateMath,
ScopedVars,
TimeRange,
DataFrame,
resultsToDataFrames,
DataQueryResponse,
LoadingState,
toDataFrame,
guessFieldTypes,
FieldType,
LogRowModel,
} from '@grafana/data';
import { getBackendSrv } from '@grafana/runtime';
import { TemplateSrv } from 'app/features/templating/template_srv';
import { TimeSrv } from 'app/features/dashboard/services/TimeSrv';
import { ThrottlingErrorMessage } from './components/ThrottlingErrorMessage';
import memoizedDebounce from './memoizedDebounce';
import { CloudWatchJsonData, CloudWatchQuery } from './types';
import {
CloudWatchQuery,
CloudWatchJsonData,
CloudWatchMetricsQuery,
CloudWatchLogsQuery,
CloudWatchLogsQueryStatus,
DescribeLogGroupsRequest,
TSDBResponse,
MetricRequest,
GetLogGroupFieldsRequest,
GetLogGroupFieldsResponse,
LogAction,
GetLogEventsRequest,
} from './types';
import { from, empty, Observable } from 'rxjs';
import { delay, expand, map, mergeMap, tap, finalize, catchError } from 'rxjs/operators';
import { CloudWatchLanguageProvider } from './language_provider';
const TSDB_QUERY_ENDPOINT = '/api/tsdb/query';
import { VariableWithMultiSupport } from 'app/features/templating/types';
import { RowContextOptions } from '@grafana/ui/src/components/Logs/LogRowContextProvider';
const displayAlert = (datasourceName: string, region: string) =>
store.dispatch(
@ -37,7 +63,11 @@ const displayAlert = (datasourceName: string, region: string) =>
const displayCustomError = (title: string, message: string) =>
store.dispatch(notifyApp(createErrorNotification(title, message)));
export default class CloudWatchDatasource extends DataSourceApi<CloudWatchQuery, CloudWatchJsonData> {
// TODO: These polling times are temporary; they could be replaced with a single fixed interval.
const MAX_ATTEMPTS = 8;
const POLLING_TIMES = [100, 200, 500, 1000];
export class CloudWatchDatasource extends DataSourceApi<CloudWatchQuery, CloudWatchJsonData> {
type: any;
proxyUrl: any;
defaultRegion: any;
@ -45,6 +75,8 @@ export default class CloudWatchDatasource extends DataSourceApi<CloudWatchQuery,
datasourceName: string;
debouncedAlert: (datasourceName: string, region: string) => void;
debouncedCustomAlert: (title: string, message: string) => void;
logQueries: Set<{ id: string; region: string }>;
languageProvider: CloudWatchLanguageProvider;
/** @ngInject */
constructor(
@ -60,52 +92,78 @@ export default class CloudWatchDatasource extends DataSourceApi<CloudWatchQuery,
this.standardStatistics = ['Average', 'Maximum', 'Minimum', 'Sum', 'SampleCount'];
this.debouncedAlert = memoizedDebounce(displayAlert, AppNotificationTimeout.Error);
this.debouncedCustomAlert = memoizedDebounce(displayCustomError, AppNotificationTimeout.Error);
this.logQueries = new Set<{ id: string; region: string }>();
this.languageProvider = new CloudWatchLanguageProvider(this);
}
query(options: DataQueryRequest<CloudWatchQuery>) {
options = angular.copy(options);
const queries = _.filter(options.targets, item => {
return (
(item.id !== '' || item.hide !== true) &&
((!!item.region && !!item.namespace && !!item.metricName && !_.isEmpty(item.statistics)) ||
item.expression?.length > 0)
);
}).map(item => {
item.region = this.replace(this.getActualRegion(item.region), options.scopedVars, true, 'region');
item.namespace = this.replace(item.namespace, options.scopedVars, true, 'namespace');
item.metricName = this.replace(item.metricName, options.scopedVars, true, 'metric name');
item.dimensions = this.convertDimensionFormat(item.dimensions, options.scopedVars);
item.statistics = item.statistics.map(stat => this.replace(stat, options.scopedVars, true, 'statistics'));
item.period = String(this.getPeriod(item, options)); // use string format for period in graph query, and alerting
item.id = this.templateSrv.replace(item.id, options.scopedVars);
item.expression = this.templateSrv.replace(item.expression, options.scopedVars);
const firstTarget = options.targets[0];
// valid ExtendedStatistics are of the form p90.00; check the pattern
const hasInvalidStatistics = item.statistics.some(s => {
if (s.indexOf('p') === 0) {
const matches = /^p\d{2}(?:\.\d{1,2})?$/.exec(s);
return !matches || matches[0] !== s;
if (firstTarget.queryMode === 'Logs') {
const queryParams = options.targets.map((target: CloudWatchLogsQuery) => ({
queryString: target.expression,
refId: target.refId,
logGroupNames: target.logGroupNames,
region: this.replace(this.getActualRegion(target.region), options.scopedVars, true, 'region'),
}));
return this.makeLogActionRequest('StartQuery', queryParams, options.scopedVars).pipe(
mergeMap(dataFrames =>
this.logsQuery(
dataFrames.map(dataFrame => ({
queryId: dataFrame.fields[0].values.get(0),
region: dataFrame.meta?.custom?.['Region'] ?? 'default',
refId: dataFrame.refId,
}))
)
)
);
}
const queries = options.targets
.filter(
item =>
(item.id !== '' || item.hide !== true) &&
item.queryMode !== 'Logs' &&
((!!item.region && !!item.namespace && !!item.metricName && !_.isEmpty(item.statistics)) ||
item.expression?.length > 0)
)
.map((item: CloudWatchMetricsQuery) => {
item.region = this.replace(this.getActualRegion(item.region), options.scopedVars, true, 'region');
item.namespace = this.replace(item.namespace, options.scopedVars, true, 'namespace');
item.metricName = this.replace(item.metricName, options.scopedVars, true, 'metric name');
item.dimensions = this.convertDimensionFormat(item.dimensions, options.scopedVars);
item.statistics = item.statistics.map(stat => this.replace(stat, options.scopedVars, true, 'statistics'));
item.period = String(this.getPeriod(item, options)); // use string format for period in graph query, and alerting
item.id = this.templateSrv.replace(item.id, options.scopedVars);
item.expression = this.templateSrv.replace(item.expression, options.scopedVars);
// valid ExtendedStatistics are of the form p90.00; check the pattern
const hasInvalidStatistics = item.statistics.some(s => {
if (s.indexOf('p') === 0) {
const matches = /^p\d{2}(?:\.\d{1,2})?$/.exec(s);
return !matches || matches[0] !== s;
}
return false;
});
if (hasInvalidStatistics) {
throw { message: 'Invalid extended statistics' };
}
return false;
});
if (hasInvalidStatistics) {
throw { message: 'Invalid extended statistics' };
}
return _.extend(
{
return {
refId: item.refId,
intervalMs: options.intervalMs,
maxDataPoints: options.maxDataPoints,
datasourceId: this.id,
type: 'timeSeriesQuery',
},
item
);
});
...item,
};
});
// No valid targets, return the empty result to save a round trip.
if (_.isEmpty(queries)) {
@ -121,11 +179,128 @@ export default class CloudWatchDatasource extends DataSourceApi<CloudWatchQuery,
return this.performTimeSeriesQuery(request, options.range);
}
logsQuery(queryParams: Array<{ queryId: string; limit?: number; region: string }>): Observable<DataQueryResponse> {
this.logQueries.clear();
queryParams.forEach(param => this.logQueries.add({ id: param.queryId, region: param.region }));
return withTeardown(
this.makeLogActionRequest('GetQueryResults', queryParams).pipe(
expand((dataFrames, i) => {
return dataFrames.every(
dataFrame => dataFrame.meta?.custom?.['Status'] === CloudWatchLogsQueryStatus.Complete
) || i >= MAX_ATTEMPTS
? empty()
: this.makeLogActionRequest('GetQueryResults', queryParams).pipe(
delay(POLLING_TIMES[Math.min(i, POLLING_TIMES.length - 1)])
);
}),
tap(dataFrames => {
dataFrames.forEach((dataframe, i) => {
if (
[
CloudWatchLogsQueryStatus.Complete,
CloudWatchLogsQueryStatus.Cancelled,
CloudWatchLogsQueryStatus.Failed,
].includes(dataframe.meta?.custom?.['Status'])
) {
this.logQueries.delete({ id: queryParams[i].queryId, region: queryParams[i].region });
}
});
}),
map(dataFrames => {
const correctedFrames = dataFrames.map(frame => correctFrameTypes(frame));
return {
data: correctedFrames,
key: 'test-key',
state: correctedFrames.every(
dataFrame => dataFrame.meta?.custom?.['Status'] === CloudWatchLogsQueryStatus.Complete
)
? LoadingState.Done
: LoadingState.Loading,
};
})
),
() => this.stopQueries()
);
}
stopQueries() {
if (this.logQueries.size > 0) {
this.makeLogActionRequest(
'StopQuery',
[...this.logQueries.values()].map(logQuery => ({ queryId: logQuery.id, region: logQuery.region })),
null,
false
).pipe(finalize(() => this.logQueries.clear()));
}
}
async describeLogGroups(params: DescribeLogGroupsRequest): Promise<string[]> {
const dataFrames = await this.makeLogActionRequest('DescribeLogGroups', [params]).toPromise();
const logGroupNames = dataFrames[0].fields[0].values.toArray();
return logGroupNames && logGroupNames.length > 0 ? logGroupNames : [];
}
async getLogGroupFields(params: GetLogGroupFieldsRequest): Promise<GetLogGroupFieldsResponse> {
const dataFrames = await this.makeLogActionRequest('GetLogGroupFields', [params]).toPromise();
const fieldNames = dataFrames[0].fields[0].values.toArray();
const fieldPercentages = dataFrames[0].fields[1].values.toArray();
const getLogGroupFieldsResponse = {
logGroupFields: fieldNames.map((val, i) => ({ name: val, percent: fieldPercentages[i] })) ?? [],
};
return getLogGroupFieldsResponse;
}
getLogRowContext = async (
row: LogRowModel,
{ limit = 10, direction = 'BACKWARD' }: RowContextOptions = {}
): Promise<{ data: DataFrame[] }> => {
let logStreamField = null;
let logField = null;
for (const field of row.dataFrame.fields) {
if (field.name === '@logStream') {
logStreamField = field;
if (logField !== null) {
break;
}
} else if (field.name === '@log') {
logField = field;
if (logStreamField !== null) {
break;
}
}
}
const requestParams: GetLogEventsRequest = {
limit,
startFromHead: direction !== 'BACKWARD',
logGroupName: parseLogGroupName(logField!.values.get(row.rowIndex)),
logStreamName: logStreamField!.values.get(row.rowIndex),
};
if (direction === 'BACKWARD') {
requestParams.endTime = row.timeEpochMs;
} else {
requestParams.startTime = row.timeEpochMs;
}
const dataFrames = await this.makeLogActionRequest('GetLogEvents', [requestParams]).toPromise();
return {
data: dataFrames,
};
};
get variables() {
return this.templateSrv.getVariables().map(v => `$${v.name}`);
}
getPeriod(target: any, options: any) {
getPeriod(target: CloudWatchMetricsQuery, options: any) {
let period = this.templateSrv.replace(target.period, options.scopedVars);
if (period && period.toLowerCase() !== 'auto') {
if (/^\d+$/.test(period)) {
@ -143,7 +318,7 @@ export default class CloudWatchDatasource extends DataSourceApi<CloudWatchQuery,
}
buildCloudwatchConsoleUrl(
{ region, namespace, metricName, dimensions, statistics, expression }: CloudWatchQuery,
{ region, namespace, metricName, dimensions, statistics, expression }: CloudWatchMetricsQuery,
start: string,
end: string,
title: string,
@ -193,9 +368,9 @@ export default class CloudWatchDatasource extends DataSourceApi<CloudWatchQuery,
)}`;
}
performTimeSeriesQuery(request: any, { from, to }: TimeRange): Promise<any> {
return this.awsRequest('/api/tsdb/query', request)
.then((res: any) => {
performTimeSeriesQuery(request: MetricRequest, { from, to }: TimeRange): Promise<any> {
return this.awsRequest(TSDB_QUERY_ENDPOINT, request)
.then((res: TSDBResponse) => {
if (!res.results) {
return { data: [] };
}
@ -248,8 +423,8 @@ export default class CloudWatchDatasource extends DataSourceApi<CloudWatchQuery,
if (/^Throttling:.*/.test(err.data.message)) {
const failedRedIds = Object.keys(err.data.results);
const regionsAffected = Object.values(request.queries).reduce(
(res: string[], { refId, region }: CloudWatchQuery) =>
!failedRedIds.includes(refId) || res.includes(region) ? res : [...res, region],
(res: string[], { refId, region }) =>
(refId && !failedRedIds.includes(refId)) || res.includes(region) ? res : [...res, region],
[]
) as string[];
@ -264,40 +439,76 @@ export default class CloudWatchDatasource extends DataSourceApi<CloudWatchQuery,
});
}
transformSuggestDataFromTable(suggestData: any) {
return _.map(suggestData.results['metricFindQuery'].tables[0].rows, v => {
return {
text: v[0],
value: v[1],
label: v[1],
};
});
transformSuggestDataFromTable(suggestData: TSDBResponse) {
return suggestData.results['metricFindQuery'].tables[0].rows.map(([text, value]) => ({
text,
value,
label: value,
}));
}
doMetricQueryRequest(subtype: any, parameters: any) {
doMetricQueryRequest(subtype: string, parameters: any) {
const range = this.timeSrv.timeRange();
return this.awsRequest('/api/tsdb/query', {
return this.awsRequest(TSDB_QUERY_ENDPOINT, {
from: range.from.valueOf().toString(),
to: range.to.valueOf().toString(),
queries: [
_.extend(
{
refId: 'metricFindQuery',
intervalMs: 1, // dummy
maxDataPoints: 1, // dummy
datasourceId: this.id,
type: 'metricFindQuery',
subtype: subtype,
},
parameters
),
{
refId: 'metricFindQuery',
intervalMs: 1, // dummy
maxDataPoints: 1, // dummy
datasourceId: this.id,
type: 'metricFindQuery',
subtype: subtype,
...parameters,
},
],
}).then((r: any) => {
}).then((r: TSDBResponse) => {
return this.transformSuggestDataFromTable(r);
});
}
getRegions() {
makeLogActionRequest(
subtype: LogAction,
queryParams: any[],
scopedVars?: any,
makeReplacements = true
): Observable<DataFrame[]> {
const range = this.timeSrv.timeRange();
const requestParams = {
from: range.from.valueOf().toString(),
to: range.to.valueOf().toString(),
queries: queryParams.map((param: any) => ({
refId: 'A',
intervalMs: 1, // dummy
maxDataPoints: 1, // dummy
datasourceId: this.id,
type: 'logAction',
subtype: subtype,
...param,
})),
};
if (makeReplacements) {
requestParams.queries.forEach(
query => (query.region = this.replace(this.getActualRegion(this.defaultRegion), scopedVars, true, 'region'))
);
}
return from(this.awsRequest(TSDB_QUERY_ENDPOINT, requestParams)).pipe(
map(response => resultsToDataFrames(response)),
catchError(err => {
if (err.data?.error) {
throw err.data.error;
}
throw err;
})
);
}
getRegions(): Promise<Array<{ label: string; value: string; text: string }>> {
return this.doMetricQueryRequest('regions', null).then((regions: any) => [
{ label: 'default', value: 'default', text: 'default' },
...regions,
@ -454,9 +665,7 @@ export default class CloudWatchDatasource extends DataSourceApi<CloudWatchQuery,
annotationQuery(options: any) {
const annotation = options.annotation;
const statistics = _.map(annotation.statistics, s => {
return this.templateSrv.replace(s);
});
const statistics = annotation.statistics.map((s: any) => this.templateSrv.replace(s));
const defaultPeriod = annotation.prefixMatching ? '' : '300';
let period = annotation.period || defaultPeriod;
period = parseInt(period, 10);
@ -472,31 +681,25 @@ export default class CloudWatchDatasource extends DataSourceApi<CloudWatchQuery,
alarmNamePrefix: annotation.alarmNamePrefix || '',
};
return this.awsRequest('/api/tsdb/query', {
return this.awsRequest(TSDB_QUERY_ENDPOINT, {
from: options.range.from.valueOf().toString(),
to: options.range.to.valueOf().toString(),
queries: [
_.extend(
{
refId: 'annotationQuery',
intervalMs: 1, // dummy
maxDataPoints: 1, // dummy
datasourceId: this.id,
type: 'annotationQuery',
},
parameters
),
{
refId: 'annotationQuery',
datasourceId: this.id,
type: 'annotationQuery',
...parameters,
},
],
}).then((r: any) => {
return _.map(r.results['annotationQuery'].tables[0].rows, v => {
return {
annotation: annotation,
time: Date.parse(v[0]),
title: v[1],
tags: [v[2]],
text: v[3],
};
});
}).then((r: TSDBResponse) => {
return r.results['annotationQuery'].tables[0].rows.map(v => ({
annotation: annotation,
time: Date.parse(v[0]),
title: v[1],
tags: [v[2]],
text: v[3],
}));
});
}
@ -518,36 +721,39 @@ export default class CloudWatchDatasource extends DataSourceApi<CloudWatchQuery,
const metricName = 'EstimatedCharges';
const dimensions = {};
return this.getDimensionValues(region, namespace, metricName, 'ServiceName', dimensions).then(() => {
return { status: 'success', message: 'Data source is working' };
});
return this.getDimensionValues(region, namespace, metricName, 'ServiceName', dimensions).then(() => ({
status: 'success',
message: 'Data source is working',
}));
}
awsRequest(url: string, data: any) {
async awsRequest(url: string, data: MetricRequest) {
const options = {
method: 'POST',
url,
data,
};
return getBackendSrv()
.datasourceRequest(options)
.then((result: any) => {
return result.data;
});
const result = await getBackendSrv().datasourceRequest(options);
return result.data;
}
getDefaultRegion() {
return this.defaultRegion;
}
getActualRegion(region: string) {
if (region === 'default' || _.isEmpty(region)) {
getActualRegion(region?: string) {
if (region === 'default' || region === undefined || region === '') {
return this.getDefaultRegion();
}
return region;
}
showContextToggle() {
return true;
}
convertToCloudWatchTime(date: any, roundUp: any) {
if (_.isString(date)) {
date = dateMath.parse(date, roundUp);
@ -594,3 +800,39 @@ export default class CloudWatchDatasource extends DataSourceApi<CloudWatchQuery,
return this.templateSrv.replace(target, scopedVars);
}
}
function withTeardown<T = any>(observable: Observable<T>, onUnsubscribe: () => void): Observable<T> {
return new Observable<T>(subscriber => {
const innerSub = observable.subscribe({
next: val => subscriber.next(val),
error: err => subscriber.next(err),
complete: () => subscriber.complete(),
});
return () => {
innerSub.unsubscribe();
onUnsubscribe();
};
});
}
function correctFrameTypes(frame: DataFrame): DataFrame {
frame.fields.forEach(field => {
if (field.type === FieldType.string) {
field.type = FieldType.other;
}
});
const correctedFrame = guessFieldTypes(frame);
// const timeField = correctedFrame.fields.find(field => field.name === '@timestamp');
// if (timeField) {
// timeField.type = FieldType.time;
// }
return correctedFrame;
}
function parseLogGroupName(logIdentifier: string): string {
const colonIndex = logIdentifier.lastIndexOf(':');
return logIdentifier.substr(colonIndex + 1);
}
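The logsQuery method above polls GetQueryResults with RxJS expand, re-issuing the request until the frames report a terminal status or MAX_ATTEMPTS is reached, backing off according to POLLING_TIMES. Below is a self-contained sketch of that polling pattern; the fetchStatus function and Status type are stand-ins for illustration, not part of the datasource:

import { Observable, EMPTY } from 'rxjs';
import { delay, expand } from 'rxjs/operators';

type Status = 'Running' | 'Complete';
const ATTEMPTS = 8;                       // mirrors MAX_ATTEMPTS above
const BACKOFF_MS = [100, 200, 500, 1000]; // mirrors POLLING_TIMES above

// Emits each intermediate status and completes once 'Complete' is seen
// or the attempt budget is exhausted.
function pollUntilComplete(fetchStatus: () => Observable<Status>): Observable<Status> {
  return fetchStatus().pipe(
    expand((status, attempt) =>
      status === 'Complete' || attempt >= ATTEMPTS
        ? EMPTY
        : fetchStatus().pipe(delay(BACKOFF_MS[Math.min(attempt, BACKOFF_MS.length - 1)]))
    )
  );
}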

View File

@ -0,0 +1,429 @@
// Libraries
import _ from 'lodash';
// Services & Utils
import syntax, {
QUERY_COMMANDS,
FUNCTIONS,
AGGREGATION_FUNCTIONS_STATS,
STRING_FUNCTIONS,
DATETIME_FUNCTIONS,
IP_FUNCTIONS,
BOOLEAN_FUNCTIONS,
NUMERIC_OPERATORS,
} from './syntax';
// Types
import { CloudWatchQuery } from './types';
import { dateTime, AbsoluteTimeRange, LanguageProvider, HistoryItem } from '@grafana/data';
import { CloudWatchDatasource } from './datasource';
import { CompletionItem, TypeaheadInput, TypeaheadOutput, Token } from '@grafana/ui';
import { Grammar } from 'prismjs';
const HISTORY_ITEM_COUNT = 10;
const HISTORY_COUNT_CUTOFF = 1000 * 60 * 60 * 24; // 24h
const NS_IN_MS = 1000000;
export const LABEL_REFRESH_INTERVAL = 1000 * 30; // 30sec
const wrapLabel = (label: string) => ({ label });
export const rangeToParams = (range: AbsoluteTimeRange) => ({ start: range.from * NS_IN_MS, end: range.to * NS_IN_MS });
export type CloudWatchHistoryItem = HistoryItem<CloudWatchQuery>;
type TypeaheadContext = {
history?: CloudWatchHistoryItem[];
absoluteRange?: AbsoluteTimeRange;
logGroupNames?: string[];
};
export function addHistoryMetadata(item: CompletionItem, history: CloudWatchHistoryItem[]): CompletionItem {
const cutoffTs = Date.now() - HISTORY_COUNT_CUTOFF;
const historyForItem = history.filter(h => h.ts > cutoffTs && h.query.expression === item.label);
let hint = `Queried ${historyForItem.length} times in the last 24h.`;
const recent = historyForItem[0];
if (recent) {
const lastQueried = dateTime(recent.ts).fromNow();
hint = `${hint} Last queried ${lastQueried}.`;
}
return {
...item,
documentation: hint,
};
}
export class CloudWatchLanguageProvider extends LanguageProvider {
logLabelOptions: any[];
logLabelFetchTs?: number;
started: boolean;
initialRange: AbsoluteTimeRange;
datasource: CloudWatchDatasource;
constructor(datasource: CloudWatchDatasource, initialValues?: any) {
super();
this.datasource = datasource;
Object.assign(this, initialValues);
}
// Strip syntax chars
cleanText = (s: string) => s.replace(/[()]/g, '').trim();
getSyntax(): Grammar {
return syntax;
}
request = (url: string, params?: any): Promise<{ data: { data: string[] } }> => {
return this.datasource.awsRequest(url, params);
};
start = () => {
if (!this.startTask) {
this.startTask = Promise.resolve().then(() => {
this.started = true;
return [];
});
}
return this.startTask;
};
fetchFields = _.throttle(async (logGroups: string[]) => {
const results = await Promise.all(
logGroups.map(logGroup => this.datasource.getLogGroupFields({ logGroupName: logGroup }))
);
return [
...new Set<string>(
results.reduce((acc: string[], cur) => acc.concat(cur.logGroupFields?.map(f => f.name) as string[]), [])
).values(),
];
}, 30 * 1000);
/**
* Return suggestions based on input that can then be plugged into a typeahead dropdown.
* Keep this DOM-free for testing.
* @param input
* @param context Optional in the type signature, but required when getLabelCompletionItems is involved
* @param context.absoluteRange Required when getLabelCompletionItems is involved
* @param context.history Optional; used only by getEmptyCompletionItems
*/
async provideCompletionItems(input: TypeaheadInput, context?: TypeaheadContext): Promise<TypeaheadOutput> {
//console.log('Providing completion items...');
const { value } = input;
// Get tokens
const tokens = value?.data.get('tokens');
if (!tokens || !tokens.length) {
return { suggestions: [] };
}
const curToken: Token = tokens.filter(
(token: any) =>
token.offsets.start <= value!.selection?.start?.offset && token.offsets.end >= value!.selection?.start?.offset
)[0];
const isFirstToken = curToken.prev === null || curToken.prev === undefined;
const prevToken = prevNonWhitespaceToken(curToken);
const funcsWithFieldArgs = [
'avg',
'count',
'count_distinct',
'earliest',
'latest',
'sortsFirst',
'sortsLast',
'max',
'min',
'pct',
'stddev',
'ispresent',
'fromMillis',
'toMillis',
'isempty',
'isblank',
'isValidIp',
'isValidIpV4',
'isValidIpV6',
'isIpInSubnet',
'isIpv4InSubnet',
'isIpv6InSubnet',
].map(funcName => funcName.toLowerCase());
if (curToken.content === '(' && prevToken != null) {
if (funcsWithFieldArgs.includes(prevToken.content.toLowerCase()) && prevToken.types.includes('function')) {
const suggs = await this.getFieldCompletionItems(context?.logGroupNames ?? []);
return suggs;
}
}
// if (prevToken === null) {
// return {
// suggestions: [],
// };
// }
// if (prevToken) {
// console.log(`Previous token: '${prevToken.content}'`);
// }
const isCommandStart = isFirstToken || (!isFirstToken && prevToken?.types.includes('command-separator'));
//console.log(`Is command start? ${isCommandStart}`);
if (isCommandStart) {
return this.getCommandCompletionItems();
} else if (!isFirstToken) {
if (prevToken?.types.includes('keyword')) {
return this.handleKeyword(prevToken, context);
}
if (prevToken?.types.includes('comparison-operator')) {
const suggs = await this.getFieldCompletionItems(context?.logGroupNames ?? []);
const boolFuncSuggs = this.getBoolFuncCompletionItems();
const numFuncSuggs = this.getNumericFuncCompletionItems();
suggs.suggestions.push(...boolFuncSuggs.suggestions, ...numFuncSuggs.suggestions);
return suggs;
}
const commandToken = this.findCommandToken(curToken);
if (commandToken !== null) {
const typeaheadOutput = await this.handleCommand(commandToken, curToken, context);
return typeaheadOutput;
}
}
return {
suggestions: [],
};
}
handleKeyword = async (token: Token, context?: TypeaheadContext): Promise<TypeaheadOutput | null> => {
if (token.content.toLowerCase() === 'by') {
const suggs = await this.getFieldCompletionItems(context?.logGroupNames ?? []);
const functionSuggestions = [
{ prefixMatch: true, label: 'Functions', items: STRING_FUNCTIONS.concat(DATETIME_FUNCTIONS, IP_FUNCTIONS) },
];
suggs.suggestions.push(...functionSuggestions);
return suggs;
}
return null;
};
handleCommand = async (commandToken: Token, curToken: Token, context: TypeaheadContext): Promise<TypeaheadOutput> => {
const queryCommand = commandToken.content.toLowerCase();
const prevToken = prevNonWhitespaceToken(curToken);
const currentTokenIsFirstArg = prevToken === commandToken;
// console.log(
// `Query Command: '${queryCommand}'. Previous token: '${prevToken}'. First arg? ${currentTokenIsFirstArg}`
// );
if (queryCommand === 'sort') {
if (currentTokenIsFirstArg) {
return await this.getFieldCompletionItems(context.logGroupNames ?? []);
} else if (prevToken?.types.includes('field-name')) {
// suggest sort options
return {
suggestions: [
{
prefixMatch: true,
label: 'Sort Order',
items: [
{
label: 'asc',
},
{ label: 'desc' },
],
},
],
};
}
}
if (queryCommand === 'parse') {
if (currentTokenIsFirstArg) {
return await this.getFieldCompletionItems(context.logGroupNames ?? []);
}
}
let typeaheadOutput: TypeaheadOutput | null = null;
if (
(commandToken.next?.types.includes('whitespace') && commandToken.next.next === null) ||
nextNonWhitespaceToken(commandToken) === curToken ||
(curToken.content === ',' && curToken.types.includes('punctuation')) ||
(curToken.prev?.content === ',' && curToken.prev.types.includes('punctuation'))
) {
if (['display', 'fields'].includes(queryCommand)) {
// Current token comes straight after command OR after comma
typeaheadOutput = await this.getFieldCompletionItems(context.logGroupNames ?? []);
typeaheadOutput.suggestions.push(...this.getFunctionCompletionItems().suggestions);
return typeaheadOutput;
} else if (queryCommand === 'stats') {
typeaheadOutput = this.getStatsAggCompletionItems();
} else if (queryCommand === 'filter') {
if (currentTokenIsFirstArg) {
const sugg = await this.getFieldCompletionItems(context.logGroupNames ?? []);
const boolFuncs = this.getBoolFuncCompletionItems();
sugg.suggestions.push(...boolFuncs.suggestions);
return sugg;
}
}
if (
(curToken.content === ',' && curToken.types.includes('punctuation')) ||
(commandToken.next?.types.includes('whitespace') && commandToken.next.next === null)
) {
typeaheadOutput?.suggestions.forEach(group => {
group.skipFilter = true;
});
}
return typeaheadOutput!;
}
return { suggestions: [] };
};
findCommandToken = (startToken: Token): Token | null => {
let thisToken = { ...startToken };
while (thisToken.prev !== null) {
thisToken = thisToken.prev;
const isFirstCommand = thisToken.types.includes('query-command') && thisToken.prev === null;
if (thisToken.types.includes('command-separator') || isFirstCommand) {
// next token should be command
if (!isFirstCommand && thisToken.next?.types.includes('query-command')) {
return thisToken.next;
} else {
return thisToken;
}
}
}
return null;
};
getBeginningCompletionItems = (context: TypeaheadContext): TypeaheadOutput => {
return {
suggestions: [
...this.getEmptyCompletionItems(context).suggestions,
...this.getCommandCompletionItems().suggestions,
],
};
};
getEmptyCompletionItems(context: TypeaheadContext): TypeaheadOutput {
const history = context?.history;
const suggestions = [];
if (history?.length) {
const historyItems = _.chain(history)
.map(h => h.query.expression)
.filter()
.uniq()
.take(HISTORY_ITEM_COUNT)
.map(wrapLabel)
.map((item: CompletionItem) => addHistoryMetadata(item, history))
.value();
suggestions.push({
prefixMatch: true,
skipSort: true,
label: 'History',
items: historyItems,
});
}
return { suggestions };
}
getCommandCompletionItems = (): TypeaheadOutput => {
return { suggestions: [{ prefixMatch: true, label: 'Commands', items: QUERY_COMMANDS }] };
};
getFunctionCompletionItems = (): TypeaheadOutput => {
return { suggestions: [{ prefixMatch: true, label: 'Functions', items: FUNCTIONS }] };
};
getStatsAggCompletionItems = (): TypeaheadOutput => {
return { suggestions: [{ prefixMatch: true, label: 'Functions', items: AGGREGATION_FUNCTIONS_STATS }] };
};
getBoolFuncCompletionItems = (): TypeaheadOutput => {
return {
suggestions: [
{
prefixMatch: true,
label: 'Functions',
items: BOOLEAN_FUNCTIONS,
},
],
};
};
getNumericFuncCompletionItems = (): TypeaheadOutput => {
return {
suggestions: [
{
prefixMatch: true,
label: 'Functions',
items: NUMERIC_OPERATORS,
},
],
};
};
getFieldCompletionItems = async (logGroups: string[]): Promise<TypeaheadOutput> => {
//console.log(`Fetching fields... ${logGroups}`);
const fields = await this.fetchFields(logGroups);
//console.log(fields);
return {
suggestions: [
{
prefixMatch: true,
label: 'Fields',
items: fields.map(field => ({
label: field,
insertText: field.match(/@?[_a-zA-Z]+[_.0-9a-zA-Z]*/) ? field : `\`${field}\``,
})),
},
],
};
};
}
function nextNonWhitespaceToken(token: Token): Token | null {
let curToken = token;
while (curToken.next) {
if (curToken.next.types.includes('whitespace')) {
curToken = curToken.next;
} else {
return curToken.next;
}
}
return null;
}
function prevNonWhitespaceToken(token: Token): Token | null {
let curToken = token;
while (curToken.prev) {
if (curToken.prev.types.includes('whitespace')) {
curToken = curToken.prev;
} else {
return curToken.prev;
}
}
return null;
}
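As a quick illustration of the merging done by fetchFields above, field names returned for each selected log group are concatenated and de-duplicated with a Set. The field lists here are hypothetical:

const groupAFields = ['@timestamp', '@message', 'statusCode'];
const groupBFields = ['@timestamp', '@message', 'requestId'];
const mergedFields = [...new Set<string>([...groupAFields, ...groupBFields]).values()];
// mergedFields => ['@timestamp', '@message', 'statusCode', 'requestId']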

View File

@ -1,15 +1,20 @@
import './query_parameter_ctrl';
import { DataSourcePlugin } from '@grafana/data';
import { ConfigEditor } from './components/ConfigEditor';
import { QueryEditor } from './components/QueryEditor';
import CloudWatchDatasource from './datasource';
import { CloudWatchDatasource } from './datasource';
import { CloudWatchAnnotationsQueryCtrl } from './annotations_query_ctrl';
import { CloudWatchJsonData, CloudWatchQuery } from './types';
import { CloudWatchLogsQueryEditor } from './components/LogsQueryEditor';
import { PanelQueryEditor } from './components/PanelQueryEditor';
import LogsCheatSheet from './components/LogsCheatSheet';
import { CombinedMetricsEditor } from './components/CombinedMetricsEditor';
export const plugin = new DataSourcePlugin<CloudWatchDatasource, CloudWatchQuery, CloudWatchJsonData>(
CloudWatchDatasource
)
.setExploreStartPage(LogsCheatSheet)
.setConfigEditor(ConfigEditor)
.setQueryEditor(QueryEditor)
.setExploreQueryField(QueryEditor)
.setQueryEditor(PanelQueryEditor)
.setExploreMetricsQueryField(CombinedMetricsEditor)
.setExploreLogsQueryField(CloudWatchLogsQueryEditor)
.setAnnotationQueryCtrl(CloudWatchAnnotationsQueryCtrl);

View File

@ -13,11 +13,8 @@
<div class="gf-form">
<label class="gf-form-label query-keyword width-8">Metric</label>
<metric-segment
segment="namespaceSegment"
get-options="getNamespaces()"
on-change="namespaceChanged()"
></metric-segment>
<metric-segment segment="namespaceSegment" get-options="getNamespaces()" on-change="namespaceChanged()">
</metric-segment>
<metric-segment segment="metricSegment" get-options="getMetrics()" on-change="metricChanged()"></metric-segment>
</div>
@ -26,11 +23,8 @@
</div>
<div class="gf-form" ng-repeat="segment in statSegments">
<metric-segment
segment="segment"
get-options="getStatSegments(segment, $index)"
on-change="statSegmentChanged(segment, $index)"
></metric-segment>
<metric-segment segment="segment" get-options="getStatSegments(segment, $index)"
on-change="statSegmentChanged(segment, $index)"></metric-segment>
</div>
<div class="gf-form gf-form--grow">
@ -41,12 +35,8 @@
<div class="gf-form-inline" ng-if="target.expression.length === 0">
<div class="gf-form">
<label class="gf-form-label query-keyword width-8">Dimensions</label>
<metric-segment
ng-repeat="segment in dimSegments"
segment="segment"
get-options="getDimSegments(segment, $index)"
on-change="dimSegmentChanged(segment, $index)"
></metric-segment>
<metric-segment ng-repeat="segment in dimSegments" segment="segment" get-options="getDimSegments(segment, $index)"
on-change="dimSegmentChanged(segment, $index)"></metric-segment>
</div>
<div class="gf-form gf-form--grow">
@ -58,31 +48,16 @@
<div class="gf-form">
<label class=" gf-form-label query-keyword width-8 ">
Id
<info-popover mode="right-normal "
>Id can include numbers, letters, and underscore, and must start with a lowercase letter.</info-popover
>
<info-popover mode="right-normal ">Id can include numbers, letters, and underscore, and must start with a
lowercase letter.</info-popover>
</label>
<input
type="text "
class="gf-form-input "
ng-model="target.id "
spellcheck="false"
ng-pattern="/^[a-z][a-zA-Z0-9_]*$/"
ng-model-onblur
ng-change="onChange() "
/>
<input type="text " class="gf-form-input " ng-model="target.id " spellcheck="false"
ng-pattern="/^[a-z][a-zA-Z0-9_]*$/" ng-model-onblur ng-change="onChange() " />
</div>
<div class="gf-form max-width-30 ">
<label class="gf-form-label query-keyword width-7 ">Expression</label>
<input
type="text "
class="gf-form-input "
ng-model="target.expression
"
spellcheck="false"
ng-model-onblur
ng-change="onChange() "
/>
<input type="text " class="gf-form-input " ng-model="target.expression
" spellcheck="false" ng-model-onblur ng-change="onChange() " />
</div>
</div>
@ -92,27 +67,13 @@
Min period
<info-popover mode="right-normal ">Minimum interval between points in seconds</info-popover>
</label>
<input
type="text "
class="gf-form-input "
ng-model="target.period "
spellcheck="false"
placeholder="auto
"
ng-model-onblur
ng-change="onChange() "
/>
<input type="text " class="gf-form-input " ng-model="target.period " spellcheck="false" placeholder="auto
" ng-model-onblur ng-change="onChange() " />
</div>
<div class="gf-form max-width-30 ">
<label class="gf-form-label query-keyword width-7 ">Alias</label>
<input
type="text "
class="gf-form-input "
ng-model="target.alias "
spellcheck="false"
ng-model-onblur
ng-change="onChange() "
/>
<input type="text " class="gf-form-input " ng-model="target.alias " spellcheck="false" ng-model-onblur
ng-change="onChange() " />
<info-popover mode="right-absolute ">
Alias replacement variables:
<ul ng-non-bindable>

View File

@ -4,6 +4,7 @@
"id": "cloudwatch",
"category": "cloud",
"metrics": true,
"logs": true,
"alerting": true,
"annotations": true,
"includes": [

View File

@ -30,7 +30,7 @@ export class CloudWatchQueryParameterCtrl {
memo.push(uiSegmentSrv.newKeyValue(value));
return memo;
},
[]
[] as any
);
$scope.statSegments = _.map($scope.target.statistics, stat => {
@ -84,7 +84,7 @@ export class CloudWatchQueryParameterCtrl {
}
return memo;
},
[]
[] as any
);
$scope.ensurePlusButton($scope.statSegments);
@ -106,7 +106,7 @@ export class CloudWatchQueryParameterCtrl {
}
const target = $scope.target;
let query = Promise.resolve([]);
let query = Promise.resolve([] as any[]);
if (segment.type === 'key' || segment.type === 'plus-button') {
query = $scope.datasource.getDimensionKeys($scope.target.namespace, $scope.target.region);

View File

@ -1,10 +1,10 @@
import '../datasource';
import CloudWatchDatasource from '../datasource';
import { CloudWatchDatasource } from '../datasource';
import * as redux from 'app/store/store';
import { DataSourceInstanceSettings, dateMath } from '@grafana/data';
import { TemplateSrv } from 'app/features/templating/template_srv';
import { CustomVariable } from 'app/features/templating/all';
import { CloudWatchQuery } from '../types';
import { CloudWatchQuery, CloudWatchMetricsQuery } from '../types';
import { backendSrv } from 'app/core/services/backend_srv'; // will use the version in __mocks__
import { TimeSrv } from 'app/features/dashboard/services/TimeSrv';
import { convertToStoreState } from '../../../../../test/helpers/convertToStoreState';
@ -46,14 +46,13 @@ describe('CloudWatchDatasource', () => {
jest.clearAllMocks();
});
describe('When performing CloudWatch query', () => {
let requestParams: { queries: CloudWatchQuery[] };
describe('When performing CloudWatch metrics query', () => {
const query = {
range: defaultTimeRange,
rangeRaw: { from: 1483228800, to: 1483232400 },
targets: [
{
type: 'Metrics',
expression: '',
refId: 'A',
region: 'us-east-1',
@ -72,6 +71,7 @@ describe('CloudWatchDatasource', () => {
timings: [null],
results: {
A: {
type: 'Metrics',
error: '',
refId: 'A',
meta: { gmdMeta: [] },
@ -93,25 +93,27 @@ describe('CloudWatchDatasource', () => {
};
beforeEach(() => {
datasourceRequestMock.mockImplementation(params => {
requestParams = params.data;
datasourceRequestMock.mockImplementation(() => {
return Promise.resolve({ data: response });
});
});
it('should generate the correct query', done => {
ctx.ds.query(query).then(() => {
const params = requestParams.queries[0];
expect(params.namespace).toBe(query.targets[0].namespace);
expect(params.metricName).toBe(query.targets[0].metricName);
expect(params.dimensions['InstanceId']).toStrictEqual(['i-12345678']);
expect(params.statistics).toEqual(query.targets[0].statistics);
expect(params.period).toBe(query.targets[0].period);
done();
});
it('should generate the correct query', async () => {
await ctx.ds.query(query);
expect(datasourceRequestMock.mock.calls[0][0].data.queries).toMatchObject(
expect.arrayContaining([
expect.objectContaining({
namespace: query.targets[0].namespace,
metricName: query.targets[0].metricName,
dimensions: { InstanceId: ['i-12345678'] },
statistics: query.targets[0].statistics,
period: query.targets[0].period,
}),
])
);
});
it('should generate the correct query with interval variable', done => {
it('should generate the correct query with interval variable', async () => {
templateSrv.init([
new CustomVariable(
{
@ -130,6 +132,7 @@ describe('CloudWatchDatasource', () => {
rangeRaw: { from: 1483228800, to: 1483232400 },
targets: [
{
type: 'Metrics',
refId: 'A',
region: 'us-east-1',
namespace: 'AWS/EC2',
@ -143,11 +146,8 @@ describe('CloudWatchDatasource', () => {
],
};
ctx.ds.query(query).then(() => {
const params = requestParams.queries[0];
expect(params.period).toBe('600');
done();
});
await ctx.ds.query(query);
expect(datasourceRequestMock.mock.calls[0][0].data.queries[0].period).toEqual('600');
});
it.each(['pNN.NN', 'p9', 'p99.', 'p99.999'])('should cancel query for invalid extended statistics (%s)', stat => {
@ -156,6 +156,7 @@ describe('CloudWatchDatasource', () => {
rangeRaw: { from: 1483228800, to: 1483232400 },
targets: [
{
type: 'Metrics',
refId: 'A',
region: 'us-east-1',
namespace: 'AWS/EC2',
@ -181,8 +182,7 @@ describe('CloudWatchDatasource', () => {
describe('a correct cloudwatch url should be built for each time series in the response', () => {
beforeEach(() => {
datasourceRequestMock.mockImplementation(params => {
requestParams = params.data;
datasourceRequestMock.mockImplementation(() => {
return Promise.resolve({ data: response });
});
});
@ -238,6 +238,7 @@ describe('CloudWatchDatasource', () => {
describe('and throttling exception is thrown', () => {
const partialQuery = {
type: 'Metrics',
namespace: 'AWS/EC2',
metricName: 'CPUUtilization',
dimensions: {
@ -376,6 +377,7 @@ describe('CloudWatchDatasource', () => {
rangeRaw: { from: 1483228800, to: 1483232400 },
targets: [
{
type: 'Metrics',
refId: 'A',
region: 'default',
namespace: 'AWS/EC2',
@ -402,6 +404,7 @@ describe('CloudWatchDatasource', () => {
rangeRaw: { from: 1483228800, to: 1483232400 },
targets: [
{
type: 'Metrics',
refId: 'A',
region: 'us-east-1',
namespace: 'AWS/ApplicationELB',
@ -463,7 +466,7 @@ describe('CloudWatchDatasource', () => {
});
describe('When performing CloudWatch query with template variables', () => {
let requestParams: { queries: CloudWatchQuery[] };
let requestParams: { queries: CloudWatchMetricsQuery[] };
beforeEach(() => {
const variables = [
new CustomVariable(
@ -534,6 +537,7 @@ describe('CloudWatchDatasource', () => {
rangeRaw: { from: 1483228800, to: 1483232400 },
targets: [
{
type: 'Metrics',
refId: 'A',
region: 'us-east-1',
namespace: 'TestNamespace',
@ -559,6 +563,7 @@ describe('CloudWatchDatasource', () => {
rangeRaw: { from: 1483228800, to: 1483232400 },
targets: [
{
type: 'Metrics',
refId: 'A',
region: 'us-east-1',
namespace: 'TestNamespace',
@ -592,6 +597,7 @@ describe('CloudWatchDatasource', () => {
rangeRaw: { from: 1483228800, to: 1483232400 },
targets: [
{
type: 'Metrics',
refId: 'A',
region: 'us-east-1',
namespace: 'TestNamespace',
@ -621,6 +627,7 @@ describe('CloudWatchDatasource', () => {
rangeRaw: { from: 1483228800, to: 1483232400 },
targets: [
{
type: 'Metrics',
refId: 'A',
region: 'us-east-1',
namespace: 'TestNamespace',

View File

@ -0,0 +1,367 @@
import { Grammar } from 'prismjs';
import { CompletionItem } from '@grafana/ui';
export const QUERY_COMMANDS: CompletionItem[] = [
{
label: 'fields',
documentation: 'Retrieves the specified fields from log events',
},
{ label: 'display', documentation: 'Specifies which fields to display in the query results' },
{
label: 'filter',
insertText: 'filter',
documentation: 'Filters the results of a query based on one or more conditions',
},
{
label: 'stats',
insertText: 'stats',
documentation: 'Calculates aggregate statistics based on the values of log fields',
},
{ label: 'sort', documentation: 'Sorts the retrieved log events' },
{ label: 'limit', documentation: 'Specifies the number of log events returned by the query' },
{
label: 'parse',
documentation:
'Extracts data from a log field, creating one or more ephemeral fields that you can process further in the query',
},
];
export const COMPARISON_OPERATORS = ['=', '!=', '<', '<=', '>', '>='];
export const ARITHMETIC_OPERATORS = ['+', '-', '*', '/', '^', '%'];
export const NUMERIC_OPERATORS = [
{
label: 'abs',
detail: 'abs(a)',
documentation: 'Absolute value.',
},
{
label: 'ceil',
detail: 'ceil(a)',
documentation: 'Round to ceiling (the smallest integer that is greater than the value of a).',
},
{
label: 'floor',
detail: 'floor(a)',
documentation: 'Round to floor (the largest integer that is smaller than the value of a).',
},
{
label: 'greatest',
detail: 'greatest(a,b, ... z)',
documentation: 'Returns the largest value.',
},
{
label: 'least',
detail: 'least(a, b, ... z)',
documentation: 'Returns the smallest value.',
},
{
label: 'log',
detail: 'log(a)',
documentation: 'Natural logarithm.',
},
{
label: 'sqrt',
detail: 'sqrt(a)',
documentation: 'Square root.',
},
];
export const GENERAL_FUNCTIONS = [
{
label: 'ispresent',
detail: 'ispresent(fieldname)',
documentation: 'Returns true if the field exists.',
},
{
label: 'coalesce',
detail: 'coalesce(fieldname1, fieldname2, ... fieldnamex)',
documentation: 'Returns the first non-null value from the list.',
},
];
export const STRING_FUNCTIONS = [
{
label: 'isempty',
detail: 'isempty(fieldname)',
documentation: 'Returns true if the field is missing or is an empty string.',
},
{
label: 'isblank',
detail: 'isblank(fieldname)',
documentation: 'Returns true if the field is missing, an empty string, or contains only white space.',
},
{
label: 'concat',
detail: 'concat(string1, string2, ... stringz)',
documentation: 'Concatenates the strings.',
},
{
label: 'ltrim',
detail: 'ltrim(string) or ltrim(string1, string2)',
documentation:
'Remove white space from the left of the string. If the function has a second string argument, it removes the characters of string2 from the left of string1.',
},
{
label: 'rtrim',
detail: 'rtrim(string) or rtrim(string1, string2)',
documentation:
'Remove white space from the right of the string. If the function has a second string argument, it removes the characters of string2 from the right of string1.',
},
{
label: 'trim',
detail: 'trim(string) or trim(string1, string2)',
documentation:
'Remove white space from both ends of the string. If the function has a second string argument, it removes the characters of string2 from both sides of string1.',
},
{
label: 'strlen',
detail: 'strlen(string)',
documentation: 'Returns the length of the string in Unicode code points.',
},
{
label: 'toupper',
detail: 'toupper(string)',
documentation: 'Converts the string to uppercase.',
},
{
label: 'tolower',
detail: 'tolower(string)',
documentation: 'Converts the string to lowercase.',
},
{
label: 'substr',
detail: 'substr(string1, x), or substr(string1, x, y)',
documentation:
'Returns a substring from the index specified by the number argument to the end of the string. If the function has a second number argument, it contains the length of the substring to be retrieved.',
},
{
label: 'replace',
detail: 'replace(string1, string2, string3)',
documentation: 'Replaces all instances of string2 in string1 with string3.',
},
{
label: 'strcontains',
detail: 'strcontains(string1, string2)',
documentation: 'Returns 1 if string1 contains string2 and 0 otherwise.',
},
];
export const DATETIME_FUNCTIONS = [
{
label: 'bin',
detail: 'bin(period)',
documentation: 'Rounds the value of @timestamp to the given period and then truncates.',
},
{
label: 'datefloor',
detail: 'datefloor(a, period)',
documentation: 'Truncates the timestamp to the given period.',
},
{
label: 'dateceil',
detail: 'dateceil(a, period)',
documentation: 'Rounds up the timestamp to the given period and then truncates.',
},
{
label: 'fromMillis',
detail: 'fromMillis(fieldname)',
documentation:
'Interprets the input field as the number of milliseconds since the Unix epoch and converts it to a timestamp.',
},
{
label: 'toMillis',
detail: 'toMillis(fieldname)',
documentation:
'Converts the timestamp found in the named field into a number representing the milliseconds since the Unix epoch.',
},
];
export const IP_FUNCTIONS = [
{
label: 'isValidIp',
detail: 'isValidIp(fieldname)',
documentation: 'Returns true if the field is a valid v4 or v6 IP address.',
},
{
label: 'isValidIpV4',
detail: 'isValidIpV4(fieldname)',
documentation: 'Returns true if the field is a valid v4 IP address.',
},
{
label: 'isValidIpV6',
detail: 'isValidIpV6(fieldname)',
documentation: 'Returns true if the field is a valid v6 IP address.',
},
{
label: 'isIpInSubnet',
detail: 'isIpInSubnet(fieldname, string)',
documentation: 'Returns true if the field is a valid v4 or v6 IP address within the specified v4 or v6 subnet.',
},
{
label: 'isIpv4InSubnet',
detail: 'isIpv4InSubnet(fieldname, string)',
documentation: 'Returns true if the field is a valid v4 IP address within the specified v4 subnet.',
},
{
label: 'isIpv6InSubnet',
insertText: 'isIpv6InSubnet',
detail: 'isIpv6InSubnet(fieldname, string)',
documentation: 'Returns true if the field is a valid v6 IP address within the specified v6 subnet.',
},
];
export const BOOLEAN_FUNCTIONS = [
{
label: 'ispresent',
detail: 'ispresent(fieldname)',
documentation: 'Returns true if the field exists.',
},
{
label: 'isempty',
detail: 'isempty(fieldname)',
documentation: 'Returns true if the field is missing or is an empty string.',
},
{
label: 'isblank',
detail: 'isblank(fieldname)',
documentation: 'Returns true if the field is missing, an empty string, or contains only white space.',
},
{
label: 'strcontains',
detail: 'strcontains(string1, string2)',
documentation: 'Returns 1 if string1 contains string2 and 0 otherwise.',
},
...IP_FUNCTIONS,
];
export const AGGREGATION_FUNCTIONS_STATS = [
{
label: 'avg',
detail: 'avg(NumericFieldname)',
documentation: 'The average of the values in the specified field.',
},
{
label: 'count',
detail: 'count(fieldname) or count(*)',
documentation: 'Counts the log records.',
},
{
label: 'count_distinct',
detail: 'count_distinct(fieldname)',
documentation: 'Returns the number of unique values for the field.',
},
{
label: 'max',
detail: 'max(fieldname)',
documentation: 'The maximum of the values for this log field in the queried logs.',
},
{
label: 'min',
detail: 'min(fieldname)',
documentation: 'The minimum of the values for this log field in the queried logs.',
},
{
label: 'pct',
detail: 'pct(fieldname, value)',
documentation: 'A percentile indicates the relative standing of a value in a dataset.',
},
{
label: 'stddev',
detail: 'stddev(NumericFieldname)',
documentation: 'The standard deviation of the values in the specified field.',
},
{
label: 'sum',
detail: 'sum(NumericFieldname)',
documentation: 'The sum of the values in the specified field.',
},
];
export const NON_AGGREGATION_FUNCS_STATS = [
{
label: 'earliest',
detail: 'earliest(fieldname)',
documentation:
'Returns the value of fieldName from the log event that has the earliest time stamp in the queried logs.',
},
{
label: 'latest',
detail: 'latest(fieldname)',
documentation:
'Returns the value of fieldName from the log event that has the latest time stamp in the queried logs.',
},
{
label: 'sortsFirst',
detail: 'sortsFirst(fieldname)',
documentation: 'Returns the value of fieldName that sorts first in the queried logs.',
},
{
label: 'sortsLast',
detail: 'sortsLast(fieldname)',
documentation: 'Returns the value of fieldName that sorts last in the queried logs.',
},
];
export const STATS_FUNCS = [...AGGREGATION_FUNCTIONS_STATS, ...NON_AGGREGATION_FUNCS_STATS];
export const KEYWORDS = ['as', 'like', 'by', 'in', 'desc', 'asc'];
export const FUNCTIONS = [
...NUMERIC_OPERATORS,
...GENERAL_FUNCTIONS,
...STRING_FUNCTIONS,
...DATETIME_FUNCTIONS,
...IP_FUNCTIONS,
...STATS_FUNCS,
];
const tokenizer: Grammar = {
comment: {
pattern: /^#.*/,
greedy: true,
},
backticks: {
pattern: /`.*?`/,
alias: 'string',
greedy: true,
},
quote: {
pattern: /".*?"/,
alias: 'string',
greedy: true,
},
regex: {
pattern: /\/.*?\/(?=\||\s*$|,)/,
greedy: true,
},
'query-command': {
pattern: new RegExp(`\\b(?:${QUERY_COMMANDS.map(command => command.label).join('|')})\\b`, 'i'),
alias: 'function',
},
function: {
pattern: new RegExp(`\\b(?:${FUNCTIONS.map(f => f.label).join('|')})`, 'i'),
},
keyword: {
pattern: new RegExp(`(\\s+)(${KEYWORDS.join('|')})(?=\\s+)`, 'i'),
lookbehind: true,
},
// 'log-group-name': {
// pattern: /[\.\-_/#A-Za-z0-9]+/,
// },
'field-name': {
pattern: /(@?[_a-zA-Z]+[_.0-9a-zA-Z]*)|(`((\\`)|([^`]))*?`)/,
greedy: true,
},
number: /\b-?\d+((\.\d*)?([eE][+-]?\d+)?)?\b/,
'command-separator': {
pattern: /\|/,
alias: 'punctuation',
},
'comparison-operator': {
pattern: /([<>]=?)|(!?=)/,
},
punctuation: /[{}()`,.]/,
whitespace: /\s+/,
};
export default tokenizer;
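The grammar can be exercised directly with Prism, independently of the editor integration. A small sketch, assuming standard prismjs usage; the query string is invented:

import Prism from 'prismjs';
import tokenizer from './syntax';

const query = 'fields @timestamp, @message | filter statusCode >= 500 | stats count(*) by bin(5m)';
const tokens = Prism.tokenize(query, tokenizer);
// `tokens` mixes plain strings with Prism Token objects whose `type` matches the
// rule names above, e.g. 'query-command', 'function', 'field-name',
// 'comparison-operator' and 'command-separator'.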

View File

@ -1,19 +1,53 @@
import { DataQuery, SelectableValue, DataSourceJsonData } from '@grafana/data';
export interface CloudWatchQuery extends DataQuery {
export interface CloudWatchMetricsQuery extends DataQuery {
queryMode: 'Metrics';
apiMode: 'Logs' | 'Metrics'; // TEMP: Remove when logs/metrics unification is done
id: string;
region: string;
namespace: string;
expression: string;
metricName: string;
dimensions: { [key: string]: string | string[] };
statistics: string[];
period: string;
expression: string;
alias: string;
matchExact: boolean;
}
export interface AnnotationQuery extends CloudWatchQuery {
export type LogAction =
| 'DescribeLogGroups'
| 'GetQueryResults'
| 'GetLogGroupFields'
| 'GetLogEvents'
| 'StartQuery'
| 'StopQuery';
export enum CloudWatchLogsQueryStatus {
Scheduled = 'Scheduled',
Running = 'Running',
Complete = 'Complete',
Failed = 'Failed',
Cancelled = 'Cancelled',
}
export interface CloudWatchLogsQuery extends DataQuery {
queryMode: 'Logs';
apiMode: 'Logs' | 'Metrics'; // TEMP: Remove when logs/metrics unification is done
id: string;
region: string;
namespace: string;
expression: string;
logGroupNames: string[];
}
export type CloudWatchQuery = CloudWatchMetricsQuery | CloudWatchLogsQuery;
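Because queryMode is a literal on both members, the union can be narrowed with a simple type guard. A minimal sketch (not part of the commit):

function isLogsQuery(query: CloudWatchQuery): query is CloudWatchLogsQuery {
  return query.queryMode === 'Logs';
}

function describeTarget(query: CloudWatchQuery): string {
  return isLogsQuery(query)
    ? `Logs query over ${query.logGroupNames.length} log group(s)`
    : `Metrics query for ${query.namespace}/${query.metricName}`;
}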
export interface AnnotationQuery extends CloudWatchMetricsQuery {
prefixMatching: boolean;
actionPrefix: string;
alarmNamePrefix: string;
@ -32,3 +66,270 @@ export interface CloudWatchSecureJsonData {
accessKey: string;
secretKey: string;
}
export interface GetQueryResultsRequest {
/**
* The ID number of the query.
*/
queryId: string;
}
export interface ResultField {
/**
* The log event field.
*/
field?: string;
/**
* The value of this field.
*/
value?: string;
}
export interface QueryStatistics {
/**
* The number of log events that matched the query string.
*/
recordsMatched?: number;
/**
* The total number of log events scanned during the query.
*/
recordsScanned?: number;
/**
* The total number of bytes in the log events scanned during the query.
*/
bytesScanned?: number;
}
export type QueryStatus = 'Scheduled' | 'Running' | 'Complete' | 'Failed' | 'Cancelled' | string;
export interface GetLogEventsRequest {
/**
* The name of the log group.
*/
logGroupName: string;
/**
* The name of the log stream.
*/
logStreamName: string;
/**
* The start of the time range, expressed as the number of milliseconds after Jan 1, 1970 00:00:00 UTC. Events with a timestamp equal to this time or later than this time are included. Events with a timestamp earlier than this time are not included.
*/
startTime?: number;
/**
* The end of the time range, expressed as the number of milliseconds after Jan 1, 1970 00:00:00 UTC. Events with a timestamp equal to or later than this time are not included.
*/
endTime?: number;
/**
* The token for the next set of items to return. (You received this token from a previous call.) Using this token works only when you specify true for startFromHead.
*/
nextToken?: string;
/**
* The maximum number of log events returned. If you don't specify a value, the maximum is as many log events as can fit in a response size of 1 MB, up to 10,000 log events.
*/
limit?: number;
/**
* If the value is true, the earliest log events are returned first. If the value is false, the latest log events are returned first. The default value is false. If you are using nextToken in this operation, you must specify true for startFromHead.
*/
startFromHead?: boolean;
}
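To make the startFromHead/nextToken constraint above concrete, a sketch with placeholder values (none of these names come from this change):

const nextPage: GetLogEventsRequest = {
  logGroupName: '/aws/lambda/example-function', // placeholder log group
  logStreamName: '2020/04/25/[$LATEST]example', // placeholder log stream
  startFromHead: true, // must be true whenever nextToken is supplied
  nextToken: 'token-from-previous-response', // placeholder token
  limit: 100,
};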
export interface GetQueryResultsResponse {
/**
* The log events that matched the query criteria during the most recent time it ran. The results value is an array of arrays. Each log event is one object in the top-level array. Each of these log event objects is an array of field/value pairs.
*/
results?: ResultField[][];
/**
* Includes the number of log events scanned by the query, the number of log events that matched the query criteria, and the total number of bytes in the log events that were scanned.
*/
statistics?: QueryStatistics;
/**
* The status of the most recent running of the query. Possible values are Cancelled, Complete, Failed, Running, Scheduled, Timeout, and Unknown. Queries time out after 15 minutes of execution. To avoid having your queries time out, reduce the time range being searched, or partition your query into a number of queries.
*/
status?: QueryStatus;
}
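A hedged sketch of polling these results until the query leaves its non-terminal states; fetchQueryResults is an assumed callback over the GetQueryResults log action, not an API of this datasource:

async function pollQueryResults(
  queryId: string,
  fetchQueryResults: (req: GetQueryResultsRequest) => Promise<GetQueryResultsResponse>
): Promise<GetQueryResultsResponse> {
  for (;;) {
    const response = await fetchQueryResults({ queryId });
    // 'Scheduled' and 'Running' are the only non-terminal states; Complete,
    // Failed, Cancelled, Timeout and Unknown all end the loop.
    if (response.status && response.status !== 'Scheduled' && response.status !== 'Running') {
      return response;
    }
    await new Promise(resolve => setTimeout(resolve, 1000)); // back off before re-polling
  }
}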
export interface DescribeLogGroupsRequest {
/**
* The prefix to match.
*/
logGroupNamePrefix?: string;
/**
* The token for the next set of items to return. (You received this token from a previous call.)
*/
nextToken?: string;
/**
* The maximum number of items returned. If you don't specify a value, the default is up to 50 items.
*/
limit?: number;
refId?: string;
region?: string;
}
export interface TSDBResponse<T = any> {
results: Record<string, TSDBQueryResult<T>>;
message?: string;
}
export interface TSDBQueryResult<T = any> {
refId: string;
series: TSDBTimeSeries[];
tables: Array<TSDBTable<T>>;
dataframes: number[][];
error?: string;
meta?: any;
}
export interface TSDBTable<T = any> {
columns: Array<{ text: string }>;
rows: T[];
}
export interface TSDBTimeSeries {
name: string;
points: TSDBTimePoint[];
tags?: Record<string, string>;
}
export type TSDBTimePoint = [number, number];
export interface LogGroup {
/**
* The name of the log group.
*/
logGroupName?: string;
/**
* The creation time of the log group, expressed as the number of milliseconds after Jan 1, 1970 00:00:00 UTC.
*/
creationTime?: number;
retentionInDays?: number;
/**
* The number of metric filters.
*/
metricFilterCount?: number;
/**
* The Amazon Resource Name (ARN) of the log group.
*/
arn?: string;
/**
* The number of bytes stored.
*/
storedBytes?: number;
/**
* The Amazon Resource Name (ARN) of the CMK to use when encrypting log data.
*/
kmsKeyId?: string;
}
export interface DescribeLogGroupsResponse {
/**
* The log groups.
*/
logGroups?: LogGroup[];
nextToken?: string;
}
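A sketch of paging through DescribeLogGroups with nextToken; describeLogGroups is an assumed callback, and the 20-group cap mirrors the selection limit this change enforces in the editor:

async function collectLogGroupNames(
  describeLogGroups: (req: DescribeLogGroupsRequest) => Promise<DescribeLogGroupsResponse>,
  logGroupNamePrefix?: string
): Promise<string[]> {
  const names: string[] = [];
  let nextToken: string | undefined;
  do {
    const page = await describeLogGroups({ logGroupNamePrefix, nextToken, limit: 50 });
    for (const group of page.logGroups || []) {
      if (group.logGroupName) {
        names.push(group.logGroupName);
      }
      if (names.length >= 20) {
        return names; // stop early; the editor only allows up to 20 log groups
      }
    }
    nextToken = page.nextToken;
  } while (nextToken);
  return names;
}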
export interface GetLogGroupFieldsRequest {
/**
* The name of the log group to search.
*/
logGroupName: string;
/**
* The time to set as the center of the query. If you specify time, the 8 minutes before and 8 minutes after this time are searched. If you omit time, the past 15 minutes are queried. The time value is specified as epoch time, the number of seconds since January 1, 1970, 00:00:00 UTC.
*/
time?: number;
}
export interface LogGroupField {
/**
* The name of a log field.
*/
name?: string;
/**
* The percentage of log events queried that contained the field.
*/
percent?: number;
}
export interface GetLogGroupFieldsResponse {
/**
* The array of fields found in the query. Each object in the array contains the name of the field, along with the percentage of time it appeared in the log events that were queried.
*/
logGroupFields?: LogGroupField[];
}
export interface StartQueryRequest {
/**
* The log group on which to perform the query. A StartQuery operation must include a logGroupNames or a logGroupName parameter, but not both.
*/
logGroupName?: string;
/**
* The list of log groups to be queried. You can include up to 20 log groups. A StartQuery operation must include a logGroupNames or a logGroupName parameter, but not both.
*/
logGroupNames?: string[];
/**
* The beginning of the time range to query. The range is inclusive, so the specified start time is included in the query. Specified as epoch time, the number of seconds since January 1, 1970, 00:00:00 UTC.
*/
startTime: number;
/**
* The end of the time range to query. The range is inclusive, so the specified end time is included in the query. Specified as epoch time, the number of seconds since January 1, 1970, 00:00:00 UTC.
*/
endTime: number;
/**
* The query string to use. For more information, see CloudWatch Logs Insights Query Syntax.
*/
queryString: string;
/**
* The maximum number of log events to return in the query. If the query string uses the fields command, only the specified fields and their values are returned. The default is 1000.
*/
limit?: number;
}
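To illustrate the either/or constraint documented above, a sketch with placeholder values:

const nowSeconds = Math.floor(Date.now() / 1000);
const startQuery: StartQueryRequest = {
  // Set logGroupNames or logGroupName, but not both; at most 20 groups.
  logGroupNames: ['/aws/lambda/example-function'], // placeholder log group
  startTime: nowSeconds - 3600, // epoch seconds, inclusive
  endTime: nowSeconds, // epoch seconds, inclusive
  queryString: 'fields @timestamp, @message | sort @timestamp desc | limit 20',
  limit: 20,
};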
export interface StartQueryResponse {
/**
* The unique ID of the query.
*/
queryId?: string;
}
export interface MetricRequest {
from: string;
to: string;
queries: MetricQuery[];
debug?: boolean;
}
interface MetricQuery {
[key: string]: any;
datasourceId: number;
refId?: string;
maxDataPoints?: number;
intervalMs?: number;
}
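For orientation, a sketch of the request envelope only; the per-query fields beyond those typed here depend on the backend contract and are deliberately left out, and the datasourceId is a placeholder:

const request: MetricRequest = {
  from: String(Date.now() - 6 * 60 * 60 * 1000), // epoch-millisecond strings
  to: String(Date.now()),
  queries: [
    {
      datasourceId: 1, // placeholder datasource id
      refId: 'A',
      intervalMs: 1000,
      maxDataPoints: 500,
    },
  ],
};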
// interface TsdbQuery {
// TimeRange *TimeRange
// Queries []*Query
// Debug bool
// }
// type Query struct {
// RefId string
// Model *simplejson.Json
// DataSource *models.DataSource
// MaxDataPoints int64
// IntervalMs int64
// }
export interface CloudWatchMetricsAnnotation {
namespace: string;
metricName: string;
expression: string;
dimensions: {};
region: string;
id: string;
alias: string;
statistics: string[];
matchExact: true;
prefixMatching: false;
actionPrefix: string;
alarmNamePrefix: string;
}

View File

@ -0,0 +1,69 @@
import { useState, useEffect } from 'react';
import Prism, { Grammar } from 'prismjs';
import { AbsoluteTimeRange } from '@grafana/data';
import { useRefMounted } from 'app/core/hooks/useRefMounted';
import { CloudWatchLanguageProvider } from './language_provider';
const PRISM_SYNTAX = 'cloudwatch';
/**
 * Initialise the language provider. Returns a languageProviderInitialized boolean because there does not seem to be
 * another way to know whether the provider is already initialised. Initialisation modifies the provided
 * languageProvider directly.
*/
const useInitLanguageProvider = (languageProvider: CloudWatchLanguageProvider, absoluteRange: AbsoluteTimeRange) => {
const mounted = useRefMounted();
const [languageProviderInitialized, setLanguageProviderInitialized] = useState(false);
// Async
const initializeLanguageProvider = async () => {
languageProvider.initialRange = absoluteRange;
await languageProvider.start();
if (mounted.current) {
setLanguageProviderInitialized(true);
}
};
useEffect(() => {
initializeLanguageProvider();
}, []);
return languageProviderInitialized;
};
/**
* Returns syntax from languageProvider and initialises global Prism syntax. Waits until languageProvider itself is
* initialised (outside of this hook).
*/
const useCloudwatchSyntax = (languageProvider: CloudWatchLanguageProvider, languageProviderInitialized: boolean) => {
// State
const [syntax, setSyntax] = useState<Grammar | null>(null);
// Effects
useEffect(() => {
if (languageProviderInitialized) {
const syntax = languageProvider.getSyntax();
Prism.languages[PRISM_SYNTAX] = syntax;
setSyntax(syntax);
}
}, [languageProviderInitialized, languageProvider]);
return {
isSyntaxReady: !!syntax,
syntax,
};
};
/**
 * Initializes the given language provider and exposes the CloudWatch Logs syntax once it is ready.
*/
export const useCloudWatchSyntax = (languageProvider: CloudWatchLanguageProvider, absoluteRange: AbsoluteTimeRange) => {
const languageProviderInitialized = useInitLanguageProvider(languageProvider, absoluteRange);
const { isSyntaxReady, syntax } = useCloudwatchSyntax(languageProvider, languageProviderInitialized);
return {
isSyntaxReady,
syntax,
};
};
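A minimal usage sketch, assuming a component that already holds a CloudWatchLanguageProvider and the current absolute range; the component name is illustrative and React.createElement is used only to avoid JSX here:

import React from 'react';

interface SyntaxStatusProps {
  languageProvider: CloudWatchLanguageProvider;
  absoluteRange: AbsoluteTimeRange;
}

// Shows a loading hint until the Prism grammar has been registered globally.
export const SyntaxStatus = ({ languageProvider, absoluteRange }: SyntaxStatusProps) => {
  const { isSyntaxReady } = useCloudWatchSyntax(languageProvider, absoluteRange);
  return React.createElement('span', null, isSyntaxReady ? 'syntax ready' : 'loading syntax…');
};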

View File

@ -13,6 +13,7 @@ export const LokiQueryEditor = memo(function LokiQueryEditor(props: Props) {
const { query, data, datasource, onChange, onRunQuery } = props;
let absolute: AbsoluteTimeRange;
if (data && data.request) {
const { range } = data.request;
absolute = {

View File

@ -43,6 +43,7 @@ import {
import { LiveStreams, LokiLiveTarget } from './live_streams';
import LanguageProvider from './language_provider';
import { serializeParams } from '../../../core/utils/fetch';
import { RowContextOptions } from '@grafana/ui/src/components/Logs/LogRowContextProvider';
export type RangeQueryOptions = Pick<DataQueryRequest<LokiQuery>, 'range' | 'intervalMs' | 'maxDataPoints' | 'reverse'>;
export const DEFAULT_MAX_LINES = 1000;
@ -57,11 +58,6 @@ const DEFAULT_QUERY_PARAMS: Partial<LokiRangeQueryRequest> = {
query: '',
};
export class LokiDatasource extends DataSourceApi<LokiQuery, LokiOptions> {
private streams = new LiveStreams();
languageProvider: LanguageProvider;
@ -384,7 +380,7 @@ export class LokiDatasource extends DataSourceApi<LokiQuery, LokiOptions> {
return Math.ceil(date.valueOf() * 1e6);
}
getLogRowContext = (row: LogRowModel, options?: RowContextOptions): Promise<{ data: DataFrame[] }> => {
const target = this.prepareLogRowContextQueryTarget(
row,
(options && options.limit) || 10,
@ -525,6 +521,10 @@ export class LokiDatasource extends DataSourceApi<LokiQuery, LokiOptions> {
return annotations;
}
showContextToggle = (row?: LogRowModel) => {
return !!(row && row.searchWords && row.searchWords.length > 0);
};
throwUnless = (err: any, condition: boolean, target: LokiQuery) => {
if (condition) {
return of(err);

View File

@ -1,6 +1,14 @@
import _ from 'lodash';
import { colors } from '@grafana/ui';
import { getColorFromHexRgbOrName, TimeRange, FieldType, Field, DataFrame, getTimeField } from '@grafana/data';
import {
getColorFromHexRgbOrName,
TimeRange,
FieldType,
Field,
DataFrame,
getTimeField,
dateTime,
} from '@grafana/data';
import TimeSeries from 'app/core/time_series2';
import config from 'app/core/config';
@ -42,7 +50,7 @@ export class DataProcessor {
const datapoints = [];
for (let r = 0; r < series.length; r++) {
datapoints.push([field.values.get(r), dateTime(timeField.values.get(r)).valueOf()]);
}
list.push(this.toTimeSeries(field, name, i, j, datapoints, list.length, range));
}
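A small illustration of what the dateTime(...) wrapper buys here (the sample values are arbitrary): time fields arriving from dataframes can hold ISO strings rather than epoch numbers, while the graph works with epoch milliseconds.

import { dateTime } from '@grafana/data';

const fromIsoString = dateTime('2020-04-25T21:48:20Z').valueOf(); // epoch milliseconds as a number
const fromNumber = dateTime(1587851300000).valueOf(); // numeric input passes through unchanged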

View File

@ -43,6 +43,7 @@ export const getGraphSeriesModel = (
let fieldColumnIndex = -1;
for (const series of dataFrames) {
const { timeField } = getTimeField(series);
if (!timeField) {
continue;
}

View File

@ -172,6 +172,10 @@ i.navbar-page-btn__search {
}
}
&--danger {
@include buttonBackground($red-base, $red-shade);
}
&--tight {
padding: 0px 4px;
@ -184,6 +188,10 @@ i.navbar-page-btn__search {
@include buttonBackground($btn-primary-bg, $btn-primary-bg-hl);
}
&--danger {
@include buttonBackground($red-base, $red-shade);
}
&:hover {
svg {
color: $text-color;

View File

@ -24,6 +24,7 @@
.slate-query-field__wrapper--disabled {
background-color: inherit;
cursor: not-allowed;
}
.slate-typeahead {
@ -146,6 +147,7 @@
.token.bold {
font-weight: bold;
}
.token.italic {
font-style: italic;
}

View File

@ -24,9 +24,11 @@
background-image: none;
background-color: transparent;
color: $orange-dark !important;
&:focus {
background-color: transparent;
}
i {
text-shadow: none;
background: linear-gradient(180deg, #f05a28 30%, #fbca0a 100%);
@ -35,6 +37,7 @@
-moz-text-fill-color: transparent;
}
}
.explore-ds-picker {
min-width: 200px;
max-width: 300px;
@ -435,7 +438,6 @@
.cheat-sheet-item {
margin: $space-lg 0;
width: 50%;
}
.cheat-sheet-item__title {

View File

@ -5625,6 +5625,11 @@
resolved "https://registry.yarnpkg.com/@types/json-schema/-/json-schema-7.0.4.tgz#38fd73ddfd9b55abb1e1b2ed578cb55bd7b7d339"
integrity sha512-8+KAKzEvSUdeo+kmqnKrqgeE+LcA0tjYWFY7RPProVYwnqDjukzO+3b6dLD56rYX5TdWejnEOLJYOIeh4CXKuA==
"@types/jsurl@^1.2.28":
version "1.2.28"
resolved "https://registry.yarnpkg.com/@types/jsurl/-/jsurl-1.2.28.tgz#03430a7cc878d1415c5b3cb03547ff238bc7a75c"
integrity sha1-A0MKfMh40UFcWzywNUf/I4vHp1w=
"@types/lodash@4.14.123":
version "4.14.123"
resolved "https://registry.yarnpkg.com/@types/lodash/-/lodash-4.14.123.tgz#39be5d211478c8dd3bdae98ee75bb7efe4abfe4d"
@ -15727,6 +15732,11 @@ jstransformer@1.0.0:
is-promise "^2.0.0"
promise "^7.0.1"
jsurl@^0.1.5:
version "0.1.5"
resolved "https://registry.yarnpkg.com/jsurl/-/jsurl-0.1.5.tgz#2a5c8741de39cacafc12f448908bf34e960dcee8"
integrity sha1-KlyHQd45ysr8EvRIkIvzTpYNzug=
jsx-ast-utils@^2.2.3:
version "2.2.3"
resolved "https://registry.yarnpkg.com/jsx-ast-utils/-/jsx-ast-utils-2.2.3.tgz#8a9364e402448a3ce7f14d357738310d9248054f"