Profiling: Add Phlare and Parca datasources (#57809)

* Add phlare datasource

* Rename

* Add parca

* Add self field to parca

* Make sure phlare works with add to dashboard flow

* Add profiling category and hide behind feature flag

* Update description and logos

* Update phlare icon

* Cleanup logging

* Clean up logging

* Fix for shift+enter

* onRunQuery to set label

* Update type naming

* Fix lint

* Fix test and quality issues

Co-authored-by: Joey Tawadrous <joey.tawadrous@grafana.com>
Andrej Ocenas 2022-10-28 13:33:37 +02:00 committed by GitHub
parent 1d53a6f57e
commit 0845ac2f53
81 changed files with 41611 additions and 271 deletions

go.mod (70 changed lines)

@ -19,8 +19,8 @@ replace github.com/gomodule/redigo => github.com/gomodule/redigo v1.8.9
require (
cloud.google.com/go/storage v1.21.0
cuelang.org/go v0.4.3
github.com/Azure/azure-sdk-for-go v59.3.0+incompatible
github.com/Azure/go-autorest/autorest v0.11.22
github.com/Azure/azure-sdk-for-go v65.0.0+incompatible
github.com/Azure/go-autorest/autorest v0.11.27
github.com/BurntSushi/toml v1.1.0
github.com/Masterminds/semver v1.5.0
github.com/VividCortex/mysqlerr v0.0.0-20170204212430-6c6b55f8796f
@ -37,7 +37,7 @@ require (
github.com/gchaincl/sqlhooks v1.3.0
github.com/getsentry/sentry-go v0.13.0
github.com/go-git/go-git/v5 v5.4.2
github.com/go-kit/kit v0.11.0
github.com/go-kit/kit v0.12.0
github.com/go-openapi/strfmt v0.21.3
github.com/go-redis/redis/v8 v8.11.4
github.com/go-sourcemap/sourcemap v2.1.3+incompatible
@ -48,7 +48,7 @@ require (
github.com/gogo/protobuf v1.3.2
github.com/golang/mock v1.6.0
github.com/golang/snappy v0.0.4
github.com/google/go-cmp v0.5.8
github.com/google/go-cmp v0.5.9
github.com/google/uuid v1.3.0
github.com/google/wire v0.5.0
github.com/gorilla/websocket v1.5.0
@ -99,10 +99,10 @@ require (
github.com/yudai/gojsondiff v1.0.0
go.opentelemetry.io/collector v0.31.0
go.opentelemetry.io/collector/model v0.31.0
go.opentelemetry.io/otel v1.6.3
go.opentelemetry.io/otel v1.7.0
go.opentelemetry.io/otel/exporters/jaeger v1.0.0
go.opentelemetry.io/otel/sdk v1.6.3
go.opentelemetry.io/otel/trace v1.6.3
go.opentelemetry.io/otel/sdk v1.7.0
go.opentelemetry.io/otel/trace v1.7.0
golang.org/x/crypto v0.0.0-20220622213112-05595931fe9d
golang.org/x/exp v0.0.0-20220613132600-b0d781184e0d
golang.org/x/net v0.0.0-20220909164309-bea034e7d591 // indirect
@ -111,8 +111,8 @@ require (
golang.org/x/time v0.0.0-20220609170525-579cf78fd858
golang.org/x/tools v0.1.12
gonum.org/v1/gonum v0.11.0
google.golang.org/api v0.74.0
google.golang.org/grpc v1.45.0
google.golang.org/api v0.80.0
google.golang.org/grpc v1.47.0
google.golang.org/protobuf v1.28.1
gopkg.in/alexcesaro/quotedprintable.v3 v3.0.0-20150716171945-2caba252f4dc // indirect
gopkg.in/ini.v1 v1.66.2
@ -137,7 +137,7 @@ require (
github.com/FZambia/eagle v0.0.2 // indirect
github.com/FZambia/sentinel v1.1.0 // indirect
github.com/alecthomas/units v0.0.0-20211218093645-b94a6e3cc137 // indirect
github.com/andybalholm/brotli v1.0.3
github.com/andybalholm/brotli v1.0.4
github.com/apache/arrow/go/arrow v0.0.0-20211112161151-bc219186db40 // indirect
github.com/asaskevich/govalidator v0.0.0-20210307081110-f21760c49a8d // indirect
github.com/beorn7/perks v1.0.1 // indirect
@ -174,9 +174,9 @@ require (
github.com/golang/protobuf v1.5.2
github.com/gomodule/redigo v2.0.0+incompatible // indirect
github.com/google/btree v1.0.1 // indirect
github.com/google/flatbuffers v2.0.0+incompatible // indirect
github.com/googleapis/gax-go/v2 v2.2.0
github.com/gorilla/mux v1.8.0 // indirect
github.com/google/flatbuffers v2.0.5+incompatible // indirect
github.com/googleapis/gax-go/v2 v2.3.0
github.com/gorilla/mux v1.8.0
github.com/grafana/grafana-google-sdk-go v0.0.0-20211104130251-b190293eaf58
github.com/grpc-ecosystem/go-grpc-prometheus v1.2.1-0.20191002090509-6af20e3a5340 // indirect
github.com/hashicorp/errwrap v1.1.0 // indirect
@ -193,8 +193,8 @@ require (
github.com/mailru/easyjson v0.7.7 // indirect
github.com/mattermost/xml-roundtrip-validator v0.1.0 // indirect
github.com/mattetti/filebuffer v1.0.1 // indirect
github.com/mattn/go-runewidth v0.0.9 // indirect
github.com/miekg/dns v1.1.43 // indirect
github.com/mattn/go-runewidth v0.0.13 // indirect
github.com/miekg/dns v1.1.49 // indirect
github.com/mitchellh/go-testing-interface v1.14.0 // indirect
github.com/mna/redisc v1.3.2 // indirect
github.com/modern-go/concurrent v0.0.0-20180306012644-bacd9c7ef1dd // indirect
@ -232,7 +232,7 @@ require (
golang.org/x/text v0.3.8
golang.org/x/xerrors v0.0.0-20220411194840-2f41105eb62f // indirect
google.golang.org/appengine v1.6.7 // indirect
google.golang.org/genproto v0.0.0-20220421151946-72621c1f0bd3
google.golang.org/genproto v0.0.0-20220708155623-50e5f4832e73
gopkg.in/asn1-ber.v1 v1.0.0-20181015200546-f715ec2f112d // indirect
)
@ -240,10 +240,11 @@ require (
cloud.google.com/go/kms v1.4.0
github.com/Azure/azure-sdk-for-go/sdk/azidentity v0.13.2
github.com/Azure/azure-sdk-for-go/sdk/keyvault/azkeys v0.4.0
github.com/Azure/go-autorest/autorest/adal v0.9.17
github.com/Azure/go-autorest/autorest/adal v0.9.20
github.com/armon/go-radix v1.0.0
github.com/blugelabs/bluge v0.1.9
github.com/blugelabs/bluge_segment_api v0.2.0
github.com/bufbuild/connect-go v1.0.0
github.com/dlmiddlecote/sqlstats v1.0.2
github.com/drone/drone-cli v1.5.0
github.com/getkin/kin-openapi v0.94.0
@ -252,12 +253,13 @@ require (
github.com/grafana/dskit v0.0.0-20211011144203-3a88ec0b675f
github.com/jmoiron/sqlx v1.3.5
github.com/matryer/is v1.4.0
github.com/parca-dev/parca v0.12.1
github.com/urfave/cli v1.22.5
go.etcd.io/etcd/api/v3 v3.5.4
go.opentelemetry.io/contrib/instrumentation/google.golang.org/grpc/otelgrpc v0.31.0
go.opentelemetry.io/contrib/instrumentation/google.golang.org/grpc/otelgrpc v0.32.0
go.opentelemetry.io/contrib/propagators/jaeger v1.6.0
go.opentelemetry.io/otel/exporters/otlp/otlptrace v1.6.3
go.opentelemetry.io/otel/exporters/otlp/otlptrace/otlptracegrpc v1.6.3
go.opentelemetry.io/otel/exporters/otlp/otlptrace v1.7.0
go.opentelemetry.io/otel/exporters/otlp/otlptrace/otlptracegrpc v1.7.0
gocloud.dev v0.25.0
gotest.tools v2.2.0+incompatible
)
@ -270,21 +272,22 @@ require (
github.com/cpuguy83/go-md2man/v2 v2.0.2 // indirect
github.com/davecgh/go-spew v1.1.1 // indirect
github.com/drone/drone-yaml v0.0.0-20190729072335-70fa398b3560 // indirect
github.com/go-ozzo/ozzo-validation/v4 v4.3.0 // indirect
github.com/google/go-querystring v1.1.0 // indirect
github.com/google/gofuzz v1.2.0 // indirect
github.com/gosimple/unidecode v1.0.1 // indirect
github.com/hashicorp/go-immutable-radix v1.3.1 // indirect
github.com/hashicorp/memberlist v0.4.0 // indirect
github.com/kr/text v0.2.0 // indirect
github.com/mattn/go-colorable v0.1.12 // indirect
github.com/mitchellh/mapstructure v1.4.3 // indirect
github.com/rivo/uniseg v0.2.0 // indirect
github.com/russross/blackfriday/v2 v2.1.0 // indirect
github.com/segmentio/asm v1.1.4 // indirect
go.starlark.net v0.0.0-20201118183435-e55f603d8c79 // indirect
)
require (
cloud.google.com/go/compute v1.5.0 // indirect
cloud.google.com/go/compute v1.6.1 // indirect
cloud.google.com/go/iam v0.3.0 // indirect
github.com/Azure/azure-sdk-for-go/sdk/azcore v0.22.0 // indirect
github.com/Azure/azure-sdk-for-go/sdk/keyvault/internal v0.2.1 // indirect
@ -303,12 +306,10 @@ require (
github.com/blugelabs/ice v1.0.0 // indirect
github.com/caio/go-tdigest v3.1.0+incompatible // indirect
github.com/chromedp/cdproto v0.0.0-20220208224320-6efb837e6bc2 // indirect
github.com/containerd/containerd v1.6.6 // indirect
github.com/coreos/go-semver v0.3.0 // indirect
github.com/dgryski/go-metro v0.0.0-20180109044635-280f6062b5bc // indirect
github.com/dgryski/go-metro v0.0.0-20211217172704-adc40b04c140 // indirect
github.com/elazarl/goproxy v0.0.0-20220115173737-adb46da277ac // indirect
github.com/emirpasic/gods v1.12.0 // indirect
github.com/fsnotify/fsnotify v1.5.4 // indirect
github.com/ghodss/yaml v1.0.1-0.20190212211648-25d852aebe32 // indirect
github.com/go-git/gcfg v1.5.0 // indirect
github.com/go-git/go-billy/v5 v5.3.1 // indirect
@ -316,11 +317,11 @@ require (
github.com/go-logr/stdr v1.2.2 // indirect
github.com/golang-jwt/jwt v3.2.2+incompatible // indirect
github.com/google/go-github v17.0.0+incompatible
github.com/grpc-ecosystem/grpc-gateway/v2 v2.7.0 // indirect
github.com/grpc-ecosystem/grpc-gateway/v2 v2.10.3 // indirect
github.com/imdario/mergo v0.3.12 // indirect
github.com/jbenet/go-context v0.0.0-20150711004518-d14ea06fba99 // indirect
github.com/kevinburke/ssh_config v0.0.0-20201106050909-4977a11b4351 // indirect
github.com/klauspost/compress v1.15.2 // indirect
github.com/klauspost/compress v1.15.5 // indirect
github.com/kylelemons/godebug v1.1.0 // indirect
github.com/labstack/echo/v4 v4.9.0 // indirect
github.com/labstack/gommon v0.3.1 // indirect
@ -328,20 +329,16 @@ require (
github.com/mitchellh/go-wordwrap v1.0.1 // indirect
github.com/mschoch/smat v0.2.0 // indirect
github.com/opencontainers/image-spec v1.0.3-0.20211202183452-c5a74bcca799 // indirect
github.com/pierrec/lz4/v4 v4.1.8 // indirect
github.com/pierrec/lz4/v4 v4.1.12 // indirect
github.com/valyala/fasttemplate v1.2.1 // indirect
github.com/wk8/go-ordered-map v1.0.0
github.com/xanzy/ssh-agent v0.3.0 // indirect
github.com/xlab/treeprint v1.1.0 // indirect
github.com/yudai/pp v2.0.1+incompatible // indirect
go.opentelemetry.io/otel/exporters/otlp/internal/retry v1.6.3 // indirect
go.opentelemetry.io/proto/otlp v0.15.0 // indirect
go.opentelemetry.io/otel/exporters/otlp/internal/retry v1.7.0 // indirect
go.opentelemetry.io/proto/otlp v0.16.0 // indirect
golang.org/x/mod v0.6.0-dev.0.20220419223038-86c51ed26bb4 // indirect
gopkg.in/warnings.v0 v0.1.2 // indirect
k8s.io/api v0.22.5 // indirect
k8s.io/apimachinery v0.22.5 // indirect
k8s.io/klog/v2 v2.30.0 // indirect
k8s.io/utils v0.0.0-20210930125809-cb0fa318a74b // indirect
)
// Use fork of crewjam/saml with fixes for some issues until changes get merged into upstream
@ -374,3 +371,8 @@ replace xorm.io/xorm => github.com/grafana/xorm v0.8.3-0.20220614223926-2fcda756
// Use our fork of the upstream alertmanagers.
// This is required in order to get notification delivery errors from the receivers API.
replace github.com/prometheus/alertmanager => github.com/grafana/prometheus-alertmanager v0.24.1-0.20221012142027-823cd9150293
// grpc v1.46.0 removed "WithBalancerName()" API, still in use by weaveworks/commons.
replace google.golang.org/grpc => google.golang.org/grpc v1.45.0
replace google.golang.org/genproto => google.golang.org/genproto v0.0.0-20220421151946-72621c1f0bd3
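
For context on the grpc pin above, a minimal sketch of the dial option that grpc v1.46.0 removed. It approximates the kind of call site in weaveworks/common that the comment refers to; it is not code from this commit, and the package and function names are illustrative only.

// Sketch only: compiles against grpc v1.45.0 but not v1.46+, which is why the
// replace directive above pins grpc to v1.45.0.
package grpcpin

import "google.golang.org/grpc"

func dial(addr string) (*grpc.ClientConn, error) {
	return grpc.Dial(addr,
		grpc.WithInsecure(),                  // plaintext transport, for illustration only
		grpc.WithBalancerName("round_robin"), // WithBalancerName was removed in grpc v1.46.0
	)
}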

go.sum (323 changed lines)

@ -1,5 +1,4 @@
bazil.org/fuse v0.0.0-20160811212531-371fbbdaa898/go.mod h1:Xbm+BRKSBEpa4q4hTSxohYNQpsxXPbPry4JJWOB3LB8=
cloud.google.com/go v0.26.0/go.mod h1:aQUYkXzVsufM+DwF1aE+0xfcU+56JwCaLick0ClmMTw=
cloud.google.com/go v0.34.0/go.mod h1:aQUYkXzVsufM+DwF1aE+0xfcU+56JwCaLick0ClmMTw=
cloud.google.com/go v0.37.4/go.mod h1:NHPJ89PdicEuT9hdPXMROBD91xc5uRDxsMtSB16k7hw=
cloud.google.com/go v0.38.0/go.mod h1:990N+gfupTy94rShfmMCWGDn0LpTmnzTp2qbd1dvSRU=
@ -47,8 +46,10 @@ cloud.google.com/go/bigtable v1.3.0/go.mod h1:z5EyKrPE8OQmeg4h5MNdKvuSnI9CCT49Ki
cloud.google.com/go/compute v0.1.0/go.mod h1:GAesmwr110a34z04OlxYkATPBEfVhkymfTBXtfbBFow=
cloud.google.com/go/compute v1.2.0/go.mod h1:xlogom/6gr8RJGBe7nT2eGsQYAFUbbv8dbC29qE3Xmw=
cloud.google.com/go/compute v1.3.0/go.mod h1:cCZiE1NHEtai4wiufUhW8I8S1JKkAnhnQJWM7YD99wM=
cloud.google.com/go/compute v1.5.0 h1:b1zWmYuuHz7gO9kDcM/EpHGr06UgsYNRpNJzI2kFiLM=
cloud.google.com/go/compute v1.5.0/go.mod h1:9SMHyhJlzhlkJqrPAc839t2BZFTSk6Jdj6mkzQJeu0M=
cloud.google.com/go/compute v1.6.0/go.mod h1:T29tfhtVbq1wvAPo0E3+7vhgmkOYeXjhFvz/FMzPu0s=
cloud.google.com/go/compute v1.6.1 h1:2sMmt8prCn7DPaG4Pmh0N3Inmc8cT8ae5k1M6VJ9Wqc=
cloud.google.com/go/compute v1.6.1/go.mod h1:g85FgpzFvNULZ+S8AYq87axRKuf2Kh7deLqV/jJ3thU=
cloud.google.com/go/datastore v1.0.0/go.mod h1:LXYbyblFSglQ5pkeyhO+Qmw7ukd3C+pD7TKLgZqpHYE=
cloud.google.com/go/datastore v1.1.0/go.mod h1:umbIZjpQpHh4hmRpGhH4tLFup+FVzqBi1b3c64qFpCk=
cloud.google.com/go/firestore v1.1.0/go.mod h1:ulACoGHTpvq5r8rxGJ4ddJZBZqakUQqClKRT5SZwBmk=
@ -112,8 +113,9 @@ github.com/Azure/azure-sdk-for-go v52.5.0+incompatible/go.mod h1:9XXNKU+eRnpl9mo
github.com/Azure/azure-sdk-for-go v54.0.0+incompatible/go.mod h1:9XXNKU+eRnpl9moKnB4QOLf1HestfXbmab5FXxiDBjc=
github.com/Azure/azure-sdk-for-go v55.2.0+incompatible/go.mod h1:9XXNKU+eRnpl9moKnB4QOLf1HestfXbmab5FXxiDBjc=
github.com/Azure/azure-sdk-for-go v57.1.0+incompatible/go.mod h1:9XXNKU+eRnpl9moKnB4QOLf1HestfXbmab5FXxiDBjc=
github.com/Azure/azure-sdk-for-go v59.3.0+incompatible h1:dPIm0BO4jsMXFcCI/sLTPkBtE7mk8WMuRHA0JeWhlcQ=
github.com/Azure/azure-sdk-for-go v59.3.0+incompatible/go.mod h1:9XXNKU+eRnpl9moKnB4QOLf1HestfXbmab5FXxiDBjc=
github.com/Azure/azure-sdk-for-go v65.0.0+incompatible h1:HzKLt3kIwMm4KeJYTdx9EbjRYTySD/t8i1Ee/W5EGXw=
github.com/Azure/azure-sdk-for-go v65.0.0+incompatible/go.mod h1:9XXNKU+eRnpl9moKnB4QOLf1HestfXbmab5FXxiDBjc=
github.com/Azure/azure-sdk-for-go/sdk/azcore v0.20.0/go.mod h1:ZPW/Z0kLCTdDZaDbYTetxc9Cxl/2lNqxYHYNOF2bti0=
github.com/Azure/azure-sdk-for-go/sdk/azcore v0.21.0/go.mod h1:fBF9PQNqB8scdgpZ3ufzaLntG0AG7C1WjPMsiFOmfHM=
github.com/Azure/azure-sdk-for-go/sdk/azcore v0.22.0 h1:zBJcBJwte0x6PcPK7XaWDMvK2o2ZM2f1sMaqNNavQ5g=
@ -154,8 +156,9 @@ github.com/Azure/go-autorest/autorest v0.11.17/go.mod h1:eipySxLmqSyC5s5k1CLupqe
github.com/Azure/go-autorest/autorest v0.11.18/go.mod h1:dSiJPy22c3u0OtOKDNttNgqpNFY/GeWa7GH/Pz56QRA=
github.com/Azure/go-autorest/autorest v0.11.19/go.mod h1:dSiJPy22c3u0OtOKDNttNgqpNFY/GeWa7GH/Pz56QRA=
github.com/Azure/go-autorest/autorest v0.11.20/go.mod h1:o3tqFY+QR40VOlk+pV4d77mORO64jOXSgEnPQgLK6JY=
github.com/Azure/go-autorest/autorest v0.11.22 h1:bXiQwDjrRmBQOE67bwlvUKAC1EU1yZTPQ38c+bstZws=
github.com/Azure/go-autorest/autorest v0.11.22/go.mod h1:BAWYUWGPEtKPzjVkp0Q6an0MJcJDsoh5Z1BFAEFs4Xs=
github.com/Azure/go-autorest/autorest v0.11.27 h1:F3R3q42aWytozkV8ihzcgMO4OA4cuqr3bNlsEuF6//A=
github.com/Azure/go-autorest/autorest v0.11.27/go.mod h1:7l8ybrIdUmGqZMTD0sRtAr8NvbHjfofbf8RSP2q7w7U=
github.com/Azure/go-autorest/autorest/adal v0.5.0/go.mod h1:8Z9fGy2MpX0PvDjB1pEgQTmVqjGhiHBW7RJJEciWzS0=
github.com/Azure/go-autorest/autorest/adal v0.8.0/go.mod h1:Z6vX6WXXuyieHAXwMj0S6HY6e6wcHn37qQMBQlvY3lc=
github.com/Azure/go-autorest/autorest/adal v0.8.1-0.20191028180845-3492b2aff503/go.mod h1:Z6vX6WXXuyieHAXwMj0S6HY6e6wcHn37qQMBQlvY3lc=
@ -169,8 +172,10 @@ github.com/Azure/go-autorest/autorest/adal v0.9.11/go.mod h1:nBKAnTomx8gDtl+3ZCJ
github.com/Azure/go-autorest/autorest/adal v0.9.13/go.mod h1:W/MM4U6nLxnIskrw4UwWzlHfGjwUS50aOsc/I3yuU8M=
github.com/Azure/go-autorest/autorest/adal v0.9.14/go.mod h1:W/MM4U6nLxnIskrw4UwWzlHfGjwUS50aOsc/I3yuU8M=
github.com/Azure/go-autorest/autorest/adal v0.9.15/go.mod h1:tGMin8I49Yij6AQ+rvV+Xa/zwxYQB5hmsd6DkfAx2+A=
github.com/Azure/go-autorest/autorest/adal v0.9.17 h1:esOPl2dhcz9P3jqBSJ8tPGEj2EqzPPT6zfyuloiogKY=
github.com/Azure/go-autorest/autorest/adal v0.9.17/go.mod h1:XVVeme+LZwABT8K5Lc3hA4nAe8LDBVle26gTrguhhPQ=
github.com/Azure/go-autorest/autorest/adal v0.9.18/go.mod h1:XVVeme+LZwABT8K5Lc3hA4nAe8LDBVle26gTrguhhPQ=
github.com/Azure/go-autorest/autorest/adal v0.9.20 h1:gJ3E98kMpFB1MFqQCvA1yFab8vthOeD4VlFRQULxahg=
github.com/Azure/go-autorest/autorest/adal v0.9.20/go.mod h1:XVVeme+LZwABT8K5Lc3hA4nAe8LDBVle26gTrguhhPQ=
github.com/Azure/go-autorest/autorest/azure/auth v0.4.2/go.mod h1:90gmfKdlmKgfjUpnCEpOJzsUEjrWDSLwHIG73tSXddM=
github.com/Azure/go-autorest/autorest/azure/auth v0.5.8/go.mod h1:kxyKZTSfKh8OVFWPAgOgQ/frrJgeYQJPyR5fLFmXko4=
github.com/Azure/go-autorest/autorest/azure/auth v0.5.9/go.mod h1:hg3/1yw0Bq87O3KvvnJoAh34/0zbP7SFizX/qN5JvjU=
@ -184,8 +189,9 @@ github.com/Azure/go-autorest/autorest/mocks v0.1.0/go.mod h1:OTyCOPRA2IgIlWxVYxB
github.com/Azure/go-autorest/autorest/mocks v0.2.0/go.mod h1:OTyCOPRA2IgIlWxVYxBee2F5Gr4kF2zd2J5cFRaIDN0=
github.com/Azure/go-autorest/autorest/mocks v0.3.0/go.mod h1:a8FDP3DYzQ4RYfVAxAN3SVSiiO77gL2j2ronKKP0syM=
github.com/Azure/go-autorest/autorest/mocks v0.4.0/go.mod h1:LTp+uSrOhSkaKrUy935gNZuuIPPVsHlr9DSOxSayd+k=
github.com/Azure/go-autorest/autorest/mocks v0.4.1 h1:K0laFcLE6VLTOwNgSxaGbUcLPuGXlNkbVvq4cW4nIHk=
github.com/Azure/go-autorest/autorest/mocks v0.4.1/go.mod h1:LTp+uSrOhSkaKrUy935gNZuuIPPVsHlr9DSOxSayd+k=
github.com/Azure/go-autorest/autorest/mocks v0.4.2 h1:PGN4EDXnuQbojHbU0UWoNvmu9AGVwYHG9/fkDYhtAfw=
github.com/Azure/go-autorest/autorest/mocks v0.4.2/go.mod h1:Vy7OitM9Kei0i1Oj+LvyAWMXJHeKH1MVlzFugfVrmyU=
github.com/Azure/go-autorest/autorest/to v0.3.0/go.mod h1:MgwOyqaIuKdG4TL/2ywSsIWKAfJfgHDo8ObuUk3t5sA=
github.com/Azure/go-autorest/autorest/to v0.3.1-0.20191028180845-3492b2aff503/go.mod h1:MgwOyqaIuKdG4TL/2ywSsIWKAfJfgHDo8ObuUk3t5sA=
github.com/Azure/go-autorest/autorest/to v0.4.0 h1:oXVqrxakqqV1UZdSazDOPOLvOIz+XA683u8EctwboHk=
@ -219,8 +225,8 @@ github.com/FZambia/sentinel v1.1.0/go.mod h1:ytL1Am/RLlAoAXG6Kj5LNuw/TRRQrv2rt2F
github.com/GoogleCloudPlatform/cloudsql-proxy v1.29.0/go.mod h1:spvB9eLJH9dutlbPSRmHvSXXHOwGRyeXh1jVdquA2G8=
github.com/HdrHistogram/hdrhistogram-go v0.9.0/go.mod h1:nxrse8/Tzg2tg3DZcZjm6qEclQKK70g0KxO61gFFZD4=
github.com/HdrHistogram/hdrhistogram-go v1.0.1/go.mod h1:BWJ+nMSHY3L41Zj7CA3uXnloDp7xxV0YvstAE7nKTaM=
github.com/HdrHistogram/hdrhistogram-go v1.1.0 h1:6dpdDPTRoo78HxAJ6T1HfMiKSnqhgRRqzCuPshRkQ7I=
github.com/HdrHistogram/hdrhistogram-go v1.1.0/go.mod h1:yDgFjdqOqDEKOvasDdhWNXYg9BVp4O+o5f6V/ehm6Oo=
github.com/HdrHistogram/hdrhistogram-go v1.1.2 h1:5IcZpTvzydCQeHzK4Ef/D5rrSqwxob0t8PQPMybUNFM=
github.com/Knetic/govaluate v3.0.1-0.20171022003610-9aa49832a739+incompatible/go.mod h1:r7JcOSlj0wfOMncg0iLm8Leh48TZaKVeNIfJntJ2wa0=
github.com/Masterminds/semver v1.4.2/go.mod h1:MB6lktGJrhw8PrUyiEoblNEGEQ+RzHPF078ddwwvV3Y=
github.com/Masterminds/semver v1.5.0 h1:H65muMkzWKEuNDnfl9d70GUjFniHKHRbFPGBuZ3QEww=
@ -296,8 +302,8 @@ github.com/alicebob/miniredis v2.5.0+incompatible/go.mod h1:8HZjEj4yU0dwhYHky+Dx
github.com/alicebob/miniredis/v2 v2.14.3/go.mod h1:gquAfGbzn92jvtrSC69+6zZnwSODVXVpYDRaGhWaL6I=
github.com/aliyun/aliyun-oss-go-sdk v2.0.4+incompatible/go.mod h1:T/Aws4fEfogEE9v+HPhhw+CntffsBHJ8nXQCwKr0/g8=
github.com/andreyvit/diff v0.0.0-20170406064948-c7f18ee00883/go.mod h1:rCTlJbsFo29Kk6CurOXKm700vrz8f0KW0JNfpkRJY/8=
github.com/andybalholm/brotli v1.0.3 h1:fpcw+r1N1h0Poc1F/pHbW40cUm/lMEQslZtCkBQ0UnM=
github.com/andybalholm/brotli v1.0.3/go.mod h1:fO7iG3H7G2nSZ7m0zPUDn85XEX2GTukHGRSepvi9Eig=
github.com/andybalholm/brotli v1.0.4 h1:V7DdXeJtZscaqfNuAdSRuRFzuiKlHSC/Zh3zl9qY3JY=
github.com/andybalholm/brotli v1.0.4/go.mod h1:fO7iG3H7G2nSZ7m0zPUDn85XEX2GTukHGRSepvi9Eig=
github.com/anmitsu/go-shlex v0.0.0-20161002113705-648efa622239 h1:kFOfPq6dUM1hTo4JG6LR5AXSUEsOjtdm0kw0FtQtMJA=
github.com/anmitsu/go-shlex v0.0.0-20161002113705-648efa622239/go.mod h1:2FmKhYUyUczH0OGQWaF5ceTx0UBShxjsH6f8oGKYe2c=
github.com/antihax/optional v0.0.0-20180407024304-ca021399b1a6/go.mod h1:V8iCPQYkqmusNa815XgQio277wI47sdRh1dUOLdyC6Q=
@ -460,6 +466,8 @@ github.com/bradfitz/gomemcache v0.0.0-20190913173617-a41fca850d0b h1:L/QXpzIa3pO
github.com/bradfitz/gomemcache v0.0.0-20190913173617-a41fca850d0b/go.mod h1:H0wQNHz2YrLsuXOZozoeDmnHXkNCRmMW0gwFWDfEZDA=
github.com/bshuster-repo/logrus-logstash-hook v0.4.1/go.mod h1:zsTqEiSzDgAa/8GZR7E1qaXrhYNDKBYy5/dWPTIflbk=
github.com/bsm/sarama-cluster v2.1.13+incompatible/go.mod h1:r7ao+4tTNXvWm+VRpRJchr2kQhqxgmAp2iEX5W96gMM=
github.com/bufbuild/connect-go v1.0.0 h1:htSflKUT8y1jxhoPhPYTZMrsY3ipUXjjrbcZR5O2cVo=
github.com/bufbuild/connect-go v1.0.0/go.mod h1:9iNvh/NOsfhNBUH5CtvXeVUskQO1xsrEviH7ZArwZ3I=
github.com/buger/jsonparser v0.0.0-20180808090653-f4dd9f5a6b44/go.mod h1:bbYlZJ7hK1yFx9hf58LP0zeX7UjIGs20ufpu3evjr+s=
github.com/bugsnag/bugsnag-go v0.0.0-20141110184014-b1d153021fcd/go.mod h1:2oa8nejYd4cQ/b0hMIopN0lCRxU0bueqREvZLWFrtK8=
github.com/bugsnag/osext v0.0.0-20130617224835-0dd3f918b21b/go.mod h1:obH5gd0BsqsP2LwDJ9aOkm/6J86V6lyAXCoQWGw3K50=
@ -475,7 +483,6 @@ github.com/casbin/casbin/v2 v2.31.6/go.mod h1:vByNa/Fchek0KZUgG5wEsl7iFsiviAYKRt
github.com/cenkalti/backoff v1.0.0/go.mod h1:90ReRw6GdpyfrHakVjL/QHaoyV4aDUVVkXQJJJ3NXXM=
github.com/cenkalti/backoff v2.2.1+incompatible/go.mod h1:90ReRw6GdpyfrHakVjL/QHaoyV4aDUVVkXQJJJ3NXXM=
github.com/cenkalti/backoff/v4 v4.1.1/go.mod h1:scbssz8iZGpm3xbr14ovlUdkxfGXNInqkPWOWmG2CLw=
github.com/cenkalti/backoff/v4 v4.1.2/go.mod h1:scbssz8iZGpm3xbr14ovlUdkxfGXNInqkPWOWmG2CLw=
github.com/cenkalti/backoff/v4 v4.1.3 h1:cFAlzYUlVYDysBEH2T5hyJZMh3+5+WCBvSnK6Q8UtC4=
github.com/cenkalti/backoff/v4 v4.1.3/go.mod h1:scbssz8iZGpm3xbr14ovlUdkxfGXNInqkPWOWmG2CLw=
github.com/census-instrumentation/opencensus-proto v0.2.1/go.mod h1:f6KPmirojxKA12rnyqOA5BBL4O983OfeGPqjHWSTneU=
@ -511,10 +518,6 @@ github.com/cilium/ebpf v0.6.2/go.mod h1:4tRaxcgiL706VnOzHOdBlY8IEAIdxINsQBcU4xJJ
github.com/circonus-labs/circonus-gometrics v2.3.1+incompatible/go.mod h1:nmEj6Dob7S7YxXgwXpfOuvO54S+tGdZdw9fuRZt25Ag=
github.com/circonus-labs/circonusllhist v0.1.3/go.mod h1:kMXHVDlOchFAehlya5ePtbp5jckzBHf4XRpQvBOLI+I=
github.com/clbanning/x2j v0.0.0-20191024224557-825249438eec/go.mod h1:jMjuTZXRI4dUb/I5gc9Hdhagfvm9+RyrPryS/auMzxE=
github.com/client9/misspell v0.3.4/go.mod h1:qj6jICC3Q7zFZvVWo7KLAzC3yx5G7kyvSDkc90ppPyw=
github.com/cncf/udpa/go v0.0.0-20191209042840-269d4d468f6f/go.mod h1:M8M6+tZqaGXZJjfX53e64911xZQV5JYwmTeXPW+k8Sc=
github.com/cncf/udpa/go v0.0.0-20200629203442-efcf912fb354/go.mod h1:WmhPx2Nbnhtbo57+VJT5O0JRkEi1Wbu0z5j0R8u5Hbk=
github.com/cncf/udpa/go v0.0.0-20201120205902-5459f2c99403/go.mod h1:WmhPx2Nbnhtbo57+VJT5O0JRkEi1Wbu0z5j0R8u5Hbk=
github.com/cncf/udpa/go v0.0.0-20210930031921-04548b0d99d4/go.mod h1:6pvJx4me5XPnfI9Z40ddWsdw2W/uZgQLFXToKeRcDiI=
github.com/cncf/xds/go v0.0.0-20210312221358-fbca930ec8ed/go.mod h1:eXthEFrGJvWHgFFCl3hGmgk+/aYT6PnTQLykKQRLhEs=
github.com/cncf/xds/go v0.0.0-20210805033703-aa0b78936158/go.mod h1:eXthEFrGJvWHgFFCl3hGmgk+/aYT6PnTQLykKQRLhEs=
@ -569,8 +572,6 @@ github.com/containerd/containerd v1.5.0-rc.0/go.mod h1:V/IXoMqNGgBlabz3tHD2TWDoT
github.com/containerd/containerd v1.5.1/go.mod h1:0DOxVqwDy2iZvrZp2JUx/E+hS0UNTVn7dJnIOwtYR4g=
github.com/containerd/containerd v1.5.4/go.mod h1:sx18RgvW6ABJ4iYUw7Q5x7bgFOAB9B6G7+yO0XBc4zw=
github.com/containerd/containerd v1.5.8/go.mod h1:YdFSv5bTFLpG2HIYmfqDpSYYTDX+mc5qtSuYx1YUb/s=
github.com/containerd/containerd v1.6.6 h1:xJNPhbrmz8xAMDNoVjHy9YHtWwEQNS+CDkcIRh7t8Y0=
github.com/containerd/containerd v1.6.6/go.mod h1:ZoP1geJldzCVY3Tonoz7b1IXk8rIX0Nltt5QE4OMNk0=
github.com/containerd/continuity v0.0.0-20190426062206-aaeac12a7ffc/go.mod h1:GL3xCUCBDV3CZiTSEKksMWbLE66hEyuu9qyDOOqM47Y=
github.com/containerd/continuity v0.0.0-20190815185530-f2a389ac0a02/go.mod h1:GL3xCUCBDV3CZiTSEKksMWbLE66hEyuu9qyDOOqM47Y=
github.com/containerd/continuity v0.0.0-20191127005431-f65d91d395eb/go.mod h1:GL3xCUCBDV3CZiTSEKksMWbLE66hEyuu9qyDOOqM47Y=
@ -710,8 +711,9 @@ github.com/dgrijalva/jwt-go/v4 v4.0.0-preview1/go.mod h1:+hnT3ywWDTAFrW5aE+u2Sa/
github.com/dgryski/go-bitstream v0.0.0-20180413035011-3522498ce2c8/go.mod h1:VMaSuZ+SZcx/wljOQKvp5srsbCiKDEb6K2wC4+PiBmQ=
github.com/dgryski/go-farm v0.0.0-20190423205320-6a90982ecee2/go.mod h1:SqUrOPUnsFjfmXRMNPybcSiG0BgUW2AuFH8PAnS2iTw=
github.com/dgryski/go-farm v0.0.0-20200201041132-a6ae2369ad13/go.mod h1:SqUrOPUnsFjfmXRMNPybcSiG0BgUW2AuFH8PAnS2iTw=
github.com/dgryski/go-metro v0.0.0-20180109044635-280f6062b5bc h1:8WFBn63wegobsYAX0YjD+8suexZDga5CctH4CCTx2+8=
github.com/dgryski/go-metro v0.0.0-20180109044635-280f6062b5bc/go.mod h1:c9O8+fpSOX1DM8cPNSkX/qsBWdkD4yd2dpciOWQjpBw=
github.com/dgryski/go-metro v0.0.0-20211217172704-adc40b04c140 h1:y7y0Oa6UawqTFPCDw9JG6pdKt4F9pAhHv0B7FMGaGD0=
github.com/dgryski/go-metro v0.0.0-20211217172704-adc40b04c140/go.mod h1:c9O8+fpSOX1DM8cPNSkX/qsBWdkD4yd2dpciOWQjpBw=
github.com/dgryski/go-rendezvous v0.0.0-20200823014737-9f7001d12a5f h1:lO4WD4F/rVNCu3HqELle0jiPLLBs70cWOduZpkS1E78=
github.com/dgryski/go-rendezvous v0.0.0-20200823014737-9f7001d12a5f/go.mod h1:cuUVRXasLTGF7a8hSLbxyZXjz+1KgoB3wDUb6vlszIc=
github.com/dgryski/go-sip13 v0.0.0-20181026042036-e10d5fee7954/go.mod h1:vAd38F8PWV+bWy6jNmig1y/TA+kYO4g3RSRF0IAv0no=
@ -728,8 +730,8 @@ github.com/digitalocean/godo v1.57.0/go.mod h1:p7dOjjtSBqCTUksqtA5Fd3uaKs9kyTq2x
github.com/digitalocean/godo v1.58.0/go.mod h1:p7dOjjtSBqCTUksqtA5Fd3uaKs9kyTq2xcz76ulEJRU=
github.com/digitalocean/godo v1.60.0/go.mod h1:p7dOjjtSBqCTUksqtA5Fd3uaKs9kyTq2xcz76ulEJRU=
github.com/digitalocean/godo v1.62.0/go.mod h1:p7dOjjtSBqCTUksqtA5Fd3uaKs9kyTq2xcz76ulEJRU=
github.com/digitalocean/godo v1.65.0 h1:3SywGJBC18HaYtPQF+T36jYzXBi+a6eIMonSjDll7TA=
github.com/digitalocean/godo v1.65.0/go.mod h1:p7dOjjtSBqCTUksqtA5Fd3uaKs9kyTq2xcz76ulEJRU=
github.com/digitalocean/godo v1.80.0 h1:ZULJ/fWDM97YtO7Fa+K6hzJLd7+smCu4N+0n+B/xtj4=
github.com/dimchansky/utfbom v1.1.0/go.mod h1:rO41eb7gLfo8SF1jd9F8HplJm1Fewwi4mQvIirEdv+8=
github.com/dimchansky/utfbom v1.1.1/go.mod h1:SxdoEBH5qIqFocHMyGOXVAybYJdr71b1Q/j0mACtrfE=
github.com/dlclark/regexp2 v1.4.1-0.20201116162257-a2a8dda75c91 h1:Izz0+t1Z5nI16/II7vuEo/nHjodOg0p7+OiDpjX5t1E=
@ -737,8 +739,8 @@ github.com/dlclark/regexp2 v1.4.1-0.20201116162257-a2a8dda75c91/go.mod h1:2pZnwu
github.com/dlmiddlecote/sqlstats v1.0.2 h1:gSU11YN23D/iY50A2zVYwgXgy072khatTsIW6UPjUtI=
github.com/dlmiddlecote/sqlstats v1.0.2/go.mod h1:0CWaIh/Th+z2aI6Q9Jpfg/o21zmGxWhbByHgQSCUQvY=
github.com/dnaeon/go-vcr v1.0.1/go.mod h1:aBB1+wY4s93YsC3HHjMBMrwTj2R9FHDzUr9KyGc8n1E=
github.com/dnaeon/go-vcr v1.1.0 h1:ReYa/UBrRyQdant9B4fNHGoCNKw6qh6P0fsdGmZpR7c=
github.com/dnaeon/go-vcr v1.1.0/go.mod h1:M7tiix8f0r6mKKJ3Yq/kqU1OYf3MnfmBWVbPx/yU9ko=
github.com/dnaeon/go-vcr v1.2.0 h1:zHCHvJYTMh1N7xnV7zf1m1GPBF9Ad0Jk/whtQ1663qI=
github.com/docker/distribution v0.0.0-20170726174610-edc3ab29cdff/go.mod h1:J2gT2udsDAN96Uj4KfcMRqY0/ypR+oyYUYmja8H+y+w=
github.com/docker/distribution v0.0.0-20190905152932-14b96e55d84c/go.mod h1:0+TTO4EOBfRPhZXAeF1Vu+W3hHZ8eLp8PgKVZlcvtFY=
github.com/docker/distribution v2.7.0+incompatible/go.mod h1:J2gT2udsDAN96Uj4KfcMRqY0/ypR+oyYUYmja8H+y+w=
@ -753,8 +755,8 @@ github.com/docker/docker v20.10.3+incompatible/go.mod h1:eEKB0N0r5NX/I1kEveEz05b
github.com/docker/docker v20.10.5+incompatible/go.mod h1:eEKB0N0r5NX/I1kEveEz05bcu8tLC/8azJZsviup8Sk=
github.com/docker/docker v20.10.6+incompatible/go.mod h1:eEKB0N0r5NX/I1kEveEz05bcu8tLC/8azJZsviup8Sk=
github.com/docker/docker v20.10.7+incompatible/go.mod h1:eEKB0N0r5NX/I1kEveEz05bcu8tLC/8azJZsviup8Sk=
github.com/docker/docker v20.10.8+incompatible h1:RVqD337BgQicVCzYrrlhLDWhq6OAD2PJDUg2LsEUvKM=
github.com/docker/docker v20.10.8+incompatible/go.mod h1:eEKB0N0r5NX/I1kEveEz05bcu8tLC/8azJZsviup8Sk=
github.com/docker/docker v20.10.16+incompatible h1:2Db6ZR/+FUR3hqPMwnogOPHFn405crbpxvWzKovETOQ=
github.com/docker/go-connections v0.3.0/go.mod h1:Gbd7IOopHjR8Iph03tsViu4nIes5XhDvyHbTtUxmeec=
github.com/docker/go-connections v0.4.0 h1:El9xVISelRB7BuFusrZozjnkIM5YnzCViNKohAFqRJQ=
github.com/docker/go-connections v0.4.0/go.mod h1:Gbd7IOopHjR8Iph03tsViu4nIes5XhDvyHbTtUxmeec=
@ -810,20 +812,12 @@ github.com/emicklei/proto v1.10.0 h1:pDGyFRVV5RvV+nkBK9iy3q67FBy9Xa7vwrOTE+g5aGw
github.com/emicklei/proto v1.10.0/go.mod h1:rn1FgRS/FANiZdD2djyH7TMA9jdRDcYQ9IEN9yvjX0A=
github.com/emirpasic/gods v1.12.0 h1:QAUIPSaCu4G+POclxeqb3F+WPpdKqFGlw36+yOzGlrg=
github.com/emirpasic/gods v1.12.0/go.mod h1:YfzfFFoVP/catgzJb4IKIqXjX78Ha8FMSDh3ymbK86o=
github.com/envoyproxy/go-control-plane v0.6.9/go.mod h1:SBwIajubJHhxtWwsL9s8ss4safvEdbitLhGGK48rN6g=
github.com/envoyproxy/go-control-plane v0.9.0/go.mod h1:YTl/9mNaCwkRvm6d1a2C3ymFceY/DCBVvsKhRF0iEA4=
github.com/envoyproxy/go-control-plane v0.9.1-0.20191026205805-5f8ba28d4473/go.mod h1:YTl/9mNaCwkRvm6d1a2C3ymFceY/DCBVvsKhRF0iEA4=
github.com/envoyproxy/go-control-plane v0.9.4/go.mod h1:6rpuAdCZL397s3pYoYcLgu1mIlRU8Am5FuJP05cCM98=
github.com/envoyproxy/go-control-plane v0.9.7/go.mod h1:cwu0lG7PUMfa9snN8LXBig5ynNVH9qI8YYLbd1fK2po=
github.com/envoyproxy/go-control-plane v0.9.9-0.20201210154907-fd9021fe5dad/go.mod h1:cXg6YxExXjJnVBQHBLXeUAgxn2UodCpnH306RInaBQk=
github.com/envoyproxy/go-control-plane v0.9.9-0.20210217033140-668b12f5399d/go.mod h1:cXg6YxExXjJnVBQHBLXeUAgxn2UodCpnH306RInaBQk=
github.com/envoyproxy/go-control-plane v0.9.9-0.20210512163311-63b5d3c536b0/go.mod h1:hliV/p42l8fGbc6Y9bQ70uLwIvmJyVE5k4iMKlh8wCQ=
github.com/envoyproxy/go-control-plane v0.9.9/go.mod h1:hliV/p42l8fGbc6Y9bQ70uLwIvmJyVE5k4iMKlh8wCQ=
github.com/envoyproxy/go-control-plane v0.9.10-0.20210907150352-cf90f659a021 h1:fP+fF0up6oPY49OrjPrhIJ8yQfdIM85NXMLkMg1EXVs=
github.com/envoyproxy/go-control-plane v0.9.10-0.20210907150352-cf90f659a021/go.mod h1:AFq3mo9L8Lqqiid3OhADV3RfLJnjiw63cSpi+fDTRC0=
github.com/envoyproxy/go-control-plane v0.10.2-0.20220325020618-49ff273808a1 h1:xvqufLtNVwAhN8NMyWklVgxnWohi+wtMGQMhtxexlm0=
github.com/envoyproxy/protoc-gen-validate v0.1.0/go.mod h1:iSmxcyjqTsJpI2R4NaDN7+kN2VEUnK/pcBlmesArF7c=
github.com/envoyproxy/protoc-gen-validate v0.6.1 h1:4CF52PCseTFt4bE+Yk3dIpdVi7XWuPVMhPtm4FaIJPM=
github.com/envoyproxy/protoc-gen-validate v0.6.1/go.mod h1:txg5va2Qkip90uYoSKH+nkAAmXrb2j3iq4FLwdrCbXQ=
github.com/envoyproxy/protoc-gen-validate v0.6.7 h1:qcZcULcd/abmQg6dwigimCNEyi4gg31M/xaciQlDml8=
github.com/evanphx/json-patch v0.0.0-20200808040245-162e5629780b/go.mod h1:NAJj0yf/KaRKURN6nyi7A9IZydMivZEm9oQLWNjfKDc=
github.com/evanphx/json-patch v4.2.0+incompatible/go.mod h1:50XU6AFN0ol/bzJsmQLiYLvXMP4fmwYFNcr97nuDLSk=
github.com/evanphx/json-patch v4.5.0+incompatible/go.mod h1:50XU6AFN0ol/bzJsmQLiYLvXMP4fmwYFNcr97nuDLSk=
@ -856,7 +850,6 @@ github.com/fsnotify/fsnotify v1.4.7/go.mod h1:jwhsz4b93w/PPRr/qN1Yymfu8t87LnFCMo
github.com/fsnotify/fsnotify v1.4.9/go.mod h1:znqG4EE+3YCdAaPaxE2ZRY/06pZUdp0tY4IgpuI1SZQ=
github.com/fsnotify/fsnotify v1.5.1/go.mod h1:T3375wBYaZdLLcVNkcVbzGHY7f1l/uK5T5Ai1i3InKU=
github.com/fsnotify/fsnotify v1.5.4 h1:jRbGcIw6P2Meqdwuo0H1p6JVLbL5DHKAKlYndzMwVZI=
github.com/fsnotify/fsnotify v1.5.4/go.mod h1:OVB6XrOHzAwXMpEM7uPOzcehqUV2UqJxmVXmkdnm1bU=
github.com/fsouza/fake-gcs-server v1.7.0/go.mod h1:5XIRs4YvwNbNoz+1JF8j6KLAyDh7RHGAyAK3EP2EsNk=
github.com/fullsailor/pkcs7 v0.0.0-20190404230743-d7302db945fa/go.mod h1:KnogPXtdwXqoenmZCw6S+25EAm2MkxbG0deNDu4cbSA=
github.com/garyburd/redigo v0.0.0-20150301180006-535138d7bcd7/go.mod h1:NR3MbYisc3/PwhQ00EMzDiPmrwpPxAn5GI05/YaO1SY=
@ -907,8 +900,9 @@ github.com/go-ini/ini v1.25.4/go.mod h1:ByCAeIL28uOIIG0E3PJtZPDL8WnHpFKFOtgjp+3I
github.com/go-kit/kit v0.8.0/go.mod h1:xBxKIO96dXMWWy0MnWVtmwkA9/13aqxPnvrjFYMA2as=
github.com/go-kit/kit v0.9.0/go.mod h1:xBxKIO96dXMWWy0MnWVtmwkA9/13aqxPnvrjFYMA2as=
github.com/go-kit/kit v0.10.0/go.mod h1:xUsJbQ/Fp4kEt7AFgCuvyX4a71u8h9jB8tj/ORgOZ7o=
github.com/go-kit/kit v0.11.0 h1:IGmIEl7aHTYh6E2HlT+ptILBotjo4xl8PMDl852etiI=
github.com/go-kit/kit v0.11.0/go.mod h1:73/6Ixaufkvb5Osvkls8C79vuQ49Ba1rUEUYNSf+FUw=
github.com/go-kit/kit v0.12.0 h1:e4o3o3IsBfAKQh5Qbbiqyfu97Ku7jrO/JbohvztANh4=
github.com/go-kit/kit v0.12.0/go.mod h1:lHd+EkCZPIwYItmGDDRdhinkzX2A1sj+M9biaEaizzs=
github.com/go-kit/log v0.1.0/go.mod h1:zbhenjAZHb184qTLMA9ZjW7ThYL0H2mk7Q6pNt4vbaY=
github.com/go-kit/log v0.2.0/go.mod h1:NwTd00d/i8cPZ3xOwwiv2PO5MOcx78fFErGNcVmBjv0=
github.com/go-kit/log v0.2.1 h1:MRVx0/zhvdseW+Gza6N9rVzU/IVzaeE1SFI4raAhmBU=
@ -923,7 +917,6 @@ github.com/go-logfmt/logfmt v0.5.1/go.mod h1:WYhtIu8zTZfxdn5+rREduYbwxfcBr/Vr6KE
github.com/go-logr/logr v0.1.0/go.mod h1:ixOQHD9gLJUVQQ2ZOR7zLEifBX6tGkNJF4QyIY7sIas=
github.com/go-logr/logr v0.2.0/go.mod h1:z6/tIYblkpsD+a4lm/fGIIU9mZ+XfAiaFtq7xTgseGU=
github.com/go-logr/logr v0.4.0/go.mod h1:z6/tIYblkpsD+a4lm/fGIIU9mZ+XfAiaFtq7xTgseGU=
github.com/go-logr/logr v1.2.0/go.mod h1:jdQByPbusPIv2/zmleS9BjJVeZ6kBagPoEUsqbVz/1A=
github.com/go-logr/logr v1.2.2/go.mod h1:jdQByPbusPIv2/zmleS9BjJVeZ6kBagPoEUsqbVz/1A=
github.com/go-logr/logr v1.2.3 h1:2DntVwHkVopvECVRSlL5PSo9eG+cAkDCuckLubN+rq0=
github.com/go-logr/logr v1.2.3/go.mod h1:jdQByPbusPIv2/zmleS9BjJVeZ6kBagPoEUsqbVz/1A=
@ -1060,6 +1053,8 @@ github.com/go-openapi/validate v0.20.2/go.mod h1:e7OJoKNgd0twXZwIn0A43tHbvIcr/rZ
github.com/go-openapi/validate v0.21.0/go.mod h1:rjnrwK57VJ7A8xqfpAOEKRH8yQSGUriMu5/zuPSQ1hg=
github.com/go-openapi/validate v0.22.0 h1:b0QecH6VslW/TxtpKgzpO1SNG7GU2FsaqKdP1E2T50Y=
github.com/go-openapi/validate v0.22.0/go.mod h1:rjnrwK57VJ7A8xqfpAOEKRH8yQSGUriMu5/zuPSQ1hg=
github.com/go-ozzo/ozzo-validation/v4 v4.3.0 h1:byhDUpfEwjsVQb1vBunvIjh2BHQ9ead57VkAEY4V+Es=
github.com/go-ozzo/ozzo-validation/v4 v4.3.0/go.mod h1:2NKgrcHl3z6cJs+3Oo940FPRiTzuqKbvfrL2RxCj6Ew=
github.com/go-playground/assert/v2 v2.0.1/go.mod h1:VDjEfimB/XKnb+ZQfWdccd7VUvScMdVu0Titje2rxJ4=
github.com/go-playground/locales v0.13.0/go.mod h1:taPMhCMXrRLJO55olJkUXHZBHCxTMfnGwq/HNwmWNS8=
github.com/go-playground/locales v0.14.0/go.mod h1:sawfccIbzZTqEDETgFXqTho0QybSa7l++s0DH+LDiLs=
@ -1228,8 +1223,10 @@ github.com/google/btree v1.0.1 h1:gK4Kx5IaGY9CD5sPJ36FHiBJ6ZXl0kilRiiCj+jdYp4=
github.com/google/btree v1.0.1/go.mod h1:xXMiIv4Fb/0kKde4SpL7qlzvu5cMJDRkFDxJfI9uaxA=
github.com/google/flatbuffers v1.11.0/go.mod h1:1AeVuKshWv4vARoZatz6mlQ0JxURH0Kv5+zNeJKJCa8=
github.com/google/flatbuffers v1.12.0/go.mod h1:1AeVuKshWv4vARoZatz6mlQ0JxURH0Kv5+zNeJKJCa8=
github.com/google/flatbuffers v2.0.0+incompatible h1:dicJ2oXwypfwUGnB2/TYWYEKiuk9eYQlQO/AnOHl5mI=
github.com/google/flatbuffers v2.0.0+incompatible/go.mod h1:1AeVuKshWv4vARoZatz6mlQ0JxURH0Kv5+zNeJKJCa8=
github.com/google/flatbuffers v2.0.5+incompatible h1:ANsW0idDAXIY+mNHzIHxWRfabV2x5LUEEIIWcwsYgB8=
github.com/google/flatbuffers v2.0.5+incompatible/go.mod h1:1AeVuKshWv4vARoZatz6mlQ0JxURH0Kv5+zNeJKJCa8=
github.com/google/gnostic v0.5.7-v3refs h1:FhTMOKj2VhjpouxvWJAV1TL304uMlb9zcDqkl6cEI54=
github.com/google/go-cmp v0.2.0/go.mod h1:oXzfMopK8JAjlY9xF4vHSVASa0yLyX7SntLO5aqRK0M=
github.com/google/go-cmp v0.3.0/go.mod h1:8QqcDgzrUqlUb/G2PQTWiueGozuR1884gddMywk6iLU=
github.com/google/go-cmp v0.3.1/go.mod h1:8QqcDgzrUqlUb/G2PQTWiueGozuR1884gddMywk6iLU=
@ -1243,8 +1240,9 @@ github.com/google/go-cmp v0.5.4/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/
github.com/google/go-cmp v0.5.5/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE=
github.com/google/go-cmp v0.5.6/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE=
github.com/google/go-cmp v0.5.7/go.mod h1:n+brtR0CgQNWTVd5ZUFpTBC8YFBDLK/h/bpaJ8/DtOE=
github.com/google/go-cmp v0.5.8 h1:e6P7q2lk1O+qJJb4BtCQXlK8vWEO8V1ZeuEdJNOqZyg=
github.com/google/go-cmp v0.5.8/go.mod h1:17dUlkBOakJ0+DkrSSNjCkIjxS6bF9zb3elmeNGIjoY=
github.com/google/go-cmp v0.5.9 h1:O2Tfq5qg4qc4AmwVlvv0oLiVAGB7enBSJ2x2DqQFi38=
github.com/google/go-cmp v0.5.9/go.mod h1:17dUlkBOakJ0+DkrSSNjCkIjxS6bF9zb3elmeNGIjoY=
github.com/google/go-github v17.0.0+incompatible h1:N0LgJ1j65A7kfXrZnUDaYCs/Sf4rEjNlfyDHW9dolSY=
github.com/google/go-github v17.0.0+incompatible/go.mod h1:zLgOLi98H3fifZn+44m+umXrS52loVEgC2AApnigrVQ=
github.com/google/go-github/v45 v45.2.0 h1:5oRLszbrkvxDDqBCNj2hjDZMKmvexaZ1xw/FCD+K3FI=
@ -1260,7 +1258,6 @@ github.com/google/gofuzz v0.0.0-20170612174753-24818f796faf/go.mod h1:HP5RmnzzSN
github.com/google/gofuzz v1.0.0/go.mod h1:dBl0BpW6vV/+mYPU4Po3pmUjxk6FQPldtuIdl/M65Eg=
github.com/google/gofuzz v1.1.0/go.mod h1:dBl0BpW6vV/+mYPU4Po3pmUjxk6FQPldtuIdl/M65Eg=
github.com/google/gofuzz v1.2.0 h1:xRy4A+RhZaiKjJ1bPfwQ8sedCA+YS2YcCHW6ec7JMi0=
github.com/google/gofuzz v1.2.0/go.mod h1:dBl0BpW6vV/+mYPU4Po3pmUjxk6FQPldtuIdl/M65Eg=
github.com/google/martian v2.1.0+incompatible/go.mod h1:9I4somxYTbIHy5NJKHRl3wXiIaQGbYVAs8BPL6v8lEs=
github.com/google/martian v2.1.1-0.20190517191504-25dcb96d9e51+incompatible h1:xmapqc1AyLoB+ddYT6r04bD9lIjlOqGaREovi0SzFaE=
github.com/google/martian v2.1.1-0.20190517191504-25dcb96d9e51+incompatible/go.mod h1:9I4somxYTbIHy5NJKHRl3wXiIaQGbYVAs8BPL6v8lEs=
@ -1310,8 +1307,9 @@ github.com/googleapis/gax-go/v2 v2.0.4/go.mod h1:0Wqv26UfaUD9n4G6kQubkQ+KchISgw+
github.com/googleapis/gax-go/v2 v2.0.5/go.mod h1:DWXyrwAJ9X0FpwwEdw+IPEYBICEFu5mhpdKc/us6bOk=
github.com/googleapis/gax-go/v2 v2.1.0/go.mod h1:Q3nei7sK6ybPYH7twZdmQpAd1MKb7pfu6SK+H1/DsU0=
github.com/googleapis/gax-go/v2 v2.1.1/go.mod h1:hddJymUZASv3XPyGkUpKj8pPO47Rmb0eJc8R6ouapiM=
github.com/googleapis/gax-go/v2 v2.2.0 h1:s7jOdKSaksJVOxE0Y/S32otcfiP+UQ0cL8/GTKaONwE=
github.com/googleapis/gax-go/v2 v2.2.0/go.mod h1:as02EH8zWkzwUoLbBaFeQ+arQaj/OthfcblKl4IGNaM=
github.com/googleapis/gax-go/v2 v2.3.0 h1:nRJtk3y8Fm770D42QV6T90ZnvFZyk7agSo3Q+Z9p3WI=
github.com/googleapis/gax-go/v2 v2.3.0/go.mod h1:b8LNqSzNabLiUpXKkY7HAR5jr6bIT99EXz9pXxye9YM=
github.com/googleapis/gnostic v0.0.0-20170426233943-68f4ded48ba9/go.mod h1:sJBsCZ4ayReDTBIg8b9dl28c5xFWyhBTVRp3pOg5EKY=
github.com/googleapis/gnostic v0.0.0-20170729233727-0c5108395e2d/go.mod h1:sJBsCZ4ayReDTBIg8b9dl28c5xFWyhBTVRp3pOg5EKY=
github.com/googleapis/gnostic v0.1.0/go.mod h1:sJBsCZ4ayReDTBIg8b9dl28c5xFWyhBTVRp3pOg5EKY=
@ -1334,8 +1332,8 @@ github.com/gophercloud/gophercloud v0.15.0/go.mod h1:VX0Ibx85B60B5XOrZr6kaNwrmPU
github.com/gophercloud/gophercloud v0.16.0/go.mod h1:wRtmUelyIIv3CSSDI47aUwbs075O6i+LY+pXsKCBsb4=
github.com/gophercloud/gophercloud v0.17.0/go.mod h1:wRtmUelyIIv3CSSDI47aUwbs075O6i+LY+pXsKCBsb4=
github.com/gophercloud/gophercloud v0.18.0/go.mod h1:wRtmUelyIIv3CSSDI47aUwbs075O6i+LY+pXsKCBsb4=
github.com/gophercloud/gophercloud v0.20.0 h1:1+4jrsjVhdX5omlAo4jkmFc6ftLbuXLzgFo4i6lH+Gk=
github.com/gophercloud/gophercloud v0.20.0/go.mod h1:wRtmUelyIIv3CSSDI47aUwbs075O6i+LY+pXsKCBsb4=
github.com/gophercloud/gophercloud v0.24.0 h1:jDsIMGJ1KZpAjYfQgGI2coNQj5Q83oPzuiGJRFWgMzw=
github.com/gopherjs/gopherjs v0.0.0-20181017120253-0766667cb4d1/go.mod h1:wJfORRmW1u3UXTncJ5qlYoELFm8eSnnEO6hX4iZ3EWY=
github.com/gopherjs/gopherjs v0.0.0-20191106031601-ce3c9ade29de/go.mod h1:wJfORRmW1u3UXTncJ5qlYoELFm8eSnnEO6hX4iZ3EWY=
github.com/gorilla/context v1.1.1/go.mod h1:kBGZzfjB9CEq2AlWe17Uuf7NDRt0dE0s8S51q0aT7Yg=
@ -1406,8 +1404,9 @@ github.com/grpc-ecosystem/grpc-gateway v1.14.4/go.mod h1:6CwZWGDSPRJidgKAtJVvND6
github.com/grpc-ecosystem/grpc-gateway v1.14.6/go.mod h1:zdiPV4Yse/1gnckTHtghG4GkDEdKCRJduHpTxT3/jcw=
github.com/grpc-ecosystem/grpc-gateway v1.15.0/go.mod h1:vO11I9oWA+KsxmfFQPhLnnIb1VDE24M+pdxZFiuZcA8=
github.com/grpc-ecosystem/grpc-gateway v1.16.0/go.mod h1:BDjrQk3hbvj6Nolgz8mAMFbcEtjT1g+wF4CSlocrBnw=
github.com/grpc-ecosystem/grpc-gateway/v2 v2.7.0 h1:BZHcxBETFHIdVyhyEfOvn/RdU/QGdLI4y34qQGjGWO0=
github.com/grpc-ecosystem/grpc-gateway/v2 v2.7.0/go.mod h1:hgWBS7lorOAVIJEQMi4ZsPv9hVvWI6+ch50m39Pf2Ks=
github.com/grpc-ecosystem/grpc-gateway/v2 v2.10.3 h1:BGNSrTRW4rwfhJiFwvwF4XQ0Y72Jj9YEgxVrtovbD5o=
github.com/grpc-ecosystem/grpc-gateway/v2 v2.10.3/go.mod h1:VHn7KgNsRriXa4mcgtkpR00OXyQY6g67JWMvn+R27A4=
github.com/grpc-ecosystem/grpc-opentracing v0.0.0-20180507213350-8e809c8a8645/go.mod h1:6iZfnjpejD4L/4DwD7NryNaJyCQdzwWwH2MWhCA90Kw=
github.com/hailocab/go-hostpool v0.0.0-20160125115350-e80d13ce29ed/go.mod h1:tMWxXQ9wFIaZeTI9F+hmhFiGpFmhOHzyShyFUhRm0H4=
github.com/hanwen/go-fuse v1.0.0/go.mod h1:unqXarDXqzAk0rt98O2tVndEPIpUgLD9+rwFisZH3Ok=
@ -1420,8 +1419,8 @@ github.com/hashicorp/consul/api v1.6.0/go.mod h1:1NSuaUUkFaJzMasbfq/11wKYWSR67Xn
github.com/hashicorp/consul/api v1.7.0/go.mod h1:1NSuaUUkFaJzMasbfq/11wKYWSR67Xn6r2DXKhuDNFg=
github.com/hashicorp/consul/api v1.8.1/go.mod h1:sDjTOq0yUyv5G4h+BqSea7Fn6BU+XbolEz1952UB+mk=
github.com/hashicorp/consul/api v1.9.1/go.mod h1:XjsvQN+RJGWI2TWy1/kqaE16HrR2J/FWgkYjdZQsX9M=
github.com/hashicorp/consul/api v1.10.1 h1:MwZJp86nlnL+6+W1Zly4JUuVn9YHhMggBirMpHGD7kw=
github.com/hashicorp/consul/api v1.10.1/go.mod h1:XjsvQN+RJGWI2TWy1/kqaE16HrR2J/FWgkYjdZQsX9M=
github.com/hashicorp/consul/api v1.12.0 h1:k3y1FYv6nuKyNTqj6w9gXOx5r5CfLj/k/euUeBXj1OY=
github.com/hashicorp/consul/sdk v0.1.1/go.mod h1:VKf9jXwCTEY1QZP2MOLRhb5i/I/ssyNV1vwHyQBF0x8=
github.com/hashicorp/consul/sdk v0.3.0/go.mod h1:VKf9jXwCTEY1QZP2MOLRhb5i/I/ssyNV1vwHyQBF0x8=
github.com/hashicorp/consul/sdk v0.4.0/go.mod h1:fY08Y9z5SvJqevyZNy6WWPXiG3KwBPAvlcdx16zZ0fM=
@ -1434,8 +1433,8 @@ github.com/hashicorp/errwrap v1.0.0/go.mod h1:YH+1FKiLXxHSkmPseP+kNlulaMuP3n2brv
github.com/hashicorp/errwrap v1.1.0 h1:OxrOeh75EUXMY8TBjag2fzXGZ40LB6IKw45YeGUDY2I=
github.com/hashicorp/errwrap v1.1.0/go.mod h1:YH+1FKiLXxHSkmPseP+kNlulaMuP3n2brvKWEqk/Jc4=
github.com/hashicorp/go-cleanhttp v0.5.0/go.mod h1:JpRdi6/HCYpAwUzNwuwqhbovhLtngrth3wmdIIUrZ80=
github.com/hashicorp/go-cleanhttp v0.5.1 h1:dH3aiDG9Jvb5r5+bYHsikaOUIpcM0xvgMXVoDkXMzJM=
github.com/hashicorp/go-cleanhttp v0.5.1/go.mod h1:JpRdi6/HCYpAwUzNwuwqhbovhLtngrth3wmdIIUrZ80=
github.com/hashicorp/go-cleanhttp v0.5.2 h1:035FKYIWjmULyFRBKPs8TBQoi0x6d9G4xc9neXJWAZQ=
github.com/hashicorp/go-hclog v0.16.1 h1:IVQwpTGNRRIHafnTs2dQLIk4ENtneRIEEJWOVDqz99o=
github.com/hashicorp/go-hclog v0.16.1/go.mod h1:whpDNt7SSdeAju8AWKIWsul05p54N/39EeqMAyrmvFQ=
github.com/hashicorp/go-immutable-radix v1.0.0/go.mod h1:0y9vanUI8NX6FsYoO3zeMjhV/C5i9g4Q3DwcSNZ4P60=
@ -1497,8 +1496,8 @@ github.com/hashicorp/serf v0.8.3/go.mod h1:UpNcs7fFbpKIyZaUuSW6EPiH+eZC7OuyFD+wc
github.com/hashicorp/serf v0.8.5/go.mod h1:UpNcs7fFbpKIyZaUuSW6EPiH+eZC7OuyFD+wc1oal+k=
github.com/hashicorp/serf v0.9.0/go.mod h1:YL0HO+FifKOW2u1ke99DGVu1zhcpZzNwrLIqBC7vbYU=
github.com/hashicorp/serf v0.9.3/go.mod h1:UWDWwZeL5cuWDJdl0C6wrvrUwEqtQ4ZKBKKENpqIUyk=
github.com/hashicorp/serf v0.9.5 h1:EBWvyu9tcRszt3Bxp3KNssBMP1KuHWyO51lz9+786iM=
github.com/hashicorp/serf v0.9.5/go.mod h1:UWDWwZeL5cuWDJdl0C6wrvrUwEqtQ4ZKBKKENpqIUyk=
github.com/hashicorp/serf v0.9.6 h1:uuEX1kLR6aoda1TBttmJQKDLZE1Ob7KN0NPdE7EtCDc=
github.com/hashicorp/vault/api v1.0.4/go.mod h1:gDcqh3WGcR1cpF5AJz/B1UFheUEneMoIospckxBxk6Q=
github.com/hashicorp/vault/sdk v0.1.13/go.mod h1:B+hVj7TpuQY1Y/GPbCpffmgd+tSEwvhkWnjtSYCaS2M=
github.com/hashicorp/yamux v0.0.0-20180604194846-3520598351bb/go.mod h1:+NfK9FKeTrX5uv1uIXGdwYDTeHna2qgaIlx54MXqjAM=
@ -1512,8 +1511,8 @@ github.com/hetznercloud/hcloud-go v1.23.1/go.mod h1:xng8lbDUg+xM1dgc0yGHX5EeqbwI
github.com/hetznercloud/hcloud-go v1.24.0/go.mod h1:3YmyK8yaZZ48syie6xpm3dt26rtB6s65AisBHylXYFA=
github.com/hetznercloud/hcloud-go v1.25.0/go.mod h1:2C5uMtBiMoFr3m7lBFPf7wXTdh33CevmZpQIIDPGYJI=
github.com/hetznercloud/hcloud-go v1.26.2/go.mod h1:2C5uMtBiMoFr3m7lBFPf7wXTdh33CevmZpQIIDPGYJI=
github.com/hetznercloud/hcloud-go v1.32.0 h1:7zyN2V7hMlhm3HZdxOarmOtvzKvkcYKjM0hcwYMQZz0=
github.com/hetznercloud/hcloud-go v1.32.0/go.mod h1:XX/TQub3ge0yWR2yHWmnDVIrB+MQbda1pHxkUmDlUME=
github.com/hetznercloud/hcloud-go v1.33.2 h1:ptWKVYLW7YtjXzsqTFKFxwpVo3iM9UMkVPBYQE4teLU=
github.com/hodgesds/perf-utils v0.0.8/go.mod h1:F6TfvsbtrF88i++hou29dTXlI2sfsJv+gRZDtmTJkAs=
github.com/hpcloud/tail v1.0.0/go.mod h1:ab1qPbhIpdTxEkNHXyeSf5vhxWSCs/tWer42PpOxQnU=
github.com/huandu/xstrings v1.0.0/go.mod h1:4qWG/gcEcfX4z/mBDHJ++3ReCw9ibxbsNJbcucJdbSo=
@ -1687,8 +1686,8 @@ github.com/klauspost/compress v1.12.2/go.mod h1:8dP1Hq4DHOhN9w426knH3Rhby4rFm6D8
github.com/klauspost/compress v1.13.1/go.mod h1:8dP1Hq4DHOhN9w426knH3Rhby4rFm6D8eO+e+Dq5Gzg=
github.com/klauspost/compress v1.13.6/go.mod h1:/3/Vjq9QcHkK5uEr5lBEmyoZ1iFhe47etQ6QUkpK6sk=
github.com/klauspost/compress v1.15.1/go.mod h1:/3/Vjq9QcHkK5uEr5lBEmyoZ1iFhe47etQ6QUkpK6sk=
github.com/klauspost/compress v1.15.2 h1:3WH+AG7s2+T8o3nrM/8u2rdqUEcQhmga7smjrT41nAw=
github.com/klauspost/compress v1.15.2/go.mod h1:PhcZ0MbTNciWF3rruxRgKxI5NkcHHrHUDtV4Yw2GlzU=
github.com/klauspost/compress v1.15.5 h1:qyCLMz2JCrKADihKOh9FxnW3houKeNsp2h5OEz0QSEA=
github.com/klauspost/compress v1.15.5/go.mod h1:PhcZ0MbTNciWF3rruxRgKxI5NkcHHrHUDtV4Yw2GlzU=
github.com/klauspost/cpuid v0.0.0-20170728055534-ae7887de9fa5/go.mod h1:Pj4uuM528wm8OyEC2QMXAi2YiTZ96dNQPGgoMS4s3ek=
github.com/klauspost/cpuid v1.2.3/go.mod h1:Pj4uuM528wm8OyEC2QMXAi2YiTZ96dNQPGgoMS4s3ek=
github.com/klauspost/cpuid v1.3.1/go.mod h1:bYW4mA6ZgKPob1/Dlai2LviZJO7KGI3uoWLd42rAQw4=
@ -1754,14 +1753,13 @@ github.com/lightstep/lightstep-tracer-go v0.18.1/go.mod h1:jlF1pusYV4pidLvZ+XD0U
github.com/linkedin/goavro/v2 v2.10.0 h1:eTBIRoInBM88gITGXYtUSqqxLTFXfOsJBiX8ZMW0o4U=
github.com/linkedin/goavro/v2 v2.10.0/go.mod h1:UgQUb2N/pmueQYH9bfqFioWxzYCZXSfF8Jw03O5sjqA=
github.com/linode/linodego v0.28.5/go.mod h1:BR0gVkCJffEdIGJSl6bHR80Ty+Uvg/2jkjmrWaFectM=
github.com/linode/linodego v0.32.0 h1:IK04cx2b/IwAAd6XLruf1Dl/n3dRXj87Uw/5qo6afVU=
github.com/linode/linodego v0.32.0/go.mod h1:BR0gVkCJffEdIGJSl6bHR80Ty+Uvg/2jkjmrWaFectM=
github.com/linode/linodego v1.5.0 h1:p1TgkDsz0ubaIPLNviZBTIjlsX3PdvqZQ4eO2r0L1Hk=
github.com/lovoo/gcloud-opentracing v0.3.0/go.mod h1:ZFqk2y38kMDDikZPAK7ynTTGuyt17nSPdS3K5e+ZTBY=
github.com/lucasb-eyer/go-colorful v1.0.2/go.mod h1:0MS4r+7BZKSJ5mw4/S5MPN+qHFF1fYclkSPilDOKW0s=
github.com/lucasb-eyer/go-colorful v1.0.3/go.mod h1:R4dSotOR9KMtayYi1e77YzuveK+i7ruzyGqttikkLy0=
github.com/lufia/iostat v1.1.0/go.mod h1:rEPNA0xXgjHQjuI5Cy05sLlS2oRcSlWHRLrvh/AQ+Pg=
github.com/lyft/protoc-gen-star v0.5.1/go.mod h1:9toiA3cC7z5uVbODF7kEQ91Xn7XNFkVUl+SrEe+ZORU=
github.com/lyft/protoc-gen-validate v0.0.13/go.mod h1:XbGvPuh87YZc5TdIa2/I4pLk0QoUACkjt2znoq26NVQ=
github.com/m3db/prometheus_remote_client_golang v0.4.4 h1:DsAIjVKoCp7Ym35tAOFL1OuMLIdIikAEHeNPHY+yyM8=
github.com/m3db/prometheus_remote_client_golang v0.4.4/go.mod h1:wHfVbA3eAK6dQvKjCkHhusWYegCk3bDGkA15zymSHdc=
github.com/magefile/mage v1.11.0/go.mod h1:z5UZb/iS3GoOSn0JgWuiw7dxlurVYTu+/jHXqQg881A=
@ -1824,8 +1822,9 @@ github.com/mattn/go-runewidth v0.0.3/go.mod h1:LwmH8dsx7+W8Uxz3IHJYH5QSwggIsqBzp
github.com/mattn/go-runewidth v0.0.4/go.mod h1:LwmH8dsx7+W8Uxz3IHJYH5QSwggIsqBzpuz5H//U1FU=
github.com/mattn/go-runewidth v0.0.6/go.mod h1:H031xJmbD/WCDINGzjvQ9THkh0rPKHF+m2gUSrubnMI=
github.com/mattn/go-runewidth v0.0.8/go.mod h1:H031xJmbD/WCDINGzjvQ9THkh0rPKHF+m2gUSrubnMI=
github.com/mattn/go-runewidth v0.0.9 h1:Lm995f3rfxdpd6TSmuVCHVb/QhupuXlYr8sCI/QdE+0=
github.com/mattn/go-runewidth v0.0.9/go.mod h1:H031xJmbD/WCDINGzjvQ9THkh0rPKHF+m2gUSrubnMI=
github.com/mattn/go-runewidth v0.0.13 h1:lTGmDsbAYt5DmK6OnoV7EuIF1wEIFAcxld6ypU4OSgU=
github.com/mattn/go-runewidth v0.0.13/go.mod h1:Jdepj2loyihRzMpdS35Xk/zdY8IAYHsh153qUoGf23w=
github.com/mattn/go-shellwords v1.0.3/go.mod h1:3xCvwCdWdlDJUrvuMn7Wuy9eWs4pE8vqg+NOMyg4B2o=
github.com/mattn/go-sqlite3 v1.10.0/go.mod h1:FPy6KqzDD04eiIsT53CuJW3U88zkxoIYsOqkbpncsNc=
github.com/mattn/go-sqlite3 v1.11.0/go.mod h1:FPy6KqzDD04eiIsT53CuJW3U88zkxoIYsOqkbpncsNc=
@ -1857,8 +1856,9 @@ github.com/miekg/dns v1.1.35/go.mod h1:KNUDUusw/aVsxyTYZM1oqvCicbwhgbNgztCETuNZ7
github.com/miekg/dns v1.1.38/go.mod h1:KNUDUusw/aVsxyTYZM1oqvCicbwhgbNgztCETuNZ7xM=
github.com/miekg/dns v1.1.41/go.mod h1:p6aan82bvRIyn+zDIv9xYNUpwa73JcSh9BKwknJysuI=
github.com/miekg/dns v1.1.42/go.mod h1:+evo5L0630/F6ca/Z9+GAqzhjGyn8/c+TBaOyfEl0V4=
github.com/miekg/dns v1.1.43 h1:JKfpVSCB84vrAmHzyrsxB5NAr5kLoMXZArPSw7Qlgyg=
github.com/miekg/dns v1.1.43/go.mod h1:+evo5L0630/F6ca/Z9+GAqzhjGyn8/c+TBaOyfEl0V4=
github.com/miekg/dns v1.1.49 h1:qe0mQU3Z/XpFeE+AEBo2rqaS1IPBJ3anmqZ4XiZJVG8=
github.com/miekg/dns v1.1.49/go.mod h1:e3IlAVfNqAllflbibAZEWOXOQ+Ynzk/dDozDxY7XnME=
github.com/miekg/pkcs11 v1.0.3/go.mod h1:XsNlhZGX73bx86s2hdc/FuaLm2CPZJemRLMA+WTFxgs=
github.com/mileusna/useragent v0.0.0-20190129205925-3e331f0949a5/go.mod h1:JWhYAp2EXqUtsxTKdeGlY8Wp44M7VxThC9FEoNGi2IE=
github.com/minio/highwayhash v1.0.1/go.mod h1:BQskDq+xkJ12lmlUUi7U0M5Swg3EWR+dLTk+kldvVxY=
@ -2049,6 +2049,8 @@ github.com/openzipkin/zipkin-go v0.2.1/go.mod h1:NaW6tEwdmWMaCDZzg8sh+IBNOxHMPnh
github.com/openzipkin/zipkin-go v0.2.2/go.mod h1:NaW6tEwdmWMaCDZzg8sh+IBNOxHMPnhQw8ySjnjRyN4=
github.com/openzipkin/zipkin-go v0.2.5/go.mod h1:KpXfKdgRDnnhsxw4pNIH9Md5lyFqKUa4YDFlwRYAMyE=
github.com/pact-foundation/pact-go v1.0.4/go.mod h1:uExwJY4kCzNPcHRj+hCR/HBbOOIwwtUjcrb0b5/5kLM=
github.com/parca-dev/parca v0.12.1 h1:QZ5UudbnXeLxER4gEeD0urP/cInTUc8GvSlWsg1AJyo=
github.com/parca-dev/parca v0.12.1/go.mod h1:MWhbinjhcb96IlRxk9GJvz9bTkHSTwDM+GQoGDpytGc=
github.com/pascaldekloe/goe v0.0.0-20180627143212-57f6aae5913c/go.mod h1:lzWF7FIEvWOWxwDKqyGYQf6ZUaNfKdP144TG7ZOy1lc=
github.com/pascaldekloe/goe v0.1.0 h1:cBOtyMzM9HTpWjXfbbunk26uA6nG3a8n06Wieeh0MwY=
github.com/pascaldekloe/goe v0.1.0/go.mod h1:lzWF7FIEvWOWxwDKqyGYQf6ZUaNfKdP144TG7ZOy1lc=
@ -2072,8 +2074,9 @@ github.com/phpdave11/gofpdi v1.0.12/go.mod h1:vBmVV0Do6hSBHC8uKUQ71JGW+ZGQq74llk
github.com/pierrec/lz4 v1.0.2-0.20190131084431-473cd7ce01a1/go.mod h1:3/3N9NVKO0jef7pBehbT1qWhCMrIgbYNnFAZCqQ5LRc=
github.com/pierrec/lz4 v2.0.5+incompatible/go.mod h1:pdkljMzZIN41W+lC3N2tnIh5sFi+IEE17M5jbnwPHcY=
github.com/pierrec/lz4 v2.6.0+incompatible/go.mod h1:pdkljMzZIN41W+lC3N2tnIh5sFi+IEE17M5jbnwPHcY=
github.com/pierrec/lz4/v4 v4.1.8 h1:ieHkV+i2BRzngO4Wd/3HGowuZStgq6QkPsD1eolNAO4=
github.com/pierrec/lz4/v4 v4.1.8/go.mod h1:gZWDp/Ze/IJXGXf23ltt2EXimqmTUXEy0GFuRQyBid4=
github.com/pierrec/lz4/v4 v4.1.12 h1:44l88ehTZAUGW4VlO1QC4zkilL99M6Y9MXNwEs0uzP8=
github.com/pierrec/lz4/v4 v4.1.12/go.mod h1:gZWDp/Ze/IJXGXf23ltt2EXimqmTUXEy0GFuRQyBid4=
github.com/pingcap/errors v0.11.4 h1:lFuQV/oaUMGcD2tqt+01ROSmJs75VG1ToEOkZIZ4nE4=
github.com/pkg/browser v0.0.0-20180916011732-0a3d74bf9ce4/go.mod h1:4OwLy04Bl9Ef3GJJCoec+30X3LQs/0/m4HFRt/2LUSA=
github.com/pkg/browser v0.0.0-20210115035449-ce105d075bb4/go.mod h1:N6UoU20jOqggOuDwUaBQpluzLNDqif3kq9z2wpdYEfQ=
@ -2213,6 +2216,8 @@ github.com/retailnext/hllpp v1.0.1-0.20180308014038-101a6d2f8b52/go.mod h1:RDpi1
github.com/rhnvrm/simples3 v0.6.1/go.mod h1:Y+3vYm2V7Y4VijFoJHHTrja6OgPrJ2cBti8dPGkC3sA=
github.com/rivo/tview v0.0.0-20200219210816-cd38d7432498/go.mod h1:6lkG1x+13OShEf0EaOCaTQYyB7d5nSbb181KtjlS+84=
github.com/rivo/uniseg v0.1.0/go.mod h1:J6wj4VEh+S6ZtnVlnTBMWIodfgj8LQOQFoIToxlJtxc=
github.com/rivo/uniseg v0.2.0 h1:S1pD9weZBuJdFmowNwbpi7BJ8TNftyUImj/0WQi72jY=
github.com/rivo/uniseg v0.2.0/go.mod h1:J6wj4VEh+S6ZtnVlnTBMWIodfgj8LQOQFoIToxlJtxc=
github.com/robfig/cron/v3 v3.0.1 h1:WdRxkvbJztn8LMz/QEvLN5sBU+xKpSqwwUO1Pjr4qDs=
github.com/robfig/cron/v3 v3.0.1/go.mod h1:eQICP3HwyT7UooqI/z+Ov+PtYAWygg1TEWWzGIFLtro=
github.com/rogpeppe/fastuuid v0.0.0-20150106093220-6724a57986af/go.mod h1:XWv6SoW27p1b0cqNHllgS5HIMJraePCO15w5zCzIWYg=
@ -2250,8 +2255,8 @@ github.com/sanity-io/litter v1.2.0/go.mod h1:JF6pZUFgu2Q0sBZ+HSV35P8TVPI1TTzEwyu
github.com/santhosh-tekuri/jsonschema v1.2.4/go.mod h1:TEAUOeZSmIxTTuHatJzrvARHiuO9LYd+cIxzgEHCQI4=
github.com/satori/go.uuid v1.2.0/go.mod h1:dA0hQrYB0VpLJoorglMZABFdXlWrHn1NEOzdhQKdks0=
github.com/satori/go.uuid v1.2.1-0.20181028125025-b2ce2384e17b/go.mod h1:dA0hQrYB0VpLJoorglMZABFdXlWrHn1NEOzdhQKdks0=
github.com/scaleway/scaleway-sdk-go v1.0.0-beta.7.0.20210223165440-c65ae3540d44 h1:3egqo0Vut6daANFm7tOXdNAa8v5/uLU+sgCJrc88Meo=
github.com/scaleway/scaleway-sdk-go v1.0.0-beta.7.0.20210223165440-c65ae3540d44/go.mod h1:CJJ5VAbozOl0yEw7nHB9+7BXTJbIn6h7W+f6Gau5IP8=
github.com/scaleway/scaleway-sdk-go v1.0.0-beta.9 h1:0roa6gXKgyta64uqh52AQG3wzZXH21unn+ltzQSXML0=
github.com/sean-/seed v0.0.0-20170313163322-e2103e2c3529 h1:nn5Wsu0esKSJiIVhscUtVbo7ada43DJhG55ua/hjS5I=
github.com/sean-/seed v0.0.0-20170313163322-e2103e2c3529/go.mod h1:DxrIzT+xaE7yg65j358z/aeFdxmN0P9QXhEzd20vsDc=
github.com/seccomp/libseccomp-golang v0.9.1/go.mod h1:GbW5+tmTXfcxTToHLXlScSlAvWlF4P2Ca7zGrPiEpWo=
@ -2572,8 +2577,8 @@ go.opentelemetry.io/collector/model v0.31.0 h1:IgMOkSBd/n/gV4EQQ1nJ+/ylddOlqTfMG
go.opentelemetry.io/collector/model v0.31.0/go.mod h1:PcHNnM+RUl0uD8VkSn93PO78N7kQYhfqpI/eki57pl4=
go.opentelemetry.io/contrib v0.21.0/go.mod h1:EH4yDYeNoaTqn/8yCWQmfNB78VHfGX2Jt2bvnvzBlGM=
go.opentelemetry.io/contrib/instrumentation/google.golang.org/grpc/otelgrpc v0.21.0/go.mod h1:Vm5u/mtkj1OMhtao0v+BGo2LUoLCgHYXvRmj0jWITlE=
go.opentelemetry.io/contrib/instrumentation/google.golang.org/grpc/otelgrpc v0.31.0 h1:li8u9OSMvLau7rMs8bmiL82OazG6MAkwPz2i6eS8TBQ=
go.opentelemetry.io/contrib/instrumentation/google.golang.org/grpc/otelgrpc v0.31.0/go.mod h1:SY9qHHUES6W3oZnO1H2W8NvsSovIoXRg/A1AH9px8+I=
go.opentelemetry.io/contrib/instrumentation/google.golang.org/grpc/otelgrpc v0.32.0 h1:WenoaOMNP71oq3KkMZ/jnxI9xU/JSCLw8yZILSI2lfU=
go.opentelemetry.io/contrib/instrumentation/google.golang.org/grpc/otelgrpc v0.32.0/go.mod h1:J0dBVrt7dPS/lKJyQoW0xzQiUr4r2Ik1VwPjAUWnofI=
go.opentelemetry.io/contrib/instrumentation/net/http/otelhttp v0.21.0/go.mod h1:JQAtechjxLEL81EjmbRwxBq/XEzGaHcsPuDHAx54hg4=
go.opentelemetry.io/contrib/propagators/jaeger v1.6.0 h1:tCc+sWgHVeOMp4zmUxHHTaoA5vQlGO089zfg97d+BvU=
go.opentelemetry.io/contrib/propagators/jaeger v1.6.0/go.mod h1:cqu1XdBYBXqXHxZLJdK00G9rT5Hda7Fa938I8LVYz/Y=
@ -2582,31 +2587,31 @@ go.opentelemetry.io/otel v0.11.0/go.mod h1:G8UCk+KooF2HLkgo8RHX9epABH/aRGYET7gQO
go.opentelemetry.io/otel v1.0.0-RC1/go.mod h1:x9tRa9HK4hSSq7jf2TKbqFbtt58/TGk0f9XiEYISI1I=
go.opentelemetry.io/otel v1.0.0/go.mod h1:AjRVh9A5/5DE7S+mZtTR6t8vpKKryam+0lREnfmS4cg=
go.opentelemetry.io/otel v1.6.1/go.mod h1:blzUabWHkX6LJewxvadmzafgh/wnvBSDBdOuwkAtrWQ=
go.opentelemetry.io/otel v1.6.3 h1:FLOfo8f9JzFVFVyU+MSRJc2HdEAXQgm7pIv2uFKRSZE=
go.opentelemetry.io/otel v1.6.3/go.mod h1:7BgNga5fNlF/iZjG06hM3yofffp0ofKCDwSXx1GC4dI=
go.opentelemetry.io/otel v1.7.0 h1:Z2lA3Tdch0iDcrhJXDIlC94XE+bxok1F9B+4Lz/lGsM=
go.opentelemetry.io/otel v1.7.0/go.mod h1:5BdUoMIz5WEs0vt0CUEMtSSaTSHBBVwrhnz7+nrD5xk=
go.opentelemetry.io/otel/exporters/jaeger v1.0.0 h1:cLhx8llHw02h5JTqGqaRbYn+QVKHmrzD9vEbKnSPk5U=
go.opentelemetry.io/otel/exporters/jaeger v1.0.0/go.mod h1:q10N1AolE1JjqKrFJK2tYw0iZpmX+HBaXBtuCzRnBGQ=
go.opentelemetry.io/otel/exporters/otlp/internal/retry v1.6.3 h1:nAmg1WgsUXoXf46dJG9eS/AzOcvkCTK4xJSUYpWyHYg=
go.opentelemetry.io/otel/exporters/otlp/internal/retry v1.6.3/go.mod h1:NEu79Xo32iVb+0gVNV8PMd7GoWqnyDXRlj04yFjqz40=
go.opentelemetry.io/otel/exporters/otlp/otlptrace v1.6.3 h1:4/UjHWMVVc5VwX/KAtqJOHErKigMCH8NexChMuanb/o=
go.opentelemetry.io/otel/exporters/otlp/otlptrace v1.6.3/go.mod h1:UJmXdiVVBaZ63umRUTwJuCMAV//GCMvDiQwn703/GoY=
go.opentelemetry.io/otel/exporters/otlp/otlptrace/otlptracegrpc v1.6.3 h1:leYDq5psbM3K4QNcZ2juCj30LjUnvxjuYQj1mkGjXFM=
go.opentelemetry.io/otel/exporters/otlp/otlptrace/otlptracegrpc v1.6.3/go.mod h1:ycItY/esVj8c0dKgYTOztTERXtPzcfDU/0o8EdwCjoA=
go.opentelemetry.io/otel/exporters/otlp/internal/retry v1.7.0 h1:7Yxsak1q4XrJ5y7XBnNwqWx9amMZvoidCctv62XOQ6Y=
go.opentelemetry.io/otel/exporters/otlp/internal/retry v1.7.0/go.mod h1:M1hVZHNxcbkAlcvrOMlpQ4YOO3Awf+4N2dxkZL3xm04=
go.opentelemetry.io/otel/exporters/otlp/otlptrace v1.7.0 h1:cMDtmgJ5FpRvqx9x2Aq+Mm0O6K/zcUkH73SFz20TuBw=
go.opentelemetry.io/otel/exporters/otlp/otlptrace v1.7.0/go.mod h1:ceUgdyfNv4h4gLxHR0WNfDiiVmZFodZhZSbOLhpxqXE=
go.opentelemetry.io/otel/exporters/otlp/otlptrace/otlptracegrpc v1.7.0 h1:MFAyzUPrTwLOwCi+cltN0ZVyy4phU41lwH+lyMyQTS4=
go.opentelemetry.io/otel/exporters/otlp/otlptrace/otlptracegrpc v1.7.0/go.mod h1:E+/KKhwOSw8yoPxSSuUHG6vKppkvhN+S1Jc7Nib3k3o=
go.opentelemetry.io/otel/internal/metric v0.21.0/go.mod h1:iOfAaY2YycsXfYD4kaRSbLx2LKmfpKObWBEv9QK5zFo=
go.opentelemetry.io/otel/metric v0.21.0/go.mod h1:JWCt1bjivC4iCrz/aCrM1GSw+ZcvY44KCbaeeRhzHnc=
go.opentelemetry.io/otel/oteltest v1.0.0-RC1/go.mod h1:+eoIG0gdEOaPNftuy1YScLr1Gb4mL/9lpDkZ0JjMRq4=
go.opentelemetry.io/otel/sdk v1.0.0-RC1/go.mod h1:kj6yPn7Pgt5ByRuwesbaWcRLA+V7BSDg3Hf8xRvsvf8=
go.opentelemetry.io/otel/sdk v1.0.0/go.mod h1:PCrDHlSy5x1kjezSdL37PhbFUMjrsLRshJ2zCzeXwbM=
go.opentelemetry.io/otel/sdk v1.6.3 h1:prSHYdwCQOX5DrsEzxowH3nLhoAzEBdZhvrR79scfLs=
go.opentelemetry.io/otel/sdk v1.6.3/go.mod h1:A4iWF7HTXa+GWL/AaqESz28VuSBIcZ+0CV+IzJ5NMiQ=
go.opentelemetry.io/otel/sdk v1.7.0 h1:4OmStpcKVOfvDOgCt7UriAPtKolwIhxpnSNI/yK+1B0=
go.opentelemetry.io/otel/sdk v1.7.0/go.mod h1:uTEOTwaqIVuTGiJN7ii13Ibp75wJmYUDe374q6cZwUU=
go.opentelemetry.io/otel/trace v1.0.0-RC1/go.mod h1:86UHmyHWFEtWjfWPSbu0+d0Pf9Q6e1U+3ViBOc+NXAg=
go.opentelemetry.io/otel/trace v1.0.0/go.mod h1:PXTWqayeFUlJV1YDNhsJYB184+IvAH814St6o6ajzIs=
go.opentelemetry.io/otel/trace v1.6.1/go.mod h1:RkFRM1m0puWIq10oxImnGEduNBzxiN7TXluRBtE+5j0=
go.opentelemetry.io/otel/trace v1.6.3 h1:IqN4L+5b0mPNjdXIiZ90Ni4Bl5BRkDQywePLWemd9bc=
go.opentelemetry.io/otel/trace v1.6.3/go.mod h1:GNJQusJlUgZl9/TQBPKU/Y/ty+0iVB5fjhKeJGZPGFs=
go.opentelemetry.io/otel/trace v1.7.0 h1:O37Iogk1lEkMRXewVtZ1BBTVn5JEp8GrJvP92bJqC6o=
go.opentelemetry.io/otel/trace v1.7.0/go.mod h1:fzLSB9nqR2eXzxPXb2JW9IKE+ScyXA48yyE4TNvoHqU=
go.opentelemetry.io/proto/otlp v0.7.0/go.mod h1:PqfVotwruBrMGOCsRd/89rSnXhoiJIqeYNgFYFoEGnI=
go.opentelemetry.io/proto/otlp v0.15.0 h1:h0bKrvdrT/9sBwEJ6iWUqT/N/xPcS66bL4u3isneJ6w=
go.opentelemetry.io/proto/otlp v0.15.0/go.mod h1:H7XAot3MsfNsj7EXtrA2q5xSNQ10UqI405h3+duxN4U=
go.opentelemetry.io/proto/otlp v0.16.0 h1:WHzDWdXUvbc5bG2ObdrGfaNpQz7ft7QN9HHmJlbiB1E=
go.opentelemetry.io/proto/otlp v0.16.0/go.mod h1:H7XAot3MsfNsj7EXtrA2q5xSNQ10UqI405h3+duxN4U=
go.starlark.net v0.0.0-20201118183435-e55f603d8c79 h1:JPjLPz44y2N9mkzh2N344kTk1Y4/V4yJAjTrXGmzv8I=
go.starlark.net v0.0.0-20201118183435-e55f603d8c79/go.mod h1:5YFcFnRptTN+41758c2bMPiqpGg4zBfYji1IQz8wNFk=
go.uber.org/atomic v1.3.2/go.mod h1:gD2HeocX3+yG+ygLZcrzQJaqmWj9AIm7n08wl/qW/PE=
@ -2732,8 +2737,6 @@ golang.org/x/image v0.0.0-20200430140353-33d19683fad8/go.mod h1:FeLwcggjj3mMvU+o
golang.org/x/image v0.0.0-20200618115811-c13761719519/go.mod h1:FeLwcggjj3mMvU+oOTbSwawSJRM1uh48EjtB4UJZlP0=
golang.org/x/image v0.0.0-20201208152932-35266b937fa6/go.mod h1:FeLwcggjj3mMvU+oOTbSwawSJRM1uh48EjtB4UJZlP0=
golang.org/x/image v0.0.0-20210216034530-4410531fe030/go.mod h1:FeLwcggjj3mMvU+oOTbSwawSJRM1uh48EjtB4UJZlP0=
golang.org/x/lint v0.0.0-20181026193005-c67002cb31c3/go.mod h1:UVdnD1Gm6xHRNCYTkRU2/jEulfH38KcIWyp/GAMgvoE=
golang.org/x/lint v0.0.0-20190227174305-5b3e6a55c961/go.mod h1:wehouNa3lNwaWXcvxsM5YxQ5yQlVC4a0KAMCusXpPoU=
golang.org/x/lint v0.0.0-20190301231843-5614ed5bae6f/go.mod h1:UVdnD1Gm6xHRNCYTkRU2/jEulfH38KcIWyp/GAMgvoE=
golang.org/x/lint v0.0.0-20190313153728-d0100b6bd8b3/go.mod h1:6SW0HCj/g11FgYtHlgUYUwCkIfeOF89ocIRzGO/8vkc=
golang.org/x/lint v0.0.0-20190409202823-959b441ac422/go.mod h1:6SW0HCj/g11FgYtHlgUYUwCkIfeOF89ocIRzGO/8vkc=
@ -2841,17 +2844,19 @@ golang.org/x/net v0.0.0-20210520170846-37e1c6afe023/go.mod h1:9nx3DQGgdP8bBQD5qx
golang.org/x/net v0.0.0-20210525063256-abc453219eb5/go.mod h1:9nx3DQGgdP8bBQD5qxJ1jj9UTztislL4KSBs9R2vV5Y=
golang.org/x/net v0.0.0-20210610132358-84b48f89b13b/go.mod h1:9nx3DQGgdP8bBQD5qxJ1jj9UTztislL4KSBs9R2vV5Y=
golang.org/x/net v0.0.0-20210614182718-04defd469f4e/go.mod h1:9nx3DQGgdP8bBQD5qxJ1jj9UTztislL4KSBs9R2vV5Y=
golang.org/x/net v0.0.0-20210726213435-c6fcb2dbf985/go.mod h1:9nx3DQGgdP8bBQD5qxJ1jj9UTztislL4KSBs9R2vV5Y=
golang.org/x/net v0.0.0-20210903162142-ad29c8ab022f/go.mod h1:9nx3DQGgdP8bBQD5qxJ1jj9UTztislL4KSBs9R2vV5Y=
golang.org/x/net v0.0.0-20211015210444-4f30a5c0130f/go.mod h1:9nx3DQGgdP8bBQD5qxJ1jj9UTztislL4KSBs9R2vV5Y=
golang.org/x/net v0.0.0-20211020060615-d418f374d309/go.mod h1:9nx3DQGgdP8bBQD5qxJ1jj9UTztislL4KSBs9R2vV5Y=
golang.org/x/net v0.0.0-20211112202133-69e39bad7dc2/go.mod h1:9nx3DQGgdP8bBQD5qxJ1jj9UTztislL4KSBs9R2vV5Y=
golang.org/x/net v0.0.0-20211209124913-491a49abca63/go.mod h1:9nx3DQGgdP8bBQD5qxJ1jj9UTztislL4KSBs9R2vV5Y=
golang.org/x/net v0.0.0-20220107192237-5cfca573fb4d/go.mod h1:9nx3DQGgdP8bBQD5qxJ1jj9UTztislL4KSBs9R2vV5Y=
golang.org/x/net v0.0.0-20220127200216-cd36cc0744dd/go.mod h1:CfG3xpIq0wQ8r1q4Su4UZFWDARRcnwPjda9FqA0JpMk=
golang.org/x/net v0.0.0-20220225172249-27dd8689420f/go.mod h1:CfG3xpIq0wQ8r1q4Su4UZFWDARRcnwPjda9FqA0JpMk=
golang.org/x/net v0.0.0-20220325170049-de3da57026de/go.mod h1:CfG3xpIq0wQ8r1q4Su4UZFWDARRcnwPjda9FqA0JpMk=
golang.org/x/net v0.0.0-20220401154927-543a649e0bdd/go.mod h1:CfG3xpIq0wQ8r1q4Su4UZFWDARRcnwPjda9FqA0JpMk=
golang.org/x/net v0.0.0-20220412020605-290c469a71a5/go.mod h1:CfG3xpIq0wQ8r1q4Su4UZFWDARRcnwPjda9FqA0JpMk=
golang.org/x/net v0.0.0-20220418201149-a630d4f3e7a2/go.mod h1:CfG3xpIq0wQ8r1q4Su4UZFWDARRcnwPjda9FqA0JpMk=
golang.org/x/net v0.0.0-20220425223048-2871e0cb64e4/go.mod h1:CfG3xpIq0wQ8r1q4Su4UZFWDARRcnwPjda9FqA0JpMk=
golang.org/x/net v0.0.0-20220722155237-a158d28d115b/go.mod h1:XRhObCWvk6IyKnWLug+ECip1KBveYUHfp+8e9klMJ9c=
golang.org/x/net v0.0.0-20220909164309-bea034e7d591 h1:D0B/7al0LLrVC8aWF4+oxpv/m8bc7ViFfVS8/gXGdqI=
golang.org/x/net v0.0.0-20220909164309-bea034e7d591/go.mod h1:YDH+HFinaLZZlnHAfSS6ZXJJ9M9t4Dl22yv3iI2vPwk=
@ -2881,6 +2886,7 @@ golang.org/x/oauth2 v0.0.0-20211005180243-6b3c2da341f1/go.mod h1:KelEdhl1UZF7XfJ
golang.org/x/oauth2 v0.0.0-20211104180415-d3ed0bb246c8/go.mod h1:KelEdhl1UZF7XfJ4dDtk6s++YSgaE7mD/BuKKDLBl4A=
golang.org/x/oauth2 v0.0.0-20220223155221-ee480838109b/go.mod h1:DAh4E804XQdzx2j+YRIaUnCqCV2RuMz24cGBJ5QYIrc=
golang.org/x/oauth2 v0.0.0-20220309155454-6242fa91716a/go.mod h1:DAh4E804XQdzx2j+YRIaUnCqCV2RuMz24cGBJ5QYIrc=
golang.org/x/oauth2 v0.0.0-20220411215720-9780585627b5/go.mod h1:DAh4E804XQdzx2j+YRIaUnCqCV2RuMz24cGBJ5QYIrc=
golang.org/x/oauth2 v0.0.0-20220608161450-d0670ef3b1eb h1:8tDJ3aechhddbdPAxpycgXHJRMLpk/Ab+aa4OgdN5/g=
golang.org/x/oauth2 v0.0.0-20220608161450-d0670ef3b1eb/go.mod h1:jaDAt6Dkxork7LmZnYtzbRWj0W47D86a3TGe0YHBvmE=
golang.org/x/sync v0.0.0-20180314180146-1d60e4601c6f/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
@ -3069,6 +3075,7 @@ golang.org/x/sys v0.0.0-20220328115105-d36c6a25d886/go.mod h1:oPkhp1MJrh7nUepCBc
golang.org/x/sys v0.0.0-20220330033206-e17cdc41300f/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
golang.org/x/sys v0.0.0-20220412211240-33da011f77ad/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
golang.org/x/sys v0.0.0-20220422013727-9388b58f7150/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
golang.org/x/sys v0.0.0-20220503163025-988cb79eb6c6/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
golang.org/x/sys v0.0.0-20220520151302-bc2c85ada10a/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
golang.org/x/sys v0.0.0-20220722155257-8c9f86f7a55f/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
golang.org/x/sys v0.0.0-20220728004956-3c1f35247d10 h1:WIoqL4EROvwiPdUtaip4VcDdpZ4kha7wBWZrbVKCIZg=
@ -3108,12 +3115,10 @@ golang.org/x/time v0.0.0-20220609170525-579cf78fd858 h1:Dpdu/EMxGMFgq0CeYMh4fazT
golang.org/x/time v0.0.0-20220609170525-579cf78fd858/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ=
golang.org/x/tools v0.0.0-20180221164845-07fd8470d635/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ=
golang.org/x/tools v0.0.0-20180525024113-a5b4c53f6e8b/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ=
golang.org/x/tools v0.0.0-20180828015842-6cd1fcedba52/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ=
golang.org/x/tools v0.0.0-20180917221912-90fa682c2a6e/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ=
golang.org/x/tools v0.0.0-20181011042414-1f849cf54d09/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ=
golang.org/x/tools v0.0.0-20181030221726-6c7e314b6563/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ=
golang.org/x/tools v0.0.0-20181112210238-4b1f3b6b1646/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ=
golang.org/x/tools v0.0.0-20190114222345-bf090417da8b/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ=
golang.org/x/tools v0.0.0-20190125232054-d66bd3c5d5a6/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ=
golang.org/x/tools v0.0.0-20190206041539-40960b6deb8e/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ=
golang.org/x/tools v0.0.0-20190226205152-f727befe758c/go.mod h1:9Yl7xja0Znq3iFh3HoIrodX9oNMXvdceNzlUR8zjMvY=
@ -3209,6 +3214,7 @@ golang.org/x/tools v0.1.2/go.mod h1:o0xws9oXOQQZyjljx8fwUC0k7L1pTE6eaCbjGeHmOkk=
golang.org/x/tools v0.1.3/go.mod h1:o0xws9oXOQQZyjljx8fwUC0k7L1pTE6eaCbjGeHmOkk=
golang.org/x/tools v0.1.4/go.mod h1:o0xws9oXOQQZyjljx8fwUC0k7L1pTE6eaCbjGeHmOkk=
golang.org/x/tools v0.1.5/go.mod h1:o0xws9oXOQQZyjljx8fwUC0k7L1pTE6eaCbjGeHmOkk=
golang.org/x/tools v0.1.6-0.20210726203631-07bc1bf47fb2/go.mod h1:o0xws9oXOQQZyjljx8fwUC0k7L1pTE6eaCbjGeHmOkk=
golang.org/x/tools v0.1.10/go.mod h1:Uh6Zz+xoGYZom868N8YTex3t7RhtHDBrE8Gzo9bV56E=
golang.org/x/tools v0.1.12 h1:VveCTK38A2rkS8ZqFY25HIDFscX5X9OoEhJd3quQmXU=
golang.org/x/tools v0.1.12/go.mod h1:hNGJHUnrk76NpqgfD5Aqm5Crs+Hm0VOH/i9J2+nxYbc=
@ -3281,9 +3287,10 @@ google.golang.org/api v0.68.0/go.mod h1:sOM8pTpwgflXRhz+oC8H2Dr+UcbMqkPPWNJo88Q7
google.golang.org/api v0.69.0/go.mod h1:boanBiw+h5c3s+tBPgEzLDRHfFLWV0qXxRHz3ws7C80=
google.golang.org/api v0.70.0/go.mod h1:Bs4ZM2HGifEvXwd50TtW70ovgJffJYw2oRCOFU/SkfA=
google.golang.org/api v0.71.0/go.mod h1:4PyU6e6JogV1f9eA4voyrTY2batOLdgZ5qZ5HOCc4j8=
google.golang.org/api v0.74.0 h1:ExR2D+5TYIrMphWgs5JCgwRhEDlPDXXrLwHHMgPHTXE=
google.golang.org/api v0.74.0/go.mod h1:ZpfMZOVRMywNyvJFeqL9HRWBgAuRfSjJFpe9QtRRyDs=
google.golang.org/appengine v1.1.0/go.mod h1:EbEs0AVv82hx2wNQdGPgUI5lhzA/G0D9YwlJXL52JkM=
google.golang.org/api v0.75.0/go.mod h1:pU9QmyHLnzlpar1Mjt4IbapUCy8J+6HD6GeELN69ljA=
google.golang.org/api v0.80.0 h1:IQWaGVCYnsm4MO3hh+WtSXMzMzuyFx/fuR8qkN3A0Qo=
google.golang.org/api v0.80.0/go.mod h1:xY3nI94gbvBrE0J6NHXhxOmW97HG7Khjkku6AFB3Hyg=
google.golang.org/appengine v1.2.0/go.mod h1:xpcJRLb0r/rnEns0DIKYYv+WjYCduHsrkT7/EB5XEv4=
google.golang.org/appengine v1.3.0/go.mod h1:xpcJRLb0r/rnEns0DIKYYv+WjYCduHsrkT7/EB5XEv4=
google.golang.org/appengine v1.4.0/go.mod h1:xpcJRLb0r/rnEns0DIKYYv+WjYCduHsrkT7/EB5XEv4=
@ -3295,154 +3302,8 @@ google.golang.org/appengine v1.6.6/go.mod h1:8WjMMxjGQR8xUklV/ARdw2HLXBOI7O7uCID
google.golang.org/appengine v1.6.7 h1:FZR1q0exgwxzPzp/aF+VccGrSfxfPpkBqjIIEq3ru6c=
google.golang.org/appengine v1.6.7/go.mod h1:8WjMMxjGQR8xUklV/ARdw2HLXBOI7O7uCIDZVag1xfc=
google.golang.org/cloud v0.0.0-20151119220103-975617b05ea8/go.mod h1:0H1ncTHf11KCFhTc/+EFRbzSCOZx+VUbRMk55Yv5MYk=
google.golang.org/genproto v0.0.0-20170818010345-ee236bd376b0/go.mod h1:JiN7NxoALGmiZfu7CAH4rXhgtRTLTxftemlI0sWmxmc=
google.golang.org/genproto v0.0.0-20180518175338-11a468237815/go.mod h1:JiN7NxoALGmiZfu7CAH4rXhgtRTLTxftemlI0sWmxmc=
google.golang.org/genproto v0.0.0-20180608181217-32ee49c4dd80/go.mod h1:JiN7NxoALGmiZfu7CAH4rXhgtRTLTxftemlI0sWmxmc=
google.golang.org/genproto v0.0.0-20180817151627-c66870c02cf8/go.mod h1:JiN7NxoALGmiZfu7CAH4rXhgtRTLTxftemlI0sWmxmc=
google.golang.org/genproto v0.0.0-20190307195333-5fe7a883aa19/go.mod h1:VzzqZJRnGkLBvHegQrXjBqPurQTc5/KpmUdxsrq26oE=
google.golang.org/genproto v0.0.0-20190404172233-64821d5d2107/go.mod h1:VzzqZJRnGkLBvHegQrXjBqPurQTc5/KpmUdxsrq26oE=
google.golang.org/genproto v0.0.0-20190418145605-e7d98fc518a7/go.mod h1:VzzqZJRnGkLBvHegQrXjBqPurQTc5/KpmUdxsrq26oE=
google.golang.org/genproto v0.0.0-20190425155659-357c62f0e4bb/go.mod h1:VzzqZJRnGkLBvHegQrXjBqPurQTc5/KpmUdxsrq26oE=
google.golang.org/genproto v0.0.0-20190502173448-54afdca5d873/go.mod h1:VzzqZJRnGkLBvHegQrXjBqPurQTc5/KpmUdxsrq26oE=
google.golang.org/genproto v0.0.0-20190522204451-c2c4e71fbf69/go.mod h1:z3L6/3dTEVtUr6QSP8miRzeRqwQOioJ9I66odjN4I7s=
google.golang.org/genproto v0.0.0-20190530194941-fb225487d101/go.mod h1:z3L6/3dTEVtUr6QSP8miRzeRqwQOioJ9I66odjN4I7s=
google.golang.org/genproto v0.0.0-20190716160619-c506a9f90610/go.mod h1:DMBHOl98Agz4BDEuKkezgsaosCRResVns1a3J2ZsMNc=
google.golang.org/genproto v0.0.0-20190801165951-fa694d86fc64/go.mod h1:DMBHOl98Agz4BDEuKkezgsaosCRResVns1a3J2ZsMNc=
google.golang.org/genproto v0.0.0-20190819201941-24fa4b261c55/go.mod h1:DMBHOl98Agz4BDEuKkezgsaosCRResVns1a3J2ZsMNc=
google.golang.org/genproto v0.0.0-20190911173649-1774047e7e51/go.mod h1:IbNlFCBrqXvoKpeg0TB2l7cyZUmoaFKYIwrEpbDKLA8=
google.golang.org/genproto v0.0.0-20190927181202-20e1ac93f88c/go.mod h1:IbNlFCBrqXvoKpeg0TB2l7cyZUmoaFKYIwrEpbDKLA8=
google.golang.org/genproto v0.0.0-20191028173616-919d9bdd9fe6/go.mod h1:n3cpQtvxv34hfy77yVDNjmbRyujviMdxYliBSkLhpCc=
google.golang.org/genproto v0.0.0-20191108220845-16a3f7862a1a/go.mod h1:n3cpQtvxv34hfy77yVDNjmbRyujviMdxYliBSkLhpCc=
google.golang.org/genproto v0.0.0-20191115194625-c23dd37a84c9/go.mod h1:n3cpQtvxv34hfy77yVDNjmbRyujviMdxYliBSkLhpCc=
google.golang.org/genproto v0.0.0-20191216164720-4f79533eabd1/go.mod h1:n3cpQtvxv34hfy77yVDNjmbRyujviMdxYliBSkLhpCc=
google.golang.org/genproto v0.0.0-20191230161307-f3c370f40bfb/go.mod h1:n3cpQtvxv34hfy77yVDNjmbRyujviMdxYliBSkLhpCc=
google.golang.org/genproto v0.0.0-20200108215221-bd8f9a0ef82f/go.mod h1:n3cpQtvxv34hfy77yVDNjmbRyujviMdxYliBSkLhpCc=
google.golang.org/genproto v0.0.0-20200115191322-ca5a22157cba/go.mod h1:n3cpQtvxv34hfy77yVDNjmbRyujviMdxYliBSkLhpCc=
google.golang.org/genproto v0.0.0-20200117163144-32f20d992d24/go.mod h1:n3cpQtvxv34hfy77yVDNjmbRyujviMdxYliBSkLhpCc=
google.golang.org/genproto v0.0.0-20200122232147-0452cf42e150/go.mod h1:n3cpQtvxv34hfy77yVDNjmbRyujviMdxYliBSkLhpCc=
google.golang.org/genproto v0.0.0-20200204135345-fa8e72b47b90/go.mod h1:GmwEX6Z4W5gMy59cAlVYjN9JhxgbQH6Gn+gFDQe2lzA=
google.golang.org/genproto v0.0.0-20200212174721-66ed5ce911ce/go.mod h1:55QSHmfGQM9UVYDPBsyGGes0y52j32PQ3BqQfXhyH3c=
google.golang.org/genproto v0.0.0-20200224152610-e50cd9704f63/go.mod h1:55QSHmfGQM9UVYDPBsyGGes0y52j32PQ3BqQfXhyH3c=
google.golang.org/genproto v0.0.0-20200228133532-8c2c7df3a383/go.mod h1:55QSHmfGQM9UVYDPBsyGGes0y52j32PQ3BqQfXhyH3c=
google.golang.org/genproto v0.0.0-20200305110556-506484158171/go.mod h1:55QSHmfGQM9UVYDPBsyGGes0y52j32PQ3BqQfXhyH3c=
google.golang.org/genproto v0.0.0-20200312145019-da6875a35672/go.mod h1:55QSHmfGQM9UVYDPBsyGGes0y52j32PQ3BqQfXhyH3c=
google.golang.org/genproto v0.0.0-20200331122359-1ee6d9798940/go.mod h1:55QSHmfGQM9UVYDPBsyGGes0y52j32PQ3BqQfXhyH3c=
google.golang.org/genproto v0.0.0-20200420144010-e5e8543f8aeb/go.mod h1:55QSHmfGQM9UVYDPBsyGGes0y52j32PQ3BqQfXhyH3c=
google.golang.org/genproto v0.0.0-20200423170343-7949de9c1215/go.mod h1:55QSHmfGQM9UVYDPBsyGGes0y52j32PQ3BqQfXhyH3c=
google.golang.org/genproto v0.0.0-20200430143042-b979b6f78d84/go.mod h1:55QSHmfGQM9UVYDPBsyGGes0y52j32PQ3BqQfXhyH3c=
google.golang.org/genproto v0.0.0-20200511104702-f5ebc3bea380/go.mod h1:55QSHmfGQM9UVYDPBsyGGes0y52j32PQ3BqQfXhyH3c=
google.golang.org/genproto v0.0.0-20200513103714-09dca8ec2884/go.mod h1:55QSHmfGQM9UVYDPBsyGGes0y52j32PQ3BqQfXhyH3c=
google.golang.org/genproto v0.0.0-20200515170657-fc4c6c6a6587/go.mod h1:YsZOwe1myG/8QRHRsmBRE1LrgQY60beZKjly0O1fX9U=
google.golang.org/genproto v0.0.0-20200526211855-cb27e3aa2013/go.mod h1:NbSheEEYHJ7i3ixzK3sjbqSGDJWnxyFXZblF3eUsNvo=
google.golang.org/genproto v0.0.0-20200603110839-e855014d5736/go.mod h1:jDfRM7FcilCzHH/e9qn6dsT145K34l5v+OpcnNgKAAA=
google.golang.org/genproto v0.0.0-20200618031413-b414f8b61790/go.mod h1:jDfRM7FcilCzHH/e9qn6dsT145K34l5v+OpcnNgKAAA=
google.golang.org/genproto v0.0.0-20200710124503-20a17af7bd0e/go.mod h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no=
google.golang.org/genproto v0.0.0-20200724131911-43cab4749ae7/go.mod h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no=
google.golang.org/genproto v0.0.0-20200729003335-053ba62fc06f/go.mod h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no=
google.golang.org/genproto v0.0.0-20200804131852-c06518451d9c/go.mod h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no=
google.golang.org/genproto v0.0.0-20200815001618-f69a88009b70/go.mod h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no=
google.golang.org/genproto v0.0.0-20200825200019-8632dd797987/go.mod h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no=
google.golang.org/genproto v0.0.0-20200904004341-0bd0a958aa1d/go.mod h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no=
google.golang.org/genproto v0.0.0-20200911024640-645f7a48b24f/go.mod h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no=
google.golang.org/genproto v0.0.0-20201019141844-1ed22bb0c154/go.mod h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no=
google.golang.org/genproto v0.0.0-20201109203340-2640f1f9cdfb/go.mod h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no=
google.golang.org/genproto v0.0.0-20201110150050-8816d57aaa9a/go.mod h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no=
google.golang.org/genproto v0.0.0-20201201144952-b05cb90ed32e/go.mod h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no=
google.golang.org/genproto v0.0.0-20201210142538-e3217bee35cc/go.mod h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no=
google.golang.org/genproto v0.0.0-20201214200347-8c77b98c765d/go.mod h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no=
google.golang.org/genproto v0.0.0-20210222152913-aa3ee6e6a81c/go.mod h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no=
google.golang.org/genproto v0.0.0-20210303154014-9728d6b83eeb/go.mod h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no=
google.golang.org/genproto v0.0.0-20210310155132-4ce2db91004e/go.mod h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no=
google.golang.org/genproto v0.0.0-20210312152112-fc591d9ea70f/go.mod h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no=
google.golang.org/genproto v0.0.0-20210319143718-93e7006c17a6/go.mod h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no=
google.golang.org/genproto v0.0.0-20210402141018-6c239bbf2bb1/go.mod h1:9lPAdzaEmUacj36I+k7YKbEc5CXzPIeORRgDAUOu28A=
google.golang.org/genproto v0.0.0-20210429181445-86c259c2b4ab/go.mod h1:P3QM42oQyzQSnHPnZ/vqoCdDmzH28fzWByN9asMeM8A=
google.golang.org/genproto v0.0.0-20210513213006-bf773b8c8384/go.mod h1:P3QM42oQyzQSnHPnZ/vqoCdDmzH28fzWByN9asMeM8A=
google.golang.org/genproto v0.0.0-20210517163617-5e0236093d7a/go.mod h1:P3QM42oQyzQSnHPnZ/vqoCdDmzH28fzWByN9asMeM8A=
google.golang.org/genproto v0.0.0-20210602131652-f16073e35f0c/go.mod h1:UODoCrxHCcBojKKwX1terBiRUaqAsFqJiF615XL43r0=
google.golang.org/genproto v0.0.0-20210604141403-392c879c8b08/go.mod h1:UODoCrxHCcBojKKwX1terBiRUaqAsFqJiF615XL43r0=
google.golang.org/genproto v0.0.0-20210608205507-b6d2f5bf0d7d/go.mod h1:UODoCrxHCcBojKKwX1terBiRUaqAsFqJiF615XL43r0=
google.golang.org/genproto v0.0.0-20210624195500-8bfb893ecb84/go.mod h1:SzzZ/N+nwJDaO1kznhnlzqS8ocJICar6hYhVyhi++24=
google.golang.org/genproto v0.0.0-20210630183607-d20f26d13c79/go.mod h1:yiaVoXHpRzHGyxV3o4DktVWY4mSUErTKaeEOq6C3t3U=
google.golang.org/genproto v0.0.0-20210713002101-d411969a0d9a/go.mod h1:AxrInvYm1dci+enl5hChSFPOmmUF1+uAa/UsgNRWd7k=
google.golang.org/genproto v0.0.0-20210716133855-ce7ef5c701ea/go.mod h1:AxrInvYm1dci+enl5hChSFPOmmUF1+uAa/UsgNRWd7k=
google.golang.org/genproto v0.0.0-20210728212813-7823e685a01f/go.mod h1:ob2IJxKrgPT52GcgX759i1sleT07tiKowYBGbczaW48=
google.golang.org/genproto v0.0.0-20210805201207-89edb61ffb67/go.mod h1:ob2IJxKrgPT52GcgX759i1sleT07tiKowYBGbczaW48=
google.golang.org/genproto v0.0.0-20210813162853-db860fec028c/go.mod h1:cFeNkxwySK631ADgubI+/XFU/xp8FD5KIVV4rj8UC5w=
google.golang.org/genproto v0.0.0-20210821163610-241b8fcbd6c8/go.mod h1:eFjDcFEctNawg4eG61bRv87N7iHBWyVhJu7u1kqDUXY=
google.golang.org/genproto v0.0.0-20210828152312-66f60bf46e71/go.mod h1:eFjDcFEctNawg4eG61bRv87N7iHBWyVhJu7u1kqDUXY=
google.golang.org/genproto v0.0.0-20210831024726-fe130286e0e2/go.mod h1:eFjDcFEctNawg4eG61bRv87N7iHBWyVhJu7u1kqDUXY=
google.golang.org/genproto v0.0.0-20210903162649-d08c68adba83/go.mod h1:eFjDcFEctNawg4eG61bRv87N7iHBWyVhJu7u1kqDUXY=
google.golang.org/genproto v0.0.0-20210909211513-a8c4777a87af/go.mod h1:eFjDcFEctNawg4eG61bRv87N7iHBWyVhJu7u1kqDUXY=
google.golang.org/genproto v0.0.0-20210917145530-b395a37504d4/go.mod h1:eFjDcFEctNawg4eG61bRv87N7iHBWyVhJu7u1kqDUXY=
google.golang.org/genproto v0.0.0-20210921142501-181ce0d877f6/go.mod h1:5CzLGKJ67TSI2B9POpiiyGha0AjJvZIUgRMt1dSmuhc=
google.golang.org/genproto v0.0.0-20210924002016-3dee208752a0/go.mod h1:5CzLGKJ67TSI2B9POpiiyGha0AjJvZIUgRMt1dSmuhc=
google.golang.org/genproto v0.0.0-20211008145708-270636b82663/go.mod h1:5CzLGKJ67TSI2B9POpiiyGha0AjJvZIUgRMt1dSmuhc=
google.golang.org/genproto v0.0.0-20211018162055-cf77aa76bad2/go.mod h1:5CzLGKJ67TSI2B9POpiiyGha0AjJvZIUgRMt1dSmuhc=
google.golang.org/genproto v0.0.0-20211028162531-8db9c33dc351/go.mod h1:5CzLGKJ67TSI2B9POpiiyGha0AjJvZIUgRMt1dSmuhc=
google.golang.org/genproto v0.0.0-20211118181313-81c1377c94b1/go.mod h1:5CzLGKJ67TSI2B9POpiiyGha0AjJvZIUgRMt1dSmuhc=
google.golang.org/genproto v0.0.0-20211206160659-862468c7d6e0/go.mod h1:5CzLGKJ67TSI2B9POpiiyGha0AjJvZIUgRMt1dSmuhc=
google.golang.org/genproto v0.0.0-20211208223120-3a66f561d7aa/go.mod h1:5CzLGKJ67TSI2B9POpiiyGha0AjJvZIUgRMt1dSmuhc=
google.golang.org/genproto v0.0.0-20211221195035-429b39de9b1c/go.mod h1:5CzLGKJ67TSI2B9POpiiyGha0AjJvZIUgRMt1dSmuhc=
google.golang.org/genproto v0.0.0-20211223182754-3ac035c7e7cb/go.mod h1:5CzLGKJ67TSI2B9POpiiyGha0AjJvZIUgRMt1dSmuhc=
google.golang.org/genproto v0.0.0-20220111164026-67b88f271998/go.mod h1:5CzLGKJ67TSI2B9POpiiyGha0AjJvZIUgRMt1dSmuhc=
google.golang.org/genproto v0.0.0-20220114231437-d2e6a121cae0/go.mod h1:5CzLGKJ67TSI2B9POpiiyGha0AjJvZIUgRMt1dSmuhc=
google.golang.org/genproto v0.0.0-20220126215142-9970aeb2e350/go.mod h1:5CzLGKJ67TSI2B9POpiiyGha0AjJvZIUgRMt1dSmuhc=
google.golang.org/genproto v0.0.0-20220201184016-50beb8ab5c44/go.mod h1:5CzLGKJ67TSI2B9POpiiyGha0AjJvZIUgRMt1dSmuhc=
google.golang.org/genproto v0.0.0-20220204002441-d6cc3cc0770e/go.mod h1:5CzLGKJ67TSI2B9POpiiyGha0AjJvZIUgRMt1dSmuhc=
google.golang.org/genproto v0.0.0-20220207164111-0872dc986b00/go.mod h1:5CzLGKJ67TSI2B9POpiiyGha0AjJvZIUgRMt1dSmuhc=
google.golang.org/genproto v0.0.0-20220211171837-173942840c17/go.mod h1:kGP+zUP2Ddo0ayMi4YuN7C3WZyJvGLZRh8Z5wnAqvEI=
google.golang.org/genproto v0.0.0-20220216160803-4663080d8bc8/go.mod h1:kGP+zUP2Ddo0ayMi4YuN7C3WZyJvGLZRh8Z5wnAqvEI=
google.golang.org/genproto v0.0.0-20220218161850-94dd64e39d7c/go.mod h1:kGP+zUP2Ddo0ayMi4YuN7C3WZyJvGLZRh8Z5wnAqvEI=
google.golang.org/genproto v0.0.0-20220222213610-43724f9ea8cf/go.mod h1:kGP+zUP2Ddo0ayMi4YuN7C3WZyJvGLZRh8Z5wnAqvEI=
google.golang.org/genproto v0.0.0-20220304144024-325a89244dc8/go.mod h1:kGP+zUP2Ddo0ayMi4YuN7C3WZyJvGLZRh8Z5wnAqvEI=
google.golang.org/genproto v0.0.0-20220310185008-1973136f34c6/go.mod h1:kGP+zUP2Ddo0ayMi4YuN7C3WZyJvGLZRh8Z5wnAqvEI=
google.golang.org/genproto v0.0.0-20220324131243-acbaeb5b85eb/go.mod h1:hAL49I2IFola2sVEjAn7MEwsja0xp51I0tlGAf9hz4E=
google.golang.org/genproto v0.0.0-20220401170504-314d38edb7de/go.mod h1:8w6bsBMX6yCPbAVTeqQHvzxW0EIFigd5lZyahWgyfDo=
google.golang.org/genproto v0.0.0-20220421151946-72621c1f0bd3 h1:SeX3QUcBj3fciwnfPT9kt5gBhFy/FCZtYZ+I/RB8agc=
google.golang.org/genproto v0.0.0-20220421151946-72621c1f0bd3/go.mod h1:8w6bsBMX6yCPbAVTeqQHvzxW0EIFigd5lZyahWgyfDo=
google.golang.org/grpc v0.0.0-20160317175043-d3ddb4469d5a/go.mod h1:yo6s7OP7yaDglbqo1J04qKzAhqBH6lvTonzMVmEdcZw=
google.golang.org/grpc v1.8.0/go.mod h1:yo6s7OP7yaDglbqo1J04qKzAhqBH6lvTonzMVmEdcZw=
google.golang.org/grpc v1.12.0/go.mod h1:yo6s7OP7yaDglbqo1J04qKzAhqBH6lvTonzMVmEdcZw=
google.golang.org/grpc v1.14.0/go.mod h1:yo6s7OP7yaDglbqo1J04qKzAhqBH6lvTonzMVmEdcZw=
google.golang.org/grpc v1.17.0/go.mod h1:6QZJwpn2B+Zp71q/5VxRsJ6NXXVCE5NRUHRo+f3cWCs=
google.golang.org/grpc v1.18.0/go.mod h1:6QZJwpn2B+Zp71q/5VxRsJ6NXXVCE5NRUHRo+f3cWCs=
google.golang.org/grpc v1.19.0/go.mod h1:mqu4LbDTu4XGKhr4mRzUsmM4RtVoemTSY81AxZiDr8c=
google.golang.org/grpc v1.20.0/go.mod h1:chYK+tFQF0nDUGJgXMSgLCQk3phJEuONr2DCgLDdAQM=
google.golang.org/grpc v1.20.1/go.mod h1:10oTOabMzJvdu6/UiuZezV6QK5dSlG84ov/aaiqXj38=
google.golang.org/grpc v1.21.0/go.mod h1:oYelfM1adQP15Ek0mdvEgi9Df8B9CZIaU1084ijfRaM=
google.golang.org/grpc v1.21.1/go.mod h1:oYelfM1adQP15Ek0mdvEgi9Df8B9CZIaU1084ijfRaM=
google.golang.org/grpc v1.22.0/go.mod h1:Y5yQAOtifL1yxbo5wqy6BxZv8vAUGQwXBOALyacEbxg=
google.golang.org/grpc v1.22.1/go.mod h1:Y5yQAOtifL1yxbo5wqy6BxZv8vAUGQwXBOALyacEbxg=
google.golang.org/grpc v1.23.0/go.mod h1:Y5yQAOtifL1yxbo5wqy6BxZv8vAUGQwXBOALyacEbxg=
google.golang.org/grpc v1.23.1/go.mod h1:Y5yQAOtifL1yxbo5wqy6BxZv8vAUGQwXBOALyacEbxg=
google.golang.org/grpc v1.24.0/go.mod h1:XDChyiUovWa60DnaeDeZmSW86xtLtjtZbwvSiRnRtcA=
google.golang.org/grpc v1.25.1/go.mod h1:c3i+UQWmh7LiEpx4sFZnkU36qjEYZ0imhYfXVyQciAY=
google.golang.org/grpc v1.26.0/go.mod h1:qbnxyOmOxrQa7FizSgH+ReBfzJrCY1pSN7KXBS8abTk=
google.golang.org/grpc v1.27.0/go.mod h1:qbnxyOmOxrQa7FizSgH+ReBfzJrCY1pSN7KXBS8abTk=
google.golang.org/grpc v1.27.1/go.mod h1:qbnxyOmOxrQa7FizSgH+ReBfzJrCY1pSN7KXBS8abTk=
google.golang.org/grpc v1.28.0/go.mod h1:rpkK4SK4GF4Ach/+MFLZUBavHOvF2JJB5uozKKal+60=
google.golang.org/grpc v1.29.0/go.mod h1:itym6AZVZYACWQqET3MqgPpjcuV5QH3BxFS3IjizoKk=
google.golang.org/grpc v1.29.1/go.mod h1:itym6AZVZYACWQqET3MqgPpjcuV5QH3BxFS3IjizoKk=
google.golang.org/grpc v1.30.0/go.mod h1:N36X2cJ7JwdamYAgDz+s+rVMFjt3numwzf/HckM8pak=
google.golang.org/grpc v1.31.0/go.mod h1:N36X2cJ7JwdamYAgDz+s+rVMFjt3numwzf/HckM8pak=
google.golang.org/grpc v1.31.1/go.mod h1:N36X2cJ7JwdamYAgDz+s+rVMFjt3numwzf/HckM8pak=
google.golang.org/grpc v1.32.0/go.mod h1:N36X2cJ7JwdamYAgDz+s+rVMFjt3numwzf/HckM8pak=
google.golang.org/grpc v1.33.1/go.mod h1:fr5YgcSWrqhRRxogOsw7RzIpsmvOZ6IcH4kBYTpR3n0=
google.golang.org/grpc v1.33.2/go.mod h1:JMHMWHQWaTccqQQlmk3MJZS+GWXOdAesneDmEnv2fbc=
google.golang.org/grpc v1.34.0/go.mod h1:WotjhfgOW/POjDeRt8vscBtXq+2VjORFy659qA51WJ8=
google.golang.org/grpc v1.35.0/go.mod h1:qjiiYl8FncCW8feJPdyg3v6XW24KsRHe+dy9BAGRRjU=
google.golang.org/grpc v1.36.0/go.mod h1:qjiiYl8FncCW8feJPdyg3v6XW24KsRHe+dy9BAGRRjU=
google.golang.org/grpc v1.36.1/go.mod h1:qjiiYl8FncCW8feJPdyg3v6XW24KsRHe+dy9BAGRRjU=
google.golang.org/grpc v1.37.0/go.mod h1:NREThFqKR1f3iQ6oBuvc5LadQuXVGo9rkm5ZGrQdJfM=
google.golang.org/grpc v1.37.1/go.mod h1:NREThFqKR1f3iQ6oBuvc5LadQuXVGo9rkm5ZGrQdJfM=
google.golang.org/grpc v1.38.0/go.mod h1:NREThFqKR1f3iQ6oBuvc5LadQuXVGo9rkm5ZGrQdJfM=
google.golang.org/grpc v1.39.0/go.mod h1:PImNr+rS9TWYb2O4/emRugxiyHZ5JyHW5F+RPnDzfrE=
google.golang.org/grpc v1.39.1/go.mod h1:PImNr+rS9TWYb2O4/emRugxiyHZ5JyHW5F+RPnDzfrE=
google.golang.org/grpc v1.40.0/go.mod h1:ogyxbiOoUXAkP+4+xa6PZSE9DZgIHtSpzjDTB9KAK34=
google.golang.org/grpc v1.40.1/go.mod h1:ogyxbiOoUXAkP+4+xa6PZSE9DZgIHtSpzjDTB9KAK34=
google.golang.org/grpc v1.42.0/go.mod h1:k+4IHHFw41K8+bbowsex27ge2rCb65oeWqe4jJ590SU=
google.golang.org/grpc v1.44.0/go.mod h1:k+4IHHFw41K8+bbowsex27ge2rCb65oeWqe4jJ590SU=
google.golang.org/grpc v1.45.0 h1:NEpgUqV3Z+ZjkqMsxMg11IaDrXY4RY6CQukSGK0uI1M=
google.golang.org/grpc v1.45.0/go.mod h1:lN7owxKUQEqMfSyQikvvk5tf/6zMPsrK+ONuO11+0rQ=
google.golang.org/grpc/cmd/protoc-gen-go-grpc v0.0.0-20200910201057-6591123024b3/go.mod h1:6Kw0yEErY5E/yWrBtf03jp27GLLJujG4z/JK95pnjjw=
@ -3454,7 +3315,6 @@ google.golang.org/protobuf v1.20.1-0.20200309200217-e05f789c0967/go.mod h1:A+miE
google.golang.org/protobuf v1.21.0/go.mod h1:47Nbq4nVaFHyn7ilMalzfO3qCViNmqZ2kzikPIcrTAo=
google.golang.org/protobuf v1.22.0/go.mod h1:EGpADcykh3NcUnDUJcl1+ZksZNG86OlYog2l/sGQquU=
google.golang.org/protobuf v1.23.0/go.mod h1:EGpADcykh3NcUnDUJcl1+ZksZNG86OlYog2l/sGQquU=
google.golang.org/protobuf v1.23.1-0.20200526195155-81db48ad09cc/go.mod h1:EGpADcykh3NcUnDUJcl1+ZksZNG86OlYog2l/sGQquU=
google.golang.org/protobuf v1.24.0/go.mod h1:r/3tXBNzIEhYS9I1OUVjXDlt8tc493IdKGjtUeSXeh4=
google.golang.org/protobuf v1.25.0/go.mod h1:9JNX74DMeImyA3h4bdi1ymwjUzf21/xIlbajtzgsN7c=
google.golang.org/protobuf v1.26.0-rc.1/go.mod h1:jlhhOSvTdKEhbULTjvd4ARK9grFBp09yW+WbY/TyQbw=
@ -3531,7 +3391,6 @@ gotest.tools v2.2.0+incompatible h1:VsBPFP1AI068pPrMxtb/S8Zkgf9xEmTLJjfM+P5UIEo=
gotest.tools v2.2.0+incompatible/go.mod h1:DsYFclhRJ6vuDpmuTbkuFWG+y2sxOXAzmJt81HFBacw=
gotest.tools/v3 v3.0.2/go.mod h1:3SzNCllyD9/Y+b5r9JIKQ474KzkZyqLqEfYqMsX94Bk=
gotest.tools/v3 v3.0.3/go.mod h1:Z7Lb0S5l+klDB31fvDQX8ss/FlKDxtlFlw3Oa8Ymbl8=
honnef.co/go/tools v0.0.0-20180728063816-88497007e858/go.mod h1:rf3lG4BRIbNafJWhAfAdb/ePZxsR/4RtNHQocxwk9r4=
honnef.co/go/tools v0.0.0-20190102054323-c2f93a96b099/go.mod h1:rf3lG4BRIbNafJWhAfAdb/ePZxsR/4RtNHQocxwk9r4=
honnef.co/go/tools v0.0.0-20190106161140-3f1c8253044a/go.mod h1:rf3lG4BRIbNafJWhAfAdb/ePZxsR/4RtNHQocxwk9r4=
honnef.co/go/tools v0.0.0-20190418001031-e561f6794a2a/go.mod h1:rf3lG4BRIbNafJWhAfAdb/ePZxsR/4RtNHQocxwk9r4=
@ -3559,8 +3418,7 @@ k8s.io/api v0.20.6/go.mod h1:X9e8Qag6JV/bL5G6bU8sdVRltWKmdHsFUGS3eVndqE8=
k8s.io/api v0.21.0/go.mod h1:+YbrhBBGgsxbF6o6Kj4KJPJnBmAKuXDeS3E18bgHNVU=
k8s.io/api v0.21.1/go.mod h1:FstGROTmsSHBarKc8bylzXih8BLNYTiS3TZcsoEDg2s=
k8s.io/api v0.22.1/go.mod h1:bh13rkTp3F1XEaLGykbyRD2QaTTzPm0e/BMd8ptFONY=
k8s.io/api v0.22.5 h1:xk7C+rMjF/EGELiD560jdmwzrB788mfcHiNbMQLIVI8=
k8s.io/api v0.22.5/go.mod h1:mEhXyLaSD1qTOf40rRiKXkc+2iCem09rWLlFwhCEiAs=
k8s.io/api v0.24.0 h1:J0hann2hfxWr1hinZIDefw7Q96wmCBx6SSB8IY0MdDg=
k8s.io/apimachinery v0.0.0-20181201231028-18a5ff3097b4/go.mod h1:ccL7Eh7zubPUSh9A3USN90/OzHNSVN6zxzde07TDCL0=
k8s.io/apimachinery v0.0.0-20190809020650-423f5d784010/go.mod h1:Waf/xTS2FGRrgXCkO5FP3XxTOWh0qLf2QhL1qFZZ/R8=
k8s.io/apimachinery v0.0.0-20191115015347-3c7067801da2/go.mod h1:dXFS2zaQR8fyzuvRdJDHw2Aerij/yVGJSre0bZQSVJA=
@ -3579,8 +3437,7 @@ k8s.io/apimachinery v0.20.6/go.mod h1:ejZXtW1Ra6V1O5H8xPBGz+T3+4gfkTCeExAHKU57MA
k8s.io/apimachinery v0.21.0/go.mod h1:jbreFvJo3ov9rj7eWT7+sYiRx+qZuCYXwWT1bcDswPY=
k8s.io/apimachinery v0.21.1/go.mod h1:jbreFvJo3ov9rj7eWT7+sYiRx+qZuCYXwWT1bcDswPY=
k8s.io/apimachinery v0.22.1/go.mod h1:O3oNtNadZdeOMxHFVxOreoznohCpy0z6mocxbZr7oJ0=
k8s.io/apimachinery v0.22.5 h1:cIPwldOYm1Slq9VLBRPtEYpyhjIm1C6aAMAoENuvN9s=
k8s.io/apimachinery v0.22.5/go.mod h1:xziclGKwuuJ2RM5/rSFQSYAj0zdbci3DH8kj+WvyN0U=
k8s.io/apimachinery v0.24.0 h1:ydFCyC/DjCvFCHK5OPMKBlxayQytB8pxy8YQInd5UyQ=
k8s.io/apiserver v0.20.1/go.mod h1:ro5QHeQkgMS7ZGpvf4tSMx6bBOgPfE+f52KwvXfScaU=
k8s.io/apiserver v0.20.4/go.mod h1:Mc80thBKOyy7tbvFtB4kJv1kbdD0eIH8k8vianJcbFM=
k8s.io/apiserver v0.20.6/go.mod h1:QIJXNt6i6JB+0YQRNcS0hdRHJlMhflFmsBDeSgT1r8Q=
@ -3600,6 +3457,7 @@ k8s.io/klog v0.1.0/go.mod h1:Gq+BEi5rUBO/HRz0bTSXDUcqjScdoY3a9IHpCEIOOfk=
k8s.io/klog v0.3.0/go.mod h1:Gq+BEi5rUBO/HRz0bTSXDUcqjScdoY3a9IHpCEIOOfk=
k8s.io/klog v0.3.1/go.mod h1:Gq+BEi5rUBO/HRz0bTSXDUcqjScdoY3a9IHpCEIOOfk=
k8s.io/klog v0.4.0/go.mod h1:4Bi6QPql/J/LkTDqv7R/cd3hPo4k2DG6Ptcz060Ez5I=
k8s.io/klog v1.0.0 h1:Pt+yjF5aB1xDSVbau4VsWe+dQNzA0qv1LlXdC2dF6Q8=
k8s.io/klog v1.0.0/go.mod h1:4Bi6QPql/J/LkTDqv7R/cd3hPo4k2DG6Ptcz060Ez5I=
k8s.io/klog/v2 v2.0.0/go.mod h1:PBfzABfn139FHAV07az/IF9Wp1bkk3vpT2XSJ76fSDE=
k8s.io/klog/v2 v2.2.0/go.mod h1:Od+F08eJP+W3HUb4pSrPpgp9DGU4GzlpG/TmITuYh/Y=
@ -3609,8 +3467,7 @@ k8s.io/klog/v2 v2.5.0/go.mod h1:hy9LJ/NvuK+iVyP4Ehqva4HxZG/oXyIS3n3Jmire4Ec=
k8s.io/klog/v2 v2.8.0/go.mod h1:hy9LJ/NvuK+iVyP4Ehqva4HxZG/oXyIS3n3Jmire4Ec=
k8s.io/klog/v2 v2.9.0/go.mod h1:hy9LJ/NvuK+iVyP4Ehqva4HxZG/oXyIS3n3Jmire4Ec=
k8s.io/klog/v2 v2.10.0/go.mod h1:hy9LJ/NvuK+iVyP4Ehqva4HxZG/oXyIS3n3Jmire4Ec=
k8s.io/klog/v2 v2.30.0 h1:bUO6drIvCIsvZ/XFgfxoGFQU/a4Qkh0iAlvUR7vlHJw=
k8s.io/klog/v2 v2.30.0/go.mod h1:y1WjHnz7Dj687irZUWR/WLkLc5N1YHtjLdmgWjndZn0=
k8s.io/klog/v2 v2.60.1 h1:VW25q3bZx9uE3vvdL6M8ezOX79vA2Aq1nEWLqNQclHc=
k8s.io/kube-openapi v0.0.0-20190709113604-33be087ad058/go.mod h1:nfDlWeOsu3pUf4yWGL+ERqohP4YsZcBJXWMK+gkzOA4=
k8s.io/kube-openapi v0.0.0-20190722073852-5e22f3d471e6/go.mod h1:RZvgC8MSN6DjiMV6oIfEE9pDL9CYXokkfaCKZeHm3nc=
k8s.io/kube-openapi v0.0.0-20191107075043-30be4d16710a/go.mod h1:1TqjTSzOxsLGIKfj0lK8EeCP7K1iUG65v09OM0/WG5E=
@ -3620,15 +3477,14 @@ k8s.io/kube-openapi v0.0.0-20200805222855-6aeccd4b50c6/go.mod h1:UuqjUnNftUyPE5H
k8s.io/kube-openapi v0.0.0-20201113171705-d219536bb9fd/go.mod h1:WOJ3KddDSol4tAGcJo0Tvi+dK12EcqSLqcWsryKMpfM=
k8s.io/kube-openapi v0.0.0-20210305001622-591a79e4bda7/go.mod h1:wXW5VT87nVfh/iLV8FpR2uDvrFyomxbtb1KivDbvPTE=
k8s.io/kube-openapi v0.0.0-20210421082810-95288971da7e/go.mod h1:vHXdDvt9+2spS2Rx9ql3I8tycm3H9FDfdUoIuKCefvw=
k8s.io/kube-openapi v0.0.0-20211109043538-20434351676c/go.mod h1:vHXdDvt9+2spS2Rx9ql3I8tycm3H9FDfdUoIuKCefvw=
k8s.io/kube-openapi v0.0.0-20220328201542-3ee0da9b0b42 h1:Gii5eqf+GmIEwGNKQYQClCayuJCe2/4fZUvF7VG99sU=
k8s.io/kubernetes v1.13.0/go.mod h1:ocZa8+6APFNC2tX1DZASIbocyYT5jHzqFVsY5aoB7Jk=
k8s.io/utils v0.0.0-20190809000727-6c36bc71fc4a/go.mod h1:sZAwmy6armz5eXlNoLmJcl4F1QuKu7sr+mFQ0byX7Ew=
k8s.io/utils v0.0.0-20191114200735-6ca3b61696b6/go.mod h1:sZAwmy6armz5eXlNoLmJcl4F1QuKu7sr+mFQ0byX7Ew=
k8s.io/utils v0.0.0-20200414100711-2df71ebbae66/go.mod h1:jPW/WVKK9YHAvNhRxK0md/EJ228hCsBRufyofKtW8HA=
k8s.io/utils v0.0.0-20201110183641-67b214c5f920/go.mod h1:jPW/WVKK9YHAvNhRxK0md/EJ228hCsBRufyofKtW8HA=
k8s.io/utils v0.0.0-20210707171843-4b05e18ac7d9/go.mod h1:jPW/WVKK9YHAvNhRxK0md/EJ228hCsBRufyofKtW8HA=
k8s.io/utils v0.0.0-20210930125809-cb0fa318a74b h1:wxEMGetGMur3J1xuGLQY7GEQYg9bZxKn3tKo5k/eYcs=
k8s.io/utils v0.0.0-20210930125809-cb0fa318a74b/go.mod h1:jPW/WVKK9YHAvNhRxK0md/EJ228hCsBRufyofKtW8HA=
k8s.io/utils v0.0.0-20220210201930-3a6ce19ff2f9 h1:HNSDgDCrr/6Ly3WEGKZftiE7IY19Vz2GdbOCyI4qqhc=
nhooyr.io/websocket v1.8.7/go.mod h1:B70DZP8IakI65RVQ51MsWP/8jndNma26DVA/nFSCgW0=
rsc.io/binaryregexp v0.2.0/go.mod h1:qTv7/COck+e2FymRvadv62gMdZztPaShugOCi3I+8D8=
rsc.io/pdf v0.1.1/go.mod h1:n8OzWcQ6Sp37PL01nO98y4iUCRdTGarVfzxY20ICaU4=
@ -3636,6 +3492,7 @@ rsc.io/quote/v3 v3.1.0/go.mod h1:yEA65RcK8LyAZtP9Kv3t0HmxON59tX3rD+tICJqUlj0=
rsc.io/sampler v1.3.0/go.mod h1:T1hPZKmBbMNahiBKFy5HrXp6adAjACjK9JXDnKaTXpA=
sigs.k8s.io/apiserver-network-proxy/konnectivity-client v0.0.14/go.mod h1:LEScyzhFmoF5pso/YSeBstl57mOzx9xlU9n85RGrDQg=
sigs.k8s.io/apiserver-network-proxy/konnectivity-client v0.0.15/go.mod h1:LEScyzhFmoF5pso/YSeBstl57mOzx9xlU9n85RGrDQg=
sigs.k8s.io/json v0.0.0-20211208200746-9f7c6b3444d2 h1:kDi4JBNAsJWfz1aEXhO8Jg87JJaPNLh5tIzYHgStQ9Y=
sigs.k8s.io/structured-merge-diff v0.0.0-20190525122527-15d366b2352e h1:4Z09Hglb792X0kfOBBJUPFEyvVfQWrYT/l8h5EKA6JQ=
sigs.k8s.io/structured-merge-diff v0.0.0-20190525122527-15d366b2352e/go.mod h1:wWxsB5ozmmv/SG7nM11ayaAW51xMvak/t1r0CSlcokI=
sigs.k8s.io/structured-merge-diff/v2 v2.0.1/go.mod h1:Wb7vfKAodbKgf6tn1Kl0VvGj7mRH6DGaRcixXEJXTsE=
@ -3645,11 +3502,11 @@ sigs.k8s.io/structured-merge-diff/v4 v4.0.1/go.mod h1:bJZC9H9iH24zzfZ/41RGcq60oK
sigs.k8s.io/structured-merge-diff/v4 v4.0.2/go.mod h1:bJZC9H9iH24zzfZ/41RGcq60oK1F7G282QMXDPYydCw=
sigs.k8s.io/structured-merge-diff/v4 v4.0.3/go.mod h1:bJZC9H9iH24zzfZ/41RGcq60oK1F7G282QMXDPYydCw=
sigs.k8s.io/structured-merge-diff/v4 v4.1.0/go.mod h1:bJZC9H9iH24zzfZ/41RGcq60oK1F7G282QMXDPYydCw=
sigs.k8s.io/structured-merge-diff/v4 v4.1.2 h1:Hr/htKFmJEbtMgS/UD0N+gtgctAqz81t3nu+sPzynno=
sigs.k8s.io/structured-merge-diff/v4 v4.1.2/go.mod h1:j/nl6xW8vLS49O8YvXW1ocPhZawJtm+Yrr7PPRQ0Vg4=
sigs.k8s.io/structured-merge-diff/v4 v4.2.1 h1:bKCqE9GvQ5tiVHn5rfn1r+yao3aLQEaLzkkmAkf+A6Y=
sigs.k8s.io/yaml v1.1.0/go.mod h1:UJmg0vDUVViEyp3mgSv9WPwZCDxu4rQW1olrI1uml+o=
sigs.k8s.io/yaml v1.2.0 h1:kr/MCeFWJWTwyaHoR9c8EjH9OumOmoF9YGiZd7lFm/Q=
sigs.k8s.io/yaml v1.2.0/go.mod h1:yfXDCHCao9+ENCvLSE62v9VSji2MKu5jeNfTrofGhJc=
sigs.k8s.io/yaml v1.3.0 h1:a2VclLzOGrwOHDiV8EfBGhvjHvP46CtW5j6POvhYGGo=
sourcegraph.com/sourcegraph/appdash v0.0.0-20190731080439-ebfcffb1b5c0/go.mod h1:hI742Nqp5OhwiqlzhgfbWU4mW4yO10fP+LoT9WOswdU=
xorm.io/builder v0.3.6 h1:ha28mQ2M+TFx96Hxo+iq6tQgnkC9IZkM6D8w9sKHHF8=
xorm.io/builder v0.3.6/go.mod h1:LEFAPISnRzG+zxaxj2vPicRwz67BdhFreKg8yv8/TgU=

View File

@ -24,6 +24,7 @@ import (
"github.com/grafana/grafana/pkg/plugins/storage"
ac "github.com/grafana/grafana/pkg/services/accesscontrol"
"github.com/grafana/grafana/pkg/services/datasources"
"github.com/grafana/grafana/pkg/services/featuremgmt"
"github.com/grafana/grafana/pkg/services/org"
"github.com/grafana/grafana/pkg/services/pluginsettings"
"github.com/grafana/grafana/pkg/setting"
@ -103,6 +104,10 @@ func (hs *HTTPServer) GetPluginList(c *models.ReqContext) response.Response {
}
}
if (pluginDef.ID == "parca" || pluginDef.ID == "phlare") && !hs.Features.IsEnabled(featuremgmt.FlagFlameGraph) {
continue
}
filteredPluginDefinitions = append(filteredPluginDefinitions, pluginDef)
filteredPluginIDs[pluginDef.ID] = true
}
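
Note (not part of the diff): the hunk above hides the two new datasources from the plugin list unless the flameGraph feature toggle is enabled. A minimal sketch of the same gate as a standalone helper, assuming featuremgmt.FeatureToggles exposes IsEnabled(flag string) bool as used here:

// Sketch only: mirrors the check in GetPluginList above; parca and phlare
// stay hidden while the flameGraph toggle is off.
func hideProfilingDatasource(pluginID string, features featuremgmt.FeatureToggles) bool {
	return (pluginID == "parca" || pluginID == "phlare") &&
		!features.IsEnabled(featuremgmt.FlagFlameGraph)
}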

View File

@ -7,6 +7,8 @@ import (
"context"
"github.com/google/wire"
"github.com/grafana/grafana/pkg/tsdb/parca"
phlare "github.com/grafana/grafana/pkg/tsdb/phlare"
sdkhttpclient "github.com/grafana/grafana-plugin-sdk-go/backend/httpclient"
"github.com/grafana/grafana/pkg/api"
@ -259,6 +261,8 @@ var wireSet = wire.NewSet(
graphite.ProvideService,
prometheus.ProvideService,
elasticsearch.ProvideService,
phlare.ProvideService,
parca.ProvideService,
secretsMigrator.ProvideSecretsMigrator,
wire.Bind(new(secrets.Migrator), new(*secretsMigrator.SecretsMigrator)),
grafanads.ProvideService,

View File

@ -16,7 +16,7 @@ seqs: [
// grafana.com, then the plugin id has to follow the naming
// conventions.
id: string & strings.MinRunes(1)
id: =~"^([0-9a-z]+\\-([0-9a-z]+\\-)?(\(strings.Join([for t in _types {t}], "|"))))|(alertGroups|alertlist|annolist|barchart|bargauge|candlestick|canvas|dashlist|debug|gauge|geomap|gettingstarted|graph|heatmap|heatmap-old|histogram|icon|live|logs|news|nodeGraph|piechart|pluginlist|stat|state-timeline|status-history|table|table-old|text|timeseries|traces|welcome|xychart|alertmanager|cloudwatch|dashboard|elasticsearch|grafana|grafana-azure-monitor-datasource|graphite|influxdb|jaeger|loki|mixed|mssql|mysql|opentsdb|postgres|prometheus|stackdriver|tempo|testdata|zipkin)$"
id: =~"^([0-9a-z]+\\-([0-9a-z]+\\-)?(\(strings.Join([for t in _types {t}], "|"))))|(alertGroups|alertlist|annolist|barchart|bargauge|candlestick|canvas|dashlist|debug|gauge|geomap|gettingstarted|graph|heatmap|heatmap-old|histogram|icon|live|logs|news|nodeGraph|piechart|pluginlist|stat|state-timeline|status-history|table|table-old|text|timeseries|traces|welcome|xychart|alertmanager|cloudwatch|dashboard|elasticsearch|grafana|grafana-azure-monitor-datasource|graphite|influxdb|jaeger|loki|mixed|mssql|mysql|opentsdb|postgres|prometheus|stackdriver|tempo|testdata|zipkin|phlare|parca)$"
// The set of all plugin types. This hidden field exists solely
// so that the set can be string-interpolated into other fields.
@ -35,7 +35,7 @@ seqs: [
name: string
// Plugin category used on the Add data source page.
category?: "tsdb" | "logging" | "cloud" | "tracing" | "sql" | "enterprise" | "other"
category?: "tsdb" | "logging" | "cloud" | "tracing" | "sql" | "enterprise" | "profiling" | "other"
// For data source plugins, if the plugin supports annotation
// queries.

View File

@ -27,6 +27,8 @@ const (
CategoryOther Category = "other"
CategoryProfiling Category = "profiling"
CategorySql Category = "sql"
CategoryTracing Category = "tracing"

View File

@ -19,6 +19,8 @@ import (
"github.com/grafana/grafana/pkg/tsdb/mssql"
"github.com/grafana/grafana/pkg/tsdb/mysql"
"github.com/grafana/grafana/pkg/tsdb/opentsdb"
"github.com/grafana/grafana/pkg/tsdb/parca"
"github.com/grafana/grafana/pkg/tsdb/phlare"
"github.com/grafana/grafana/pkg/tsdb/postgres"
"github.com/grafana/grafana/pkg/tsdb/prometheus"
"github.com/grafana/grafana/pkg/tsdb/tempo"
@ -41,6 +43,8 @@ const (
MySQL = "mysql"
MSSQL = "mssql"
Grafana = "grafana"
Phlare = "phlare"
Parca = "parca"
)
func init() {
@ -62,7 +66,7 @@ func NewRegistry(store map[string]backendplugin.PluginFactoryFunc) *Registry {
func ProvideCoreRegistry(am *azuremonitor.Service, cw *cloudwatch.CloudWatchService, cm *cloudmonitoring.Service,
es *elasticsearch.Service, grap *graphite.Service, idb *influxdb.Service, lk *loki.Service, otsdb *opentsdb.Service,
pr *prometheus.Service, t *tempo.Service, td *testdatasource.Service, pg *postgres.Service, my *mysql.Service,
ms *mssql.Service, graf *grafanads.Service) *Registry {
ms *mssql.Service, graf *grafanads.Service, phlare *phlare.Service, parca *parca.Service) *Registry {
return NewRegistry(map[string]backendplugin.PluginFactoryFunc{
CloudWatch: asBackendPlugin(cw.Executor),
CloudMonitoring: asBackendPlugin(cm),
@ -79,6 +83,8 @@ func ProvideCoreRegistry(am *azuremonitor.Service, cw *cloudwatch.CloudWatchServ
MySQL: asBackendPlugin(my),
MSSQL: asBackendPlugin(ms),
Grafana: asBackendPlugin(graf),
Phlare: asBackendPlugin(phlare),
Parca: asBackendPlugin(parca),
})
}
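
Note (outside the diff): wiring a new core backend datasource touches three places in this file: the plugin-ID constant block, the ProvideCoreRegistry parameter list, and the factory map. Isolated to just the new entries, and reusing only identifiers already shown above, the registration pattern is roughly:

// Sketch only: the profiling services are wrapped and keyed like every other
// core datasource in the registry map.
reg := NewRegistry(map[string]backendplugin.PluginFactoryFunc{
	Phlare: asBackendPlugin(phlare),
	Parca:  asBackendPlugin(parca),
})
_ = reg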

View File

@ -11,6 +11,8 @@ import (
"github.com/grafana/grafana-azure-sdk-go/azsettings"
"github.com/grafana/grafana-plugin-sdk-go/backend"
"github.com/grafana/grafana-plugin-sdk-go/backend/httpclient"
"github.com/grafana/grafana/pkg/tsdb/parca"
"github.com/grafana/grafana/pkg/tsdb/phlare"
"github.com/stretchr/testify/require"
"gopkg.in/ini.v1"
@ -97,8 +99,10 @@ func TestIntegrationPluginManager(t *testing.T) {
ms := mssql.ProvideService(cfg)
sv2 := searchV2.ProvideService(cfg, db.InitTestDB(t), nil, nil, tracer, features, nil, nil, nil)
graf := grafanads.ProvideService(sv2, nil)
phlare := phlare.ProvideService(hcp)
parca := parca.ProvideService(hcp)
coreRegistry := coreplugin.ProvideCoreRegistry(am, cw, cm, es, grap, idb, lk, otsdb, pr, tmpo, td, pg, my, ms, graf)
coreRegistry := coreplugin.ProvideCoreRegistry(am, cw, cm, es, grap, idb, lk, otsdb, pr, tmpo, td, pg, my, ms, graf, phlare, parca)
pCfg := config.ProvideConfig(setting.ProvideProvider(cfg), cfg)
reg := registry.ProvideService()
@ -200,6 +204,8 @@ func verifyCorePluginCatalogue(t *testing.T, ctx context.Context, ps *store.Serv
"jaeger": {},
"mixed": {},
"zipkin": {},
"phlare": {},
"parca": {},
}
expApps := map[string]struct{}{

View File

@ -53,6 +53,8 @@ func coreTreeList(rt *thema.Runtime) pfs.TreeList {
makeTreeOrPanic("public/app/plugins/datasource/loki", "loki", rt),
makeTreeOrPanic("public/app/plugins/datasource/mssql", "mssql", rt),
makeTreeOrPanic("public/app/plugins/datasource/mysql", "mysql", rt),
makeTreeOrPanic("public/app/plugins/datasource/parca", "parca", rt),
makeTreeOrPanic("public/app/plugins/datasource/phlare", "phlare", rt),
makeTreeOrPanic("public/app/plugins/datasource/postgres", "postgres", rt),
makeTreeOrPanic("public/app/plugins/datasource/prometheus", "prometheus", rt),
makeTreeOrPanic("public/app/plugins/datasource/tempo", "tempo", rt),

View File

@ -157,6 +157,8 @@ import (
"github.com/grafana/grafana/pkg/tsdb/mssql"
"github.com/grafana/grafana/pkg/tsdb/mysql"
"github.com/grafana/grafana/pkg/tsdb/opentsdb"
"github.com/grafana/grafana/pkg/tsdb/parca"
"github.com/grafana/grafana/pkg/tsdb/phlare"
"github.com/grafana/grafana/pkg/tsdb/postgres"
"github.com/grafana/grafana/pkg/tsdb/prometheus"
"github.com/grafana/grafana/pkg/tsdb/tempo"
@ -277,6 +279,8 @@ var wireBasicSet = wire.NewSet(
graphite.ProvideService,
prometheus.ProvideService,
elasticsearch.ProvideService,
phlare.ProvideService,
parca.ProvideService,
encryptionservice.ProvideEncryptionService,
wire.Bind(new(encryption.Internal), new(*encryptionservice.Service)),
secretsManager.ProvideSecretsService,

File diff suppressed because it is too large

File diff suppressed because it is too large

View File

@ -0,0 +1,829 @@
// Code generated by protoc-gen-go. DO NOT EDIT.
// versions:
// protoc-gen-go v1.28.1
// protoc (unknown)
// source: parca/debuginfo/v1alpha1/debuginfo.proto
package debuginfov1alpha1
import (
protoreflect "google.golang.org/protobuf/reflect/protoreflect"
protoimpl "google.golang.org/protobuf/runtime/protoimpl"
reflect "reflect"
sync "sync"
)
const (
// Verify that this generated code is sufficiently up-to-date.
_ = protoimpl.EnforceVersion(20 - protoimpl.MinVersion)
// Verify that runtime/protoimpl is sufficiently up-to-date.
_ = protoimpl.EnforceVersion(protoimpl.MaxVersion - 20)
)
// Source enum describes the source a debuginfo is from.
type DownloadInfo_Source int32
const (
// To understand when no source is set we have the unknown source.
DownloadInfo_SOURCE_UNKNOWN_UNSPECIFIED DownloadInfo_Source = 0
// The debuginfo was uploaded by a user/agent.
DownloadInfo_SOURCE_UPLOAD DownloadInfo_Source = 1
// The debuginfo was downloaded from a public debuginfod server.
DownloadInfo_SOURCE_DEBUGINFOD DownloadInfo_Source = 2
)
// Enum value maps for DownloadInfo_Source.
var (
DownloadInfo_Source_name = map[int32]string{
0: "SOURCE_UNKNOWN_UNSPECIFIED",
1: "SOURCE_UPLOAD",
2: "SOURCE_DEBUGINFOD",
}
DownloadInfo_Source_value = map[string]int32{
"SOURCE_UNKNOWN_UNSPECIFIED": 0,
"SOURCE_UPLOAD": 1,
"SOURCE_DEBUGINFOD": 2,
}
)
func (x DownloadInfo_Source) Enum() *DownloadInfo_Source {
p := new(DownloadInfo_Source)
*p = x
return p
}
func (x DownloadInfo_Source) String() string {
return protoimpl.X.EnumStringOf(x.Descriptor(), protoreflect.EnumNumber(x))
}
func (DownloadInfo_Source) Descriptor() protoreflect.EnumDescriptor {
return file_parca_debuginfo_v1alpha1_debuginfo_proto_enumTypes[0].Descriptor()
}
func (DownloadInfo_Source) Type() protoreflect.EnumType {
return &file_parca_debuginfo_v1alpha1_debuginfo_proto_enumTypes[0]
}
func (x DownloadInfo_Source) Number() protoreflect.EnumNumber {
return protoreflect.EnumNumber(x)
}
// Deprecated: Use DownloadInfo_Source.Descriptor instead.
func (DownloadInfo_Source) EnumDescriptor() ([]byte, []int) {
return file_parca_debuginfo_v1alpha1_debuginfo_proto_rawDescGZIP(), []int{7, 0}
}
// ExistsRequest request to determine if debug info exists for a given build_id
type ExistsRequest struct {
state protoimpl.MessageState
sizeCache protoimpl.SizeCache
unknownFields protoimpl.UnknownFields
// build_id is a unique identifier for the debug data
BuildId string `protobuf:"bytes,1,opt,name=build_id,json=buildId,proto3" json:"build_id,omitempty"`
// hash is the hash of the debug information file
Hash string `protobuf:"bytes,2,opt,name=hash,proto3" json:"hash,omitempty"`
}
func (x *ExistsRequest) Reset() {
*x = ExistsRequest{}
if protoimpl.UnsafeEnabled {
mi := &file_parca_debuginfo_v1alpha1_debuginfo_proto_msgTypes[0]
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
ms.StoreMessageInfo(mi)
}
}
func (x *ExistsRequest) String() string {
return protoimpl.X.MessageStringOf(x)
}
func (*ExistsRequest) ProtoMessage() {}
func (x *ExistsRequest) ProtoReflect() protoreflect.Message {
mi := &file_parca_debuginfo_v1alpha1_debuginfo_proto_msgTypes[0]
if protoimpl.UnsafeEnabled && x != nil {
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
if ms.LoadMessageInfo() == nil {
ms.StoreMessageInfo(mi)
}
return ms
}
return mi.MessageOf(x)
}
// Deprecated: Use ExistsRequest.ProtoReflect.Descriptor instead.
func (*ExistsRequest) Descriptor() ([]byte, []int) {
return file_parca_debuginfo_v1alpha1_debuginfo_proto_rawDescGZIP(), []int{0}
}
func (x *ExistsRequest) GetBuildId() string {
if x != nil {
return x.BuildId
}
return ""
}
func (x *ExistsRequest) GetHash() string {
if x != nil {
return x.Hash
}
return ""
}
// ExistsResponse returns whether the given build_id has debug info
type ExistsResponse struct {
state protoimpl.MessageState
sizeCache protoimpl.SizeCache
unknownFields protoimpl.UnknownFields
// exists indicates if there is debug data present for the given build_id
Exists bool `protobuf:"varint,1,opt,name=exists,proto3" json:"exists,omitempty"`
}
func (x *ExistsResponse) Reset() {
*x = ExistsResponse{}
if protoimpl.UnsafeEnabled {
mi := &file_parca_debuginfo_v1alpha1_debuginfo_proto_msgTypes[1]
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
ms.StoreMessageInfo(mi)
}
}
func (x *ExistsResponse) String() string {
return protoimpl.X.MessageStringOf(x)
}
func (*ExistsResponse) ProtoMessage() {}
func (x *ExistsResponse) ProtoReflect() protoreflect.Message {
mi := &file_parca_debuginfo_v1alpha1_debuginfo_proto_msgTypes[1]
if protoimpl.UnsafeEnabled && x != nil {
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
if ms.LoadMessageInfo() == nil {
ms.StoreMessageInfo(mi)
}
return ms
}
return mi.MessageOf(x)
}
// Deprecated: Use ExistsResponse.ProtoReflect.Descriptor instead.
func (*ExistsResponse) Descriptor() ([]byte, []int) {
return file_parca_debuginfo_v1alpha1_debuginfo_proto_rawDescGZIP(), []int{1}
}
func (x *ExistsResponse) GetExists() bool {
if x != nil {
return x.Exists
}
return false
}
// UploadRequest upload debug info
type UploadRequest struct {
state protoimpl.MessageState
sizeCache protoimpl.SizeCache
unknownFields protoimpl.UnknownFields
// data contains either the upload info metadata or the debug info
//
// Types that are assignable to Data:
// *UploadRequest_Info
// *UploadRequest_ChunkData
Data isUploadRequest_Data `protobuf_oneof:"data"`
}
func (x *UploadRequest) Reset() {
*x = UploadRequest{}
if protoimpl.UnsafeEnabled {
mi := &file_parca_debuginfo_v1alpha1_debuginfo_proto_msgTypes[2]
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
ms.StoreMessageInfo(mi)
}
}
func (x *UploadRequest) String() string {
return protoimpl.X.MessageStringOf(x)
}
func (*UploadRequest) ProtoMessage() {}
func (x *UploadRequest) ProtoReflect() protoreflect.Message {
mi := &file_parca_debuginfo_v1alpha1_debuginfo_proto_msgTypes[2]
if protoimpl.UnsafeEnabled && x != nil {
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
if ms.LoadMessageInfo() == nil {
ms.StoreMessageInfo(mi)
}
return ms
}
return mi.MessageOf(x)
}
// Deprecated: Use UploadRequest.ProtoReflect.Descriptor instead.
func (*UploadRequest) Descriptor() ([]byte, []int) {
return file_parca_debuginfo_v1alpha1_debuginfo_proto_rawDescGZIP(), []int{2}
}
func (m *UploadRequest) GetData() isUploadRequest_Data {
if m != nil {
return m.Data
}
return nil
}
func (x *UploadRequest) GetInfo() *UploadInfo {
if x, ok := x.GetData().(*UploadRequest_Info); ok {
return x.Info
}
return nil
}
func (x *UploadRequest) GetChunkData() []byte {
if x, ok := x.GetData().(*UploadRequest_ChunkData); ok {
return x.ChunkData
}
return nil
}
type isUploadRequest_Data interface {
isUploadRequest_Data()
}
type UploadRequest_Info struct {
// info is the metadata for the debug info
Info *UploadInfo `protobuf:"bytes,1,opt,name=info,proto3,oneof"`
}
type UploadRequest_ChunkData struct {
// chunk_data is the raw bytes of the debug info
ChunkData []byte `protobuf:"bytes,2,opt,name=chunk_data,json=chunkData,proto3,oneof"`
}
func (*UploadRequest_Info) isUploadRequest_Data() {}
func (*UploadRequest_ChunkData) isUploadRequest_Data() {}
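
Usage sketch (illustrative, not part of the generated file): UploadRequest is a streamed message whose data oneof carries either UploadInfo metadata (defined just below) or a raw chunk of debuginfo bytes; a client would presumably send one info message followed by chunk messages. All field values here are made up:

// Metadata message for the build being uploaded.
first := &UploadRequest{
	Data: &UploadRequest_Info{
		Info: &UploadInfo{BuildId: "f0e1d2c3", Hash: "abc123"}, // example values only
	},
}
// Follow-up message carrying raw debuginfo bytes.
next := &UploadRequest{
	Data: &UploadRequest_ChunkData{ChunkData: []byte{0x7f, 'E', 'L', 'F'}},
}
_, _ = first, next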
// UploadInfo contains the build_id and other metadata for the debug data
type UploadInfo struct {
state protoimpl.MessageState
sizeCache protoimpl.SizeCache
unknownFields protoimpl.UnknownFields
// build_id is a unique identifier for the debug data
BuildId string `protobuf:"bytes,1,opt,name=build_id,json=buildId,proto3" json:"build_id,omitempty"`
// hash is the hash of the source file that the debug information was extracted from
Hash string `protobuf:"bytes,2,opt,name=hash,proto3" json:"hash,omitempty"`
}
func (x *UploadInfo) Reset() {
*x = UploadInfo{}
if protoimpl.UnsafeEnabled {
mi := &file_parca_debuginfo_v1alpha1_debuginfo_proto_msgTypes[3]
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
ms.StoreMessageInfo(mi)
}
}
func (x *UploadInfo) String() string {
return protoimpl.X.MessageStringOf(x)
}
func (*UploadInfo) ProtoMessage() {}
func (x *UploadInfo) ProtoReflect() protoreflect.Message {
mi := &file_parca_debuginfo_v1alpha1_debuginfo_proto_msgTypes[3]
if protoimpl.UnsafeEnabled && x != nil {
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
if ms.LoadMessageInfo() == nil {
ms.StoreMessageInfo(mi)
}
return ms
}
return mi.MessageOf(x)
}
// Deprecated: Use UploadInfo.ProtoReflect.Descriptor instead.
func (*UploadInfo) Descriptor() ([]byte, []int) {
return file_parca_debuginfo_v1alpha1_debuginfo_proto_rawDescGZIP(), []int{3}
}
func (x *UploadInfo) GetBuildId() string {
if x != nil {
return x.BuildId
}
return ""
}
func (x *UploadInfo) GetHash() string {
if x != nil {
return x.Hash
}
return ""
}
// UploadResponse returns the build_id and the size of the uploaded debug info
type UploadResponse struct {
state protoimpl.MessageState
sizeCache protoimpl.SizeCache
unknownFields protoimpl.UnknownFields
// build_id is a unique identifier for the debug data
BuildId string `protobuf:"bytes,1,opt,name=build_id,json=buildId,proto3" json:"build_id,omitempty"`
// size is the number of bytes of the debug info
Size uint64 `protobuf:"varint,2,opt,name=size,proto3" json:"size,omitempty"`
}
func (x *UploadResponse) Reset() {
*x = UploadResponse{}
if protoimpl.UnsafeEnabled {
mi := &file_parca_debuginfo_v1alpha1_debuginfo_proto_msgTypes[4]
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
ms.StoreMessageInfo(mi)
}
}
func (x *UploadResponse) String() string {
return protoimpl.X.MessageStringOf(x)
}
func (*UploadResponse) ProtoMessage() {}
func (x *UploadResponse) ProtoReflect() protoreflect.Message {
mi := &file_parca_debuginfo_v1alpha1_debuginfo_proto_msgTypes[4]
if protoimpl.UnsafeEnabled && x != nil {
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
if ms.LoadMessageInfo() == nil {
ms.StoreMessageInfo(mi)
}
return ms
}
return mi.MessageOf(x)
}
// Deprecated: Use UploadResponse.ProtoReflect.Descriptor instead.
func (*UploadResponse) Descriptor() ([]byte, []int) {
return file_parca_debuginfo_v1alpha1_debuginfo_proto_rawDescGZIP(), []int{4}
}
func (x *UploadResponse) GetBuildId() string {
if x != nil {
return x.BuildId
}
return ""
}
func (x *UploadResponse) GetSize() uint64 {
if x != nil {
return x.Size
}
return 0
}
// DownloadRequest requests download of the debug info for a given build_id
type DownloadRequest struct {
state protoimpl.MessageState
sizeCache protoimpl.SizeCache
unknownFields protoimpl.UnknownFields
// build_id is a unique identifier for the debug data
BuildId string `protobuf:"bytes,1,opt,name=build_id,json=buildId,proto3" json:"build_id,omitempty"`
}
func (x *DownloadRequest) Reset() {
*x = DownloadRequest{}
if protoimpl.UnsafeEnabled {
mi := &file_parca_debuginfo_v1alpha1_debuginfo_proto_msgTypes[5]
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
ms.StoreMessageInfo(mi)
}
}
func (x *DownloadRequest) String() string {
return protoimpl.X.MessageStringOf(x)
}
func (*DownloadRequest) ProtoMessage() {}
func (x *DownloadRequest) ProtoReflect() protoreflect.Message {
mi := &file_parca_debuginfo_v1alpha1_debuginfo_proto_msgTypes[5]
if protoimpl.UnsafeEnabled && x != nil {
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
if ms.LoadMessageInfo() == nil {
ms.StoreMessageInfo(mi)
}
return ms
}
return mi.MessageOf(x)
}
// Deprecated: Use DownloadRequest.ProtoReflect.Descriptor instead.
func (*DownloadRequest) Descriptor() ([]byte, []int) {
return file_parca_debuginfo_v1alpha1_debuginfo_proto_rawDescGZIP(), []int{5}
}
func (x *DownloadRequest) GetBuildId() string {
if x != nil {
return x.BuildId
}
return ""
}
// DownloadResponse returns chunked data of the debuginfo.
type DownloadResponse struct {
state protoimpl.MessageState
sizeCache protoimpl.SizeCache
unknownFields protoimpl.UnknownFields
// data contains either the download info metadata or the debug info
//
// Types that are assignable to Data:
// *DownloadResponse_Info
// *DownloadResponse_ChunkData
Data isDownloadResponse_Data `protobuf_oneof:"data"`
}
func (x *DownloadResponse) Reset() {
*x = DownloadResponse{}
if protoimpl.UnsafeEnabled {
mi := &file_parca_debuginfo_v1alpha1_debuginfo_proto_msgTypes[6]
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
ms.StoreMessageInfo(mi)
}
}
func (x *DownloadResponse) String() string {
return protoimpl.X.MessageStringOf(x)
}
func (*DownloadResponse) ProtoMessage() {}
func (x *DownloadResponse) ProtoReflect() protoreflect.Message {
mi := &file_parca_debuginfo_v1alpha1_debuginfo_proto_msgTypes[6]
if protoimpl.UnsafeEnabled && x != nil {
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
if ms.LoadMessageInfo() == nil {
ms.StoreMessageInfo(mi)
}
return ms
}
return mi.MessageOf(x)
}
// Deprecated: Use DownloadResponse.ProtoReflect.Descriptor instead.
func (*DownloadResponse) Descriptor() ([]byte, []int) {
return file_parca_debuginfo_v1alpha1_debuginfo_proto_rawDescGZIP(), []int{6}
}
func (m *DownloadResponse) GetData() isDownloadResponse_Data {
if m != nil {
return m.Data
}
return nil
}
func (x *DownloadResponse) GetInfo() *DownloadInfo {
if x, ok := x.GetData().(*DownloadResponse_Info); ok {
return x.Info
}
return nil
}
func (x *DownloadResponse) GetChunkData() []byte {
if x, ok := x.GetData().(*DownloadResponse_ChunkData); ok {
return x.ChunkData
}
return nil
}
type isDownloadResponse_Data interface {
isDownloadResponse_Data()
}
type DownloadResponse_Info struct {
// info is the metadata for the debug info
Info *DownloadInfo `protobuf:"bytes,1,opt,name=info,proto3,oneof"`
}
type DownloadResponse_ChunkData struct {
// chunk_data is the raw bytes of the debug info
ChunkData []byte `protobuf:"bytes,2,opt,name=chunk_data,json=chunkData,proto3,oneof"`
}
func (*DownloadResponse_Info) isDownloadResponse_Data() {}
func (*DownloadResponse_ChunkData) isDownloadResponse_Data() {}
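
Consumer-side sketch (illustrative): when reading a download stream, a type switch on the data oneof separates the DownloadInfo header from chunk payloads. Here resp is assumed to be a *DownloadResponse received from the stream:

var payload []byte
switch d := resp.Data.(type) {
case *DownloadResponse_Info:
	// Header message: records where the debuginfo came from.
	_ = d.Info.GetSource()
case *DownloadResponse_ChunkData:
	// Chunk message: append the raw bytes.
	payload = append(payload, d.ChunkData...)
}
_ = payload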
// DownloadInfo metadata for the debug data that is being downloaded.
type DownloadInfo struct {
state protoimpl.MessageState
sizeCache protoimpl.SizeCache
unknownFields protoimpl.UnknownFields
// Source indicates the origin of the debuginfo being downloaded.
Source DownloadInfo_Source `protobuf:"varint,1,opt,name=source,proto3,enum=parca.debuginfo.v1alpha1.DownloadInfo_Source" json:"source,omitempty"`
}
func (x *DownloadInfo) Reset() {
*x = DownloadInfo{}
if protoimpl.UnsafeEnabled {
mi := &file_parca_debuginfo_v1alpha1_debuginfo_proto_msgTypes[7]
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
ms.StoreMessageInfo(mi)
}
}
func (x *DownloadInfo) String() string {
return protoimpl.X.MessageStringOf(x)
}
func (*DownloadInfo) ProtoMessage() {}
func (x *DownloadInfo) ProtoReflect() protoreflect.Message {
mi := &file_parca_debuginfo_v1alpha1_debuginfo_proto_msgTypes[7]
if protoimpl.UnsafeEnabled && x != nil {
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
if ms.LoadMessageInfo() == nil {
ms.StoreMessageInfo(mi)
}
return ms
}
return mi.MessageOf(x)
}
// Deprecated: Use DownloadInfo.ProtoReflect.Descriptor instead.
func (*DownloadInfo) Descriptor() ([]byte, []int) {
return file_parca_debuginfo_v1alpha1_debuginfo_proto_rawDescGZIP(), []int{7}
}
func (x *DownloadInfo) GetSource() DownloadInfo_Source {
if x != nil {
return x.Source
}
return DownloadInfo_SOURCE_UNKNOWN_UNSPECIFIED
}
var File_parca_debuginfo_v1alpha1_debuginfo_proto protoreflect.FileDescriptor
var file_parca_debuginfo_v1alpha1_debuginfo_proto_rawDesc = []byte{
0x0a, 0x28, 0x70, 0x61, 0x72, 0x63, 0x61, 0x2f, 0x64, 0x65, 0x62, 0x75, 0x67, 0x69, 0x6e, 0x66,
0x6f, 0x2f, 0x76, 0x31, 0x61, 0x6c, 0x70, 0x68, 0x61, 0x31, 0x2f, 0x64, 0x65, 0x62, 0x75, 0x67,
0x69, 0x6e, 0x66, 0x6f, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x12, 0x18, 0x70, 0x61, 0x72, 0x63,
0x61, 0x2e, 0x64, 0x65, 0x62, 0x75, 0x67, 0x69, 0x6e, 0x66, 0x6f, 0x2e, 0x76, 0x31, 0x61, 0x6c,
0x70, 0x68, 0x61, 0x31, 0x22, 0x3e, 0x0a, 0x0d, 0x45, 0x78, 0x69, 0x73, 0x74, 0x73, 0x52, 0x65,
0x71, 0x75, 0x65, 0x73, 0x74, 0x12, 0x19, 0x0a, 0x08, 0x62, 0x75, 0x69, 0x6c, 0x64, 0x5f, 0x69,
0x64, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x07, 0x62, 0x75, 0x69, 0x6c, 0x64, 0x49, 0x64,
0x12, 0x12, 0x0a, 0x04, 0x68, 0x61, 0x73, 0x68, 0x18, 0x02, 0x20, 0x01, 0x28, 0x09, 0x52, 0x04,
0x68, 0x61, 0x73, 0x68, 0x22, 0x28, 0x0a, 0x0e, 0x45, 0x78, 0x69, 0x73, 0x74, 0x73, 0x52, 0x65,
0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x12, 0x16, 0x0a, 0x06, 0x65, 0x78, 0x69, 0x73, 0x74, 0x73,
0x18, 0x01, 0x20, 0x01, 0x28, 0x08, 0x52, 0x06, 0x65, 0x78, 0x69, 0x73, 0x74, 0x73, 0x22, 0x74,
0x0a, 0x0d, 0x55, 0x70, 0x6c, 0x6f, 0x61, 0x64, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x12,
0x3a, 0x0a, 0x04, 0x69, 0x6e, 0x66, 0x6f, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x24, 0x2e,
0x70, 0x61, 0x72, 0x63, 0x61, 0x2e, 0x64, 0x65, 0x62, 0x75, 0x67, 0x69, 0x6e, 0x66, 0x6f, 0x2e,
0x76, 0x31, 0x61, 0x6c, 0x70, 0x68, 0x61, 0x31, 0x2e, 0x55, 0x70, 0x6c, 0x6f, 0x61, 0x64, 0x49,
0x6e, 0x66, 0x6f, 0x48, 0x00, 0x52, 0x04, 0x69, 0x6e, 0x66, 0x6f, 0x12, 0x1f, 0x0a, 0x0a, 0x63,
0x68, 0x75, 0x6e, 0x6b, 0x5f, 0x64, 0x61, 0x74, 0x61, 0x18, 0x02, 0x20, 0x01, 0x28, 0x0c, 0x48,
0x00, 0x52, 0x09, 0x63, 0x68, 0x75, 0x6e, 0x6b, 0x44, 0x61, 0x74, 0x61, 0x42, 0x06, 0x0a, 0x04,
0x64, 0x61, 0x74, 0x61, 0x22, 0x3b, 0x0a, 0x0a, 0x55, 0x70, 0x6c, 0x6f, 0x61, 0x64, 0x49, 0x6e,
0x66, 0x6f, 0x12, 0x19, 0x0a, 0x08, 0x62, 0x75, 0x69, 0x6c, 0x64, 0x5f, 0x69, 0x64, 0x18, 0x01,
0x20, 0x01, 0x28, 0x09, 0x52, 0x07, 0x62, 0x75, 0x69, 0x6c, 0x64, 0x49, 0x64, 0x12, 0x12, 0x0a,
0x04, 0x68, 0x61, 0x73, 0x68, 0x18, 0x02, 0x20, 0x01, 0x28, 0x09, 0x52, 0x04, 0x68, 0x61, 0x73,
0x68, 0x22, 0x3f, 0x0a, 0x0e, 0x55, 0x70, 0x6c, 0x6f, 0x61, 0x64, 0x52, 0x65, 0x73, 0x70, 0x6f,
0x6e, 0x73, 0x65, 0x12, 0x19, 0x0a, 0x08, 0x62, 0x75, 0x69, 0x6c, 0x64, 0x5f, 0x69, 0x64, 0x18,
0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x07, 0x62, 0x75, 0x69, 0x6c, 0x64, 0x49, 0x64, 0x12, 0x12,
0x0a, 0x04, 0x73, 0x69, 0x7a, 0x65, 0x18, 0x02, 0x20, 0x01, 0x28, 0x04, 0x52, 0x04, 0x73, 0x69,
0x7a, 0x65, 0x22, 0x2c, 0x0a, 0x0f, 0x44, 0x6f, 0x77, 0x6e, 0x6c, 0x6f, 0x61, 0x64, 0x52, 0x65,
0x71, 0x75, 0x65, 0x73, 0x74, 0x12, 0x19, 0x0a, 0x08, 0x62, 0x75, 0x69, 0x6c, 0x64, 0x5f, 0x69,
0x64, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x07, 0x62, 0x75, 0x69, 0x6c, 0x64, 0x49, 0x64,
0x22, 0x79, 0x0a, 0x10, 0x44, 0x6f, 0x77, 0x6e, 0x6c, 0x6f, 0x61, 0x64, 0x52, 0x65, 0x73, 0x70,
0x6f, 0x6e, 0x73, 0x65, 0x12, 0x3c, 0x0a, 0x04, 0x69, 0x6e, 0x66, 0x6f, 0x18, 0x01, 0x20, 0x01,
0x28, 0x0b, 0x32, 0x26, 0x2e, 0x70, 0x61, 0x72, 0x63, 0x61, 0x2e, 0x64, 0x65, 0x62, 0x75, 0x67,
0x69, 0x6e, 0x66, 0x6f, 0x2e, 0x76, 0x31, 0x61, 0x6c, 0x70, 0x68, 0x61, 0x31, 0x2e, 0x44, 0x6f,
0x77, 0x6e, 0x6c, 0x6f, 0x61, 0x64, 0x49, 0x6e, 0x66, 0x6f, 0x48, 0x00, 0x52, 0x04, 0x69, 0x6e,
0x66, 0x6f, 0x12, 0x1f, 0x0a, 0x0a, 0x63, 0x68, 0x75, 0x6e, 0x6b, 0x5f, 0x64, 0x61, 0x74, 0x61,
0x18, 0x02, 0x20, 0x01, 0x28, 0x0c, 0x48, 0x00, 0x52, 0x09, 0x63, 0x68, 0x75, 0x6e, 0x6b, 0x44,
0x61, 0x74, 0x61, 0x42, 0x06, 0x0a, 0x04, 0x64, 0x61, 0x74, 0x61, 0x22, 0xa9, 0x01, 0x0a, 0x0c,
0x44, 0x6f, 0x77, 0x6e, 0x6c, 0x6f, 0x61, 0x64, 0x49, 0x6e, 0x66, 0x6f, 0x12, 0x45, 0x0a, 0x06,
0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0e, 0x32, 0x2d, 0x2e, 0x70,
0x61, 0x72, 0x63, 0x61, 0x2e, 0x64, 0x65, 0x62, 0x75, 0x67, 0x69, 0x6e, 0x66, 0x6f, 0x2e, 0x76,
0x31, 0x61, 0x6c, 0x70, 0x68, 0x61, 0x31, 0x2e, 0x44, 0x6f, 0x77, 0x6e, 0x6c, 0x6f, 0x61, 0x64,
0x49, 0x6e, 0x66, 0x6f, 0x2e, 0x53, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x52, 0x06, 0x73, 0x6f, 0x75,
0x72, 0x63, 0x65, 0x22, 0x52, 0x0a, 0x06, 0x53, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x12, 0x1e, 0x0a,
0x1a, 0x53, 0x4f, 0x55, 0x52, 0x43, 0x45, 0x5f, 0x55, 0x4e, 0x4b, 0x4e, 0x4f, 0x57, 0x4e, 0x5f,
0x55, 0x4e, 0x53, 0x50, 0x45, 0x43, 0x49, 0x46, 0x49, 0x45, 0x44, 0x10, 0x00, 0x12, 0x11, 0x0a,
0x0d, 0x53, 0x4f, 0x55, 0x52, 0x43, 0x45, 0x5f, 0x55, 0x50, 0x4c, 0x4f, 0x41, 0x44, 0x10, 0x01,
0x12, 0x15, 0x0a, 0x11, 0x53, 0x4f, 0x55, 0x52, 0x43, 0x45, 0x5f, 0x44, 0x45, 0x42, 0x55, 0x47,
0x49, 0x4e, 0x46, 0x4f, 0x44, 0x10, 0x02, 0x32, 0xb9, 0x02, 0x0a, 0x10, 0x44, 0x65, 0x62, 0x75,
0x67, 0x49, 0x6e, 0x66, 0x6f, 0x53, 0x65, 0x72, 0x76, 0x69, 0x63, 0x65, 0x12, 0x5d, 0x0a, 0x06,
0x45, 0x78, 0x69, 0x73, 0x74, 0x73, 0x12, 0x27, 0x2e, 0x70, 0x61, 0x72, 0x63, 0x61, 0x2e, 0x64,
0x65, 0x62, 0x75, 0x67, 0x69, 0x6e, 0x66, 0x6f, 0x2e, 0x76, 0x31, 0x61, 0x6c, 0x70, 0x68, 0x61,
0x31, 0x2e, 0x45, 0x78, 0x69, 0x73, 0x74, 0x73, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a,
0x28, 0x2e, 0x70, 0x61, 0x72, 0x63, 0x61, 0x2e, 0x64, 0x65, 0x62, 0x75, 0x67, 0x69, 0x6e, 0x66,
0x6f, 0x2e, 0x76, 0x31, 0x61, 0x6c, 0x70, 0x68, 0x61, 0x31, 0x2e, 0x45, 0x78, 0x69, 0x73, 0x74,
0x73, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x22, 0x00, 0x12, 0x5f, 0x0a, 0x06, 0x55,
0x70, 0x6c, 0x6f, 0x61, 0x64, 0x12, 0x27, 0x2e, 0x70, 0x61, 0x72, 0x63, 0x61, 0x2e, 0x64, 0x65,
0x62, 0x75, 0x67, 0x69, 0x6e, 0x66, 0x6f, 0x2e, 0x76, 0x31, 0x61, 0x6c, 0x70, 0x68, 0x61, 0x31,
0x2e, 0x55, 0x70, 0x6c, 0x6f, 0x61, 0x64, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, 0x28,
0x2e, 0x70, 0x61, 0x72, 0x63, 0x61, 0x2e, 0x64, 0x65, 0x62, 0x75, 0x67, 0x69, 0x6e, 0x66, 0x6f,
0x2e, 0x76, 0x31, 0x61, 0x6c, 0x70, 0x68, 0x61, 0x31, 0x2e, 0x55, 0x70, 0x6c, 0x6f, 0x61, 0x64,
0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x22, 0x00, 0x28, 0x01, 0x12, 0x65, 0x0a, 0x08,
0x44, 0x6f, 0x77, 0x6e, 0x6c, 0x6f, 0x61, 0x64, 0x12, 0x29, 0x2e, 0x70, 0x61, 0x72, 0x63, 0x61,
0x2e, 0x64, 0x65, 0x62, 0x75, 0x67, 0x69, 0x6e, 0x66, 0x6f, 0x2e, 0x76, 0x31, 0x61, 0x6c, 0x70,
0x68, 0x61, 0x31, 0x2e, 0x44, 0x6f, 0x77, 0x6e, 0x6c, 0x6f, 0x61, 0x64, 0x52, 0x65, 0x71, 0x75,
0x65, 0x73, 0x74, 0x1a, 0x2a, 0x2e, 0x70, 0x61, 0x72, 0x63, 0x61, 0x2e, 0x64, 0x65, 0x62, 0x75,
0x67, 0x69, 0x6e, 0x66, 0x6f, 0x2e, 0x76, 0x31, 0x61, 0x6c, 0x70, 0x68, 0x61, 0x31, 0x2e, 0x44,
0x6f, 0x77, 0x6e, 0x6c, 0x6f, 0x61, 0x64, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x22,
0x00, 0x30, 0x01, 0x42, 0x84, 0x02, 0x0a, 0x1c, 0x63, 0x6f, 0x6d, 0x2e, 0x70, 0x61, 0x72, 0x63,
0x61, 0x2e, 0x64, 0x65, 0x62, 0x75, 0x67, 0x69, 0x6e, 0x66, 0x6f, 0x2e, 0x76, 0x31, 0x61, 0x6c,
0x70, 0x68, 0x61, 0x31, 0x42, 0x0e, 0x44, 0x65, 0x62, 0x75, 0x67, 0x69, 0x6e, 0x66, 0x6f, 0x50,
0x72, 0x6f, 0x74, 0x6f, 0x50, 0x01, 0x5a, 0x52, 0x67, 0x69, 0x74, 0x68, 0x75, 0x62, 0x2e, 0x63,
0x6f, 0x6d, 0x2f, 0x70, 0x61, 0x72, 0x63, 0x61, 0x2d, 0x64, 0x65, 0x76, 0x2f, 0x70, 0x61, 0x72,
0x63, 0x61, 0x2f, 0x67, 0x65, 0x6e, 0x2f, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x2f, 0x67, 0x6f, 0x2f,
0x70, 0x61, 0x72, 0x63, 0x61, 0x2f, 0x64, 0x65, 0x62, 0x75, 0x67, 0x69, 0x6e, 0x66, 0x6f, 0x2f,
0x76, 0x31, 0x61, 0x6c, 0x70, 0x68, 0x61, 0x31, 0x3b, 0x64, 0x65, 0x62, 0x75, 0x67, 0x69, 0x6e,
0x66, 0x6f, 0x76, 0x31, 0x61, 0x6c, 0x70, 0x68, 0x61, 0x31, 0xa2, 0x02, 0x03, 0x50, 0x44, 0x58,
0xaa, 0x02, 0x18, 0x50, 0x61, 0x72, 0x63, 0x61, 0x2e, 0x44, 0x65, 0x62, 0x75, 0x67, 0x69, 0x6e,
0x66, 0x6f, 0x2e, 0x56, 0x31, 0x61, 0x6c, 0x70, 0x68, 0x61, 0x31, 0xca, 0x02, 0x18, 0x50, 0x61,
0x72, 0x63, 0x61, 0x5c, 0x44, 0x65, 0x62, 0x75, 0x67, 0x69, 0x6e, 0x66, 0x6f, 0x5c, 0x56, 0x31,
0x61, 0x6c, 0x70, 0x68, 0x61, 0x31, 0xe2, 0x02, 0x24, 0x50, 0x61, 0x72, 0x63, 0x61, 0x5c, 0x44,
0x65, 0x62, 0x75, 0x67, 0x69, 0x6e, 0x66, 0x6f, 0x5c, 0x56, 0x31, 0x61, 0x6c, 0x70, 0x68, 0x61,
0x31, 0x5c, 0x47, 0x50, 0x42, 0x4d, 0x65, 0x74, 0x61, 0x64, 0x61, 0x74, 0x61, 0xea, 0x02, 0x1a,
0x50, 0x61, 0x72, 0x63, 0x61, 0x3a, 0x3a, 0x44, 0x65, 0x62, 0x75, 0x67, 0x69, 0x6e, 0x66, 0x6f,
0x3a, 0x3a, 0x56, 0x31, 0x61, 0x6c, 0x70, 0x68, 0x61, 0x31, 0x62, 0x06, 0x70, 0x72, 0x6f, 0x74,
0x6f, 0x33,
}
var (
file_parca_debuginfo_v1alpha1_debuginfo_proto_rawDescOnce sync.Once
file_parca_debuginfo_v1alpha1_debuginfo_proto_rawDescData = file_parca_debuginfo_v1alpha1_debuginfo_proto_rawDesc
)
func file_parca_debuginfo_v1alpha1_debuginfo_proto_rawDescGZIP() []byte {
file_parca_debuginfo_v1alpha1_debuginfo_proto_rawDescOnce.Do(func() {
file_parca_debuginfo_v1alpha1_debuginfo_proto_rawDescData = protoimpl.X.CompressGZIP(file_parca_debuginfo_v1alpha1_debuginfo_proto_rawDescData)
})
return file_parca_debuginfo_v1alpha1_debuginfo_proto_rawDescData
}
var file_parca_debuginfo_v1alpha1_debuginfo_proto_enumTypes = make([]protoimpl.EnumInfo, 1)
var file_parca_debuginfo_v1alpha1_debuginfo_proto_msgTypes = make([]protoimpl.MessageInfo, 8)
var file_parca_debuginfo_v1alpha1_debuginfo_proto_goTypes = []interface{}{
(DownloadInfo_Source)(0), // 0: parca.debuginfo.v1alpha1.DownloadInfo.Source
(*ExistsRequest)(nil), // 1: parca.debuginfo.v1alpha1.ExistsRequest
(*ExistsResponse)(nil), // 2: parca.debuginfo.v1alpha1.ExistsResponse
(*UploadRequest)(nil), // 3: parca.debuginfo.v1alpha1.UploadRequest
(*UploadInfo)(nil), // 4: parca.debuginfo.v1alpha1.UploadInfo
(*UploadResponse)(nil), // 5: parca.debuginfo.v1alpha1.UploadResponse
(*DownloadRequest)(nil), // 6: parca.debuginfo.v1alpha1.DownloadRequest
(*DownloadResponse)(nil), // 7: parca.debuginfo.v1alpha1.DownloadResponse
(*DownloadInfo)(nil), // 8: parca.debuginfo.v1alpha1.DownloadInfo
}
var file_parca_debuginfo_v1alpha1_debuginfo_proto_depIdxs = []int32{
4, // 0: parca.debuginfo.v1alpha1.UploadRequest.info:type_name -> parca.debuginfo.v1alpha1.UploadInfo
8, // 1: parca.debuginfo.v1alpha1.DownloadResponse.info:type_name -> parca.debuginfo.v1alpha1.DownloadInfo
0, // 2: parca.debuginfo.v1alpha1.DownloadInfo.source:type_name -> parca.debuginfo.v1alpha1.DownloadInfo.Source
1, // 3: parca.debuginfo.v1alpha1.DebugInfoService.Exists:input_type -> parca.debuginfo.v1alpha1.ExistsRequest
3, // 4: parca.debuginfo.v1alpha1.DebugInfoService.Upload:input_type -> parca.debuginfo.v1alpha1.UploadRequest
6, // 5: parca.debuginfo.v1alpha1.DebugInfoService.Download:input_type -> parca.debuginfo.v1alpha1.DownloadRequest
2, // 6: parca.debuginfo.v1alpha1.DebugInfoService.Exists:output_type -> parca.debuginfo.v1alpha1.ExistsResponse
5, // 7: parca.debuginfo.v1alpha1.DebugInfoService.Upload:output_type -> parca.debuginfo.v1alpha1.UploadResponse
7, // 8: parca.debuginfo.v1alpha1.DebugInfoService.Download:output_type -> parca.debuginfo.v1alpha1.DownloadResponse
6, // [6:9] is the sub-list for method output_type
3, // [3:6] is the sub-list for method input_type
3, // [3:3] is the sub-list for extension type_name
3, // [3:3] is the sub-list for extension extendee
0, // [0:3] is the sub-list for field type_name
}
func init() { file_parca_debuginfo_v1alpha1_debuginfo_proto_init() }
func file_parca_debuginfo_v1alpha1_debuginfo_proto_init() {
if File_parca_debuginfo_v1alpha1_debuginfo_proto != nil {
return
}
if !protoimpl.UnsafeEnabled {
file_parca_debuginfo_v1alpha1_debuginfo_proto_msgTypes[0].Exporter = func(v interface{}, i int) interface{} {
switch v := v.(*ExistsRequest); i {
case 0:
return &v.state
case 1:
return &v.sizeCache
case 2:
return &v.unknownFields
default:
return nil
}
}
file_parca_debuginfo_v1alpha1_debuginfo_proto_msgTypes[1].Exporter = func(v interface{}, i int) interface{} {
switch v := v.(*ExistsResponse); i {
case 0:
return &v.state
case 1:
return &v.sizeCache
case 2:
return &v.unknownFields
default:
return nil
}
}
file_parca_debuginfo_v1alpha1_debuginfo_proto_msgTypes[2].Exporter = func(v interface{}, i int) interface{} {
switch v := v.(*UploadRequest); i {
case 0:
return &v.state
case 1:
return &v.sizeCache
case 2:
return &v.unknownFields
default:
return nil
}
}
file_parca_debuginfo_v1alpha1_debuginfo_proto_msgTypes[3].Exporter = func(v interface{}, i int) interface{} {
switch v := v.(*UploadInfo); i {
case 0:
return &v.state
case 1:
return &v.sizeCache
case 2:
return &v.unknownFields
default:
return nil
}
}
file_parca_debuginfo_v1alpha1_debuginfo_proto_msgTypes[4].Exporter = func(v interface{}, i int) interface{} {
switch v := v.(*UploadResponse); i {
case 0:
return &v.state
case 1:
return &v.sizeCache
case 2:
return &v.unknownFields
default:
return nil
}
}
file_parca_debuginfo_v1alpha1_debuginfo_proto_msgTypes[5].Exporter = func(v interface{}, i int) interface{} {
switch v := v.(*DownloadRequest); i {
case 0:
return &v.state
case 1:
return &v.sizeCache
case 2:
return &v.unknownFields
default:
return nil
}
}
file_parca_debuginfo_v1alpha1_debuginfo_proto_msgTypes[6].Exporter = func(v interface{}, i int) interface{} {
switch v := v.(*DownloadResponse); i {
case 0:
return &v.state
case 1:
return &v.sizeCache
case 2:
return &v.unknownFields
default:
return nil
}
}
file_parca_debuginfo_v1alpha1_debuginfo_proto_msgTypes[7].Exporter = func(v interface{}, i int) interface{} {
switch v := v.(*DownloadInfo); i {
case 0:
return &v.state
case 1:
return &v.sizeCache
case 2:
return &v.unknownFields
default:
return nil
}
}
}
file_parca_debuginfo_v1alpha1_debuginfo_proto_msgTypes[2].OneofWrappers = []interface{}{
(*UploadRequest_Info)(nil),
(*UploadRequest_ChunkData)(nil),
}
file_parca_debuginfo_v1alpha1_debuginfo_proto_msgTypes[6].OneofWrappers = []interface{}{
(*DownloadResponse_Info)(nil),
(*DownloadResponse_ChunkData)(nil),
}
type x struct{}
out := protoimpl.TypeBuilder{
File: protoimpl.DescBuilder{
GoPackagePath: reflect.TypeOf(x{}).PkgPath(),
RawDescriptor: file_parca_debuginfo_v1alpha1_debuginfo_proto_rawDesc,
NumEnums: 1,
NumMessages: 8,
NumExtensions: 0,
NumServices: 1,
},
GoTypes: file_parca_debuginfo_v1alpha1_debuginfo_proto_goTypes,
DependencyIndexes: file_parca_debuginfo_v1alpha1_debuginfo_proto_depIdxs,
EnumInfos: file_parca_debuginfo_v1alpha1_debuginfo_proto_enumTypes,
MessageInfos: file_parca_debuginfo_v1alpha1_debuginfo_proto_msgTypes,
}.Build()
File_parca_debuginfo_v1alpha1_debuginfo_proto = out.File
file_parca_debuginfo_v1alpha1_debuginfo_proto_rawDesc = nil
file_parca_debuginfo_v1alpha1_debuginfo_proto_goTypes = nil
file_parca_debuginfo_v1alpha1_debuginfo_proto_depIdxs = nil
}

File diff suppressed because it is too large


@ -0,0 +1,137 @@
// Code generated by protoc-gen-connect-go. DO NOT EDIT.
//
// Source: parca/debuginfo/v1alpha1/debuginfo.proto
package debuginfov1alpha1connect
import (
context "context"
errors "errors"
connect_go "github.com/bufbuild/connect-go"
v1alpha1 "github.com/parca-dev/parca/gen/proto/go/parca/debuginfo/v1alpha1"
http "net/http"
strings "strings"
)
// This is a compile-time assertion to ensure that this generated file and the connect package are
// compatible. If you get a compiler error that this constant is not defined, this code was
// generated with a version of connect newer than the one compiled into your binary. You can fix the
// problem by either regenerating this code with an older version of connect or updating the connect
// version compiled into your binary.
const _ = connect_go.IsAtLeastVersion0_1_0
const (
// DebugInfoServiceName is the fully-qualified name of the DebugInfoService service.
DebugInfoServiceName = "parca.debuginfo.v1alpha1.DebugInfoService"
)
// DebugInfoServiceClient is a client for the parca.debuginfo.v1alpha1.DebugInfoService service.
type DebugInfoServiceClient interface {
// Exists returns true if the given build_id has debug info uploaded for it.
Exists(context.Context, *connect_go.Request[v1alpha1.ExistsRequest]) (*connect_go.Response[v1alpha1.ExistsResponse], error)
// Upload ingests debug info for a given build_id.
Upload(context.Context) *connect_go.ClientStreamForClient[v1alpha1.UploadRequest, v1alpha1.UploadResponse]
// Download returns the debug info for a given build_id.
Download(context.Context, *connect_go.Request[v1alpha1.DownloadRequest]) (*connect_go.ServerStreamForClient[v1alpha1.DownloadResponse], error)
}
// NewDebugInfoServiceClient constructs a client for the parca.debuginfo.v1alpha1.DebugInfoService
// service. By default, it uses the Connect protocol with the binary Protobuf Codec, asks for
// gzipped responses, and sends uncompressed requests. To use the gRPC or gRPC-Web protocols, supply
// the connect.WithGRPC() or connect.WithGRPCWeb() options.
//
// The URL supplied here should be the base URL for the Connect or gRPC server (for example,
// http://api.acme.com or https://acme.com/grpc).
func NewDebugInfoServiceClient(httpClient connect_go.HTTPClient, baseURL string, opts ...connect_go.ClientOption) DebugInfoServiceClient {
baseURL = strings.TrimRight(baseURL, "/")
return &debugInfoServiceClient{
exists: connect_go.NewClient[v1alpha1.ExistsRequest, v1alpha1.ExistsResponse](
httpClient,
baseURL+"/parca.debuginfo.v1alpha1.DebugInfoService/Exists",
opts...,
),
upload: connect_go.NewClient[v1alpha1.UploadRequest, v1alpha1.UploadResponse](
httpClient,
baseURL+"/parca.debuginfo.v1alpha1.DebugInfoService/Upload",
opts...,
),
download: connect_go.NewClient[v1alpha1.DownloadRequest, v1alpha1.DownloadResponse](
httpClient,
baseURL+"/parca.debuginfo.v1alpha1.DebugInfoService/Download",
opts...,
),
}
}
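A minimal sketch of using the constructor above; the package name, listen address, build ID, and import paths are assumptions for illustration and not part of the generated code:

package example

import (
	"context"
	"fmt"
	"log"
	"net/http"

	connect_go "github.com/bufbuild/connect-go"
	v1alpha1 "github.com/parca-dev/parca/gen/proto/go/parca/debuginfo/v1alpha1"
	debuginfov1alpha1connect "github.com/parca-dev/parca/gen/proto/go/parca/debuginfo/v1alpha1/debuginfov1alpha1connect"
)

func checkExists(buildID string) {
	// Construct a client that speaks the Connect protocol over plain HTTP.
	client := debuginfov1alpha1connect.NewDebugInfoServiceClient(
		http.DefaultClient,
		"http://localhost:7070", // placeholder Parca API address
	)
	res, err := client.Exists(context.Background(), connect_go.NewRequest(&v1alpha1.ExistsRequest{
		BuildId: buildID,
	}))
	if err != nil {
		log.Fatal(err)
	}
	fmt.Println("debug info already uploaded:", res.Msg.Exists)
}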
// debugInfoServiceClient implements DebugInfoServiceClient.
type debugInfoServiceClient struct {
exists *connect_go.Client[v1alpha1.ExistsRequest, v1alpha1.ExistsResponse]
upload *connect_go.Client[v1alpha1.UploadRequest, v1alpha1.UploadResponse]
download *connect_go.Client[v1alpha1.DownloadRequest, v1alpha1.DownloadResponse]
}
// Exists calls parca.debuginfo.v1alpha1.DebugInfoService.Exists.
func (c *debugInfoServiceClient) Exists(ctx context.Context, req *connect_go.Request[v1alpha1.ExistsRequest]) (*connect_go.Response[v1alpha1.ExistsResponse], error) {
return c.exists.CallUnary(ctx, req)
}
// Upload calls parca.debuginfo.v1alpha1.DebugInfoService.Upload.
func (c *debugInfoServiceClient) Upload(ctx context.Context) *connect_go.ClientStreamForClient[v1alpha1.UploadRequest, v1alpha1.UploadResponse] {
return c.upload.CallClientStream(ctx)
}
// Download calls parca.debuginfo.v1alpha1.DebugInfoService.Download.
func (c *debugInfoServiceClient) Download(ctx context.Context, req *connect_go.Request[v1alpha1.DownloadRequest]) (*connect_go.ServerStreamForClient[v1alpha1.DownloadResponse], error) {
return c.download.CallServerStream(ctx, req)
}
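A sketch of draining the Download server stream returned above; fetchDebugInfo is a hypothetical helper and assumes the same package and imports as the previous sketch:

func fetchDebugInfo(ctx context.Context, client debuginfov1alpha1connect.DebugInfoServiceClient, buildID string) ([]byte, error) {
	stream, err := client.Download(ctx, connect_go.NewRequest(&v1alpha1.DownloadRequest{BuildId: buildID}))
	if err != nil {
		return nil, err
	}
	defer stream.Close()
	var out []byte
	for stream.Receive() {
		switch data := stream.Msg().GetData().(type) {
		case *v1alpha1.DownloadResponse_Info:
			// the first message carries metadata about the debug info source
			_ = data.Info.GetSource()
		case *v1alpha1.DownloadResponse_ChunkData:
			// subsequent messages carry the raw bytes in chunks
			out = append(out, data.ChunkData...)
		}
	}
	if err := stream.Err(); err != nil {
		return nil, err
	}
	return out, nil
}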
// DebugInfoServiceHandler is an implementation of the parca.debuginfo.v1alpha1.DebugInfoService
// service.
type DebugInfoServiceHandler interface {
// Exists returns true if the given build_id has debug info uploaded for it.
Exists(context.Context, *connect_go.Request[v1alpha1.ExistsRequest]) (*connect_go.Response[v1alpha1.ExistsResponse], error)
// Upload ingests debug info for a given build_id.
Upload(context.Context, *connect_go.ClientStream[v1alpha1.UploadRequest]) (*connect_go.Response[v1alpha1.UploadResponse], error)
// Download returns the debug info for a given build_id.
Download(context.Context, *connect_go.Request[v1alpha1.DownloadRequest], *connect_go.ServerStream[v1alpha1.DownloadResponse]) error
}
// NewDebugInfoServiceHandler builds an HTTP handler from the service implementation. It returns the
// path on which to mount the handler and the handler itself.
//
// By default, handlers support the Connect, gRPC, and gRPC-Web protocols with the binary Protobuf
// and JSON codecs. They also support gzip compression.
func NewDebugInfoServiceHandler(svc DebugInfoServiceHandler, opts ...connect_go.HandlerOption) (string, http.Handler) {
mux := http.NewServeMux()
mux.Handle("/parca.debuginfo.v1alpha1.DebugInfoService/Exists", connect_go.NewUnaryHandler(
"/parca.debuginfo.v1alpha1.DebugInfoService/Exists",
svc.Exists,
opts...,
))
mux.Handle("/parca.debuginfo.v1alpha1.DebugInfoService/Upload", connect_go.NewClientStreamHandler(
"/parca.debuginfo.v1alpha1.DebugInfoService/Upload",
svc.Upload,
opts...,
))
mux.Handle("/parca.debuginfo.v1alpha1.DebugInfoService/Download", connect_go.NewServerStreamHandler(
"/parca.debuginfo.v1alpha1.DebugInfoService/Download",
svc.Download,
opts...,
))
return "/parca.debuginfo.v1alpha1.DebugInfoService/", mux
}
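A minimal sketch of mounting the returned handler, again assuming the hypothetical package and imports from the sketches above; the listen address is a placeholder. Connect and gRPC-Web clients can reach this over HTTP/1.1, while gRPC clients additionally need an HTTP/2-capable server (for example via golang.org/x/net/http2/h2c):

func serveDebugInfo(svc debuginfov1alpha1connect.DebugInfoServiceHandler) error {
	mux := http.NewServeMux()
	path, handler := debuginfov1alpha1connect.NewDebugInfoServiceHandler(svc)
	mux.Handle(path, handler)
	return http.ListenAndServe("localhost:7070", mux) // placeholder listen address
}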
// UnimplementedDebugInfoServiceHandler returns CodeUnimplemented from all methods.
type UnimplementedDebugInfoServiceHandler struct{}
func (UnimplementedDebugInfoServiceHandler) Exists(context.Context, *connect_go.Request[v1alpha1.ExistsRequest]) (*connect_go.Response[v1alpha1.ExistsResponse], error) {
return nil, connect_go.NewError(connect_go.CodeUnimplemented, errors.New("parca.debuginfo.v1alpha1.DebugInfoService.Exists is not implemented"))
}
func (UnimplementedDebugInfoServiceHandler) Upload(context.Context, *connect_go.ClientStream[v1alpha1.UploadRequest]) (*connect_go.Response[v1alpha1.UploadResponse], error) {
return nil, connect_go.NewError(connect_go.CodeUnimplemented, errors.New("parca.debuginfo.v1alpha1.DebugInfoService.Upload is not implemented"))
}
func (UnimplementedDebugInfoServiceHandler) Download(context.Context, *connect_go.Request[v1alpha1.DownloadRequest], *connect_go.ServerStream[v1alpha1.DownloadResponse]) error {
return connect_go.NewError(connect_go.CodeUnimplemented, errors.New("parca.debuginfo.v1alpha1.DebugInfoService.Download is not implemented"))
}
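A sketch of a partial server implementation that embeds the Unimplemented type so that only Exists has to be written, with the remaining methods answering CodeUnimplemented; existsByBuildID is a hypothetical lookup and the sketch assumes the same imports as above:

type partialDebugInfoService struct {
	debuginfov1alpha1connect.UnimplementedDebugInfoServiceHandler
	existsByBuildID map[string]bool
}

func (s *partialDebugInfoService) Exists(ctx context.Context, req *connect_go.Request[v1alpha1.ExistsRequest]) (*connect_go.Response[v1alpha1.ExistsResponse], error) {
	// Report whether debug info for the requested build ID is already known.
	return connect_go.NewResponse(&v1alpha1.ExistsResponse{
		Exists: s.existsByBuildID[req.Msg.BuildId],
	}), nil
}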

File diff suppressed because it is too large

File diff suppressed because it is too large


@ -0,0 +1,327 @@
// Code generated by protoc-gen-connect-go. DO NOT EDIT.
//
// Source: parca/metastore/v1alpha1/metastore.proto
package metastorev1alpha1connect
import (
context "context"
errors "errors"
connect_go "github.com/bufbuild/connect-go"
v1alpha1 "github.com/parca-dev/parca/gen/proto/go/parca/metastore/v1alpha1"
http "net/http"
strings "strings"
)
// This is a compile-time assertion to ensure that this generated file and the connect package are
// compatible. If you get a compiler error that this constant is not defined, this code was
// generated with a version of connect newer than the one compiled into your binary. You can fix the
// problem by either regenerating this code with an older version of connect or updating the connect
// version compiled into your binary.
const _ = connect_go.IsAtLeastVersion0_1_0
const (
// MetastoreServiceName is the fully-qualified name of the MetastoreService service.
MetastoreServiceName = "parca.metastore.v1alpha1.MetastoreService"
)
// MetastoreServiceClient is a client for the parca.metastore.v1alpha1.MetastoreService service.
type MetastoreServiceClient interface {
// GetOrCreateMappings checks if the mappings in the request are already
// known and returns the known mapping (including its ID) if so. If a mapping
// does not already exist, it is written to the backing metastore.
GetOrCreateMappings(context.Context, *connect_go.Request[v1alpha1.GetOrCreateMappingsRequest]) (*connect_go.Response[v1alpha1.GetOrCreateMappingsResponse], error)
// GetOrCreateFunctions checks if the functions in the request are already
// known and returns the known function (including its ID) if so. If a
// function does not already exist, it is written to the backing metastore.
GetOrCreateFunctions(context.Context, *connect_go.Request[v1alpha1.GetOrCreateFunctionsRequest]) (*connect_go.Response[v1alpha1.GetOrCreateFunctionsResponse], error)
// GetOrCreateLocations checks if the locations in the request are already
// known and returns the known location (including its ID) if so. If a
// location does not already exist, it is written to the backing metastore.
GetOrCreateLocations(context.Context, *connect_go.Request[v1alpha1.GetOrCreateLocationsRequest]) (*connect_go.Response[v1alpha1.GetOrCreateLocationsResponse], error)
// GetOrCreateStacktraces checks if the stacktraces in the request are
// already known and returns the known stacktrace (including its ID) if so.
// If a stacktrace does not already exist, it is written to the backing
// metastore.
GetOrCreateStacktraces(context.Context, *connect_go.Request[v1alpha1.GetOrCreateStacktracesRequest]) (*connect_go.Response[v1alpha1.GetOrCreateStacktracesResponse], error)
// UnsymbolizedLocations returns locations that can be symbolized but haven't
// been asynchronously symbolized yet.
UnsymbolizedLocations(context.Context, *connect_go.Request[v1alpha1.UnsymbolizedLocationsRequest]) (*connect_go.Response[v1alpha1.UnsymbolizedLocationsResponse], error)
// CreateLocationLines creates the location lines contained in the provided
// locations.
CreateLocationLines(context.Context, *connect_go.Request[v1alpha1.CreateLocationLinesRequest]) (*connect_go.Response[v1alpha1.CreateLocationLinesResponse], error)
// Locations retrieves locations.
Locations(context.Context, *connect_go.Request[v1alpha1.LocationsRequest]) (*connect_go.Response[v1alpha1.LocationsResponse], error)
// Functions retrieves functions.
Functions(context.Context, *connect_go.Request[v1alpha1.FunctionsRequest]) (*connect_go.Response[v1alpha1.FunctionsResponse], error)
// Mappings retrieves mappings.
Mappings(context.Context, *connect_go.Request[v1alpha1.MappingsRequest]) (*connect_go.Response[v1alpha1.MappingsResponse], error)
// Stacktraces retrieves stacktraces.
Stacktraces(context.Context, *connect_go.Request[v1alpha1.StacktracesRequest]) (*connect_go.Response[v1alpha1.StacktracesResponse], error)
}
// NewMetastoreServiceClient constructs a client for the parca.metastore.v1alpha1.MetastoreService
// service. By default, it uses the Connect protocol with the binary Protobuf Codec, asks for
// gzipped responses, and sends uncompressed requests. To use the gRPC or gRPC-Web protocols, supply
// the connect.WithGRPC() or connect.WithGRPCWeb() options.
//
// The URL supplied here should be the base URL for the Connect or gRPC server (for example,
// http://api.acme.com or https://acme.com/grpc).
func NewMetastoreServiceClient(httpClient connect_go.HTTPClient, baseURL string, opts ...connect_go.ClientOption) MetastoreServiceClient {
baseURL = strings.TrimRight(baseURL, "/")
return &metastoreServiceClient{
getOrCreateMappings: connect_go.NewClient[v1alpha1.GetOrCreateMappingsRequest, v1alpha1.GetOrCreateMappingsResponse](
httpClient,
baseURL+"/parca.metastore.v1alpha1.MetastoreService/GetOrCreateMappings",
opts...,
),
getOrCreateFunctions: connect_go.NewClient[v1alpha1.GetOrCreateFunctionsRequest, v1alpha1.GetOrCreateFunctionsResponse](
httpClient,
baseURL+"/parca.metastore.v1alpha1.MetastoreService/GetOrCreateFunctions",
opts...,
),
getOrCreateLocations: connect_go.NewClient[v1alpha1.GetOrCreateLocationsRequest, v1alpha1.GetOrCreateLocationsResponse](
httpClient,
baseURL+"/parca.metastore.v1alpha1.MetastoreService/GetOrCreateLocations",
opts...,
),
getOrCreateStacktraces: connect_go.NewClient[v1alpha1.GetOrCreateStacktracesRequest, v1alpha1.GetOrCreateStacktracesResponse](
httpClient,
baseURL+"/parca.metastore.v1alpha1.MetastoreService/GetOrCreateStacktraces",
opts...,
),
unsymbolizedLocations: connect_go.NewClient[v1alpha1.UnsymbolizedLocationsRequest, v1alpha1.UnsymbolizedLocationsResponse](
httpClient,
baseURL+"/parca.metastore.v1alpha1.MetastoreService/UnsymbolizedLocations",
opts...,
),
createLocationLines: connect_go.NewClient[v1alpha1.CreateLocationLinesRequest, v1alpha1.CreateLocationLinesResponse](
httpClient,
baseURL+"/parca.metastore.v1alpha1.MetastoreService/CreateLocationLines",
opts...,
),
locations: connect_go.NewClient[v1alpha1.LocationsRequest, v1alpha1.LocationsResponse](
httpClient,
baseURL+"/parca.metastore.v1alpha1.MetastoreService/Locations",
opts...,
),
functions: connect_go.NewClient[v1alpha1.FunctionsRequest, v1alpha1.FunctionsResponse](
httpClient,
baseURL+"/parca.metastore.v1alpha1.MetastoreService/Functions",
opts...,
),
mappings: connect_go.NewClient[v1alpha1.MappingsRequest, v1alpha1.MappingsResponse](
httpClient,
baseURL+"/parca.metastore.v1alpha1.MetastoreService/Mappings",
opts...,
),
stacktraces: connect_go.NewClient[v1alpha1.StacktracesRequest, v1alpha1.StacktracesResponse](
httpClient,
baseURL+"/parca.metastore.v1alpha1.MetastoreService/Stacktraces",
opts...,
),
}
}
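The metastore client is constructed the same way; a minimal sketch of a unary call follows, where the package name, address, and import paths are assumptions and the request is left empty purely for illustration:

package example

import (
	"context"
	"log"
	"net/http"

	connect_go "github.com/bufbuild/connect-go"
	v1alpha1 "github.com/parca-dev/parca/gen/proto/go/parca/metastore/v1alpha1"
	metastorev1alpha1connect "github.com/parca-dev/parca/gen/proto/go/parca/metastore/v1alpha1/metastorev1alpha1connect"
)

func listMappings() {
	client := metastorev1alpha1connect.NewMetastoreServiceClient(http.DefaultClient, "http://localhost:7070")
	res, err := client.Mappings(context.Background(), connect_go.NewRequest(&v1alpha1.MappingsRequest{}))
	if err != nil {
		log.Fatal(err)
	}
	_ = res.Msg // inspect the returned mappings here
}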
// metastoreServiceClient implements MetastoreServiceClient.
type metastoreServiceClient struct {
getOrCreateMappings *connect_go.Client[v1alpha1.GetOrCreateMappingsRequest, v1alpha1.GetOrCreateMappingsResponse]
getOrCreateFunctions *connect_go.Client[v1alpha1.GetOrCreateFunctionsRequest, v1alpha1.GetOrCreateFunctionsResponse]
getOrCreateLocations *connect_go.Client[v1alpha1.GetOrCreateLocationsRequest, v1alpha1.GetOrCreateLocationsResponse]
getOrCreateStacktraces *connect_go.Client[v1alpha1.GetOrCreateStacktracesRequest, v1alpha1.GetOrCreateStacktracesResponse]
unsymbolizedLocations *connect_go.Client[v1alpha1.UnsymbolizedLocationsRequest, v1alpha1.UnsymbolizedLocationsResponse]
createLocationLines *connect_go.Client[v1alpha1.CreateLocationLinesRequest, v1alpha1.CreateLocationLinesResponse]
locations *connect_go.Client[v1alpha1.LocationsRequest, v1alpha1.LocationsResponse]
functions *connect_go.Client[v1alpha1.FunctionsRequest, v1alpha1.FunctionsResponse]
mappings *connect_go.Client[v1alpha1.MappingsRequest, v1alpha1.MappingsResponse]
stacktraces *connect_go.Client[v1alpha1.StacktracesRequest, v1alpha1.StacktracesResponse]
}
// GetOrCreateMappings calls parca.metastore.v1alpha1.MetastoreService.GetOrCreateMappings.
func (c *metastoreServiceClient) GetOrCreateMappings(ctx context.Context, req *connect_go.Request[v1alpha1.GetOrCreateMappingsRequest]) (*connect_go.Response[v1alpha1.GetOrCreateMappingsResponse], error) {
return c.getOrCreateMappings.CallUnary(ctx, req)
}
// GetOrCreateFunctions calls parca.metastore.v1alpha1.MetastoreService.GetOrCreateFunctions.
func (c *metastoreServiceClient) GetOrCreateFunctions(ctx context.Context, req *connect_go.Request[v1alpha1.GetOrCreateFunctionsRequest]) (*connect_go.Response[v1alpha1.GetOrCreateFunctionsResponse], error) {
return c.getOrCreateFunctions.CallUnary(ctx, req)
}
// GetOrCreateLocations calls parca.metastore.v1alpha1.MetastoreService.GetOrCreateLocations.
func (c *metastoreServiceClient) GetOrCreateLocations(ctx context.Context, req *connect_go.Request[v1alpha1.GetOrCreateLocationsRequest]) (*connect_go.Response[v1alpha1.GetOrCreateLocationsResponse], error) {
return c.getOrCreateLocations.CallUnary(ctx, req)
}
// GetOrCreateStacktraces calls parca.metastore.v1alpha1.MetastoreService.GetOrCreateStacktraces.
func (c *metastoreServiceClient) GetOrCreateStacktraces(ctx context.Context, req *connect_go.Request[v1alpha1.GetOrCreateStacktracesRequest]) (*connect_go.Response[v1alpha1.GetOrCreateStacktracesResponse], error) {
return c.getOrCreateStacktraces.CallUnary(ctx, req)
}
// UnsymbolizedLocations calls parca.metastore.v1alpha1.MetastoreService.UnsymbolizedLocations.
func (c *metastoreServiceClient) UnsymbolizedLocations(ctx context.Context, req *connect_go.Request[v1alpha1.UnsymbolizedLocationsRequest]) (*connect_go.Response[v1alpha1.UnsymbolizedLocationsResponse], error) {
return c.unsymbolizedLocations.CallUnary(ctx, req)
}
// CreateLocationLines calls parca.metastore.v1alpha1.MetastoreService.CreateLocationLines.
func (c *metastoreServiceClient) CreateLocationLines(ctx context.Context, req *connect_go.Request[v1alpha1.CreateLocationLinesRequest]) (*connect_go.Response[v1alpha1.CreateLocationLinesResponse], error) {
return c.createLocationLines.CallUnary(ctx, req)
}
// Locations calls parca.metastore.v1alpha1.MetastoreService.Locations.
func (c *metastoreServiceClient) Locations(ctx context.Context, req *connect_go.Request[v1alpha1.LocationsRequest]) (*connect_go.Response[v1alpha1.LocationsResponse], error) {
return c.locations.CallUnary(ctx, req)
}
// Functions calls parca.metastore.v1alpha1.MetastoreService.Functions.
func (c *metastoreServiceClient) Functions(ctx context.Context, req *connect_go.Request[v1alpha1.FunctionsRequest]) (*connect_go.Response[v1alpha1.FunctionsResponse], error) {
return c.functions.CallUnary(ctx, req)
}
// Mappings calls parca.metastore.v1alpha1.MetastoreService.Mappings.
func (c *metastoreServiceClient) Mappings(ctx context.Context, req *connect_go.Request[v1alpha1.MappingsRequest]) (*connect_go.Response[v1alpha1.MappingsResponse], error) {
return c.mappings.CallUnary(ctx, req)
}
// Stacktraces calls parca.metastore.v1alpha1.MetastoreService.Stacktraces.
func (c *metastoreServiceClient) Stacktraces(ctx context.Context, req *connect_go.Request[v1alpha1.StacktracesRequest]) (*connect_go.Response[v1alpha1.StacktracesResponse], error) {
return c.stacktraces.CallUnary(ctx, req)
}
// MetastoreServiceHandler is an implementation of the parca.metastore.v1alpha1.MetastoreService
// service.
type MetastoreServiceHandler interface {
// GetOrCreateMappings checks if the mappings in the request are already
// known and returns the known mapping (including its ID) if so. If a mapping
// does not already exist, it is written to the backing metastore.
GetOrCreateMappings(context.Context, *connect_go.Request[v1alpha1.GetOrCreateMappingsRequest]) (*connect_go.Response[v1alpha1.GetOrCreateMappingsResponse], error)
// GetOrCreateFunctions checks if the functions in the request are already
// known and returns the known function (including its ID) if so. If a
// function does not already exist, it is written to the backing metastore.
GetOrCreateFunctions(context.Context, *connect_go.Request[v1alpha1.GetOrCreateFunctionsRequest]) (*connect_go.Response[v1alpha1.GetOrCreateFunctionsResponse], error)
// GetOrCreateLocations checks if the locations in the request are already
// known and returns the known location (including its ID) if so. If a
// location does not already exist, it is written to the backing metastore.
GetOrCreateLocations(context.Context, *connect_go.Request[v1alpha1.GetOrCreateLocationsRequest]) (*connect_go.Response[v1alpha1.GetOrCreateLocationsResponse], error)
// GetOrCreateStacktraces checks if the stacktraces in the request are
// already known and returns the known stacktrace (including its ID) if so.
// If a stacktrace does not already exist, it is written to the backing
// metastore.
GetOrCreateStacktraces(context.Context, *connect_go.Request[v1alpha1.GetOrCreateStacktracesRequest]) (*connect_go.Response[v1alpha1.GetOrCreateStacktracesResponse], error)
// UnsymbolizedLocations returns locations that can be symbolized but haven't
// been asynchronously symbolized yet.
UnsymbolizedLocations(context.Context, *connect_go.Request[v1alpha1.UnsymbolizedLocationsRequest]) (*connect_go.Response[v1alpha1.UnsymbolizedLocationsResponse], error)
// CreateLocationLines creates the location lines contained in the provided
// locations.
CreateLocationLines(context.Context, *connect_go.Request[v1alpha1.CreateLocationLinesRequest]) (*connect_go.Response[v1alpha1.CreateLocationLinesResponse], error)
// Locations retrieves locations.
Locations(context.Context, *connect_go.Request[v1alpha1.LocationsRequest]) (*connect_go.Response[v1alpha1.LocationsResponse], error)
// Functions retrieves functions.
Functions(context.Context, *connect_go.Request[v1alpha1.FunctionsRequest]) (*connect_go.Response[v1alpha1.FunctionsResponse], error)
// Mappings retrieves mappings.
Mappings(context.Context, *connect_go.Request[v1alpha1.MappingsRequest]) (*connect_go.Response[v1alpha1.MappingsResponse], error)
// Stacktraces retrieves stacktraces.
Stacktraces(context.Context, *connect_go.Request[v1alpha1.StacktracesRequest]) (*connect_go.Response[v1alpha1.StacktracesResponse], error)
}
// NewMetastoreServiceHandler builds an HTTP handler from the service implementation. It returns the
// path on which to mount the handler and the handler itself.
//
// By default, handlers support the Connect, gRPC, and gRPC-Web protocols with the binary Protobuf
// and JSON codecs. They also support gzip compression.
func NewMetastoreServiceHandler(svc MetastoreServiceHandler, opts ...connect_go.HandlerOption) (string, http.Handler) {
mux := http.NewServeMux()
mux.Handle("/parca.metastore.v1alpha1.MetastoreService/GetOrCreateMappings", connect_go.NewUnaryHandler(
"/parca.metastore.v1alpha1.MetastoreService/GetOrCreateMappings",
svc.GetOrCreateMappings,
opts...,
))
mux.Handle("/parca.metastore.v1alpha1.MetastoreService/GetOrCreateFunctions", connect_go.NewUnaryHandler(
"/parca.metastore.v1alpha1.MetastoreService/GetOrCreateFunctions",
svc.GetOrCreateFunctions,
opts...,
))
mux.Handle("/parca.metastore.v1alpha1.MetastoreService/GetOrCreateLocations", connect_go.NewUnaryHandler(
"/parca.metastore.v1alpha1.MetastoreService/GetOrCreateLocations",
svc.GetOrCreateLocations,
opts...,
))
mux.Handle("/parca.metastore.v1alpha1.MetastoreService/GetOrCreateStacktraces", connect_go.NewUnaryHandler(
"/parca.metastore.v1alpha1.MetastoreService/GetOrCreateStacktraces",
svc.GetOrCreateStacktraces,
opts...,
))
mux.Handle("/parca.metastore.v1alpha1.MetastoreService/UnsymbolizedLocations", connect_go.NewUnaryHandler(
"/parca.metastore.v1alpha1.MetastoreService/UnsymbolizedLocations",
svc.UnsymbolizedLocations,
opts...,
))
mux.Handle("/parca.metastore.v1alpha1.MetastoreService/CreateLocationLines", connect_go.NewUnaryHandler(
"/parca.metastore.v1alpha1.MetastoreService/CreateLocationLines",
svc.CreateLocationLines,
opts...,
))
mux.Handle("/parca.metastore.v1alpha1.MetastoreService/Locations", connect_go.NewUnaryHandler(
"/parca.metastore.v1alpha1.MetastoreService/Locations",
svc.Locations,
opts...,
))
mux.Handle("/parca.metastore.v1alpha1.MetastoreService/Functions", connect_go.NewUnaryHandler(
"/parca.metastore.v1alpha1.MetastoreService/Functions",
svc.Functions,
opts...,
))
mux.Handle("/parca.metastore.v1alpha1.MetastoreService/Mappings", connect_go.NewUnaryHandler(
"/parca.metastore.v1alpha1.MetastoreService/Mappings",
svc.Mappings,
opts...,
))
mux.Handle("/parca.metastore.v1alpha1.MetastoreService/Stacktraces", connect_go.NewUnaryHandler(
"/parca.metastore.v1alpha1.MetastoreService/Stacktraces",
svc.Stacktraces,
opts...,
))
return "/parca.metastore.v1alpha1.MetastoreService/", mux
}
// UnimplementedMetastoreServiceHandler returns CodeUnimplemented from all methods.
type UnimplementedMetastoreServiceHandler struct{}
func (UnimplementedMetastoreServiceHandler) GetOrCreateMappings(context.Context, *connect_go.Request[v1alpha1.GetOrCreateMappingsRequest]) (*connect_go.Response[v1alpha1.GetOrCreateMappingsResponse], error) {
return nil, connect_go.NewError(connect_go.CodeUnimplemented, errors.New("parca.metastore.v1alpha1.MetastoreService.GetOrCreateMappings is not implemented"))
}
func (UnimplementedMetastoreServiceHandler) GetOrCreateFunctions(context.Context, *connect_go.Request[v1alpha1.GetOrCreateFunctionsRequest]) (*connect_go.Response[v1alpha1.GetOrCreateFunctionsResponse], error) {
return nil, connect_go.NewError(connect_go.CodeUnimplemented, errors.New("parca.metastore.v1alpha1.MetastoreService.GetOrCreateFunctions is not implemented"))
}
func (UnimplementedMetastoreServiceHandler) GetOrCreateLocations(context.Context, *connect_go.Request[v1alpha1.GetOrCreateLocationsRequest]) (*connect_go.Response[v1alpha1.GetOrCreateLocationsResponse], error) {
return nil, connect_go.NewError(connect_go.CodeUnimplemented, errors.New("parca.metastore.v1alpha1.MetastoreService.GetOrCreateLocations is not implemented"))
}
func (UnimplementedMetastoreServiceHandler) GetOrCreateStacktraces(context.Context, *connect_go.Request[v1alpha1.GetOrCreateStacktracesRequest]) (*connect_go.Response[v1alpha1.GetOrCreateStacktracesResponse], error) {
return nil, connect_go.NewError(connect_go.CodeUnimplemented, errors.New("parca.metastore.v1alpha1.MetastoreService.GetOrCreateStacktraces is not implemented"))
}
func (UnimplementedMetastoreServiceHandler) UnsymbolizedLocations(context.Context, *connect_go.Request[v1alpha1.UnsymbolizedLocationsRequest]) (*connect_go.Response[v1alpha1.UnsymbolizedLocationsResponse], error) {
return nil, connect_go.NewError(connect_go.CodeUnimplemented, errors.New("parca.metastore.v1alpha1.MetastoreService.UnsymbolizedLocations is not implemented"))
}
func (UnimplementedMetastoreServiceHandler) CreateLocationLines(context.Context, *connect_go.Request[v1alpha1.CreateLocationLinesRequest]) (*connect_go.Response[v1alpha1.CreateLocationLinesResponse], error) {
return nil, connect_go.NewError(connect_go.CodeUnimplemented, errors.New("parca.metastore.v1alpha1.MetastoreService.CreateLocationLines is not implemented"))
}
func (UnimplementedMetastoreServiceHandler) Locations(context.Context, *connect_go.Request[v1alpha1.LocationsRequest]) (*connect_go.Response[v1alpha1.LocationsResponse], error) {
return nil, connect_go.NewError(connect_go.CodeUnimplemented, errors.New("parca.metastore.v1alpha1.MetastoreService.Locations is not implemented"))
}
func (UnimplementedMetastoreServiceHandler) Functions(context.Context, *connect_go.Request[v1alpha1.FunctionsRequest]) (*connect_go.Response[v1alpha1.FunctionsResponse], error) {
return nil, connect_go.NewError(connect_go.CodeUnimplemented, errors.New("parca.metastore.v1alpha1.MetastoreService.Functions is not implemented"))
}
func (UnimplementedMetastoreServiceHandler) Mappings(context.Context, *connect_go.Request[v1alpha1.MappingsRequest]) (*connect_go.Response[v1alpha1.MappingsResponse], error) {
return nil, connect_go.NewError(connect_go.CodeUnimplemented, errors.New("parca.metastore.v1alpha1.MetastoreService.Mappings is not implemented"))
}
func (UnimplementedMetastoreServiceHandler) Stacktraces(context.Context, *connect_go.Request[v1alpha1.StacktracesRequest]) (*connect_go.Response[v1alpha1.StacktracesResponse], error) {
return nil, connect_go.NewError(connect_go.CodeUnimplemented, errors.New("parca.metastore.v1alpha1.MetastoreService.Stacktraces is not implemented"))
}


@ -0,0 +1,550 @@
// Code generated by protoc-gen-go. DO NOT EDIT.
// versions:
// protoc-gen-go v1.28.1
// protoc (unknown)
// source: parca/profilestore/v1alpha1/profilestore.proto
package profilestorev1alpha1
import (
_ "google.golang.org/genproto/googleapis/api/annotations"
protoreflect "google.golang.org/protobuf/reflect/protoreflect"
protoimpl "google.golang.org/protobuf/runtime/protoimpl"
reflect "reflect"
sync "sync"
)
const (
// Verify that this generated code is sufficiently up-to-date.
_ = protoimpl.EnforceVersion(20 - protoimpl.MinVersion)
// Verify that runtime/protoimpl is sufficiently up-to-date.
_ = protoimpl.EnforceVersion(protoimpl.MaxVersion - 20)
)
// WriteRawRequest writes a pprof profile for a given tenant
type WriteRawRequest struct {
state protoimpl.MessageState
sizeCache protoimpl.SizeCache
unknownFields protoimpl.UnknownFields
// tenant is the given tenant to store the pprof profile under
//
// Deprecated: Do not use.
Tenant string `protobuf:"bytes,1,opt,name=tenant,proto3" json:"tenant,omitempty"`
// series is a set of raw pprof profiles and accompanying labels
Series []*RawProfileSeries `protobuf:"bytes,2,rep,name=series,proto3" json:"series,omitempty"`
// normalized is a flag indicating whether the addresses in the profile are normalized for position-independent code
Normalized bool `protobuf:"varint,3,opt,name=normalized,proto3" json:"normalized,omitempty"`
}
func (x *WriteRawRequest) Reset() {
*x = WriteRawRequest{}
if protoimpl.UnsafeEnabled {
mi := &file_parca_profilestore_v1alpha1_profilestore_proto_msgTypes[0]
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
ms.StoreMessageInfo(mi)
}
}
func (x *WriteRawRequest) String() string {
return protoimpl.X.MessageStringOf(x)
}
func (*WriteRawRequest) ProtoMessage() {}
func (x *WriteRawRequest) ProtoReflect() protoreflect.Message {
mi := &file_parca_profilestore_v1alpha1_profilestore_proto_msgTypes[0]
if protoimpl.UnsafeEnabled && x != nil {
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
if ms.LoadMessageInfo() == nil {
ms.StoreMessageInfo(mi)
}
return ms
}
return mi.MessageOf(x)
}
// Deprecated: Use WriteRawRequest.ProtoReflect.Descriptor instead.
func (*WriteRawRequest) Descriptor() ([]byte, []int) {
return file_parca_profilestore_v1alpha1_profilestore_proto_rawDescGZIP(), []int{0}
}
// Deprecated: Do not use.
func (x *WriteRawRequest) GetTenant() string {
if x != nil {
return x.Tenant
}
return ""
}
func (x *WriteRawRequest) GetSeries() []*RawProfileSeries {
if x != nil {
return x.Series
}
return nil
}
func (x *WriteRawRequest) GetNormalized() bool {
if x != nil {
return x.Normalized
}
return false
}
// WriteRawResponse is the empty response
type WriteRawResponse struct {
state protoimpl.MessageState
sizeCache protoimpl.SizeCache
unknownFields protoimpl.UnknownFields
}
func (x *WriteRawResponse) Reset() {
*x = WriteRawResponse{}
if protoimpl.UnsafeEnabled {
mi := &file_parca_profilestore_v1alpha1_profilestore_proto_msgTypes[1]
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
ms.StoreMessageInfo(mi)
}
}
func (x *WriteRawResponse) String() string {
return protoimpl.X.MessageStringOf(x)
}
func (*WriteRawResponse) ProtoMessage() {}
func (x *WriteRawResponse) ProtoReflect() protoreflect.Message {
mi := &file_parca_profilestore_v1alpha1_profilestore_proto_msgTypes[1]
if protoimpl.UnsafeEnabled && x != nil {
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
if ms.LoadMessageInfo() == nil {
ms.StoreMessageInfo(mi)
}
return ms
}
return mi.MessageOf(x)
}
// Deprecated: Use WriteRawResponse.ProtoReflect.Descriptor instead.
func (*WriteRawResponse) Descriptor() ([]byte, []int) {
return file_parca_profilestore_v1alpha1_profilestore_proto_rawDescGZIP(), []int{1}
}
// RawProfileSeries represents the pprof profile and its associated labels
type RawProfileSeries struct {
state protoimpl.MessageState
sizeCache protoimpl.SizeCache
unknownFields protoimpl.UnknownFields
// labels is the set of key-value pairs that identify the corresponding profile
Labels *LabelSet `protobuf:"bytes,1,opt,name=labels,proto3" json:"labels,omitempty"`
// samples are the set of profile bytes
Samples []*RawSample `protobuf:"bytes,2,rep,name=samples,proto3" json:"samples,omitempty"`
}
func (x *RawProfileSeries) Reset() {
*x = RawProfileSeries{}
if protoimpl.UnsafeEnabled {
mi := &file_parca_profilestore_v1alpha1_profilestore_proto_msgTypes[2]
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
ms.StoreMessageInfo(mi)
}
}
func (x *RawProfileSeries) String() string {
return protoimpl.X.MessageStringOf(x)
}
func (*RawProfileSeries) ProtoMessage() {}
func (x *RawProfileSeries) ProtoReflect() protoreflect.Message {
mi := &file_parca_profilestore_v1alpha1_profilestore_proto_msgTypes[2]
if protoimpl.UnsafeEnabled && x != nil {
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
if ms.LoadMessageInfo() == nil {
ms.StoreMessageInfo(mi)
}
return ms
}
return mi.MessageOf(x)
}
// Deprecated: Use RawProfileSeries.ProtoReflect.Descriptor instead.
func (*RawProfileSeries) Descriptor() ([]byte, []int) {
return file_parca_profilestore_v1alpha1_profilestore_proto_rawDescGZIP(), []int{2}
}
func (x *RawProfileSeries) GetLabels() *LabelSet {
if x != nil {
return x.Labels
}
return nil
}
func (x *RawProfileSeries) GetSamples() []*RawSample {
if x != nil {
return x.Samples
}
return nil
}
// Label is a key value pair of identifiers
type Label struct {
state protoimpl.MessageState
sizeCache protoimpl.SizeCache
unknownFields protoimpl.UnknownFields
// name is the label name
Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"`
// value is the value for the label name
Value string `protobuf:"bytes,2,opt,name=value,proto3" json:"value,omitempty"`
}
func (x *Label) Reset() {
*x = Label{}
if protoimpl.UnsafeEnabled {
mi := &file_parca_profilestore_v1alpha1_profilestore_proto_msgTypes[3]
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
ms.StoreMessageInfo(mi)
}
}
func (x *Label) String() string {
return protoimpl.X.MessageStringOf(x)
}
func (*Label) ProtoMessage() {}
func (x *Label) ProtoReflect() protoreflect.Message {
mi := &file_parca_profilestore_v1alpha1_profilestore_proto_msgTypes[3]
if protoimpl.UnsafeEnabled && x != nil {
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
if ms.LoadMessageInfo() == nil {
ms.StoreMessageInfo(mi)
}
return ms
}
return mi.MessageOf(x)
}
// Deprecated: Use Label.ProtoReflect.Descriptor instead.
func (*Label) Descriptor() ([]byte, []int) {
return file_parca_profilestore_v1alpha1_profilestore_proto_rawDescGZIP(), []int{3}
}
func (x *Label) GetName() string {
if x != nil {
return x.Name
}
return ""
}
func (x *Label) GetValue() string {
if x != nil {
return x.Value
}
return ""
}
// LabelSet is a group of labels
type LabelSet struct {
state protoimpl.MessageState
sizeCache protoimpl.SizeCache
unknownFields protoimpl.UnknownFields
// labels are the labels that make up the set
Labels []*Label `protobuf:"bytes,1,rep,name=labels,proto3" json:"labels,omitempty"`
}
func (x *LabelSet) Reset() {
*x = LabelSet{}
if protoimpl.UnsafeEnabled {
mi := &file_parca_profilestore_v1alpha1_profilestore_proto_msgTypes[4]
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
ms.StoreMessageInfo(mi)
}
}
func (x *LabelSet) String() string {
return protoimpl.X.MessageStringOf(x)
}
func (*LabelSet) ProtoMessage() {}
func (x *LabelSet) ProtoReflect() protoreflect.Message {
mi := &file_parca_profilestore_v1alpha1_profilestore_proto_msgTypes[4]
if protoimpl.UnsafeEnabled && x != nil {
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
if ms.LoadMessageInfo() == nil {
ms.StoreMessageInfo(mi)
}
return ms
}
return mi.MessageOf(x)
}
// Deprecated: Use LabelSet.ProtoReflect.Descriptor instead.
func (*LabelSet) Descriptor() ([]byte, []int) {
return file_parca_profilestore_v1alpha1_profilestore_proto_rawDescGZIP(), []int{4}
}
func (x *LabelSet) GetLabels() []*Label {
if x != nil {
return x.Labels
}
return nil
}
// RawSample is the set of bytes that correspond to a pprof profile
type RawSample struct {
state protoimpl.MessageState
sizeCache protoimpl.SizeCache
unknownFields protoimpl.UnknownFields
// raw_profile is the set of bytes of the pprof profile
RawProfile []byte `protobuf:"bytes,1,opt,name=raw_profile,json=rawProfile,proto3" json:"raw_profile,omitempty"`
}
func (x *RawSample) Reset() {
*x = RawSample{}
if protoimpl.UnsafeEnabled {
mi := &file_parca_profilestore_v1alpha1_profilestore_proto_msgTypes[5]
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
ms.StoreMessageInfo(mi)
}
}
func (x *RawSample) String() string {
return protoimpl.X.MessageStringOf(x)
}
func (*RawSample) ProtoMessage() {}
func (x *RawSample) ProtoReflect() protoreflect.Message {
mi := &file_parca_profilestore_v1alpha1_profilestore_proto_msgTypes[5]
if protoimpl.UnsafeEnabled && x != nil {
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
if ms.LoadMessageInfo() == nil {
ms.StoreMessageInfo(mi)
}
return ms
}
return mi.MessageOf(x)
}
// Deprecated: Use RawSample.ProtoReflect.Descriptor instead.
func (*RawSample) Descriptor() ([]byte, []int) {
return file_parca_profilestore_v1alpha1_profilestore_proto_rawDescGZIP(), []int{5}
}
func (x *RawSample) GetRawProfile() []byte {
if x != nil {
return x.RawProfile
}
return nil
}
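A minimal sketch (hypothetical helper, written as if it lived in this package) of how these messages nest when preparing a WriteRaw call; the label names and rawPprof bytes are placeholders:

func newWriteRawRequest(rawPprof []byte) *WriteRawRequest {
	return &WriteRawRequest{
		Series: []*RawProfileSeries{{
			// Labels identify the series the raw samples belong to.
			Labels: &LabelSet{Labels: []*Label{
				{Name: "__name__", Value: "process_cpu"},  // placeholder profile name
				{Name: "job", Value: "example-service"},    // placeholder label
			}},
			// Each sample carries the serialized pprof bytes.
			Samples: []*RawSample{{RawProfile: rawPprof}},
		}},
	}
}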
var File_parca_profilestore_v1alpha1_profilestore_proto protoreflect.FileDescriptor
var file_parca_profilestore_v1alpha1_profilestore_proto_rawDesc = []byte{
0x0a, 0x2e, 0x70, 0x61, 0x72, 0x63, 0x61, 0x2f, 0x70, 0x72, 0x6f, 0x66, 0x69, 0x6c, 0x65, 0x73,
0x74, 0x6f, 0x72, 0x65, 0x2f, 0x76, 0x31, 0x61, 0x6c, 0x70, 0x68, 0x61, 0x31, 0x2f, 0x70, 0x72,
0x6f, 0x66, 0x69, 0x6c, 0x65, 0x73, 0x74, 0x6f, 0x72, 0x65, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f,
0x12, 0x1b, 0x70, 0x61, 0x72, 0x63, 0x61, 0x2e, 0x70, 0x72, 0x6f, 0x66, 0x69, 0x6c, 0x65, 0x73,
0x74, 0x6f, 0x72, 0x65, 0x2e, 0x76, 0x31, 0x61, 0x6c, 0x70, 0x68, 0x61, 0x31, 0x1a, 0x1c, 0x67,
0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2f, 0x61, 0x70, 0x69, 0x2f, 0x61, 0x6e, 0x6e, 0x6f, 0x74, 0x61,
0x74, 0x69, 0x6f, 0x6e, 0x73, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x22, 0x94, 0x01, 0x0a, 0x0f,
0x57, 0x72, 0x69, 0x74, 0x65, 0x52, 0x61, 0x77, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x12,
0x1a, 0x0a, 0x06, 0x74, 0x65, 0x6e, 0x61, 0x6e, 0x74, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x42,
0x02, 0x18, 0x01, 0x52, 0x06, 0x74, 0x65, 0x6e, 0x61, 0x6e, 0x74, 0x12, 0x45, 0x0a, 0x06, 0x73,
0x65, 0x72, 0x69, 0x65, 0x73, 0x18, 0x02, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x2d, 0x2e, 0x70, 0x61,
0x72, 0x63, 0x61, 0x2e, 0x70, 0x72, 0x6f, 0x66, 0x69, 0x6c, 0x65, 0x73, 0x74, 0x6f, 0x72, 0x65,
0x2e, 0x76, 0x31, 0x61, 0x6c, 0x70, 0x68, 0x61, 0x31, 0x2e, 0x52, 0x61, 0x77, 0x50, 0x72, 0x6f,
0x66, 0x69, 0x6c, 0x65, 0x53, 0x65, 0x72, 0x69, 0x65, 0x73, 0x52, 0x06, 0x73, 0x65, 0x72, 0x69,
0x65, 0x73, 0x12, 0x1e, 0x0a, 0x0a, 0x6e, 0x6f, 0x72, 0x6d, 0x61, 0x6c, 0x69, 0x7a, 0x65, 0x64,
0x18, 0x03, 0x20, 0x01, 0x28, 0x08, 0x52, 0x0a, 0x6e, 0x6f, 0x72, 0x6d, 0x61, 0x6c, 0x69, 0x7a,
0x65, 0x64, 0x22, 0x12, 0x0a, 0x10, 0x57, 0x72, 0x69, 0x74, 0x65, 0x52, 0x61, 0x77, 0x52, 0x65,
0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x22, 0x93, 0x01, 0x0a, 0x10, 0x52, 0x61, 0x77, 0x50, 0x72,
0x6f, 0x66, 0x69, 0x6c, 0x65, 0x53, 0x65, 0x72, 0x69, 0x65, 0x73, 0x12, 0x3d, 0x0a, 0x06, 0x6c,
0x61, 0x62, 0x65, 0x6c, 0x73, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x25, 0x2e, 0x70, 0x61,
0x72, 0x63, 0x61, 0x2e, 0x70, 0x72, 0x6f, 0x66, 0x69, 0x6c, 0x65, 0x73, 0x74, 0x6f, 0x72, 0x65,
0x2e, 0x76, 0x31, 0x61, 0x6c, 0x70, 0x68, 0x61, 0x31, 0x2e, 0x4c, 0x61, 0x62, 0x65, 0x6c, 0x53,
0x65, 0x74, 0x52, 0x06, 0x6c, 0x61, 0x62, 0x65, 0x6c, 0x73, 0x12, 0x40, 0x0a, 0x07, 0x73, 0x61,
0x6d, 0x70, 0x6c, 0x65, 0x73, 0x18, 0x02, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x26, 0x2e, 0x70, 0x61,
0x72, 0x63, 0x61, 0x2e, 0x70, 0x72, 0x6f, 0x66, 0x69, 0x6c, 0x65, 0x73, 0x74, 0x6f, 0x72, 0x65,
0x2e, 0x76, 0x31, 0x61, 0x6c, 0x70, 0x68, 0x61, 0x31, 0x2e, 0x52, 0x61, 0x77, 0x53, 0x61, 0x6d,
0x70, 0x6c, 0x65, 0x52, 0x07, 0x73, 0x61, 0x6d, 0x70, 0x6c, 0x65, 0x73, 0x22, 0x31, 0x0a, 0x05,
0x4c, 0x61, 0x62, 0x65, 0x6c, 0x12, 0x12, 0x0a, 0x04, 0x6e, 0x61, 0x6d, 0x65, 0x18, 0x01, 0x20,
0x01, 0x28, 0x09, 0x52, 0x04, 0x6e, 0x61, 0x6d, 0x65, 0x12, 0x14, 0x0a, 0x05, 0x76, 0x61, 0x6c,
0x75, 0x65, 0x18, 0x02, 0x20, 0x01, 0x28, 0x09, 0x52, 0x05, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x22,
0x46, 0x0a, 0x08, 0x4c, 0x61, 0x62, 0x65, 0x6c, 0x53, 0x65, 0x74, 0x12, 0x3a, 0x0a, 0x06, 0x6c,
0x61, 0x62, 0x65, 0x6c, 0x73, 0x18, 0x01, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x22, 0x2e, 0x70, 0x61,
0x72, 0x63, 0x61, 0x2e, 0x70, 0x72, 0x6f, 0x66, 0x69, 0x6c, 0x65, 0x73, 0x74, 0x6f, 0x72, 0x65,
0x2e, 0x76, 0x31, 0x61, 0x6c, 0x70, 0x68, 0x61, 0x31, 0x2e, 0x4c, 0x61, 0x62, 0x65, 0x6c, 0x52,
0x06, 0x6c, 0x61, 0x62, 0x65, 0x6c, 0x73, 0x22, 0x2c, 0x0a, 0x09, 0x52, 0x61, 0x77, 0x53, 0x61,
0x6d, 0x70, 0x6c, 0x65, 0x12, 0x1f, 0x0a, 0x0b, 0x72, 0x61, 0x77, 0x5f, 0x70, 0x72, 0x6f, 0x66,
0x69, 0x6c, 0x65, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0c, 0x52, 0x0a, 0x72, 0x61, 0x77, 0x50, 0x72,
0x6f, 0x66, 0x69, 0x6c, 0x65, 0x32, 0x9e, 0x01, 0x0a, 0x13, 0x50, 0x72, 0x6f, 0x66, 0x69, 0x6c,
0x65, 0x53, 0x74, 0x6f, 0x72, 0x65, 0x53, 0x65, 0x72, 0x76, 0x69, 0x63, 0x65, 0x12, 0x86, 0x01,
0x0a, 0x08, 0x57, 0x72, 0x69, 0x74, 0x65, 0x52, 0x61, 0x77, 0x12, 0x2c, 0x2e, 0x70, 0x61, 0x72,
0x63, 0x61, 0x2e, 0x70, 0x72, 0x6f, 0x66, 0x69, 0x6c, 0x65, 0x73, 0x74, 0x6f, 0x72, 0x65, 0x2e,
0x76, 0x31, 0x61, 0x6c, 0x70, 0x68, 0x61, 0x31, 0x2e, 0x57, 0x72, 0x69, 0x74, 0x65, 0x52, 0x61,
0x77, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, 0x2d, 0x2e, 0x70, 0x61, 0x72, 0x63, 0x61,
0x2e, 0x70, 0x72, 0x6f, 0x66, 0x69, 0x6c, 0x65, 0x73, 0x74, 0x6f, 0x72, 0x65, 0x2e, 0x76, 0x31,
0x61, 0x6c, 0x70, 0x68, 0x61, 0x31, 0x2e, 0x57, 0x72, 0x69, 0x74, 0x65, 0x52, 0x61, 0x77, 0x52,
0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x22, 0x1d, 0x82, 0xd3, 0xe4, 0x93, 0x02, 0x17, 0x3a,
0x01, 0x2a, 0x22, 0x12, 0x2f, 0x70, 0x72, 0x6f, 0x66, 0x69, 0x6c, 0x65, 0x73, 0x2f, 0x77, 0x72,
0x69, 0x74, 0x65, 0x72, 0x61, 0x77, 0x42, 0x9c, 0x02, 0x0a, 0x1f, 0x63, 0x6f, 0x6d, 0x2e, 0x70,
0x61, 0x72, 0x63, 0x61, 0x2e, 0x70, 0x72, 0x6f, 0x66, 0x69, 0x6c, 0x65, 0x73, 0x74, 0x6f, 0x72,
0x65, 0x2e, 0x76, 0x31, 0x61, 0x6c, 0x70, 0x68, 0x61, 0x31, 0x42, 0x11, 0x50, 0x72, 0x6f, 0x66,
0x69, 0x6c, 0x65, 0x73, 0x74, 0x6f, 0x72, 0x65, 0x50, 0x72, 0x6f, 0x74, 0x6f, 0x50, 0x01, 0x5a,
0x58, 0x67, 0x69, 0x74, 0x68, 0x75, 0x62, 0x2e, 0x63, 0x6f, 0x6d, 0x2f, 0x70, 0x61, 0x72, 0x63,
0x61, 0x2d, 0x64, 0x65, 0x76, 0x2f, 0x70, 0x61, 0x72, 0x63, 0x61, 0x2f, 0x67, 0x65, 0x6e, 0x2f,
0x70, 0x72, 0x6f, 0x74, 0x6f, 0x2f, 0x67, 0x6f, 0x2f, 0x70, 0x61, 0x72, 0x63, 0x61, 0x2f, 0x70,
0x72, 0x6f, 0x66, 0x69, 0x6c, 0x65, 0x73, 0x74, 0x6f, 0x72, 0x65, 0x2f, 0x76, 0x31, 0x61, 0x6c,
0x70, 0x68, 0x61, 0x31, 0x3b, 0x70, 0x72, 0x6f, 0x66, 0x69, 0x6c, 0x65, 0x73, 0x74, 0x6f, 0x72,
0x65, 0x76, 0x31, 0x61, 0x6c, 0x70, 0x68, 0x61, 0x31, 0xa2, 0x02, 0x03, 0x50, 0x50, 0x58, 0xaa,
0x02, 0x1b, 0x50, 0x61, 0x72, 0x63, 0x61, 0x2e, 0x50, 0x72, 0x6f, 0x66, 0x69, 0x6c, 0x65, 0x73,
0x74, 0x6f, 0x72, 0x65, 0x2e, 0x56, 0x31, 0x61, 0x6c, 0x70, 0x68, 0x61, 0x31, 0xca, 0x02, 0x1b,
0x50, 0x61, 0x72, 0x63, 0x61, 0x5c, 0x50, 0x72, 0x6f, 0x66, 0x69, 0x6c, 0x65, 0x73, 0x74, 0x6f,
0x72, 0x65, 0x5c, 0x56, 0x31, 0x61, 0x6c, 0x70, 0x68, 0x61, 0x31, 0xe2, 0x02, 0x27, 0x50, 0x61,
0x72, 0x63, 0x61, 0x5c, 0x50, 0x72, 0x6f, 0x66, 0x69, 0x6c, 0x65, 0x73, 0x74, 0x6f, 0x72, 0x65,
0x5c, 0x56, 0x31, 0x61, 0x6c, 0x70, 0x68, 0x61, 0x31, 0x5c, 0x47, 0x50, 0x42, 0x4d, 0x65, 0x74,
0x61, 0x64, 0x61, 0x74, 0x61, 0xea, 0x02, 0x1d, 0x50, 0x61, 0x72, 0x63, 0x61, 0x3a, 0x3a, 0x50,
0x72, 0x6f, 0x66, 0x69, 0x6c, 0x65, 0x73, 0x74, 0x6f, 0x72, 0x65, 0x3a, 0x3a, 0x56, 0x31, 0x61,
0x6c, 0x70, 0x68, 0x61, 0x31, 0x62, 0x06, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x33,
}
var (
file_parca_profilestore_v1alpha1_profilestore_proto_rawDescOnce sync.Once
file_parca_profilestore_v1alpha1_profilestore_proto_rawDescData = file_parca_profilestore_v1alpha1_profilestore_proto_rawDesc
)
func file_parca_profilestore_v1alpha1_profilestore_proto_rawDescGZIP() []byte {
file_parca_profilestore_v1alpha1_profilestore_proto_rawDescOnce.Do(func() {
file_parca_profilestore_v1alpha1_profilestore_proto_rawDescData = protoimpl.X.CompressGZIP(file_parca_profilestore_v1alpha1_profilestore_proto_rawDescData)
})
return file_parca_profilestore_v1alpha1_profilestore_proto_rawDescData
}
var file_parca_profilestore_v1alpha1_profilestore_proto_msgTypes = make([]protoimpl.MessageInfo, 6)
var file_parca_profilestore_v1alpha1_profilestore_proto_goTypes = []interface{}{
(*WriteRawRequest)(nil), // 0: parca.profilestore.v1alpha1.WriteRawRequest
(*WriteRawResponse)(nil), // 1: parca.profilestore.v1alpha1.WriteRawResponse
(*RawProfileSeries)(nil), // 2: parca.profilestore.v1alpha1.RawProfileSeries
(*Label)(nil), // 3: parca.profilestore.v1alpha1.Label
(*LabelSet)(nil), // 4: parca.profilestore.v1alpha1.LabelSet
(*RawSample)(nil), // 5: parca.profilestore.v1alpha1.RawSample
}
var file_parca_profilestore_v1alpha1_profilestore_proto_depIdxs = []int32{
2, // 0: parca.profilestore.v1alpha1.WriteRawRequest.series:type_name -> parca.profilestore.v1alpha1.RawProfileSeries
4, // 1: parca.profilestore.v1alpha1.RawProfileSeries.labels:type_name -> parca.profilestore.v1alpha1.LabelSet
5, // 2: parca.profilestore.v1alpha1.RawProfileSeries.samples:type_name -> parca.profilestore.v1alpha1.RawSample
3, // 3: parca.profilestore.v1alpha1.LabelSet.labels:type_name -> parca.profilestore.v1alpha1.Label
0, // 4: parca.profilestore.v1alpha1.ProfileStoreService.WriteRaw:input_type -> parca.profilestore.v1alpha1.WriteRawRequest
1, // 5: parca.profilestore.v1alpha1.ProfileStoreService.WriteRaw:output_type -> parca.profilestore.v1alpha1.WriteRawResponse
5, // [5:6] is the sub-list for method output_type
4, // [4:5] is the sub-list for method input_type
4, // [4:4] is the sub-list for extension type_name
4, // [4:4] is the sub-list for extension extendee
0, // [0:4] is the sub-list for field type_name
}
func init() { file_parca_profilestore_v1alpha1_profilestore_proto_init() }
func file_parca_profilestore_v1alpha1_profilestore_proto_init() {
if File_parca_profilestore_v1alpha1_profilestore_proto != nil {
return
}
if !protoimpl.UnsafeEnabled {
file_parca_profilestore_v1alpha1_profilestore_proto_msgTypes[0].Exporter = func(v interface{}, i int) interface{} {
switch v := v.(*WriteRawRequest); i {
case 0:
return &v.state
case 1:
return &v.sizeCache
case 2:
return &v.unknownFields
default:
return nil
}
}
file_parca_profilestore_v1alpha1_profilestore_proto_msgTypes[1].Exporter = func(v interface{}, i int) interface{} {
switch v := v.(*WriteRawResponse); i {
case 0:
return &v.state
case 1:
return &v.sizeCache
case 2:
return &v.unknownFields
default:
return nil
}
}
file_parca_profilestore_v1alpha1_profilestore_proto_msgTypes[2].Exporter = func(v interface{}, i int) interface{} {
switch v := v.(*RawProfileSeries); i {
case 0:
return &v.state
case 1:
return &v.sizeCache
case 2:
return &v.unknownFields
default:
return nil
}
}
file_parca_profilestore_v1alpha1_profilestore_proto_msgTypes[3].Exporter = func(v interface{}, i int) interface{} {
switch v := v.(*Label); i {
case 0:
return &v.state
case 1:
return &v.sizeCache
case 2:
return &v.unknownFields
default:
return nil
}
}
file_parca_profilestore_v1alpha1_profilestore_proto_msgTypes[4].Exporter = func(v interface{}, i int) interface{} {
switch v := v.(*LabelSet); i {
case 0:
return &v.state
case 1:
return &v.sizeCache
case 2:
return &v.unknownFields
default:
return nil
}
}
file_parca_profilestore_v1alpha1_profilestore_proto_msgTypes[5].Exporter = func(v interface{}, i int) interface{} {
switch v := v.(*RawSample); i {
case 0:
return &v.state
case 1:
return &v.sizeCache
case 2:
return &v.unknownFields
default:
return nil
}
}
}
type x struct{}
out := protoimpl.TypeBuilder{
File: protoimpl.DescBuilder{
GoPackagePath: reflect.TypeOf(x{}).PkgPath(),
RawDescriptor: file_parca_profilestore_v1alpha1_profilestore_proto_rawDesc,
NumEnums: 0,
NumMessages: 6,
NumExtensions: 0,
NumServices: 1,
},
GoTypes: file_parca_profilestore_v1alpha1_profilestore_proto_goTypes,
DependencyIndexes: file_parca_profilestore_v1alpha1_profilestore_proto_depIdxs,
MessageInfos: file_parca_profilestore_v1alpha1_profilestore_proto_msgTypes,
}.Build()
File_parca_profilestore_v1alpha1_profilestore_proto = out.File
file_parca_profilestore_v1alpha1_profilestore_proto_rawDesc = nil
file_parca_profilestore_v1alpha1_profilestore_proto_goTypes = nil
file_parca_profilestore_v1alpha1_profilestore_proto_depIdxs = nil
}

File diff suppressed because it is too large

View File

@@ -0,0 +1,91 @@
// Code generated by protoc-gen-connect-go. DO NOT EDIT.
//
// Source: parca/profilestore/v1alpha1/profilestore.proto
package profilestorev1alpha1connect
import (
context "context"
errors "errors"
connect_go "github.com/bufbuild/connect-go"
v1alpha1 "github.com/parca-dev/parca/gen/proto/go/parca/profilestore/v1alpha1"
http "net/http"
strings "strings"
)
// This is a compile-time assertion to ensure that this generated file and the connect package are
// compatible. If you get a compiler error that this constant is not defined, this code was
// generated with a version of connect newer than the one compiled into your binary. You can fix the
// problem by either regenerating this code with an older version of connect or updating the connect
// version compiled into your binary.
const _ = connect_go.IsAtLeastVersion0_1_0
const (
// ProfileStoreServiceName is the fully-qualified name of the ProfileStoreService service.
ProfileStoreServiceName = "parca.profilestore.v1alpha1.ProfileStoreService"
)
// ProfileStoreServiceClient is a client for the parca.profilestore.v1alpha1.ProfileStoreService
// service.
type ProfileStoreServiceClient interface {
// WriteRaw accepts a raw set of bytes of a pprof file
WriteRaw(context.Context, *connect_go.Request[v1alpha1.WriteRawRequest]) (*connect_go.Response[v1alpha1.WriteRawResponse], error)
}
// NewProfileStoreServiceClient constructs a client for the
// parca.profilestore.v1alpha1.ProfileStoreService service. By default, it uses the Connect protocol
// with the binary Protobuf Codec, asks for gzipped responses, and sends uncompressed requests. To
// use the gRPC or gRPC-Web protocols, supply the connect.WithGRPC() or connect.WithGRPCWeb()
// options.
//
// The URL supplied here should be the base URL for the Connect or gRPC server (for example,
// http://api.acme.com or https://acme.com/grpc).
func NewProfileStoreServiceClient(httpClient connect_go.HTTPClient, baseURL string, opts ...connect_go.ClientOption) ProfileStoreServiceClient {
baseURL = strings.TrimRight(baseURL, "/")
return &profileStoreServiceClient{
writeRaw: connect_go.NewClient[v1alpha1.WriteRawRequest, v1alpha1.WriteRawResponse](
httpClient,
baseURL+"/parca.profilestore.v1alpha1.ProfileStoreService/WriteRaw",
opts...,
),
}
}
// profileStoreServiceClient implements ProfileStoreServiceClient.
type profileStoreServiceClient struct {
writeRaw *connect_go.Client[v1alpha1.WriteRawRequest, v1alpha1.WriteRawResponse]
}
// WriteRaw calls parca.profilestore.v1alpha1.ProfileStoreService.WriteRaw.
func (c *profileStoreServiceClient) WriteRaw(ctx context.Context, req *connect_go.Request[v1alpha1.WriteRawRequest]) (*connect_go.Response[v1alpha1.WriteRawResponse], error) {
return c.writeRaw.CallUnary(ctx, req)
}
// ProfileStoreServiceHandler is an implementation of the
// parca.profilestore.v1alpha1.ProfileStoreService service.
type ProfileStoreServiceHandler interface {
// WriteRaw accepts a raw set of bytes of a pprof file
WriteRaw(context.Context, *connect_go.Request[v1alpha1.WriteRawRequest]) (*connect_go.Response[v1alpha1.WriteRawResponse], error)
}
// NewProfileStoreServiceHandler builds an HTTP handler from the service implementation. It returns
// the path on which to mount the handler and the handler itself.
//
// By default, handlers support the Connect, gRPC, and gRPC-Web protocols with the binary Protobuf
// and JSON codecs. They also support gzip compression.
func NewProfileStoreServiceHandler(svc ProfileStoreServiceHandler, opts ...connect_go.HandlerOption) (string, http.Handler) {
mux := http.NewServeMux()
mux.Handle("/parca.profilestore.v1alpha1.ProfileStoreService/WriteRaw", connect_go.NewUnaryHandler(
"/parca.profilestore.v1alpha1.ProfileStoreService/WriteRaw",
svc.WriteRaw,
opts...,
))
return "/parca.profilestore.v1alpha1.ProfileStoreService/", mux
}
// UnimplementedProfileStoreServiceHandler returns CodeUnimplemented from all methods.
type UnimplementedProfileStoreServiceHandler struct{}
func (UnimplementedProfileStoreServiceHandler) WriteRaw(context.Context, *connect_go.Request[v1alpha1.WriteRawRequest]) (*connect_go.Response[v1alpha1.WriteRawResponse], error) {
return nil, connect_go.NewError(connect_go.CodeUnimplemented, errors.New("parca.profilestore.v1alpha1.ProfileStoreService.WriteRaw is not implemented"))
}
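
A minimal sketch of how this generated client could be used to push a pprof profile to a Parca server; the address, labels, file name and import paths (taken from the upstream parca-dev module rather than the vendored copy in this repository) are illustrative assumptions:

package main

import (
    "context"
    "log"
    "net/http"
    "os"

    connect_go "github.com/bufbuild/connect-go"
    v1alpha1 "github.com/parca-dev/parca/gen/proto/go/parca/profilestore/v1alpha1"
    "github.com/parca-dev/parca/gen/proto/go/parca/profilestore/v1alpha1/profilestorev1alpha1connect"
)

func main() {
    // Construct the Connect client against an assumed local Parca instance.
    client := profilestorev1alpha1connect.NewProfileStoreServiceClient(http.DefaultClient, "http://localhost:7070")

    // Read a previously captured pprof profile from disk (illustrative file name).
    raw, err := os.ReadFile("cpu.pprof")
    if err != nil {
        log.Fatal(err)
    }

    // WriteRaw takes the raw pprof bytes together with a label set identifying the series.
    req := connect_go.NewRequest(&v1alpha1.WriteRawRequest{
        Series: []*v1alpha1.RawProfileSeries{{
            Labels:  &v1alpha1.LabelSet{Labels: []*v1alpha1.Label{{Name: "job", Value: "example"}}},
            Samples: []*v1alpha1.RawSample{{RawProfile: raw}},
        }},
    })
    if _, err := client.WriteRaw(context.Background(), req); err != nil {
        log.Fatal(err)
    }
}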

File diff suppressed because it is too large

File diff suppressed because it is too large

View File

@@ -0,0 +1,232 @@
// Code generated by protoc-gen-connect-go. DO NOT EDIT.
//
// Source: parca/query/v1alpha1/query.proto
package queryv1alpha1connect
import (
context "context"
errors "errors"
connect_go "github.com/bufbuild/connect-go"
v1alpha1 "github.com/parca-dev/parca/gen/proto/go/parca/query/v1alpha1"
http "net/http"
strings "strings"
)
// This is a compile-time assertion to ensure that this generated file and the connect package are
// compatible. If you get a compiler error that this constant is not defined, this code was
// generated with a version of connect newer than the one compiled into your binary. You can fix the
// problem by either regenerating this code with an older version of connect or updating the connect
// version compiled into your binary.
const _ = connect_go.IsAtLeastVersion0_1_0
const (
// QueryServiceName is the fully-qualified name of the QueryService service.
QueryServiceName = "parca.query.v1alpha1.QueryService"
)
// QueryServiceClient is a client for the parca.query.v1alpha1.QueryService service.
type QueryServiceClient interface {
// QueryRange performs a profile query over a time range
QueryRange(context.Context, *connect_go.Request[v1alpha1.QueryRangeRequest]) (*connect_go.Response[v1alpha1.QueryRangeResponse], error)
// Query performs a profile query
Query(context.Context, *connect_go.Request[v1alpha1.QueryRequest]) (*connect_go.Response[v1alpha1.QueryResponse], error)
// Series is unimplemented
Series(context.Context, *connect_go.Request[v1alpha1.SeriesRequest]) (*connect_go.Response[v1alpha1.SeriesResponse], error)
// ProfileTypes returns the list of available profile types.
ProfileTypes(context.Context, *connect_go.Request[v1alpha1.ProfileTypesRequest]) (*connect_go.Response[v1alpha1.ProfileTypesResponse], error)
// Labels returns the set of label names against a given matching string and time frame
Labels(context.Context, *connect_go.Request[v1alpha1.LabelsRequest]) (*connect_go.Response[v1alpha1.LabelsResponse], error)
// Values returns the set of values that match a given label and time frame
Values(context.Context, *connect_go.Request[v1alpha1.ValuesRequest]) (*connect_go.Response[v1alpha1.ValuesResponse], error)
// ShareProfile uploads the given profile to pprof.me and returns a link to the profile.
ShareProfile(context.Context, *connect_go.Request[v1alpha1.ShareProfileRequest]) (*connect_go.Response[v1alpha1.ShareProfileResponse], error)
}
// NewQueryServiceClient constructs a client for the parca.query.v1alpha1.QueryService service. By
// default, it uses the Connect protocol with the binary Protobuf Codec, asks for gzipped responses,
// and sends uncompressed requests. To use the gRPC or gRPC-Web protocols, supply the
// connect.WithGRPC() or connect.WithGRPCWeb() options.
//
// The URL supplied here should be the base URL for the Connect or gRPC server (for example,
// http://api.acme.com or https://acme.com/grpc).
func NewQueryServiceClient(httpClient connect_go.HTTPClient, baseURL string, opts ...connect_go.ClientOption) QueryServiceClient {
baseURL = strings.TrimRight(baseURL, "/")
return &queryServiceClient{
queryRange: connect_go.NewClient[v1alpha1.QueryRangeRequest, v1alpha1.QueryRangeResponse](
httpClient,
baseURL+"/parca.query.v1alpha1.QueryService/QueryRange",
opts...,
),
query: connect_go.NewClient[v1alpha1.QueryRequest, v1alpha1.QueryResponse](
httpClient,
baseURL+"/parca.query.v1alpha1.QueryService/Query",
opts...,
),
series: connect_go.NewClient[v1alpha1.SeriesRequest, v1alpha1.SeriesResponse](
httpClient,
baseURL+"/parca.query.v1alpha1.QueryService/Series",
opts...,
),
profileTypes: connect_go.NewClient[v1alpha1.ProfileTypesRequest, v1alpha1.ProfileTypesResponse](
httpClient,
baseURL+"/parca.query.v1alpha1.QueryService/ProfileTypes",
opts...,
),
labels: connect_go.NewClient[v1alpha1.LabelsRequest, v1alpha1.LabelsResponse](
httpClient,
baseURL+"/parca.query.v1alpha1.QueryService/Labels",
opts...,
),
values: connect_go.NewClient[v1alpha1.ValuesRequest, v1alpha1.ValuesResponse](
httpClient,
baseURL+"/parca.query.v1alpha1.QueryService/Values",
opts...,
),
shareProfile: connect_go.NewClient[v1alpha1.ShareProfileRequest, v1alpha1.ShareProfileResponse](
httpClient,
baseURL+"/parca.query.v1alpha1.QueryService/ShareProfile",
opts...,
),
}
}
// queryServiceClient implements QueryServiceClient.
type queryServiceClient struct {
queryRange *connect_go.Client[v1alpha1.QueryRangeRequest, v1alpha1.QueryRangeResponse]
query *connect_go.Client[v1alpha1.QueryRequest, v1alpha1.QueryResponse]
series *connect_go.Client[v1alpha1.SeriesRequest, v1alpha1.SeriesResponse]
profileTypes *connect_go.Client[v1alpha1.ProfileTypesRequest, v1alpha1.ProfileTypesResponse]
labels *connect_go.Client[v1alpha1.LabelsRequest, v1alpha1.LabelsResponse]
values *connect_go.Client[v1alpha1.ValuesRequest, v1alpha1.ValuesResponse]
shareProfile *connect_go.Client[v1alpha1.ShareProfileRequest, v1alpha1.ShareProfileResponse]
}
// QueryRange calls parca.query.v1alpha1.QueryService.QueryRange.
func (c *queryServiceClient) QueryRange(ctx context.Context, req *connect_go.Request[v1alpha1.QueryRangeRequest]) (*connect_go.Response[v1alpha1.QueryRangeResponse], error) {
return c.queryRange.CallUnary(ctx, req)
}
// Query calls parca.query.v1alpha1.QueryService.Query.
func (c *queryServiceClient) Query(ctx context.Context, req *connect_go.Request[v1alpha1.QueryRequest]) (*connect_go.Response[v1alpha1.QueryResponse], error) {
return c.query.CallUnary(ctx, req)
}
// Series calls parca.query.v1alpha1.QueryService.Series.
func (c *queryServiceClient) Series(ctx context.Context, req *connect_go.Request[v1alpha1.SeriesRequest]) (*connect_go.Response[v1alpha1.SeriesResponse], error) {
return c.series.CallUnary(ctx, req)
}
// ProfileTypes calls parca.query.v1alpha1.QueryService.ProfileTypes.
func (c *queryServiceClient) ProfileTypes(ctx context.Context, req *connect_go.Request[v1alpha1.ProfileTypesRequest]) (*connect_go.Response[v1alpha1.ProfileTypesResponse], error) {
return c.profileTypes.CallUnary(ctx, req)
}
// Labels calls parca.query.v1alpha1.QueryService.Labels.
func (c *queryServiceClient) Labels(ctx context.Context, req *connect_go.Request[v1alpha1.LabelsRequest]) (*connect_go.Response[v1alpha1.LabelsResponse], error) {
return c.labels.CallUnary(ctx, req)
}
// Values calls parca.query.v1alpha1.QueryService.Values.
func (c *queryServiceClient) Values(ctx context.Context, req *connect_go.Request[v1alpha1.ValuesRequest]) (*connect_go.Response[v1alpha1.ValuesResponse], error) {
return c.values.CallUnary(ctx, req)
}
// ShareProfile calls parca.query.v1alpha1.QueryService.ShareProfile.
func (c *queryServiceClient) ShareProfile(ctx context.Context, req *connect_go.Request[v1alpha1.ShareProfileRequest]) (*connect_go.Response[v1alpha1.ShareProfileResponse], error) {
return c.shareProfile.CallUnary(ctx, req)
}
// QueryServiceHandler is an implementation of the parca.query.v1alpha1.QueryService service.
type QueryServiceHandler interface {
// QueryRange performs a profile query over a time range
QueryRange(context.Context, *connect_go.Request[v1alpha1.QueryRangeRequest]) (*connect_go.Response[v1alpha1.QueryRangeResponse], error)
// Query performs a profile query
Query(context.Context, *connect_go.Request[v1alpha1.QueryRequest]) (*connect_go.Response[v1alpha1.QueryResponse], error)
// Series is unimplemented
Series(context.Context, *connect_go.Request[v1alpha1.SeriesRequest]) (*connect_go.Response[v1alpha1.SeriesResponse], error)
// ProfileTypes returns the list of available profile types.
ProfileTypes(context.Context, *connect_go.Request[v1alpha1.ProfileTypesRequest]) (*connect_go.Response[v1alpha1.ProfileTypesResponse], error)
// Labels returns the set of label names against a given matching string and time frame
Labels(context.Context, *connect_go.Request[v1alpha1.LabelsRequest]) (*connect_go.Response[v1alpha1.LabelsResponse], error)
// Values returns the set of values that match a given label and time frame
Values(context.Context, *connect_go.Request[v1alpha1.ValuesRequest]) (*connect_go.Response[v1alpha1.ValuesResponse], error)
// ShareProfile uploads the given profile to pprof.me and returns a link to the profile.
ShareProfile(context.Context, *connect_go.Request[v1alpha1.ShareProfileRequest]) (*connect_go.Response[v1alpha1.ShareProfileResponse], error)
}
// NewQueryServiceHandler builds an HTTP handler from the service implementation. It returns the
// path on which to mount the handler and the handler itself.
//
// By default, handlers support the Connect, gRPC, and gRPC-Web protocols with the binary Protobuf
// and JSON codecs. They also support gzip compression.
func NewQueryServiceHandler(svc QueryServiceHandler, opts ...connect_go.HandlerOption) (string, http.Handler) {
mux := http.NewServeMux()
mux.Handle("/parca.query.v1alpha1.QueryService/QueryRange", connect_go.NewUnaryHandler(
"/parca.query.v1alpha1.QueryService/QueryRange",
svc.QueryRange,
opts...,
))
mux.Handle("/parca.query.v1alpha1.QueryService/Query", connect_go.NewUnaryHandler(
"/parca.query.v1alpha1.QueryService/Query",
svc.Query,
opts...,
))
mux.Handle("/parca.query.v1alpha1.QueryService/Series", connect_go.NewUnaryHandler(
"/parca.query.v1alpha1.QueryService/Series",
svc.Series,
opts...,
))
mux.Handle("/parca.query.v1alpha1.QueryService/ProfileTypes", connect_go.NewUnaryHandler(
"/parca.query.v1alpha1.QueryService/ProfileTypes",
svc.ProfileTypes,
opts...,
))
mux.Handle("/parca.query.v1alpha1.QueryService/Labels", connect_go.NewUnaryHandler(
"/parca.query.v1alpha1.QueryService/Labels",
svc.Labels,
opts...,
))
mux.Handle("/parca.query.v1alpha1.QueryService/Values", connect_go.NewUnaryHandler(
"/parca.query.v1alpha1.QueryService/Values",
svc.Values,
opts...,
))
mux.Handle("/parca.query.v1alpha1.QueryService/ShareProfile", connect_go.NewUnaryHandler(
"/parca.query.v1alpha1.QueryService/ShareProfile",
svc.ShareProfile,
opts...,
))
return "/parca.query.v1alpha1.QueryService/", mux
}
// UnimplementedQueryServiceHandler returns CodeUnimplemented from all methods.
type UnimplementedQueryServiceHandler struct{}
func (UnimplementedQueryServiceHandler) QueryRange(context.Context, *connect_go.Request[v1alpha1.QueryRangeRequest]) (*connect_go.Response[v1alpha1.QueryRangeResponse], error) {
return nil, connect_go.NewError(connect_go.CodeUnimplemented, errors.New("parca.query.v1alpha1.QueryService.QueryRange is not implemented"))
}
func (UnimplementedQueryServiceHandler) Query(context.Context, *connect_go.Request[v1alpha1.QueryRequest]) (*connect_go.Response[v1alpha1.QueryResponse], error) {
return nil, connect_go.NewError(connect_go.CodeUnimplemented, errors.New("parca.query.v1alpha1.QueryService.Query is not implemented"))
}
func (UnimplementedQueryServiceHandler) Series(context.Context, *connect_go.Request[v1alpha1.SeriesRequest]) (*connect_go.Response[v1alpha1.SeriesResponse], error) {
return nil, connect_go.NewError(connect_go.CodeUnimplemented, errors.New("parca.query.v1alpha1.QueryService.Series is not implemented"))
}
func (UnimplementedQueryServiceHandler) ProfileTypes(context.Context, *connect_go.Request[v1alpha1.ProfileTypesRequest]) (*connect_go.Response[v1alpha1.ProfileTypesResponse], error) {
return nil, connect_go.NewError(connect_go.CodeUnimplemented, errors.New("parca.query.v1alpha1.QueryService.ProfileTypes is not implemented"))
}
func (UnimplementedQueryServiceHandler) Labels(context.Context, *connect_go.Request[v1alpha1.LabelsRequest]) (*connect_go.Response[v1alpha1.LabelsResponse], error) {
return nil, connect_go.NewError(connect_go.CodeUnimplemented, errors.New("parca.query.v1alpha1.QueryService.Labels is not implemented"))
}
func (UnimplementedQueryServiceHandler) Values(context.Context, *connect_go.Request[v1alpha1.ValuesRequest]) (*connect_go.Response[v1alpha1.ValuesResponse], error) {
return nil, connect_go.NewError(connect_go.CodeUnimplemented, errors.New("parca.query.v1alpha1.QueryService.Values is not implemented"))
}
func (UnimplementedQueryServiceHandler) ShareProfile(context.Context, *connect_go.Request[v1alpha1.ShareProfileRequest]) (*connect_go.Response[v1alpha1.ShareProfileResponse], error) {
return nil, connect_go.NewError(connect_go.CodeUnimplemented, errors.New("parca.query.v1alpha1.QueryService.ShareProfile is not implemented"))
}
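
On the server side, a sketch of how the generated handler could be mounted; the listen address is an assumption, and UnimplementedQueryServiceHandler stands in for a real QueryServiceHandler implementation:

package main

import (
    "log"
    "net/http"

    "github.com/parca-dev/parca/gen/proto/go/parca/query/v1alpha1/queryv1alpha1connect"
    "golang.org/x/net/http2"
    "golang.org/x/net/http2/h2c"
)

func main() {
    // A real server would pass its own QueryServiceHandler implementation here; the generated
    // UnimplementedQueryServiceHandler simply answers CodeUnimplemented for every method.
    path, handler := queryv1alpha1connect.NewQueryServiceHandler(queryv1alpha1connect.UnimplementedQueryServiceHandler{})

    mux := http.NewServeMux()
    mux.Handle(path, handler)

    // h2c serves HTTP/2 without TLS so that gRPC works alongside Connect and gRPC-Web on one port.
    log.Fatal(http.ListenAndServe(":8080", h2c.NewHandler(mux, &http2.Server{})))
}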

View File

@@ -0,0 +1,600 @@
// Code generated by protoc-gen-go. DO NOT EDIT.
// versions:
// protoc-gen-go v1.28.1
// protoc (unknown)
// source: parca/scrape/v1alpha1/scrape.proto
package scrapev1alpha1
import (
v1alpha1 "github.com/parca-dev/parca/gen/proto/go/parca/profilestore/v1alpha1"
_ "google.golang.org/genproto/googleapis/api/annotations"
protoreflect "google.golang.org/protobuf/reflect/protoreflect"
protoimpl "google.golang.org/protobuf/runtime/protoimpl"
durationpb "google.golang.org/protobuf/types/known/durationpb"
timestamppb "google.golang.org/protobuf/types/known/timestamppb"
reflect "reflect"
sync "sync"
)
const (
// Verify that this generated code is sufficiently up-to-date.
_ = protoimpl.EnforceVersion(20 - protoimpl.MinVersion)
// Verify that runtime/protoimpl is sufficiently up-to-date.
_ = protoimpl.EnforceVersion(protoimpl.MaxVersion - 20)
)
// State represents the current state of a target
type TargetsRequest_State int32
const (
// STATE_ANY_UNSPECIFIED unspecified
TargetsRequest_STATE_ANY_UNSPECIFIED TargetsRequest_State = 0
// STATE_ACTIVE target active state
TargetsRequest_STATE_ACTIVE TargetsRequest_State = 1
// STATE_DROPPED target dropped state
TargetsRequest_STATE_DROPPED TargetsRequest_State = 2
)
// Enum value maps for TargetsRequest_State.
var (
TargetsRequest_State_name = map[int32]string{
0: "STATE_ANY_UNSPECIFIED",
1: "STATE_ACTIVE",
2: "STATE_DROPPED",
}
TargetsRequest_State_value = map[string]int32{
"STATE_ANY_UNSPECIFIED": 0,
"STATE_ACTIVE": 1,
"STATE_DROPPED": 2,
}
)
func (x TargetsRequest_State) Enum() *TargetsRequest_State {
p := new(TargetsRequest_State)
*p = x
return p
}
func (x TargetsRequest_State) String() string {
return protoimpl.X.EnumStringOf(x.Descriptor(), protoreflect.EnumNumber(x))
}
func (TargetsRequest_State) Descriptor() protoreflect.EnumDescriptor {
return file_parca_scrape_v1alpha1_scrape_proto_enumTypes[0].Descriptor()
}
func (TargetsRequest_State) Type() protoreflect.EnumType {
return &file_parca_scrape_v1alpha1_scrape_proto_enumTypes[0]
}
func (x TargetsRequest_State) Number() protoreflect.EnumNumber {
return protoreflect.EnumNumber(x)
}
// Deprecated: Use TargetsRequest_State.Descriptor instead.
func (TargetsRequest_State) EnumDescriptor() ([]byte, []int) {
return file_parca_scrape_v1alpha1_scrape_proto_rawDescGZIP(), []int{0, 0}
}
// Health are the possible health values of a target
type Target_Health int32
const (
// HEALTH_UNKNOWN_UNSPECIFIED unspecified
Target_HEALTH_UNKNOWN_UNSPECIFIED Target_Health = 0
// HEALTH_GOOD healthy target
Target_HEALTH_GOOD Target_Health = 1
// HEALTH_BAD unhealthy target
Target_HEALTH_BAD Target_Health = 2
)
// Enum value maps for Target_Health.
var (
Target_Health_name = map[int32]string{
0: "HEALTH_UNKNOWN_UNSPECIFIED",
1: "HEALTH_GOOD",
2: "HEALTH_BAD",
}
Target_Health_value = map[string]int32{
"HEALTH_UNKNOWN_UNSPECIFIED": 0,
"HEALTH_GOOD": 1,
"HEALTH_BAD": 2,
}
)
func (x Target_Health) Enum() *Target_Health {
p := new(Target_Health)
*p = x
return p
}
func (x Target_Health) String() string {
return protoimpl.X.EnumStringOf(x.Descriptor(), protoreflect.EnumNumber(x))
}
func (Target_Health) Descriptor() protoreflect.EnumDescriptor {
return file_parca_scrape_v1alpha1_scrape_proto_enumTypes[1].Descriptor()
}
func (Target_Health) Type() protoreflect.EnumType {
return &file_parca_scrape_v1alpha1_scrape_proto_enumTypes[1]
}
func (x Target_Health) Number() protoreflect.EnumNumber {
return protoreflect.EnumNumber(x)
}
// Deprecated: Use Target_Health.Descriptor instead.
func (Target_Health) EnumDescriptor() ([]byte, []int) {
return file_parca_scrape_v1alpha1_scrape_proto_rawDescGZIP(), []int{3, 0}
}
// TargetsRequest contains the parameters for the set of targets to return
type TargetsRequest struct {
state protoimpl.MessageState
sizeCache protoimpl.SizeCache
unknownFields protoimpl.UnknownFields
// state is the state of targets to return
State TargetsRequest_State `protobuf:"varint,1,opt,name=state,proto3,enum=parca.scrape.v1alpha1.TargetsRequest_State" json:"state,omitempty"`
}
func (x *TargetsRequest) Reset() {
*x = TargetsRequest{}
if protoimpl.UnsafeEnabled {
mi := &file_parca_scrape_v1alpha1_scrape_proto_msgTypes[0]
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
ms.StoreMessageInfo(mi)
}
}
func (x *TargetsRequest) String() string {
return protoimpl.X.MessageStringOf(x)
}
func (*TargetsRequest) ProtoMessage() {}
func (x *TargetsRequest) ProtoReflect() protoreflect.Message {
mi := &file_parca_scrape_v1alpha1_scrape_proto_msgTypes[0]
if protoimpl.UnsafeEnabled && x != nil {
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
if ms.LoadMessageInfo() == nil {
ms.StoreMessageInfo(mi)
}
return ms
}
return mi.MessageOf(x)
}
// Deprecated: Use TargetsRequest.ProtoReflect.Descriptor instead.
func (*TargetsRequest) Descriptor() ([]byte, []int) {
return file_parca_scrape_v1alpha1_scrape_proto_rawDescGZIP(), []int{0}
}
func (x *TargetsRequest) GetState() TargetsRequest_State {
if x != nil {
return x.State
}
return TargetsRequest_STATE_ANY_UNSPECIFIED
}
// TargetsResponse is the set of targets for the given requested state
type TargetsResponse struct {
state protoimpl.MessageState
sizeCache protoimpl.SizeCache
unknownFields protoimpl.UnknownFields
// targets is the mapping of targets
Targets map[string]*Targets `protobuf:"bytes,1,rep,name=targets,proto3" json:"targets,omitempty" protobuf_key:"bytes,1,opt,name=key,proto3" protobuf_val:"bytes,2,opt,name=value,proto3"`
}
func (x *TargetsResponse) Reset() {
*x = TargetsResponse{}
if protoimpl.UnsafeEnabled {
mi := &file_parca_scrape_v1alpha1_scrape_proto_msgTypes[1]
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
ms.StoreMessageInfo(mi)
}
}
func (x *TargetsResponse) String() string {
return protoimpl.X.MessageStringOf(x)
}
func (*TargetsResponse) ProtoMessage() {}
func (x *TargetsResponse) ProtoReflect() protoreflect.Message {
mi := &file_parca_scrape_v1alpha1_scrape_proto_msgTypes[1]
if protoimpl.UnsafeEnabled && x != nil {
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
if ms.LoadMessageInfo() == nil {
ms.StoreMessageInfo(mi)
}
return ms
}
return mi.MessageOf(x)
}
// Deprecated: Use TargetsResponse.ProtoReflect.Descriptor instead.
func (*TargetsResponse) Descriptor() ([]byte, []int) {
return file_parca_scrape_v1alpha1_scrape_proto_rawDescGZIP(), []int{1}
}
func (x *TargetsResponse) GetTargets() map[string]*Targets {
if x != nil {
return x.Targets
}
return nil
}
// Targets is a list of targets
type Targets struct {
state protoimpl.MessageState
sizeCache protoimpl.SizeCache
unknownFields protoimpl.UnknownFields
// targets is a list of targets
Targets []*Target `protobuf:"bytes,1,rep,name=targets,proto3" json:"targets,omitempty"`
}
func (x *Targets) Reset() {
*x = Targets{}
if protoimpl.UnsafeEnabled {
mi := &file_parca_scrape_v1alpha1_scrape_proto_msgTypes[2]
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
ms.StoreMessageInfo(mi)
}
}
func (x *Targets) String() string {
return protoimpl.X.MessageStringOf(x)
}
func (*Targets) ProtoMessage() {}
func (x *Targets) ProtoReflect() protoreflect.Message {
mi := &file_parca_scrape_v1alpha1_scrape_proto_msgTypes[2]
if protoimpl.UnsafeEnabled && x != nil {
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
if ms.LoadMessageInfo() == nil {
ms.StoreMessageInfo(mi)
}
return ms
}
return mi.MessageOf(x)
}
// Deprecated: Use Targets.ProtoReflect.Descriptor instead.
func (*Targets) Descriptor() ([]byte, []int) {
return file_parca_scrape_v1alpha1_scrape_proto_rawDescGZIP(), []int{2}
}
func (x *Targets) GetTargets() []*Target {
if x != nil {
return x.Targets
}
return nil
}
// Target is the scrape target representation
type Target struct {
state protoimpl.MessageState
sizeCache protoimpl.SizeCache
unknownFields protoimpl.UnknownFields
// discovered_labels are the set of labels for the target that have been discovered
DiscoveredLabels *v1alpha1.LabelSet `protobuf:"bytes,1,opt,name=discovered_labels,json=discoveredLabels,proto3" json:"discovered_labels,omitempty"`
// labels are the set of labels given for the target
Labels *v1alpha1.LabelSet `protobuf:"bytes,2,opt,name=labels,proto3" json:"labels,omitempty"`
// last_error is the error message most recently received from a scrape attempt
LastError string `protobuf:"bytes,3,opt,name=last_error,json=lastError,proto3" json:"last_error,omitempty"`
// last_scrape is the timestamp at which the last scrape request was performed
LastScrape *timestamppb.Timestamp `protobuf:"bytes,4,opt,name=last_scrape,json=lastScrape,proto3" json:"last_scrape,omitempty"`
// last_scrape_duration is the duration of the last scrape request
LastScrapeDuration *durationpb.Duration `protobuf:"bytes,5,opt,name=last_scrape_duration,json=lastScrapeDuration,proto3" json:"last_scrape_duration,omitempty"`
// url is the url of the target
Url string `protobuf:"bytes,6,opt,name=url,proto3" json:"url,omitempty"`
// health indicates the current health of the target
Health Target_Health `protobuf:"varint,7,opt,name=health,proto3,enum=parca.scrape.v1alpha1.Target_Health" json:"health,omitempty"`
}
func (x *Target) Reset() {
*x = Target{}
if protoimpl.UnsafeEnabled {
mi := &file_parca_scrape_v1alpha1_scrape_proto_msgTypes[3]
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
ms.StoreMessageInfo(mi)
}
}
func (x *Target) String() string {
return protoimpl.X.MessageStringOf(x)
}
func (*Target) ProtoMessage() {}
func (x *Target) ProtoReflect() protoreflect.Message {
mi := &file_parca_scrape_v1alpha1_scrape_proto_msgTypes[3]
if protoimpl.UnsafeEnabled && x != nil {
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
if ms.LoadMessageInfo() == nil {
ms.StoreMessageInfo(mi)
}
return ms
}
return mi.MessageOf(x)
}
// Deprecated: Use Target.ProtoReflect.Descriptor instead.
func (*Target) Descriptor() ([]byte, []int) {
return file_parca_scrape_v1alpha1_scrape_proto_rawDescGZIP(), []int{3}
}
func (x *Target) GetDiscoveredLabels() *v1alpha1.LabelSet {
if x != nil {
return x.DiscoveredLabels
}
return nil
}
func (x *Target) GetLabels() *v1alpha1.LabelSet {
if x != nil {
return x.Labels
}
return nil
}
func (x *Target) GetLastError() string {
if x != nil {
return x.LastError
}
return ""
}
func (x *Target) GetLastScrape() *timestamppb.Timestamp {
if x != nil {
return x.LastScrape
}
return nil
}
func (x *Target) GetLastScrapeDuration() *durationpb.Duration {
if x != nil {
return x.LastScrapeDuration
}
return nil
}
func (x *Target) GetUrl() string {
if x != nil {
return x.Url
}
return ""
}
func (x *Target) GetHealth() Target_Health {
if x != nil {
return x.Health
}
return Target_HEALTH_UNKNOWN_UNSPECIFIED
}
var File_parca_scrape_v1alpha1_scrape_proto protoreflect.FileDescriptor
var file_parca_scrape_v1alpha1_scrape_proto_rawDesc = []byte{
0x0a, 0x22, 0x70, 0x61, 0x72, 0x63, 0x61, 0x2f, 0x73, 0x63, 0x72, 0x61, 0x70, 0x65, 0x2f, 0x76,
0x31, 0x61, 0x6c, 0x70, 0x68, 0x61, 0x31, 0x2f, 0x73, 0x63, 0x72, 0x61, 0x70, 0x65, 0x2e, 0x70,
0x72, 0x6f, 0x74, 0x6f, 0x12, 0x15, 0x70, 0x61, 0x72, 0x63, 0x61, 0x2e, 0x73, 0x63, 0x72, 0x61,
0x70, 0x65, 0x2e, 0x76, 0x31, 0x61, 0x6c, 0x70, 0x68, 0x61, 0x31, 0x1a, 0x1c, 0x67, 0x6f, 0x6f,
0x67, 0x6c, 0x65, 0x2f, 0x61, 0x70, 0x69, 0x2f, 0x61, 0x6e, 0x6e, 0x6f, 0x74, 0x61, 0x74, 0x69,
0x6f, 0x6e, 0x73, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x1a, 0x1e, 0x67, 0x6f, 0x6f, 0x67, 0x6c,
0x65, 0x2f, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x62, 0x75, 0x66, 0x2f, 0x64, 0x75, 0x72, 0x61, 0x74,
0x69, 0x6f, 0x6e, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x1a, 0x1f, 0x67, 0x6f, 0x6f, 0x67, 0x6c,
0x65, 0x2f, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x62, 0x75, 0x66, 0x2f, 0x74, 0x69, 0x6d, 0x65, 0x73,
0x74, 0x61, 0x6d, 0x70, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x1a, 0x2e, 0x70, 0x61, 0x72, 0x63,
0x61, 0x2f, 0x70, 0x72, 0x6f, 0x66, 0x69, 0x6c, 0x65, 0x73, 0x74, 0x6f, 0x72, 0x65, 0x2f, 0x76,
0x31, 0x61, 0x6c, 0x70, 0x68, 0x61, 0x31, 0x2f, 0x70, 0x72, 0x6f, 0x66, 0x69, 0x6c, 0x65, 0x73,
0x74, 0x6f, 0x72, 0x65, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x22, 0x9c, 0x01, 0x0a, 0x0e, 0x54,
0x61, 0x72, 0x67, 0x65, 0x74, 0x73, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x12, 0x41, 0x0a,
0x05, 0x73, 0x74, 0x61, 0x74, 0x65, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0e, 0x32, 0x2b, 0x2e, 0x70,
0x61, 0x72, 0x63, 0x61, 0x2e, 0x73, 0x63, 0x72, 0x61, 0x70, 0x65, 0x2e, 0x76, 0x31, 0x61, 0x6c,
0x70, 0x68, 0x61, 0x31, 0x2e, 0x54, 0x61, 0x72, 0x67, 0x65, 0x74, 0x73, 0x52, 0x65, 0x71, 0x75,
0x65, 0x73, 0x74, 0x2e, 0x53, 0x74, 0x61, 0x74, 0x65, 0x52, 0x05, 0x73, 0x74, 0x61, 0x74, 0x65,
0x22, 0x47, 0x0a, 0x05, 0x53, 0x74, 0x61, 0x74, 0x65, 0x12, 0x19, 0x0a, 0x15, 0x53, 0x54, 0x41,
0x54, 0x45, 0x5f, 0x41, 0x4e, 0x59, 0x5f, 0x55, 0x4e, 0x53, 0x50, 0x45, 0x43, 0x49, 0x46, 0x49,
0x45, 0x44, 0x10, 0x00, 0x12, 0x10, 0x0a, 0x0c, 0x53, 0x54, 0x41, 0x54, 0x45, 0x5f, 0x41, 0x43,
0x54, 0x49, 0x56, 0x45, 0x10, 0x01, 0x12, 0x11, 0x0a, 0x0d, 0x53, 0x54, 0x41, 0x54, 0x45, 0x5f,
0x44, 0x52, 0x4f, 0x50, 0x50, 0x45, 0x44, 0x10, 0x02, 0x22, 0xbc, 0x01, 0x0a, 0x0f, 0x54, 0x61,
0x72, 0x67, 0x65, 0x74, 0x73, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x12, 0x4d, 0x0a,
0x07, 0x74, 0x61, 0x72, 0x67, 0x65, 0x74, 0x73, 0x18, 0x01, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x33,
0x2e, 0x70, 0x61, 0x72, 0x63, 0x61, 0x2e, 0x73, 0x63, 0x72, 0x61, 0x70, 0x65, 0x2e, 0x76, 0x31,
0x61, 0x6c, 0x70, 0x68, 0x61, 0x31, 0x2e, 0x54, 0x61, 0x72, 0x67, 0x65, 0x74, 0x73, 0x52, 0x65,
0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x2e, 0x54, 0x61, 0x72, 0x67, 0x65, 0x74, 0x73, 0x45, 0x6e,
0x74, 0x72, 0x79, 0x52, 0x07, 0x74, 0x61, 0x72, 0x67, 0x65, 0x74, 0x73, 0x1a, 0x5a, 0x0a, 0x0c,
0x54, 0x61, 0x72, 0x67, 0x65, 0x74, 0x73, 0x45, 0x6e, 0x74, 0x72, 0x79, 0x12, 0x10, 0x0a, 0x03,
0x6b, 0x65, 0x79, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x03, 0x6b, 0x65, 0x79, 0x12, 0x34,
0x0a, 0x05, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x18, 0x02, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x1e, 0x2e,
0x70, 0x61, 0x72, 0x63, 0x61, 0x2e, 0x73, 0x63, 0x72, 0x61, 0x70, 0x65, 0x2e, 0x76, 0x31, 0x61,
0x6c, 0x70, 0x68, 0x61, 0x31, 0x2e, 0x54, 0x61, 0x72, 0x67, 0x65, 0x74, 0x73, 0x52, 0x05, 0x76,
0x61, 0x6c, 0x75, 0x65, 0x3a, 0x02, 0x38, 0x01, 0x22, 0x42, 0x0a, 0x07, 0x54, 0x61, 0x72, 0x67,
0x65, 0x74, 0x73, 0x12, 0x37, 0x0a, 0x07, 0x74, 0x61, 0x72, 0x67, 0x65, 0x74, 0x73, 0x18, 0x01,
0x20, 0x03, 0x28, 0x0b, 0x32, 0x1d, 0x2e, 0x70, 0x61, 0x72, 0x63, 0x61, 0x2e, 0x73, 0x63, 0x72,
0x61, 0x70, 0x65, 0x2e, 0x76, 0x31, 0x61, 0x6c, 0x70, 0x68, 0x61, 0x31, 0x2e, 0x54, 0x61, 0x72,
0x67, 0x65, 0x74, 0x52, 0x07, 0x74, 0x61, 0x72, 0x67, 0x65, 0x74, 0x73, 0x22, 0xdf, 0x03, 0x0a,
0x06, 0x54, 0x61, 0x72, 0x67, 0x65, 0x74, 0x12, 0x52, 0x0a, 0x11, 0x64, 0x69, 0x73, 0x63, 0x6f,
0x76, 0x65, 0x72, 0x65, 0x64, 0x5f, 0x6c, 0x61, 0x62, 0x65, 0x6c, 0x73, 0x18, 0x01, 0x20, 0x01,
0x28, 0x0b, 0x32, 0x25, 0x2e, 0x70, 0x61, 0x72, 0x63, 0x61, 0x2e, 0x70, 0x72, 0x6f, 0x66, 0x69,
0x6c, 0x65, 0x73, 0x74, 0x6f, 0x72, 0x65, 0x2e, 0x76, 0x31, 0x61, 0x6c, 0x70, 0x68, 0x61, 0x31,
0x2e, 0x4c, 0x61, 0x62, 0x65, 0x6c, 0x53, 0x65, 0x74, 0x52, 0x10, 0x64, 0x69, 0x73, 0x63, 0x6f,
0x76, 0x65, 0x72, 0x65, 0x64, 0x4c, 0x61, 0x62, 0x65, 0x6c, 0x73, 0x12, 0x3d, 0x0a, 0x06, 0x6c,
0x61, 0x62, 0x65, 0x6c, 0x73, 0x18, 0x02, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x25, 0x2e, 0x70, 0x61,
0x72, 0x63, 0x61, 0x2e, 0x70, 0x72, 0x6f, 0x66, 0x69, 0x6c, 0x65, 0x73, 0x74, 0x6f, 0x72, 0x65,
0x2e, 0x76, 0x31, 0x61, 0x6c, 0x70, 0x68, 0x61, 0x31, 0x2e, 0x4c, 0x61, 0x62, 0x65, 0x6c, 0x53,
0x65, 0x74, 0x52, 0x06, 0x6c, 0x61, 0x62, 0x65, 0x6c, 0x73, 0x12, 0x1d, 0x0a, 0x0a, 0x6c, 0x61,
0x73, 0x74, 0x5f, 0x65, 0x72, 0x72, 0x6f, 0x72, 0x18, 0x03, 0x20, 0x01, 0x28, 0x09, 0x52, 0x09,
0x6c, 0x61, 0x73, 0x74, 0x45, 0x72, 0x72, 0x6f, 0x72, 0x12, 0x3b, 0x0a, 0x0b, 0x6c, 0x61, 0x73,
0x74, 0x5f, 0x73, 0x63, 0x72, 0x61, 0x70, 0x65, 0x18, 0x04, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x1a,
0x2e, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x62, 0x75, 0x66,
0x2e, 0x54, 0x69, 0x6d, 0x65, 0x73, 0x74, 0x61, 0x6d, 0x70, 0x52, 0x0a, 0x6c, 0x61, 0x73, 0x74,
0x53, 0x63, 0x72, 0x61, 0x70, 0x65, 0x12, 0x4b, 0x0a, 0x14, 0x6c, 0x61, 0x73, 0x74, 0x5f, 0x73,
0x63, 0x72, 0x61, 0x70, 0x65, 0x5f, 0x64, 0x75, 0x72, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x18, 0x05,
0x20, 0x01, 0x28, 0x0b, 0x32, 0x19, 0x2e, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2e, 0x70, 0x72,
0x6f, 0x74, 0x6f, 0x62, 0x75, 0x66, 0x2e, 0x44, 0x75, 0x72, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x52,
0x12, 0x6c, 0x61, 0x73, 0x74, 0x53, 0x63, 0x72, 0x61, 0x70, 0x65, 0x44, 0x75, 0x72, 0x61, 0x74,
0x69, 0x6f, 0x6e, 0x12, 0x10, 0x0a, 0x03, 0x75, 0x72, 0x6c, 0x18, 0x06, 0x20, 0x01, 0x28, 0x09,
0x52, 0x03, 0x75, 0x72, 0x6c, 0x12, 0x3c, 0x0a, 0x06, 0x68, 0x65, 0x61, 0x6c, 0x74, 0x68, 0x18,
0x07, 0x20, 0x01, 0x28, 0x0e, 0x32, 0x24, 0x2e, 0x70, 0x61, 0x72, 0x63, 0x61, 0x2e, 0x73, 0x63,
0x72, 0x61, 0x70, 0x65, 0x2e, 0x76, 0x31, 0x61, 0x6c, 0x70, 0x68, 0x61, 0x31, 0x2e, 0x54, 0x61,
0x72, 0x67, 0x65, 0x74, 0x2e, 0x48, 0x65, 0x61, 0x6c, 0x74, 0x68, 0x52, 0x06, 0x68, 0x65, 0x61,
0x6c, 0x74, 0x68, 0x22, 0x49, 0x0a, 0x06, 0x48, 0x65, 0x61, 0x6c, 0x74, 0x68, 0x12, 0x1e, 0x0a,
0x1a, 0x48, 0x45, 0x41, 0x4c, 0x54, 0x48, 0x5f, 0x55, 0x4e, 0x4b, 0x4e, 0x4f, 0x57, 0x4e, 0x5f,
0x55, 0x4e, 0x53, 0x50, 0x45, 0x43, 0x49, 0x46, 0x49, 0x45, 0x44, 0x10, 0x00, 0x12, 0x0f, 0x0a,
0x0b, 0x48, 0x45, 0x41, 0x4c, 0x54, 0x48, 0x5f, 0x47, 0x4f, 0x4f, 0x44, 0x10, 0x01, 0x12, 0x0e,
0x0a, 0x0a, 0x48, 0x45, 0x41, 0x4c, 0x54, 0x48, 0x5f, 0x42, 0x41, 0x44, 0x10, 0x02, 0x32, 0x7b,
0x0a, 0x0d, 0x53, 0x63, 0x72, 0x61, 0x70, 0x65, 0x53, 0x65, 0x72, 0x76, 0x69, 0x63, 0x65, 0x12,
0x6a, 0x0a, 0x07, 0x54, 0x61, 0x72, 0x67, 0x65, 0x74, 0x73, 0x12, 0x25, 0x2e, 0x70, 0x61, 0x72,
0x63, 0x61, 0x2e, 0x73, 0x63, 0x72, 0x61, 0x70, 0x65, 0x2e, 0x76, 0x31, 0x61, 0x6c, 0x70, 0x68,
0x61, 0x31, 0x2e, 0x54, 0x61, 0x72, 0x67, 0x65, 0x74, 0x73, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73,
0x74, 0x1a, 0x26, 0x2e, 0x70, 0x61, 0x72, 0x63, 0x61, 0x2e, 0x73, 0x63, 0x72, 0x61, 0x70, 0x65,
0x2e, 0x76, 0x31, 0x61, 0x6c, 0x70, 0x68, 0x61, 0x31, 0x2e, 0x54, 0x61, 0x72, 0x67, 0x65, 0x74,
0x73, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x22, 0x10, 0x82, 0xd3, 0xe4, 0x93, 0x02,
0x0a, 0x12, 0x08, 0x2f, 0x74, 0x61, 0x72, 0x67, 0x65, 0x74, 0x73, 0x42, 0xec, 0x01, 0x0a, 0x19,
0x63, 0x6f, 0x6d, 0x2e, 0x70, 0x61, 0x72, 0x63, 0x61, 0x2e, 0x73, 0x63, 0x72, 0x61, 0x70, 0x65,
0x2e, 0x76, 0x31, 0x61, 0x6c, 0x70, 0x68, 0x61, 0x31, 0x42, 0x0b, 0x53, 0x63, 0x72, 0x61, 0x70,
0x65, 0x50, 0x72, 0x6f, 0x74, 0x6f, 0x50, 0x01, 0x5a, 0x4c, 0x67, 0x69, 0x74, 0x68, 0x75, 0x62,
0x2e, 0x63, 0x6f, 0x6d, 0x2f, 0x70, 0x61, 0x72, 0x63, 0x61, 0x2d, 0x64, 0x65, 0x76, 0x2f, 0x70,
0x61, 0x72, 0x63, 0x61, 0x2f, 0x67, 0x65, 0x6e, 0x2f, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x2f, 0x67,
0x6f, 0x2f, 0x70, 0x61, 0x72, 0x63, 0x61, 0x2f, 0x73, 0x63, 0x72, 0x61, 0x70, 0x65, 0x2f, 0x76,
0x31, 0x61, 0x6c, 0x70, 0x68, 0x61, 0x31, 0x3b, 0x73, 0x63, 0x72, 0x61, 0x70, 0x65, 0x76, 0x31,
0x61, 0x6c, 0x70, 0x68, 0x61, 0x31, 0xa2, 0x02, 0x03, 0x50, 0x53, 0x58, 0xaa, 0x02, 0x15, 0x50,
0x61, 0x72, 0x63, 0x61, 0x2e, 0x53, 0x63, 0x72, 0x61, 0x70, 0x65, 0x2e, 0x56, 0x31, 0x61, 0x6c,
0x70, 0x68, 0x61, 0x31, 0xca, 0x02, 0x15, 0x50, 0x61, 0x72, 0x63, 0x61, 0x5c, 0x53, 0x63, 0x72,
0x61, 0x70, 0x65, 0x5c, 0x56, 0x31, 0x61, 0x6c, 0x70, 0x68, 0x61, 0x31, 0xe2, 0x02, 0x21, 0x50,
0x61, 0x72, 0x63, 0x61, 0x5c, 0x53, 0x63, 0x72, 0x61, 0x70, 0x65, 0x5c, 0x56, 0x31, 0x61, 0x6c,
0x70, 0x68, 0x61, 0x31, 0x5c, 0x47, 0x50, 0x42, 0x4d, 0x65, 0x74, 0x61, 0x64, 0x61, 0x74, 0x61,
0xea, 0x02, 0x17, 0x50, 0x61, 0x72, 0x63, 0x61, 0x3a, 0x3a, 0x53, 0x63, 0x72, 0x61, 0x70, 0x65,
0x3a, 0x3a, 0x56, 0x31, 0x61, 0x6c, 0x70, 0x68, 0x61, 0x31, 0x62, 0x06, 0x70, 0x72, 0x6f, 0x74,
0x6f, 0x33,
}
var (
file_parca_scrape_v1alpha1_scrape_proto_rawDescOnce sync.Once
file_parca_scrape_v1alpha1_scrape_proto_rawDescData = file_parca_scrape_v1alpha1_scrape_proto_rawDesc
)
func file_parca_scrape_v1alpha1_scrape_proto_rawDescGZIP() []byte {
file_parca_scrape_v1alpha1_scrape_proto_rawDescOnce.Do(func() {
file_parca_scrape_v1alpha1_scrape_proto_rawDescData = protoimpl.X.CompressGZIP(file_parca_scrape_v1alpha1_scrape_proto_rawDescData)
})
return file_parca_scrape_v1alpha1_scrape_proto_rawDescData
}
var file_parca_scrape_v1alpha1_scrape_proto_enumTypes = make([]protoimpl.EnumInfo, 2)
var file_parca_scrape_v1alpha1_scrape_proto_msgTypes = make([]protoimpl.MessageInfo, 5)
var file_parca_scrape_v1alpha1_scrape_proto_goTypes = []interface{}{
(TargetsRequest_State)(0), // 0: parca.scrape.v1alpha1.TargetsRequest.State
(Target_Health)(0), // 1: parca.scrape.v1alpha1.Target.Health
(*TargetsRequest)(nil), // 2: parca.scrape.v1alpha1.TargetsRequest
(*TargetsResponse)(nil), // 3: parca.scrape.v1alpha1.TargetsResponse
(*Targets)(nil), // 4: parca.scrape.v1alpha1.Targets
(*Target)(nil), // 5: parca.scrape.v1alpha1.Target
nil, // 6: parca.scrape.v1alpha1.TargetsResponse.TargetsEntry
(*v1alpha1.LabelSet)(nil), // 7: parca.profilestore.v1alpha1.LabelSet
(*timestamppb.Timestamp)(nil), // 8: google.protobuf.Timestamp
(*durationpb.Duration)(nil), // 9: google.protobuf.Duration
}
var file_parca_scrape_v1alpha1_scrape_proto_depIdxs = []int32{
0, // 0: parca.scrape.v1alpha1.TargetsRequest.state:type_name -> parca.scrape.v1alpha1.TargetsRequest.State
6, // 1: parca.scrape.v1alpha1.TargetsResponse.targets:type_name -> parca.scrape.v1alpha1.TargetsResponse.TargetsEntry
5, // 2: parca.scrape.v1alpha1.Targets.targets:type_name -> parca.scrape.v1alpha1.Target
7, // 3: parca.scrape.v1alpha1.Target.discovered_labels:type_name -> parca.profilestore.v1alpha1.LabelSet
7, // 4: parca.scrape.v1alpha1.Target.labels:type_name -> parca.profilestore.v1alpha1.LabelSet
8, // 5: parca.scrape.v1alpha1.Target.last_scrape:type_name -> google.protobuf.Timestamp
9, // 6: parca.scrape.v1alpha1.Target.last_scrape_duration:type_name -> google.protobuf.Duration
1, // 7: parca.scrape.v1alpha1.Target.health:type_name -> parca.scrape.v1alpha1.Target.Health
4, // 8: parca.scrape.v1alpha1.TargetsResponse.TargetsEntry.value:type_name -> parca.scrape.v1alpha1.Targets
2, // 9: parca.scrape.v1alpha1.ScrapeService.Targets:input_type -> parca.scrape.v1alpha1.TargetsRequest
3, // 10: parca.scrape.v1alpha1.ScrapeService.Targets:output_type -> parca.scrape.v1alpha1.TargetsResponse
10, // [10:11] is the sub-list for method output_type
9, // [9:10] is the sub-list for method input_type
9, // [9:9] is the sub-list for extension type_name
9, // [9:9] is the sub-list for extension extendee
0, // [0:9] is the sub-list for field type_name
}
func init() { file_parca_scrape_v1alpha1_scrape_proto_init() }
func file_parca_scrape_v1alpha1_scrape_proto_init() {
if File_parca_scrape_v1alpha1_scrape_proto != nil {
return
}
if !protoimpl.UnsafeEnabled {
file_parca_scrape_v1alpha1_scrape_proto_msgTypes[0].Exporter = func(v interface{}, i int) interface{} {
switch v := v.(*TargetsRequest); i {
case 0:
return &v.state
case 1:
return &v.sizeCache
case 2:
return &v.unknownFields
default:
return nil
}
}
file_parca_scrape_v1alpha1_scrape_proto_msgTypes[1].Exporter = func(v interface{}, i int) interface{} {
switch v := v.(*TargetsResponse); i {
case 0:
return &v.state
case 1:
return &v.sizeCache
case 2:
return &v.unknownFields
default:
return nil
}
}
file_parca_scrape_v1alpha1_scrape_proto_msgTypes[2].Exporter = func(v interface{}, i int) interface{} {
switch v := v.(*Targets); i {
case 0:
return &v.state
case 1:
return &v.sizeCache
case 2:
return &v.unknownFields
default:
return nil
}
}
file_parca_scrape_v1alpha1_scrape_proto_msgTypes[3].Exporter = func(v interface{}, i int) interface{} {
switch v := v.(*Target); i {
case 0:
return &v.state
case 1:
return &v.sizeCache
case 2:
return &v.unknownFields
default:
return nil
}
}
}
type x struct{}
out := protoimpl.TypeBuilder{
File: protoimpl.DescBuilder{
GoPackagePath: reflect.TypeOf(x{}).PkgPath(),
RawDescriptor: file_parca_scrape_v1alpha1_scrape_proto_rawDesc,
NumEnums: 2,
NumMessages: 5,
NumExtensions: 0,
NumServices: 1,
},
GoTypes: file_parca_scrape_v1alpha1_scrape_proto_goTypes,
DependencyIndexes: file_parca_scrape_v1alpha1_scrape_proto_depIdxs,
EnumInfos: file_parca_scrape_v1alpha1_scrape_proto_enumTypes,
MessageInfos: file_parca_scrape_v1alpha1_scrape_proto_msgTypes,
}.Build()
File_parca_scrape_v1alpha1_scrape_proto = out.File
file_parca_scrape_v1alpha1_scrape_proto_rawDesc = nil
file_parca_scrape_v1alpha1_scrape_proto_goTypes = nil
file_parca_scrape_v1alpha1_scrape_proto_depIdxs = nil
}

File diff suppressed because it is too large

View File

@@ -0,0 +1,88 @@
// Code generated by protoc-gen-connect-go. DO NOT EDIT.
//
// Source: parca/scrape/v1alpha1/scrape.proto
package scrapev1alpha1connect
import (
context "context"
errors "errors"
connect_go "github.com/bufbuild/connect-go"
v1alpha1 "github.com/parca-dev/parca/gen/proto/go/parca/scrape/v1alpha1"
http "net/http"
strings "strings"
)
// This is a compile-time assertion to ensure that this generated file and the connect package are
// compatible. If you get a compiler error that this constant is not defined, this code was
// generated with a version of connect newer than the one compiled into your binary. You can fix the
// problem by either regenerating this code with an older version of connect or updating the connect
// version compiled into your binary.
const _ = connect_go.IsAtLeastVersion0_1_0
const (
// ScrapeServiceName is the fully-qualified name of the ScrapeService service.
ScrapeServiceName = "parca.scrape.v1alpha1.ScrapeService"
)
// ScrapeServiceClient is a client for the parca.scrape.v1alpha1.ScrapeService service.
type ScrapeServiceClient interface {
// Targets returns the set of scrape targets that are configured
Targets(context.Context, *connect_go.Request[v1alpha1.TargetsRequest]) (*connect_go.Response[v1alpha1.TargetsResponse], error)
}
// NewScrapeServiceClient constructs a client for the parca.scrape.v1alpha1.ScrapeService service.
// By default, it uses the Connect protocol with the binary Protobuf Codec, asks for gzipped
// responses, and sends uncompressed requests. To use the gRPC or gRPC-Web protocols, supply the
// connect.WithGRPC() or connect.WithGRPCWeb() options.
//
// The URL supplied here should be the base URL for the Connect or gRPC server (for example,
// http://api.acme.com or https://acme.com/grpc).
func NewScrapeServiceClient(httpClient connect_go.HTTPClient, baseURL string, opts ...connect_go.ClientOption) ScrapeServiceClient {
baseURL = strings.TrimRight(baseURL, "/")
return &scrapeServiceClient{
targets: connect_go.NewClient[v1alpha1.TargetsRequest, v1alpha1.TargetsResponse](
httpClient,
baseURL+"/parca.scrape.v1alpha1.ScrapeService/Targets",
opts...,
),
}
}
// scrapeServiceClient implements ScrapeServiceClient.
type scrapeServiceClient struct {
targets *connect_go.Client[v1alpha1.TargetsRequest, v1alpha1.TargetsResponse]
}
// Targets calls parca.scrape.v1alpha1.ScrapeService.Targets.
func (c *scrapeServiceClient) Targets(ctx context.Context, req *connect_go.Request[v1alpha1.TargetsRequest]) (*connect_go.Response[v1alpha1.TargetsResponse], error) {
return c.targets.CallUnary(ctx, req)
}
// ScrapeServiceHandler is an implementation of the parca.scrape.v1alpha1.ScrapeService service.
type ScrapeServiceHandler interface {
// Targets returns the set of scrape targets that are configured
Targets(context.Context, *connect_go.Request[v1alpha1.TargetsRequest]) (*connect_go.Response[v1alpha1.TargetsResponse], error)
}
// NewScrapeServiceHandler builds an HTTP handler from the service implementation. It returns the
// path on which to mount the handler and the handler itself.
//
// By default, handlers support the Connect, gRPC, and gRPC-Web protocols with the binary Protobuf
// and JSON codecs. They also support gzip compression.
func NewScrapeServiceHandler(svc ScrapeServiceHandler, opts ...connect_go.HandlerOption) (string, http.Handler) {
mux := http.NewServeMux()
mux.Handle("/parca.scrape.v1alpha1.ScrapeService/Targets", connect_go.NewUnaryHandler(
"/parca.scrape.v1alpha1.ScrapeService/Targets",
svc.Targets,
opts...,
))
return "/parca.scrape.v1alpha1.ScrapeService/", mux
}
// UnimplementedScrapeServiceHandler returns CodeUnimplemented from all methods.
type UnimplementedScrapeServiceHandler struct{}
func (UnimplementedScrapeServiceHandler) Targets(context.Context, *connect_go.Request[v1alpha1.TargetsRequest]) (*connect_go.Response[v1alpha1.TargetsResponse], error) {
return nil, connect_go.NewError(connect_go.CodeUnimplemented, errors.New("parca.scrape.v1alpha1.ScrapeService.Targets is not implemented"))
}
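
Analogous to the other generated clients, a short sketch of listing the currently active scrape targets; the server address and import paths are assumptions:

package main

import (
    "context"
    "fmt"
    "log"
    "net/http"

    connect_go "github.com/bufbuild/connect-go"
    v1alpha1 "github.com/parca-dev/parca/gen/proto/go/parca/scrape/v1alpha1"
    "github.com/parca-dev/parca/gen/proto/go/parca/scrape/v1alpha1/scrapev1alpha1connect"
)

func main() {
    client := scrapev1alpha1connect.NewScrapeServiceClient(http.DefaultClient, "http://localhost:7070")

    // Request only targets that are currently being scraped.
    resp, err := client.Targets(context.Background(), connect_go.NewRequest(&v1alpha1.TargetsRequest{
        State: v1alpha1.TargetsRequest_STATE_ACTIVE,
    }))
    if err != nil {
        log.Fatal(err)
    }
    for pool, targets := range resp.Msg.Targets {
        fmt.Printf("%s: %d targets\n", pool, len(targets.Targets))
    }
}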

114
pkg/tsdb/parca/plugin.go Normal file
View File

@@ -0,0 +1,114 @@
package parca
import (
"context"
"github.com/bufbuild/connect-go"
"github.com/grafana/grafana-plugin-sdk-go/backend"
"github.com/grafana/grafana-plugin-sdk-go/backend/instancemgmt"
"github.com/grafana/grafana/pkg/infra/httpclient"
"github.com/grafana/grafana/pkg/tsdb/parca/gen/parca/parca/query/v1alpha1/queryv1alpha1connect"
v1alpha1 "github.com/parca-dev/parca/gen/proto/go/parca/query/v1alpha1"
)
// Make sure ParcaDatasource implements the required interfaces. This is important to do since
// otherwise we would only get a "not implemented" error response from the plugin at runtime.
// This datasource implements the backend.QueryDataHandler, backend.CallResourceHandler and
// backend.CheckHealthHandler interfaces. A plugin does not have to implement all of these
// interfaces - only those required for a particular task. Implementing
// instancemgmt.InstanceDisposer is useful for cleaning up resources used by a previous
// datasource instance when a new instance is created after the datasource settings change.
var (
_ backend.QueryDataHandler = (*ParcaDatasource)(nil)
_ backend.CallResourceHandler = (*ParcaDatasource)(nil)
_ backend.CheckHealthHandler = (*ParcaDatasource)(nil)
)
// ParcaDatasource is a datasource for querying application performance profiles.
type ParcaDatasource struct {
client queryv1alpha1connect.QueryServiceClient
}
// NewParcaDatasource creates a new datasource instance.
func NewParcaDatasource(httpClientProvider httpclient.Provider, settings backend.DataSourceInstanceSettings) (instancemgmt.Instance, error) {
opt, err := settings.HTTPClientOptions()
if err != nil {
return nil, err
}
httpClient, err := httpClientProvider.New(opt)
if err != nil {
return nil, err
}
return &ParcaDatasource{
client: queryv1alpha1connect.NewQueryServiceClient(httpClient, settings.URL, connect.WithGRPCWeb()),
}, nil
}
// Dispose tells the plugin SDK that the plugin wants to clean up resources when a new instance is
// created. As soon as the SDK detects a change in the datasource settings, the old datasource
// instance is disposed and a new one is created using the NewParcaDatasource factory function.
func (d *ParcaDatasource) Dispose() {
// Clean up datasource instance resources.
}
func (d *ParcaDatasource) CallResource(ctx context.Context, req *backend.CallResourceRequest, sender backend.CallResourceResponseSender) error {
logger.Debug("CallResource", "Path", req.Path, "Method", req.Method, "Body", req.Body)
if req.Path == "profileTypes" {
return d.callProfileTypes(ctx, req, sender)
}
if req.Path == "labelNames" {
return d.callLabelNames(ctx, req, sender)
}
if req.Path == "labelValues" {
return d.callLabelValues(ctx, req, sender)
}
return sender.Send(&backend.CallResourceResponse{
Status: 404,
})
}
// QueryData handles multiple queries and returns multiple responses.
// req contains the queries []DataQuery (where each query contains RefID as a unique identifier).
// The QueryDataResponse contains a map of RefID to the response for each query, and each response
// contains Frames ([]*Frame).
func (d *ParcaDatasource) QueryData(ctx context.Context, req *backend.QueryDataRequest) (*backend.QueryDataResponse, error) {
logger.Debug("QueryData called", "queries", req.Queries)
// create response struct
response := backend.NewQueryDataResponse()
// loop over queries and execute them individually.
for _, q := range req.Queries {
res := d.query(ctx, req.PluginContext, q)
// save the response in a hashmap
// keyed by the query's RefID
response.Responses[q.RefID] = res
}
return response, nil
}
// CheckHealth handles health checks sent from Grafana to the plugin.
// The main use case for these health checks is the test button on the
// datasource configuration page which allows users to verify that
// a datasource is working as expected.
func (d *ParcaDatasource) CheckHealth(ctx context.Context, _ *backend.CheckHealthRequest) (*backend.CheckHealthResult, error) {
logger.Debug("CheckHealth called")
status := backend.HealthStatusOk
message := "Data source is working"
if _, err := d.client.ProfileTypes(ctx, connect.NewRequest(&v1alpha1.ProfileTypesRequest{})); err != nil {
status = backend.HealthStatusError
message = err.Error()
}
return &backend.CheckHealthResult{
Status: status,
Message: message,
}, nil
}
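
A rough sketch of exercising the datasource outside of Grafana's plugin wiring; it assumes httpclient.NewProvider() from pkg/infra/httpclient and a reachable Parca URL, and is not how Grafana itself instantiates the plugin (that happens through the SDK's instance manager):

package main

import (
    "context"
    "log"

    "github.com/grafana/grafana-plugin-sdk-go/backend"
    "github.com/grafana/grafana/pkg/infra/httpclient"
    "github.com/grafana/grafana/pkg/tsdb/parca"
)

func main() {
    // Assumption: httpclient.NewProvider() returns a provider satisfying the httpclient.Provider
    // interface expected by NewParcaDatasource.
    instance, err := parca.NewParcaDatasource(httpclient.NewProvider(), backend.DataSourceInstanceSettings{
        URL: "http://localhost:7070", // illustrative Parca address
    })
    if err != nil {
        log.Fatal(err)
    }
    ds := instance.(*parca.ParcaDatasource)

    // CheckHealth issues a ProfileTypes request, so a healthy result means the server is reachable.
    res, err := ds.CheckHealth(context.Background(), &backend.CheckHealthRequest{})
    if err != nil {
        log.Fatal(err)
    }
    log.Println(res.Status, res.Message)
}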

View File

@@ -0,0 +1,67 @@
package parca
import (
"context"
"testing"
"github.com/grafana/grafana-plugin-sdk-go/backend"
"github.com/stretchr/testify/require"
)
// This is where the tests for the datasource backend live.
func Test_QueryData(t *testing.T) {
ds := ParcaDatasource{}
resp, err := ds.QueryData(
context.Background(),
&backend.QueryDataRequest{
Queries: []backend.DataQuery{
{
RefID: "A",
QueryType: "profile",
},
},
},
)
if err != nil {
t.Error(err)
}
if len(resp.Responses) != 1 {
t.Fatal("QueryData must return a response")
}
}
func Test_CallResource(t *testing.T) {
ds := &ParcaDatasource{
client: &FakeClient{},
}
t.Run("labels resource", func(t *testing.T) {
sender := &FakeSender{}
err := ds.CallResource(
context.Background(),
&backend.CallResourceRequest{
PluginContext: backend.PluginContext{},
Path: "labelNames",
Method: "GET",
URL: "labelNames",
Headers: nil,
Body: nil,
},
sender,
)
require.NoError(t, err)
require.Equal(t, 200, sender.Resp.Status)
require.Equal(t, `["instance","job"]`, string(sender.Resp.Body))
})
}
type FakeSender struct {
Resp *backend.CallResourceResponse
}
func (fs *FakeSender) Send(resp *backend.CallResourceResponse) error {
fs.Resp = resp
return nil
}

246
pkg/tsdb/parca/query.go Normal file
View File

@@ -0,0 +1,246 @@
package parca
import (
"context"
"encoding/json"
"fmt"
"strings"
"time"
"github.com/bufbuild/connect-go"
"github.com/grafana/grafana-plugin-sdk-go/backend"
"github.com/grafana/grafana-plugin-sdk-go/data"
v1alpha1 "github.com/parca-dev/parca/gen/proto/go/parca/query/v1alpha1"
"google.golang.org/protobuf/types/known/timestamppb"
)
type queryModel struct {
ProfileTypeID string `json:"profileTypeId"`
LabelSelector string `json:"labelSelector"`
}
// These constants need to match the ones in the frontend.
const queryTypeProfile = "profile"
const queryTypeMetrics = "metrics"
const queryTypeBoth = "both"
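// As a purely illustrative example (the exact profileTypeId string depends on the Parca server),
// a query coming from the frontend could carry JSON such as:
//
//	{ "profileTypeId": "process_cpu:samples:count:cpu:nanoseconds:delta", "labelSelector": "{job=\"default\"}" }
//
// together with queryType set to one of the constants above, which decides whether query() below
// issues a QueryRange call, a merged Query call, or both.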
// query processes a single Parca query, transforming the response into data.Frame packaged in a backend.DataResponse
func (d *ParcaDatasource) query(ctx context.Context, pCtx backend.PluginContext, query backend.DataQuery) backend.DataResponse {
var qm queryModel
response := backend.DataResponse{}
err := json.Unmarshal(query.JSON, &qm)
if err != nil {
response.Error = err
return response
}
if query.QueryType == queryTypeMetrics || query.QueryType == queryTypeBoth {
seriesResp, err := d.client.QueryRange(ctx, makeMetricRequest(qm, query))
if err != nil {
response.Error = err
return response
}
response.Frames = append(response.Frames, seriesToDataFrame(seriesResp, qm.ProfileTypeID)...)
}
if query.QueryType == queryTypeProfile || query.QueryType == queryTypeBoth {
logger.Debug("Querying SelectMergeStacktraces()", "queryModel", qm)
resp, err := d.client.Query(ctx, makeProfileRequest(qm, query))
if err != nil {
response.Error = err
return response
}
frame := responseToDataFrames(resp)
response.Frames = append(response.Frames, frame)
}
return response
}
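// makeProfileRequest builds the merge-profile request. The Parca query string is the profile type ID concatenated
// with the label selector; for example (hypothetical values), ProfileTypeID "process_cpu:samples:count:cpu:nanoseconds"
// and LabelSelector {instance="localhost"} produce the query
// process_cpu:samples:count:cpu:nanoseconds{instance="localhost"}.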
func makeProfileRequest(qm queryModel, query backend.DataQuery) *connect.Request[v1alpha1.QueryRequest] {
return &connect.Request[v1alpha1.QueryRequest]{
Msg: &v1alpha1.QueryRequest{
Mode: v1alpha1.QueryRequest_MODE_MERGE,
Options: &v1alpha1.QueryRequest_Merge{
Merge: &v1alpha1.MergeProfile{
Query: fmt.Sprintf("%s%s", qm.ProfileTypeID, qm.LabelSelector),
Start: &timestamppb.Timestamp{
Seconds: query.TimeRange.From.Unix(),
},
End: &timestamppb.Timestamp{
Seconds: query.TimeRange.To.Unix(),
},
},
},
ReportType: v1alpha1.QueryRequest_REPORT_TYPE_FLAMEGRAPH_UNSPECIFIED,
},
}
}
func makeMetricRequest(qm queryModel, query backend.DataQuery) *connect.Request[v1alpha1.QueryRangeRequest] {
return &connect.Request[v1alpha1.QueryRangeRequest]{
Msg: &v1alpha1.QueryRangeRequest{
Query: fmt.Sprintf("%s%s", qm.ProfileTypeID, qm.LabelSelector),
Start: &timestamppb.Timestamp{
Seconds: query.TimeRange.From.Unix(),
},
End: &timestamppb.Timestamp{
Seconds: query.TimeRange.To.Unix(),
},
Limit: uint32(query.MaxDataPoints),
},
}
}
type CustomMeta struct {
ProfileTypeID string
}
// responseToDataFrames turns a Parca response into a data.Frame. We encode the data into a nested set format with
// [level, value, self, label] columns; by ordering the items in depth-first traversal order we can later recreate
// the whole tree.
func responseToDataFrames(resp *connect.Response[v1alpha1.QueryResponse]) *data.Frame {
if flameResponse, ok := resp.Msg.Report.(*v1alpha1.QueryResponse_Flamegraph); ok {
frame := treeToNestedSetDataFrame(flameResponse.Flamegraph)
frame.Meta = &data.FrameMeta{PreferredVisualization: "flamegraph"}
return frame
} else {
panic("unknown report type returned from query")
}
}
// treeToNestedSetDataFrame walks the tree depth first and adds items into the dataframe. This is a nested set format:
// by ordering the items in depth-first order and knowing the level/depth of each item, we can recreate the
// parent-child relationship without explicitly needing parent/child columns, and we can later iterate over the
// dataframe, which again amounts to walking the tree/profile depth first.
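// For illustration, using the fixture from the tests in this package: a profile total (cumulative 100) -> foo (10)
// -> bar (9) -> baz (8) is encoded as the rows
// level=0 value=100 self=90 label=total
// level=1 value=10  self=1  label=foo
// level=2 value=9   self=1  label=bar
// level=3 value=8   self=8  label=baz
// where self is the node's value minus the sum of its direct children's values.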
func treeToNestedSetDataFrame(tree *v1alpha1.Flamegraph) *data.Frame {
frame := data.NewFrame("response")
levelField := data.NewField("level", nil, []int64{})
valueField := data.NewField("value", nil, []int64{})
valueField.Config = &data.FieldConfig{Unit: normalizeUnit(tree.Unit)}
selfField := data.NewField("self", nil, []int64{})
selfField.Config = &data.FieldConfig{Unit: normalizeUnit(tree.Unit)}
labelField := data.NewField("label", nil, []string{})
frame.Fields = data.Fields{levelField, valueField, selfField, labelField}
walkTree(tree.Root, func(level int64, value int64, name string, self int64) {
levelField.Append(level)
valueField.Append(value)
labelField.Append(name)
selfField.Append(self)
})
return frame
}
type Node struct {
Node *v1alpha1.FlamegraphNode
Level int64
}
func walkTree(tree *v1alpha1.FlamegraphRootNode, fn func(level int64, value int64, name string, self int64)) {
var stack []*Node
var childrenValue int64 = 0
for _, child := range tree.Children {
childrenValue += child.Cumulative
stack = append(stack, &Node{Node: child, Level: 1})
}
fn(0, tree.Cumulative, "total", tree.Cumulative-childrenValue)
for {
if len(stack) == 0 {
break
}
// shift stack
node := stack[0]
stack = stack[1:]
childrenValue = 0
if node.Node.Children != nil {
var children []*Node
for _, child := range node.Node.Children {
childrenValue += child.Cumulative
children = append(children, &Node{Node: child, Level: node.Level + 1})
}
// Put the children first so we do depth first traversal
stack = append(children, stack...)
}
fn(node.Level, node.Node.Cumulative, nodeName(node.Node), node.Node.Cumulative-childrenValue)
}
}
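// nodeName builds a display name for a flamegraph node: "[mapping file] function name" when a function name is
// available, otherwise "[mapping file] 0x<address>", falling back to "<unknown>" when neither exists. For example
// (hypothetical values), a node with mapping file "/usr/lib/libc.so" and function "malloc" is rendered as
// "[libc.so] malloc".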
func nodeName(node *v1alpha1.FlamegraphNode) string {
if node.Meta == nil {
return "<unknown>"
}
mapping := ""
if node.Meta.Mapping != nil && node.Meta.Mapping.File != "" {
mapping = "[" + getLastItem(node.Meta.Mapping.File) + "] "
}
if node.Meta.Function != nil && node.Meta.Function.Name != "" {
return mapping + node.Meta.Function.Name
}
address := ""
if node.Meta.Location != nil {
address = fmt.Sprintf("0x%x", node.Meta.Location.Address)
}
if mapping == "" && address == "" {
return "<unknown>"
} else {
return mapping + address
}
}
func getLastItem(path string) string {
parts := strings.Split(path, "/")
return parts[len(parts)-1]
}
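// normalizeUnit maps Parca units onto Grafana unit identifiers ("nanoseconds" -> "ns", "count" -> "short") so the
// values can be formatted by the UI; anything unrecognized is passed through unchanged.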
func normalizeUnit(unit string) string {
if unit == "nanoseconds" {
return "ns"
}
if unit == "count" {
return "short"
}
return unit
}
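// seriesToDataFrame converts a QueryRange response into one time series frame per series. The value field is named
// after the sample type, i.e. the second segment of the profile type ID; for example
// "process_cpu:samples:count:cpu:nanoseconds" yields a field named "samples" (see the tests in this package).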
func seriesToDataFrame(seriesResp *connect.Response[v1alpha1.QueryRangeResponse], profileTypeID string) []*data.Frame {
var frames []*data.Frame
for _, series := range seriesResp.Msg.Series {
frame := data.NewFrame("series")
frame.Meta = &data.FrameMeta{PreferredVisualization: "graph"}
frames = append(frames, frame)
fields := data.Fields{}
timeField := data.NewField("time", nil, []time.Time{})
fields = append(fields, timeField)
labels := data.Labels{}
for _, label := range series.Labelset.Labels {
labels[label.Name] = label.Value
}
valueField := data.NewField(strings.Split(profileTypeID, ":")[1], labels, []int64{})
for _, sample := range series.Samples {
timeField.Append(sample.Timestamp.AsTime())
valueField.Append(sample.Value)
}
fields = append(fields, valueField)
frame.Fields = fields
}
return frames
}


@ -0,0 +1,217 @@
package parca
import (
"context"
"testing"
"time"
"github.com/bufbuild/connect-go"
"github.com/grafana/grafana-plugin-sdk-go/backend"
"github.com/grafana/grafana-plugin-sdk-go/data"
v1alpha11 "github.com/parca-dev/parca/gen/proto/go/parca/metastore/v1alpha1"
profilestore "github.com/parca-dev/parca/gen/proto/go/parca/profilestore/v1alpha1"
v1alpha1 "github.com/parca-dev/parca/gen/proto/go/parca/query/v1alpha1"
"github.com/stretchr/testify/require"
"google.golang.org/protobuf/types/known/timestamppb"
)
// This is where the tests for the datasource backend live.
func Test_query(t *testing.T) {
ds := &ParcaDatasource{
client: &FakeClient{},
}
dataQuery := backend.DataQuery{
RefID: "A",
QueryType: queryTypeBoth,
MaxDataPoints: 0,
Interval: 0,
TimeRange: backend.TimeRange{
From: time.UnixMilli(10000),
To: time.UnixMilli(20000),
},
JSON: []byte(`{"profileTypeId":"foo:bar","labelSelector":"{app=\\\"baz\\\"}"}`),
}
t.Run("query both", func(t *testing.T) {
resp := ds.query(context.Background(), backend.PluginContext{}, dataQuery)
require.Nil(t, resp.Error)
require.Equal(t, 2, len(resp.Frames))
require.Equal(t, "time", resp.Frames[0].Fields[0].Name)
require.Equal(t, data.NewField("level", nil, []int64{0, 1, 2, 3}), resp.Frames[1].Fields[0])
})
t.Run("query profile", func(t *testing.T) {
dataQuery.QueryType = queryTypeProfile
resp := ds.query(context.Background(), backend.PluginContext{}, dataQuery)
require.Nil(t, resp.Error)
require.Equal(t, 1, len(resp.Frames))
require.Equal(t, data.NewField("level", nil, []int64{0, 1, 2, 3}), resp.Frames[0].Fields[0])
})
t.Run("query metrics", func(t *testing.T) {
dataQuery.QueryType = queryTypeMetrics
resp := ds.query(context.Background(), backend.PluginContext{}, dataQuery)
require.Nil(t, resp.Error)
require.Equal(t, 1, len(resp.Frames))
require.Equal(t, "time", resp.Frames[0].Fields[0].Name)
})
}
// This is where the tests for the datasource backend live.
func Test_profileToDataFrame(t *testing.T) {
frame := responseToDataFrames(flamegraphResponse)
require.Equal(t, 4, len(frame.Fields))
require.Equal(t, data.NewField("level", nil, []int64{0, 1, 2, 3}), frame.Fields[0])
values := data.NewField("value", nil, []int64{100, 10, 9, 8})
values.Config = &data.FieldConfig{
Unit: "samples",
}
require.Equal(t, values, frame.Fields[1])
self := data.NewField("self", nil, []int64{90, 1, 1, 8})
self.Config = &data.FieldConfig{
Unit: "samples",
}
require.Equal(t, self, frame.Fields[2])
require.Equal(t, data.NewField("label", nil, []string{"total", "foo", "bar", "baz"}), frame.Fields[3])
}
func Test_seriesToDataFrame(t *testing.T) {
frames := seriesToDataFrame(rangeResponse, "process_cpu:samples:count:cpu:nanoseconds")
require.Equal(t, 1, len(frames))
require.Equal(t, 2, len(frames[0].Fields))
require.Equal(t, data.NewField("time", nil, []time.Time{time.UnixMilli(1000 * 10).UTC(), time.UnixMilli(1000 * 20).UTC()}), frames[0].Fields[0])
require.Equal(t, data.NewField("samples", map[string]string{"foo": "bar"}, []int64{30, 10}), frames[0].Fields[1])
}
var rangeResponse = &connect.Response[v1alpha1.QueryRangeResponse]{
Msg: &v1alpha1.QueryRangeResponse{
Series: []*v1alpha1.MetricsSeries{
{
Labelset: &profilestore.LabelSet{
Labels: []*profilestore.Label{
{
Name: "foo",
Value: "bar",
},
},
},
Samples: []*v1alpha1.MetricsSample{
{
Timestamp: &timestamppb.Timestamp{
Seconds: 10,
Nanos: 0,
},
Value: 30,
},
{
Timestamp: &timestamppb.Timestamp{
Seconds: 20,
Nanos: 0,
},
Value: 10,
},
},
PeriodType: nil,
SampleType: nil,
},
},
},
}
var flamegraphResponse = &connect.Response[v1alpha1.QueryResponse]{
Msg: &v1alpha1.QueryResponse{
Report: &v1alpha1.QueryResponse_Flamegraph{
Flamegraph: &v1alpha1.Flamegraph{
Root: &v1alpha1.FlamegraphRootNode{
Cumulative: 100,
Diff: 0,
Children: []*v1alpha1.FlamegraphNode{
{
Meta: &v1alpha1.FlamegraphNodeMeta{
Function: &v1alpha11.Function{
Name: "foo",
},
},
Cumulative: 10,
Diff: 0,
Children: []*v1alpha1.FlamegraphNode{
{
Meta: &v1alpha1.FlamegraphNodeMeta{
Function: &v1alpha11.Function{
Name: "bar",
},
},
Cumulative: 9,
Diff: 0,
Children: []*v1alpha1.FlamegraphNode{
{
Meta: &v1alpha1.FlamegraphNodeMeta{
Function: &v1alpha11.Function{
Name: "baz",
},
},
Cumulative: 8,
Diff: 0,
},
},
},
},
},
},
},
Total: 100,
Unit: "samples",
Height: 3,
},
},
},
}
type FakeClient struct {
Req *connect.Request[v1alpha1.QueryRequest]
}
func (f *FakeClient) QueryRange(ctx context.Context, c *connect.Request[v1alpha1.QueryRangeRequest]) (*connect.Response[v1alpha1.QueryRangeResponse], error) {
return rangeResponse, nil
}
func (f *FakeClient) Query(ctx context.Context, c *connect.Request[v1alpha1.QueryRequest]) (*connect.Response[v1alpha1.QueryResponse], error) {
f.Req = c
return flamegraphResponse, nil
}
func (f *FakeClient) Series(ctx context.Context, c *connect.Request[v1alpha1.SeriesRequest]) (*connect.Response[v1alpha1.SeriesResponse], error) {
//TODO implement me
panic("implement me")
}
func (f *FakeClient) ProfileTypes(ctx context.Context, c *connect.Request[v1alpha1.ProfileTypesRequest]) (*connect.Response[v1alpha1.ProfileTypesResponse], error) {
//TODO implement me
panic("implement me")
}
func (f *FakeClient) Labels(ctx context.Context, c *connect.Request[v1alpha1.LabelsRequest]) (*connect.Response[v1alpha1.LabelsResponse], error) {
return &connect.Response[v1alpha1.LabelsResponse]{
Msg: &v1alpha1.LabelsResponse{
LabelNames: []string{"instance", "job"},
Warnings: nil,
},
}, nil
}
func (f *FakeClient) Values(ctx context.Context, c *connect.Request[v1alpha1.ValuesRequest]) (*connect.Response[v1alpha1.ValuesResponse], error) {
return &connect.Response[v1alpha1.ValuesResponse]{
Msg: &v1alpha1.ValuesResponse{
LabelValues: []string{"foo", "bar"},
Warnings: nil,
},
}, nil
}
func (f *FakeClient) ShareProfile(ctx context.Context, c *connect.Request[v1alpha1.ShareProfileRequest]) (*connect.Response[v1alpha1.ShareProfileResponse], error) {
//TODO implement me
panic("implement me")
}

pkg/tsdb/parca/resources.go

@ -0,0 +1,101 @@
package parca
import (
"context"
"encoding/json"
"fmt"
"net/url"
"github.com/bufbuild/connect-go"
"github.com/grafana/grafana-plugin-sdk-go/backend"
v1alpha1 "github.com/parca-dev/parca/gen/proto/go/parca/query/v1alpha1"
)
type ProfileType struct {
// Same as *v1alpha1.ProfileType, but with an ID field added.
Name string `json:"name,omitempty"`
SampleType string `json:"sample_type,omitempty"`
SampleUnit string `json:"sample_unit,omitempty"`
PeriodType string `json:"period_type,omitempty"`
PeriodUnit string `json:"period_unit,omitempty"`
Delta bool `json:"delta,omitempty"`
ID string `json:"ID,omitempty"`
}
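// callProfileTypes lists the available profile types, adding an ID of the form
// <name>:<sample type>:<sample unit>:<period type>:<period unit>, with a trailing ":delta" for delta profiles -
// for example "process_cpu:samples:count:cpu:nanoseconds".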
func (d *ParcaDatasource) callProfileTypes(ctx context.Context, req *backend.CallResourceRequest, sender backend.CallResourceResponseSender) error {
res, err := d.client.ProfileTypes(ctx, connect.NewRequest(&v1alpha1.ProfileTypesRequest{}))
if err != nil {
return err
}
var types []*ProfileType
for _, t := range res.Msg.Types {
var id string
if t.Delta {
id = fmt.Sprintf("%s:%s:%s:%s:%s:delta", t.Name, t.SampleType, t.SampleUnit, t.PeriodType, t.PeriodUnit)
} else {
id = fmt.Sprintf("%s:%s:%s:%s:%s", t.Name, t.SampleType, t.SampleUnit, t.PeriodType, t.PeriodUnit)
}
types = append(types, &ProfileType{
Name: t.Name,
SampleType: t.SampleType,
SampleUnit: t.SampleUnit,
PeriodType: t.PeriodType,
PeriodUnit: t.PeriodUnit,
Delta: t.Delta,
ID: id,
})
}
data, err := json.Marshal(types)
if err != nil {
return err
}
err = sender.Send(&backend.CallResourceResponse{Body: data, Headers: req.Headers, Status: 200})
if err != nil {
return err
}
return nil
}
func (d *ParcaDatasource) callLabelNames(ctx context.Context, req *backend.CallResourceRequest, sender backend.CallResourceResponseSender) error {
res, err := d.client.Labels(ctx, connect.NewRequest(&v1alpha1.LabelsRequest{}))
if err != nil {
return err
}
data, err := json.Marshal(res.Msg.LabelNames)
if err != nil {
return err
}
err = sender.Send(&backend.CallResourceResponse{Body: data, Headers: req.Headers, Status: 200})
if err != nil {
return err
}
return nil
}
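// callLabelValues returns the values of a single label, read from the "label" query parameter of the resource URL
// (an empty label name is used when the parameter is missing). As a sketch, a call with ?label=instance would
// respond with a JSON array of values from the Values RPC, e.g. ["foo","bar"] with the fake client used in the tests.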
func (d *ParcaDatasource) callLabelValues(ctx context.Context, req *backend.CallResourceRequest, sender backend.CallResourceResponseSender) error {
parsedUrl, err := url.Parse(req.URL)
if err != nil {
return err
}
label, ok := parsedUrl.Query()["label"]
if !ok {
label = []string{""}
}
res, err := d.client.Values(ctx, connect.NewRequest(&v1alpha1.ValuesRequest{LabelName: label[0]}))
if err != nil {
return err
}
data, err := json.Marshal(res.Msg.LabelValues)
if err != nil {
return err
}
err = sender.Send(&backend.CallResourceResponse{Body: data, Headers: req.Headers, Status: 200})
if err != nil {
return err
}
return nil
}

pkg/tsdb/parca/service.go

@ -0,0 +1,77 @@
package parca
import (
"context"
"github.com/grafana/grafana-plugin-sdk-go/backend"
"github.com/grafana/grafana-plugin-sdk-go/backend/datasource"
"github.com/grafana/grafana-plugin-sdk-go/backend/instancemgmt"
"github.com/grafana/grafana/pkg/infra/httpclient"
"github.com/grafana/grafana/pkg/infra/log"
)
// Make sure Service implements the required interfaces. This is important to do
// since otherwise we would only get a "not implemented" error response from the plugin at
// runtime. This datasource's Service implements the backend.QueryDataHandler,
// backend.CallResourceHandler and backend.CheckHealthHandler interfaces. A plugin does not
// have to implement all of these interfaces - only those required for a particular task.
// For example, if a plugin does not need streaming functionality, the methods that implement
// backend.StreamHandler can be removed. Implementing instancemgmt.InstanceDisposer
// is useful to clean up resources used by a previous datasource instance when a new
// instance is created after the datasource settings change.
var (
_ backend.QueryDataHandler = (*Service)(nil)
_ backend.CallResourceHandler = (*Service)(nil)
_ backend.CheckHealthHandler = (*Service)(nil)
)
var logger = log.New("tsdb.parca")
type Service struct {
im instancemgmt.InstanceManager
}
func (s *Service) getInstance(pluginCtx backend.PluginContext) (*ParcaDatasource, error) {
i, err := s.im.Get(pluginCtx)
if err != nil {
return nil, err
}
in := i.(*ParcaDatasource)
return in, nil
}
func ProvideService(httpClientProvider httpclient.Provider) *Service {
return &Service{
im: datasource.NewInstanceManager(newInstanceSettings(httpClientProvider)),
}
}
func newInstanceSettings(httpClientProvider httpclient.Provider) datasource.InstanceFactoryFunc {
return func(settings backend.DataSourceInstanceSettings) (instancemgmt.Instance, error) {
return NewParcaDatasource(httpClientProvider, settings)
}
}
func (s *Service) QueryData(ctx context.Context, req *backend.QueryDataRequest) (*backend.QueryDataResponse, error) {
i, err := s.getInstance(req.PluginContext)
if err != nil {
return nil, err
}
return i.QueryData(ctx, req)
}
func (s *Service) CallResource(ctx context.Context, req *backend.CallResourceRequest, sender backend.CallResourceResponseSender) error {
i, err := s.getInstance(req.PluginContext)
if err != nil {
return err
}
return i.CallResource(ctx, req, sender)
}
func (s *Service) CheckHealth(ctx context.Context, req *backend.CheckHealthRequest) (*backend.CheckHealthResult, error) {
i, err := s.getInstance(req.PluginContext)
if err != nil {
return nil, err
}
return i.CheckHealth(ctx, req)
}


@ -0,0 +1,486 @@
// Code generated by protoc-gen-go. DO NOT EDIT.
// versions:
// protoc-gen-go v1.28.0
// protoc (unknown)
// source: common/v1/common.proto
package commonv1
import (
protoreflect "google.golang.org/protobuf/reflect/protoreflect"
protoimpl "google.golang.org/protobuf/runtime/protoimpl"
reflect "reflect"
sync "sync"
)
const (
// Verify that this generated code is sufficiently up-to-date.
_ = protoimpl.EnforceVersion(20 - protoimpl.MinVersion)
// Verify that runtime/protoimpl is sufficiently up-to-date.
_ = protoimpl.EnforceVersion(protoimpl.MaxVersion - 20)
)
type LabelPair struct {
state protoimpl.MessageState
sizeCache protoimpl.SizeCache
unknownFields protoimpl.UnknownFields
Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"`
Value string `protobuf:"bytes,2,opt,name=value,proto3" json:"value,omitempty"`
}
func (x *LabelPair) Reset() {
*x = LabelPair{}
if protoimpl.UnsafeEnabled {
mi := &file_common_v1_common_proto_msgTypes[0]
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
ms.StoreMessageInfo(mi)
}
}
func (x *LabelPair) String() string {
return protoimpl.X.MessageStringOf(x)
}
func (*LabelPair) ProtoMessage() {}
func (x *LabelPair) ProtoReflect() protoreflect.Message {
mi := &file_common_v1_common_proto_msgTypes[0]
if protoimpl.UnsafeEnabled && x != nil {
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
if ms.LoadMessageInfo() == nil {
ms.StoreMessageInfo(mi)
}
return ms
}
return mi.MessageOf(x)
}
// Deprecated: Use LabelPair.ProtoReflect.Descriptor instead.
func (*LabelPair) Descriptor() ([]byte, []int) {
return file_common_v1_common_proto_rawDescGZIP(), []int{0}
}
func (x *LabelPair) GetName() string {
if x != nil {
return x.Name
}
return ""
}
func (x *LabelPair) GetValue() string {
if x != nil {
return x.Value
}
return ""
}
type ProfileType struct {
state protoimpl.MessageState
sizeCache protoimpl.SizeCache
unknownFields protoimpl.UnknownFields
ID string `protobuf:"bytes,1,opt,name=ID,proto3" json:"ID,omitempty"`
Name string `protobuf:"bytes,2,opt,name=name,proto3" json:"name,omitempty"`
SampleType string `protobuf:"bytes,4,opt,name=sample_type,json=sampleType,proto3" json:"sample_type,omitempty"`
SampleUnit string `protobuf:"bytes,5,opt,name=sample_unit,json=sampleUnit,proto3" json:"sample_unit,omitempty"`
PeriodType string `protobuf:"bytes,6,opt,name=period_type,json=periodType,proto3" json:"period_type,omitempty"`
PeriodUnit string `protobuf:"bytes,7,opt,name=period_unit,json=periodUnit,proto3" json:"period_unit,omitempty"`
}
func (x *ProfileType) Reset() {
*x = ProfileType{}
if protoimpl.UnsafeEnabled {
mi := &file_common_v1_common_proto_msgTypes[1]
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
ms.StoreMessageInfo(mi)
}
}
func (x *ProfileType) String() string {
return protoimpl.X.MessageStringOf(x)
}
func (*ProfileType) ProtoMessage() {}
func (x *ProfileType) ProtoReflect() protoreflect.Message {
mi := &file_common_v1_common_proto_msgTypes[1]
if protoimpl.UnsafeEnabled && x != nil {
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
if ms.LoadMessageInfo() == nil {
ms.StoreMessageInfo(mi)
}
return ms
}
return mi.MessageOf(x)
}
// Deprecated: Use ProfileType.ProtoReflect.Descriptor instead.
func (*ProfileType) Descriptor() ([]byte, []int) {
return file_common_v1_common_proto_rawDescGZIP(), []int{1}
}
func (x *ProfileType) GetID() string {
if x != nil {
return x.ID
}
return ""
}
func (x *ProfileType) GetName() string {
if x != nil {
return x.Name
}
return ""
}
func (x *ProfileType) GetSampleType() string {
if x != nil {
return x.SampleType
}
return ""
}
func (x *ProfileType) GetSampleUnit() string {
if x != nil {
return x.SampleUnit
}
return ""
}
func (x *ProfileType) GetPeriodType() string {
if x != nil {
return x.PeriodType
}
return ""
}
func (x *ProfileType) GetPeriodUnit() string {
if x != nil {
return x.PeriodUnit
}
return ""
}
type Labels struct {
state protoimpl.MessageState
sizeCache protoimpl.SizeCache
unknownFields protoimpl.UnknownFields
// LabelPair is the key value pairs to identify the corresponding profile
Labels []*LabelPair `protobuf:"bytes,1,rep,name=labels,proto3" json:"labels,omitempty"`
}
func (x *Labels) Reset() {
*x = Labels{}
if protoimpl.UnsafeEnabled {
mi := &file_common_v1_common_proto_msgTypes[2]
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
ms.StoreMessageInfo(mi)
}
}
func (x *Labels) String() string {
return protoimpl.X.MessageStringOf(x)
}
func (*Labels) ProtoMessage() {}
func (x *Labels) ProtoReflect() protoreflect.Message {
mi := &file_common_v1_common_proto_msgTypes[2]
if protoimpl.UnsafeEnabled && x != nil {
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
if ms.LoadMessageInfo() == nil {
ms.StoreMessageInfo(mi)
}
return ms
}
return mi.MessageOf(x)
}
// Deprecated: Use Labels.ProtoReflect.Descriptor instead.
func (*Labels) Descriptor() ([]byte, []int) {
return file_common_v1_common_proto_rawDescGZIP(), []int{2}
}
func (x *Labels) GetLabels() []*LabelPair {
if x != nil {
return x.Labels
}
return nil
}
type Series struct {
state protoimpl.MessageState
sizeCache protoimpl.SizeCache
unknownFields protoimpl.UnknownFields
Labels []*LabelPair `protobuf:"bytes,1,rep,name=labels,proto3" json:"labels,omitempty"`
Points []*Point `protobuf:"bytes,2,rep,name=points,proto3" json:"points,omitempty"`
}
func (x *Series) Reset() {
*x = Series{}
if protoimpl.UnsafeEnabled {
mi := &file_common_v1_common_proto_msgTypes[3]
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
ms.StoreMessageInfo(mi)
}
}
func (x *Series) String() string {
return protoimpl.X.MessageStringOf(x)
}
func (*Series) ProtoMessage() {}
func (x *Series) ProtoReflect() protoreflect.Message {
mi := &file_common_v1_common_proto_msgTypes[3]
if protoimpl.UnsafeEnabled && x != nil {
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
if ms.LoadMessageInfo() == nil {
ms.StoreMessageInfo(mi)
}
return ms
}
return mi.MessageOf(x)
}
// Deprecated: Use Series.ProtoReflect.Descriptor instead.
func (*Series) Descriptor() ([]byte, []int) {
return file_common_v1_common_proto_rawDescGZIP(), []int{3}
}
func (x *Series) GetLabels() []*LabelPair {
if x != nil {
return x.Labels
}
return nil
}
func (x *Series) GetPoints() []*Point {
if x != nil {
return x.Points
}
return nil
}
type Point struct {
state protoimpl.MessageState
sizeCache protoimpl.SizeCache
unknownFields protoimpl.UnknownFields
Value float64 `protobuf:"fixed64,1,opt,name=value,proto3" json:"value,omitempty"`
// Milliseconds unix timestamp
Timestamp int64 `protobuf:"varint,2,opt,name=timestamp,proto3" json:"timestamp,omitempty"`
}
func (x *Point) Reset() {
*x = Point{}
if protoimpl.UnsafeEnabled {
mi := &file_common_v1_common_proto_msgTypes[4]
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
ms.StoreMessageInfo(mi)
}
}
func (x *Point) String() string {
return protoimpl.X.MessageStringOf(x)
}
func (*Point) ProtoMessage() {}
func (x *Point) ProtoReflect() protoreflect.Message {
mi := &file_common_v1_common_proto_msgTypes[4]
if protoimpl.UnsafeEnabled && x != nil {
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
if ms.LoadMessageInfo() == nil {
ms.StoreMessageInfo(mi)
}
return ms
}
return mi.MessageOf(x)
}
// Deprecated: Use Point.ProtoReflect.Descriptor instead.
func (*Point) Descriptor() ([]byte, []int) {
return file_common_v1_common_proto_rawDescGZIP(), []int{4}
}
func (x *Point) GetValue() float64 {
if x != nil {
return x.Value
}
return 0
}
func (x *Point) GetTimestamp() int64 {
if x != nil {
return x.Timestamp
}
return 0
}
var File_common_v1_common_proto protoreflect.FileDescriptor
var file_common_v1_common_proto_rawDesc = []byte{
0x0a, 0x16, 0x63, 0x6f, 0x6d, 0x6d, 0x6f, 0x6e, 0x2f, 0x76, 0x31, 0x2f, 0x63, 0x6f, 0x6d, 0x6d,
0x6f, 0x6e, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x12, 0x09, 0x63, 0x6f, 0x6d, 0x6d, 0x6f, 0x6e,
0x2e, 0x76, 0x31, 0x22, 0x35, 0x0a, 0x09, 0x4c, 0x61, 0x62, 0x65, 0x6c, 0x50, 0x61, 0x69, 0x72,
0x12, 0x12, 0x0a, 0x04, 0x6e, 0x61, 0x6d, 0x65, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x04,
0x6e, 0x61, 0x6d, 0x65, 0x12, 0x14, 0x0a, 0x05, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x18, 0x02, 0x20,
0x01, 0x28, 0x09, 0x52, 0x05, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x22, 0xb5, 0x01, 0x0a, 0x0b, 0x50,
0x72, 0x6f, 0x66, 0x69, 0x6c, 0x65, 0x54, 0x79, 0x70, 0x65, 0x12, 0x0e, 0x0a, 0x02, 0x49, 0x44,
0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x02, 0x49, 0x44, 0x12, 0x12, 0x0a, 0x04, 0x6e, 0x61,
0x6d, 0x65, 0x18, 0x02, 0x20, 0x01, 0x28, 0x09, 0x52, 0x04, 0x6e, 0x61, 0x6d, 0x65, 0x12, 0x1f,
0x0a, 0x0b, 0x73, 0x61, 0x6d, 0x70, 0x6c, 0x65, 0x5f, 0x74, 0x79, 0x70, 0x65, 0x18, 0x04, 0x20,
0x01, 0x28, 0x09, 0x52, 0x0a, 0x73, 0x61, 0x6d, 0x70, 0x6c, 0x65, 0x54, 0x79, 0x70, 0x65, 0x12,
0x1f, 0x0a, 0x0b, 0x73, 0x61, 0x6d, 0x70, 0x6c, 0x65, 0x5f, 0x75, 0x6e, 0x69, 0x74, 0x18, 0x05,
0x20, 0x01, 0x28, 0x09, 0x52, 0x0a, 0x73, 0x61, 0x6d, 0x70, 0x6c, 0x65, 0x55, 0x6e, 0x69, 0x74,
0x12, 0x1f, 0x0a, 0x0b, 0x70, 0x65, 0x72, 0x69, 0x6f, 0x64, 0x5f, 0x74, 0x79, 0x70, 0x65, 0x18,
0x06, 0x20, 0x01, 0x28, 0x09, 0x52, 0x0a, 0x70, 0x65, 0x72, 0x69, 0x6f, 0x64, 0x54, 0x79, 0x70,
0x65, 0x12, 0x1f, 0x0a, 0x0b, 0x70, 0x65, 0x72, 0x69, 0x6f, 0x64, 0x5f, 0x75, 0x6e, 0x69, 0x74,
0x18, 0x07, 0x20, 0x01, 0x28, 0x09, 0x52, 0x0a, 0x70, 0x65, 0x72, 0x69, 0x6f, 0x64, 0x55, 0x6e,
0x69, 0x74, 0x22, 0x36, 0x0a, 0x06, 0x4c, 0x61, 0x62, 0x65, 0x6c, 0x73, 0x12, 0x2c, 0x0a, 0x06,
0x6c, 0x61, 0x62, 0x65, 0x6c, 0x73, 0x18, 0x01, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x14, 0x2e, 0x63,
0x6f, 0x6d, 0x6d, 0x6f, 0x6e, 0x2e, 0x76, 0x31, 0x2e, 0x4c, 0x61, 0x62, 0x65, 0x6c, 0x50, 0x61,
0x69, 0x72, 0x52, 0x06, 0x6c, 0x61, 0x62, 0x65, 0x6c, 0x73, 0x22, 0x60, 0x0a, 0x06, 0x53, 0x65,
0x72, 0x69, 0x65, 0x73, 0x12, 0x2c, 0x0a, 0x06, 0x6c, 0x61, 0x62, 0x65, 0x6c, 0x73, 0x18, 0x01,
0x20, 0x03, 0x28, 0x0b, 0x32, 0x14, 0x2e, 0x63, 0x6f, 0x6d, 0x6d, 0x6f, 0x6e, 0x2e, 0x76, 0x31,
0x2e, 0x4c, 0x61, 0x62, 0x65, 0x6c, 0x50, 0x61, 0x69, 0x72, 0x52, 0x06, 0x6c, 0x61, 0x62, 0x65,
0x6c, 0x73, 0x12, 0x28, 0x0a, 0x06, 0x70, 0x6f, 0x69, 0x6e, 0x74, 0x73, 0x18, 0x02, 0x20, 0x03,
0x28, 0x0b, 0x32, 0x10, 0x2e, 0x63, 0x6f, 0x6d, 0x6d, 0x6f, 0x6e, 0x2e, 0x76, 0x31, 0x2e, 0x50,
0x6f, 0x69, 0x6e, 0x74, 0x52, 0x06, 0x70, 0x6f, 0x69, 0x6e, 0x74, 0x73, 0x22, 0x3b, 0x0a, 0x05,
0x50, 0x6f, 0x69, 0x6e, 0x74, 0x12, 0x14, 0x0a, 0x05, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x18, 0x01,
0x20, 0x01, 0x28, 0x01, 0x52, 0x05, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x12, 0x1c, 0x0a, 0x09, 0x74,
0x69, 0x6d, 0x65, 0x73, 0x74, 0x61, 0x6d, 0x70, 0x18, 0x02, 0x20, 0x01, 0x28, 0x03, 0x52, 0x09,
0x74, 0x69, 0x6d, 0x65, 0x73, 0x74, 0x61, 0x6d, 0x70, 0x42, 0x95, 0x01, 0x0a, 0x0d, 0x63, 0x6f,
0x6d, 0x2e, 0x63, 0x6f, 0x6d, 0x6d, 0x6f, 0x6e, 0x2e, 0x76, 0x31, 0x42, 0x0b, 0x43, 0x6f, 0x6d,
0x6d, 0x6f, 0x6e, 0x50, 0x72, 0x6f, 0x74, 0x6f, 0x50, 0x01, 0x5a, 0x32, 0x67, 0x69, 0x74, 0x68,
0x75, 0x62, 0x2e, 0x63, 0x6f, 0x6d, 0x2f, 0x67, 0x72, 0x61, 0x66, 0x61, 0x6e, 0x61, 0x2f, 0x66,
0x69, 0x72, 0x65, 0x2f, 0x70, 0x6b, 0x67, 0x2f, 0x67, 0x65, 0x6e, 0x2f, 0x63, 0x6f, 0x6d, 0x6d,
0x6f, 0x6e, 0x2f, 0x76, 0x31, 0x3b, 0x63, 0x6f, 0x6d, 0x6d, 0x6f, 0x6e, 0x76, 0x31, 0xa2, 0x02,
0x03, 0x43, 0x58, 0x58, 0xaa, 0x02, 0x09, 0x43, 0x6f, 0x6d, 0x6d, 0x6f, 0x6e, 0x2e, 0x56, 0x31,
0xca, 0x02, 0x09, 0x43, 0x6f, 0x6d, 0x6d, 0x6f, 0x6e, 0x5c, 0x56, 0x31, 0xe2, 0x02, 0x15, 0x43,
0x6f, 0x6d, 0x6d, 0x6f, 0x6e, 0x5c, 0x56, 0x31, 0x5c, 0x47, 0x50, 0x42, 0x4d, 0x65, 0x74, 0x61,
0x64, 0x61, 0x74, 0x61, 0xea, 0x02, 0x0a, 0x43, 0x6f, 0x6d, 0x6d, 0x6f, 0x6e, 0x3a, 0x3a, 0x56,
0x31, 0x62, 0x06, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x33,
}
var (
file_common_v1_common_proto_rawDescOnce sync.Once
file_common_v1_common_proto_rawDescData = file_common_v1_common_proto_rawDesc
)
func file_common_v1_common_proto_rawDescGZIP() []byte {
file_common_v1_common_proto_rawDescOnce.Do(func() {
file_common_v1_common_proto_rawDescData = protoimpl.X.CompressGZIP(file_common_v1_common_proto_rawDescData)
})
return file_common_v1_common_proto_rawDescData
}
var file_common_v1_common_proto_msgTypes = make([]protoimpl.MessageInfo, 5)
var file_common_v1_common_proto_goTypes = []interface{}{
(*LabelPair)(nil), // 0: common.v1.LabelPair
(*ProfileType)(nil), // 1: common.v1.ProfileType
(*Labels)(nil), // 2: common.v1.Labels
(*Series)(nil), // 3: common.v1.Series
(*Point)(nil), // 4: common.v1.Point
}
var file_common_v1_common_proto_depIdxs = []int32{
0, // 0: common.v1.Labels.labels:type_name -> common.v1.LabelPair
0, // 1: common.v1.Series.labels:type_name -> common.v1.LabelPair
4, // 2: common.v1.Series.points:type_name -> common.v1.Point
3, // [3:3] is the sub-list for method output_type
3, // [3:3] is the sub-list for method input_type
3, // [3:3] is the sub-list for extension type_name
3, // [3:3] is the sub-list for extension extendee
0, // [0:3] is the sub-list for field type_name
}
func init() { file_common_v1_common_proto_init() }
func file_common_v1_common_proto_init() {
if File_common_v1_common_proto != nil {
return
}
if !protoimpl.UnsafeEnabled {
file_common_v1_common_proto_msgTypes[0].Exporter = func(v interface{}, i int) interface{} {
switch v := v.(*LabelPair); i {
case 0:
return &v.state
case 1:
return &v.sizeCache
case 2:
return &v.unknownFields
default:
return nil
}
}
file_common_v1_common_proto_msgTypes[1].Exporter = func(v interface{}, i int) interface{} {
switch v := v.(*ProfileType); i {
case 0:
return &v.state
case 1:
return &v.sizeCache
case 2:
return &v.unknownFields
default:
return nil
}
}
file_common_v1_common_proto_msgTypes[2].Exporter = func(v interface{}, i int) interface{} {
switch v := v.(*Labels); i {
case 0:
return &v.state
case 1:
return &v.sizeCache
case 2:
return &v.unknownFields
default:
return nil
}
}
file_common_v1_common_proto_msgTypes[3].Exporter = func(v interface{}, i int) interface{} {
switch v := v.(*Series); i {
case 0:
return &v.state
case 1:
return &v.sizeCache
case 2:
return &v.unknownFields
default:
return nil
}
}
file_common_v1_common_proto_msgTypes[4].Exporter = func(v interface{}, i int) interface{} {
switch v := v.(*Point); i {
case 0:
return &v.state
case 1:
return &v.sizeCache
case 2:
return &v.unknownFields
default:
return nil
}
}
}
type x struct{}
out := protoimpl.TypeBuilder{
File: protoimpl.DescBuilder{
GoPackagePath: reflect.TypeOf(x{}).PkgPath(),
RawDescriptor: file_common_v1_common_proto_rawDesc,
NumEnums: 0,
NumMessages: 5,
NumExtensions: 0,
NumServices: 0,
},
GoTypes: file_common_v1_common_proto_goTypes,
DependencyIndexes: file_common_v1_common_proto_depIdxs,
MessageInfos: file_common_v1_common_proto_msgTypes,
}.Build()
File_common_v1_common_proto = out.File
file_common_v1_common_proto_rawDesc = nil
file_common_v1_common_proto_goTypes = nil
file_common_v1_common_proto_depIdxs = nil
}

File diff suppressed because it is too large

File diff suppressed because it is too large

File diff suppressed because it is too large


@ -0,0 +1,196 @@
// Code generated by protoc-gen-connect-go. DO NOT EDIT.
//
// Source: querier/v1/querier.proto
package querierv1connect
import (
context "context"
errors "errors"
connect_go "github.com/bufbuild/connect-go"
v1 "github.com/grafana/grafana/pkg/tsdb/phlare/gen/querier/v1"
http "net/http"
strings "strings"
)
// This is a compile-time assertion to ensure that this generated file and the connect package are
// compatible. If you get a compiler error that this constant is not defined, this code was
// generated with a version of connect newer than the one compiled into your binary. You can fix the
// problem by either regenerating this code with an older version of connect or updating the connect
// version compiled into your binary.
const _ = connect_go.IsAtLeastVersion0_1_0
const (
// QuerierServiceName is the fully-qualified name of the QuerierService service.
QuerierServiceName = "querier.v1.QuerierService"
)
// QuerierServiceClient is a client for the querier.v1.QuerierService service.
type QuerierServiceClient interface {
ProfileTypes(context.Context, *connect_go.Request[v1.ProfileTypesRequest]) (*connect_go.Response[v1.ProfileTypesResponse], error)
LabelValues(context.Context, *connect_go.Request[v1.LabelValuesRequest]) (*connect_go.Response[v1.LabelValuesResponse], error)
LabelNames(context.Context, *connect_go.Request[v1.LabelNamesRequest]) (*connect_go.Response[v1.LabelNamesResponse], error)
Series(context.Context, *connect_go.Request[v1.SeriesRequest]) (*connect_go.Response[v1.SeriesResponse], error)
SelectMergeStacktraces(context.Context, *connect_go.Request[v1.SelectMergeStacktracesRequest]) (*connect_go.Response[v1.SelectMergeStacktracesResponse], error)
SelectSeries(context.Context, *connect_go.Request[v1.SelectSeriesRequest]) (*connect_go.Response[v1.SelectSeriesResponse], error)
}
// NewQuerierServiceClient constructs a client for the querier.v1.QuerierService service. By
// default, it uses the Connect protocol with the binary Protobuf Codec, asks for gzipped responses,
// and sends uncompressed requests. To use the gRPC or gRPC-Web protocols, supply the
// connect.WithGRPC() or connect.WithGRPCWeb() options.
//
// The URL supplied here should be the base URL for the Connect or gRPC server (for example,
// http://api.acme.com or https://acme.com/grpc).
func NewQuerierServiceClient(httpClient connect_go.HTTPClient, baseURL string, opts ...connect_go.ClientOption) QuerierServiceClient {
baseURL = strings.TrimRight(baseURL, "/")
return &querierServiceClient{
profileTypes: connect_go.NewClient[v1.ProfileTypesRequest, v1.ProfileTypesResponse](
httpClient,
baseURL+"/querier.v1.QuerierService/ProfileTypes",
opts...,
),
labelValues: connect_go.NewClient[v1.LabelValuesRequest, v1.LabelValuesResponse](
httpClient,
baseURL+"/querier.v1.QuerierService/LabelValues",
opts...,
),
labelNames: connect_go.NewClient[v1.LabelNamesRequest, v1.LabelNamesResponse](
httpClient,
baseURL+"/querier.v1.QuerierService/LabelNames",
opts...,
),
series: connect_go.NewClient[v1.SeriesRequest, v1.SeriesResponse](
httpClient,
baseURL+"/querier.v1.QuerierService/Series",
opts...,
),
selectMergeStacktraces: connect_go.NewClient[v1.SelectMergeStacktracesRequest, v1.SelectMergeStacktracesResponse](
httpClient,
baseURL+"/querier.v1.QuerierService/SelectMergeStacktraces",
opts...,
),
selectSeries: connect_go.NewClient[v1.SelectSeriesRequest, v1.SelectSeriesResponse](
httpClient,
baseURL+"/querier.v1.QuerierService/SelectSeries",
opts...,
),
}
}
// querierServiceClient implements QuerierServiceClient.
type querierServiceClient struct {
profileTypes *connect_go.Client[v1.ProfileTypesRequest, v1.ProfileTypesResponse]
labelValues *connect_go.Client[v1.LabelValuesRequest, v1.LabelValuesResponse]
labelNames *connect_go.Client[v1.LabelNamesRequest, v1.LabelNamesResponse]
series *connect_go.Client[v1.SeriesRequest, v1.SeriesResponse]
selectMergeStacktraces *connect_go.Client[v1.SelectMergeStacktracesRequest, v1.SelectMergeStacktracesResponse]
selectSeries *connect_go.Client[v1.SelectSeriesRequest, v1.SelectSeriesResponse]
}
// ProfileTypes calls querier.v1.QuerierService.ProfileTypes.
func (c *querierServiceClient) ProfileTypes(ctx context.Context, req *connect_go.Request[v1.ProfileTypesRequest]) (*connect_go.Response[v1.ProfileTypesResponse], error) {
return c.profileTypes.CallUnary(ctx, req)
}
// LabelValues calls querier.v1.QuerierService.LabelValues.
func (c *querierServiceClient) LabelValues(ctx context.Context, req *connect_go.Request[v1.LabelValuesRequest]) (*connect_go.Response[v1.LabelValuesResponse], error) {
return c.labelValues.CallUnary(ctx, req)
}
// LabelNames calls querier.v1.QuerierService.LabelNames.
func (c *querierServiceClient) LabelNames(ctx context.Context, req *connect_go.Request[v1.LabelNamesRequest]) (*connect_go.Response[v1.LabelNamesResponse], error) {
return c.labelNames.CallUnary(ctx, req)
}
// Series calls querier.v1.QuerierService.Series.
func (c *querierServiceClient) Series(ctx context.Context, req *connect_go.Request[v1.SeriesRequest]) (*connect_go.Response[v1.SeriesResponse], error) {
return c.series.CallUnary(ctx, req)
}
// SelectMergeStacktraces calls querier.v1.QuerierService.SelectMergeStacktraces.
func (c *querierServiceClient) SelectMergeStacktraces(ctx context.Context, req *connect_go.Request[v1.SelectMergeStacktracesRequest]) (*connect_go.Response[v1.SelectMergeStacktracesResponse], error) {
return c.selectMergeStacktraces.CallUnary(ctx, req)
}
// SelectSeries calls querier.v1.QuerierService.SelectSeries.
func (c *querierServiceClient) SelectSeries(ctx context.Context, req *connect_go.Request[v1.SelectSeriesRequest]) (*connect_go.Response[v1.SelectSeriesResponse], error) {
return c.selectSeries.CallUnary(ctx, req)
}
// QuerierServiceHandler is an implementation of the querier.v1.QuerierService service.
type QuerierServiceHandler interface {
ProfileTypes(context.Context, *connect_go.Request[v1.ProfileTypesRequest]) (*connect_go.Response[v1.ProfileTypesResponse], error)
LabelValues(context.Context, *connect_go.Request[v1.LabelValuesRequest]) (*connect_go.Response[v1.LabelValuesResponse], error)
LabelNames(context.Context, *connect_go.Request[v1.LabelNamesRequest]) (*connect_go.Response[v1.LabelNamesResponse], error)
Series(context.Context, *connect_go.Request[v1.SeriesRequest]) (*connect_go.Response[v1.SeriesResponse], error)
SelectMergeStacktraces(context.Context, *connect_go.Request[v1.SelectMergeStacktracesRequest]) (*connect_go.Response[v1.SelectMergeStacktracesResponse], error)
SelectSeries(context.Context, *connect_go.Request[v1.SelectSeriesRequest]) (*connect_go.Response[v1.SelectSeriesResponse], error)
}
// NewQuerierServiceHandler builds an HTTP handler from the service implementation. It returns the
// path on which to mount the handler and the handler itself.
//
// By default, handlers support the Connect, gRPC, and gRPC-Web protocols with the binary Protobuf
// and JSON codecs. They also support gzip compression.
func NewQuerierServiceHandler(svc QuerierServiceHandler, opts ...connect_go.HandlerOption) (string, http.Handler) {
mux := http.NewServeMux()
mux.Handle("/querier.v1.QuerierService/ProfileTypes", connect_go.NewUnaryHandler(
"/querier.v1.QuerierService/ProfileTypes",
svc.ProfileTypes,
opts...,
))
mux.Handle("/querier.v1.QuerierService/LabelValues", connect_go.NewUnaryHandler(
"/querier.v1.QuerierService/LabelValues",
svc.LabelValues,
opts...,
))
mux.Handle("/querier.v1.QuerierService/LabelNames", connect_go.NewUnaryHandler(
"/querier.v1.QuerierService/LabelNames",
svc.LabelNames,
opts...,
))
mux.Handle("/querier.v1.QuerierService/Series", connect_go.NewUnaryHandler(
"/querier.v1.QuerierService/Series",
svc.Series,
opts...,
))
mux.Handle("/querier.v1.QuerierService/SelectMergeStacktraces", connect_go.NewUnaryHandler(
"/querier.v1.QuerierService/SelectMergeStacktraces",
svc.SelectMergeStacktraces,
opts...,
))
mux.Handle("/querier.v1.QuerierService/SelectSeries", connect_go.NewUnaryHandler(
"/querier.v1.QuerierService/SelectSeries",
svc.SelectSeries,
opts...,
))
return "/querier.v1.QuerierService/", mux
}
// UnimplementedQuerierServiceHandler returns CodeUnimplemented from all methods.
type UnimplementedQuerierServiceHandler struct{}
func (UnimplementedQuerierServiceHandler) ProfileTypes(context.Context, *connect_go.Request[v1.ProfileTypesRequest]) (*connect_go.Response[v1.ProfileTypesResponse], error) {
return nil, connect_go.NewError(connect_go.CodeUnimplemented, errors.New("querier.v1.QuerierService.ProfileTypes is not implemented"))
}
func (UnimplementedQuerierServiceHandler) LabelValues(context.Context, *connect_go.Request[v1.LabelValuesRequest]) (*connect_go.Response[v1.LabelValuesResponse], error) {
return nil, connect_go.NewError(connect_go.CodeUnimplemented, errors.New("querier.v1.QuerierService.LabelValues is not implemented"))
}
func (UnimplementedQuerierServiceHandler) LabelNames(context.Context, *connect_go.Request[v1.LabelNamesRequest]) (*connect_go.Response[v1.LabelNamesResponse], error) {
return nil, connect_go.NewError(connect_go.CodeUnimplemented, errors.New("querier.v1.QuerierService.LabelNames is not implemented"))
}
func (UnimplementedQuerierServiceHandler) Series(context.Context, *connect_go.Request[v1.SeriesRequest]) (*connect_go.Response[v1.SeriesResponse], error) {
return nil, connect_go.NewError(connect_go.CodeUnimplemented, errors.New("querier.v1.QuerierService.Series is not implemented"))
}
func (UnimplementedQuerierServiceHandler) SelectMergeStacktraces(context.Context, *connect_go.Request[v1.SelectMergeStacktracesRequest]) (*connect_go.Response[v1.SelectMergeStacktracesResponse], error) {
return nil, connect_go.NewError(connect_go.CodeUnimplemented, errors.New("querier.v1.QuerierService.SelectMergeStacktraces is not implemented"))
}
func (UnimplementedQuerierServiceHandler) SelectSeries(context.Context, *connect_go.Request[v1.SelectSeriesRequest]) (*connect_go.Response[v1.SelectSeriesResponse], error) {
return nil, connect_go.NewError(connect_go.CodeUnimplemented, errors.New("querier.v1.QuerierService.SelectSeries is not implemented"))
}


@ -0,0 +1,52 @@
// Code generated by protoc-gen-connect-go-mux. DO NOT EDIT.
//
// Source: querier/v1/querier.proto
package querierv1connect
import (
connect_go "github.com/bufbuild/connect-go"
mux "github.com/gorilla/mux"
)
// This is a compile-time assertion to ensure that this generated file and the connect package are
// compatible. If you get a compiler error that this constant is not defined, this code was
// generated with a version of connect newer than the one compiled into your binary. You can fix the
// problem by either regenerating this code with an older version of connect or updating the connect
// version compiled into your binary.
const _ = connect_go.IsAtLeastVersion0_1_0
// RegisterQuerierServiceHandler registers an HTTP handler on a mux.Router for the service
// implementation.
func RegisterQuerierServiceHandler(mux *mux.Router, svc QuerierServiceHandler, opts ...connect_go.HandlerOption) {
mux.Handle("/querier.v1.QuerierService/ProfileTypes", connect_go.NewUnaryHandler(
"/querier.v1.QuerierService/ProfileTypes",
svc.ProfileTypes,
opts...,
))
mux.Handle("/querier.v1.QuerierService/LabelValues", connect_go.NewUnaryHandler(
"/querier.v1.QuerierService/LabelValues",
svc.LabelValues,
opts...,
))
mux.Handle("/querier.v1.QuerierService/LabelNames", connect_go.NewUnaryHandler(
"/querier.v1.QuerierService/LabelNames",
svc.LabelNames,
opts...,
))
mux.Handle("/querier.v1.QuerierService/Series", connect_go.NewUnaryHandler(
"/querier.v1.QuerierService/Series",
svc.Series,
opts...,
))
mux.Handle("/querier.v1.QuerierService/SelectMergeStacktraces", connect_go.NewUnaryHandler(
"/querier.v1.QuerierService/SelectMergeStacktraces",
svc.SelectMergeStacktraces,
opts...,
))
mux.Handle("/querier.v1.QuerierService/SelectSeries", connect_go.NewUnaryHandler(
"/querier.v1.QuerierService/SelectSeries",
svc.SelectSeries,
opts...,
))
}

pkg/tsdb/phlare/instance.go

@ -0,0 +1,245 @@
package phlare
import (
"context"
"encoding/json"
"net/url"
"strings"
"time"
"github.com/bufbuild/connect-go"
"github.com/grafana/grafana-plugin-sdk-go/backend"
"github.com/grafana/grafana-plugin-sdk-go/backend/instancemgmt"
"github.com/grafana/grafana-plugin-sdk-go/data"
"github.com/grafana/grafana/pkg/infra/httpclient"
commonv1 "github.com/grafana/grafana/pkg/tsdb/phlare/gen/common/v1"
querierv1 "github.com/grafana/grafana/pkg/tsdb/phlare/gen/querier/v1"
"github.com/grafana/grafana/pkg/tsdb/phlare/gen/querier/v1/querierv1connect"
)
var (
_ backend.QueryDataHandler = (*PhlareDatasource)(nil)
_ backend.CallResourceHandler = (*PhlareDatasource)(nil)
_ backend.CheckHealthHandler = (*PhlareDatasource)(nil)
_ backend.StreamHandler = (*PhlareDatasource)(nil)
)
// PhlareDatasource is a datasource for querying application performance profiles.
type PhlareDatasource struct {
client querierv1connect.QuerierServiceClient
}
// NewPhlareDatasource creates a new datasource instance.
func NewPhlareDatasource(httpClientProvider httpclient.Provider, settings backend.DataSourceInstanceSettings) (instancemgmt.Instance, error) {
opt, err := settings.HTTPClientOptions()
if err != nil {
return nil, err
}
httpClient, err := httpClientProvider.New(opt)
if err != nil {
return nil, err
}
return &PhlareDatasource{
client: querierv1connect.NewQuerierServiceClient(httpClient, settings.URL),
}, nil
}
func (d *PhlareDatasource) CallResource(ctx context.Context, req *backend.CallResourceRequest, sender backend.CallResourceResponseSender) error {
logger.Debug("CallResource", "Path", req.Path, "Method", req.Method, "Body", req.Body)
if req.Path == "profileTypes" {
return d.callProfileTypes(ctx, req, sender)
}
if req.Path == "labelNames" {
return d.callLabelNames(ctx, req, sender)
}
if req.Path == "series" {
return d.callSeries(ctx, req, sender)
}
return sender.Send(&backend.CallResourceResponse{
Status: 404,
})
}
func (d *PhlareDatasource) callProfileTypes(ctx context.Context, req *backend.CallResourceRequest, sender backend.CallResourceResponseSender) error {
res, err := d.client.ProfileTypes(ctx, connect.NewRequest(&querierv1.ProfileTypesRequest{}))
if err != nil {
return err
}
data, err := json.Marshal(res.Msg.ProfileTypes)
if err != nil {
return err
}
err = sender.Send(&backend.CallResourceResponse{Body: data, Headers: req.Headers, Status: 200})
if err != nil {
return err
}
return nil
}
type SeriesRequestJson struct {
Matchers []string `json:"matchers"`
}
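// callSeries returns the label sets of matching series with Phlare's private (double underscore) labels removed.
// The matchers come from the "matchers" query parameter; for example the test in this package calls
// "series?matchers=%7B%7D" (URL-encoded "{}"), which matches all series.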
func (d *PhlareDatasource) callSeries(ctx context.Context, req *backend.CallResourceRequest, sender backend.CallResourceResponseSender) error {
parsedUrl, err := url.Parse(req.URL)
if err != nil {
return err
}
matchers, ok := parsedUrl.Query()["matchers"]
if !ok {
matchers = []string{"{}"}
}
res, err := d.client.Series(ctx, connect.NewRequest(&querierv1.SeriesRequest{Matchers: matchers}))
if err != nil {
return err
}
for _, val := range res.Msg.LabelsSet {
withoutPrivate := withoutPrivateLabels(val.Labels)
val.Labels = withoutPrivate
}
data, err := json.Marshal(res.Msg.LabelsSet)
if err != nil {
return err
}
err = sender.Send(&backend.CallResourceResponse{Body: data, Headers: req.Headers, Status: 200})
if err != nil {
return err
}
return nil
}
func (d *PhlareDatasource) callLabelNames(ctx context.Context, req *backend.CallResourceRequest, sender backend.CallResourceResponseSender) error {
res, err := d.client.LabelNames(ctx, connect.NewRequest(&querierv1.LabelNamesRequest{}))
if err != nil {
return err
}
data, err := json.Marshal(res.Msg.Names)
if err != nil {
return err
}
err = sender.Send(&backend.CallResourceResponse{Body: data, Headers: req.Headers, Status: 200})
if err != nil {
return err
}
return nil
}
// QueryData handles multiple queries and returns multiple responses.
// req contains the queries []DataQuery (where each query contains RefID as a unique identifier).
// The QueryDataResponse contains a map of RefID to the response for each query, and each response
// contains Frames ([]*Frame).
func (d *PhlareDatasource) QueryData(ctx context.Context, req *backend.QueryDataRequest) (*backend.QueryDataResponse, error) {
logger.Debug("QueryData called", "Queries", req.Queries)
// create response struct
response := backend.NewQueryDataResponse()
// loop over queries and execute them individually.
for _, q := range req.Queries {
res := d.query(ctx, req.PluginContext, q)
// save the response in a map,
// keyed by the query's RefID
response.Responses[q.RefID] = res
}
return response, nil
}
// CheckHealth handles health checks sent from Grafana to the plugin.
// The main use case for these health checks is the test button on the
// datasource configuration page which allows users to verify that
// a datasource is working as expected.
func (d *PhlareDatasource) CheckHealth(ctx context.Context, _ *backend.CheckHealthRequest) (*backend.CheckHealthResult, error) {
logger.Debug("CheckHealth called")
status := backend.HealthStatusOk
message := "Data source is working"
if _, err := d.client.ProfileTypes(ctx, connect.NewRequest(&querierv1.ProfileTypesRequest{})); err != nil {
status = backend.HealthStatusError
message = err.Error()
}
return &backend.CheckHealthResult{
Status: status,
Message: message,
}, nil
}
// SubscribeStream is called when a client wants to connect to a stream. This callback
// allows sending the first message.
func (d *PhlareDatasource) SubscribeStream(_ context.Context, req *backend.SubscribeStreamRequest) (*backend.SubscribeStreamResponse, error) {
logger.Debug("SubscribeStream called")
status := backend.SubscribeStreamStatusPermissionDenied
if req.Path == "stream" {
// Allow subscribing only on expected path.
status = backend.SubscribeStreamStatusOK
}
return &backend.SubscribeStreamResponse{
Status: status,
}, nil
}
// RunStream is called once for any open channel. Results are shared with everyone
// subscribed to the same channel.
func (d *PhlareDatasource) RunStream(ctx context.Context, req *backend.RunStreamRequest, sender *backend.StreamSender) error {
logger.Debug("RunStream called")
// Create the same data frame as for query data.
frame := data.NewFrame("response")
// Add fields (matching the same schema used in QueryData).
frame.Fields = append(frame.Fields,
data.NewField("time", nil, make([]time.Time, 1)),
data.NewField("values", nil, make([]int64, 1)),
)
counter := 0
// Stream data frames periodically until the stream is closed by Grafana.
for {
select {
case <-ctx.Done():
logger.Info("Context done, finish streaming", "path", req.Path)
return nil
case <-time.After(time.Second):
// Send new data periodically.
frame.Fields[0].Set(0, time.Now())
frame.Fields[1].Set(0, int64(10*(counter%2+1)))
counter++
err := sender.SendFrame(frame, data.IncludeAll)
if err != nil {
logger.Error("Error sending frame", "error", err)
continue
}
}
}
}
// PublishStream is called when a client sends a message to the stream.
func (d *PhlareDatasource) PublishStream(_ context.Context, _ *backend.PublishStreamRequest) (*backend.PublishStreamResponse, error) {
logger.Debug("PublishStream called")
// Do not allow publishing at all.
return &backend.PublishStreamResponse{
Status: backend.PublishStreamStatusPermissionDenied,
}, nil
}
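// withoutPrivateLabels drops labels whose names start with "__", which Phlare uses for internal labels
// (for example a hypothetical "__name__"), so they are not exposed to users.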
func withoutPrivateLabels(labels []*commonv1.LabelPair) []*commonv1.LabelPair {
res := make([]*commonv1.LabelPair, 0, len(labels))
for _, l := range labels {
if !strings.HasPrefix(l.Name, "__") {
res = append(res, l)
}
}
return res
}


@ -0,0 +1,64 @@
package phlare
import (
"context"
"testing"
"github.com/grafana/grafana-plugin-sdk-go/backend"
"github.com/stretchr/testify/require"
)
// This is where the tests for the datasource backend live.
func Test_QueryData(t *testing.T) {
ds := PhlareDatasource{}
resp, err := ds.QueryData(
context.Background(),
&backend.QueryDataRequest{
Queries: []backend.DataQuery{
{RefID: "A"},
},
},
)
if err != nil {
t.Error(err)
}
if len(resp.Responses) != 1 {
t.Fatal("QueryData must return a response")
}
}
func Test_CallResource(t *testing.T) {
ds := &PhlareDatasource{
client: &FakeClient{},
}
t.Run("series resource", func(t *testing.T) {
sender := &FakeSender{}
err := ds.CallResource(
context.Background(),
&backend.CallResourceRequest{
PluginContext: backend.PluginContext{},
Path: "series",
Method: "GET",
URL: "series?matchers=%7B%7D",
Headers: nil,
Body: nil,
},
sender,
)
require.NoError(t, err)
require.Equal(t, 200, sender.Resp.Status)
require.Equal(t, `[{"labels":[{"name":"instance","value":"127.0.0.1"},{"name":"job","value":"default"}]}]`, string(sender.Resp.Body))
})
}
type FakeSender struct {
Resp *backend.CallResourceResponse
}
func (fs *FakeSender) Send(resp *backend.CallResourceResponse) error {
fs.Resp = resp
return nil
}

pkg/tsdb/phlare/query.go

@ -0,0 +1,335 @@
package phlare
import (
"context"
"encoding/json"
"fmt"
"math"
"strings"
"time"
"github.com/bufbuild/connect-go"
"github.com/grafana/grafana-plugin-sdk-go/backend"
"github.com/grafana/grafana-plugin-sdk-go/backend/gtime"
"github.com/grafana/grafana-plugin-sdk-go/data"
"github.com/grafana/grafana-plugin-sdk-go/live"
querierv1 "github.com/grafana/grafana/pkg/tsdb/phlare/gen/querier/v1"
)
type queryModel struct {
WithStreaming bool
ProfileTypeID string `json:"profileTypeId"`
LabelSelector string `json:"labelSelector"`
GroupBy []string `json:"groupBy"`
}
type dsJsonModel struct {
MinStep string `json:"minStep"`
}
// These constants need to match the ones in the frontend.
const queryTypeProfile = "profile"
const (
queryTypeMetrics = "metrics"
queryTypeBoth = "both"
)
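// Illustrative sketch of the JSON involved (field names match the structs above; the concrete values are hypothetical):
// query JSON:      {"profileTypeId":"<profile type>","labelSelector":"{instance=\"localhost\"}","groupBy":["instance"]}
// datasource JSON: {"minStep":"15s"}
// minStep is parsed with gtime.ParseDuration and acts as a lower bound on the metrics step (see below).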
// query processes a single Phlare query, transforming the response into data.Frames packaged in a backend.DataResponse
func (d *PhlareDatasource) query(ctx context.Context, pCtx backend.PluginContext, query backend.DataQuery) backend.DataResponse {
var qm queryModel
response := backend.DataResponse{}
err := json.Unmarshal(query.JSON, &qm)
if err != nil {
response.Error = fmt.Errorf("error unmarshaling query model: %v", err)
return response
}
if query.QueryType == queryTypeMetrics || query.QueryType == queryTypeBoth {
var dsJson dsJsonModel
err = json.Unmarshal(pCtx.DataSourceInstanceSettings.JSONData, &dsJson)
if err != nil {
response.Error = fmt.Errorf("error unmarshaling datasource json model: %v", err)
return response
}
parsedInterval := time.Second * 15
if dsJson.MinStep != "" {
parsedInterval, err = gtime.ParseDuration(dsJson.MinStep)
if err != nil {
parsedInterval = time.Second * 15
logger.Debug("Failed to parse the MinStep using default", "MinStep", dsJson.MinStep)
}
}
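// The step sent to Phlare is the larger of the panel's query interval and the configured minimum step, in seconds.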
req := connect.NewRequest(&querierv1.SelectSeriesRequest{
ProfileTypeID: qm.ProfileTypeID,
LabelSelector: qm.LabelSelector,
Start: query.TimeRange.From.UnixMilli(),
End: query.TimeRange.To.UnixMilli(),
Step: math.Max(query.Interval.Seconds(), parsedInterval.Seconds()),
GroupBy: qm.GroupBy,
})
logger.Debug("Sending SelectSeriesRequest", "request", req, "queryModel", qm)
seriesResp, err := d.client.SelectSeries(ctx, req)
if err != nil {
logger.Error("Querying SelectSeries()", "err", err)
response.Error = err
return response
}
// add the frames to the response.
response.Frames = append(response.Frames, seriesToDataFrames(seriesResp, qm.ProfileTypeID)...)
}
if query.QueryType == queryTypeProfile || query.QueryType == queryTypeBoth {
req := makeRequest(qm, query)
logger.Debug("Sending SelectMergeStacktracesRequest", "request", req, "queryModel", qm)
resp, err := d.client.SelectMergeStacktraces(ctx, makeRequest(qm, query))
if err != nil {
logger.Error("Querying SelectMergeStacktraces()", "err", err)
response.Error = err
return response
}
frame := responseToDataFrames(resp, qm.ProfileTypeID)
response.Frames = append(response.Frames, frame)
// If the query was issued with streaming enabled, return a channel the client can
// subscribe to in order to consume updates from the plugin.
// Feel free to remove this if you don't need streaming for your datasource.
if qm.WithStreaming {
channel := live.Channel{
Scope: live.ScopeDatasource,
Namespace: pCtx.DataSourceInstanceSettings.UID,
Path: "stream",
}
frame.SetMeta(&data.FrameMeta{Channel: channel.String()})
}
}
return response
}
func makeRequest(qm queryModel, query backend.DataQuery) *connect.Request[querierv1.SelectMergeStacktracesRequest] {
return &connect.Request[querierv1.SelectMergeStacktracesRequest]{
Msg: &querierv1.SelectMergeStacktracesRequest{
ProfileTypeID: qm.ProfileTypeID,
LabelSelector: qm.LabelSelector,
Start: query.TimeRange.From.UnixMilli(),
End: query.TimeRange.To.UnixMilli(),
},
}
}
// responseToDataFrames turns a Phlare response into a data.Frame. We encode the data into a nested set format with
// [level, value, self, label] columns; by ordering the items in depth-first traversal order we can later recreate
// the whole tree.
func responseToDataFrames(resp *connect.Response[querierv1.SelectMergeStacktracesResponse], profileTypeID string) *data.Frame {
tree := levelsToTree(resp.Msg.Flamegraph.Levels, resp.Msg.Flamegraph.Names)
return treeToNestedSetDataFrame(tree, profileTypeID)
}
// START_OFFSET is offset of the bar relative to previous sibling
const START_OFFSET = 0
// VALUE_OFFSET is value or width of the bar
const VALUE_OFFSET = 1
// SELF_OFFSET is self value of the bar
const SELF_OFFSET = 2
// NAME_OFFSET is index into the names array
const NAME_OFFSET = 3
// ITEM_OFFSET Next bar. Each bar of the profile is represented by 4 numbers in a flat array.
const ITEM_OFFSET = 4
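// For example (made-up values), a level encoded as [0, 100, 5, 2, 10, 40, 0, 7] describes two bars: the first starts
// at offset 0 with value 100, self 5 and name index 2; the second starts 10 units after the end of the first bar,
// with value 40, self 0 and name index 7.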
type ProfileTree struct {
Start int64
Value int64
Self int64
Level int
Name string
Nodes []*ProfileTree
}
// levelsToTree converts flamebearer format into a tree. This is needed to then convert it into nested set format
// dataframe. This should be temporary, and ideally we should get some sort of tree struct directly from Fire API.
func levelsToTree(levels []*querierv1.Level, names []string) *ProfileTree {
tree := &ProfileTree{
Start: 0,
Value: levels[0].Values[VALUE_OFFSET],
Self: levels[0].Values[SELF_OFFSET],
Level: 0,
Name: names[levels[0].Values[0]],
}
parentsStack := []*ProfileTree{tree}
currentLevel := 1
// Cycle through each level
for {
if currentLevel >= len(levels) {
break
}
// This should not happen while we still have levels to process. Something is probably wrong with the flamebearer data.
if len(parentsStack) == 0 {
logger.Error("parentsStack is empty but we are not at the the last level", "currentLevel", currentLevel)
break
}
var nextParentsStack []*ProfileTree
currentParent := parentsStack[:1][0]
parentsStack = parentsStack[1:]
itemIndex := 0
// cumulative offset, since items in the flamebearer format store their start relative to the previous item
offset := int64(0)
// Cycle through bar in a level
for {
if itemIndex >= len(levels[currentLevel].Values) {
break
}
itemStart := levels[currentLevel].Values[itemIndex+START_OFFSET] + offset
itemValue := levels[currentLevel].Values[itemIndex+VALUE_OFFSET]
selfValue := levels[currentLevel].Values[itemIndex+SELF_OFFSET]
itemEnd := itemStart + itemValue
parentEnd := currentParent.Start + currentParent.Value
if itemStart >= currentParent.Start && itemEnd <= parentEnd {
// We have an item that is in the bounds of current parent item, so it should be its child
treeItem := &ProfileTree{
Start: itemStart,
Value: itemValue,
Self: selfValue,
Level: currentLevel,
Name: names[levels[currentLevel].Values[itemIndex+NAME_OFFSET]],
}
// Add to parent
currentParent.Nodes = append(currentParent.Nodes, treeItem)
// Add this item as parent for the next level
nextParentsStack = append(nextParentsStack, treeItem)
itemIndex += ITEM_OFFSET
// Update offset for next item. This is changing relative offset to absolute one.
offset = itemEnd
} else {
// We went out of the parent's bounds, so let's move to the next parent. We will evaluate the same item again, but
// we will check if it is a child of the next parent item in line.
if len(parentsStack) == 0 {
logger.Error("parentsStack is empty but there are still items in current level", "currentLevel", currentLevel, "itemIndex", itemIndex)
break
}
currentParent = parentsStack[:1][0]
parentsStack = parentsStack[1:]
continue
}
}
parentsStack = nextParentsStack
currentLevel++
}
return tree
}
type CustomMeta struct {
ProfileTypeID string
}
// treeToNestedSetDataFrame walks the tree depth first and adds items into the dataframe. This is a nested set format:
// by ordering the items depth first and knowing the level/depth of each item we can recreate the parent-child
// relationships without an explicit parent/child column, and later we can iterate over the dataFrame and effectively
// walk the tree/profile depth first again.
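// For illustration (values made up): a profile root(100) with children a(60) and b(30), where a has a child c(20),
// is encoded depth first as level=[0,1,2,1], value=[100,60,20,30], label=[root,a,c,b]; the level column alone is
// enough to rebuild the parent/child structure.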
func treeToNestedSetDataFrame(tree *ProfileTree, profileTypeID string) *data.Frame {
frame := data.NewFrame("response")
frame.Meta = &data.FrameMeta{PreferredVisualization: "flamegraph"}
levelField := data.NewField("level", nil, []int64{})
valueField := data.NewField("value", nil, []int64{})
selfField := data.NewField("self", nil, []int64{})
// profileTypeID should encode the type of the profile with unit being the 3rd part
parts := strings.Split(profileTypeID, ":")
valueField.Config = &data.FieldConfig{Unit: normalizeUnit(parts[2])}
selfField.Config = &data.FieldConfig{Unit: normalizeUnit(parts[2])}
labelField := data.NewField("label", nil, []string{})
frame.Fields = data.Fields{levelField, valueField, selfField, labelField}
walkTree(tree, func(tree *ProfileTree) {
levelField.Append(int64(tree.Level))
valueField.Append(tree.Value)
selfField.Append(tree.Self)
labelField.Append(tree.Name)
})
return frame
}
func walkTree(tree *ProfileTree, fn func(tree *ProfileTree)) {
fn(tree)
stack := tree.Nodes
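// Iterative depth-first walk: a node's children are pushed to the front of the stack so they are visited
// before the node's remaining siblings.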
for {
if len(stack) == 0 {
break
}
fn(stack[0])
if stack[0].Nodes != nil {
stack = append(stack[0].Nodes, stack[1:]...)
} else {
stack = stack[1:]
}
}
}
func seriesToDataFrames(seriesResp *connect.Response[querierv1.SelectSeriesResponse], profileTypeID string) []*data.Frame {
var frames []*data.Frame
for _, series := range seriesResp.Msg.Series {
// We create separate data frames as the series may not have the same length
frame := data.NewFrame("series")
frame.Meta = &data.FrameMeta{PreferredVisualization: "graph"}
fields := data.Fields{}
timeField := data.NewField("time", nil, []time.Time{})
fields = append(fields, timeField)
label := ""
unit := ""
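// profileTypeID has 5 parts: name:sampleType:unit:periodType:periodUnit,
// e.g. process_cpu:samples:count:cpu:nanoseconds.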
parts := strings.Split(profileTypeID, ":")
if len(parts) == 5 {
label = parts[1] // sample type e.g. cpu, goroutine, alloc_objects
unit = normalizeUnit(parts[2])
}
labels := make(map[string]string)
for _, label := range series.Labels {
labels[label.Name] = label.Value
}
valueField := data.NewField(label, labels, []float64{})
valueField.Config = &data.FieldConfig{Unit: unit}
for _, point := range series.Points {
timeField.Append(time.UnixMilli(point.Timestamp))
valueField.Append(point.Value)
}
fields = append(fields, valueField)
frame.Fields = fields
frames = append(frames, frame)
}
return frames
}
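// normalizeUnit translates units reported by the backend into Grafana unit identifiers:
// "nanoseconds" becomes "ns", "count" becomes "short", anything else is passed through unchanged.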
func normalizeUnit(unit string) string {
if unit == "nanoseconds" {
return "ns"
}
if unit == "count" {
return "short"
}
return unit
}


@ -0,0 +1,314 @@
package phlare
import (
"context"
"testing"
"time"
"github.com/bufbuild/connect-go"
"github.com/grafana/grafana-plugin-sdk-go/backend"
"github.com/grafana/grafana-plugin-sdk-go/data"
"github.com/stretchr/testify/require"
commonv1 "github.com/grafana/grafana/pkg/tsdb/phlare/gen/common/v1"
querierv1 "github.com/grafana/grafana/pkg/tsdb/phlare/gen/querier/v1"
)
// This is where the tests for the datasource backend live.
func Test_query(t *testing.T) {
client := &FakeClient{}
ds := &PhlareDatasource{
client: client,
}
pCtx := backend.PluginContext{
DataSourceInstanceSettings: &backend.DataSourceInstanceSettings{
JSONData: []byte(`{"minStep":"30s"}`),
},
}
t.Run("query both", func(t *testing.T) {
dataQuery := makeDataQuery()
resp := ds.query(context.Background(), pCtx, *dataQuery)
require.Nil(t, resp.Error)
require.Equal(t, 2, len(resp.Frames))
require.Equal(t, "time", resp.Frames[0].Fields[0].Name)
require.Equal(t, data.NewField("level", nil, []int64{0, 1, 2}), resp.Frames[1].Fields[0])
})
t.Run("query profile", func(t *testing.T) {
dataQuery := makeDataQuery()
dataQuery.QueryType = queryTypeProfile
resp := ds.query(context.Background(), pCtx, *dataQuery)
require.Nil(t, resp.Error)
require.Equal(t, 1, len(resp.Frames))
require.Equal(t, data.NewField("level", nil, []int64{0, 1, 2}), resp.Frames[0].Fields[0])
})
t.Run("query metrics", func(t *testing.T) {
dataQuery := makeDataQuery()
dataQuery.QueryType = queryTypeMetrics
resp := ds.query(context.Background(), pCtx, *dataQuery)
require.Nil(t, resp.Error)
require.Equal(t, 1, len(resp.Frames))
require.Equal(t, "time", resp.Frames[0].Fields[0].Name)
})
t.Run("query metrics uses min step", func(t *testing.T) {
dataQuery := makeDataQuery()
dataQuery.QueryType = queryTypeMetrics
resp := ds.query(context.Background(), pCtx, *dataQuery)
require.Nil(t, resp.Error)
r, ok := client.Req.(*connect.Request[querierv1.SelectSeriesRequest])
require.True(t, ok)
require.Equal(t, float64(30), r.Msg.Step)
})
t.Run("query metrics uses default min step", func(t *testing.T) {
dataQuery := makeDataQuery()
dataQuery.QueryType = queryTypeMetrics
pCtxNoMinStep := backend.PluginContext{
DataSourceInstanceSettings: &backend.DataSourceInstanceSettings{
JSONData: []byte(`{}`),
},
}
resp := ds.query(context.Background(), pCtxNoMinStep, *dataQuery)
require.Nil(t, resp.Error)
r, ok := client.Req.(*connect.Request[querierv1.SelectSeriesRequest])
require.True(t, ok)
require.Equal(t, float64(15), r.Msg.Step)
})
t.Run("query metrics uses group by", func(t *testing.T) {
dataQuery := makeDataQuery()
dataQuery.QueryType = queryTypeMetrics
dataQuery.JSON = []byte(`{"profileTypeId":"memory:alloc_objects:count:space:bytes","labelSelector":"{app=\\\"baz\\\"}","groupBy":["app","instance"]}`)
resp := ds.query(context.Background(), pCtx, *dataQuery)
require.Nil(t, resp.Error)
r, ok := client.Req.(*connect.Request[querierv1.SelectSeriesRequest])
require.True(t, ok)
require.Equal(t, []string{"app", "instance"}, r.Msg.GroupBy)
})
}
func makeDataQuery() *backend.DataQuery {
return &backend.DataQuery{
RefID: "A",
QueryType: queryTypeBoth,
MaxDataPoints: 0,
Interval: 0,
TimeRange: backend.TimeRange{
From: time.UnixMilli(10000),
To: time.UnixMilli(20000),
},
JSON: []byte(`{"profileTypeId":"memory:alloc_objects:count:space:bytes","labelSelector":"{app=\\\"baz\\\"}"}`),
}
}
// This is where the tests for the datasource backend live.
func Test_profileToDataFrame(t *testing.T) {
resp := &connect.Response[querierv1.SelectMergeStacktracesResponse]{
Msg: &querierv1.SelectMergeStacktracesResponse{
Flamegraph: &querierv1.FlameGraph{
Names: []string{"func1", "func2", "func3"},
Levels: []*querierv1.Level{
{Values: []int64{0, 20, 1, 2}},
{Values: []int64{0, 10, 3, 1, 4, 5, 5, 2}},
},
Total: 987,
MaxSelf: 123,
},
},
}
frame := responseToDataFrames(resp, "memory:alloc_objects:count:space:bytes")
require.Equal(t, 4, len(frame.Fields))
require.Equal(t, data.NewField("level", nil, []int64{0, 1, 1}), frame.Fields[0])
require.Equal(t, data.NewField("value", nil, []int64{20, 10, 5}).SetConfig(&data.FieldConfig{Unit: "short"}), frame.Fields[1])
require.Equal(t, data.NewField("self", nil, []int64{1, 3, 5}).SetConfig(&data.FieldConfig{Unit: "short"}), frame.Fields[2])
require.Equal(t, data.NewField("label", nil, []string{"func1", "func2", "func3"}), frame.Fields[3])
}
// This is where the tests for the datasource backend live.
func Test_levelsToTree(t *testing.T) {
t.Run("simple", func(t *testing.T) {
levels := []*querierv1.Level{
{Values: []int64{0, 100, 0, 0}},
{Values: []int64{0, 40, 0, 1, 0, 30, 0, 2}},
{Values: []int64{0, 15, 0, 3}},
}
tree := levelsToTree(levels, []string{"root", "func1", "func2", "func1:func3"})
require.Equal(t, &ProfileTree{
Start: 0, Value: 100, Level: 0, Name: "root", Nodes: []*ProfileTree{
{
Start: 0, Value: 40, Level: 1, Name: "func1", Nodes: []*ProfileTree{
{Start: 0, Value: 15, Level: 2, Name: "func1:func3"},
},
},
{Start: 40, Value: 30, Level: 1, Name: "func2"},
},
}, tree)
})
t.Run("medium", func(t *testing.T) {
levels := []*querierv1.Level{
{Values: []int64{0, 100, 0, 0}},
{Values: []int64{0, 40, 0, 1, 0, 30, 0, 2, 0, 30, 0, 3}},
{Values: []int64{0, 20, 0, 4, 50, 10, 0, 5}},
}
tree := levelsToTree(levels, []string{"root", "func1", "func2", "func3", "func1:func4", "func3:func5"})
require.Equal(t, &ProfileTree{
Start: 0, Value: 100, Level: 0, Name: "root", Nodes: []*ProfileTree{
{
Start: 0, Value: 40, Level: 1, Name: "func1", Nodes: []*ProfileTree{
{Start: 0, Value: 20, Level: 2, Name: "func1:func4"},
},
},
{Start: 40, Value: 30, Level: 1, Name: "func2"},
{
Start: 70, Value: 30, Level: 1, Name: "func3", Nodes: []*ProfileTree{
{Start: 70, Value: 10, Level: 2, Name: "func3:func5"},
},
},
},
}, tree)
})
}
func Test_treeToNestedDataFrame(t *testing.T) {
tree := &ProfileTree{
Start: 0, Value: 100, Level: 0, Self: 1, Name: "root", Nodes: []*ProfileTree{
{
Start: 10, Value: 40, Level: 1, Self: 2, Name: "func1",
},
{Start: 60, Value: 30, Level: 1, Self: 3, Name: "func2", Nodes: []*ProfileTree{
{Start: 61, Value: 15, Level: 2, Self: 4, Name: "func1:func3"},
}},
},
}
frame := treeToNestedSetDataFrame(tree, "memory:alloc_objects:count:space:bytes")
require.Equal(t,
[]*data.Field{
data.NewField("level", nil, []int64{0, 1, 1, 2}),
data.NewField("value", nil, []int64{100, 40, 30, 15}).SetConfig(&data.FieldConfig{Unit: "short"}),
data.NewField("self", nil, []int64{1, 2, 3, 4}).SetConfig(&data.FieldConfig{Unit: "short"}),
data.NewField("label", nil, []string{"root", "func1", "func2", "func1:func3"}),
}, frame.Fields)
}
func Test_seriesToDataFrame(t *testing.T) {
t.Run("single series", func(t *testing.T) {
resp := &connect.Response[querierv1.SelectSeriesResponse]{
Msg: &querierv1.SelectSeriesResponse{
Series: []*commonv1.Series{
{Labels: []*commonv1.LabelPair{}, Points: []*commonv1.Point{{Timestamp: int64(1000), Value: 30}, {Timestamp: int64(2000), Value: 10}}},
},
},
}
frames := seriesToDataFrames(resp, "process_cpu:samples:count:cpu:nanoseconds")
require.Equal(t, 2, len(frames[0].Fields))
require.Equal(t, data.NewField("time", nil, []time.Time{time.UnixMilli(1000), time.UnixMilli(2000)}), frames[0].Fields[0])
require.Equal(t, data.NewField("samples", map[string]string{}, []float64{30, 10}).SetConfig(&data.FieldConfig{Unit: "short"}), frames[0].Fields[1])
// with a label pair, the value field should name itself with a label pair name and not the profile type
resp = &connect.Response[querierv1.SelectSeriesResponse]{
Msg: &querierv1.SelectSeriesResponse{
Series: []*commonv1.Series{
{Labels: []*commonv1.LabelPair{{Name: "app", Value: "bar"}}, Points: []*commonv1.Point{{Timestamp: int64(1000), Value: 30}, {Timestamp: int64(2000), Value: 10}}},
},
},
}
frames = seriesToDataFrames(resp, "process_cpu:samples:count:cpu:nanoseconds")
require.Equal(t, data.NewField("samples", map[string]string{"app": "bar"}, []float64{30, 10}).SetConfig(&data.FieldConfig{Unit: "short"}), frames[0].Fields[1])
})
t.Run("single series", func(t *testing.T) {
resp := &connect.Response[querierv1.SelectSeriesResponse]{
Msg: &querierv1.SelectSeriesResponse{
Series: []*commonv1.Series{
{Labels: []*commonv1.LabelPair{{Name: "foo", Value: "bar"}}, Points: []*commonv1.Point{{Timestamp: int64(1000), Value: 30}, {Timestamp: int64(2000), Value: 10}}},
{Labels: []*commonv1.LabelPair{{Name: "foo", Value: "baz"}}, Points: []*commonv1.Point{{Timestamp: int64(1000), Value: 30}, {Timestamp: int64(2000), Value: 10}}},
},
},
}
frames := seriesToDataFrames(resp, "process_cpu:samples:count:cpu:nanoseconds")
require.Equal(t, 2, len(frames))
require.Equal(t, 2, len(frames[0].Fields))
require.Equal(t, 2, len(frames[1].Fields))
require.Equal(t, data.NewField("samples", map[string]string{"foo": "bar"}, []float64{30, 10}).SetConfig(&data.FieldConfig{Unit: "short"}), frames[0].Fields[1])
require.Equal(t, data.NewField("samples", map[string]string{"foo": "baz"}, []float64{30, 10}).SetConfig(&data.FieldConfig{Unit: "short"}), frames[1].Fields[1])
})
}
type FakeClient struct {
Req interface{}
}
func (f *FakeClient) ProfileTypes(ctx context.Context, c *connect.Request[querierv1.ProfileTypesRequest]) (*connect.Response[querierv1.ProfileTypesResponse], error) {
panic("implement me")
}
func (f *FakeClient) LabelValues(ctx context.Context, c *connect.Request[querierv1.LabelValuesRequest]) (*connect.Response[querierv1.LabelValuesResponse], error) {
panic("implement me")
}
func (f *FakeClient) LabelNames(context.Context, *connect.Request[querierv1.LabelNamesRequest]) (*connect.Response[querierv1.LabelNamesResponse], error) {
panic("implement me")
}
func (f *FakeClient) Series(ctx context.Context, c *connect.Request[querierv1.SeriesRequest]) (*connect.Response[querierv1.SeriesResponse], error) {
return &connect.Response[querierv1.SeriesResponse]{
Msg: &querierv1.SeriesResponse{
LabelsSet: []*commonv1.Labels{{
Labels: []*commonv1.LabelPair{
{
Name: "__unit__",
Value: "cpu",
},
{
Name: "instance",
Value: "127.0.0.1",
},
{
Name: "job",
Value: "default",
},
},
}},
},
}, nil
}
func (f *FakeClient) SelectMergeStacktraces(ctx context.Context, c *connect.Request[querierv1.SelectMergeStacktracesRequest]) (*connect.Response[querierv1.SelectMergeStacktracesResponse], error) {
f.Req = c
return &connect.Response[querierv1.SelectMergeStacktracesResponse]{
Msg: &querierv1.SelectMergeStacktracesResponse{
Flamegraph: &querierv1.FlameGraph{
Names: []string{"foo", "bar", "baz"},
Levels: []*querierv1.Level{
{Values: []int64{0, 10, 0, 0}},
{Values: []int64{0, 9, 0, 1}},
{Values: []int64{0, 8, 8, 2}},
},
Total: 100,
MaxSelf: 56,
},
},
}, nil
}
func (f *FakeClient) SelectSeries(ctx context.Context, req *connect.Request[querierv1.SelectSeriesRequest]) (*connect.Response[querierv1.SelectSeriesResponse], error) {
f.Req = req
return &connect.Response[querierv1.SelectSeriesResponse]{
Msg: &querierv1.SelectSeriesResponse{
Series: []*commonv1.Series{
{
Labels: []*commonv1.LabelPair{{Name: "foo", Value: "bar"}},
Points: []*commonv1.Point{{Timestamp: int64(1000), Value: 30}, {Timestamp: int64(2000), Value: 10}},
},
},
},
}, nil
}

pkg/tsdb/phlare/service.go Normal file

@ -0,0 +1,103 @@
package phlare
import (
"context"
"github.com/grafana/grafana-plugin-sdk-go/backend"
"github.com/grafana/grafana-plugin-sdk-go/backend/datasource"
"github.com/grafana/grafana-plugin-sdk-go/backend/instancemgmt"
"github.com/grafana/grafana/pkg/infra/httpclient"
"github.com/grafana/grafana/pkg/infra/log"
)
// Make sure the service implements the required interfaces. This is important since otherwise we would only get a
// "not implemented" error response from the plugin at runtime. Here we implement backend.QueryDataHandler,
// backend.CallResourceHandler, backend.CheckHealthHandler and backend.StreamHandler. A plugin does not have to
// implement all of these interfaces - only those required for a particular task. For example, if the plugin does not
// need streaming functionality you are free to remove the methods that implement backend.StreamHandler.
// Implementing instancemgmt.InstanceDisposer is useful for cleaning up resources used by the previous datasource
// instance when a new instance is created after the datasource settings change.
var (
_ backend.QueryDataHandler = (*Service)(nil)
_ backend.CallResourceHandler = (*Service)(nil)
_ backend.CheckHealthHandler = (*Service)(nil)
_ backend.StreamHandler = (*Service)(nil)
)
var logger = log.New("tsdb.phlare")
type Service struct {
im instancemgmt.InstanceManager
}
func (s *Service) getInstance(pluginCtx backend.PluginContext) (*PhlareDatasource, error) {
i, err := s.im.Get(pluginCtx)
if err != nil {
return nil, err
}
in := i.(*PhlareDatasource)
return in, nil
}
func ProvideService(httpClientProvider httpclient.Provider) *Service {
return &Service{
im: datasource.NewInstanceManager(newInstanceSettings(httpClientProvider)),
}
}
func newInstanceSettings(httpClientProvider httpclient.Provider) datasource.InstanceFactoryFunc {
return func(settings backend.DataSourceInstanceSettings) (instancemgmt.Instance, error) {
return NewPhlareDatasource(httpClientProvider, settings)
}
}
func (s *Service) QueryData(ctx context.Context, req *backend.QueryDataRequest) (*backend.QueryDataResponse, error) {
i, err := s.getInstance(req.PluginContext)
if err != nil {
return nil, err
}
return i.QueryData(ctx, req)
}
func (s *Service) CallResource(ctx context.Context, req *backend.CallResourceRequest, sender backend.CallResourceResponseSender) error {
i, err := s.getInstance(req.PluginContext)
if err != nil {
return err
}
return i.CallResource(ctx, req, sender)
}
func (s *Service) CheckHealth(ctx context.Context, req *backend.CheckHealthRequest) (*backend.CheckHealthResult, error) {
i, err := s.getInstance(req.PluginContext)
if err != nil {
return nil, err
}
return i.CheckHealth(ctx, req)
}
func (s *Service) SubscribeStream(ctx context.Context, req *backend.SubscribeStreamRequest) (*backend.SubscribeStreamResponse, error) {
i, err := s.getInstance(req.PluginContext)
if err != nil {
return nil, err
}
return i.SubscribeStream(ctx, req)
}
func (s *Service) RunStream(ctx context.Context, req *backend.RunStreamRequest, sender *backend.StreamSender) error {
i, err := s.getInstance(req.PluginContext)
if err != nil {
return err
}
return i.RunStream(ctx, req, sender)
}
// PublishStream is called when a client sends a message to the stream.
func (s *Service) PublishStream(ctx context.Context, req *backend.PublishStreamRequest) (*backend.PublishStreamResponse, error) {
i, err := s.getInstance(req.PluginContext)
if err != nil {
return nil, err
}
return i.PublishStream(ctx, req)
}


@ -7,6 +7,7 @@ export function buildCategories(plugins: DataSourcePluginMeta[]): DataSourcePlug
{ id: 'tsdb', title: 'Time series databases', plugins: [] },
{ id: 'logging', title: 'Logging & document databases', plugins: [] },
{ id: 'tracing', title: 'Distributed tracing', plugins: [] },
{ id: 'profiling', title: 'Profiling', plugins: [] },
{ id: 'sql', title: 'SQL', plugins: [] },
{ id: 'cloud', title: 'Cloud', plugins: [] },
{ id: 'enterprise', title: 'Enterprise plugins', plugins: [] },


@ -39,6 +39,10 @@ const tempoPlugin = async () =>
await import(/* webpackChunkName: "tempoPlugin" */ 'app/plugins/datasource/tempo/module');
const alertmanagerPlugin = async () =>
await import(/* webpackChunkName: "alertmanagerPlugin" */ 'app/plugins/datasource/alertmanager/module');
const phlarePlugin = async () =>
await import(/* webpackChunkName: "phlarePlugin" */ 'app/plugins/datasource/phlare/module');
const parcaPlugin = async () =>
await import(/* webpackChunkName: "parcaPlugin" */ 'app/plugins/datasource/parca/module');
import { config } from '@grafana/runtime';
import * as alertGroupsPanel from 'app/plugins/panel/alertGroups/module';
@ -113,6 +117,8 @@ const builtInPlugins: any = {
'app/plugins/datasource/grafana-azure-monitor-datasource/module': azureMonitorPlugin,
'app/plugins/datasource/tempo/module': tempoPlugin,
'app/plugins/datasource/alertmanager/module': alertmanagerPlugin,
'app/plugins/datasource/phlare/module': phlarePlugin,
'app/plugins/datasource/parca/module': parcaPlugin,
'app/plugins/panel/text/module': textPanel,
'app/plugins/panel/timeseries/module': timeseriesPanel,


@ -0,0 +1,23 @@
import React from 'react';
import { DataSourcePluginOptionsEditorProps } from '@grafana/data';
import { DataSourceHttpSettings } from '@grafana/ui';
import { ParcaDataSourceOptions } from './types';
interface Props extends DataSourcePluginOptionsEditorProps<ParcaDataSourceOptions> {}
export const ConfigEditor = (props: Props) => {
const { options, onOptionsChange } = props;
return (
<>
<DataSourceHttpSettings
defaultUrl={'http://localhost:7070'}
dataSourceConfig={options}
showAccessOptions={false}
onChange={onOptionsChange}
/>
</>
);
};


@ -0,0 +1,34 @@
import { css } from '@emotion/css';
import React from 'react';
import { GrafanaTheme2 } from '@grafana/data';
import { useStyles2 } from '@grafana/ui';
import { Stack } from './Stack';
interface EditorRowProps {
children: React.ReactNode;
stackProps?: Partial<React.ComponentProps<typeof Stack>>;
}
export const EditorRow: React.FC<EditorRowProps> = ({ children, stackProps }) => {
const styles = useStyles2(getStyles);
return (
<div className={styles.root}>
<Stack gap={2} {...stackProps}>
{children}
</Stack>
</div>
);
};
const getStyles = (theme: GrafanaTheme2) => {
return {
root: css({
padding: theme.spacing(1),
backgroundColor: theme.colors.background.secondary,
borderRadius: theme.shape.borderRadius(1),
}),
};
};


@ -0,0 +1,15 @@
import React from 'react';
import { Stack } from './Stack';
interface EditorRowsProps {
children: React.ReactNode;
}
export const EditorRows: React.FC<EditorRowsProps> = ({ children }) => {
return (
<Stack gap={0.5} direction="column">
{children}
</Stack>
);
};


@ -0,0 +1,144 @@
import { css } from '@emotion/css';
import React, { useEffect, useRef } from 'react';
import { useLatest } from 'react-use';
import { CodeEditor, Monaco, useStyles2, monacoTypes } from '@grafana/ui';
import { ParcaDataSource } from '../datasource';
import { languageDefinition } from '../lang';
import { CompletionProvider } from './autocomplete';
interface Props {
value: string;
datasource: ParcaDataSource;
onChange: (val: string) => void;
onRunQuery: (value: string) => void;
}
export function LabelsEditor(props: Props) {
const setupAutocompleteFn = useAutocomplete(props.datasource);
const styles = useStyles2(getStyles);
const onRunQueryRef = useLatest(props.onRunQuery);
const containerRef = useRef<HTMLDivElement>(null);
return (
<div
className={styles.wrapper}
// NOTE: we will be setting inline-style-width/height on this element
ref={containerRef}
>
<CodeEditor
value={props.value}
language={langId}
onBlur={props.onChange}
containerStyles={styles.queryField}
monacoOptions={{
folding: false,
fontSize: 14,
lineNumbers: 'off',
overviewRulerLanes: 0,
renderLineHighlight: 'none',
scrollbar: {
vertical: 'hidden',
verticalScrollbarSize: 8, // used as "padding-right"
horizontal: 'hidden',
horizontalScrollbarSize: 0,
},
scrollBeyondLastLine: false,
wordWrap: 'on',
padding: {
top: 5,
bottom: 6,
},
}}
onBeforeEditorMount={ensureParcaQL}
onEditorDidMount={(editor, monaco) => {
setupAutocompleteFn(editor, monaco);
const updateElementHeight = () => {
const containerDiv = containerRef.current;
if (containerDiv !== null) {
const pixelHeight = editor.getContentHeight();
containerDiv.style.height = `${pixelHeight + EDITOR_HEIGHT_OFFSET}px`;
containerDiv.style.width = '100%';
const pixelWidth = containerDiv.clientWidth;
editor.layout({ width: pixelWidth, height: pixelHeight });
}
};
editor.onDidContentSizeChange(updateElementHeight);
updateElementHeight();
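// Shift+Enter runs the query with the current content of the editor.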
editor.addCommand(monaco.KeyMod.Shift | monaco.KeyCode.Enter, () => {
onRunQueryRef.current(editor.getValue());
});
}}
/>
</div>
);
}
// This number was chosen by testing various values. It might be necessary because of the width of the border; not sure.
// It needs to do 2 things:
// 1. when the editor is single-line, it should make the editor height visually correct
// 2. when the editor is multi-line, the editor should not be "scrollable" (meaning that a scroll movement
//    in the editor would scroll the content by a couple of pixels up & down, which we want to avoid)
const EDITOR_HEIGHT_OFFSET = 2;
/**
* Hook that returns a function that will set up Monaco autocomplete for the label selector
* @param datasource
*/
function useAutocomplete(datasource: ParcaDataSource) {
const autocompleteDisposeFun = useRef<(() => void) | null>(null);
useEffect(() => {
// when we unmount, we unregister the autocomplete-function, if it was registered
return () => {
autocompleteDisposeFun.current?.();
};
}, []);
// This should be run in monaco onEditorDidMount
return async (editor: monacoTypes.editor.IStandaloneCodeEditor, monaco: Monaco) => {
const provider = new CompletionProvider(datasource, monaco, editor);
await provider.init();
const { dispose } = monaco.languages.registerCompletionItemProvider(langId, provider);
autocompleteDisposeFun.current = dispose;
};
}
// we must only run the setup code once
let parcaqlSetupDone = false;
const langId = 'parca';
function ensureParcaQL(monaco: Monaco) {
if (parcaqlSetupDone === false) {
parcaqlSetupDone = true;
const { aliases, extensions, mimetypes, def } = languageDefinition;
monaco.languages.register({ id: langId, aliases, extensions, mimetypes });
monaco.languages.setMonarchTokensProvider(langId, def.language);
monaco.languages.setLanguageConfiguration(langId, def.languageConfiguration);
}
}
const getStyles = () => {
return {
queryField: css`
flex: 1;
// Not exactly sure why, but without this the editor does not shrink after resizing (so you can make it bigger but
// not smaller). At the same time this does not actually make the editor 100px wide, because it has flex: 1, so I
// assume it acts more like a flex-basis here (even though flex-basis itself does not work for this). So yeah, CSS magic.
width: 100px;
`,
wrapper: css`
display: flex;
flex: 1;
border: 1px solid rgba(36, 41, 46, 0.3);
border-radius: 2px;
`,
};
};


@ -0,0 +1,91 @@
import { screen } from '@testing-library/dom';
import { render } from '@testing-library/react';
import userEvent from '@testing-library/user-event';
import React from 'react';
import { CoreApp, DataSourcePluginMeta, PluginType } from '@grafana/data';
import { ParcaDataSource } from '../datasource';
import { ProfileTypeMessage } from '../types';
import { Props, QueryEditor } from './QueryEditor';
describe('QueryEditor', () => {
it('should render without error', async () => {
setup();
expect(await screen.findByText('process_cpu:cpu')).toBeDefined();
});
it('should render options', async () => {
setup();
await openOptions();
expect(screen.getByText(/Metric/)).toBeDefined();
expect(screen.getByText(/Profile/)).toBeDefined();
expect(screen.getByText(/Both/)).toBeDefined();
});
it('should render correct options outside of explore', async () => {
setup({ props: { app: CoreApp.Dashboard } });
await openOptions();
expect(screen.getByText(/Metric/)).toBeDefined();
expect(screen.getByText(/Profile/)).toBeDefined();
expect(screen.queryAllByText(/Both/).length).toBe(0);
});
});
async function openOptions() {
const options = screen.getByText(/Options/);
expect(options).toBeDefined();
await userEvent.click(options);
}
function setup(options: { props: Partial<Props> } = { props: {} }) {
const onChange = jest.fn();
const ds = new ParcaDataSource({
name: 'test',
uid: 'test',
type: PluginType.datasource,
access: 'proxy',
id: 1,
jsonData: {},
meta: {} as unknown as DataSourcePluginMeta,
readOnly: false,
});
ds.getProfileTypes = jest.fn().mockResolvedValue([
{
name: 'process_cpu',
ID: 'process_cpu:cpu',
period_type: 'day',
period_unit: 's',
sample_unit: 'ms',
sample_type: 'cpu',
},
{
name: 'memory',
ID: 'memory:memory',
period_type: 'day',
period_unit: 's',
sample_unit: 'ms',
sample_type: 'memory',
},
] as ProfileTypeMessage[]);
const utils = render(
<QueryEditor
query={{
queryType: 'both',
labelSelector: '',
profileTypeId: 'process_cpu:cpu',
refId: 'A',
}}
datasource={ds}
onChange={onChange}
onRunQuery={() => {}}
app={CoreApp.Explore}
{...options.props}
/>
);
return { ...utils, onChange };
}


@ -0,0 +1,114 @@
import { defaults } from 'lodash';
import React, { useMemo, useState } from 'react';
import { useMount } from 'react-use';
import { CoreApp, QueryEditorProps } from '@grafana/data';
import { ButtonCascader, CascaderOption } from '@grafana/ui';
import { ParcaDataSource } from '../datasource';
import { defaultQuery, ParcaDataSourceOptions, ProfileTypeMessage, Query } from '../types';
import { EditorRow } from './EditorRow';
import { EditorRows } from './EditorRows';
import { LabelsEditor } from './LabelsEditor';
import { QueryOptions } from './QueryOptions';
export type Props = QueryEditorProps<ParcaDataSource, Query, ParcaDataSourceOptions>;
export function QueryEditor(props: Props) {
const [profileTypes, setProfileTypes] = useState<ProfileTypeMessage[]>([]);
function onProfileTypeChange(value: string[], selectedOptions: CascaderOption[]) {
if (selectedOptions.length === 0) {
return;
}
const id = selectedOptions[selectedOptions.length - 1].value;
if (typeof id !== 'string') {
throw new Error('id is not string');
}
props.onChange({ ...props.query, profileTypeId: id });
}
function onLabelSelectorChange(value: string) {
props.onChange({ ...props.query, labelSelector: value });
}
function handleRunQuery(value: string) {
props.onChange({ ...props.query, labelSelector: value });
props.onRunQuery();
}
useMount(async () => {
const profileTypes = await props.datasource.getProfileTypes();
setProfileTypes(profileTypes);
});
// Turn profileTypes into cascader options
const cascaderOptions = useMemo(() => {
let mainTypes = new Map<string, CascaderOption>();
// Classify profile types by name then sample type.
for (let profileType of profileTypes) {
if (!mainTypes.has(profileType.name)) {
mainTypes.set(profileType.name, {
label: profileType.name,
value: profileType.ID,
children: [],
});
}
mainTypes.get(profileType.name)?.children?.push({
label: profileType.sample_type,
value: profileType.ID,
});
}
return Array.from(mainTypes.values());
}, [profileTypes]);
const selectedProfileName = useMemo(() => {
if (!profileTypes) {
return 'Loading';
}
const profile = profileTypes.find((type) => type.ID === props.query.profileTypeId);
if (!profile) {
return 'Select a profile type';
}
return profile.name + ' - ' + profile.sample_type;
}, [props.query.profileTypeId, profileTypes]);
let query = normalizeQuery(props.query, props.app);
return (
<EditorRows>
<EditorRow stackProps={{ wrap: false, gap: 1 }}>
<ButtonCascader onChange={onProfileTypeChange} options={cascaderOptions} buttonProps={{ variant: 'secondary' }}>
{selectedProfileName}
</ButtonCascader>
<LabelsEditor
value={query.labelSelector}
onChange={onLabelSelectorChange}
datasource={props.datasource}
onRunQuery={handleRunQuery}
/>
</EditorRow>
<EditorRow>
<QueryOptions
query={query}
onQueryTypeChange={(val) => {
props.onChange({ ...query, queryType: val });
}}
app={props.app}
/>
</EditorRow>
</EditorRows>
);
}
function normalizeQuery(query: Query, app?: CoreApp) {
let normalized = defaults(query, defaultQuery);
if (app !== CoreApp.Explore && normalized.queryType === 'both') {
// In dashboards and other places, we can't show both types of graphs at the same time.
// This will also be the default when a 'both' query is added from Explore to a dashboard.
normalized.queryType = 'profile';
}
return normalized;
}


@ -0,0 +1,107 @@
import { css } from '@emotion/css';
import React from 'react';
import { useToggle } from 'react-use';
import { CoreApp, GrafanaTheme2 } from '@grafana/data';
import { Icon, useStyles2, RadioButtonGroup, Field } from '@grafana/ui';
import { Query } from '../types';
import { Stack } from './Stack';
export interface Props {
query: Query;
onQueryTypeChange: (val: Query['queryType']) => void;
app?: CoreApp;
}
const rangeOptions: Array<{ value: Query['queryType']; label: string; description: string }> = [
{ value: 'metrics', label: 'Metric', description: 'Return aggregated metrics' },
{ value: 'profile', label: 'Profile', description: 'Return profile' },
{ value: 'both', label: 'Both', description: 'Return both metric and profile data' },
];
function getOptions(app?: CoreApp) {
if (app === CoreApp.Explore) {
return rangeOptions;
}
return rangeOptions.filter((option) => option.value !== 'both');
}
/**
* Based on the QueryOptionGroup component from grafana/ui, which is not available yet.
*/
export function QueryOptions({ query, onQueryTypeChange, app }: Props) {
const [isOpen, toggleOpen] = useToggle(false);
const styles = useStyles2(getStyles);
const options = getOptions(app);
return (
<Stack gap={0} direction="column">
<div className={styles.header} onClick={toggleOpen} title="Click to edit options">
<div className={styles.toggle}>
<Icon name={isOpen ? 'angle-down' : 'angle-right'} />
</div>
<h6 className={styles.title}>Options</h6>
{!isOpen && (
<div className={styles.description}>
<span>Type: {query.queryType}</span>
</div>
)}
</div>
{isOpen && (
<div className={styles.body}>
<Field label={'Query Type'}>
<RadioButtonGroup options={options} value={query.queryType} onChange={onQueryTypeChange} />
</Field>
</div>
)}
</Stack>
);
}
const getStyles = (theme: GrafanaTheme2) => {
return {
switchLabel: css({
color: theme.colors.text.secondary,
cursor: 'pointer',
fontSize: theme.typography.bodySmall.fontSize,
'&:hover': {
color: theme.colors.text.primary,
},
}),
header: css({
display: 'flex',
cursor: 'pointer',
alignItems: 'baseline',
color: theme.colors.text.primary,
'&:hover': {
background: theme.colors.emphasize(theme.colors.background.primary, 0.03),
},
}),
title: css({
flexGrow: 1,
overflow: 'hidden',
fontSize: theme.typography.bodySmall.fontSize,
fontWeight: theme.typography.fontWeightMedium,
margin: 0,
}),
description: css({
color: theme.colors.text.secondary,
fontSize: theme.typography.bodySmall.fontSize,
paddingLeft: theme.spacing(2),
gap: theme.spacing(2),
display: 'flex',
}),
body: css({
display: 'flex',
paddingTop: theme.spacing(2),
gap: theme.spacing(2),
flexWrap: 'wrap',
}),
toggle: css({
color: theme.colors.text.secondary,
marginRight: `${theme.spacing(1)}`,
}),
};
};


@ -0,0 +1,30 @@
import { css } from '@emotion/css';
import React, { CSSProperties, useCallback } from 'react';
import { GrafanaTheme2 } from '@grafana/data';
import { useStyles2 } from '@grafana/ui';
interface StackProps {
direction?: CSSProperties['flexDirection'];
alignItems?: CSSProperties['alignItems'];
wrap?: boolean;
gap?: number;
flexGrow?: CSSProperties['flexGrow'];
children: React.ReactNode;
}
export function Stack(props: StackProps) {
const styles = useStyles2(useCallback((theme) => getStyles(theme, props), [props]));
return <div className={styles.root}>{props.children}</div>;
}
const getStyles = (theme: GrafanaTheme2, props: StackProps) => ({
root: css({
display: 'flex',
flexDirection: props.direction ?? 'row',
flexWrap: props.wrap ?? true ? 'wrap' : undefined,
alignItems: props.alignItems,
gap: theme.spacing(props.gap ?? 2),
flexGrow: props.flexGrow,
}),
});


@ -0,0 +1,107 @@
import { editor } from 'monaco-editor';
import { Monaco, monacoTypes } from '@grafana/ui';
import { CompletionProvider } from './autocomplete';
import IEditorModel = editor.IEditorModel;
describe('CompletionProvider', () => {
it('suggests labels', async () => {
const { provider, model } = await setup('{}', 1, defaultLabels);
const result = await provider.provideCompletionItems(model, {} as monacoTypes.Position);
expect((result! as monacoTypes.languages.CompletionList).suggestions).toEqual([
expect.objectContaining({ label: 'foo', insertText: 'foo' }),
]);
});
it('suggests label names with quotes', async () => {
const { provider, model } = await setup('{foo=}', 6, defaultLabels);
const result = await provider.provideCompletionItems(model, {} as monacoTypes.Position);
expect((result! as monacoTypes.languages.CompletionList).suggestions).toEqual([
expect.objectContaining({ label: 'bar', insertText: '"bar"' }),
]);
});
it('suggests label names without quotes', async () => {
const { provider, model } = await setup('{foo="}', 7, defaultLabels);
const result = await provider.provideCompletionItems(model, {} as monacoTypes.Position);
expect((result! as monacoTypes.languages.CompletionList).suggestions).toEqual([
expect.objectContaining({ label: 'bar', insertText: 'bar' }),
]);
});
it('suggests nothing without labels', async () => {
const { provider, model } = await setup('{foo="}', 7, {});
const result = await provider.provideCompletionItems(model, {} as monacoTypes.Position);
expect((result! as monacoTypes.languages.CompletionList).suggestions).toEqual([]);
});
it('suggests labels on empty input', async () => {
const { provider, model } = await setup('', 0, defaultLabels);
const result = await provider.provideCompletionItems(model, {} as monacoTypes.Position);
expect((result! as monacoTypes.languages.CompletionList).suggestions).toEqual([
expect.objectContaining({ label: 'foo', insertText: '{foo="' }),
]);
});
});
const defaultLabels = { foo: ['bar'] };
const fakeMonaco = {
Range: {
fromPositions() {
return null;
},
},
languages: {
CompletionItemKind: {
Enum: 1,
EnumMember: 2,
},
},
} as unknown as Monaco;
function makeFakeEditor(model: IEditorModel) {
return {
getModel(): IEditorModel | null {
return model;
},
} as unknown as monacoTypes.editor.IStandaloneCodeEditor;
}
async function setup(value: string, offset: number, labels: { [label: string]: string[] }) {
const model = makeModel(value, offset);
const editor = makeFakeEditor(model);
const provider = new CompletionProvider(
{
getLabelNames() {
return Promise.resolve(Object.keys(labels));
},
getLabelValues(label: string) {
return Promise.resolve(labels[label]);
},
},
fakeMonaco,
editor
);
await provider.init();
return { provider, model };
}
function makeModel(value: string, offset: number): monacoTypes.editor.ITextModel {
return {
id: 'test_monaco',
getWordAtPosition() {
return null;
},
getOffsetAt() {
return offset;
},
getValue() {
return value;
},
} as unknown as monacoTypes.editor.ITextModel;
}

View File

@ -0,0 +1,234 @@
import { monacoTypes, Monaco } from '@grafana/ui';
/**
* Class that implements CompletionItemProvider interface and allows us to provide suggestion for the Monaco
* autocomplete system.
*
* At this moment we just pass it all the labels/values we get from the Fire backend; later on we may do something a
* bit smarter if there are lots of labels.
*/
export class CompletionProvider implements monacoTypes.languages.CompletionItemProvider {
triggerCharacters = ['{', ',', '[', '(', '=', '~', ' ', '"'];
private labels: { [label: string]: string[] } = {};
constructor(
private datasource: {
getLabelNames: () => Promise<string[]>;
getLabelValues: (label: string) => Promise<string[]>;
},
private monaco: Monaco,
private editor: monacoTypes.editor.IStandaloneCodeEditor
) {}
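// init fetches the label names up front; label values are loaded lazily (and then cached) in getCompletions
// the first time a given label is used.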
async init() {
const names = await this.datasource.getLabelNames();
this.labels = names.reduce<{ [label: string]: string[] }>((acc, name) => {
acc[name] = [];
return acc;
}, {});
}
provideCompletionItems(
model: monacoTypes.editor.ITextModel,
position: monacoTypes.Position
): monacoTypes.languages.ProviderResult<monacoTypes.languages.CompletionList> {
// if the model-id does not match, then this call is from a different editor-instance,
// not "our instance", so return nothing
if (this.editor.getModel()?.id !== model.id) {
return { suggestions: [] };
}
const { range, offset } = getRangeAndOffset(this.monaco, model, position);
const situation = getSituation(model.getValue(), offset);
// Cannot be async/await cause of the ProviderResult return type
return this.getCompletions(situation).then((completionItems) => {
// monaco by-default alphabetically orders the items.
// to stop it, we use a number-as-string sortkey,
// so that monaco keeps the order we use
const maxIndexDigits = completionItems.length.toString().length;
const suggestions: monacoTypes.languages.CompletionItem[] = completionItems.map((item, index) => ({
kind: getMonacoCompletionItemKind(item.type, this.monaco!),
label: item.label,
insertText: item.insertText,
sortText: index.toString().padStart(maxIndexDigits, '0'), // to force the order we have
range,
}));
return { suggestions };
});
}
/**
* Get suggestion based on the situation we are in like whether we should suggest label names or values.
* @param situation
* @private
*/
private async getCompletions(situation: Situation): Promise<Completion[]> {
if (!Object.keys(this.labels).length) {
return [];
}
switch (situation.type) {
// Not really sure what would make sense to suggest in this case so just leave it
case 'UNKNOWN': {
return [];
}
case 'EMPTY': {
return Object.keys(this.labels).map((key) => {
return {
label: key,
insertText: `{${key}="`,
type: 'LABEL_NAME',
};
});
}
case 'IN_LABEL_NAME':
return Object.keys(this.labels).map((key) => {
return {
label: key,
insertText: key,
type: 'LABEL_NAME',
};
});
case 'IN_LABEL_VALUE':
let values = [];
if (this.labels[situation.labelName].length) {
values = this.labels[situation.labelName];
} else {
values = await this.datasource.getLabelValues(situation.labelName);
this.labels[situation.labelName] = values;
}
return values.map((val) => {
return {
label: val,
insertText: situation.betweenQuotes ? val : `"${val}"`,
type: 'LABEL_VALUE',
};
});
default:
throw new Error(`Unexpected situation ${situation}`);
}
}
}
/**
* Get item kind which is used for icon next to the suggestion.
* @param type
* @param monaco
*/
function getMonacoCompletionItemKind(type: CompletionType, monaco: Monaco): monacoTypes.languages.CompletionItemKind {
switch (type) {
case 'LABEL_NAME':
return monaco.languages.CompletionItemKind.Enum;
case 'LABEL_VALUE':
return monaco.languages.CompletionItemKind.EnumMember;
default:
throw new Error(`Unexpected CompletionType: ${type}`);
}
}
export type CompletionType = 'LABEL_NAME' | 'LABEL_VALUE';
type Completion = {
type: CompletionType;
label: string;
insertText: string;
};
export type Label = {
name: string;
value: string;
};
export type Situation =
| {
type: 'UNKNOWN';
}
| {
type: 'EMPTY';
}
| {
type: 'IN_LABEL_NAME';
otherLabels: Label[];
}
| {
type: 'IN_LABEL_VALUE';
labelName: string;
betweenQuotes: boolean;
otherLabels: Label[];
};
const labelNameRegex = /[a-zA-Z_][a-zA-Z0-9_]*/;
const labelValueRegex = /[^"]*/; // anything except a double quote
const labelPairsRegex = new RegExp(`(${labelNameRegex.source})="(${labelValueRegex.source})"`, 'g');
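// Matches an unfinished label=value pair at the end of the input, capturing the label name and an optional opening quote.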
const inLabelValueRegex = new RegExp(`(${labelNameRegex.source})=("?)${labelValueRegex.source}$`);
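// Matches a partially typed label name right after '{' or ','.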
const inLabelNameRegex = new RegExp(/[{,]\s*[a-zA-Z0-9_]*$/);
/**
* Figure out where is the cursor and what kind of suggestions are appropriate.
* As currently Fire handles just a simple {foo="bar", baz="zyx"} kind of values we can do with simple regex to figure
* out where we are with the cursor.
* @param text
* @param offset
*/
function getSituation(text: string, offset: number): Situation {
if (text === '') {
return {
type: 'EMPTY',
};
}
// Get all the labels so far in the query, so we can do some more filtering.
const matches = text.matchAll(labelPairsRegex);
const existingLabels = Array.from(matches).reduce<Label[]>((acc, match) => {
const [_, name, value] = match;
acc.push({ name, value });
return acc;
}, []);
// Check if we are editing a label value right now. If so also get name of the label
const matchLabelValue = text.substring(0, offset).match(inLabelValueRegex);
if (matchLabelValue) {
return {
type: 'IN_LABEL_VALUE',
labelName: matchLabelValue[1],
betweenQuotes: !!matchLabelValue[2],
otherLabels: existingLabels,
};
}
// Check if we are editing a label name
const matchLabelName = text.substring(0, offset).match(inLabelNameRegex);
if (matchLabelName) {
return {
type: 'IN_LABEL_NAME',
otherLabels: existingLabels,
};
}
// Will happen only if user writes something that isn't really a label selector
return {
type: 'UNKNOWN',
};
}
function getRangeAndOffset(monaco: Monaco, model: monacoTypes.editor.ITextModel, position: monacoTypes.Position) {
const word = model.getWordAtPosition(position);
const range =
word != null
? monaco.Range.lift({
startLineNumber: position.lineNumber,
endLineNumber: position.lineNumber,
startColumn: word.startColumn,
endColumn: word.endColumn,
})
: monaco.Range.fromPositions(position);
// documentation says `position` will be "adjusted" in `getOffsetAt` so we clone it here just to be safe.
const positionClone = {
column: position.column,
lineNumber: position.lineNumber,
};
const offset = model.getOffsetAt(positionClone);
return { offset, range };
}


@ -0,0 +1,33 @@
import { Observable, of } from 'rxjs';
import { DataQueryRequest, DataQueryResponse, DataSourceInstanceSettings } from '@grafana/data';
import { DataSourceWithBackend } from '@grafana/runtime';
import { ParcaDataSourceOptions, Query, ProfileTypeMessage } from './types';
export class ParcaDataSource extends DataSourceWithBackend<Query, ParcaDataSourceOptions> {
constructor(instanceSettings: DataSourceInstanceSettings<ParcaDataSourceOptions>) {
super(instanceSettings);
}
query(request: DataQueryRequest<Query>): Observable<DataQueryResponse> {
if (!request.targets.every((q) => q.profileTypeId)) {
// When changing the data source in Explore, the first query can be sent without profileTypeId filled in
return of({ data: [] });
}
return super.query(request);
}
async getProfileTypes(): Promise<ProfileTypeMessage[]> {
return await super.getResource('profileTypes');
}
async getLabelNames(): Promise<string[]> {
return await super.getResource('labelNames');
}
async getLabelValues(labelName: string): Promise<string[]> {
return await super.getResource('labelValues', { label: labelName });
}
}


@ -0,0 +1 @@
<svg id="Layer_1" data-name="Layer 1" xmlns="http://www.w3.org/2000/svg" viewBox="70 95 102.46 138.6" fill="currentColor"><path d="M70.27,146.13c0-30.4,21.23-50.29,50.49-50.29,30,0,51.05,21,51.05,50.1s-21.22,49.34-48.95,49.34c-18,0-32.7-8.42-39.78-22.76v60.62H70.27Zm88.54-.57c0-21.8-15.3-37.86-37.86-37.86-22.76,0-37.87,16.06-37.87,37.86S98.19,183.42,121,183.42C143.51,183.42,158.81,167.36,158.81,145.56Z"/></svg>


@ -0,0 +1,12 @@
import { language, languageConfiguration } from './lang';
export const languageDefinition = {
id: 'parca',
extensions: ['.parca'],
aliases: ['parca'],
mimetypes: [],
def: {
language,
languageConfiguration,
},
};


@ -0,0 +1,97 @@
import type { languages } from 'monaco-editor';
export const languageConfiguration: languages.LanguageConfiguration = {
// the default separators except `@$`
wordPattern: /(-?\d*\.\d\w*)|([^`~!#%^&*()\-=+\[{\]}\\|;:'",.<>\/?\s]+)/g,
brackets: [['{', '}']],
autoClosingPairs: [
{ open: '{', close: '}' },
{ open: '"', close: '"' },
{ open: "'", close: "'" },
],
surroundingPairs: [
{ open: '{', close: '}' },
{ open: '"', close: '"' },
{ open: "'", close: "'" },
],
folding: {},
};
export const language: languages.IMonarchLanguage = {
ignoreCase: false,
defaultToken: '',
tokenPostfix: '.fireql',
keywords: [],
operators: [],
// we include these common regular expressions
symbols: /[=><!~?:&|+\-*\/^%]+/,
escapes: /\\(?:[abfnrtv\\"']|x[0-9A-Fa-f]{1,4}|u[0-9A-Fa-f]{4}|U[0-9A-Fa-f]{8})/,
digits: /\d+(_+\d+)*/,
octaldigits: /[0-7]+(_+[0-7]+)*/,
binarydigits: /[0-1]+(_+[0-1]+)*/,
hexdigits: /[0-9a-fA-F]+(_+[0-9a-fA-F]+)*/,
integersuffix: /(ll|LL|u|U|l|L)?(ll|LL|u|U|l|L)?/,
floatsuffix: /[fFlL]?/,
tokenizer: {
root: [
// labels
[/[a-z_]\w*(?=\s*(=|!=|=~|!~))/, 'tag'],
// strings
[/"([^"\\]|\\.)*$/, 'string.invalid'], // non-teminated string
[/'([^'\\]|\\.)*$/, 'string.invalid'], // non-teminated string
[/"/, 'string', '@string_double'],
[/'/, 'string', '@string_single'],
// whitespace
{ include: '@whitespace' },
// delimiters and operators
[/[{}()\[\]]/, '@brackets'],
[/[<>](?!@symbols)/, '@brackets'],
[
/@symbols/,
{
cases: {
'@operators': 'delimiter',
'@default': '',
},
},
],
// numbers
[/\d+/, 'number'],
[/\d*\d+[eE]([\-+]?\d+)?(@floatsuffix)/, 'number.float'],
[/\d*\.\d+([eE][\-+]?\d+)?(@floatsuffix)/, 'number.float'],
[/0[xX][0-9a-fA-F']*[0-9a-fA-F](@integersuffix)/, 'number.hex'],
[/0[0-7']*[0-7](@integersuffix)/, 'number.octal'],
[/0[bB][0-1']*[0-1](@integersuffix)/, 'number.binary'],
[/\d[\d']*\d(@integersuffix)/, 'number'],
[/\d(@integersuffix)/, 'number'],
],
string_double: [
[/[^\\"]+/, 'string'],
[/@escapes/, 'string.escape'],
[/\\./, 'string.escape.invalid'],
[/"/, 'string', '@pop'],
],
string_single: [
[/[^\\']+/, 'string'],
[/@escapes/, 'string.escape'],
[/\\./, 'string.escape.invalid'],
[/'/, 'string', '@pop'],
],
clauses: [
[/[^(,)]/, 'tag'],
[/\)/, 'identifier', '@pop'],
],
whitespace: [[/[ \t\r\n]+/, 'white']],
},
};


@ -0,0 +1,10 @@
import { DataSourcePlugin } from '@grafana/data';
import { ConfigEditor } from './ConfigEditor';
import { QueryEditor } from './QueryEditor/QueryEditor';
import { ParcaDataSource } from './datasource';
import { Query, ParcaDataSourceOptions } from './types';
export const plugin = new DataSourcePlugin<ParcaDataSource, Query, ParcaDataSourceOptions>(ParcaDataSource)
.setConfigEditor(ConfigEditor)
.setQueryEditor(QueryEditor);


@ -0,0 +1,26 @@
{
"type": "datasource",
"name": "Parca",
"id": "parca",
"category": "profiling",
"metrics": true,
"backend": true,
"info": {
"description": "Continuous profiling for analysis of CPU and memory usage, down to the line number and throughout time. Saving infrastructure cost, improving performance, and increasing reliability.",
"author": {
"name": "Grafana Labs",
"url": "https://www.grafana.com"
},
"keywords": ["grafana", "datasource", "parca", "profiling"],
"logos": {
"small": "img/logo-small.svg",
"large": "img/logo-small.svg"
},
"links": [
{
"name": "GitHub Project",
"url": "https://github.com/parca-dev/parca"
}
]
}
}


@ -0,0 +1,26 @@
import { DataQuery, DataSourceJsonData } from '@grafana/data';
export interface Query extends DataQuery {
labelSelector: string;
profileTypeId: string;
queryType: 'metrics' | 'profile' | 'both';
}
export interface ProfileTypeMessage {
ID: string;
name: string;
period_type: string;
period_unit: string;
sample_type: string;
sample_unit: string;
}
export const defaultQuery: Partial<Query> = {
labelSelector: '{}',
queryType: 'both',
};
/**
* These are options configured for each DataSource instance.
*/
export interface ParcaDataSourceOptions extends DataSourceJsonData {}


@ -0,0 +1,61 @@
import React from 'react';
import { DataSourcePluginOptionsEditorProps } from '@grafana/data';
import { DataSourceHttpSettings, EventsWithValidation, LegacyForms, regexValidation } from '@grafana/ui';
import { FireDataSourceOptions } from './types';
interface Props extends DataSourcePluginOptionsEditorProps<FireDataSourceOptions> {}
export const ConfigEditor = (props: Props) => {
const { options, onOptionsChange } = props;
return (
<>
<DataSourceHttpSettings
defaultUrl={'http://localhost:4100'}
dataSourceConfig={options}
showAccessOptions={false}
onChange={onOptionsChange}
/>
<h3 className="page-heading">Querying</h3>
<div className="gf-form-group">
<div className="gf-form-inline">
<div className="gf-form">
<LegacyForms.FormField
label="Minimal step"
labelWidth={13}
inputEl={
<LegacyForms.Input
className="width-6"
value={options.jsonData.minStep}
spellCheck={false}
placeholder="15s"
onChange={(event) => {
onOptionsChange({
...options,
jsonData: {
...options.jsonData,
minStep: event.currentTarget.value,
},
});
}}
validationEvents={{
[EventsWithValidation.onBlur]: [
regexValidation(
/^$|^\d+(ms|[Mwdhmsy])$/,
'Value is not valid. You can use a number with a time unit specifier: y, M, w, d, h, m, s'
),
],
}}
/>
}
tooltip="Minimal step used for metric query. Should be the same or higher as the scrape interval setting in the Fire database."
/>
</div>
</div>
</div>
</>
);
};


@ -0,0 +1,76 @@
import { css } from '@emotion/css';
import React, { ComponentProps } from 'react';
import { GrafanaTheme2 } from '@grafana/data';
import { Field, Icon, PopoverContent, ReactUtils, stylesFactory, Tooltip, useTheme2 } from '@grafana/ui';
interface EditorFieldProps extends ComponentProps<typeof Field> {
label: string;
children: React.ReactElement;
width?: number | string;
optional?: boolean;
tooltip?: PopoverContent;
}
export const EditorField: React.FC<EditorFieldProps> = (props) => {
const { label, optional, tooltip, children, width, ...fieldProps } = props;
const theme = useTheme2();
const styles = getStyles(theme, width);
// Null check for backward compatibility
const childInputId = fieldProps?.htmlFor || ReactUtils?.getChildId(children);
const labelEl = (
<>
<label className={styles.label} htmlFor={childInputId}>
{label}
{optional && <span className={styles.optional}> - optional</span>}
{tooltip && (
<Tooltip placement="top" content={tooltip} theme="info">
<Icon name="info-circle" size="sm" className={styles.icon} />
</Tooltip>
)}
</label>
<span className={styles.space} />
</>
);
return (
<div className={styles.root}>
<Field className={styles.field} label={labelEl} {...fieldProps}>
{children}
</Field>
</div>
);
};
const getStyles = stylesFactory((theme: GrafanaTheme2, width?: number | string) => {
return {
space: css({
paddingRight: theme.spacing(0),
paddingBottom: theme.spacing(0.5),
}),
root: css({
minWidth: theme.spacing(width ?? 0),
}),
label: css({
fontSize: 12,
fontWeight: theme.typography.fontWeightMedium,
}),
optional: css({
fontStyle: 'italic',
color: theme.colors.text.secondary,
}),
field: css({
marginBottom: 0, // GrafanaUI/Field has a bottom margin which we must remove
}),
icon: css({
color: theme.colors.text.secondary,
marginLeft: theme.spacing(1),
':hover': {
color: theme.colors.text.primary,
},
}),
};
});


@ -0,0 +1,34 @@
import { css } from '@emotion/css';
import React from 'react';
import { GrafanaTheme2 } from '@grafana/data';
import { useStyles2 } from '@grafana/ui';
import { Stack } from './Stack';
interface EditorRowProps {
children: React.ReactNode;
stackProps?: Partial<React.ComponentProps<typeof Stack>>;
}
export const EditorRow: React.FC<EditorRowProps> = ({ children, stackProps }) => {
const styles = useStyles2(getStyles);
return (
<div className={styles.root}>
<Stack gap={2} {...stackProps}>
{children}
</Stack>
</div>
);
};
const getStyles = (theme: GrafanaTheme2) => {
return {
root: css({
padding: theme.spacing(1),
backgroundColor: theme.colors.background.secondary,
borderRadius: theme.shape.borderRadius(1),
}),
};
};


@ -0,0 +1,15 @@
import React from 'react';
import { Stack } from './Stack';
interface EditorRowsProps {
children: React.ReactNode;
}
export const EditorRows: React.FC<EditorRowsProps> = ({ children }) => {
return (
<Stack gap={0.5} direction="column">
{children}
</Stack>
);
};


@ -0,0 +1,154 @@
import { css } from '@emotion/css';
import React, { useEffect, useRef } from 'react';
import { useLatest } from 'react-use';
import { GrafanaTheme2 } from '@grafana/data';
import { CodeEditor, Monaco, useStyles2, monacoTypes } from '@grafana/ui';
import { languageDefinition } from '../fireql';
import { SeriesMessage } from '../types';
import { CompletionProvider } from './autocomplete';
interface Props {
value: string;
onChange: (val: string) => void;
onRunQuery: (value: string) => void;
series?: SeriesMessage;
}
export function LabelsEditor(props: Props) {
const setupAutocompleteFn = useAutocomplete(props.series);
const styles = useStyles2(getStyles);
const onRunQueryRef = useLatest(props.onRunQuery);
const containerRef = useRef<HTMLDivElement>(null);
return (
<div
className={styles.wrapper}
// NOTE: we will be setting inline-style-width/height on this element
ref={containerRef}
>
<CodeEditor
value={props.value}
language={langId}
onBlur={props.onChange}
containerStyles={styles.queryField}
monacoOptions={{
folding: false,
fontSize: 14,
lineNumbers: 'off',
overviewRulerLanes: 0,
renderLineHighlight: 'none',
scrollbar: {
vertical: 'hidden',
verticalScrollbarSize: 8, // used as "padding-right"
horizontal: 'hidden',
horizontalScrollbarSize: 0,
},
scrollBeyondLastLine: false,
wordWrap: 'on',
padding: {
top: 5,
bottom: 6,
},
}}
onBeforeEditorMount={ensureFireQL}
onEditorDidMount={(editor, monaco) => {
setupAutocompleteFn(editor, monaco);
const updateElementHeight = () => {
const containerDiv = containerRef.current;
if (containerDiv !== null) {
const pixelHeight = editor.getContentHeight();
containerDiv.style.height = `${pixelHeight + EDITOR_HEIGHT_OFFSET}px`;
containerDiv.style.width = '100%';
const pixelWidth = containerDiv.clientWidth;
editor.layout({ width: pixelWidth, height: pixelHeight });
}
};
editor.onDidContentSizeChange(updateElementHeight);
updateElementHeight();
editor.addCommand(monaco.KeyMod.Shift | monaco.KeyCode.Enter, () => {
onRunQueryRef.current(editor.getValue());
});
}}
/>
</div>
);
}
// This number was chosen by testing various values. It might be necessary
// because of the width of the border, not sure.
// It needs to do 2 things:
// 1. when the editor is single-line, it should make the editor height visually correct
// 2. when the editor is multi-line, the editor should not be "scrollable" (meaning that
//    a scroll movement in the editor would shift the content by a couple of pixels
//    up & down, which we want to avoid)
const EDITOR_HEIGHT_OFFSET = 2;
/**
 * Hook that returns a function that will set up Monaco autocomplete for the label selector
*/
function useAutocomplete(series?: SeriesMessage) {
const providerRef = useRef<CompletionProvider>(new CompletionProvider());
useEffect(() => {
if (series) {
// When we have the value we will pass it to the CompletionProvider
providerRef.current.setSeries(series);
}
}, [series]);
const autocompleteDisposeFun = useRef<(() => void) | null>(null);
useEffect(() => {
// when we unmount, we unregister the autocomplete-function, if it was registered
return () => {
autocompleteDisposeFun.current?.();
};
}, []);
// This should be run in monaco onEditorDidMount
return (editor: monacoTypes.editor.IStandaloneCodeEditor, monaco: Monaco) => {
providerRef.current.editor = editor;
providerRef.current.monaco = monaco;
const { dispose } = monaco.languages.registerCompletionItemProvider(langId, providerRef.current);
autocompleteDisposeFun.current = dispose;
};
}
// we must only run the setup code once
let fireqlSetupDone = false;
const langId = 'fireql';
function ensureFireQL(monaco: Monaco) {
if (fireqlSetupDone === false) {
fireqlSetupDone = true;
const { aliases, extensions, mimetypes, def } = languageDefinition;
monaco.languages.register({ id: langId, aliases, extensions, mimetypes });
monaco.languages.setMonarchTokensProvider(langId, def.language);
monaco.languages.setLanguageConfiguration(langId, def.languageConfiguration);
}
}
const getStyles = (theme: GrafanaTheme2) => {
return {
queryField: css`
flex: 1;
// Not exactly sure why, but without this the editor does not shrink after resizing (so you can make it bigger but not
// smaller). At the same time this does not actually make the editor 100px, because it has flex: 1, so I assume
// this should sort of act as a flex-basis (but flex-basis does not work for this). So yeah, CSS magic.
width: 100px;
`,
wrapper: css`
display: flex;
flex: 1;
border: 1px solid rgba(36, 41, 46, 0.3);
border-radius: 2px;
`,
};
};

View File

@ -0,0 +1,114 @@
import { screen } from '@testing-library/dom';
import { render } from '@testing-library/react';
import userEvent from '@testing-library/user-event';
import React from 'react';
import { CoreApp, PluginType } from '@grafana/data';
import { FireDataSource } from '../datasource';
import { ProfileTypeMessage } from '../types';
import { Props, QueryEditor } from './QueryEditor';
describe('QueryEditor', () => {
it('should render without error', async () => {
setup();
expect(await screen.findByText('process_cpu - cpu')).toBeDefined();
});
it('should render options', async () => {
setup();
await openOptions();
expect(screen.getByText(/Metric/)).toBeDefined();
expect(screen.getByText(/Profile/)).toBeDefined();
expect(screen.getByText(/Both/)).toBeDefined();
expect(screen.getByText(/Group by/)).toBeDefined();
});
it('should render correct options outside of explore', async () => {
setup({ props: { app: CoreApp.Dashboard } });
await openOptions();
expect(screen.getByText(/Metric/)).toBeDefined();
expect(screen.getByText(/Profile/)).toBeDefined();
expect(screen.queryAllByText(/Both/).length).toBe(0);
});
});
async function openOptions() {
const options = screen.getByText(/Options/);
expect(options).toBeDefined();
await userEvent.click(options);
}
function setup(options: { props: Partial<Props> } = { props: {} }) {
const onChange = jest.fn();
const ds = new FireDataSource({
name: 'test',
uid: 'test',
type: PluginType.datasource,
access: 'proxy',
id: 1,
jsonData: {},
meta: {
name: '',
id: '',
type: PluginType.datasource,
baseUrl: '',
info: {
author: {
name: '',
},
description: '',
links: [],
logos: {
large: '',
small: '',
},
screenshots: [],
updated: '',
version: '',
},
module: '',
},
readOnly: false,
});
ds.getProfileTypes = jest.fn().mockResolvedValue([
{
name: 'process_cpu',
ID: 'process_cpu:cpu',
period_type: 'day',
period_unit: 's',
sample_unit: 'ms',
sample_type: 'cpu',
},
{
name: 'memory',
ID: 'memory:memory',
period_type: 'day',
period_unit: 's',
sample_unit: 'ms',
sample_type: 'memory',
},
] as ProfileTypeMessage[]);
const utils = render(
<QueryEditor
query={{
queryType: 'both',
labelSelector: '',
profileTypeId: 'process_cpu:cpu',
refId: 'A',
groupBy: [],
}}
datasource={ds}
onChange={onChange}
onRunQuery={() => {}}
app={CoreApp.Explore}
{...options.props}
/>
);
return { ...utils, onChange };
}

View File

@ -0,0 +1,127 @@
import { defaults } from 'lodash';
import React, { useEffect, useMemo, useState } from 'react';
import { useAsync } from 'react-use';
import { CoreApp, QueryEditorProps } from '@grafana/data';
import { ButtonCascader, CascaderOption } from '@grafana/ui';
import { FireDataSource } from '../datasource';
import { defaultQuery, FireDataSourceOptions, ProfileTypeMessage, Query } from '../types';
import { EditorRow } from './EditorRow';
import { EditorRows } from './EditorRows';
import { LabelsEditor } from './LabelsEditor';
import { QueryOptions } from './QueryOptions';
export type Props = QueryEditorProps<FireDataSource, Query, FireDataSourceOptions>;
export function QueryEditor(props: Props) {
const profileTypes = useProfileTypes(props.datasource);
function onProfileTypeChange(value: string[], selectedOptions: CascaderOption[]) {
if (selectedOptions.length === 0) {
return;
}
const id = selectedOptions[selectedOptions.length - 1].value;
if (typeof id !== 'string') {
throw new Error('id is not string');
}
props.onChange({ ...props.query, profileTypeId: id });
}
function onLabelSelectorChange(value: string) {
props.onChange({ ...props.query, labelSelector: value });
}
function handleRunQuery(value: string) {
props.onChange({ ...props.query, labelSelector: value });
props.onRunQuery();
}
const seriesResult = useAsync(() => {
return props.datasource.getSeries();
}, [props.datasource]);
const cascaderOptions = useCascaderOptions(profileTypes);
const selectedProfileName = useProfileName(profileTypes, props.query.profileTypeId);
let query = normalizeQuery(props.query, props.app);
return (
<EditorRows>
<EditorRow stackProps={{ wrap: false, gap: 1 }}>
<ButtonCascader onChange={onProfileTypeChange} options={cascaderOptions} buttonProps={{ variant: 'secondary' }}>
{selectedProfileName}
</ButtonCascader>
<LabelsEditor
value={query.labelSelector}
onChange={onLabelSelectorChange}
onRunQuery={handleRunQuery}
series={seriesResult.value}
/>
</EditorRow>
<EditorRow>
<QueryOptions query={query} onQueryChange={props.onChange} app={props.app} series={seriesResult.value} />
</EditorRow>
</EditorRows>
);
}
// Turn profileTypes into cascader options
function useCascaderOptions(profileTypes: ProfileTypeMessage[]) {
return useMemo(() => {
let mainTypes = new Map<string, CascaderOption>();
// Classify profile types by name then sample type.
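// Illustrative example (values assumed, not from the original code): profile types
//   { name: 'process_cpu', ID: 'process_cpu:cpu', sample_type: 'cpu' } and
//   { name: 'process_cpu', ID: 'process_cpu:samples', sample_type: 'samples' }
// become a single 'process_cpu' cascader option with 'cpu' and 'samples' as its children.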
for (let profileType of profileTypes) {
if (!mainTypes.has(profileType.name)) {
mainTypes.set(profileType.name, {
label: profileType.name,
value: profileType.ID,
children: [],
});
}
mainTypes.get(profileType.name)?.children?.push({
label: profileType.sample_type,
value: profileType.ID,
});
}
return Array.from(mainTypes.values());
}, [profileTypes]);
}
function useProfileTypes(datasource: FireDataSource) {
const [profileTypes, setProfileTypes] = useState<ProfileTypeMessage[]>([]);
useEffect(() => {
(async () => {
const profileTypes = await datasource.getProfileTypes();
setProfileTypes(profileTypes);
})();
}, [datasource]);
return profileTypes;
}
function useProfileName(profileTypes: ProfileTypeMessage[], profileTypeId: string) {
return useMemo(() => {
if (!profileTypes) {
return 'Loading';
}
const profile = profileTypes.find((type) => type.ID === profileTypeId);
if (!profile) {
return 'Select a profile type';
}
return profile.name + ' - ' + profile.sample_type;
}, [profileTypeId, profileTypes]);
}
export function normalizeQuery(query: Query, app?: CoreApp | string) {
let normalized = defaults(query, defaultQuery);
if (app !== CoreApp.Explore && normalized.queryType === 'both') {
// In dashboards and other places, we can't show both types of graphs at the same time.
// This will also be the default when a 'both' query is added from Explore to a dashboard.
normalized.queryType = 'profile';
}
return normalized;
}
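// For example (illustrative), a query saved from Explore with queryType: 'both' is normalized to
// queryType: 'profile' when rendered in a dashboard panel, while all other fields are kept as-is.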

View File

@ -0,0 +1,153 @@
import { css } from '@emotion/css';
import React from 'react';
import { useToggle } from 'react-use';
import { CoreApp, GrafanaTheme2, SelectableValue } from '@grafana/data';
import { Icon, useStyles2, RadioButtonGroup, MultiSelect } from '@grafana/ui';
import { Query, SeriesMessage } from '../types';
import { EditorField } from './EditorField';
import { Stack } from './Stack';
export interface Props {
query: Query;
onQueryChange: (query: Query) => void;
app?: CoreApp;
series?: SeriesMessage;
}
const typeOptions: Array<{ value: Query['queryType']; label: string; description: string }> = [
{ value: 'metrics', label: 'Metric', description: 'Return aggregated metrics' },
{ value: 'profile', label: 'Profile', description: 'Return profile' },
{ value: 'both', label: 'Both', description: 'Return both metric and profile data' },
];
function getTypeOptions(app?: CoreApp) {
if (app === CoreApp.Explore) {
return typeOptions;
}
return typeOptions.filter((option) => option.value !== 'both');
}
function getGroupByOptions(series?: SeriesMessage) {
let options: SelectableValue[] = [];
if (series) {
const labels = series.flatMap((val) => {
return val.labels.map((l) => l.name);
});
options = Array.from(new Set(labels)).map((l) => ({
label: l,
value: l,
}));
}
return options;
}
/**
 * Based on the QueryOptionGroup component from grafana/ui, which is not available yet.
*/
export function QueryOptions({ query, onQueryChange, app, series }: Props) {
const [isOpen, toggleOpen] = useToggle(false);
const styles = useStyles2(getStyles);
const typeOptions = getTypeOptions(app);
const groupByOptions = getGroupByOptions(series);
return (
<Stack gap={0} direction="column">
<div className={styles.header} onClick={toggleOpen} title="Click to edit options">
<div className={styles.toggle}>
<Icon name={isOpen ? 'angle-down' : 'angle-right'} />
</div>
<h6 className={styles.title}>Options</h6>
{!isOpen && (
<div className={styles.description}>
{[`Type: ${query.queryType}`, query.groupBy?.length ? `Group by: ${query.groupBy.join(', ')}` : undefined]
.filter((v) => v)
.map((v, i) => (
<span key={i}>{v}</span>
))}
</div>
)}
</div>
{isOpen && (
<div className={styles.body}>
<EditorField label={'Query Type'}>
<RadioButtonGroup
options={typeOptions}
value={query.queryType}
onChange={(value) => onQueryChange({ ...query, queryType: value })}
/>
</EditorField>
<EditorField
label={'Group by'}
tooltip={
<>
Used to group the metric result by a specific label or set of labels. Does not apply to the profile query.
</>
}
>
<MultiSelect
placeholder="Label"
value={query.groupBy}
allowCustomValue
options={groupByOptions}
onChange={(change) => {
const changes = change.map((c: SelectableValue<string>) => {
return c.value!;
});
onQueryChange({ ...query, groupBy: changes });
}}
/>
</EditorField>
</div>
)}
</Stack>
);
}
const getStyles = (theme: GrafanaTheme2) => {
return {
switchLabel: css({
color: theme.colors.text.secondary,
cursor: 'pointer',
fontSize: theme.typography.bodySmall.fontSize,
'&:hover': {
color: theme.colors.text.primary,
},
}),
header: css({
display: 'flex',
cursor: 'pointer',
alignItems: 'baseline',
color: theme.colors.text.primary,
'&:hover': {
background: theme.colors.emphasize(theme.colors.background.primary, 0.03),
},
}),
title: css({
flexGrow: 1,
overflow: 'hidden',
fontSize: theme.typography.bodySmall.fontSize,
fontWeight: theme.typography.fontWeightMedium,
margin: 0,
}),
description: css({
color: theme.colors.text.secondary,
fontSize: theme.typography.bodySmall.fontSize,
paddingLeft: theme.spacing(2),
gap: theme.spacing(2),
display: 'flex',
}),
body: css({
display: 'flex',
paddingTop: theme.spacing(2),
gap: theme.spacing(2),
flexWrap: 'wrap',
}),
toggle: css({
color: theme.colors.text.secondary,
marginRight: `${theme.spacing(1)}`,
}),
};
};

View File

@ -0,0 +1,30 @@
import { css } from '@emotion/css';
import React, { CSSProperties, useCallback } from 'react';
import { GrafanaTheme2 } from '@grafana/data';
import { useStyles2 } from '@grafana/ui';
interface StackProps {
direction?: CSSProperties['flexDirection'];
alignItems?: CSSProperties['alignItems'];
wrap?: boolean;
gap?: number;
flexGrow?: CSSProperties['flexGrow'];
children: React.ReactNode;
}
export function Stack(props: StackProps) {
const styles = useStyles2(useCallback((theme) => getStyles(theme, props), [props]));
return <div className={styles.root}>{props.children}</div>;
}
const getStyles = (theme: GrafanaTheme2, props: StackProps) => ({
root: css({
display: 'flex',
flexDirection: props.direction ?? 'row',
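// Wrapping defaults to on: (props.wrap ?? true) is evaluated before the ternary on the next line.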
flexWrap: props.wrap ?? true ? 'wrap' : undefined,
alignItems: props.alignItems,
gap: theme.spacing(props.gap ?? 2),
flexGrow: props.flexGrow,
}),
});

View File

@ -0,0 +1,90 @@
import { monacoTypes, Monaco } from '@grafana/ui';
import { SeriesMessage } from '../types';
import { CompletionProvider } from './autocomplete';
describe('CompletionProvider', () => {
it('suggests labels', () => {
const { provider, model } = setup('{}', 1, defaultLabels);
const result = provider.provideCompletionItems(model, {} as monacoTypes.Position);
expect((result! as monacoTypes.languages.CompletionList).suggestions).toEqual([
expect.objectContaining({ label: 'foo', insertText: 'foo' }),
]);
});
it('suggests label names with quotes', () => {
const { provider, model } = setup('{foo=}', 6, defaultLabels);
const result = provider.provideCompletionItems(model, {} as monacoTypes.Position);
expect((result! as monacoTypes.languages.CompletionList).suggestions).toEqual([
expect.objectContaining({ label: 'bar', insertText: '"bar"' }),
]);
});
it('suggests label names without quotes', () => {
const { provider, model } = setup('{foo="}', 7, defaultLabels);
const result = provider.provideCompletionItems(model, {} as monacoTypes.Position);
expect((result! as monacoTypes.languages.CompletionList).suggestions).toEqual([
expect.objectContaining({ label: 'bar', insertText: 'bar' }),
]);
});
it('suggests nothing without labels', () => {
const { provider, model } = setup('{foo="}', 7, []);
const result = provider.provideCompletionItems(model, {} as monacoTypes.Position);
expect((result! as monacoTypes.languages.CompletionList).suggestions).toEqual([]);
});
it('suggests labels on empty input', () => {
const { provider, model } = setup('', 0, defaultLabels);
const result = provider.provideCompletionItems(model, {} as monacoTypes.Position);
expect((result! as monacoTypes.languages.CompletionList).suggestions).toEqual([
expect.objectContaining({ label: 'foo', insertText: '{foo="' }),
]);
});
});
const defaultLabels = [{ labels: [{ name: 'foo', value: 'bar' }] }];
function setup(value: string, offset: number, series?: SeriesMessage) {
const provider = new CompletionProvider();
if (series) {
provider.setSeries(series);
}
const model = makeModel(value, offset);
provider.monaco = {
Range: {
fromPositions() {
return null;
},
},
languages: {
CompletionItemKind: {
Enum: 1,
EnumMember: 2,
},
},
} as unknown as Monaco;
provider.editor = {
getModel() {
return model;
},
} as monacoTypes.editor.IStandaloneCodeEditor;
return { provider, model };
}
function makeModel(value: string, offset: number) {
return {
id: 'test_monaco',
getWordAtPosition() {
return null;
},
getOffsetAt() {
return offset;
},
getValue() {
return value;
},
} as unknown as monacoTypes.editor.ITextModel;
}

View File

@ -0,0 +1,238 @@
import { monacoTypes, Monaco } from '@grafana/ui';
import { SeriesMessage } from '../types';
/**
* Class that implements CompletionItemProvider interface and allows us to provide suggestion for the Monaco
* autocomplete system.
*
 * At this moment we just pass it all the labels/values we get from the Fire backend; later on we may do something a bit
 * smarter if there are lots of labels.
*/
export class CompletionProvider implements monacoTypes.languages.CompletionItemProvider {
triggerCharacters = ['{', ',', '[', '(', '=', '~', ' ', '"'];
// We set these directly and they are required for the provider to function.
monaco: Monaco | undefined;
editor: monacoTypes.editor.IStandaloneCodeEditor | undefined;
private labels: { [label: string]: Set<string> } = {};
provideCompletionItems(
model: monacoTypes.editor.ITextModel,
position: monacoTypes.Position
): monacoTypes.languages.ProviderResult<monacoTypes.languages.CompletionList> {
// Should not happen, this should not be called before it is initialized
if (!(this.monaco && this.editor)) {
throw new Error('provideCompletionItems called before CompletionProvider was initialized');
}
// if the model-id does not match, then this call is from a different editor-instance,
// not "our instance", so return nothing
if (this.editor.getModel()?.id !== model.id) {
return { suggestions: [] };
}
const { range, offset } = getRangeAndOffset(this.monaco, model, position);
const situation = getSituation(model.getValue(), offset);
const completionItems = this.getCompletions(situation);
// Monaco by default orders the items alphabetically.
// To stop it, we use a number-as-string sort key,
// so that Monaco keeps the order we use.
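// e.g. with 12 completion items maxIndexDigits is 2, so the sort keys become '00', '01', ..., '11' (illustrative).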
const maxIndexDigits = completionItems.length.toString().length;
const suggestions: monacoTypes.languages.CompletionItem[] = completionItems.map((item, index) => ({
kind: getMonacoCompletionItemKind(item.type, this.monaco!),
label: item.label,
insertText: item.insertText,
sortText: index.toString().padStart(maxIndexDigits, '0'), // to force the order we have
range,
}));
return { suggestions };
}
/**
 * We expect the data directly from the request and transform it here. We do some deduplication and turn it into an
 * object for quicker lookup, as we usually need either a list of label names or the values of a particular label.
*/
setSeries(series: SeriesMessage) {
this.labels = series.reduce<{ [label: string]: Set<string> }>((acc, serie) => {
const seriesLabels = serie.labels.reduce<{ [label: string]: Set<string> }>((acc, labelValue) => {
acc[labelValue.name] = acc[labelValue.name] || new Set();
acc[labelValue.name].add(labelValue.value);
return acc;
}, {});
for (const label of Object.keys(seriesLabels)) {
acc[label] = new Set([...(acc[label] || []), ...seriesLabels[label]]);
}
return acc;
}, {});
}
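// Illustrative example (assumed values, not from the original code):
//   setSeries([{ labels: [{ name: 'job', value: 'api' }] }, { labels: [{ name: 'job', value: 'worker' }] }])
// leaves this.labels as { job: Set(['api', 'worker']) }, which getCompletions reads from below.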
/**
 * Get suggestions based on the situation we are in, e.g. whether we should suggest label names or label values.
* @param situation
* @private
*/
private getCompletions(situation: Situation): Completion[] {
if (!Object.keys(this.labels).length) {
return [];
}
switch (situation.type) {
// Not really sure what would make sense to suggest in this case, so just leave it empty
case 'UNKNOWN': {
return [];
}
case 'EMPTY': {
return Object.keys(this.labels).map((key) => {
return {
label: key,
insertText: `{${key}="`,
type: 'LABEL_NAME',
};
});
}
case 'IN_LABEL_NAME':
return Object.keys(this.labels).map((key) => {
return {
label: key,
insertText: key,
type: 'LABEL_NAME',
};
});
case 'IN_LABEL_VALUE':
return Array.from(this.labels[situation.labelName].values()).map((key) => {
return {
label: key,
insertText: situation.betweenQuotes ? key : `"${key}"`,
type: 'LABEL_VALUE',
};
});
default:
throw new Error(`Unexpected situation ${situation}`);
}
}
}
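// Minimal wiring sketch (assumed usage; mirrors what LabelsEditor above does via useAutocomplete):
//   const provider = new CompletionProvider();
//   provider.setSeries(series);
//   provider.editor = editor;
//   provider.monaco = monaco;
//   const { dispose } = monaco.languages.registerCompletionItemProvider('fireql', provider);
//   // call dispose() on unmount to unregister the provider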
/**
 * Get the item kind, which is used for the icon next to the suggestion.
* @param type
* @param monaco
*/
function getMonacoCompletionItemKind(type: CompletionType, monaco: Monaco): monacoTypes.languages.CompletionItemKind {
switch (type) {
case 'LABEL_NAME':
return monaco.languages.CompletionItemKind.Enum;
case 'LABEL_VALUE':
return monaco.languages.CompletionItemKind.EnumMember;
default:
throw new Error(`Unexpected CompletionType: ${type}`);
}
}
export type CompletionType = 'LABEL_NAME' | 'LABEL_VALUE';
type Completion = {
type: CompletionType;
label: string;
insertText: string;
};
export type Label = {
name: string;
value: string;
};
export type Situation =
| {
type: 'UNKNOWN';
}
| {
type: 'EMPTY';
}
| {
type: 'IN_LABEL_NAME';
otherLabels: Label[];
}
| {
type: 'IN_LABEL_VALUE';
labelName: string;
betweenQuotes: boolean;
otherLabels: Label[];
};
const labelNameRegex = /[a-zA-Z_][a-zA-Z0-9_]*/;
const labelValueRegex = /[^"]*/; // anything except a double quote
const labelPairsRegex = new RegExp(`(${labelNameRegex.source})="(${labelValueRegex.source})"`, 'g');
const inLabelValueRegex = new RegExp(`(${labelNameRegex.source})=("?)${labelValueRegex.source}$`);
const inLabelNameRegex = new RegExp(/[{,]\s*[a-zA-Z0-9_]*$/);
/**
 * Figure out where the cursor is and what kind of suggestions are appropriate.
 * As Fire currently handles just simple {foo="bar", baz="zyx"} style selectors, a simple regex is enough to figure
 * out where the cursor is.
* @param text
* @param offset
*/
function getSituation(text: string, offset: number): Situation {
if (text === '') {
return {
type: 'EMPTY',
};
}
// Get all the labels so far in the query, so we can do some more filtering.
const matches = text.matchAll(labelPairsRegex);
const existingLabels = Array.from(matches).reduce<Label[]>((acc, match) => {
// match[0] is the full pair, groups 1 and 2 are the label name and value
const [_, name, value] = match;
acc.push({ name, value });
return acc;
}, []);
// Check if we are editing a label value right now. If so also get name of the label
const matchLabelValue = text.substring(0, offset).match(inLabelValueRegex);
if (matchLabelValue) {
return {
type: 'IN_LABEL_VALUE',
labelName: matchLabelValue[1],
betweenQuotes: !!matchLabelValue[2],
otherLabels: existingLabels,
};
}
// Check if we are editing a label name
const matchLabelName = text.substring(0, offset).match(inLabelNameRegex);
if (matchLabelName) {
return {
type: 'IN_LABEL_NAME',
otherLabels: existingLabels,
};
}
// Will happen only if user writes something that isn't really a label selector
return {
type: 'UNKNOWN',
};
}
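// Illustrative examples of how the cursor position maps to a situation (assumed inputs, not from the original code):
//   getSituation('', 0)          -> { type: 'EMPTY' }
//   getSituation('{', 1)         -> { type: 'IN_LABEL_NAME', otherLabels: [] }
//   getSituation('{foo=', 5)     -> { type: 'IN_LABEL_VALUE', labelName: 'foo', betweenQuotes: false, otherLabels: [] }
//   getSituation('{foo="ba', 8)  -> { type: 'IN_LABEL_VALUE', labelName: 'foo', betweenQuotes: true, otherLabels: [] }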
function getRangeAndOffset(monaco: Monaco, model: monacoTypes.editor.ITextModel, position: monacoTypes.Position) {
const word = model.getWordAtPosition(position);
const range =
word != null
? monaco.Range.lift({
startLineNumber: position.lineNumber,
endLineNumber: position.lineNumber,
startColumn: word.startColumn,
endColumn: word.endColumn,
})
: monaco.Range.fromPositions(position);
// documentation says `position` will be "adjusted" in `getOffsetAt`, so we clone it here just to be sure.
const positionClone = {
column: position.column,
lineNumber: position.lineNumber,
};
const offset = model.getOffsetAt(positionClone);
return { offset, range };
}

View File

@ -0,0 +1,48 @@
import { Observable, of } from 'rxjs';
import { DataQueryRequest, DataQueryResponse, DataSourceInstanceSettings } from '@grafana/data';
import { DataSourceWithBackend } from '@grafana/runtime';
import { normalizeQuery } from './QueryEditor/QueryEditor';
import { FireDataSourceOptions, Query, ProfileTypeMessage, SeriesMessage } from './types';
export class FireDataSource extends DataSourceWithBackend<Query, FireDataSourceOptions> {
constructor(instanceSettings: DataSourceInstanceSettings<FireDataSourceOptions>) {
super(instanceSettings);
}
query(request: DataQueryRequest<Query>): Observable<DataQueryResponse> {
const validTargets = request.targets
.filter((t) => t.profileTypeId)
.map((t) => {
// An empty string errors out, but it seems like we can just normalize it this way
if (t.labelSelector === '') {
return {
...t,
labelSelector: '{}',
};
}
return normalizeQuery(t, request.app);
});
if (!validTargets.length) {
return of({ data: [] });
}
return super.query({
...request,
targets: validTargets,
});
}
async getProfileTypes(): Promise<ProfileTypeMessage[]> {
return await super.getResource('profileTypes');
}
async getSeries(): Promise<SeriesMessage> {
// For now, we send an empty matcher to get all the series
return await super.getResource('series', { matchers: ['{}'] });
}
async getLabelNames(): Promise<string[]> {
return await super.getResource('labelNames');
}
}

View File

@ -0,0 +1,97 @@
import type { languages } from 'monaco-editor';
export const languageConfiguration: languages.LanguageConfiguration = {
// the default separators except `@$`
wordPattern: /(-?\d*\.\d\w*)|([^`~!#%^&*()\-=+\[{\]}\\|;:'",.<>\/?\s]+)/g,
brackets: [['{', '}']],
autoClosingPairs: [
{ open: '{', close: '}' },
{ open: '"', close: '"' },
{ open: "'", close: "'" },
],
surroundingPairs: [
{ open: '{', close: '}' },
{ open: '"', close: '"' },
{ open: "'", close: "'" },
],
folding: {},
};
export const language: languages.IMonarchLanguage = {
ignoreCase: false,
defaultToken: '',
tokenPostfix: '.fireql',
keywords: [],
operators: [],
// we include these common regular expressions
symbols: /[=><!~?:&|+\-*\/^%]+/,
escapes: /\\(?:[abfnrtv\\"']|x[0-9A-Fa-f]{1,4}|u[0-9A-Fa-f]{4}|U[0-9A-Fa-f]{8})/,
digits: /\d+(_+\d+)*/,
octaldigits: /[0-7]+(_+[0-7]+)*/,
binarydigits: /[0-1]+(_+[0-1]+)*/,
hexdigits: /[0-9a-fA-F]+(_+[0-9a-fA-F]+)*/,
integersuffix: /(ll|LL|u|U|l|L)?(ll|LL|u|U|l|L)?/,
floatsuffix: /[fFlL]?/,
tokenizer: {
root: [
// labels
[/[a-z_]\w*(?=\s*(=|!=|=~|!~))/, 'tag'],
// strings
[/"([^"\\]|\\.)*$/, 'string.invalid'], // non-teminated string
[/'([^'\\]|\\.)*$/, 'string.invalid'], // non-teminated string
[/"/, 'string', '@string_double'],
[/'/, 'string', '@string_single'],
// whitespace
{ include: '@whitespace' },
// delimiters and operators
[/[{}()\[\]]/, '@brackets'],
[/[<>](?!@symbols)/, '@brackets'],
[
/@symbols/,
{
cases: {
'@operators': 'delimiter',
'@default': '',
},
},
],
// numbers
[/\d+/, 'number'],
[/\d*\d+[eE]([\-+]?\d+)?(@floatsuffix)/, 'number.float'],
[/\d*\.\d+([eE][\-+]?\d+)?(@floatsuffix)/, 'number.float'],
[/0[xX][0-9a-fA-F']*[0-9a-fA-F](@integersuffix)/, 'number.hex'],
[/0[0-7']*[0-7](@integersuffix)/, 'number.octal'],
[/0[bB][0-1']*[0-1](@integersuffix)/, 'number.binary'],
[/\d[\d']*\d(@integersuffix)/, 'number'],
[/\d(@integersuffix)/, 'number'],
],
string_double: [
[/[^\\"]+/, 'string'],
[/@escapes/, 'string.escape'],
[/\\./, 'string.escape.invalid'],
[/"/, 'string', '@pop'],
],
string_single: [
[/[^\\']+/, 'string'],
[/@escapes/, 'string.escape'],
[/\\./, 'string.escape.invalid'],
[/'/, 'string', '@pop'],
],
clauses: [
[/[^(,)]/, 'tag'],
[/\)/, 'identifier', '@pop'],
],
whitespace: [[/[ \t\r\n]+/, 'white']],
},
};

View File

@ -0,0 +1,12 @@
import { language, languageConfiguration } from './fireql';
export const languageDefinition = {
id: 'fireql',
extensions: ['.fireql'],
aliases: ['fire', 'fireql'],
mimetypes: [],
def: {
language,
languageConfiguration,
},
};

View File

@ -0,0 +1 @@
<?xml version="1.0" encoding="UTF-8"?><svg id="Layer_2" xmlns="http://www.w3.org/2000/svg" xmlns:xlink="http://www.w3.org/1999/xlink" viewBox="0 0 140.07 151.15"><defs><style>.cls-1{fill:url(#linear-gradient);}.cls-2{fill:url(#linear-gradient-8);}.cls-3{fill:url(#linear-gradient-3);}.cls-4{fill:url(#linear-gradient-4);}.cls-5{fill:url(#linear-gradient-2);}.cls-6{fill:url(#linear-gradient-6);}.cls-7{fill:url(#linear-gradient-7);}.cls-8{fill:url(#linear-gradient-5);}</style><linearGradient id="linear-gradient" x1="556.29" y1="168.71" x2="674.41" y2="28.91" gradientTransform="translate(-556.16) skewX(-8)" gradientUnits="userSpaceOnUse"><stop offset="0" stop-color="#ffef00"/><stop offset="1" stop-color="#ed5a27"/></linearGradient><linearGradient id="linear-gradient-2" x1="524.45" y1="141.81" x2="642.57" y2="2.01" xlink:href="#linear-gradient"/><linearGradient id="linear-gradient-3" x1="546.04" y1="160.05" x2="664.16" y2="20.25" xlink:href="#linear-gradient"/><linearGradient id="linear-gradient-4" x1="561.98" y1="173.52" x2="680.1" y2="33.71" xlink:href="#linear-gradient"/><linearGradient id="linear-gradient-5" x1="535.6" y1="121.34" x2="655.87" y2="118.41" xlink:href="#linear-gradient"/><linearGradient id="linear-gradient-6" x1="536.16" y1="144.33" x2="656.43" y2="141.41" xlink:href="#linear-gradient"/><linearGradient id="linear-gradient-7" x1="613.28" y1="33.92" x2="517.45" y2="-5.31" xlink:href="#linear-gradient"/><linearGradient id="linear-gradient-8" x1="613.28" y1="33.92" x2="517.45" y2="-5.31" xlink:href="#linear-gradient"/></defs><g id="Layer_1-2"><g><g><path class="cls-1" d="M126.18,82.3c6.34-7.66,10.91-16.78,12.86-26.62h-39.74c-2.18,5.73-7.94,10.01-14.09,10.01H13.33l-2.34,16.61H126.18Z"/><path class="cls-5" d="M42.55,13.99l6.66,2.7h81.13C122.18,6.5,109.49,.12,94.43,.12H29.32c2.23,6.34,6.94,11.32,13.23,13.87Z"/><path class="cls-3" d="M48.24,22.87l-7.32,2.66c-7.06,2.57-13.22,7.62-17.23,14.04H88.89c6.12,0,10.65,4.23,11.26,9.92h39.75c.8-9.84-1.22-18.96-5.43-26.62H48.24Z"/><path class="cls-4" d="M10.13,88.48l-2.11,15.01-.23,1.65H79.67c15.1,0,29.63-6.41,40.66-16.66H10.13Z"/><polygon class="cls-8" points="44.06 128.3 46.45 111.32 6.92 111.32 4.53 128.3 44.06 128.3"/><polygon class="cls-6" points="3.66 134.48 1.32 151.15 40.85 151.15 43.19 134.48 3.66 134.48"/></g><g><path class="cls-7" d="M39.52,19.76C30.96,16.29,24.74,9.05,22.54,0,17.8,9.05,9.53,16.29,0,19.76c8.56,3.47,14.78,10.72,16.98,19.76,4.74-9.05,13-16.29,22.54-19.76Z"/><path class="cls-2" d="M22.54,0c2.2,9.05,8.43,16.29,16.98,19.76-9.53,3.47-17.8,10.72-22.54,19.76C14.78,30.48,8.56,23.23,0,19.76,9.53,16.29,17.8,9.05,22.54,0"/></g></g></g></svg>

After

Width:  |  Height:  |  Size: 2.6 KiB

View File

@ -0,0 +1,10 @@
import { DataSourcePlugin } from '@grafana/data';
import { ConfigEditor } from './ConfigEditor';
import { QueryEditor } from './QueryEditor/QueryEditor';
import { FireDataSource } from './datasource';
import { Query, FireDataSourceOptions } from './types';
export const plugin = new DataSourcePlugin<FireDataSource, Query, FireDataSourceOptions>(FireDataSource)
.setConfigEditor(ConfigEditor)
.setQueryEditor(QueryEditor);

View File

@ -0,0 +1,33 @@
{
"type": "datasource",
"name": "Phlare",
"id": "phlare",
"category": "profiling",
"metrics": true,
"alerting": false,
"annotations": false,
"logs": false,
"streaming": false,
"tracing": false,
"backend": true,
"info": {
"description": "Horizontally-scalable, highly-available, multi-tenant continuous profiling aggregation system. OSS profiling solution from Grafana Labs.",
"author": {
"name": "Grafana Labs",
"url": "https://www.grafana.com"
},
"keywords": ["grafana", "datasource", "phlare", "flamegraph"],
"logos": {
"small": "img/phlare_icon_color.svg",
"large": "img/phlare_icon_color.svg"
},
"links": [
{
"name": "GitHub Project",
"url": "https://github.com/grafana/phlare"
}
]
}
}

View File

@ -0,0 +1,32 @@
import { DataQuery, DataSourceJsonData } from '@grafana/data';
export interface Query extends DataQuery {
labelSelector: string;
profileTypeId: string;
queryType: 'metrics' | 'profile' | 'both';
groupBy: string[];
}
export interface ProfileTypeMessage {
ID: string;
name: string;
period_type: string;
period_unit: string;
sample_type: string;
sample_unit: string;
}
export type SeriesMessage = Array<{ labels: Array<{ name: string; value: string }> }>;
export const defaultQuery: Partial<Query> = {
labelSelector: '{}',
queryType: 'both',
groupBy: [],
};
/**
* These are options configured for each DataSource instance.
*/
export interface FireDataSourceOptions extends DataSourceJsonData {
minStep?: string;
}