Tempo: TraceQL query response streaming (#69212)
* Refactor Tempo datasource backend to support multiple queryData types. Added traceId query type that is set when performing the request but doesn't map to a tab.
* WIP data is reaching the frontend
* WIP
* Use channels and goroutines
* Some fixes
* Simplify backend code. Return traces, metrics, state and error in a dataframe. Shared state type between FE and BE. Use getStream() instead of getQueryData()
* Handle errors in frontend
* Update Tempo and use same URL for RPC and HTTP
* Cleanup backend code
* Merge main
* Create grpc client only with host and authenticate
* Create grpc client only with host and authenticate
* Cleanup
* Add streaming to TraceQL Search tab
* Fix merge conflicts
* Added tests for processStream
* make gen-cue
* make gen-cue
* goimports
* lint
* Cleanup go.mod
* Comments
* Addressing PR comments
* Fix streaming for traceql search tab
* Added streaming kill switch as the disableTraceQLStreaming feature toggle
* Small comment
* Fix conflicts
* Correctly capture and send all errors as a DF to client
* Fix infinite error loop
* Fix merge conflicts
* Fix test
* Update deprecated import
* Fix feature toggles gen
* Fix merge conflicts
This commit is contained in:
parent fb2a57d3a3, commit c1709c9301
@@ -127,6 +127,7 @@ Experimental features might be changed or removed without prior notice.
| `awsDatasourcesTempCredentials` | Support temporary security credentials in AWS plugins for Grafana Cloud customers |
| `transformationsRedesign` | Enables the transformations redesign |
| `mlExpressions` | Enable support for Machine Learning in server-side expressions |
| `disableTraceQLStreaming` | Disables the option to stream the response of TraceQL queries of the Tempo data source |

## Development feature toggles
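For context, experimental toggles such as `disableTraceQLStreaming` are off by default and are usually switched on through the `[feature_toggles]` section of the Grafana configuration file. A minimal sketch (the exact configuration file path depends on the installation):

[feature_toggles]
# Kill switch added in this PR: turn off TraceQL response streaming in the Tempo data source
enable = disableTraceQLStreaming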
go.mod (50 changed lines)
@ -27,7 +27,7 @@ require k8s.io/apimachinery v0.26.2 // @grafana/backend-platform
|
||||
replace github.com/prometheus/prometheus => github.com/prometheus/prometheus v0.43.0
|
||||
|
||||
require (
|
||||
cloud.google.com/go/storage v1.28.1 // @grafana/backend-platform
|
||||
cloud.google.com/go/storage v1.30.1 // @grafana/backend-platform
|
||||
cuelang.org/go v0.6.0-0.dev // @grafana/grafana-as-code
|
||||
github.com/Azure/azure-sdk-for-go v65.0.0+incompatible // @grafana/backend-platform
|
||||
github.com/Azure/go-autorest/autorest v0.11.28 // @grafana/backend-platform
|
||||
@ -66,7 +66,7 @@ require (
|
||||
github.com/grpc-ecosystem/go-grpc-middleware v1.4.0 // @grafana/backend-platform
|
||||
github.com/hashicorp/go-hclog v1.5.0 // @grafana/plugins-platform-backend
|
||||
github.com/hashicorp/go-plugin v1.4.9 // @grafana/plugins-platform-backend
|
||||
github.com/hashicorp/go-version v1.3.0 // @grafana/backend-platform
|
||||
github.com/hashicorp/go-version v1.6.0 // @grafana/backend-platform
|
||||
github.com/influxdata/influxdb-client-go/v2 v2.6.0 // @grafana/observability-metrics
|
||||
github.com/influxdata/line-protocol v0.0.0-20210311194329-9aa0e372d097 // @grafana/grafana-app-platform-squad
|
||||
github.com/jmespath/go-jmespath v0.4.0 // @grafana/backend-platform
|
||||
@ -88,22 +88,21 @@ require (
|
||||
github.com/prometheus/client_golang v1.15.1 // @grafana/alerting-squad-backend
|
||||
github.com/prometheus/client_model v0.4.0 // @grafana/backend-platform
|
||||
github.com/prometheus/common v0.43.0 // @grafana/alerting-squad-backend
|
||||
github.com/prometheus/prometheus v1.8.2-0.20210621150501-ff58416a0b02 // @grafana/alerting-squad-backend
|
||||
github.com/prometheus/prometheus v1.8.2-0.20221021121301-51a44e6657c3 // @grafana/alerting-squad-backend
|
||||
github.com/robfig/cron/v3 v3.0.1 // @grafana/backend-platform
|
||||
github.com/russellhaering/goxmldsig v1.4.0 // @grafana/backend-platform
|
||||
github.com/stretchr/testify v1.8.4 // @grafana/backend-platform
|
||||
github.com/teris-io/shortid v0.0.0-20171029131806-771a37caa5cf // @grafana/backend-platform
|
||||
github.com/ua-parser/uap-go v0.0.0-20211112212520-00c877edfe0f // @grafana/backend-platform
|
||||
github.com/uber/jaeger-client-go v2.29.1+incompatible // indirect
|
||||
github.com/uber/jaeger-client-go v2.30.0+incompatible // indirect
|
||||
github.com/urfave/cli/v2 v2.25.0 // @grafana/backend-platform
|
||||
github.com/vectordotdev/go-datemath v0.1.1-0.20220323213446-f3954d0b18ae // @grafana/backend-platform
|
||||
github.com/yalue/merged_fs v1.2.2 // @grafana/grafana-as-code
|
||||
github.com/yudai/gojsondiff v1.0.0 // @grafana/backend-platform
|
||||
go.opentelemetry.io/collector v0.31.0 // @grafana/backend-platform
|
||||
go.opentelemetry.io/collector/model v0.31.0 // @grafana/backend-platform
|
||||
go.opentelemetry.io/collector/model v0.46.0 // @grafana/backend-platform
|
||||
go.opentelemetry.io/contrib/instrumentation/net/http/httptrace/otelhttptrace v0.37.0 // @grafana/grafana-operator-experience-squad
|
||||
go.opentelemetry.io/otel v1.14.0 // @grafana/alerting-squad-backend
|
||||
go.opentelemetry.io/otel/exporters/jaeger v1.0.0 // @grafana/backend-platform
|
||||
go.opentelemetry.io/otel/exporters/jaeger v1.10.0 // @grafana/backend-platform
|
||||
go.opentelemetry.io/otel/sdk v1.14.0 // @grafana/backend-platform
|
||||
go.opentelemetry.io/otel/trace v1.14.0 // @grafana/backend-platform
|
||||
golang.org/x/crypto v0.11.0 // @grafana/backend-platform
|
||||
@ -113,8 +112,8 @@ require (
|
||||
golang.org/x/sync v0.3.0 // @grafana/alerting-squad-backend
|
||||
golang.org/x/time v0.3.0 // @grafana/backend-platform
|
||||
golang.org/x/tools v0.7.0 // @grafana/grafana-as-code
|
||||
gonum.org/v1/gonum v0.11.0 // @grafana/observability-metrics
|
||||
google.golang.org/api v0.111.0 // @grafana/backend-platform
|
||||
gonum.org/v1/gonum v0.12.0 // @grafana/observability-metrics
|
||||
google.golang.org/api v0.114.0 // @grafana/backend-platform
|
||||
google.golang.org/grpc v1.55.0 // @grafana/plugins-platform-backend
|
||||
google.golang.org/protobuf v1.30.0 // @grafana/plugins-platform-backend
|
||||
gopkg.in/alexcesaro/quotedprintable.v3 v3.0.0-20150716171945-2caba252f4dc // indirect
|
||||
@ -141,7 +140,7 @@ require (
|
||||
github.com/apache/arrow/go/arrow v0.0.0-20211112161151-bc219186db40 // indirect
|
||||
github.com/asaskevich/govalidator v0.0.0-20230301143203-a9d515a09cc2 // indirect
|
||||
github.com/beorn7/perks v1.0.1 // indirect
|
||||
github.com/cenkalti/backoff/v4 v4.2.0 // indirect
|
||||
github.com/cenkalti/backoff/v4 v4.2.1 // indirect
|
||||
github.com/centrifugal/protocol v0.10.0 // indirect
|
||||
github.com/cespare/xxhash/v2 v2.2.0 // indirect
|
||||
github.com/cheekybits/genny v1.0.0 // indirect
|
||||
@ -170,7 +169,7 @@ require (
|
||||
github.com/golang/protobuf v1.5.3 // @grafana/backend-platform
|
||||
github.com/google/btree v1.1.2 // indirect
|
||||
github.com/google/flatbuffers v2.0.8+incompatible // indirect
|
||||
github.com/googleapis/gax-go/v2 v2.7.0 // @grafana/backend-platform
|
||||
github.com/googleapis/gax-go/v2 v2.7.1 // @grafana/backend-platform
|
||||
github.com/gorilla/mux v1.8.0 // indirect
|
||||
github.com/grafana/grafana-google-sdk-go v0.1.0 // @grafana/partner-datasources
|
||||
github.com/grpc-ecosystem/go-grpc-prometheus v1.2.1-0.20191002090509-6af20e3a5340 // indirect
|
||||
@ -221,11 +220,11 @@ require (
|
||||
golang.org/x/text v0.11.0 // @grafana/backend-platform
|
||||
golang.org/x/xerrors v0.0.0-20220907171357-04be3eba64a2 // indirect
|
||||
google.golang.org/appengine v1.6.7 // indirect
|
||||
google.golang.org/genproto v0.0.0-20230410155749-daa745c078e1 // @grafana/backend-platform
|
||||
google.golang.org/genproto v0.0.0-20230410155749-daa745c078e1 // indirect; @grafana/backend-platform
|
||||
)
|
||||
|
||||
require (
|
||||
cloud.google.com/go/kms v1.4.0 // @grafana/backend-platform
|
||||
cloud.google.com/go/kms v1.10.1 // @grafana/backend-platform
|
||||
github.com/Azure/azure-sdk-for-go/sdk/azidentity v1.2.0 // @grafana/backend-platform
|
||||
github.com/Azure/azure-sdk-for-go/sdk/keyvault/azkeys v0.9.0 // @grafana/backend-platform
|
||||
github.com/Azure/azure-storage-blob-go v0.15.0 // @grafana/backend-platform
|
||||
@ -264,6 +263,7 @@ require (
|
||||
github.com/grafana/dataplane/sdata v0.0.6 // @grafana/observability-metrics
|
||||
github.com/grafana/go-mssqldb v0.9.1 // @grafana/grafana-bi-squad
|
||||
github.com/grafana/kindsys v0.0.0-20230508162304-452481b63482 // @grafana/grafana-as-code
|
||||
github.com/grafana/tempo v1.5.1-0.20230524121406-1dc1bfe7085b
|
||||
github.com/grafana/thema v0.0.0-20230712153715-375c1b45f3ed // @grafana/grafana-as-code
|
||||
github.com/ory/fosite v0.44.1-0.20230317114349-45a6785cc54f // @grafana/grafana-authnz-team
|
||||
github.com/redis/go-redis/v9 v9.0.2 // @grafana/alerting-squad-backend
|
||||
@ -276,7 +276,7 @@ require (
|
||||
)
|
||||
|
||||
require (
|
||||
cloud.google.com/go v0.107.0 // indirect
|
||||
cloud.google.com/go v0.110.0 // indirect
|
||||
cloud.google.com/go/compute/metadata v0.2.3 // indirect
|
||||
github.com/Azure/azure-pipeline-go v0.2.3 // indirect
|
||||
github.com/Azure/go-ntlmssp v0.0.0-20220621081337-cb9428e4ac1e // indirect
|
||||
@ -302,7 +302,7 @@ require (
|
||||
github.com/drone/drone-go v1.7.1 // indirect
|
||||
github.com/drone/envsubst v1.0.3 // indirect
|
||||
github.com/drone/runner-go v1.12.0 // indirect
|
||||
github.com/dustin/go-humanize v1.0.0 // indirect
|
||||
github.com/dustin/go-humanize v1.0.1 // indirect
|
||||
github.com/ecordell/optgen v0.0.6 // indirect
|
||||
github.com/fsnotify/fsnotify v1.6.0 // indirect
|
||||
github.com/getsentry/sentry-go v0.12.0 // indirect
|
||||
@ -310,7 +310,7 @@ require (
|
||||
github.com/google/go-querystring v1.1.0 // indirect
|
||||
github.com/google/gofuzz v1.2.0 // indirect
|
||||
github.com/googleapis/enterprise-certificate-proxy v0.2.3 // indirect
|
||||
github.com/grafana/regexp v0.0.0-20221122212121-6b5c0a4cb7fd // indirect
|
||||
github.com/grafana/regexp v0.0.0-20221123153739-15dc172cd2db // indirect
|
||||
github.com/grafana/sqlds/v2 v2.3.10 // indirect
|
||||
github.com/hashicorp/go-cleanhttp v0.5.2 // indirect
|
||||
github.com/hashicorp/go-immutable-radix v1.3.1 // indirect
|
||||
@ -318,7 +318,7 @@ require (
|
||||
github.com/hashicorp/golang-lru/v2 v2.0.2 // indirect
|
||||
github.com/hashicorp/hcl v1.0.0 // indirect
|
||||
github.com/hashicorp/memberlist v0.5.0 // indirect
|
||||
github.com/inconshreveable/mousetrap v1.0.0 // indirect
|
||||
github.com/inconshreveable/mousetrap v1.0.1 // indirect
|
||||
github.com/invopop/yaml v0.1.0 // indirect
|
||||
github.com/kr/pretty v0.3.1 // indirect
|
||||
github.com/kr/text v0.2.0 // indirect
|
||||
@ -331,7 +331,7 @@ require (
|
||||
github.com/mitchellh/reflectwalk v1.0.2 // indirect
|
||||
github.com/mohae/deepcopy v0.0.0-20170929034955-c48cc78d4826 // indirect
|
||||
github.com/opencontainers/go-digest v1.0.0 // indirect
|
||||
github.com/opencontainers/image-spec v1.0.3-0.20211202183452-c5a74bcca799 // indirect
|
||||
github.com/opencontainers/image-spec v1.0.3-0.20220512140940-7b36cea86235 // indirect
|
||||
github.com/ory/go-acc v0.2.6 // indirect
|
||||
github.com/ory/go-convenience v0.1.0 // indirect
|
||||
github.com/ory/viper v1.7.5 // indirect
|
||||
@ -347,7 +347,7 @@ require (
|
||||
github.com/shopspring/decimal v1.2.0 // indirect
|
||||
github.com/spf13/afero v1.9.2 // indirect
|
||||
github.com/spf13/cast v1.5.0 // indirect
|
||||
github.com/spf13/cobra v1.4.0 // indirect
|
||||
github.com/spf13/cobra v1.6.1 // indirect
|
||||
github.com/spf13/jwalterweatherman v1.1.0 // indirect
|
||||
github.com/spf13/pflag v1.0.5 // indirect
|
||||
github.com/subosito/gotenv v1.4.1 // indirect
|
||||
@ -367,8 +367,8 @@ require (
|
||||
)
|
||||
|
||||
require (
|
||||
cloud.google.com/go/compute v1.18.0 // indirect
|
||||
cloud.google.com/go/iam v0.8.0 // indirect
|
||||
cloud.google.com/go/compute v1.19.0 // indirect
|
||||
cloud.google.com/go/iam v0.13.0 // indirect
|
||||
filippo.io/age v1.1.1 // @grafana/grafana-authnz-team
|
||||
github.com/Azure/azure-sdk-for-go/sdk/azcore v1.2.0 // indirect
|
||||
github.com/Azure/azure-sdk-for-go/sdk/keyvault/internal v0.7.0 // indirect
|
||||
@ -395,7 +395,7 @@ require (
|
||||
github.com/ghodss/yaml v1.0.1-0.20190212211648-25d852aebe32 // indirect
|
||||
github.com/go-git/gcfg v1.5.0 // indirect
|
||||
github.com/go-git/go-billy/v5 v5.3.1 // indirect
|
||||
github.com/go-logr/logr v1.2.3 // indirect
|
||||
github.com/go-logr/logr v1.2.4 // indirect
|
||||
github.com/go-logr/stdr v1.2.2 // indirect
|
||||
github.com/google/go-github v17.0.0+incompatible // @grafana/grafana-app-platform-squad
|
||||
github.com/grpc-ecosystem/grpc-gateway/v2 v2.15.2 // indirect
|
||||
@ -403,14 +403,14 @@ require (
|
||||
github.com/imdario/mergo v0.3.13 // indirect
|
||||
github.com/jbenet/go-context v0.0.0-20150711004518-d14ea06fba99 // indirect
|
||||
github.com/kevinburke/ssh_config v0.0.0-20201106050909-4977a11b4351 // indirect
|
||||
github.com/klauspost/compress v1.15.13 // indirect
|
||||
github.com/klauspost/compress v1.16.5 // indirect
|
||||
github.com/kylelemons/godebug v1.1.0 // indirect
|
||||
github.com/labstack/echo/v4 v4.10.2 // indirect
|
||||
github.com/labstack/gommon v0.4.0 // indirect
|
||||
github.com/mitchellh/go-homedir v1.1.0 // indirect
|
||||
github.com/mitchellh/go-wordwrap v1.0.1 // indirect
|
||||
github.com/mschoch/smat v0.2.0 // indirect
|
||||
github.com/pierrec/lz4/v4 v4.1.15 // indirect
|
||||
github.com/pierrec/lz4/v4 v4.1.17 // indirect
|
||||
github.com/valyala/fasttemplate v1.2.2 // indirect
|
||||
github.com/wk8/go-ordered-map v1.0.0 // @grafana/backend-platform
|
||||
github.com/xanzy/ssh-agent v0.3.0 // indirect
|
||||
@ -442,8 +442,6 @@ replace github.com/prometheus/alertmanager => github.com/grafana/prometheus-aler
|
||||
// grpc v1.46.0 removed "WithBalancerName()" API, still in use by weaveworks/commons.
|
||||
replace google.golang.org/grpc => google.golang.org/grpc v1.45.0
|
||||
|
||||
replace google.golang.org/genproto => google.golang.org/genproto v0.0.0-20220421151946-72621c1f0bd3
|
||||
|
||||
// Use 1.10.6 of pq to avoid a change in 1.10.7 that has certificate validation issues. https://github.com/grafana/grafana/issues/65816
|
||||
replace github.com/lib/pq => github.com/lib/pq v1.10.6
|
||||
|
||||
|
@ -114,4 +114,5 @@ export interface FeatureToggles {
|
||||
awsDatasourcesTempCredentials?: boolean;
|
||||
transformationsRedesign?: boolean;
|
||||
mlExpressions?: boolean;
|
||||
disableTraceQLStreaming?: boolean;
|
||||
}
|
||||
|
@ -47,6 +47,10 @@ export interface TempoQuery extends common.DataQuery {
|
||||
* Query traces by span name
|
||||
*/
|
||||
spanName?: string;
|
||||
/**
|
||||
* Use the streaming API to get partial results as they are available
|
||||
*/
|
||||
streaming?: boolean;
|
||||
}
|
||||
|
||||
export const defaultTempoQuery: Partial<TempoQuery> = {
|
||||
@ -56,7 +60,17 @@ export const defaultTempoQuery: Partial<TempoQuery> = {
|
||||
/**
|
||||
* search = Loki search, nativeSearch = Tempo search for backwards compatibility
|
||||
*/
|
||||
export type TempoQueryType = ('traceql' | 'traceqlSearch' | 'search' | 'serviceMap' | 'upload' | 'nativeSearch' | 'clear');
|
||||
export type TempoQueryType = ('traceql' | 'traceqlSearch' | 'search' | 'serviceMap' | 'upload' | 'nativeSearch' | 'traceId' | 'clear');
|
||||
|
||||
/**
|
||||
* The state of the TraceQL streaming search query
|
||||
*/
|
||||
export enum SearchStreamingState {
|
||||
Done = 'done',
|
||||
Error = 'error',
|
||||
Pending = 'pending',
|
||||
Streaming = 'streaming',
|
||||
}
|
||||
|
||||
/**
|
||||
* static fields are pre-set in the UI, dynamic fields are added by the user
|
||||
|
@ -2,6 +2,7 @@ package extensions
|
||||
|
||||
import (
|
||||
_ "cloud.google.com/go/kms/apiv1"
|
||||
_ "cloud.google.com/go/kms/apiv1/kmspb"
|
||||
_ "github.com/Azure/azure-sdk-for-go/sdk/azidentity"
|
||||
_ "github.com/Azure/azure-sdk-for-go/sdk/keyvault/azkeys"
|
||||
_ "github.com/Azure/azure-sdk-for-go/services/keyvault/v7.1/keyvault"
|
||||
@ -25,7 +26,6 @@ import (
|
||||
_ "github.com/stretchr/testify/require"
|
||||
_ "github.com/vectordotdev/go-datemath"
|
||||
_ "golang.org/x/time/rate"
|
||||
_ "google.golang.org/genproto/googleapis/cloud/kms/v1"
|
||||
)
|
||||
|
||||
var IsEnterprise bool = false
|
||||
|
@@ -654,5 +654,12 @@ var (
		FrontendOnly: false,
		Owner:        grafanaAlertingSquad,
	},
	{
		Name:         "disableTraceQLStreaming",
		Description:  "Disables the option to stream the response of TraceQL queries of the Tempo data source",
		Stage:        FeatureStageExperimental,
		FrontendOnly: true,
		Owner:        grafanaObservabilityTracesAndProfilingSquad,
	},
}
)
@@ -95,3 +95,4 @@ logsExploreTableVisualisation,experimental,@grafana/observability-logs,false,false,fal
awsDatasourcesTempCredentials,experimental,@grafana/aws-datasources,false,false,false,false
transformationsRedesign,experimental,@grafana/observability-metrics,false,false,false,true
mlExpressions,experimental,@grafana/alerting-squad,false,false,false,false
disableTraceQLStreaming,experimental,@grafana/observability-traces-and-profiling,false,false,false,true
@@ -390,4 +390,8 @@ const (
	// FlagMlExpressions
	// Enable support for Machine Learning in server-side expressions
	FlagMlExpressions = "mlExpressions"

	// FlagDisableTraceQLStreaming
	// Disables the option to stream the response of TraceQL queries of the Tempo data source
	FlagDisableTraceQLStreaming = "disableTraceQLStreaming"
)
pkg/tsdb/tempo/grpc.go (new file, 72 lines)
@@ -0,0 +1,72 @@
package tempo

import (
	"context"
	"crypto/tls"
	"encoding/base64"
	"fmt"
	"net/url"
	"strings"

	"github.com/grafana/grafana-plugin-sdk-go/backend"
	"github.com/grafana/grafana-plugin-sdk-go/backend/httpclient"
	"github.com/grafana/tempo/pkg/tempopb"
	"google.golang.org/grpc"
	"google.golang.org/grpc/credentials"
	"google.golang.org/grpc/credentials/insecure"
)

// This function creates a new gRPC client to connect to a streaming query service.
// It starts by parsing the URL from the data source settings and extracting the host, since that's what the gRPC connection expects.
// If the URL does not contain a port number, it adds a default port based on the scheme (80 for HTTP and 443 for HTTPS).
// If basic authentication is enabled, it uses TLS transport credentials and sets the basic authentication header for each RPC call.
// Otherwise, it uses insecure credentials.
func newGrpcClient(settings backend.DataSourceInstanceSettings, opts httpclient.Options) (tempopb.StreamingQuerierClient, error) {
	parsedUrl, err := url.Parse(settings.URL)
	if err != nil {
		return nil, err
	}

	onlyHost := parsedUrl.Host
	if !strings.Contains(onlyHost, ":") {
		if parsedUrl.Scheme == "http" {
			onlyHost += ":80"
		} else {
			onlyHost += ":443"
		}
	}

	var dialOps []grpc.DialOption
	if settings.BasicAuthEnabled {
		dialOps = append(dialOps, grpc.WithTransportCredentials(credentials.NewTLS(&tls.Config{})))
		dialOps = append(dialOps, grpc.WithPerRPCCredentials(&basicAuth{
			Header: basicHeaderForAuth(opts.BasicAuth.User, opts.BasicAuth.Password),
		}))
	} else {
		dialOps = append(dialOps, grpc.WithTransportCredentials(insecure.NewCredentials()))
	}

	clientConn, err := grpc.Dial(onlyHost, dialOps...)
	if err != nil {
		return nil, err
	}
	return tempopb.NewStreamingQuerierClient(clientConn), nil
}

type basicAuth struct {
	Header string
}

func (c *basicAuth) GetRequestMetadata(context.Context, ...string) (map[string]string, error) {
	return map[string]string{
		"Authorization": c.Header,
	}, nil
}

func (c *basicAuth) RequireTransportSecurity() bool {
	return true
}

func basicHeaderForAuth(username, password string) string {
	return fmt.Sprintf("Basic %s", base64.StdEncoding.EncodeToString([]byte(fmt.Sprintf("%s:%s", username, password))))
}
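The comment above describes how newGrpcClient normalizes the data source URL into the host:port form that grpc.Dial expects. A standalone sketch of that normalization, using hypothetical URLs, shows the effect of the default-port rule:

package main

import (
	"fmt"
	"net/url"
	"strings"
)

// hostWithDefaultPort mirrors the host extraction and default-port logic described above
// (a sketch in isolation, not the plugin code itself).
func hostWithDefaultPort(rawURL string) (string, error) {
	u, err := url.Parse(rawURL)
	if err != nil {
		return "", err
	}
	host := u.Host
	if !strings.Contains(host, ":") {
		if u.Scheme == "http" {
			host += ":80"
		} else {
			host += ":443"
		}
	}
	return host, nil
}

func main() {
	// Hypothetical data source URLs used only for illustration.
	for _, raw := range []string{"http://tempo.local", "https://tempo.example.com", "https://tempo.example.com:3200"} {
		host, _ := hostWithDefaultPort(raw)
		fmt.Println(raw, "->", host) // e.g. "https://tempo.example.com -> tempo.example.com:443"
	}
}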
@ -9,12 +9,21 @@
|
||||
|
||||
package dataquery
|
||||
|
||||
// Defines values for SearchStreamingState.
|
||||
const (
|
||||
SearchStreamingStateDone SearchStreamingState = "done"
|
||||
SearchStreamingStateError SearchStreamingState = "error"
|
||||
SearchStreamingStatePending SearchStreamingState = "pending"
|
||||
SearchStreamingStateStreaming SearchStreamingState = "streaming"
|
||||
)
|
||||
|
||||
// Defines values for TempoQueryType.
|
||||
const (
|
||||
TempoQueryTypeClear TempoQueryType = "clear"
|
||||
TempoQueryTypeNativeSearch TempoQueryType = "nativeSearch"
|
||||
TempoQueryTypeSearch TempoQueryType = "search"
|
||||
TempoQueryTypeServiceMap TempoQueryType = "serviceMap"
|
||||
TempoQueryTypeTraceId TempoQueryType = "traceId"
|
||||
TempoQueryTypeTraceql TempoQueryType = "traceql"
|
||||
TempoQueryTypeTraceqlSearch TempoQueryType = "traceqlSearch"
|
||||
TempoQueryTypeUpload TempoQueryType = "upload"
|
||||
@ -52,6 +61,9 @@ type DataQuery struct {
|
||||
RefId string `json:"refId"`
|
||||
}
|
||||
|
||||
// The state of the TraceQL streaming search query
|
||||
type SearchStreamingState string
|
||||
|
||||
// TempoDataQuery defines model for TempoDataQuery.
|
||||
type TempoDataQuery = map[string]any
|
||||
|
||||
@ -106,6 +118,9 @@ type TempoQuery struct {
|
||||
|
||||
// Query traces by span name
|
||||
SpanName *string `json:"spanName,omitempty"`
|
||||
|
||||
// Use the streaming API to get partial results as they are available
|
||||
Streaming *bool `json:"streaming,omitempty"`
|
||||
}
|
||||
|
||||
// TempoQueryType search = Loki search, nativeSearch = Tempo search for backwards compatibility
|
||||
|
pkg/tsdb/tempo/protospan_translation.go (new file, 63 lines)
@ -0,0 +1,63 @@
|
||||
// Copyright The OpenTelemetry Authors
|
||||
//
|
||||
// Licensed under the Apache License, Version 2.0 (the "License");
|
||||
// you may not use this file except in compliance with the License.
|
||||
// You may obtain a copy of the License at
|
||||
//
|
||||
// http://www.apache.org/licenses/LICENSE-2.0
|
||||
//
|
||||
// Unless required by applicable law or agreed to in writing, software
|
||||
// distributed under the License is distributed on an "AS IS" BASIS,
|
||||
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
// See the License for the specific language governing permissions and
|
||||
// limitations under the License.
|
||||
|
||||
package tempo
|
||||
|
||||
import (
|
||||
"go.opentelemetry.io/collector/model/pdata"
|
||||
)
|
||||
|
||||
// Some of the keys used to represent OTLP constructs as tags or annotations in other formats.
|
||||
const (
|
||||
TagMessage = "message"
|
||||
|
||||
TagSpanKind = "span.kind"
|
||||
|
||||
TagStatusCode = "status.code"
|
||||
TagStatusMsg = "status.message"
|
||||
TagError = "error"
|
||||
TagHTTPStatusMsg = "http.status_message"
|
||||
|
||||
TagW3CTraceState = "w3c.tracestate"
|
||||
)
|
||||
|
||||
// Constants used for signifying batch-level attribute values where not supplied by OTLP data but required
|
||||
// by other protocols.
|
||||
const (
|
||||
ResourceNoServiceName = "OTLPResourceNoServiceName"
|
||||
)
|
||||
|
||||
// OpenTracingSpanKind are possible values for TagSpanKind and match the OpenTracing
|
||||
// conventions: https://github.com/opentracing/specification/blob/main/semantic_conventions.md
|
||||
// These values are used for representing span kinds that have no
|
||||
// equivalents in OpenCensus format. They are stored as values of TagSpanKind
|
||||
type OpenTracingSpanKind string
|
||||
|
||||
const (
|
||||
OpenTracingSpanKindUnspecified OpenTracingSpanKind = ""
|
||||
OpenTracingSpanKindClient OpenTracingSpanKind = "client"
|
||||
OpenTracingSpanKindServer OpenTracingSpanKind = "server"
|
||||
OpenTracingSpanKindConsumer OpenTracingSpanKind = "consumer"
|
||||
OpenTracingSpanKindProducer OpenTracingSpanKind = "producer"
|
||||
OpenTracingSpanKindInternal OpenTracingSpanKind = "internal"
|
||||
)
|
||||
|
||||
// StatusCodeFromHTTP takes an HTTP status code and return the appropriate OpenTelemetry status code
|
||||
// See: https://github.com/open-telemetry/opentelemetry-specification/blob/main/specification/trace/semantic_conventions/http.md#status
|
||||
func StatusCodeFromHTTP(httpStatusCode int) pdata.StatusCode {
|
||||
if httpStatusCode >= 100 && httpStatusCode < 399 {
|
||||
return pdata.StatusCodeUnset
|
||||
}
|
||||
return pdata.StatusCodeError
|
||||
}
|
pkg/tsdb/tempo/search_stream.go (new file, 160 lines)
@@ -0,0 +1,160 @@
package tempo

import (
	"context"
	"encoding/json"
	"errors"
	"fmt"
	"io"

	"github.com/grafana/grafana-plugin-sdk-go/backend"
	"github.com/grafana/grafana-plugin-sdk-go/data"
	"github.com/grafana/grafana/pkg/tsdb/tempo/kinds/dataquery"
	"github.com/grafana/tempo/pkg/tempopb"
)

const SearchPathPrefix = "search/"

type ExtendedResponse struct {
	*tempopb.SearchResponse
	State dataquery.SearchStreamingState
}

type StreamSender interface {
	SendFrame(frame *data.Frame, include data.FrameInclude) error
	SendJSON(data []byte) error
	SendBytes(data []byte) error
}

func (s *Service) runSearchStream(ctx context.Context, req *backend.RunStreamRequest, sender *backend.StreamSender, datasource *Datasource) error {
	response := &backend.DataResponse{}

	var backendQuery *backend.DataQuery
	err := json.Unmarshal(req.Data, &backendQuery)
	if err != nil {
		response.Error = fmt.Errorf("error unmarshaling backend query model: %v", err)
		return err
	}

	var sr *tempopb.SearchRequest
	err = json.Unmarshal(req.Data, &sr)
	if err != nil {
		response.Error = fmt.Errorf("error unmarshaling Tempo query model: %v", err)
		return err
	}

	if sr.GetQuery() == "" {
		return fmt.Errorf("query is empty")
	}

	sr.Start = uint32(backendQuery.TimeRange.From.Unix())
	sr.End = uint32(backendQuery.TimeRange.To.Unix())

	stream, err := datasource.StreamingClient.Search(ctx, sr)
	if err != nil {
		s.logger.Error("Error Search()", "err", err)
		return err
	}

	return s.processStream(stream, sender)
}

// processStream reads messages from the gRPC stream until EOF, accumulating a
// deduplicated trace list and the latest metrics, and forwards a partial result
// to the sender after every message. On EOF it sends one final frame in the Done state.
func (s *Service) processStream(stream tempopb.StreamingQuerier_SearchClient, sender StreamSender) error {
	var traceList []*tempopb.TraceSearchMetadata
	var metrics *tempopb.SearchMetrics
	for {
		msg, err := stream.Recv()
		if errors.Is(err, io.EOF) {
			if err := sendResponse(&ExtendedResponse{
				State: dataquery.SearchStreamingStateDone,
				SearchResponse: &tempopb.SearchResponse{
					Metrics: metrics,
					Traces:  traceList,
				},
			}, sender); err != nil {
				return err
			}
			break
		}
		if err != nil {
			s.logger.Error("Error receiving message", "err", err)
			return err
		}

		metrics = msg.Metrics
		traceList = append(traceList, msg.Traces...)
		traceList = removeDuplicates(traceList)

		if err := sendResponse(&ExtendedResponse{
			State: dataquery.SearchStreamingStateStreaming,
			SearchResponse: &tempopb.SearchResponse{
				Metrics: metrics,
				Traces:  traceList,
			},
		}, sender); err != nil {
			return err
		}
	}

	return nil
}

// sendResponse marshals the traces and metrics to JSON and appends them, together
// with the streaming state and an empty error, as a single row of the response frame.
func sendResponse(response *ExtendedResponse, sender StreamSender) error {
	frame := createResponseDataFrame()

	if response != nil {
		tracesAsJson, err := json.Marshal(response.Traces)
		if err != nil {
			return err
		}
		tracesRawMessage := json.RawMessage(tracesAsJson)
		frame.Fields[0].Append(tracesRawMessage)

		metricsAsJson, err := json.Marshal(response.Metrics)
		if err != nil {
			return err
		}
		metricsRawMessage := json.RawMessage(metricsAsJson)
		frame.Fields[1].Append(metricsRawMessage)
		frame.Fields[2].Append(string(response.State))
		frame.Fields[3].Append("")
	}

	return sender.SendFrame(frame, data.IncludeAll)
}

// sendError emits a frame in the Error state carrying the error message, so the
// client receives the failure in-band through the same channel as regular results.
func sendError(searchErr error, sender StreamSender) error {
	frame := createResponseDataFrame()

	if searchErr != nil {
		frame.Fields[0].Append(json.RawMessage{})
		frame.Fields[1].Append(json.RawMessage{})
		frame.Fields[2].Append(string(dataquery.SearchStreamingStateError))
		frame.Fields[3].Append(searchErr.Error())
	}

	return sender.SendFrame(frame, data.IncludeAll)
}

// createResponseDataFrame builds the fixed four-field frame (traces, metrics, state, error)
// shared by the streaming response and error paths.
func createResponseDataFrame() *data.Frame {
	frame := data.NewFrame("response")
	frame.Fields = append(frame.Fields, data.NewField("traces", nil, []json.RawMessage{}))
	frame.Fields = append(frame.Fields, data.NewField("metrics", nil, []json.RawMessage{}))
	frame.Fields = append(frame.Fields, data.NewField("state", nil, []string{}))
	frame.Fields = append(frame.Fields, data.NewField("error", nil, []string{}))

	return frame
}

// removeDuplicates keeps the first occurrence of each trace ID, preserving order.
func removeDuplicates(traceList []*tempopb.TraceSearchMetadata) []*tempopb.TraceSearchMetadata {
	keys := make(map[string]bool)
	var list []*tempopb.TraceSearchMetadata

	for _, entry := range traceList {
		if _, value := keys[entry.TraceID]; !value {
			keys[entry.TraceID] = true
			list = append(list, entry)
		}
	}
	return list
}
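sendResponse and sendError above always emit a four-field, single-row data frame (traces, metrics, state, error). A minimal sketch of how a consumer could build and read back a frame with this layout using the plugin SDK; the sample trace and metric values are hypothetical:

package main

import (
	"encoding/json"
	"fmt"

	"github.com/grafana/grafana-plugin-sdk-go/data"
)

func main() {
	// Build a frame with the same layout as createResponseDataFrame, already holding one row.
	frame := data.NewFrame("response",
		data.NewField("traces", nil, []json.RawMessage{json.RawMessage(`[{"traceID":"abc"}]`)}),
		data.NewField("metrics", nil, []json.RawMessage{json.RawMessage(`{"completedJobs":1}`)}),
		data.NewField("state", nil, []string{"streaming"}),
		data.NewField("error", nil, []string{""}),
	)

	// Read the single row back the way a consumer of the stream would.
	traces := frame.Fields[0].At(0).(json.RawMessage)
	state := frame.Fields[2].At(0).(string)
	fmt.Println(string(traces), state) // [{"traceID":"abc"}] streaming
}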
pkg/tsdb/tempo/search_stream_test.go (new file, 229 lines)
@ -0,0 +1,229 @@
|
||||
package tempo
|
||||
|
||||
import (
|
||||
"context"
|
||||
"encoding/json"
|
||||
"errors"
|
||||
"io"
|
||||
"reflect"
|
||||
"strings"
|
||||
"testing"
|
||||
|
||||
"github.com/grafana/grafana-plugin-sdk-go/backend"
|
||||
"github.com/grafana/grafana-plugin-sdk-go/data"
|
||||
"github.com/grafana/grafana/pkg/infra/log"
|
||||
"github.com/grafana/grafana/pkg/tsdb/tempo/kinds/dataquery"
|
||||
"github.com/grafana/tempo/pkg/tempopb"
|
||||
"google.golang.org/grpc/metadata"
|
||||
)
|
||||
|
||||
func TestProcessStream_ValidInput_ReturnsNoError(t *testing.T) {
|
||||
service := &Service{}
|
||||
searchClient := &mockStreamer{}
|
||||
streamSender := &mockSender{}
|
||||
err := service.processStream(searchClient, streamSender)
|
||||
if err != nil {
|
||||
t.Errorf("Expected no error, but got %s", err)
|
||||
}
|
||||
}
|
||||
func TestProcessStream_InvalidInput_ReturnsError(t *testing.T) {
|
||||
logger := log.New("tsdb.tempo.test")
|
||||
service := &Service{
|
||||
logger: logger,
|
||||
}
|
||||
searchClient := &mockStreamer{err: errors.New("invalid input")}
|
||||
streamSender := &mockSender{}
|
||||
err := service.processStream(searchClient, streamSender)
|
||||
if err != nil {
|
||||
if !strings.Contains(err.Error(), "invalid input") {
|
||||
t.Errorf("Expected error message to contain 'invalid input', but got %s", err)
|
||||
}
|
||||
}
|
||||
}
|
||||
func TestProcessStream_ValidInput_ReturnsExpectedOutput(t *testing.T) {
|
||||
logger := log.New("tsdb.tempo.test")
|
||||
service := &Service{
|
||||
logger: logger,
|
||||
}
|
||||
searchClient := &mockStreamer{
|
||||
tracingMetadata: []*tempopb.TraceSearchMetadata{
|
||||
{TraceID: "abcdefg", StartTimeUnixNano: 1234},
|
||||
{TraceID: "hijklmn", StartTimeUnixNano: 5678},
|
||||
},
|
||||
metrics: &tempopb.SearchMetrics{
|
||||
CompletedJobs: 2,
|
||||
TotalJobs: 5,
|
||||
InspectedBytes: 123456789,
|
||||
TotalBlockBytes: 987654321,
|
||||
InspectedTraces: 123,
|
||||
},
|
||||
expectedResponses: []ExtendedResponse{
|
||||
{
|
||||
SearchResponse: &tempopb.SearchResponse{
|
||||
Traces: []*tempopb.TraceSearchMetadata{
|
||||
{TraceID: "abcdefg", StartTimeUnixNano: 1234},
|
||||
},
|
||||
Metrics: &tempopb.SearchMetrics{
|
||||
CompletedJobs: 2,
|
||||
TotalJobs: 5,
|
||||
InspectedBytes: 123456789,
|
||||
TotalBlockBytes: 987654321,
|
||||
InspectedTraces: 123,
|
||||
},
|
||||
},
|
||||
State: dataquery.SearchStreamingStateStreaming,
|
||||
},
|
||||
{
|
||||
SearchResponse: &tempopb.SearchResponse{
|
||||
Traces: []*tempopb.TraceSearchMetadata{
|
||||
{TraceID: "abcdefg", StartTimeUnixNano: 1234},
|
||||
{TraceID: "hijklmn", StartTimeUnixNano: 5678},
|
||||
},
|
||||
Metrics: &tempopb.SearchMetrics{
|
||||
CompletedJobs: 2,
|
||||
TotalJobs: 5,
|
||||
InspectedBytes: 123456789,
|
||||
TotalBlockBytes: 987654321,
|
||||
InspectedTraces: 123,
|
||||
},
|
||||
},
|
||||
State: dataquery.SearchStreamingStateStreaming,
|
||||
},
|
||||
|
||||
{
|
||||
SearchResponse: &tempopb.SearchResponse{
|
||||
Traces: []*tempopb.TraceSearchMetadata{
|
||||
{TraceID: "abcdefg", StartTimeUnixNano: 1234},
|
||||
{TraceID: "hijklmn", StartTimeUnixNano: 5678},
|
||||
},
|
||||
Metrics: &tempopb.SearchMetrics{
|
||||
CompletedJobs: 2,
|
||||
TotalJobs: 5,
|
||||
InspectedBytes: 123456789,
|
||||
TotalBlockBytes: 987654321,
|
||||
InspectedTraces: 123,
|
||||
},
|
||||
},
|
||||
State: dataquery.SearchStreamingStateDone,
|
||||
},
|
||||
},
|
||||
}
|
||||
streamSender := &mockSender{}
|
||||
err := service.processStream(searchClient, streamSender)
|
||||
if err != nil {
|
||||
t.Errorf("Expected no error, but got %s", err)
|
||||
return
|
||||
}
|
||||
if len(streamSender.responses) != 3 {
|
||||
t.Errorf("Expected 3 responses, but got %d", len(streamSender.responses))
|
||||
return
|
||||
}
|
||||
|
||||
for i, frame := range streamSender.responses {
|
||||
expectedMetrics := searchClient.expectedResponses[i].Metrics
|
||||
expectedTraces := searchClient.expectedResponses[i].Traces
|
||||
expectedState := string(searchClient.expectedResponses[i].State)
|
||||
|
||||
if len(frame.Fields) != 4 {
|
||||
t.Errorf("Expected 4 fields in data frame, but was '%d'", len(frame.Fields))
|
||||
return
|
||||
}
|
||||
var traceList []*tempopb.TraceSearchMetadata
|
||||
if err := json.Unmarshal(frame.Fields[0].At(0).(json.RawMessage), &traceList); err != nil {
|
||||
t.Errorf("Error unmarshaling trace list: %s", err)
|
||||
} else {
|
||||
if !reflect.DeepEqual(traceList, expectedTraces) {
|
||||
t.Errorf("Expected response traces to be '%+v', but was '%+v'",
|
||||
expectedTraces, traceList)
|
||||
return
|
||||
}
|
||||
}
|
||||
|
||||
var metrics *tempopb.SearchMetrics
|
||||
if err := json.Unmarshal(frame.Fields[1].At(0).(json.RawMessage), &metrics); err != nil {
|
||||
t.Errorf("Error unmarshaling metrics: %s", err)
|
||||
} else {
|
||||
if !reflect.DeepEqual(metrics, expectedMetrics) {
|
||||
t.Errorf("Expected response metrics to be '%+v', but was '%+v'",
|
||||
expectedMetrics, metrics)
|
||||
return
|
||||
}
|
||||
}
|
||||
|
||||
state := frame.Fields[2].At(0).(string)
|
||||
if state != expectedState {
|
||||
t.Errorf("Expected response state to be '%+v', but was '%+v'", expectedState,
|
||||
state)
|
||||
return
|
||||
}
|
||||
frameErr := frame.Fields[3].At(0).(string)
|
||||
if frameErr != "" {
|
||||
t.Errorf("Didn't expect error but got '%+v'", frameErr)
|
||||
return
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
type mockSender struct {
|
||||
backend.StreamSender
|
||||
responses []*data.Frame
|
||||
}
|
||||
|
||||
func (s *mockSender) SendFrame(frame *data.Frame, include data.FrameInclude) error {
|
||||
s.responses = append(s.responses, frame)
|
||||
return nil
|
||||
}
|
||||
|
||||
type mockStreamer struct {
|
||||
tracingMetadata []*tempopb.TraceSearchMetadata
|
||||
copyOfTracingMetadata []*tempopb.TraceSearchMetadata
|
||||
metrics *tempopb.SearchMetrics
|
||||
expectedResponses []ExtendedResponse
|
||||
err error
|
||||
}
|
||||
|
||||
func (m *mockStreamer) Recv() (*tempopb.SearchResponse, error) {
|
||||
if m.err != nil {
|
||||
return nil, m.err
|
||||
}
|
||||
if m.copyOfTracingMetadata == nil {
|
||||
m.copyOfTracingMetadata = make([]*tempopb.TraceSearchMetadata, len(m.tracingMetadata))
|
||||
copy(m.copyOfTracingMetadata, m.tracingMetadata)
|
||||
}
|
||||
if len(m.copyOfTracingMetadata) == 0 {
|
||||
return &tempopb.SearchResponse{
|
||||
Metrics: m.metrics,
|
||||
Traces: m.tracingMetadata,
|
||||
}, io.EOF
|
||||
}
|
||||
traceMetadata := m.copyOfTracingMetadata[0]
|
||||
m.copyOfTracingMetadata = m.copyOfTracingMetadata[1:]
|
||||
return &tempopb.SearchResponse{
|
||||
Metrics: m.metrics,
|
||||
Traces: []*tempopb.TraceSearchMetadata{traceMetadata},
|
||||
}, nil
|
||||
}
|
||||
|
||||
func (m *mockStreamer) Header() (metadata.MD, error) {
|
||||
panic("implement me")
|
||||
}
|
||||
|
||||
func (m *mockStreamer) Trailer() metadata.MD {
|
||||
panic("implement me")
|
||||
}
|
||||
|
||||
func (m *mockStreamer) CloseSend() error {
|
||||
panic("implement me")
|
||||
}
|
||||
|
||||
func (m *mockStreamer) Context() context.Context {
|
||||
panic("implement me")
|
||||
}
|
||||
|
||||
func (m *mockStreamer) SendMsg(a interface{}) error {
|
||||
panic("implement me")
|
||||
}
|
||||
|
||||
func (m *mockStreamer) RecvMsg(a interface{}) error {
|
||||
panic("implement me")
|
||||
}
|
pkg/tsdb/tempo/stream_handler.go (new file, 48 lines)
@@ -0,0 +1,48 @@
package tempo

import (
	"context"
	"fmt"
	"strings"

	"github.com/grafana/grafana-plugin-sdk-go/backend"
)

func (s *Service) SubscribeStream(ctx context.Context, req *backend.SubscribeStreamRequest) (*backend.SubscribeStreamResponse, error) {
	s.logger.Debug("Allowing access to stream", "path", req.Path, "user", req.PluginContext.User)
	status := backend.SubscribeStreamStatusPermissionDenied
	if strings.HasPrefix(req.Path, SearchPathPrefix) {
		status = backend.SubscribeStreamStatusOK
	}

	return &backend.SubscribeStreamResponse{
		Status: status,
	}, nil
}

func (s *Service) PublishStream(ctx context.Context, req *backend.PublishStreamRequest) (*backend.PublishStreamResponse, error) {
	s.logger.Debug("PublishStream called")

	// Do not allow publishing at all.
	return &backend.PublishStreamResponse{
		Status: backend.PublishStreamStatusPermissionDenied,
	}, nil
}

func (s *Service) RunStream(ctx context.Context, request *backend.RunStreamRequest, sender *backend.StreamSender) error {
	s.logger.Debug("New stream call", "path", request.Path)

	if strings.HasPrefix(request.Path, SearchPathPrefix) {
		tempoDatasource, err := s.getDSInfo(ctx, request.PluginContext)
		if err != nil {
			return err
		}
		if err = s.runSearchStream(ctx, request, sender, tempoDatasource); err != nil {
			return sendError(err, sender)
		} else {
			return nil
		}
	}

	return fmt.Errorf("unknown path %s", request.Path)
}
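RunStream and SubscribeStream only serve channels whose path starts with SearchPathPrefix ("search/"); everything else is denied or rejected as an unknown path. A small sketch of that gating; the concrete path suffix shown is hypothetical, not necessarily what the frontend sends:

package main

import (
	"fmt"
	"strings"
)

const SearchPathPrefix = "search/"

// allowed mirrors the prefix check used by SubscribeStream/RunStream above.
func allowed(path string) bool {
	return strings.HasPrefix(path, SearchPathPrefix)
}

func main() {
	fmt.Println(allowed("search/some-unique-query-key")) // true  -> stream is served
	fmt.Println(allowed("metrics/refA"))                 // false -> permission denied / unknown path
}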
@ -2,38 +2,34 @@ package tempo
|
||||
|
||||
import (
|
||||
"context"
|
||||
"encoding/json"
|
||||
"fmt"
|
||||
"io"
|
||||
"net/http"
|
||||
|
||||
"github.com/grafana/grafana/pkg/tsdb/tempo/kinds/dataquery"
|
||||
|
||||
"github.com/grafana/grafana-plugin-sdk-go/backend"
|
||||
"github.com/grafana/grafana-plugin-sdk-go/backend/datasource"
|
||||
"github.com/grafana/grafana-plugin-sdk-go/backend/instancemgmt"
|
||||
"github.com/grafana/grafana-plugin-sdk-go/data"
|
||||
"go.opentelemetry.io/collector/model/otlp"
|
||||
|
||||
"github.com/grafana/grafana/pkg/infra/httpclient"
|
||||
"github.com/grafana/grafana/pkg/infra/log"
|
||||
"github.com/grafana/grafana/pkg/tsdb/tempo/kinds/dataquery"
|
||||
"github.com/grafana/tempo/pkg/tempopb"
|
||||
)
|
||||
|
||||
type Service struct {
|
||||
im instancemgmt.InstanceManager
|
||||
tlog log.Logger
|
||||
im instancemgmt.InstanceManager
|
||||
logger log.Logger
|
||||
}
|
||||
|
||||
func ProvideService(httpClientProvider httpclient.Provider) *Service {
|
||||
return &Service{
|
||||
tlog: log.New("tsdb.tempo"),
|
||||
im: datasource.NewInstanceManager(newInstanceSettings(httpClientProvider)),
|
||||
logger: log.New("tsdb.tempo"),
|
||||
im: datasource.NewInstanceManager(newInstanceSettings(httpClientProvider)),
|
||||
}
|
||||
}
|
||||
|
||||
type datasourceInfo struct {
|
||||
HTTPClient *http.Client
|
||||
URL string
|
||||
type Datasource struct {
|
||||
HTTPClient *http.Client
|
||||
StreamingClient tempopb.StreamingQuerierClient
|
||||
URL string
|
||||
}
|
||||
|
||||
func newInstanceSettings(httpClientProvider httpclient.Provider) datasource.InstanceFactoryFunc {
|
||||
@ -48,100 +44,55 @@ func newInstanceSettings(httpClientProvider httpclient.Provider) datasource.Inst
|
||||
return nil, err
|
||||
}
|
||||
|
||||
model := &datasourceInfo{
|
||||
HTTPClient: client,
|
||||
URL: settings.URL,
|
||||
streamingClient, err := newGrpcClient(settings, opts)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
model := &Datasource{
|
||||
HTTPClient: client,
|
||||
StreamingClient: streamingClient,
|
||||
URL: settings.URL,
|
||||
}
|
||||
return model, nil
|
||||
}
|
||||
}
|
||||
|
||||
func (s *Service) QueryData(ctx context.Context, req *backend.QueryDataRequest) (*backend.QueryDataResponse, error) {
|
||||
result := backend.NewQueryDataResponse()
|
||||
queryRes := backend.DataResponse{}
|
||||
refID := req.Queries[0].RefID
|
||||
s.logger.Info("QueryData called ", "Queries ", req.Queries)
|
||||
|
||||
model := &dataquery.TempoQuery{}
|
||||
err := json.Unmarshal(req.Queries[0].JSON, model)
|
||||
if err != nil {
|
||||
return result, err
|
||||
}
|
||||
// create response struct
|
||||
response := backend.NewQueryDataResponse()
|
||||
|
||||
dsInfo, err := s.getDSInfo(ctx, req.PluginContext)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
request, err := s.createRequest(ctx, dsInfo, model.Query, req.Queries[0].TimeRange.From.Unix(), req.Queries[0].TimeRange.To.Unix())
|
||||
if err != nil {
|
||||
return result, err
|
||||
}
|
||||
|
||||
resp, err := dsInfo.HTTPClient.Do(request)
|
||||
if err != nil {
|
||||
return result, fmt.Errorf("failed get to tempo: %w", err)
|
||||
}
|
||||
|
||||
defer func() {
|
||||
if err := resp.Body.Close(); err != nil {
|
||||
s.tlog.FromContext(ctx).Warn("failed to close response body", "err", err)
|
||||
// loop over queries and execute them individually.
|
||||
for _, q := range req.Queries {
|
||||
if res, err := s.query(ctx, req.PluginContext, q); err != nil {
|
||||
return response, err
|
||||
} else {
|
||||
if res != nil {
|
||||
response.Responses[q.RefID] = *res
|
||||
}
|
||||
}
|
||||
}()
|
||||
|
||||
body, err := io.ReadAll(resp.Body)
|
||||
if err != nil {
|
||||
return &backend.QueryDataResponse{}, err
|
||||
}
|
||||
|
||||
if resp.StatusCode != http.StatusOK {
|
||||
queryRes.Error = fmt.Errorf("failed to get trace with id: %s Status: %s Body: %s", model.Query, resp.Status, string(body))
|
||||
result.Responses[refID] = queryRes
|
||||
return result, nil
|
||||
}
|
||||
|
||||
otTrace, err := otlp.NewProtobufTracesUnmarshaler().UnmarshalTraces(body)
|
||||
|
||||
if err != nil {
|
||||
return &backend.QueryDataResponse{}, fmt.Errorf("failed to convert tempo response to Otlp: %w", err)
|
||||
}
|
||||
|
||||
frame, err := TraceToFrame(otTrace)
|
||||
if err != nil {
|
||||
return &backend.QueryDataResponse{}, fmt.Errorf("failed to transform trace %v to data frame: %w", model.Query, err)
|
||||
}
|
||||
frame.RefID = refID
|
||||
frames := []*data.Frame{frame}
|
||||
queryRes.Frames = frames
|
||||
result.Responses[refID] = queryRes
|
||||
return result, nil
|
||||
return response, nil
|
||||
}
|
||||
|
||||
func (s *Service) createRequest(ctx context.Context, dsInfo *datasourceInfo, traceID string, start int64, end int64) (*http.Request, error) {
|
||||
var tempoQuery string
|
||||
if start == 0 || end == 0 {
|
||||
tempoQuery = fmt.Sprintf("%s/api/traces/%s", dsInfo.URL, traceID)
|
||||
} else {
|
||||
tempoQuery = fmt.Sprintf("%s/api/traces/%s?start=%d&end=%d", dsInfo.URL, traceID, start, end)
|
||||
func (s *Service) query(ctx context.Context, pCtx backend.PluginContext, query backend.DataQuery) (*backend.DataResponse, error) {
|
||||
if query.QueryType == string(dataquery.TempoQueryTypeTraceId) {
|
||||
return s.getTrace(ctx, pCtx, query)
|
||||
}
|
||||
|
||||
req, err := http.NewRequestWithContext(ctx, "GET", tempoQuery, nil)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
req.Header.Set("Accept", "application/protobuf")
|
||||
|
||||
s.tlog.FromContext(ctx).Debug("Tempo request", "url", req.URL.String(), "headers", req.Header)
|
||||
return req, nil
|
||||
return nil, fmt.Errorf("unsupported query type: '%s' for query with refID '%s'", query.QueryType, query.RefID)
|
||||
}
|
||||
|
||||
func (s *Service) getDSInfo(ctx context.Context, pluginCtx backend.PluginContext) (*datasourceInfo, error) {
|
||||
func (s *Service) getDSInfo(ctx context.Context, pluginCtx backend.PluginContext) (*Datasource, error) {
|
||||
i, err := s.im.Get(ctx, pluginCtx)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
instance, ok := i.(*datasourceInfo)
|
||||
instance, ok := i.(*Datasource)
|
||||
if !ok {
|
||||
return nil, fmt.Errorf("failed to cast datsource info")
|
||||
}
|
||||
|
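The QueryData refactor above replaces the single-query trace lookup with a loop that dispatches each incoming query by its QueryType (on this path only the new traceId type is handled; other types return an error). A condensed, self-contained sketch of that dispatch pattern, using stand-in types instead of the plugin SDK ones:

package main

import (
	"context"
	"errors"
	"fmt"
)

// Hypothetical stand-ins for the SDK query/response types.
type Query struct {
	RefID     string
	QueryType string
}

type Response struct{ Frames []string }

// getTrace stands in for the trace-by-ID lookup that moved into trace.go.
func getTrace(ctx context.Context, q Query) (*Response, error) {
	return &Response{Frames: []string{"trace frame for " + q.RefID}}, nil
}

// queryData loops over the queries and routes each one by its QueryType,
// collecting per-RefID responses, as the refactored QueryData does.
func queryData(ctx context.Context, queries []Query) (map[string]*Response, error) {
	responses := map[string]*Response{}
	for _, q := range queries {
		switch q.QueryType {
		case "traceId":
			res, err := getTrace(ctx, q)
			if err != nil {
				return responses, err
			}
			responses[q.RefID] = res
		default:
			return responses, errors.New("unsupported query type: " + q.QueryType)
		}
	}
	return responses, nil
}

func main() {
	res, err := queryData(context.Background(), []Query{{RefID: "A", QueryType: "traceId"}})
	fmt.Println(res["A"], err)
}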
pkg/tsdb/tempo/trace.go (new file, 90 lines)
@ -0,0 +1,90 @@
|
||||
package tempo
|
||||
|
||||
import (
|
||||
"context"
|
||||
"encoding/json"
|
||||
"fmt"
|
||||
"io"
|
||||
"net/http"
|
||||
|
||||
"github.com/grafana/grafana-plugin-sdk-go/backend"
|
||||
"github.com/grafana/grafana-plugin-sdk-go/data"
|
||||
"github.com/grafana/grafana/pkg/tsdb/tempo/kinds/dataquery"
|
||||
"go.opentelemetry.io/collector/model/otlp"
|
||||
)
|
||||
|
||||
func (s *Service) getTrace(ctx context.Context, pCtx backend.PluginContext, query backend.DataQuery) (*backend.DataResponse, error) {
|
||||
result := &backend.DataResponse{}
|
||||
refID := query.RefID
|
||||
|
||||
model := &dataquery.TempoQuery{}
|
||||
err := json.Unmarshal(query.JSON, model)
|
||||
if err != nil {
|
||||
return result, err
|
||||
}
|
||||
|
||||
dsInfo, err := s.getDSInfo(ctx, pCtx)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
request, err := s.createRequest(ctx, dsInfo, model.Query, query.TimeRange.From.Unix(), query.TimeRange.To.Unix())
|
||||
if err != nil {
|
||||
return result, err
|
||||
}
|
||||
|
||||
resp, err := dsInfo.HTTPClient.Do(request)
|
||||
if err != nil {
|
||||
return result, fmt.Errorf("failed get to tempo: %w", err)
|
||||
}
|
||||
|
||||
defer func() {
|
||||
if err := resp.Body.Close(); err != nil {
|
||||
s.logger.FromContext(ctx).Warn("failed to close response body", "err", err)
|
||||
}
|
||||
}()
|
||||
|
||||
body, err := io.ReadAll(resp.Body)
|
||||
if err != nil {
|
||||
return &backend.DataResponse{}, err
|
||||
}
|
||||
|
||||
if resp.StatusCode != http.StatusOK {
|
||||
result.Error = fmt.Errorf("failed to get trace with id: %s Status: %s Body: %s", model.Query, resp.Status, string(body))
|
||||
return result, nil
|
||||
}
|
||||
|
||||
otTrace, err := otlp.NewProtobufTracesUnmarshaler().UnmarshalTraces(body)
|
||||
|
||||
if err != nil {
|
||||
return &backend.DataResponse{}, fmt.Errorf("failed to convert tempo response to Otlp: %w", err)
|
||||
}
|
||||
|
||||
frame, err := TraceToFrame(otTrace)
|
||||
if err != nil {
|
||||
return &backend.DataResponse{}, fmt.Errorf("failed to transform trace %v to data frame: %w", model.Query, err)
|
||||
}
|
||||
frame.RefID = refID
|
||||
frames := []*data.Frame{frame}
|
||||
result.Frames = frames
|
||||
return result, nil
|
||||
}
|
||||
|
||||
func (s *Service) createRequest(ctx context.Context, dsInfo *Datasource, traceID string, start int64, end int64) (*http.Request, error) {
|
||||
var tempoQuery string
|
||||
if start == 0 || end == 0 {
|
||||
tempoQuery = fmt.Sprintf("%s/api/traces/%s", dsInfo.URL, traceID)
|
||||
} else {
|
||||
tempoQuery = fmt.Sprintf("%s/api/traces/%s?start=%d&end=%d", dsInfo.URL, traceID, start, end)
|
||||
}
|
||||
|
||||
req, err := http.NewRequestWithContext(ctx, "GET", tempoQuery, nil)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
req.Header.Set("Accept", "application/protobuf")
|
||||
|
||||
s.logger.FromContext(ctx).Debug("Tempo request", "url", req.URL.String(), "headers", req.Header)
|
||||
return req, nil
|
||||
}
|
@ -11,15 +11,15 @@ import (
|
||||
|
||||
func TestTempo(t *testing.T) {
|
||||
t.Run("createRequest without time range - success", func(t *testing.T) {
|
||||
service := &Service{tlog: log.New("tempo-test")}
|
||||
req, err := service.createRequest(context.Background(), &datasourceInfo{}, "traceID", 0, 0)
|
||||
service := &Service{logger: log.New("tempo-test")}
|
||||
req, err := service.createRequest(context.Background(), &Datasource{}, "traceID", 0, 0)
|
||||
require.NoError(t, err)
|
||||
assert.Equal(t, 1, len(req.Header))
|
||||
})
|
||||
|
||||
t.Run("createRequest with time range - success", func(t *testing.T) {
|
||||
service := &Service{tlog: log.New("tempo-test")}
|
||||
req, err := service.createRequest(context.Background(), &datasourceInfo{}, "traceID", 1, 2)
|
||||
service := &Service{logger: log.New("tempo-test")}
|
||||
req, err := service.createRequest(context.Background(), &Datasource{}, "traceID", 1, 2)
|
||||
require.NoError(t, err)
|
||||
assert.Equal(t, 1, len(req.Header))
|
||||
assert.Equal(t, "/api/traces/traceID?start=1&end=2", req.URL.String())
|
@ -7,8 +7,7 @@ import (
|
||||
|
||||
"github.com/grafana/grafana-plugin-sdk-go/data"
|
||||
"go.opentelemetry.io/collector/model/pdata"
|
||||
"go.opentelemetry.io/collector/translator/conventions"
|
||||
tracetranslator "go.opentelemetry.io/collector/translator/trace"
|
||||
semconv "go.opentelemetry.io/collector/model/semconv/v1.8.0"
|
||||
)
|
||||
|
||||
type KeyValue struct {
|
||||
@ -178,14 +177,14 @@ func spanToSpanRow(span pdata.Span, libraryTags pdata.InstrumentationLibrary, re
|
||||
|
||||
func resourceToProcess(resource pdata.Resource) (string, []*KeyValue) {
|
||||
attrs := resource.Attributes()
|
||||
serviceName := tracetranslator.ResourceNoServiceName
|
||||
serviceName := ResourceNoServiceName
|
||||
if attrs.Len() == 0 {
|
||||
return serviceName, nil
|
||||
}
|
||||
|
||||
tags := make([]*KeyValue, 0, attrs.Len()-1)
|
||||
attrs.Range(func(key string, attr pdata.AttributeValue) bool {
|
||||
if key == conventions.AttributeServiceName {
|
||||
if key == semconv.AttributeServiceName {
|
||||
serviceName = attr.StringVal()
|
||||
}
|
||||
tags = append(tags, &KeyValue{Key: key, Value: getAttributeVal(attr)})
|
||||
@ -206,7 +205,7 @@ func getAttributeVal(attr pdata.AttributeValue) interface{} {
|
||||
case pdata.AttributeValueTypeDouble:
|
||||
return attr.DoubleVal()
|
||||
case pdata.AttributeValueTypeMap, pdata.AttributeValueTypeArray:
|
||||
return tracetranslator.AttributeValueToString(attr)
|
||||
return attr.AsString()
|
||||
default:
|
||||
return nil
|
||||
}
|
||||
@ -225,15 +224,15 @@ func getSpanKind(spanKind pdata.SpanKind) string {
|
||||
var tagStr string
|
||||
switch spanKind {
|
||||
case pdata.SpanKindClient:
|
||||
tagStr = string(tracetranslator.OpenTracingSpanKindClient)
|
||||
tagStr = string(OpenTracingSpanKindClient)
|
||||
case pdata.SpanKindServer:
|
||||
tagStr = string(tracetranslator.OpenTracingSpanKindServer)
|
||||
tagStr = string(OpenTracingSpanKindServer)
|
||||
case pdata.SpanKindProducer:
|
||||
tagStr = string(tracetranslator.OpenTracingSpanKindProducer)
|
||||
tagStr = string(OpenTracingSpanKindProducer)
|
||||
case pdata.SpanKindConsumer:
|
||||
tagStr = string(tracetranslator.OpenTracingSpanKindConsumer)
|
||||
tagStr = string(OpenTracingSpanKindConsumer)
|
||||
case pdata.SpanKindInternal:
|
||||
tagStr = string(tracetranslator.OpenTracingSpanKindInternal)
|
||||
tagStr = string(OpenTracingSpanKindInternal)
|
||||
default:
|
||||
return ""
|
||||
}
|
||||
@ -259,7 +258,7 @@ func spanEventsToLogs(events pdata.SpanEventSlice) []*TraceLog {
|
||||
fields := make([]*KeyValue, 0, event.Attributes().Len()+1)
|
||||
if event.Name() != "" {
|
||||
fields = append(fields, &KeyValue{
|
||||
Key: tracetranslator.TagMessage,
|
||||
Key: TagMessage,
|
||||
Value: event.Name(),
|
||||
})
|
||||
}
|
||||
|
@ -38,13 +38,13 @@ export class TableContainer extends PureComponent<Props> {
|
||||
return frames?.filter((df) => df.meta?.custom?.parentRowIndex === undefined) || [frames?.[0]];
|
||||
}
|
||||
|
||||
getTableHeight(rowCount: number, isSingleTable = true) {
|
||||
getTableHeight(rowCount: number, hasSubFrames = true) {
|
||||
if (rowCount === 0) {
|
||||
return 200;
|
||||
}
|
||||
// tries to estimate table height, with a min of 300 and a max of 600
|
||||
// if there are multiple tables, there is no min
|
||||
return Math.min(600, Math.max(rowCount * 36, isSingleTable ? 300 : 0) + 40 + 46);
|
||||
return Math.min(600, Math.max(rowCount * 36, hasSubFrames ? 300 : 0) + 40 + 46);
|
||||
}
|
||||
|
||||
render() {
|
||||
@ -107,7 +107,7 @@ export class TableContainer extends PureComponent<Props> {
|
||||
key={data.main.refId || `table-${i}`}
|
||||
title={tableData.length > 1 ? `Table - ${data.main.name || data.main.refId || i}` : 'Table'}
|
||||
width={width}
|
||||
height={this.getTableHeight(data.main.length, tableData.length === 1)}
|
||||
height={this.getTableHeight(data.main.length, (data.sub?.length || 0) > 0)}
|
||||
loadingState={loading ? LoadingState.Loading : undefined}
|
||||
>
|
||||
{(innerWidth, innerHeight) => (
|
||||
|
@@ -46,11 +46,16 @@ composableKinds: DataQuery: {
		serviceMapQuery?: string
		// Defines the maximum number of traces that are returned from Tempo
		limit?: int64
		// Use the streaming API to get partial results as they are available
		streaming?: bool
		filters: [...#TraceqlFilter]
	} @cuetsy(kind="interface") @grafana(TSVeneer="type")

	// search = Loki search, nativeSearch = Tempo search for backwards compatibility
	#TempoQueryType: "traceql" | "traceqlSearch" | "search" | "serviceMap" | "upload" | "nativeSearch" | "clear" @cuetsy(kind="type")
	#TempoQueryType: "traceql" | "traceqlSearch" | "search" | "serviceMap" | "upload" | "nativeSearch" | "traceId" | "clear" @cuetsy(kind="type")

	// The state of the TraceQL streaming search query
	#SearchStreamingState: "pending" | "streaming" | "done" | "error" @cuetsy(kind="enum")

	// static fields are pre-set in the UI, dynamic fields are added by the user
	#TraceqlSearchScope: "unscoped" | "resource" | "span" @cuetsy(kind="enum")
@ -44,6 +44,10 @@ export interface TempoQuery extends common.DataQuery {
|
||||
* Query traces by span name
|
||||
*/
|
||||
spanName?: string;
|
||||
/**
|
||||
* Use the streaming API to get partial results as they are available
|
||||
*/
|
||||
streaming?: boolean;
|
||||
}
|
||||
|
||||
export const defaultTempoQuery: Partial<TempoQuery> = {
|
||||
@ -53,7 +57,17 @@ export const defaultTempoQuery: Partial<TempoQuery> = {
|
||||
/**
|
||||
* search = Loki search, nativeSearch = Tempo search for backwards compatibility
|
||||
*/
|
||||
export type TempoQueryType = ('traceql' | 'traceqlSearch' | 'search' | 'serviceMap' | 'upload' | 'nativeSearch' | 'clear');
|
||||
export type TempoQueryType = ('traceql' | 'traceqlSearch' | 'search' | 'serviceMap' | 'upload' | 'nativeSearch' | 'traceId' | 'clear');
|
||||
|
||||
/**
|
||||
* The state of the TraceQL streaming search query
|
||||
*/
|
||||
export enum SearchStreamingState {
|
||||
Done = 'done',
|
||||
Error = 'error',
|
||||
Pending = 'pending',
|
||||
Streaming = 'streaming',
|
||||
}
|
||||
|
||||
/**
|
||||
* static fields are pre-set in the UI, dynamic fields are added by the user
|
||||
|
@@ -58,6 +58,7 @@ import {
  createTableFrameFromSearch,
  createTableFrameFromTraceQlQuery,
} from './resultTransformer';
import { doTempoChannelStream } from './streaming';
import { SearchQueryParams, TempoQuery, TempoJsonData } from './types';
import { getErrorMessage } from './utils';

@@ -97,6 +98,7 @@ export class TempoDatasource extends DataSourceWithBackend<TempoQuery, TempoJson
    this.lokiSearch = instanceSettings.jsonData.lokiSearch;
    this.traceQuery = instanceSettings.jsonData.traceQuery;
    this.languageProvider = new TempoLanguageProvider(this);

    if (!this.search?.filters) {
      this.search = {
        ...this.search,
@@ -221,7 +223,49 @@ export class TempoDatasource extends DataSourceWithBackend<TempoQuery, TempoJson
            app: options.app ?? '',
            grafana_version: config.buildInfo.version,
            query: queryValue ?? '',
            streaming: appliedQuery.streaming,
          });

          if (appliedQuery.streaming) {
            subQueries.push(this.handleStreamingSearch(options, targets.traceql));
          } else {
            subQueries.push(
              this._request('/api/search', {
                q: queryValue,
                limit: options.targets[0].limit ?? DEFAULT_LIMIT,
                start: options.range.from.unix(),
                end: options.range.to.unix(),
              }).pipe(
                map((response) => {
                  return {
                    data: createTableFrameFromTraceQlQuery(response.data.traces, this.instanceSettings),
                  };
                }),
                catchError((err) => {
                  return of({ error: { message: getErrorMessage(err.data.message) }, data: [] });
                })
              )
            );
          }
        }
      } catch (error) {
        return of({ error: { message: error instanceof Error ? error.message : 'Unknown error occurred' }, data: [] });
      }
    }
    if (targets.traceqlSearch?.length) {
      try {
        const queryValue = generateQueryFromFilters(targets.traceqlSearch[0].filters);
        reportInteraction('grafana_traces_traceql_search_queried', {
          datasourceType: 'tempo',
          app: options.app ?? '',
          grafana_version: config.buildInfo.version,
          query: queryValue ?? '',
          streaming: targets.traceqlSearch[0].streaming,
        });

        if (targets.traceqlSearch[0].streaming) {
          subQueries.push(this.handleStreamingSearch(options, targets.traceqlSearch, queryValue));
        } else {
          subQueries.push(
            this._request('/api/search', {
              q: queryValue,
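The hunk above only shows how per-target sub-queries are pushed; how they are combined is outside this excerpt. As a rough sketch of how such an array of observables is typically merged into a single response stream (assuming RxJS, which the surrounding code already uses; the helper name is hypothetical):

import { merge, Observable, of } from 'rxjs';
import { DataQueryResponse } from '@grafana/data';

// Hypothetical helper: combine per-target sub-queries into one response stream so that
// each emission (streaming update or one-shot result) reaches the panel as it arrives.
function combineSubQueries(subQueries: Array<Observable<DataQueryResponse>>): Observable<DataQueryResponse> {
  if (subQueries.length === 0) {
    return of({ data: [] });
  }
  return merge(...subQueries);
}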
@@ -244,36 +288,6 @@ export class TempoDatasource extends DataSourceWithBackend<TempoQuery, TempoJson
        return of({ error: { message: error instanceof Error ? error.message : 'Unknown error occurred' }, data: [] });
      }
    }
    if (targets.traceqlSearch?.length) {
      try {
        const queryValue = generateQueryFromFilters(targets.traceqlSearch[0].filters);
        reportInteraction('grafana_traces_traceql_search_queried', {
          datasourceType: 'tempo',
          app: options.app ?? '',
          grafana_version: config.buildInfo.version,
          query: queryValue ?? '',
        });
        subQueries.push(
          this._request('/api/search', {
            q: queryValue,
            limit: options.targets[0].limit ?? DEFAULT_LIMIT,
            start: options.range.from.unix(),
            end: options.range.to.unix(),
          }).pipe(
            map((response) => {
              return {
                data: createTableFrameFromTraceQlQuery(response.data.traces, this.instanceSettings),
              };
            }),
            catchError((err) => {
              return of({ error: { message: getErrorMessage(err.data.message) }, data: [] });
            })
          )
        );
      } catch (error) {
        return of({ error: { message: error instanceof Error ? error.message : 'Unknown error occurred' }, data: [] });
      }
    }

    if (targets.upload?.length) {
      if (this.uploadedJson) {
@@ -370,7 +384,9 @@ export class TempoDatasource extends DataSourceWithBackend<TempoQuery, TempoJson
   * @private
   */
  handleTraceIdQuery(options: DataQueryRequest<TempoQuery>, targets: TempoQuery[]): Observable<DataQueryResponse> {
    const validTargets = targets.filter((t) => t.query).map((t) => ({ ...t, query: t.query.trim() }));
    const validTargets = targets
      .filter((t) => t.query)
      .map((t): TempoQuery => ({ ...t, query: t.query.trim(), queryType: 'traceId' }));
    if (!validTargets.length) {
      return EMPTY;
    }
@@ -409,6 +425,30 @@ export class TempoDatasource extends DataSourceWithBackend<TempoQuery, TempoJson
    return request;
  }

  handleStreamingSearch(
    options: DataQueryRequest<TempoQuery>,
    targets: TempoQuery[],
    query?: string
  ): Observable<DataQueryResponse> {
    const validTargets = targets
      .filter((t) => t.query || query)
      .map((t): TempoQuery => ({ ...t, query: query || t.query.trim() }));
    if (!validTargets.length) {
      return EMPTY;
    }

    return merge(
      ...validTargets.map((q) =>
        doTempoChannelStream(
          q,
          this, // the datasource
          options,
          this.instanceSettings
        )
      )
    );
  }

  async metadataRequest(url: string, params = {}) {
    return await lastValueFrom(this._request(url, params, { method: 'GET', hideFromInspector: true }));
  }
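As a rough usage sketch of handleStreamingSearch from a caller's perspective (only the method signature comes from the hunk above; the datasource stand-in, the request, and the target literal are hypothetical):

import { Observable } from 'rxjs';
import { DataQueryRequest, DataQueryResponse } from '@grafana/data';
import { TempoQuery } from './dataquery.gen'; // assumed relative path

// Hypothetical stand-ins: a TempoDatasource instance and the in-flight panel request.
declare const ds: {
  handleStreamingSearch(options: DataQueryRequest<TempoQuery>, targets: TempoQuery[]): Observable<DataQueryResponse>;
};
declare const request: DataQueryRequest<TempoQuery>;

const targets: TempoQuery[] = [{ refId: 'A', query: '{ duration > 2s }', streaming: true, filters: [] }];

// Each emission carries the streaming-progress frame plus the trace table built so far.
const sub = ds.handleStreamingSearch(request, targets).subscribe((response) => {
  console.log(response.state, response.data.length);
});

// Stop listening for further partial results.
sub.unsubscribe();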
@@ -575,6 +575,7 @@ export function createTableFrameFromTraceQlQuery(
  instanceSettings: DataSourceInstanceSettings
): DataFrame[] {
  const frame = new MutableDataFrame({
    name: 'Traces',
    fields: [
      {
        name: 'traceID',
@@ -773,17 +774,17 @@ function transformSpanToTraceData(span: Span, traceID: string): TraceTableData {
  };

  span.attributes?.forEach((attr) => {
    if (attr.value.boolValue) {
      data[attr.key] = attr.value.boolValue;
    if (attr.value.boolValue || attr.value.Value?.bool_value) {
      data[attr.key] = attr.value.boolValue || attr.value.Value?.bool_value;
    }
    if (attr.value.doubleValue) {
      data[attr.key] = attr.value.doubleValue;
    if (attr.value.doubleValue || attr.value.Value?.double_value) {
      data[attr.key] = attr.value.doubleValue || attr.value.Value?.double_value;
    }
    if (attr.value.intValue) {
      data[attr.key] = attr.value.intValue;
    if (attr.value.intValue || attr.value.Value?.int_value) {
      data[attr.key] = attr.value.intValue || attr.value.Value?.int_value;
    }
    if (attr.value.stringValue) {
      data[attr.key] = attr.value.stringValue;
    if (attr.value.stringValue || attr.value.Value?.string_value) {
      data[attr.key] = attr.value.stringValue || attr.value.Value?.string_value;
    }
  });
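The hunk above accepts both the camelCase encoding (boolValue) and the snake_case encoding nested under Value (bool_value), depending on which path produced the span. A compact alternative for the same lookup is sketched below; this is illustrative only, not the PR's code, and it uses ?? so that legitimate false and 0 attribute values are also preserved:

// Hypothetical helper mirroring the dual attribute encoding handled above.
type AttrValue = {
  stringValue?: string;
  intValue?: string;
  boolValue?: boolean;
  doubleValue?: string;
  Value?: { string_value?: string; int_value?: string; bool_value?: boolean; double_value?: string };
};

function attributeValue(value: AttrValue): string | boolean | undefined {
  return (
    value.stringValue ??
    value.Value?.string_value ??
    value.intValue ??
    value.Value?.int_value ??
    value.boolValue ??
    value.Value?.bool_value ??
    value.doubleValue ??
    value.Value?.double_value
  );
}

// attributeValue({ Value: { bool_value: false } }) === false, which a truthiness check would drop.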
public/app/plugins/datasource/tempo/streaming.ts (new file, 166 lines)
@@ -0,0 +1,166 @@
import { capitalize } from 'lodash';
import { map, Observable, defer, mergeMap } from 'rxjs';
import { v4 as uuidv4 } from 'uuid';

import {
  DataFrame,
  DataQueryRequest,
  DataQueryResponse,
  DataSourceInstanceSettings,
  FieldType,
  LiveChannelScope,
  LoadingState,
  MutableDataFrame,
  ThresholdsConfig,
  ThresholdsMode,
} from '@grafana/data';
import { getGrafanaLiveSrv } from '@grafana/runtime';

import { SearchStreamingState } from './dataquery.gen';
import { TempoDatasource } from './datasource';
import { createTableFrameFromTraceQlQuery } from './resultTransformer';
import { SearchMetrics, TempoJsonData, TempoQuery } from './types';

export async function getLiveStreamKey(): Promise<string> {
  return uuidv4();
}

export function doTempoChannelStream(
  query: TempoQuery,
  ds: TempoDatasource,
  options: DataQueryRequest<TempoQuery>,
  instanceSettings: DataSourceInstanceSettings<TempoJsonData>
): Observable<DataQueryResponse> {
  const range = options.range;

  let frames: DataFrame[] | undefined = undefined;
  let state: LoadingState = LoadingState.NotStarted;

  return defer(() => getLiveStreamKey()).pipe(
    mergeMap((key) => {
      return getGrafanaLiveSrv()
        .getStream<MutableDataFrame>({
          scope: LiveChannelScope.DataSource,
          namespace: ds.uid,
          path: `search/${key}`,
          data: {
            ...query,
            timeRange: {
              from: range.from.toISOString(),
              to: range.to.toISOString(),
            },
          },
        })
        .pipe(
          map((evt) => {
            if ('message' in evt && evt?.message) {
              // Schema should be [traces, metrics, state, error]
              const traces = evt.message.data.values[0][0];
              const metrics = evt.message.data.values[1][0];
              const frameState: SearchStreamingState = evt.message.data.values[2][0];
              const error = evt.message.data.values[3][0];

              switch (frameState) {
                case SearchStreamingState.Done:
                  state = LoadingState.Done;
                  break;
                case SearchStreamingState.Streaming:
                  state = LoadingState.Streaming;
                  break;
                case SearchStreamingState.Error:
                  throw new Error(error);
              }

              frames = [
                metricsDataFrame(metrics, frameState),
                ...createTableFrameFromTraceQlQuery(traces, instanceSettings),
              ];
            }
            return {
              data: frames || [],
              state,
            };
          })
        );
    })
  );
}
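For orientation, the mapping above reads each Live event as four single-row columns in the order the comment states. A hedged sketch of what such a payload could look like; the field names inside the traces and metrics objects are illustrative, based only on the types used elsewhere in this diff:

// Illustrative event data: values[0][0] = traces, [1][0] = metrics, [2][0] = state, [3][0] = error.
const exampleEventData = {
  values: [
    [[{ traceID: '2f3e0cee77ae5dc9c17ade3689eb2e54', rootServiceName: 'shop-backend' }]], // traces: one row holding the result array
    [{ totalBlocks: 10, completedJobs: 4, totalJobs: 16 }],                               // metrics (SearchMetrics)
    ['streaming'],                                                                         // state (SearchStreamingState)
    [''],                                                                                  // error message, empty while healthy
  ],
};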

function metricsDataFrame(metrics: SearchMetrics, state: SearchStreamingState) {
  const progressThresholds: ThresholdsConfig = {
    steps: [
      {
        color: 'blue',
        value: -Infinity,
      },
      {
        color: 'green',
        value: 75,
      },
    ],
    mode: ThresholdsMode.Absolute,
  };

  const frame: DataFrame = {
    refId: 'streaming-progress',
    name: 'Streaming Progress',
    length: 1,
    fields: [
      {
        name: 'state',
        type: FieldType.string,
        values: [capitalize(state.toString())],
        config: {
          displayNameFromDS: 'State',
        },
      },
      {
        name: 'totalBlocks',
        type: FieldType.number,
        values: [metrics.totalBlocks],
        config: {
          displayNameFromDS: 'Total Blocks',
        },
      },
      {
        name: 'completedJobs',
        type: FieldType.number,
        values: [metrics.completedJobs],
        config: {
          displayNameFromDS: 'Completed Jobs',
        },
      },
      {
        name: 'totalJobs',
        type: FieldType.number,
        values: [metrics.totalJobs],
        config: {
          displayNameFromDS: 'Total Jobs',
        },
      },
      {
        name: 'progress',
        type: FieldType.number,
        values: [
          state === SearchStreamingState.Done ? 100 : ((metrics.completedJobs || 0) / (metrics.totalJobs || 1)) * 100,
        ],
        config: {
          displayNameFromDS: 'Total Jobs',
          unit: 'percent',
          min: 0,
          max: 100,
          custom: {
            cellOptions: {
              type: 'gauge',
              mode: 'gradient',
            },
          },
          thresholds: progressThresholds,
        },
      },
    ],
    meta: {
      preferredVisualisationType: 'table',
    },
  };
  return frame;
}
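A quick worked example of the progress column above (numbers chosen for illustration): with completedJobs = 4 and totalJobs = 16 the gauge reads 25%, the `|| 1` fallback avoids dividing by zero before the first metrics arrive, and once the state is Done the value is pinned to 100.

// While streaming: (4 / 16) * 100 === 25; Done pins the gauge to 100.
const progress = (4 / 16) * 100; // 25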
@@ -1,7 +1,8 @@
import React from 'react';

import { EditorField, EditorRow } from '@grafana/experimental';
import { AutoSizeInput } from '@grafana/ui';
import { config } from '@grafana/runtime';
import { AutoSizeInput, Switch } from '@grafana/ui';
import { QueryOptionGroup } from 'app/plugins/datasource/prometheus/querybuilder/shared/QueryOptionGroup';

import { DEFAULT_LIMIT } from '../datasource';
@@ -17,14 +18,27 @@ export const TempoQueryBuilderOptions = React.memo<Props>(({ onChange, query })
    query.limit = DEFAULT_LIMIT;
  }

  if (!query.hasOwnProperty('streaming')) {
    query.streaming = true;
  }

  const onLimitChange = (e: React.FormEvent<HTMLInputElement>) => {
    onChange({ ...query, limit: parseInt(e.currentTarget.value, 10) });
  };

  const onStreamingChange = (e: React.FormEvent<HTMLInputElement>) => {
    onChange({ ...query, streaming: e.currentTarget.checked });
  };

  const collapsedInfoList = [`Limit: ${query.limit || DEFAULT_LIMIT}`];
  if (!config.featureToggles.disableTraceQLStreaming) {
    collapsedInfoList.push(`Streaming: ${query.streaming ? 'Yes' : 'No'}`);
  }

  return (
    <>
      <EditorRow>
        <QueryOptionGroup title="Options" collapsedInfo={[`Limit: ${query.limit || DEFAULT_LIMIT}`]}>
        <QueryOptionGroup title="Options" collapsedInfo={collapsedInfoList}>
          <EditorField label="Limit" tooltip="Maximum number of traces to return.">
            <AutoSizeInput
              className="width-4"
@@ -36,6 +50,11 @@ export const TempoQueryBuilderOptions = React.memo<Props>(({ onChange, query })
              value={query.limit}
            />
          </EditorField>
          {!config.featureToggles.disableTraceQLStreaming && (
            <EditorField label="Stream response" tooltip="Stream the query response to receive partial results sooner">
              <Switch value={query.streaming || false} onChange={onStreamingChange} />
            </EditorField>
          )}
        </QueryOptionGroup>
      </EditorRow>
    </>
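The editor above hides the switch when the disableTraceQLStreaming kill switch is on, while the per-query flag defaults to true. A hypothetical guard that combines both signals is sketched below; whether and where the shipped code applies such a combined check is outside this excerpt:

import { config } from '@grafana/runtime';
import { TempoQuery } from './dataquery.gen'; // assumed relative path

// Hypothetical guard: stream only when the query opts in and the kill switch is off.
export function shouldStream(query: TempoQuery): boolean {
  return Boolean(query.streaming) && !config.featureToggles.disableTraceQLStreaming;
}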
@@ -61,11 +61,10 @@ export type TraceSearchMetadata = {
export type SearchMetrics = {
  inspectedTraces?: number;
  inspectedBytes?: number;
  inspectedBlocks?: number;
  skippedBlocks?: number;
  skippedTraces?: number;
  totalBlocks?: number;
  completedJobs?: number;
  totalJobs?: number;
  totalBlockBytes?: number;
  spanSets?: Spanset[];
};

export enum SpanKind {
@@ -89,7 +88,18 @@ export type Span = {
  endTimeUnixNano?: string;
  attributes?: Array<{
    key: string;
    value: { stringValue?: string; intValue?: string; boolValue?: boolean; doubleValue?: string };
    value: {
      stringValue?: string;
      intValue?: string;
      boolValue?: boolean;
      doubleValue?: string;
      Value?: {
        string_value?: string;
        int_value?: string;
        bool_value?: boolean;
        double_value?: string;
      };
    };
  }>;
  dropped_attributes_count?: number;
};
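To make the widened attribute type concrete, a small illustrative value that exercises both encodings (the keys and values are made up):

// One attribute per encoding; both satisfy the widened `attributes` element type above.
const exampleAttributes = [
  { key: 'http.status_code', value: { intValue: '500' } },          // camelCase encoding
  { key: 'error', value: { Value: { bool_value: true } } },         // nested snake_case encoding
];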