From 43d0664340f3e3af219d7b5c747f486a073f5ce3 Mon Sep 17 00:00:00 2001 From: Kyle Brandt Date: Mon, 29 Jan 2024 15:22:17 -0500 Subject: [PATCH] Prometheus: (Experimental) Inject label matchers into queries (also change drone to fix ARM rpm build and Update Swagger) (#81396) - Feature Toggle is `promQLScope`. - Query property is: "scope": { "matchers": "{job=~\".*\"}" } Misc: - Also updates drone GO version to address ARM bug https://github.com/golang/go/issues/58425 - Also updates Swagger defs that were causing builds to fail --------- Co-authored-by: Kevin Minehart --- .drone.yml | 14 ++--- .../prometheus/dataquery/schema-reference.md | 35 ++++++----- .../feature-toggles/index.md | 1 + .../src/types/featureToggles.gen.ts | 1 + .../x/PrometheusDataQuery_types.gen.ts | 3 + pkg/services/featuremgmt/registry.go | 7 +++ pkg/services/featuremgmt/toggles_gen.csv | 1 + pkg/services/featuremgmt/toggles_gen.go | 4 ++ .../kinds/dataquery/types_dataquery_gen.go | 3 + pkg/tsdb/prometheus/models/query.go | 21 ++++++- pkg/tsdb/prometheus/models/query_test.go | 58 +++++++++---------- pkg/tsdb/prometheus/models/scope.go | 52 +++++++++++++++++ pkg/tsdb/prometheus/querydata/request.go | 4 +- public/api-enterprise-spec.json | 9 +++ public/api-merged.json | 9 +++ .../datasource/prometheus/dataquery.cue | 4 ++ .../datasource/prometheus/dataquery.gen.ts | 3 + public/openapi3.json | 9 +++ scripts/drone/steps/rgm.star | 1 + 19 files changed, 187 insertions(+), 52 deletions(-) create mode 100644 pkg/tsdb/prometheus/models/scope.go diff --git a/.drone.yml b/.drone.yml index c10598c368a..86a0013f759 100644 --- a/.drone.yml +++ b/.drone.yml @@ -710,9 +710,9 @@ steps: - /src/grafana-build artifacts -a docker:grafana:linux/amd64 -a docker:grafana:linux/amd64:ubuntu -a docker:grafana:linux/arm64 -a docker:grafana:linux/arm64:ubuntu -a docker:grafana:linux/arm/v7 -a docker:grafana:linux/arm/v7:ubuntu --yarn-cache=$$YARN_CACHE_FOLDER --build-id=$$DRONE_BUILD_NUMBER - --ubuntu-base=ubuntu:22.04 --alpine-base=alpine:3.18.5 --tag-format='{{ .version_base - }}-{{ .buildID }}-{{ .arch }}' --grafana-dir=$$PWD --ubuntu-tag-format='{{ .version_base - }}-{{ .buildID }}-ubuntu-{{ .arch }}' > docker.txt + --go-version=1.21.6 --ubuntu-base=ubuntu:22.04 --alpine-base=alpine:3.18.5 --tag-format='{{ + .version_base }}-{{ .buildID }}-{{ .arch }}' --grafana-dir=$$PWD --ubuntu-tag-format='{{ + .version_base }}-{{ .buildID }}-ubuntu-{{ .arch }}' > docker.txt - find ./dist -name '*docker*.tar.gz' -type f | xargs -n1 docker load -i depends_on: - yarn-install @@ -2009,9 +2009,9 @@ steps: - /src/grafana-build artifacts -a docker:grafana:linux/amd64 -a docker:grafana:linux/amd64:ubuntu -a docker:grafana:linux/arm64 -a docker:grafana:linux/arm64:ubuntu -a docker:grafana:linux/arm/v7 -a docker:grafana:linux/arm/v7:ubuntu --yarn-cache=$$YARN_CACHE_FOLDER --build-id=$$DRONE_BUILD_NUMBER - --ubuntu-base=ubuntu:22.04 --alpine-base=alpine:3.18.5 --tag-format='{{ .version_base - }}-{{ .buildID }}-{{ .arch }}' --grafana-dir=$$PWD --ubuntu-tag-format='{{ .version_base - }}-{{ .buildID }}-ubuntu-{{ .arch }}' > docker.txt + --go-version=1.21.6 --ubuntu-base=ubuntu:22.04 --alpine-base=alpine:3.18.5 --tag-format='{{ + .version_base }}-{{ .buildID }}-{{ .arch }}' --grafana-dir=$$PWD --ubuntu-tag-format='{{ + .version_base }}-{{ .buildID }}-ubuntu-{{ .arch }}' > docker.txt - find ./dist -name '*docker*.tar.gz' -type f | xargs -n1 docker load -i depends_on: - update-package-json-version @@ -4777,6 +4777,6 @@ kind: secret name: gcr_credentials --- kind: signature 
-hmac: 42c4eb79bab004d2916c7ab27b58e654300d2683345ea959bc052d1b3f107cd7 +hmac: 0e34c95370617ee9f721421913cbe1fe103c117e2912ac589953298246fd2012 ... diff --git a/docs/sources/developers/kinds/composable/prometheus/dataquery/schema-reference.md b/docs/sources/developers/kinds/composable/prometheus/dataquery/schema-reference.md index 07bc7b865de..028dea0ce18 100644 --- a/docs/sources/developers/kinds/composable/prometheus/dataquery/schema-reference.md +++ b/docs/sources/developers/kinds/composable/prometheus/dataquery/schema-reference.md @@ -18,19 +18,26 @@ title: PrometheusDataQuery kind -| Property | Type | Required | Default | Description | -|------------------|---------|----------|---------|---------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------| -| `expr` | string | **Yes** | | The actual expression/query that will be evaluated by Prometheus | -| `refId` | string | **Yes** | | A unique identifier for the query within the list of targets.
In server side expressions, the refId is used as a variable name to identify results.
By default, the UI will assign A->Z; however setting meaningful names may be useful. | -| `datasource` | | No | | For mixed data sources the selected datasource is on the query level.
For non mixed scenarios this is undefined.
TODO find a better way to do this ^ that's friendly to schema
TODO this shouldn't be unknown but DataSourceRef | null | -| `editorMode` | string | No | | Possible values are: `code`, `builder`. | -| `exemplar` | boolean | No | | Execute an additional query to identify interesting raw samples relevant for the given expr | -| `format` | string | No | | Possible values are: `time_series`, `table`, `heatmap`. | -| `hide` | boolean | No | | true if query is disabled (ie should not be returned to the dashboard)
Note this does not always imply that the query should not be executed since
the results from a hidden query may be used as the input to other queries (SSE etc) | -| `instant` | boolean | No | | Returns only the latest value that Prometheus has scraped for the requested time series | -| `intervalFactor` | number | No | | @deprecated Used to specify how many times to divide max data points by. We use max data points under query options
See https://github.com/grafana/grafana/issues/48081 | -| `legendFormat` | string | No | | Series name override or template. Ex. {{hostname}} will be replaced with label value for hostname | -| `queryType` | string | No | | Specify the query flavor
TODO make this required and give it a default | -| `range` | boolean | No | | Returns a Range vector, comprised of a set of time series containing a range of data points over time for each time series | +| Property | Type | Required | Default | Description | +|------------------|------------------|----------|---------|---------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------| +| `expr` | string | **Yes** | | The actual expression/query that will be evaluated by Prometheus | +| `refId` | string | **Yes** | | A unique identifier for the query within the list of targets.
In server side expressions, the refId is used as a variable name to identify results.
By default, the UI will assign A->Z; however setting meaningful names may be useful. | +| `datasource` | | No | | For mixed data sources the selected datasource is on the query level.
For non mixed scenarios this is undefined.
TODO find a better way to do this ^ that's friendly to schema
TODO this shouldn't be unknown but DataSourceRef | null | +| `editorMode` | string | No | | Possible values are: `code`, `builder`. | +| `exemplar` | boolean | No | | Execute an additional query to identify interesting raw samples relevant for the given expr | +| `format` | string | No | | Possible values are: `time_series`, `table`, `heatmap`. | +| `hide` | boolean | No | | true if query is disabled (ie should not be returned to the dashboard)
Note this does not always imply that the query should not be executed since
the results from a hidden query may be used as the input to other queries (SSE etc) | +| `instant` | boolean | No | | Returns only the latest value that Prometheus has scraped for the requested time series | +| `intervalFactor` | number | No | | @deprecated Used to specify how many times to divide max data points by. We use max data points under query options
See https://github.com/grafana/grafana/issues/48081 | +| `legendFormat` | string | No | | Series name override or template. Ex. {{hostname}} will be replaced with label value for hostname | +| `queryType` | string | No | | Specify the query flavor
TODO make this required and give it a default | +| `range` | boolean | No | | Returns a Range vector, comprised of a set of time series containing a range of data points over time for each time series | +| `scope` | [object](#scope) | No | | | + +### Scope + +| Property | Type | Required | Default | Description | +|------------|--------|----------|---------|-------------| +| `matchers` | string | **Yes** | | | diff --git a/docs/sources/setup-grafana/configure-grafana/feature-toggles/index.md b/docs/sources/setup-grafana/configure-grafana/feature-toggles/index.md index 1b2afe2355c..76c1cf01db0 100644 --- a/docs/sources/setup-grafana/configure-grafana/feature-toggles/index.md +++ b/docs/sources/setup-grafana/configure-grafana/feature-toggles/index.md @@ -174,6 +174,7 @@ Experimental features might be changed or removed without prior notice. | `enablePluginsTracingByDefault` | Enable plugin tracing for all external plugins | | `newFolderPicker` | Enables the nested folder picker without having nested folders enabled | | `onPremToCloudMigrations` | In-development feature that will allow users to easily migrate their on-prem Grafana instances to Grafana Cloud. | +| `promQLScope` | In-development feature that will allow injection of labels into prometheus queries. | ## Development feature toggles diff --git a/packages/grafana-data/src/types/featureToggles.gen.ts b/packages/grafana-data/src/types/featureToggles.gen.ts index 621225de4e3..336f7682319 100644 --- a/packages/grafana-data/src/types/featureToggles.gen.ts +++ b/packages/grafana-data/src/types/featureToggles.gen.ts @@ -175,4 +175,5 @@ export interface FeatureToggles { jitterAlertRulesWithinGroups?: boolean; onPremToCloudMigrations?: boolean; alertingSaveStatePeriodic?: boolean; + promQLScope?: boolean; } diff --git a/packages/grafana-schema/src/raw/composable/prometheus/dataquery/x/PrometheusDataQuery_types.gen.ts b/packages/grafana-schema/src/raw/composable/prometheus/dataquery/x/PrometheusDataQuery_types.gen.ts index d74af4a10aa..3a31b3270ea 100644 --- a/packages/grafana-schema/src/raw/composable/prometheus/dataquery/x/PrometheusDataQuery_types.gen.ts +++ b/packages/grafana-schema/src/raw/composable/prometheus/dataquery/x/PrometheusDataQuery_types.gen.ts @@ -54,4 +54,7 @@ export interface PrometheusDataQuery extends common.DataQuery { * Returns a Range vector, comprised of a set of time series containing a range of data points over time for each time series */ range?: boolean; + scope?: { + matchers: string; + }; } diff --git a/pkg/services/featuremgmt/registry.go b/pkg/services/featuremgmt/registry.go index 1dd8c7f2a5a..42fe4d64edc 100644 --- a/pkg/services/featuremgmt/registry.go +++ b/pkg/services/featuremgmt/registry.go @@ -1330,5 +1330,12 @@ var ( Owner: grafanaAlertingSquad, Created: time.Date(2024, time.January, 22, 12, 0, 0, 0, time.UTC), }, + { + Name: "promQLScope", + Description: "In-development feature that will allow injection of labels into prometheus queries.", + Stage: FeatureStageExperimental, + Owner: grafanaObservabilityMetricsSquad, + Created: time.Date(2024, time.January, 29, 0, 0, 0, 0, time.UTC), + }, } ) diff --git a/pkg/services/featuremgmt/toggles_gen.csv b/pkg/services/featuremgmt/toggles_gen.csv index 6da65ba3643..e6666814aa7 100644 --- a/pkg/services/featuremgmt/toggles_gen.csv +++ b/pkg/services/featuremgmt/toggles_gen.csv @@ -156,3 +156,4 @@ jitterAlertRules,GA,@grafana/alerting-squad,2024-01-17,false,true,false jitterAlertRulesWithinGroups,preview,@grafana/alerting-squad,2024-01-17,false,true,false 
onPremToCloudMigrations,experimental,@grafana/grafana-operator-experience-squad,2024-01-22,false,false,false alertingSaveStatePeriodic,privatePreview,@grafana/alerting-squad,2024-01-22,false,false,false +promQLScope,experimental,@grafana/observability-metrics,2024-01-29,false,false,false diff --git a/pkg/services/featuremgmt/toggles_gen.go b/pkg/services/featuremgmt/toggles_gen.go index ee666c677b2..fda85b72a03 100644 --- a/pkg/services/featuremgmt/toggles_gen.go +++ b/pkg/services/featuremgmt/toggles_gen.go @@ -634,4 +634,8 @@ const ( // FlagAlertingSaveStatePeriodic // Writes the state periodically to the database, asynchronous to rule evaluation FlagAlertingSaveStatePeriodic = "alertingSaveStatePeriodic" + + // FlagPromQLScope + // In-development feature that will allow injection of labels into prometheus queries. + FlagPromQLScope = "promQLScope" ) diff --git a/pkg/tsdb/prometheus/kinds/dataquery/types_dataquery_gen.go b/pkg/tsdb/prometheus/kinds/dataquery/types_dataquery_gen.go index 2112a08c93c..b31c9eee626 100644 --- a/pkg/tsdb/prometheus/kinds/dataquery/types_dataquery_gen.go +++ b/pkg/tsdb/prometheus/kinds/dataquery/types_dataquery_gen.go @@ -97,6 +97,9 @@ type PrometheusDataQuery struct { // In server side expressions, the refId is used as a variable name to identify results. // By default, the UI will assign A->Z; however setting meaningful names may be useful. RefId string `json:"refId"` + Scope *struct { + Matchers string `json:"matchers"` + } `json:"scope,omitempty"` } // QueryEditorMode defines model for QueryEditorMode. diff --git a/pkg/tsdb/prometheus/models/query.go b/pkg/tsdb/prometheus/models/query.go index 120f563cf01..da5ef0f9b1a 100644 --- a/pkg/tsdb/prometheus/models/query.go +++ b/pkg/tsdb/prometheus/models/query.go @@ -2,12 +2,15 @@ package models import ( "encoding/json" + "fmt" "math" "strconv" "strings" "time" "github.com/grafana/grafana-plugin-sdk-go/backend" + "github.com/prometheus/prometheus/model/labels" + "github.com/prometheus/prometheus/promql/parser" "github.com/grafana/grafana/pkg/tsdb/intervalv2" "github.com/grafana/grafana/pkg/tsdb/prometheus/kinds/dataquery" @@ -75,9 +78,14 @@ type Query struct { RangeQuery bool ExemplarQuery bool UtcOffsetSec int64 + Scope Scope } -func Parse(query backend.DataQuery, dsScrapeInterval string, intervalCalculator intervalv2.Calculator, fromAlert bool) (*Query, error) { +type Scope struct { + Matchers []*labels.Matcher +} + +func Parse(query backend.DataQuery, dsScrapeInterval string, intervalCalculator intervalv2.Calculator, fromAlert bool, enableScope bool) (*Query, error) { model := &QueryModel{} if err := json.Unmarshal(query.JSON, model); err != nil { return nil, err @@ -99,6 +107,17 @@ func Parse(query backend.DataQuery, dsScrapeInterval string, intervalCalculator dsScrapeInterval, timeRange, ) + var matchers []*labels.Matcher + if enableScope && model.Scope != nil && model.Scope.Matchers != "" { + matchers, err = parser.ParseMetricSelector(model.Scope.Matchers) + if err != nil { + return nil, fmt.Errorf("failed to parse metric selector %v in scope", model.Scope.Matchers) + } + expr, err = ApplyQueryScope(expr, matchers) + if err != nil { + return nil, err + } + } var rangeQuery, instantQuery bool if model.Instant == nil { instantQuery = false diff --git a/pkg/tsdb/prometheus/models/query_test.go b/pkg/tsdb/prometheus/models/query_test.go index dc82481048b..7f806ee9a9d 100644 --- a/pkg/tsdb/prometheus/models/query_test.go +++ b/pkg/tsdb/prometheus/models/query_test.go @@ -37,7 +37,7 @@ func TestParse(t 
*testing.T) { RefID: "A", } - res, err := models.Parse(q, "15s", intervalCalculator, true) + res, err := models.Parse(q, "15s", intervalCalculator, true, false) require.NoError(t, err) require.Equal(t, false, res.ExemplarQuery) }) @@ -54,7 +54,7 @@ func TestParse(t *testing.T) { "refId": "A" }`, timeRange, time.Duration(1)*time.Minute) - res, err := models.Parse(q, "15s", intervalCalculator, false) + res, err := models.Parse(q, "15s", intervalCalculator, false, false) require.NoError(t, err) require.Equal(t, time.Second*30, res.Step) }) @@ -72,7 +72,7 @@ func TestParse(t *testing.T) { "refId": "A" }`, timeRange, time.Duration(1)*time.Minute) - res, err := models.Parse(q, "15s", intervalCalculator, false) + res, err := models.Parse(q, "15s", intervalCalculator, false, false) require.NoError(t, err) require.Equal(t, time.Second*15, res.Step) }) @@ -90,7 +90,7 @@ func TestParse(t *testing.T) { "refId": "A" }`, timeRange, time.Duration(1)*time.Minute) - res, err := models.Parse(q, "15s", intervalCalculator, false) + res, err := models.Parse(q, "15s", intervalCalculator, false, false) require.NoError(t, err) require.Equal(t, time.Minute*20, res.Step) }) @@ -108,7 +108,7 @@ func TestParse(t *testing.T) { "refId": "A" }`, timeRange, time.Duration(1)*time.Minute) - res, err := models.Parse(q, "15s", intervalCalculator, false) + res, err := models.Parse(q, "15s", intervalCalculator, false, false) require.NoError(t, err) require.Equal(t, time.Minute*2, res.Step) }) @@ -126,7 +126,7 @@ func TestParse(t *testing.T) { "refId": "A" }`, timeRange, time.Duration(1)*time.Minute) - res, err := models.Parse(q, "240s", intervalCalculator, false) + res, err := models.Parse(q, "240s", intervalCalculator, false, false) require.NoError(t, err) require.Equal(t, time.Minute*4, res.Step) }) @@ -145,7 +145,7 @@ func TestParse(t *testing.T) { "refId": "A" }`, timeRange, time.Duration(1)*time.Minute) - res, err := models.Parse(q, "15s", intervalCalculator, false) + res, err := models.Parse(q, "15s", intervalCalculator, false, false) require.NoError(t, err) require.Equal(t, "rate(ALERTS{job=\"test\" [2m]})", res.Expr) require.Equal(t, 120*time.Second, res.Step) @@ -166,7 +166,7 @@ func TestParse(t *testing.T) { "refId": "A" }`, timeRange, time.Duration(1)*time.Minute) - res, err := models.Parse(q, "15s", intervalCalculator, false) + res, err := models.Parse(q, "15s", intervalCalculator, false, false) require.NoError(t, err) require.Equal(t, "rate(ALERTS{job=\"test\" [2m]})", res.Expr) }) @@ -185,7 +185,7 @@ func TestParse(t *testing.T) { "refId": "A" }`, timeRange, time.Duration(1)*time.Minute) - res, err := models.Parse(q, "15s", intervalCalculator, false) + res, err := models.Parse(q, "15s", intervalCalculator, false, false) require.NoError(t, err) require.Equal(t, "rate(ALERTS{job=\"test\" [120000]})", res.Expr) }) @@ -204,7 +204,7 @@ func TestParse(t *testing.T) { "refId": "A" }`, timeRange, time.Duration(1)*time.Minute) - res, err := models.Parse(q, "15s", intervalCalculator, false) + res, err := models.Parse(q, "15s", intervalCalculator, false, false) require.NoError(t, err) require.Equal(t, "rate(ALERTS{job=\"test\" [120000]}) + rate(ALERTS{job=\"test\" [2m]})", res.Expr) }) @@ -223,7 +223,7 @@ func TestParse(t *testing.T) { "refId": "A" }`, timeRange, time.Duration(1)*time.Minute) - res, err := models.Parse(q, "15s", intervalCalculator, false) + res, err := models.Parse(q, "15s", intervalCalculator, false, false) require.NoError(t, err) require.Equal(t, "rate(ALERTS{job=\"test\" [120000]}) + 
rate(ALERTS{job=\"test\" [2m]})", res.Expr) }) @@ -241,7 +241,7 @@ func TestParse(t *testing.T) { "refId": "A" }`, timeRange, time.Duration(1)*time.Minute) - res, err := models.Parse(q, "15s", intervalCalculator, false) + res, err := models.Parse(q, "15s", intervalCalculator, false, false) require.NoError(t, err) require.Equal(t, "rate(ALERTS{job=\"test\" [172800s]})", res.Expr) }) @@ -259,7 +259,7 @@ func TestParse(t *testing.T) { "refId": "A" }`, timeRange, time.Duration(1)*time.Minute) - res, err := models.Parse(q, "15s", intervalCalculator, false) + res, err := models.Parse(q, "15s", intervalCalculator, false, false) require.NoError(t, err) require.Equal(t, "rate(ALERTS{job=\"test\" [172800]})", res.Expr) }) @@ -277,7 +277,7 @@ func TestParse(t *testing.T) { "refId": "A" }`, timeRange, time.Duration(1)*time.Minute) - res, err := models.Parse(q, "15s", intervalCalculator, false) + res, err := models.Parse(q, "15s", intervalCalculator, false, false) require.NoError(t, err) require.Equal(t, "rate(ALERTS{job=\"test\" [172800s]})", res.Expr) }) @@ -295,7 +295,7 @@ func TestParse(t *testing.T) { "refId": "A" }`, timeRange, time.Duration(1)*time.Minute) - res, err := models.Parse(q, "15s", intervalCalculator, false) + res, err := models.Parse(q, "15s", intervalCalculator, false, false) require.NoError(t, err) require.Equal(t, "rate(ALERTS{job=\"test\" [0]})", res.Expr) }) @@ -313,7 +313,7 @@ func TestParse(t *testing.T) { "refId": "A" }`, timeRange, time.Duration(1)*time.Minute) - res, err := models.Parse(q, "15s", intervalCalculator, false) + res, err := models.Parse(q, "15s", intervalCalculator, false, false) require.NoError(t, err) require.Equal(t, "rate(ALERTS{job=\"test\" [1]})", res.Expr) }) @@ -331,7 +331,7 @@ func TestParse(t *testing.T) { "refId": "A" }`, timeRange, time.Duration(1)*time.Minute) - res, err := models.Parse(q, "15s", intervalCalculator, false) + res, err := models.Parse(q, "15s", intervalCalculator, false, false) require.NoError(t, err) require.Equal(t, "rate(ALERTS{job=\"test\" [172800000]})", res.Expr) }) @@ -349,7 +349,7 @@ func TestParse(t *testing.T) { "refId": "A" }`, timeRange, time.Duration(1)*time.Minute) - res, err := models.Parse(q, "15s", intervalCalculator, false) + res, err := models.Parse(q, "15s", intervalCalculator, false, false) require.NoError(t, err) require.Equal(t, "rate(ALERTS{job=\"test\" [20]})", res.Expr) }) @@ -368,7 +368,7 @@ func TestParse(t *testing.T) { "refId": "A" }`, timeRange, time.Duration(1)*time.Minute) - res, err := models.Parse(q, "15s", intervalCalculator, false) + res, err := models.Parse(q, "15s", intervalCalculator, false, false) require.NoError(t, err) require.Equal(t, "rate(ALERTS{job=\"test\" [20m0s]})", res.Expr) }) @@ -387,7 +387,7 @@ func TestParse(t *testing.T) { "refId": "A" }`, timeRange, 1*time.Minute) - res, err := models.Parse(q, "15s", intervalCalculator, false) + res, err := models.Parse(q, "15s", intervalCalculator, false, false) require.NoError(t, err) require.Equal(t, "rate(ALERTS{job=\"test\" [1m0s]})", res.Expr) require.Equal(t, 1*time.Minute, res.Step) @@ -406,7 +406,7 @@ func TestParse(t *testing.T) { "refId": "A" }`, timeRange, 2*time.Minute) - res, err := models.Parse(q, "15s", intervalCalculator, false) + res, err := models.Parse(q, "15s", intervalCalculator, false, false) require.NoError(t, err) require.Equal(t, "rate(ALERTS{job=\"test\" [135000]})", res.Expr) }) @@ -424,7 +424,7 @@ func TestParse(t *testing.T) { "refId": "A" }`, timeRange, 2*time.Minute) - res, err := models.Parse(q, "15s", 
intervalCalculator, false) + res, err := models.Parse(q, "15s", intervalCalculator, false, false) require.NoError(t, err) require.Equal(t, "rate(ALERTS{job=\"test\" [135000]}) + rate(ALERTS{job=\"test\" [2m15s]})", res.Expr) }) @@ -442,7 +442,7 @@ func TestParse(t *testing.T) { "refId": "A" }`, timeRange, 2*time.Minute) - res, err := models.Parse(q, "15s", intervalCalculator, false) + res, err := models.Parse(q, "15s", intervalCalculator, false, false) require.NoError(t, err) require.Equal(t, "rate(ALERTS{job=\"test\" [135000]}) + rate(ALERTS{job=\"test\" [2m15s]})", res.Expr) }) @@ -461,7 +461,7 @@ func TestParse(t *testing.T) { "range": true }`, timeRange, time.Duration(1)*time.Minute) - res, err := models.Parse(q, "15s", intervalCalculator, false) + res, err := models.Parse(q, "15s", intervalCalculator, false, false) require.NoError(t, err) require.Equal(t, true, res.RangeQuery) }) @@ -481,7 +481,7 @@ func TestParse(t *testing.T) { "instant": true }`, timeRange, time.Duration(1)*time.Minute) - res, err := models.Parse(q, "15s", intervalCalculator, false) + res, err := models.Parse(q, "15s", intervalCalculator, false, false) require.NoError(t, err) require.Equal(t, true, res.RangeQuery) require.Equal(t, true, res.InstantQuery) @@ -500,7 +500,7 @@ func TestParse(t *testing.T) { "refId": "A" }`, timeRange, time.Duration(1)*time.Minute) - res, err := models.Parse(q, "15s", intervalCalculator, false) + res, err := models.Parse(q, "15s", intervalCalculator, false, false) require.NoError(t, err) require.Equal(t, true, res.RangeQuery) }) @@ -631,7 +631,7 @@ func TestRateInterval(t *testing.T) { t.Run(tt.name, func(t *testing.T) { q := mockQuery(tt.args.expr, tt.args.interval, tt.args.intervalMs, tt.args.timeRange) q.MaxDataPoints = 12384 - res, err := models.Parse(q, tt.args.dsScrapeInterval, intervalCalculator, false) + res, err := models.Parse(q, tt.args.dsScrapeInterval, intervalCalculator, false, false) require.NoError(t, err) require.Equal(t, tt.want.Expr, res.Expr) require.Equal(t, tt.want.Step, res.Step) @@ -666,7 +666,7 @@ func TestRateInterval(t *testing.T) { "utcOffsetSec":3600 }`), } - res, err := models.Parse(query, "30s", intervalCalculator, false) + res, err := models.Parse(query, "30s", intervalCalculator, false, false) require.NoError(t, err) require.Equal(t, "sum(rate(process_cpu_seconds_total[2m0s]))", res.Expr) require.Equal(t, 30*time.Second, res.Step) @@ -701,7 +701,7 @@ func TestRateInterval(t *testing.T) { "maxDataPoints": 1055 }`), } - res, err := models.Parse(query, "15s", intervalCalculator, false) + res, err := models.Parse(query, "15s", intervalCalculator, false, false) require.NoError(t, err) require.Equal(t, "sum(rate(cache_requests_total[1m0s]))", res.Expr) require.Equal(t, 15*time.Second, res.Step) diff --git a/pkg/tsdb/prometheus/models/scope.go b/pkg/tsdb/prometheus/models/scope.go new file mode 100644 index 00000000000..c14dfdc550b --- /dev/null +++ b/pkg/tsdb/prometheus/models/scope.go @@ -0,0 +1,52 @@ +package models + +import ( + "github.com/prometheus/prometheus/model/labels" + "github.com/prometheus/prometheus/promql/parser" +) + +func ApplyQueryScope(rawExpr string, matchers []*labels.Matcher) (string, error) { + expr, err := parser.ParseExpr(rawExpr) + if err != nil { + return "", err + } + + matcherNamesToIdx := make(map[string]int, len(matchers)) + for i, matcher := range matchers { + if matcher == nil { + continue + } + matcherNamesToIdx[matcher.Name] = i + } + + parser.Inspect(expr, func(node parser.Node, nodes []parser.Node) error { + switch v := 
node.(type) { + case *parser.VectorSelector: + found := make([]bool, len(matchers)) + for _, matcher := range v.LabelMatchers { + if matcher == nil || matcher.Name == "__name__" { // const prob + continue + } + if _, ok := matcherNamesToIdx[matcher.Name]; ok { + found[matcherNamesToIdx[matcher.Name]] = true + newM := matchers[matcherNamesToIdx[matcher.Name]] + matcher.Name = newM.Name + matcher.Type = newM.Type + matcher.Value = newM.Value + } + } + for i, f := range found { + if f { + continue + } + v.LabelMatchers = append(v.LabelMatchers, matchers[i]) + } + + return nil + + default: + return nil + } + }) + return expr.String(), nil +} diff --git a/pkg/tsdb/prometheus/querydata/request.go b/pkg/tsdb/prometheus/querydata/request.go index 526065c1c77..455fdbb6bed 100644 --- a/pkg/tsdb/prometheus/querydata/request.go +++ b/pkg/tsdb/prometheus/querydata/request.go @@ -45,6 +45,7 @@ type QueryData struct { URL string TimeInterval string enableDataplane bool + enableScope bool exemplarSampler func() exemplar.Sampler } @@ -88,6 +89,7 @@ func New( URL: settings.URL, enableDataplane: features.IsEnabledGlobally(featuremgmt.FlagPrometheusDataplane), exemplarSampler: exemplarSampler, + enableScope: features.IsEnabledGlobally(featuremgmt.FlagPromQLScope), }, nil } @@ -98,7 +100,7 @@ func (s *QueryData) Execute(ctx context.Context, req *backend.QueryDataRequest) } for _, q := range req.Queries { - query, err := models.Parse(q, s.TimeInterval, s.intervalCalculator, fromAlert) + query, err := models.Parse(q, s.TimeInterval, s.intervalCalculator, fromAlert, s.enableScope) if err != nil { return &result, err } diff --git a/public/api-enterprise-spec.json b/public/api-enterprise-spec.json index 9f2f0d7e37c..3a27c7aa42e 100644 --- a/public/api-enterprise-spec.json +++ b/public/api-enterprise-spec.json @@ -4489,6 +4489,15 @@ }, "typeVersion": { "$ref": "#/definitions/FrameTypeVersion" + }, + "uniqueRowIdFields": { + "description": "Array of field indices which values create a unique id for each row. Ideally this should be globally unique ID\nbut that isn't guarantied. Should help with keeping track and deduplicating rows in visualizations, especially\nwith streaming data with frequent updates.", + "type": "array", + "items": { + "type": "integer", + "format": "int64" + }, + "example": "TraceID in Tempo, table name + primary key in SQL" } } }, diff --git a/public/api-merged.json b/public/api-merged.json index 5501bb68eee..e30d02d361b 100644 --- a/public/api-merged.json +++ b/public/api-merged.json @@ -15070,6 +15070,15 @@ }, "typeVersion": { "$ref": "#/definitions/FrameTypeVersion" + }, + "uniqueRowIdFields": { + "description": "Array of field indices which values create a unique id for each row. Ideally this should be globally unique ID\nbut that isn't guarantied. 
Should help with keeping track and deduplicating rows in visualizations, especially\nwith streaming data with frequent updates.", + "type": "array", + "items": { + "type": "integer", + "format": "int64" + }, + "example": "TraceID in Tempo, table name + primary key in SQL" } } }, diff --git a/public/app/plugins/datasource/prometheus/dataquery.cue b/public/app/plugins/datasource/prometheus/dataquery.cue index 934c72263f2..177af1929c6 100644 --- a/public/app/plugins/datasource/prometheus/dataquery.cue +++ b/public/app/plugins/datasource/prometheus/dataquery.cue @@ -45,6 +45,10 @@ composableKinds: DataQuery: { // See https://github.com/grafana/grafana/issues/48081 intervalFactor?: number + scope?: { + matchers: string + } + #QueryEditorMode: "code" | "builder" @cuetsy(kind="enum") #PromQueryFormat: "time_series" | "table" | "heatmap" @cuetsy(kind="type") } diff --git a/public/app/plugins/datasource/prometheus/dataquery.gen.ts b/public/app/plugins/datasource/prometheus/dataquery.gen.ts index 65e4714dfbd..3ba622cecff 100644 --- a/public/app/plugins/datasource/prometheus/dataquery.gen.ts +++ b/public/app/plugins/datasource/prometheus/dataquery.gen.ts @@ -51,4 +51,7 @@ export interface Prometheus extends common.DataQuery { * Returns a Range vector, comprised of a set of time series containing a range of data points over time for each time series */ range?: boolean; + scope?: { + matchers: string; + }; } diff --git a/public/openapi3.json b/public/openapi3.json index a57e9fe0dac..2d6c2d08411 100644 --- a/public/openapi3.json +++ b/public/openapi3.json @@ -5566,6 +5566,15 @@ }, "typeVersion": { "$ref": "#/components/schemas/FrameTypeVersion" + }, + "uniqueRowIdFields": { + "description": "Array of field indices which values create a unique id for each row. Ideally this should be globally unique ID\nbut that isn't guarantied. Should help with keeping track and deduplicating rows in visualizations, especially\nwith streaming data with frequent updates.", + "example": "TraceID in Tempo, table name + primary key in SQL", + "items": { + "format": "int64", + "type": "integer" + }, + "type": "array" } }, "title": "FrameMeta matches:", diff --git a/scripts/drone/steps/rgm.star b/scripts/drone/steps/rgm.star index 393f62f49ab..a58610269ca 100644 --- a/scripts/drone/steps/rgm.star +++ b/scripts/drone/steps/rgm.star @@ -57,6 +57,7 @@ def rgm_build_docker_step(ubuntu, alpine, depends_on = ["yarn-install"], file = "-a docker:grafana:linux/arm/v7:ubuntu " + "--yarn-cache=$$YARN_CACHE_FOLDER " + "--build-id=$$DRONE_BUILD_NUMBER " + + "--go-version={} ".format(golang_version) + "--ubuntu-base={} ".format(ubuntu) + "--alpine-base={} ".format(alpine) + "--tag-format='{}' ".format(tag_format) +
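
For context on the behaviour this patch introduces, the following is a minimal, self-contained sketch (illustrative only, not code from the patch) of how the new `scope.matchers` string can be parsed with the Prometheus PromQL parser and injected into every vector selector of a query, roughly mirroring what `ApplyQueryScope` and the `enableScope` branch of `models.Parse` do when the `promQLScope` toggle is on. The helper name `injectScope` and the example query are assumptions made for the demonstration.

package main

import (
	"fmt"

	"github.com/prometheus/prometheus/model/labels"
	"github.com/prometheus/prometheus/promql/parser"
)

// injectScope is a simplified sketch of the scope-injection behaviour:
// every vector selector in the expression gains the scope matchers, and a
// matcher with the same label name is overwritten rather than duplicated.
func injectScope(rawExpr, scopeMatchers string) (string, error) {
	matchers, err := parser.ParseMetricSelector(scopeMatchers)
	if err != nil {
		return "", fmt.Errorf("failed to parse scope matchers %q: %w", scopeMatchers, err)
	}

	expr, err := parser.ParseExpr(rawExpr)
	if err != nil {
		return "", err
	}

	parser.Inspect(expr, func(node parser.Node, _ []parser.Node) error {
		vs, ok := node.(*parser.VectorSelector)
		if !ok {
			return nil
		}
		for _, m := range matchers {
			replaced := false
			for _, existing := range vs.LabelMatchers {
				// Never touch the metric-name matcher; overwrite other
				// matchers that share the scope matcher's label name.
				if existing.Name == m.Name && existing.Name != labels.MetricName {
					existing.Type, existing.Value = m.Type, m.Value
					replaced = true
				}
			}
			if !replaced {
				vs.LabelMatchers = append(vs.LabelMatchers, m)
			}
		}
		return nil
	})

	return expr.String(), nil
}

func main() {
	out, err := injectScope(`sum(rate(http_requests_total{code="200"}[5m]))`, `{job="api", cluster="prod"}`)
	if err != nil {
		panic(err)
	}
	// Prints the scoped expression, e.g.
	// sum(rate(http_requests_total{code="200",job="api",cluster="prod"}[5m]))
	fmt.Println(out)
}

With a data query whose JSON carries "scope": {"matchers": "{job=~\".*\"}"} (as in the commit message above), the backend applies this rewrite before sending the expression to Prometheus, so the scope labels constrain every selector in the query.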