Prometheus: (Experimental) Inject label matchers into queries (also change drone to fix ARM rpm build and Update Swagger) (#81396)

- Feature Toggle is `promQLScope`.
 - Query property is:

"scope": {
  "matchers": "{job=~\".*\"}"
 }

Misc:
 - Also updates drone GO version to address ARM bug https://github.com/golang/go/issues/58425
 - Also updates Swagger defs that were causing builds to fail

---------

Co-authored-by: Kevin Minehart <kmineh0151@gmail.com>
This commit is contained in:
Kyle Brandt 2024-01-29 15:22:17 -05:00 committed by GitHub
parent c2b64c6739
commit 43d0664340
No known key found for this signature in database
GPG Key ID: B5690EEEBB952194
19 changed files with 187 additions and 52 deletions

View File

@ -710,9 +710,9 @@ steps:
- /src/grafana-build artifacts -a docker:grafana:linux/amd64 -a docker:grafana:linux/amd64:ubuntu
-a docker:grafana:linux/arm64 -a docker:grafana:linux/arm64:ubuntu -a docker:grafana:linux/arm/v7
-a docker:grafana:linux/arm/v7:ubuntu --yarn-cache=$$YARN_CACHE_FOLDER --build-id=$$DRONE_BUILD_NUMBER
--ubuntu-base=ubuntu:22.04 --alpine-base=alpine:3.18.5 --tag-format='{{ .version_base
}}-{{ .buildID }}-{{ .arch }}' --grafana-dir=$$PWD --ubuntu-tag-format='{{ .version_base
}}-{{ .buildID }}-ubuntu-{{ .arch }}' > docker.txt
--go-version=1.21.6 --ubuntu-base=ubuntu:22.04 --alpine-base=alpine:3.18.5 --tag-format='{{
.version_base }}-{{ .buildID }}-{{ .arch }}' --grafana-dir=$$PWD --ubuntu-tag-format='{{
.version_base }}-{{ .buildID }}-ubuntu-{{ .arch }}' > docker.txt
- find ./dist -name '*docker*.tar.gz' -type f | xargs -n1 docker load -i
depends_on:
- yarn-install
@ -2009,9 +2009,9 @@ steps:
- /src/grafana-build artifacts -a docker:grafana:linux/amd64 -a docker:grafana:linux/amd64:ubuntu
-a docker:grafana:linux/arm64 -a docker:grafana:linux/arm64:ubuntu -a docker:grafana:linux/arm/v7
-a docker:grafana:linux/arm/v7:ubuntu --yarn-cache=$$YARN_CACHE_FOLDER --build-id=$$DRONE_BUILD_NUMBER
--ubuntu-base=ubuntu:22.04 --alpine-base=alpine:3.18.5 --tag-format='{{ .version_base
}}-{{ .buildID }}-{{ .arch }}' --grafana-dir=$$PWD --ubuntu-tag-format='{{ .version_base
}}-{{ .buildID }}-ubuntu-{{ .arch }}' > docker.txt
--go-version=1.21.6 --ubuntu-base=ubuntu:22.04 --alpine-base=alpine:3.18.5 --tag-format='{{
.version_base }}-{{ .buildID }}-{{ .arch }}' --grafana-dir=$$PWD --ubuntu-tag-format='{{
.version_base }}-{{ .buildID }}-ubuntu-{{ .arch }}' > docker.txt
- find ./dist -name '*docker*.tar.gz' -type f | xargs -n1 docker load -i
depends_on:
- update-package-json-version
@ -4777,6 +4777,6 @@ kind: secret
name: gcr_credentials
---
kind: signature
hmac: 42c4eb79bab004d2916c7ab27b58e654300d2683345ea959bc052d1b3f107cd7
hmac: 0e34c95370617ee9f721421913cbe1fe103c117e2912ac589953298246fd2012
...

View File

@ -19,7 +19,7 @@ title: PrometheusDataQuery kind
| Property | Type | Required | Default | Description |
|------------------|---------|----------|---------|---------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------|
|------------------|------------------|----------|---------|---------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------|
| `expr` | string | **Yes** | | The actual expression/query that will be evaluated by Prometheus |
| `refId` | string | **Yes** | | A unique identifier for the query within the list of targets.<br/>In server side expressions, the refId is used as a variable name to identify results.<br/>By default, the UI will assign A->Z; however setting meaningful names may be useful. |
| `datasource` | | No | | For mixed data sources the selected datasource is on the query level.<br/>For non mixed scenarios this is undefined.<br/>TODO find a better way to do this ^ that's friendly to schema<br/>TODO this shouldn't be unknown but DataSourceRef &#124; null |
@ -32,5 +32,12 @@ title: PrometheusDataQuery kind
| `legendFormat` | string | No | | Series name override or template. Ex. {{hostname}} will be replaced with label value for hostname |
| `queryType` | string | No | | Specify the query flavor<br/>TODO make this required and give it a default |
| `range` | boolean | No | | Returns a Range vector, comprised of a set of time series containing a range of data points over time for each time series |
| `scope` | [object](#scope) | No | | |
### Scope
| Property | Type | Required | Default | Description |
|------------|--------|----------|---------|-------------|
| `matchers` | string | **Yes** | | |

View File

@ -174,6 +174,7 @@ Experimental features might be changed or removed without prior notice.
| `enablePluginsTracingByDefault` | Enable plugin tracing for all external plugins |
| `newFolderPicker` | Enables the nested folder picker without having nested folders enabled |
| `onPremToCloudMigrations` | In-development feature that will allow users to easily migrate their on-prem Grafana instances to Grafana Cloud. |
| `promQLScope` | In-development feature that will allow injection of labels into prometheus queries. |
## Development feature toggles

View File

@ -175,4 +175,5 @@ export interface FeatureToggles {
jitterAlertRulesWithinGroups?: boolean;
onPremToCloudMigrations?: boolean;
alertingSaveStatePeriodic?: boolean;
promQLScope?: boolean;
}

View File

@ -54,4 +54,7 @@ export interface PrometheusDataQuery extends common.DataQuery {
* Returns a Range vector, comprised of a set of time series containing a range of data points over time for each time series
*/
range?: boolean;
scope?: {
matchers: string;
};
}

View File

@ -1330,5 +1330,12 @@ var (
Owner: grafanaAlertingSquad,
Created: time.Date(2024, time.January, 22, 12, 0, 0, 0, time.UTC),
},
{
Name: "promQLScope",
Description: "In-development feature that will allow injection of labels into prometheus queries.",
Stage: FeatureStageExperimental,
Owner: grafanaObservabilityMetricsSquad,
Created: time.Date(2024, time.January, 29, 0, 0, 0, 0, time.UTC),
},
}
)

View File

@ -156,3 +156,4 @@ jitterAlertRules,GA,@grafana/alerting-squad,2024-01-17,false,true,false
jitterAlertRulesWithinGroups,preview,@grafana/alerting-squad,2024-01-17,false,true,false
onPremToCloudMigrations,experimental,@grafana/grafana-operator-experience-squad,2024-01-22,false,false,false
alertingSaveStatePeriodic,privatePreview,@grafana/alerting-squad,2024-01-22,false,false,false
promQLScope,experimental,@grafana/observability-metrics,2024-01-29,false,false,false

1 Name Stage Owner Created requiresDevMode RequiresRestart FrontendOnly
156 jitterAlertRulesWithinGroups preview @grafana/alerting-squad 2024-01-17 false true false
157 onPremToCloudMigrations experimental @grafana/grafana-operator-experience-squad 2024-01-22 false false false
158 alertingSaveStatePeriodic privatePreview @grafana/alerting-squad 2024-01-22 false false false
159 promQLScope experimental @grafana/observability-metrics 2024-01-29 false false false

View File

@ -634,4 +634,8 @@ const (
// FlagAlertingSaveStatePeriodic
// Writes the state periodically to the database, asynchronous to rule evaluation
FlagAlertingSaveStatePeriodic = "alertingSaveStatePeriodic"
// FlagPromQLScope
// In-development feature that will allow injection of labels into prometheus queries.
FlagPromQLScope = "promQLScope"
)

View File

@ -97,6 +97,9 @@ type PrometheusDataQuery struct {
// In server side expressions, the refId is used as a variable name to identify results.
// By default, the UI will assign A->Z; however setting meaningful names may be useful.
RefId string `json:"refId"`
Scope *struct {
Matchers string `json:"matchers"`
} `json:"scope,omitempty"`
}
// QueryEditorMode defines model for QueryEditorMode.

View File

@ -2,12 +2,15 @@ package models
import (
"encoding/json"
"fmt"
"math"
"strconv"
"strings"
"time"
"github.com/grafana/grafana-plugin-sdk-go/backend"
"github.com/prometheus/prometheus/model/labels"
"github.com/prometheus/prometheus/promql/parser"
"github.com/grafana/grafana/pkg/tsdb/intervalv2"
"github.com/grafana/grafana/pkg/tsdb/prometheus/kinds/dataquery"
@ -75,9 +78,14 @@ type Query struct {
RangeQuery bool
ExemplarQuery bool
UtcOffsetSec int64
Scope Scope
}
func Parse(query backend.DataQuery, dsScrapeInterval string, intervalCalculator intervalv2.Calculator, fromAlert bool) (*Query, error) {
type Scope struct {
Matchers []*labels.Matcher
}
func Parse(query backend.DataQuery, dsScrapeInterval string, intervalCalculator intervalv2.Calculator, fromAlert bool, enableScope bool) (*Query, error) {
model := &QueryModel{}
if err := json.Unmarshal(query.JSON, model); err != nil {
return nil, err
@ -99,6 +107,17 @@ func Parse(query backend.DataQuery, dsScrapeInterval string, intervalCalculator
dsScrapeInterval,
timeRange,
)
var matchers []*labels.Matcher
if enableScope && model.Scope != nil && model.Scope.Matchers != "" {
matchers, err = parser.ParseMetricSelector(model.Scope.Matchers)
if err != nil {
return nil, fmt.Errorf("failed to parse metric selector %v in scope", model.Scope.Matchers)
}
expr, err = ApplyQueryScope(expr, matchers)
if err != nil {
return nil, err
}
}
var rangeQuery, instantQuery bool
if model.Instant == nil {
instantQuery = false

View File

@ -37,7 +37,7 @@ func TestParse(t *testing.T) {
RefID: "A",
}
res, err := models.Parse(q, "15s", intervalCalculator, true)
res, err := models.Parse(q, "15s", intervalCalculator, true, false)
require.NoError(t, err)
require.Equal(t, false, res.ExemplarQuery)
})
@ -54,7 +54,7 @@ func TestParse(t *testing.T) {
"refId": "A"
}`, timeRange, time.Duration(1)*time.Minute)
res, err := models.Parse(q, "15s", intervalCalculator, false)
res, err := models.Parse(q, "15s", intervalCalculator, false, false)
require.NoError(t, err)
require.Equal(t, time.Second*30, res.Step)
})
@ -72,7 +72,7 @@ func TestParse(t *testing.T) {
"refId": "A"
}`, timeRange, time.Duration(1)*time.Minute)
res, err := models.Parse(q, "15s", intervalCalculator, false)
res, err := models.Parse(q, "15s", intervalCalculator, false, false)
require.NoError(t, err)
require.Equal(t, time.Second*15, res.Step)
})
@ -90,7 +90,7 @@ func TestParse(t *testing.T) {
"refId": "A"
}`, timeRange, time.Duration(1)*time.Minute)
res, err := models.Parse(q, "15s", intervalCalculator, false)
res, err := models.Parse(q, "15s", intervalCalculator, false, false)
require.NoError(t, err)
require.Equal(t, time.Minute*20, res.Step)
})
@ -108,7 +108,7 @@ func TestParse(t *testing.T) {
"refId": "A"
}`, timeRange, time.Duration(1)*time.Minute)
res, err := models.Parse(q, "15s", intervalCalculator, false)
res, err := models.Parse(q, "15s", intervalCalculator, false, false)
require.NoError(t, err)
require.Equal(t, time.Minute*2, res.Step)
})
@ -126,7 +126,7 @@ func TestParse(t *testing.T) {
"refId": "A"
}`, timeRange, time.Duration(1)*time.Minute)
res, err := models.Parse(q, "240s", intervalCalculator, false)
res, err := models.Parse(q, "240s", intervalCalculator, false, false)
require.NoError(t, err)
require.Equal(t, time.Minute*4, res.Step)
})
@ -145,7 +145,7 @@ func TestParse(t *testing.T) {
"refId": "A"
}`, timeRange, time.Duration(1)*time.Minute)
res, err := models.Parse(q, "15s", intervalCalculator, false)
res, err := models.Parse(q, "15s", intervalCalculator, false, false)
require.NoError(t, err)
require.Equal(t, "rate(ALERTS{job=\"test\" [2m]})", res.Expr)
require.Equal(t, 120*time.Second, res.Step)
@ -166,7 +166,7 @@ func TestParse(t *testing.T) {
"refId": "A"
}`, timeRange, time.Duration(1)*time.Minute)
res, err := models.Parse(q, "15s", intervalCalculator, false)
res, err := models.Parse(q, "15s", intervalCalculator, false, false)
require.NoError(t, err)
require.Equal(t, "rate(ALERTS{job=\"test\" [2m]})", res.Expr)
})
@ -185,7 +185,7 @@ func TestParse(t *testing.T) {
"refId": "A"
}`, timeRange, time.Duration(1)*time.Minute)
res, err := models.Parse(q, "15s", intervalCalculator, false)
res, err := models.Parse(q, "15s", intervalCalculator, false, false)
require.NoError(t, err)
require.Equal(t, "rate(ALERTS{job=\"test\" [120000]})", res.Expr)
})
@ -204,7 +204,7 @@ func TestParse(t *testing.T) {
"refId": "A"
}`, timeRange, time.Duration(1)*time.Minute)
res, err := models.Parse(q, "15s", intervalCalculator, false)
res, err := models.Parse(q, "15s", intervalCalculator, false, false)
require.NoError(t, err)
require.Equal(t, "rate(ALERTS{job=\"test\" [120000]}) + rate(ALERTS{job=\"test\" [2m]})", res.Expr)
})
@ -223,7 +223,7 @@ func TestParse(t *testing.T) {
"refId": "A"
}`, timeRange, time.Duration(1)*time.Minute)
res, err := models.Parse(q, "15s", intervalCalculator, false)
res, err := models.Parse(q, "15s", intervalCalculator, false, false)
require.NoError(t, err)
require.Equal(t, "rate(ALERTS{job=\"test\" [120000]}) + rate(ALERTS{job=\"test\" [2m]})", res.Expr)
})
@ -241,7 +241,7 @@ func TestParse(t *testing.T) {
"refId": "A"
}`, timeRange, time.Duration(1)*time.Minute)
res, err := models.Parse(q, "15s", intervalCalculator, false)
res, err := models.Parse(q, "15s", intervalCalculator, false, false)
require.NoError(t, err)
require.Equal(t, "rate(ALERTS{job=\"test\" [172800s]})", res.Expr)
})
@ -259,7 +259,7 @@ func TestParse(t *testing.T) {
"refId": "A"
}`, timeRange, time.Duration(1)*time.Minute)
res, err := models.Parse(q, "15s", intervalCalculator, false)
res, err := models.Parse(q, "15s", intervalCalculator, false, false)
require.NoError(t, err)
require.Equal(t, "rate(ALERTS{job=\"test\" [172800]})", res.Expr)
})
@ -277,7 +277,7 @@ func TestParse(t *testing.T) {
"refId": "A"
}`, timeRange, time.Duration(1)*time.Minute)
res, err := models.Parse(q, "15s", intervalCalculator, false)
res, err := models.Parse(q, "15s", intervalCalculator, false, false)
require.NoError(t, err)
require.Equal(t, "rate(ALERTS{job=\"test\" [172800s]})", res.Expr)
})
@ -295,7 +295,7 @@ func TestParse(t *testing.T) {
"refId": "A"
}`, timeRange, time.Duration(1)*time.Minute)
res, err := models.Parse(q, "15s", intervalCalculator, false)
res, err := models.Parse(q, "15s", intervalCalculator, false, false)
require.NoError(t, err)
require.Equal(t, "rate(ALERTS{job=\"test\" [0]})", res.Expr)
})
@ -313,7 +313,7 @@ func TestParse(t *testing.T) {
"refId": "A"
}`, timeRange, time.Duration(1)*time.Minute)
res, err := models.Parse(q, "15s", intervalCalculator, false)
res, err := models.Parse(q, "15s", intervalCalculator, false, false)
require.NoError(t, err)
require.Equal(t, "rate(ALERTS{job=\"test\" [1]})", res.Expr)
})
@ -331,7 +331,7 @@ func TestParse(t *testing.T) {
"refId": "A"
}`, timeRange, time.Duration(1)*time.Minute)
res, err := models.Parse(q, "15s", intervalCalculator, false)
res, err := models.Parse(q, "15s", intervalCalculator, false, false)
require.NoError(t, err)
require.Equal(t, "rate(ALERTS{job=\"test\" [172800000]})", res.Expr)
})
@ -349,7 +349,7 @@ func TestParse(t *testing.T) {
"refId": "A"
}`, timeRange, time.Duration(1)*time.Minute)
res, err := models.Parse(q, "15s", intervalCalculator, false)
res, err := models.Parse(q, "15s", intervalCalculator, false, false)
require.NoError(t, err)
require.Equal(t, "rate(ALERTS{job=\"test\" [20]})", res.Expr)
})
@ -368,7 +368,7 @@ func TestParse(t *testing.T) {
"refId": "A"
}`, timeRange, time.Duration(1)*time.Minute)
res, err := models.Parse(q, "15s", intervalCalculator, false)
res, err := models.Parse(q, "15s", intervalCalculator, false, false)
require.NoError(t, err)
require.Equal(t, "rate(ALERTS{job=\"test\" [20m0s]})", res.Expr)
})
@ -387,7 +387,7 @@ func TestParse(t *testing.T) {
"refId": "A"
}`, timeRange, 1*time.Minute)
res, err := models.Parse(q, "15s", intervalCalculator, false)
res, err := models.Parse(q, "15s", intervalCalculator, false, false)
require.NoError(t, err)
require.Equal(t, "rate(ALERTS{job=\"test\" [1m0s]})", res.Expr)
require.Equal(t, 1*time.Minute, res.Step)
@ -406,7 +406,7 @@ func TestParse(t *testing.T) {
"refId": "A"
}`, timeRange, 2*time.Minute)
res, err := models.Parse(q, "15s", intervalCalculator, false)
res, err := models.Parse(q, "15s", intervalCalculator, false, false)
require.NoError(t, err)
require.Equal(t, "rate(ALERTS{job=\"test\" [135000]})", res.Expr)
})
@ -424,7 +424,7 @@ func TestParse(t *testing.T) {
"refId": "A"
}`, timeRange, 2*time.Minute)
res, err := models.Parse(q, "15s", intervalCalculator, false)
res, err := models.Parse(q, "15s", intervalCalculator, false, false)
require.NoError(t, err)
require.Equal(t, "rate(ALERTS{job=\"test\" [135000]}) + rate(ALERTS{job=\"test\" [2m15s]})", res.Expr)
})
@ -442,7 +442,7 @@ func TestParse(t *testing.T) {
"refId": "A"
}`, timeRange, 2*time.Minute)
res, err := models.Parse(q, "15s", intervalCalculator, false)
res, err := models.Parse(q, "15s", intervalCalculator, false, false)
require.NoError(t, err)
require.Equal(t, "rate(ALERTS{job=\"test\" [135000]}) + rate(ALERTS{job=\"test\" [2m15s]})", res.Expr)
})
@ -461,7 +461,7 @@ func TestParse(t *testing.T) {
"range": true
}`, timeRange, time.Duration(1)*time.Minute)
res, err := models.Parse(q, "15s", intervalCalculator, false)
res, err := models.Parse(q, "15s", intervalCalculator, false, false)
require.NoError(t, err)
require.Equal(t, true, res.RangeQuery)
})
@ -481,7 +481,7 @@ func TestParse(t *testing.T) {
"instant": true
}`, timeRange, time.Duration(1)*time.Minute)
res, err := models.Parse(q, "15s", intervalCalculator, false)
res, err := models.Parse(q, "15s", intervalCalculator, false, false)
require.NoError(t, err)
require.Equal(t, true, res.RangeQuery)
require.Equal(t, true, res.InstantQuery)
@ -500,7 +500,7 @@ func TestParse(t *testing.T) {
"refId": "A"
}`, timeRange, time.Duration(1)*time.Minute)
res, err := models.Parse(q, "15s", intervalCalculator, false)
res, err := models.Parse(q, "15s", intervalCalculator, false, false)
require.NoError(t, err)
require.Equal(t, true, res.RangeQuery)
})
@ -631,7 +631,7 @@ func TestRateInterval(t *testing.T) {
t.Run(tt.name, func(t *testing.T) {
q := mockQuery(tt.args.expr, tt.args.interval, tt.args.intervalMs, tt.args.timeRange)
q.MaxDataPoints = 12384
res, err := models.Parse(q, tt.args.dsScrapeInterval, intervalCalculator, false)
res, err := models.Parse(q, tt.args.dsScrapeInterval, intervalCalculator, false, false)
require.NoError(t, err)
require.Equal(t, tt.want.Expr, res.Expr)
require.Equal(t, tt.want.Step, res.Step)
@ -666,7 +666,7 @@ func TestRateInterval(t *testing.T) {
"utcOffsetSec":3600
}`),
}
res, err := models.Parse(query, "30s", intervalCalculator, false)
res, err := models.Parse(query, "30s", intervalCalculator, false, false)
require.NoError(t, err)
require.Equal(t, "sum(rate(process_cpu_seconds_total[2m0s]))", res.Expr)
require.Equal(t, 30*time.Second, res.Step)
@ -701,7 +701,7 @@ func TestRateInterval(t *testing.T) {
"maxDataPoints": 1055
}`),
}
res, err := models.Parse(query, "15s", intervalCalculator, false)
res, err := models.Parse(query, "15s", intervalCalculator, false, false)
require.NoError(t, err)
require.Equal(t, "sum(rate(cache_requests_total[1m0s]))", res.Expr)
require.Equal(t, 15*time.Second, res.Step)

View File

@ -0,0 +1,52 @@
package models
import (
"github.com/prometheus/prometheus/model/labels"
"github.com/prometheus/prometheus/promql/parser"
)
// ApplyQueryScope parses rawExpr as a PromQL expression and injects the given
// label matchers into every vector selector in it.
//
// For each selector: a selector matcher whose label name collides with a scope
// matcher is overwritten in place (scope wins); scope matchers with no
// name collision are appended. The metric-name matcher (__name__) is never
// touched. Returns the rewritten expression as a string, or the parse error
// from rawExpr.
//
// NOTE: if several scope matchers share the same label name, only the last
// one's index survives in matcherNamesToIdx — earlier duplicates are dropped.
func ApplyQueryScope(rawExpr string, matchers []*labels.Matcher) (string, error) {
	expr, err := parser.ParseExpr(rawExpr)
	if err != nil {
		return "", err
	}
	// Map each scope matcher's label name to its slice index for O(1) lookup.
	// Nil entries are skipped here AND below, so they are never injected.
	matcherNamesToIdx := make(map[string]int, len(matchers))
	for i, matcher := range matchers {
		if matcher == nil {
			continue
		}
		matcherNamesToIdx[matcher.Name] = i
	}

	parser.Inspect(expr, func(node parser.Node, nodes []parser.Node) error {
		switch v := node.(type) {
		case *parser.VectorSelector:
			// found[i] == true means scope matcher i already replaced an
			// existing selector matcher and must not be appended again.
			found := make([]bool, len(matchers))
			for _, matcher := range v.LabelMatchers {
				// Never rewrite the metric-name matcher.
				if matcher == nil || matcher.Name == labels.MetricName {
					continue
				}
				if idx, ok := matcherNamesToIdx[matcher.Name]; ok {
					found[idx] = true
					newM := matchers[idx]
					// Name is already equal (that is how we matched); copy
					// the scope matcher's type and value over the original.
					matcher.Type = newM.Type
					matcher.Value = newM.Value
				}
			}
			for i, f := range found {
				// Skip matchers already merged in place, and nil entries —
				// their found slot stays false but appending nil would
				// panic later when the expression is stringified.
				if f || matchers[i] == nil {
					continue
				}
				v.LabelMatchers = append(v.LabelMatchers, matchers[i])
			}
			return nil
		default:
			return nil
		}
	})
	return expr.String(), nil
}

View File

@ -45,6 +45,7 @@ type QueryData struct {
URL string
TimeInterval string
enableDataplane bool
enableScope bool
exemplarSampler func() exemplar.Sampler
}
@ -88,6 +89,7 @@ func New(
URL: settings.URL,
enableDataplane: features.IsEnabledGlobally(featuremgmt.FlagPrometheusDataplane),
exemplarSampler: exemplarSampler,
enableScope: features.IsEnabledGlobally(featuremgmt.FlagPromQLScope),
}, nil
}
@ -98,7 +100,7 @@ func (s *QueryData) Execute(ctx context.Context, req *backend.QueryDataRequest)
}
for _, q := range req.Queries {
query, err := models.Parse(q, s.TimeInterval, s.intervalCalculator, fromAlert)
query, err := models.Parse(q, s.TimeInterval, s.intervalCalculator, fromAlert, s.enableScope)
if err != nil {
return &result, err
}

View File

@ -4489,6 +4489,15 @@
},
"typeVersion": {
"$ref": "#/definitions/FrameTypeVersion"
},
"uniqueRowIdFields": {
"description": "Array of field indices which values create a unique id for each row. Ideally this should be globally unique ID\nbut that isn't guarantied. Should help with keeping track and deduplicating rows in visualizations, especially\nwith streaming data with frequent updates.",
"type": "array",
"items": {
"type": "integer",
"format": "int64"
},
"example": "TraceID in Tempo, table name + primary key in SQL"
}
}
},

View File

@ -15070,6 +15070,15 @@
},
"typeVersion": {
"$ref": "#/definitions/FrameTypeVersion"
},
"uniqueRowIdFields": {
"description": "Array of field indices which values create a unique id for each row. Ideally this should be globally unique ID\nbut that isn't guarantied. Should help with keeping track and deduplicating rows in visualizations, especially\nwith streaming data with frequent updates.",
"type": "array",
"items": {
"type": "integer",
"format": "int64"
},
"example": "TraceID in Tempo, table name + primary key in SQL"
}
}
},

View File

@ -45,6 +45,10 @@ composableKinds: DataQuery: {
// See https://github.com/grafana/grafana/issues/48081
intervalFactor?: number
scope?: {
matchers: string
}
#QueryEditorMode: "code" | "builder" @cuetsy(kind="enum")
#PromQueryFormat: "time_series" | "table" | "heatmap" @cuetsy(kind="type")
}

View File

@ -51,4 +51,7 @@ export interface Prometheus extends common.DataQuery {
* Returns a Range vector, comprised of a set of time series containing a range of data points over time for each time series
*/
range?: boolean;
scope?: {
matchers: string;
};
}

View File

@ -5566,6 +5566,15 @@
},
"typeVersion": {
"$ref": "#/components/schemas/FrameTypeVersion"
},
"uniqueRowIdFields": {
"description": "Array of field indices which values create a unique id for each row. Ideally this should be globally unique ID\nbut that isn't guarantied. Should help with keeping track and deduplicating rows in visualizations, especially\nwith streaming data with frequent updates.",
"example": "TraceID in Tempo, table name + primary key in SQL",
"items": {
"format": "int64",
"type": "integer"
},
"type": "array"
}
},
"title": "FrameMeta matches:",

View File

@ -57,6 +57,7 @@ def rgm_build_docker_step(ubuntu, alpine, depends_on = ["yarn-install"], file =
"-a docker:grafana:linux/arm/v7:ubuntu " +
"--yarn-cache=$$YARN_CACHE_FOLDER " +
"--build-id=$$DRONE_BUILD_NUMBER " +
"--go-version={} ".format(golang_version) +
"--ubuntu-base={} ".format(ubuntu) +
"--alpine-base={} ".format(alpine) +
"--tag-format='{}' ".format(tag_format) +