mirror of
https://github.com/grafana/grafana.git
synced 2025-02-25 18:55:37 -06:00
Merge branch 'main' into eledobleefe/unified-history-styles-99868
This commit is contained in:
commit
738746913a
2
.github/CODEOWNERS
vendored
2
.github/CODEOWNERS
vendored
@ -515,7 +515,6 @@ playwright.config.ts @grafana/plugins-platform-frontend
|
||||
/public/app/features/playlist/ @grafana/dashboards-squad
|
||||
/public/app/features/plugins/ @grafana/plugins-platform-frontend
|
||||
/public/app/features/profile/ @grafana/grafana-frontend-platform
|
||||
/public/app/features/query-library/ @grafana/grafana-frontend-platform
|
||||
/public/app/features/runtime/ @ryantxu
|
||||
/public/app/features/query/ @grafana/dashboards-squad
|
||||
/public/app/features/sandbox/ @grafana/grafana-frontend-platform
|
||||
@ -593,6 +592,7 @@ playwright.config.ts @grafana/plugins-platform-frontend
|
||||
/public/app/features/explore/NodeGraph/ @grafana/observability-traces-and-profiling
|
||||
/public/app/features/explore/FlameGraph/ @grafana/observability-traces-and-profiling
|
||||
/public/app/features/explore/TraceView/ @grafana/observability-traces-and-profiling
|
||||
/public/app/features/explore/QueryLibrary/ @grafana/grafana-frontend-platform
|
||||
|
||||
/public/api-merged.json @grafana/grafana-backend-group
|
||||
/public/api-enterprise-spec.json @grafana/grafana-backend-group
|
||||
|
3
.github/pr-commands.json
vendored
3
.github/pr-commands.json
vendored
@ -247,7 +247,8 @@
|
||||
"/pkg/services/sqlstore/migrations/ualert/**/*",
|
||||
"/pkg/services/alerting/**/*",
|
||||
"/public/app/features/alerting/**/*",
|
||||
"/pkg/tests/api/alerting/**/*"
|
||||
"/pkg/tests/api/alerting/**/*",
|
||||
"/pkg/tests/alertmanager/**/*"
|
||||
],
|
||||
"action": "updateLabel",
|
||||
"addLabel": "area/alerting"
|
||||
|
@ -163,7 +163,8 @@ RUN if grep -i -q alpine /etc/issue && [ `arch` = "x86_64" ]; then \
|
||||
usr/glibc-compat/lib/libdl.so.2 \
|
||||
usr/glibc-compat/lib/libm.so.6 \
|
||||
usr/glibc-compat/lib/libpthread.so.0 \
|
||||
usr/glibc-compat/lib/librt.so.1 && \
|
||||
usr/glibc-compat/lib/librt.so.1 \
|
||||
usr/glibc-compat/lib/libresolv.so.2 && \
|
||||
mkdir /lib64 && \
|
||||
ln -s /usr/glibc-compat/lib/ld-linux-x86-64.so.2 /lib64; \
|
||||
fi
|
||||
|
@ -19,10 +19,11 @@ import (
|
||||
|
||||
func New(cfg app.Config) (app.App, error) {
|
||||
// Read config
|
||||
checkRegistry, ok := cfg.SpecificConfig.(checkregistry.CheckService)
|
||||
specificConfig, ok := cfg.SpecificConfig.(checkregistry.AdvisorAppConfig)
|
||||
if !ok {
|
||||
return nil, fmt.Errorf("invalid config type")
|
||||
}
|
||||
checkRegistry := specificConfig.CheckRegistry
|
||||
|
||||
// Prepare storage client
|
||||
clientGenerator := k8s.NewClientRegistry(cfg.KubeConfig, k8s.ClientConfig{})
|
||||
|
@ -57,3 +57,9 @@ func (s *Service) Checks() []checks.Check {
|
||||
),
|
||||
}
|
||||
}
|
||||
|
||||
// AdvisorAppConfig is the configuration received from Grafana to run the app
|
||||
type AdvisorAppConfig struct {
|
||||
CheckRegistry CheckService
|
||||
PluginConfig map[string]string
|
||||
}
|
||||
|
@ -4,11 +4,13 @@ import (
|
||||
"context"
|
||||
"fmt"
|
||||
"sort"
|
||||
"strconv"
|
||||
"time"
|
||||
|
||||
"github.com/grafana/grafana-app-sdk/app"
|
||||
"github.com/grafana/grafana-app-sdk/k8s"
|
||||
"github.com/grafana/grafana-app-sdk/resource"
|
||||
"github.com/grafana/grafana-plugin-sdk-go/backend/gtime"
|
||||
advisorv0alpha1 "github.com/grafana/grafana/apps/advisor/pkg/apis/advisor/v0alpha1"
|
||||
"github.com/grafana/grafana/apps/advisor/pkg/app/checkregistry"
|
||||
"github.com/grafana/grafana/apps/advisor/pkg/app/checks"
|
||||
@ -16,24 +18,35 @@ import (
|
||||
"k8s.io/klog/v2"
|
||||
)
|
||||
|
||||
const evaluateChecksInterval = 24 * time.Hour
|
||||
const maxChecks = 10
|
||||
const defaultEvaluationInterval = 24 * time.Hour
|
||||
const defaultMaxHistory = 10
|
||||
|
||||
// Runner is a "runnable" app used to be able to expose and API endpoint
|
||||
// with the existing checks types. This does not need to be a CRUD resource, but it is
|
||||
// the only way existing at the moment to expose the check types.
|
||||
type Runner struct {
|
||||
checkRegistry checkregistry.CheckService
|
||||
client resource.Client
|
||||
checkRegistry checkregistry.CheckService
|
||||
client resource.Client
|
||||
evaluationInterval time.Duration
|
||||
maxHistory int
|
||||
}
|
||||
|
||||
// NewRunner creates a new Runner.
|
||||
func New(cfg app.Config) (app.Runnable, error) {
|
||||
// Read config
|
||||
checkRegistry, ok := cfg.SpecificConfig.(checkregistry.CheckService)
|
||||
specificConfig, ok := cfg.SpecificConfig.(checkregistry.AdvisorAppConfig)
|
||||
if !ok {
|
||||
return nil, fmt.Errorf("invalid config type")
|
||||
}
|
||||
checkRegistry := specificConfig.CheckRegistry
|
||||
evalInterval, err := getEvaluationInterval(specificConfig.PluginConfig)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
maxHistory, err := getMaxHistory(specificConfig.PluginConfig)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
// Prepare storage client
|
||||
clientGenerator := k8s.NewClientRegistry(cfg.KubeConfig, k8s.ClientConfig{})
|
||||
@ -43,8 +56,10 @@ func New(cfg app.Config) (app.Runnable, error) {
|
||||
}
|
||||
|
||||
return &Runner{
|
||||
checkRegistry: checkRegistry,
|
||||
client: client,
|
||||
checkRegistry: checkRegistry,
|
||||
client: client,
|
||||
evaluationInterval: evalInterval,
|
||||
maxHistory: maxHistory,
|
||||
}, nil
|
||||
}
|
||||
|
||||
@ -64,7 +79,7 @@ func (r *Runner) Run(ctx context.Context) error {
|
||||
}
|
||||
}
|
||||
|
||||
nextSendInterval := time.Until(lastCreated.Add(evaluateChecksInterval))
|
||||
nextSendInterval := time.Until(lastCreated.Add(r.evaluationInterval))
|
||||
if nextSendInterval < time.Minute {
|
||||
nextSendInterval = 1 * time.Minute
|
||||
}
|
||||
@ -85,8 +100,8 @@ func (r *Runner) Run(ctx context.Context) error {
|
||||
klog.Error("Error cleaning up old check reports", "error", err)
|
||||
}
|
||||
|
||||
if nextSendInterval != evaluateChecksInterval {
|
||||
nextSendInterval = evaluateChecksInterval
|
||||
if nextSendInterval != r.evaluationInterval {
|
||||
nextSendInterval = r.evaluationInterval
|
||||
}
|
||||
ticker.Reset(nextSendInterval)
|
||||
case <-ctx.Done():
|
||||
@ -155,7 +170,7 @@ func (r *Runner) cleanupChecks(ctx context.Context) error {
|
||||
}
|
||||
|
||||
for _, checks := range checksByType {
|
||||
if len(checks) > maxChecks {
|
||||
if len(checks) > r.maxHistory {
|
||||
// Sort checks by creation time
|
||||
sort.Slice(checks, func(i, j int) bool {
|
||||
ti := checks[i].GetCreationTimestamp().Time
|
||||
@ -163,7 +178,7 @@ func (r *Runner) cleanupChecks(ctx context.Context) error {
|
||||
return ti.Before(tj)
|
||||
})
|
||||
// Delete the oldest checks
|
||||
for i := 0; i < len(checks)-maxChecks; i++ {
|
||||
for i := 0; i < len(checks)-r.maxHistory; i++ {
|
||||
check := checks[i]
|
||||
id := check.GetStaticMetadata().Identifier()
|
||||
err := r.client.Delete(ctx, id, resource.DeleteOptions{})
|
||||
@ -176,3 +191,29 @@ func (r *Runner) cleanupChecks(ctx context.Context) error {
|
||||
|
||||
return nil
|
||||
}
|
||||
|
||||
func getEvaluationInterval(pluginConfig map[string]string) (time.Duration, error) {
|
||||
evaluationInterval := defaultEvaluationInterval
|
||||
configEvaluationInterval, ok := pluginConfig["evaluation_interval"]
|
||||
if ok {
|
||||
var err error
|
||||
evaluationInterval, err = gtime.ParseDuration(configEvaluationInterval)
|
||||
if err != nil {
|
||||
return 0, fmt.Errorf("invalid evaluation interval: %w", err)
|
||||
}
|
||||
}
|
||||
return evaluationInterval, nil
|
||||
}
|
||||
|
||||
func getMaxHistory(pluginConfig map[string]string) (int, error) {
|
||||
maxHistory := defaultMaxHistory
|
||||
configMaxHistory, ok := pluginConfig["max_history"]
|
||||
if ok {
|
||||
var err error
|
||||
maxHistory, err = strconv.Atoi(configMaxHistory)
|
||||
if err != nil {
|
||||
return 0, fmt.Errorf("invalid max history: %w", err)
|
||||
}
|
||||
}
|
||||
return maxHistory, nil
|
||||
}
|
||||
|
@ -135,8 +135,8 @@ func TestRunner_cleanupChecks_WithinMax(t *testing.T) {
|
||||
func TestRunner_cleanupChecks_ErrorOnDelete(t *testing.T) {
|
||||
mockClient := &MockClient{
|
||||
listFunc: func(ctx context.Context, namespace string, options resource.ListOptions) (resource.ListObject, error) {
|
||||
items := make([]advisorv0alpha1.Check, 0, maxChecks+1)
|
||||
for i := 0; i < maxChecks+1; i++ {
|
||||
items := make([]advisorv0alpha1.Check, 0, defaultMaxHistory+1)
|
||||
for i := 0; i < defaultMaxHistory+1; i++ {
|
||||
item := advisorv0alpha1.Check{}
|
||||
item.ObjectMeta.SetLabels(map[string]string{
|
||||
checks.TypeLabel: "mock",
|
||||
@ -153,7 +153,8 @@ func TestRunner_cleanupChecks_ErrorOnDelete(t *testing.T) {
|
||||
}
|
||||
|
||||
runner := &Runner{
|
||||
client: mockClient,
|
||||
client: mockClient,
|
||||
maxHistory: defaultMaxHistory,
|
||||
}
|
||||
err := runner.cleanupChecks(context.Background())
|
||||
assert.ErrorContains(t, err, "delete error")
|
||||
@ -161,8 +162,8 @@ func TestRunner_cleanupChecks_ErrorOnDelete(t *testing.T) {
|
||||
|
||||
func TestRunner_cleanupChecks_Success(t *testing.T) {
|
||||
itemsDeleted := []string{}
|
||||
items := make([]advisorv0alpha1.Check, 0, maxChecks+1)
|
||||
for i := 0; i < maxChecks+1; i++ {
|
||||
items := make([]advisorv0alpha1.Check, 0, defaultMaxHistory+1)
|
||||
for i := 0; i < defaultMaxHistory+1; i++ {
|
||||
item := advisorv0alpha1.Check{}
|
||||
item.ObjectMeta.SetName(fmt.Sprintf("check-%d", i))
|
||||
item.ObjectMeta.SetLabels(map[string]string{
|
||||
@ -187,13 +188,54 @@ func TestRunner_cleanupChecks_Success(t *testing.T) {
|
||||
}
|
||||
|
||||
runner := &Runner{
|
||||
client: mockClient,
|
||||
client: mockClient,
|
||||
maxHistory: defaultMaxHistory,
|
||||
}
|
||||
err := runner.cleanupChecks(context.Background())
|
||||
assert.NoError(t, err)
|
||||
assert.Equal(t, []string{"check-0"}, itemsDeleted)
|
||||
}
|
||||
|
||||
func Test_getEvaluationInterval(t *testing.T) {
|
||||
t.Run("default", func(t *testing.T) {
|
||||
interval, err := getEvaluationInterval(map[string]string{})
|
||||
assert.NoError(t, err)
|
||||
assert.Equal(t, 24*time.Hour, interval)
|
||||
})
|
||||
|
||||
t.Run("invalid", func(t *testing.T) {
|
||||
interval, err := getEvaluationInterval(map[string]string{"evaluation_interval": "invalid"})
|
||||
assert.Error(t, err)
|
||||
assert.Zero(t, interval)
|
||||
})
|
||||
|
||||
t.Run("custom", func(t *testing.T) {
|
||||
interval, err := getEvaluationInterval(map[string]string{"evaluation_interval": "1h"})
|
||||
assert.NoError(t, err)
|
||||
assert.Equal(t, time.Hour, interval)
|
||||
})
|
||||
}
|
||||
|
||||
func Test_getMaxHistory(t *testing.T) {
|
||||
t.Run("default", func(t *testing.T) {
|
||||
history, err := getMaxHistory(map[string]string{})
|
||||
assert.NoError(t, err)
|
||||
assert.Equal(t, 10, history)
|
||||
})
|
||||
|
||||
t.Run("invalid", func(t *testing.T) {
|
||||
history, err := getMaxHistory(map[string]string{"max_history": "invalid"})
|
||||
assert.Error(t, err)
|
||||
assert.Zero(t, history)
|
||||
})
|
||||
|
||||
t.Run("custom", func(t *testing.T) {
|
||||
history, err := getMaxHistory(map[string]string{"max_history": "5"})
|
||||
assert.NoError(t, err)
|
||||
assert.Equal(t, 5, history)
|
||||
})
|
||||
}
|
||||
|
||||
type MockCheckService struct {
|
||||
checks []checks.Check
|
||||
}
|
||||
|
@ -24,10 +24,11 @@ type Runner struct {
|
||||
// NewRunner creates a new Runner.
|
||||
func New(cfg app.Config) (app.Runnable, error) {
|
||||
// Read config
|
||||
checkRegistry, ok := cfg.SpecificConfig.(checkregistry.CheckService)
|
||||
specificConfig, ok := cfg.SpecificConfig.(checkregistry.AdvisorAppConfig)
|
||||
if !ok {
|
||||
return nil, fmt.Errorf("invalid config type")
|
||||
}
|
||||
checkRegistry := specificConfig.CheckRegistry
|
||||
|
||||
// Prepare storage client
|
||||
clientGenerator := k8s.NewClientRegistry(cfg.KubeConfig, k8s.ClientConfig{})
|
||||
|
@ -75,8 +75,8 @@ require (
|
||||
golang.org/x/crypto v0.32.0 // indirect
|
||||
golang.org/x/exp v0.0.0-20240909161429-701f63a606c0 // indirect
|
||||
golang.org/x/net v0.34.0 // indirect
|
||||
golang.org/x/oauth2 v0.25.0 // indirect
|
||||
golang.org/x/sys v0.29.0 // indirect
|
||||
golang.org/x/oauth2 v0.26.0 // indirect
|
||||
golang.org/x/sys v0.30.0 // indirect
|
||||
golang.org/x/term v0.28.0 // indirect
|
||||
golang.org/x/text v0.21.0 // indirect
|
||||
golang.org/x/time v0.9.0 // indirect
|
||||
|
@ -226,8 +226,8 @@ golang.org/x/net v0.0.0-20201021035429-f5854403a974/go.mod h1:sp8m0HH+o8qH0wwXwY
|
||||
golang.org/x/net v0.34.0 h1:Mb7Mrk043xzHgnRM88suvJFwzVrRfHEHJEl5/71CKw0=
|
||||
golang.org/x/net v0.34.0/go.mod h1:di0qlW3YNM5oh6GqDGQr92MyTozJPmybPK4Ev/Gm31k=
|
||||
golang.org/x/oauth2 v0.0.0-20180821212333-d2e6202438be/go.mod h1:N/0e6XlmueqKjAGxoOufVs8QHGRruUQn6yWY3a++T0U=
|
||||
golang.org/x/oauth2 v0.25.0 h1:CY4y7XT9v0cRI9oupztF8AgiIu99L/ksR/Xp/6jrZ70=
|
||||
golang.org/x/oauth2 v0.25.0/go.mod h1:XYTD2NtWslqkgxebSiOHnXEap4TF09sJSc7H1sXbhtI=
|
||||
golang.org/x/oauth2 v0.26.0 h1:afQXWNNaeC4nvZ0Ed9XvCCzXM6UHJG7iCg0W4fPqSBE=
|
||||
golang.org/x/oauth2 v0.26.0/go.mod h1:XYTD2NtWslqkgxebSiOHnXEap4TF09sJSc7H1sXbhtI=
|
||||
golang.org/x/sync v0.0.0-20180314180146-1d60e4601c6f/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
|
||||
golang.org/x/sync v0.0.0-20181108010431-42b317875d0f/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
|
||||
golang.org/x/sync v0.0.0-20190423024810-112230192c58/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
|
||||
@ -237,8 +237,8 @@ golang.org/x/sys v0.0.0-20180830151530-49385e6e1522/go.mod h1:STP8DvDyc/dI5b8T5h
|
||||
golang.org/x/sys v0.0.0-20190215142949-d0b11bdaac8a/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
|
||||
golang.org/x/sys v0.0.0-20190412213103-97732733099d/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
|
||||
golang.org/x/sys v0.0.0-20200930185726-fdedc70b468f/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
|
||||
golang.org/x/sys v0.29.0 h1:TPYlXGxvx1MGTn2GiZDhnjPA9wZzZeGKHHmKhHYvgaU=
|
||||
golang.org/x/sys v0.29.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA=
|
||||
golang.org/x/sys v0.30.0 h1:QjkSwP/36a20jFYWkSue1YwXzLmsV5Gfq7Eiy72C1uc=
|
||||
golang.org/x/sys v0.30.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA=
|
||||
golang.org/x/term v0.28.0 h1:/Ts8HFuMR2E6IP/jlo7QVLZHggjKQbhu/7H0LJFr3Gg=
|
||||
golang.org/x/term v0.28.0/go.mod h1:Sw/lC2IAUZ92udQNf3WodGtn4k/XoLyZoh8v/8uiwek=
|
||||
golang.org/x/text v0.3.0/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ=
|
||||
|
@ -63,9 +63,9 @@ require (
|
||||
go.opentelemetry.io/otel/trace v1.34.0 // indirect
|
||||
go.opentelemetry.io/proto/otlp v1.5.0 // indirect
|
||||
golang.org/x/net v0.34.0 // indirect
|
||||
golang.org/x/oauth2 v0.25.0 // indirect
|
||||
golang.org/x/oauth2 v0.26.0 // indirect
|
||||
golang.org/x/sync v0.11.0 // indirect
|
||||
golang.org/x/sys v0.29.0 // indirect
|
||||
golang.org/x/sys v0.30.0 // indirect
|
||||
golang.org/x/term v0.28.0 // indirect
|
||||
golang.org/x/text v0.21.0 // indirect
|
||||
golang.org/x/time v0.9.0 // indirect
|
||||
|
@ -157,8 +157,8 @@ golang.org/x/net v0.0.0-20200226121028-0de0cce0169b/go.mod h1:z5CRVTTTmAJ677TzLL
|
||||
golang.org/x/net v0.0.0-20201021035429-f5854403a974/go.mod h1:sp8m0HH+o8qH0wwXwYZr8TS3Oi6o0r6Gce1SSxlDquU=
|
||||
golang.org/x/net v0.34.0 h1:Mb7Mrk043xzHgnRM88suvJFwzVrRfHEHJEl5/71CKw0=
|
||||
golang.org/x/net v0.34.0/go.mod h1:di0qlW3YNM5oh6GqDGQr92MyTozJPmybPK4Ev/Gm31k=
|
||||
golang.org/x/oauth2 v0.25.0 h1:CY4y7XT9v0cRI9oupztF8AgiIu99L/ksR/Xp/6jrZ70=
|
||||
golang.org/x/oauth2 v0.25.0/go.mod h1:XYTD2NtWslqkgxebSiOHnXEap4TF09sJSc7H1sXbhtI=
|
||||
golang.org/x/oauth2 v0.26.0 h1:afQXWNNaeC4nvZ0Ed9XvCCzXM6UHJG7iCg0W4fPqSBE=
|
||||
golang.org/x/oauth2 v0.26.0/go.mod h1:XYTD2NtWslqkgxebSiOHnXEap4TF09sJSc7H1sXbhtI=
|
||||
golang.org/x/sync v0.0.0-20190423024810-112230192c58/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
|
||||
golang.org/x/sync v0.0.0-20190911185100-cd5d95a43a6e/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
|
||||
golang.org/x/sync v0.0.0-20201020160332-67f06af15bc9/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
|
||||
@ -167,8 +167,8 @@ golang.org/x/sync v0.11.0/go.mod h1:Czt+wKu1gCyEFDUtn0jG5QVvpJ6rzVqr5aXyt9drQfk=
|
||||
golang.org/x/sys v0.0.0-20190215142949-d0b11bdaac8a/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
|
||||
golang.org/x/sys v0.0.0-20190412213103-97732733099d/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
|
||||
golang.org/x/sys v0.0.0-20200930185726-fdedc70b468f/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
|
||||
golang.org/x/sys v0.29.0 h1:TPYlXGxvx1MGTn2GiZDhnjPA9wZzZeGKHHmKhHYvgaU=
|
||||
golang.org/x/sys v0.29.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA=
|
||||
golang.org/x/sys v0.30.0 h1:QjkSwP/36a20jFYWkSue1YwXzLmsV5Gfq7Eiy72C1uc=
|
||||
golang.org/x/sys v0.30.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA=
|
||||
golang.org/x/term v0.28.0 h1:/Ts8HFuMR2E6IP/jlo7QVLZHggjKQbhu/7H0LJFr3Gg=
|
||||
golang.org/x/term v0.28.0/go.mod h1:Sw/lC2IAUZ92udQNf3WodGtn4k/XoLyZoh8v/8uiwek=
|
||||
golang.org/x/text v0.3.0/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ=
|
||||
|
@ -64,9 +64,9 @@ require (
|
||||
go.opentelemetry.io/otel/trace v1.34.0 // indirect
|
||||
go.opentelemetry.io/proto/otlp v1.5.0 // indirect
|
||||
golang.org/x/net v0.34.0 // indirect
|
||||
golang.org/x/oauth2 v0.25.0 // indirect
|
||||
golang.org/x/oauth2 v0.26.0 // indirect
|
||||
golang.org/x/sync v0.11.0 // indirect
|
||||
golang.org/x/sys v0.29.0 // indirect
|
||||
golang.org/x/sys v0.30.0 // indirect
|
||||
golang.org/x/term v0.28.0 // indirect
|
||||
golang.org/x/text v0.21.0 // indirect
|
||||
golang.org/x/time v0.9.0 // indirect
|
||||
|
@ -157,8 +157,8 @@ golang.org/x/net v0.0.0-20200226121028-0de0cce0169b/go.mod h1:z5CRVTTTmAJ677TzLL
|
||||
golang.org/x/net v0.0.0-20201021035429-f5854403a974/go.mod h1:sp8m0HH+o8qH0wwXwYZr8TS3Oi6o0r6Gce1SSxlDquU=
|
||||
golang.org/x/net v0.34.0 h1:Mb7Mrk043xzHgnRM88suvJFwzVrRfHEHJEl5/71CKw0=
|
||||
golang.org/x/net v0.34.0/go.mod h1:di0qlW3YNM5oh6GqDGQr92MyTozJPmybPK4Ev/Gm31k=
|
||||
golang.org/x/oauth2 v0.25.0 h1:CY4y7XT9v0cRI9oupztF8AgiIu99L/ksR/Xp/6jrZ70=
|
||||
golang.org/x/oauth2 v0.25.0/go.mod h1:XYTD2NtWslqkgxebSiOHnXEap4TF09sJSc7H1sXbhtI=
|
||||
golang.org/x/oauth2 v0.26.0 h1:afQXWNNaeC4nvZ0Ed9XvCCzXM6UHJG7iCg0W4fPqSBE=
|
||||
golang.org/x/oauth2 v0.26.0/go.mod h1:XYTD2NtWslqkgxebSiOHnXEap4TF09sJSc7H1sXbhtI=
|
||||
golang.org/x/sync v0.0.0-20190423024810-112230192c58/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
|
||||
golang.org/x/sync v0.0.0-20190911185100-cd5d95a43a6e/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
|
||||
golang.org/x/sync v0.0.0-20201020160332-67f06af15bc9/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
|
||||
@ -167,8 +167,8 @@ golang.org/x/sync v0.11.0/go.mod h1:Czt+wKu1gCyEFDUtn0jG5QVvpJ6rzVqr5aXyt9drQfk=
|
||||
golang.org/x/sys v0.0.0-20190215142949-d0b11bdaac8a/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
|
||||
golang.org/x/sys v0.0.0-20190412213103-97732733099d/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
|
||||
golang.org/x/sys v0.0.0-20200930185726-fdedc70b468f/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
|
||||
golang.org/x/sys v0.29.0 h1:TPYlXGxvx1MGTn2GiZDhnjPA9wZzZeGKHHmKhHYvgaU=
|
||||
golang.org/x/sys v0.29.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA=
|
||||
golang.org/x/sys v0.30.0 h1:QjkSwP/36a20jFYWkSue1YwXzLmsV5Gfq7Eiy72C1uc=
|
||||
golang.org/x/sys v0.30.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA=
|
||||
golang.org/x/term v0.28.0 h1:/Ts8HFuMR2E6IP/jlo7QVLZHggjKQbhu/7H0LJFr3Gg=
|
||||
golang.org/x/term v0.28.0/go.mod h1:Sw/lC2IAUZ92udQNf3WodGtn4k/XoLyZoh8v/8uiwek=
|
||||
golang.org/x/text v0.3.0/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ=
|
||||
|
@ -120,3 +120,10 @@ update: ## Fetch the latest version of this Makefile and the `make-docs` script
|
||||
curl -s -LO https://raw.githubusercontent.com/grafana/writers-toolkit/main/docs/docs.mk
|
||||
curl -s -LO https://raw.githubusercontent.com/grafana/writers-toolkit/main/docs/make-docs
|
||||
chmod +x make-docs
|
||||
|
||||
.PHONY: topic/%
|
||||
topic/%: ## Create a topic from the Writers' Toolkit template. Specify the topic type as the target, for example, `make topic/task TOPIC_PATH=sources/my-new-topic.md`.
|
||||
topic/%:
|
||||
$(if $(TOPIC_PATH),,$(error "You must set the TOPIC_PATH variable to the path where the $(@F) topic will be created. For example: make $(@) TOPIC_PATH=sources/my-new-topic.md"))
|
||||
mkdir -p $(dir $(TOPIC_PATH))
|
||||
curl -s -o $(TOPIC_PATH) https://raw.githubusercontent.com/grafana/writers-toolkit/refs/heads/main/docs/static/templates/$(@F)-template.md
|
||||
|
@ -8,6 +8,12 @@
|
||||
# [Semantic versioning](https://semver.org/) is used to help the reader identify the significance of changes.
|
||||
# Changes are relevant to this script and the support docs.mk GNU Make interface.
|
||||
#
|
||||
# ## 8.5.0 (2025-02-13)
|
||||
#
|
||||
# ### Added
|
||||
#
|
||||
# - make topic/<KIND> TOPIC_PATH=<PATH> target to create a new topic from the Writers' Toolkit templates.
|
||||
#
|
||||
# ## 8.4.0 (2025-01-27)
|
||||
#
|
||||
# ### Fixed
|
||||
|
@ -28,13 +28,83 @@ refs:
|
||||
destination: /docs/grafana/<GRAFANA_VERSION>/alerting/configure-notifications/template-notifications/
|
||||
- pattern: /docs/grafana-cloud/
|
||||
destination: /docs/grafana-cloud/alerting-and-irm/alerting/configure-notifications/template-notifications/
|
||||
configure-contact-points:
|
||||
- pattern: /docs/grafana/
|
||||
destination: /docs/grafana/<GRAFANA_VERSION>/alerting/configure-notifications/manage-contact-points/
|
||||
- pattern: /docs/grafana-cloud/
|
||||
destination: /docs/grafana-cloud/alerting-and-irm/alerting/configure-notifications/manage-contact-points/
|
||||
---
|
||||
|
||||
# Configure the webhook notifier for Alerting
|
||||
# Configure webhook notifications
|
||||
|
||||
The webhook notification is a simple way to send information about a state change over HTTP to a custom endpoint. Using this notification you could integrate Grafana into a system of your choosing.
|
||||
Use the webhook integration in contact points to send alert notifications to your webhook.
|
||||
|
||||
## Webhook JSON payload
|
||||
The webhook integration is a flexible way to integrate alerts into your system. When a notification is triggered, it sends a JSON request with alert details and additional data to the webhook endpoint.
|
||||
|
||||
## Configure webhook for a contact point
|
||||
|
||||
To create a contact point with webhook integration, complete the following steps.
|
||||
|
||||
1. Navigate to **Alerts & IRM** -> **Alerting** -> **Contact points**.
|
||||
1. Click **+ Add contact point**.
|
||||
1. Enter a name for the contact point.
|
||||
1. From the **Integration** list, select **Webhook**.
|
||||
1. In the **URL** field, copy in your Webhook URL.
|
||||
1. (Optional) Configure [additional settings](#settings).
|
||||
1. Click **Save contact point**.
|
||||
|
||||
For more details on contact points, including how to test them and enable notifications, refer to [Configure contact points](ref:configure-contact-points).
|
||||
|
||||
## Webhook settings
|
||||
|
||||
| Option | Description |
|
||||
| ------ | ---------------- |
|
||||
| URL | The Webhook URL. |
|
||||
|
||||
#### Optional settings
|
||||
|
||||
| Option | Description |
|
||||
| --------------------------------- | ----------------------------------------------------------------------------------------------------------------------- |
|
||||
| HTTP Method | Specifies the HTTP method to use: `POST` or `PUT`. |
|
||||
| Basic Authentication Username | Username for HTTP Basic Authentication. |
|
||||
| Basic Authentication Password | Password for HTTP Basic Authentication. |
|
||||
| Authentication Header Scheme | Scheme for the `Authorization` Request Header. Default is `Bearer`. |
|
||||
| Authentication Header Credentials | Credentials for the `Authorization` Request header. |
|
||||
| Max Alerts | Maximum number of alerts to include in a notification. Any alerts exceeding this limit are ignored. `0` means no limit. |
|
||||
| TLS | TLS configuration options, including CA certificate, client certificate, and client key. |
|
||||
|
||||
{{< admonition type="note" >}}
|
||||
|
||||
You can configure either HTTP Basic Authentication or the Authorization request header, but not both.
|
||||
|
||||
{{< /admonition >}}
|
||||
|
||||
#### Optional settings using templates
|
||||
|
||||
Use the following settings to include custom data within the [JSON payload](#body). Both options support using [notification templates](ref:notification-templates).
|
||||
|
||||
| Option | Description |
|
||||
| ------- | ----------------------------------------------------------------------------------------------------------------------------------------------- |
|
||||
| Title | Sends the value as a string in the `title` field of the [JSON payload](#body). Supports [notification templates](ref:notification-templates). |
|
||||
| Message | Sends the value as a string in the `message` field of the [JSON payload](#body). Supports [notification templates](ref:notification-templates). |
|
||||
|
||||
{{< admonition type="note" >}}
|
||||
You can customize the `title` and `message` options to include custom messages and notification data using notification templates. These fields are always sent as strings in the JSON payload.
|
||||
|
||||
However, you cannot customize the webhook data structure, such as adding or changing other JSON fields and HTTP headers, or sending data in a different format like XML.
|
||||
|
||||
If you need to format these fields as JSON or modify other webhook request options, consider sending webhook notifications to a proxy server that adjusts the webhook request before forwarding it to the final destination.
|
||||
{{< /admonition >}}
|
||||
|
||||
#### Optional notification settings
|
||||
|
||||
| Option | Description |
|
||||
| ------------------------ | ------------------------------------------------------------------- |
|
||||
| Disable resolved message | Enable this option to prevent notifications when an alert resolves. |
|
||||
|
||||
## JSON payload
|
||||
|
||||
The following example shows the payload of a webhook notification containing information about two firing alerts:
|
||||
|
||||
```json
|
||||
{
|
||||
@ -106,76 +176,47 @@ The webhook notification is a simple way to send information about a state chang
|
||||
}
|
||||
```
|
||||
|
||||
## Webhook fields
|
||||
|
||||
### Body
|
||||
|
||||
| Key | Type | Description |
|
||||
| ----------------- | ------------------------- | ------------------------------------------------------------------------------- |
|
||||
| receiver | string | Name of the webhook |
|
||||
| status | string | Current status of the alert, `firing` or `resolved` |
|
||||
| orgId | number | ID of the organization related to the payload |
|
||||
| alerts | array of [alerts](#alert) | Alerts that are triggering |
|
||||
| groupLabels | object | Labels that are used for grouping, map of string keys to string values |
|
||||
| commonLabels | object | Labels that all alarms have in common, map of string keys to string values |
|
||||
| commonAnnotations | object | Annotations that all alarms have in common, map of string keys to string values |
|
||||
| externalURL | string | External URL to the Grafana instance sending this webhook |
|
||||
| version | string | Version of the payload |
|
||||
| groupKey | string | Key that is used for grouping |
|
||||
| truncatedAlerts | number | Number of alerts that were truncated |
|
||||
| title | string | Custom title |
|
||||
| state | string | State of the alert group (either `alerting` or `ok`) |
|
||||
| message | string | Custom message |
|
||||
The JSON payload of webhook notifications includes the following key-value pairs:
|
||||
|
||||
| Key | Type | Description |
|
||||
| ------------------- | ------------------------- | -------------------------------------------------------------------------------- |
|
||||
| `receiver` | string | Name of the contact point. |
|
||||
| `status` | string | Current status of the alert, `firing` or `resolved`. |
|
||||
| `orgId` | number | ID of the organization related to the payload. |
|
||||
| `alerts` | array of [alerts](#alert) | Alerts that are triggering. |
|
||||
| `groupLabels` | object | Labels that are used for grouping, map of string keys to string values. |
|
||||
| `commonLabels` | object | Labels that all alarms have in common, map of string keys to string values. |
|
||||
| `commonAnnotations` | object | Annotations that all alarms have in common, map of string keys to string values. |
|
||||
| `externalURL` | string | External URL to the Grafana instance sending this webhook. |
|
||||
| `version` | string | Version of the payload structure. |
|
||||
| `groupKey` | string | Key that is used for grouping. |
|
||||
| `truncatedAlerts` | number | Number of alerts that were truncated. |
|
||||
| `state` | string | State of the alert group (either `alerting` or `ok`). |
|
||||
|
||||
The following key-value pairs are also included in the JSON payload and can be configured in the [webhook settings using notification templates](#optional-settings-using-templates).
|
||||
|
||||
| Key | Type | Description |
|
||||
| --------- | ------ | -------------------------------------------------------------------------------------------------------------------- |
|
||||
| `title` | string | Custom title. Configurable in [webhook settings using notification templates](#optional-settings-using-templates). |
|
||||
| `message` | string | Custom message. Configurable in [webhook settings using notification templates](#optional-settings-using-templates). |
|
||||
|
||||
### Alert
|
||||
|
||||
| Key | Type | Description |
|
||||
| ------------ | ------ | ---------------------------------------------------------------------------------- |
|
||||
| status | string | Current status of the alert, `firing` or `resolved` |
|
||||
| labels | object | Labels that are part of this alert, map of string keys to string values |
|
||||
| annotations | object | Annotations that are part of this alert, map of string keys to string values |
|
||||
| startsAt | string | Start time of the alert |
|
||||
| endsAt | string | End time of the alert, default value when not resolved is `0001-01-01T00:00:00Z` |
|
||||
| values | object | Values that triggered the current status |
|
||||
| generatorURL | string | URL of the alert rule in the Grafana UI |
|
||||
| fingerprint | string | The labels fingerprint, alarms with the same labels will have the same fingerprint |
|
||||
| silenceURL | string | URL to silence the alert rule in the Grafana UI |
|
||||
| dashboardURL | string | A link to the Grafana Dashboard if the alert has a Dashboard UID annotation |
|
||||
| panelURL | string | A link to the panel if the alert has a Panel ID annotation |
|
||||
| imageURL | string | URL of a screenshot of a panel assigned to the rule that created this notification |
|
||||
The Alert object represents an alert included in the notification group, as provided by the [`alerts` field](#body).
|
||||
|
||||
{{< admonition type="note" >}}
|
||||
|
||||
You can customize the `title` and `message` fields using [notification templates](ref:notification-templates).
|
||||
|
||||
However, you cannot customize webhook data structure or format, including JSON fields or sending data in XML, nor can you change the webhook HTTP headers.
|
||||
|
||||
{{< /admonition >}}
|
||||
|
||||
## Procedure
|
||||
|
||||
To create your Webhook integration in Grafana Alerting, complete the following steps.
|
||||
|
||||
1. Navigate to **Alerts & IRM** -> **Alerting** -> **Contact points**.
|
||||
1. Click **+ Add contact point**.
|
||||
1. Enter a contact point name.
|
||||
1. From the Integration list, select **Webhook**.
|
||||
1. In the **URL** field, copy in your Webhook URL.
|
||||
1. Click **Test** to check that your integration works.
|
||||
|
||||
** For Grafana Alertmanager only.**
|
||||
|
||||
1. Click **Save contact point**.
|
||||
|
||||
## Next steps
|
||||
|
||||
The Webhook contact point is ready to receive alert notifications.
|
||||
|
||||
To add this contact point to your alert, complete the following steps.
|
||||
|
||||
1. In Grafana, navigate to **Alerting** > **Alert rules**.
|
||||
1. Edit or create a new alert rule.
|
||||
1. Scroll down to the **Configure labels and notifications** section.
|
||||
1. Under Notifications, click **Select contact point**.
|
||||
1. From the drop-down menu, select the previously created contact point.
|
||||
1. **Click Save rule and exit**.
|
||||
| Key | Type | Description |
|
||||
| -------------- | ------ | ----------------------------------------------------------------------------------- |
|
||||
| `status` | string | Current status of the alert, `firing` or `resolved`. |
|
||||
| `labels` | object | Labels that are part of this alert, map of string keys to string values. |
|
||||
| `annotations` | object | Annotations that are part of this alert, map of string keys to string values. |
|
||||
| `startsAt` | string | Start time of the alert. |
|
||||
| `endsAt` | string | End time of the alert, default value when not resolved is `0001-01-01T00:00:00Z`. |
|
||||
| `values` | object | Values that triggered the current status. |
|
||||
| `generatorURL` | string | URL of the alert rule in the Grafana UI. |
|
||||
| `fingerprint` | string | The labels fingerprint, alarms with the same labels will have the same fingerprint. |
|
||||
| `silenceURL` | string | URL to silence the alert rule in the Grafana UI. |
|
||||
| `dashboardURL` | string | A link to the Grafana Dashboard if the alert has a Dashboard UID annotation. |
|
||||
| `panelURL` | string | A link to the panel if the alert has a Panel ID annotation. |
|
||||
| `imageURL` | string | URL of a screenshot of a panel assigned to the rule that created this notification. |
|
||||
|
@ -71,7 +71,14 @@ refs:
|
||||
|
||||
# Tempo data source
|
||||
|
||||
Grafana ships with built-in support for [Tempo](https://grafana.com/docs/tempo/latest/), a high-volume, minimal-dependency trace storage, open source tracing solution from Grafana Labs. This topic explains configuration and queries specific to the Tempo data source.
|
||||
Grafana ships with built-in support for [Tempo](https://grafana.com/docs/tempo/<TEMPO_VERSION>/), a high-volume, minimal-dependency trace storage, open source tracing solution from Grafana Labs.
|
||||
|
||||
To learn more about traces, refer to [Introduction to tracing](https://grafana.com/docs/tempo/<TEMPO_VERSION>/introduction/).
|
||||
|
||||
To use traces, you need you have an application or service that is instrumented to emit traces.
|
||||
Refer to the [Instrument for tracing](https://grafana.com/docs/tempo/<TEMPO_VERSION>/getting-started/instrumentation/) for more information.
|
||||
|
||||
## Add a data source
|
||||
|
||||
For instructions on how to add a data source to Grafana, refer to the [administration documentation](ref:data-source-management).
|
||||
Only users with the organization administrator role can add data sources.
|
||||
@ -81,6 +88,8 @@ This video explains how to add data sources, including Loki, Tempo, and Mimir, t
|
||||
|
||||
{{< youtube id="cqHO0oYW6Ic" start="298" >}}
|
||||
|
||||
Once you've added the data source, you can [configure it]({{< relref "./configure-tempo-data-source/" >}}) so that your Grafana instance's users can create queries in its [query editor]({{< relref "./query-editor/" >}}) when they [build dashboards](ref:build-dashboards) and use [Explore](ref:explore).
|
||||
## Learn more
|
||||
|
||||
After you've added the data source, you can [configure it](./configure-tempo-data-source/) so that your Grafana instance's users can create queries in its [query editor](./query-editor/) when they [build dashboards](ref:build-dashboards) and use [Explore](ref:explore).
|
||||
|
||||
{{< section withDescriptions="true">}}
|
||||
|
@ -131,7 +131,7 @@ To use streaming, you need to:
|
||||
- Run Tempo version 2.2 or later, or Grafana Enterprise Traces (GET) version 2.2 or later, or use Grafana Cloud Traces.
|
||||
- Tempo must have `stream_over_http_enabled: true` for streaming to work.
|
||||
|
||||
For more information, refer to [Tempo GRPC API](https://grafana.com/docs/tempo/<TEMPO_VERSION>/api_docs/#tempo-grpc-api).
|
||||
For more information, refer to [Tempo gRPC API](https://grafana.com/docs/tempo/<TEMPO_VERSION>/api_docs/#tempo-grpc-api).
|
||||
|
||||
- For self-managed Tempo or GET instances: If your Tempo or GET instance is behind a load balancer or proxy that doesn't supporting gRPC or HTTP2, streaming may not work and should be deactivated.
|
||||
|
||||
|
@ -105,7 +105,7 @@ Refer to the [Search using the TraceQL query builder documentation]({{< relref "
|
||||
|
||||
The **TraceQL** query editor lets you search by trace ID and write TraceQL queries using autocomplete.
|
||||
|
||||
Refer to the [TraceQL query editor documentation]({{< relref "./traceql-editor" >}}) to learn more about constructing queries using a code-editor-like experience.
|
||||
Refer to the [TraceQL query editor documentation](./traceql-editor) to learn more about constructing queries using a code-editor-like experience.
|
||||
|
||||

|
||||
|
||||
@ -113,8 +113,8 @@ You can also search for a trace ID by entering it into the query field.
|
||||
|
||||
### Service graph view
|
||||
|
||||
Grafana’s **Service Graph** view uses metrics to display span request rates, error rates, and durations, as well as service graphs.
|
||||
Once the requirements are set up, this preconfigured view is immediately available.
|
||||
The **Service Graph** view uses metrics to display span request rates, error rates, and durations, as well as service graphs.
|
||||
After the requirements are set up, this preconfigured view is immediately available.
|
||||
|
||||
Using the service graph view, you can:
|
||||
|
||||
|
@ -12,7 +12,7 @@ labels:
|
||||
- oss
|
||||
menuTitle: Write TraceQL queries
|
||||
title: Write TraceQL queries with the editor
|
||||
weight: 300
|
||||
weight: 400
|
||||
refs:
|
||||
explore:
|
||||
- pattern: /docs/grafana/
|
||||
@ -41,4 +41,4 @@ refs:
|
||||
[//]: # 'Shared content for the TraceQL query editor'
|
||||
[//]: # 'This content is located in /docs/sources/shared/datasources/tempo-editor-traceql.md'
|
||||
|
||||
{{< docs/shared source="grafana" lookup="datasources/tempo-editor-traceql.md" version="<GRAFANA VERSION>" >}}
|
||||
{{< docs/shared source="grafana" lookup="datasources/tempo-editor-traceql.md" version="<GRAFANA_VERSION>" >}}
|
||||
|
@ -56,4 +56,4 @@ To enable the TraceQL query builder in self-hosted Grafana through version 10.1,
|
||||
|
||||
[//]: # 'Shared content for the Search - TraceQL query builder'
|
||||
|
||||
{{< docs/shared source="grafana" lookup="datasources/tempo-search-traceql.md" leveloffset="+1" version="<GRAFANA VERSION>" >}}
|
||||
{{< docs/shared source="grafana" lookup="datasources/tempo-search-traceql.md" leveloffset="+1" version="<GRAFANA_VERSION>" >}}
|
||||
|
@ -0,0 +1,21 @@
|
||||
---
|
||||
description: Learn how to create TraceQL queries are structured.
|
||||
keywords:
|
||||
- queries
|
||||
- TraceQL
|
||||
labels:
|
||||
products:
|
||||
- cloud
|
||||
- enterprise
|
||||
- oss
|
||||
menuTitle: TraceQL query structure
|
||||
title: TraceQL query structure
|
||||
weight: 200
|
||||
---
|
||||
|
||||
# TraceQL query structure
|
||||
|
||||
[//]: # 'Shared content for best practices for traces'
|
||||
[//]: # 'This content is located in /tempo/docs/sources/shared/trace-structure.md'
|
||||
|
||||
{{< docs/shared source="tempo" lookup="traceql-query-structure.md" version="<TEMPO_VERSION>" >}}
|
@ -72,8 +72,8 @@ You use the Service Graph to detect performance issues; track increases in error
|
||||
|
||||
## Display the Service Graph
|
||||
|
||||
1. [Configure Grafana Alloy](https://grafana.com/docs/tempo/latest/configuration/grafana-alloy/) or [Tempo or GET](https://grafana.com/docs/tempo/latest/metrics-generator/service_graphs/#tempo) to generate Service Graph data.
|
||||
1. Link a Prometheus data source in the Tempo data source's [Service Graph]({{< relref "./configure-tempo-data-source#configure-service-graph" >}}) settings.
|
||||
1. [Configure Grafana Alloy](https://grafana.com/docs/tempo/<TEMPO_VERSION>/configuration/grafana-alloy/) or [Tempo or GET](https://grafana.com/docs/tempo/<TEMPO_VERSION>/metrics-generator/service_graphs/#tempo) to generate Service Graph data.
|
||||
1. Link a Prometheus data source in the Tempo data source's [Service Graph](./configure-tempo-data-source#configure-service-graph) settings.
|
||||
1. Navigate to [Explore](ref:explore).
|
||||
1. Select the Tempo data source.
|
||||
1. Select the **Service Graph** query type.
|
||||
@ -113,10 +113,10 @@ To open the Service Graph view:
|
||||
1. Run the query.
|
||||
1. _(Optional)_ Filter your results.
|
||||
|
||||
{{% admonition type="note" %}}
|
||||
{{< admonition type="note" >}}
|
||||
Grafana uses the `traces_spanmetrics_calls_total` metric to display the name, rate, and error rate columns, and `traces_spanmetrics_latency_bucket` to display the duration column.
|
||||
These metrics must exist in your Prometheus data source.
|
||||
{{% /admonition %}}
|
||||
{{< /admonition >}}
|
||||
|
||||
To open a query in Prometheus with the span name of that row automatically set in the query, click a row in the **rate**, **error rate**, or **duration** columns.
|
||||
|
||||
|
23
docs/sources/datasources/tempo/traces-in-grafana/_index.md
Normal file
23
docs/sources/datasources/tempo/traces-in-grafana/_index.md
Normal file
@ -0,0 +1,23 @@
|
||||
---
|
||||
description: Learn about how you can use tracing data in Grafana Cloud to query data,
|
||||
generate metrics, and link your tracing data with logs, metrics, and profiles.
|
||||
keywords:
|
||||
- Grafana
|
||||
- traces
|
||||
- tracing
|
||||
title: Use traces in Grafana
|
||||
weight: 275
|
||||
---
|
||||
|
||||
# Use traces in Grafana
|
||||
|
||||
Using traces, you can search for traces, generate metrics from spans, and link your tracing data with logs, metrics, and profiles.
|
||||
|
||||
This page provides a summary of how you can use tracing data in Grafana.
|
||||
|
||||
For general documentation on querying data sources in Grafana, refer to [Query and transform data](https://grafana.com/docs/grafana-cloud/visualizations/panels-visualizations/query-transform-data/).
|
||||
|
||||
[//]: # 'Shared content for best practices for traces'
|
||||
[//]: # 'This content is located in /tempo/docs/sources/shared/tempo-in-grafana.md'
|
||||
|
||||
{{< docs/shared source="tempo" lookup="tempo-in-grafana.md" version="<TEMPO_VERSION>" >}}
|
@ -12,7 +12,9 @@ labels:
|
||||
- oss
|
||||
menuTitle: Upload JSON trace file
|
||||
title: Upload a JSON trace file
|
||||
weight: 400
|
||||
weight: 900
|
||||
aliases:
|
||||
- ../json-trace-file/ # /docs/grafana/latest/datasources/tempo/json-trace-file
|
||||
---
|
||||
|
||||
# Upload a JSON trace file
|
||||
@ -20,7 +22,15 @@ weight: 400
|
||||
You can upload a JSON file that contains a single trace and visualize it.
|
||||
If the file has multiple traces, Grafana visualizes the first trace.
|
||||
|
||||
**To download a trace or Service Graph through the inspector:**
|
||||
To upload a trace file:
|
||||
|
||||
1. Select **Explore** in Grafana.
|
||||
1. Select **Import trace** in the right corner.
|
||||
1. Upload your JSON trace file.
|
||||
|
||||
## Download a trace or service graph
|
||||
|
||||
To download a trace or Service Graph through the [Inspector panel](https://grafana.com/docs/grafana/<TEMPO_VERSION>/explore/explore-inspector/):
|
||||
|
||||
1. Open the inspector.
|
||||
1. Navigate to the **Data** tab.
|
@ -12,7 +12,10 @@ labels:
|
||||
- oss
|
||||
menuTitle: Link to a trace ID
|
||||
title: Link to a trace ID
|
||||
weight: 700
|
||||
weight: 800
|
||||
aliases:
|
||||
- ../link-trace-id/ # /docs/grafana/latest/datasources/tempo/link-trace-id/
|
||||
|
||||
refs:
|
||||
configure-grafana-feature-toggles:
|
||||
- pattern: /docs/grafana/
|
||||
@ -69,10 +72,10 @@ You can link to Tempo traces from logs or metrics.
|
||||
|
||||
You can link to Tempo traces from logs in Loki, Elasticsearch, Splunk, and other logs data sources by configuring an internal link.
|
||||
|
||||
To configure this feature, see the [Derived fields]({{< relref "../loki#configure-derived-fields" >}}) section of the Loki data source docs or the [Data links]({{< relref "../elasticsearch#data-links" >}}) section of the Elasticsearch or Splunk data source docs.
|
||||
To configure this feature, refer to the [Derived fields](../../loki#configure-derived-fields) section of the Loki data source docs or the [Data links](../../elasticsearch#data-links) section of the Elasticsearch or Splunk data source docs.
|
||||
|
||||
## Link to a trace ID from metrics
|
||||
|
||||
You can link to Tempo traces from metrics in Prometheus data sources by configuring an exemplar.
|
||||
|
||||
To configure this feature, see the [introduction to exemplars](ref:exemplars) documentation.
|
||||
To configure this feature, refer to the [Exemplars](ref:exemplars) documentation.
|
@ -157,7 +157,8 @@ The node graph requires data to be returned from the data source in a specific f
|
||||
|
||||
## Service graph
|
||||
|
||||
A service graph visualizes span metrics, including rates, error rates, and durations (RED), along with service relationships. Once the requirements are configured, this pre-configured view is immediately available.
|
||||
A service graph visualizes rates, error rates, and durations (RED), along with service relationships.
|
||||
After the requirements are configured, this pre-configured view is immediately available.
|
||||
|
||||
For additional information refer to the following documentation:
|
||||
|
||||
|
@ -228,7 +228,6 @@ Experimental features might be changed or removed without prior notice.
|
||||
| `k8SFolderMove` | Enable folder's api server move |
|
||||
| `teamHttpHeadersMimir` | Enables LBAC for datasources for Mimir to apply LBAC filtering of metrics to the client requests for users in teams |
|
||||
| `templateVariablesUsesCombobox` | Use new combobox component for template variables |
|
||||
| `queryLibraryDashboards` | Enables Query Library feature in Dashboards |
|
||||
| `grafanaAdvisor` | Enables Advisor app |
|
||||
| `elasticsearchImprovedParsing` | Enables less memory intensive Elasticsearch result parsing |
|
||||
| `datasourceConnectionsTab` | Shows defined connections for a data source in the plugins detail page |
|
||||
|
@ -113,8 +113,26 @@ Sign in to Grafana and navigate to **Administration > Authentication > Configure
|
||||
|
||||
1. If you wish to [map user information from SAML assertions]({{< relref "../saml#assertion-mapping" >}}), complete the **Assertion attributes mappings** section.
|
||||
|
||||
You also need to configure the **Groups attribute** field if you want to use group synchronization. Group sync allows you to automatically map users to Grafana teams or role-based access control roles based on their SAML group membership.
|
||||
To learn more about how to configure group synchronization, refer to [Configure team sync]({{< relref "../../configure-team-sync" >}}) and [Configure group attribute sync](https://grafana.com/docs/grafana/<GRAFANA_VERSION>/setup-grafana/configure-security/configure-group-attribute-sync) documentation.
|
||||
If Azure is the Identity Provider over SAML there are caveats for the assertion attribute mappings. Due to how Azure interprets these attributes the full URL will need to be entered in the corresponding fields within the UI, which should match the URLs from the metadata XML. There are differences depending on whether it's a Role or Group claim vs other assertions which Microsoft has [documented](https://learn.microsoft.com/en-us/entra/identity-platform/reference-claims-customization#table-2-saml-restricted-claim-set).
|
||||
|
||||
Group and Role:
|
||||
|
||||
```
|
||||
http://schemas.microsoft.com/ws/2008/06/identity/claims/role
|
||||
http://schemas.microsoft.com/ws/2008/06/identity/claims/groups
|
||||
http://schemas.microsoft.com/identity/claims/displayname
|
||||
```
|
||||
|
||||
Other Assertions:
|
||||
|
||||
```
|
||||
http://schemas.xmlsoap.org/ws/2005/05/identity/claims/emailaddress
|
||||
```
|
||||
|
||||

|
||||
|
||||
You also need to configure the **Groups attribute** field if you want to use group synchronization. Group sync allows you to automatically map users to Grafana teams or role-based access control roles based on their SAML group membership.
|
||||
To learn more about how to configure group synchronization, refer to [Configure team sync]({{< relref "../../configure-team-sync" >}}) and [Configure group attribute sync](https://grafana.com/docs/grafana/<GRAFANA_VERSION>/setup-grafana/configure-security/configure-group-attribute-sync) documentation.
|
||||
|
||||
1. If you want to automatically assign users' roles based on their SAML roles, complete the **Role mapping** section.
|
||||
|
||||
|
6
go.mod
6
go.mod
@ -87,7 +87,7 @@ require (
|
||||
github.com/grafana/grafana-cloud-migration-snapshot v1.6.0 // @grafana/grafana-operator-experience-squad
|
||||
github.com/grafana/grafana-google-sdk-go v0.2.1 // @grafana/partner-datasources
|
||||
github.com/grafana/grafana-openapi-client-go v0.0.0-20231213163343-bd475d63fb79 // @grafana/grafana-backend-group
|
||||
github.com/grafana/grafana-plugin-sdk-go v0.265.0 // @grafana/plugins-platform-backend
|
||||
github.com/grafana/grafana-plugin-sdk-go v0.266.0 // @grafana/plugins-platform-backend
|
||||
github.com/grafana/loki/v3 v3.2.1 // @grafana/observability-logs
|
||||
github.com/grafana/otel-profiling-go v0.5.1 // @grafana/grafana-backend-group
|
||||
github.com/grafana/pyroscope-go/godeltaprof v0.1.8 // @grafana/observability-traces-and-profiling
|
||||
@ -172,7 +172,7 @@ require (
|
||||
golang.org/x/exp v0.0.0-20240909161429-701f63a606c0 // @grafana/alerting-backend
|
||||
golang.org/x/mod v0.22.0 // indirect; @grafana/grafana-backend-group
|
||||
golang.org/x/net v0.34.0 // @grafana/oss-big-tent @grafana/partner-datasources
|
||||
golang.org/x/oauth2 v0.25.0 // @grafana/identity-access-team
|
||||
golang.org/x/oauth2 v0.26.0 // @grafana/identity-access-team
|
||||
golang.org/x/sync v0.11.0 // @grafana/alerting-backend
|
||||
golang.org/x/text v0.21.0 // @grafana/grafana-backend-group
|
||||
golang.org/x/time v0.9.0 // @grafana/grafana-backend-group
|
||||
@ -518,7 +518,7 @@ require (
|
||||
go.uber.org/mock v0.5.0 // indirect
|
||||
go.uber.org/multierr v1.11.0 // indirect
|
||||
go4.org/netipx v0.0.0-20230125063823-8449b0a6169f // indirect
|
||||
golang.org/x/sys v0.29.0 // indirect
|
||||
golang.org/x/sys v0.30.0 // indirect
|
||||
golang.org/x/term v0.28.0 // indirect
|
||||
golang.org/x/xerrors v0.0.0-20240903120638-7835f813f4da // indirect
|
||||
gomodules.xyz/jsonpatch/v2 v2.4.0 // indirect
|
||||
|
12
go.sum
12
go.sum
@ -1545,8 +1545,8 @@ github.com/grafana/grafana-google-sdk-go v0.2.1 h1:XeFdKnkXBjOJjXc1gf4iMx4h5aCHT
|
||||
github.com/grafana/grafana-google-sdk-go v0.2.1/go.mod h1:RiITSHwBhqVTTd3se3HQq5Ncs/wzzhTB9OK5N0J0PEU=
|
||||
github.com/grafana/grafana-openapi-client-go v0.0.0-20231213163343-bd475d63fb79 h1:r+mU5bGMzcXCRVAuOrTn54S80qbfVkvTdUJZfSfTNbs=
|
||||
github.com/grafana/grafana-openapi-client-go v0.0.0-20231213163343-bd475d63fb79/go.mod h1:wc6Hbh3K2TgCUSfBC/BOzabItujtHMESZeFk5ZhdxhQ=
|
||||
github.com/grafana/grafana-plugin-sdk-go v0.265.0 h1:XshoH8R23Jm9jRreW9R3aOrIVr9vxhCWFyrMe7BFSks=
|
||||
github.com/grafana/grafana-plugin-sdk-go v0.265.0/go.mod h1:nkN6xI08YcX6CGsgvRA2+19nhXA/ZPuneLMUUElOD80=
|
||||
github.com/grafana/grafana-plugin-sdk-go v0.266.0 h1:YP+iEpXH3HRX9Xo4NHjsrJhN2W7uVTtkLNzMHYbmiLI=
|
||||
github.com/grafana/grafana-plugin-sdk-go v0.266.0/go.mod h1:bxkXrBQ4QSmOncsWdIOcpgP+M6wajQNMAPXlbWrqAWY=
|
||||
github.com/grafana/grafana/apps/advisor v0.0.0-20250123151950-b066a6313173 h1:uOM89HiWVVOTls0LrD4coHTckb2lA4U0sIJwCYdbhbw=
|
||||
github.com/grafana/grafana/apps/advisor v0.0.0-20250123151950-b066a6313173/go.mod h1:goSDiy3jtC2cp8wjpPZdUHRENcoSUHae1/Px/MDfddA=
|
||||
github.com/grafana/grafana/apps/alerting/notifications v0.0.0-20250121113133-e747350fee2d h1:NRVOtiG1aUwOazBj9KM7X2o2shsM6TchqisezzoH1gw=
|
||||
@ -2738,8 +2738,8 @@ golang.org/x/oauth2 v0.7.0/go.mod h1:hPLQkd9LyjfXTiRohC/41GhcFqxisoUQ99sCUOHO9x4
|
||||
golang.org/x/oauth2 v0.8.0/go.mod h1:yr7u4HXZRm1R1kBWqr/xKNqewf0plRYoB7sla+BCIXE=
|
||||
golang.org/x/oauth2 v0.12.0/go.mod h1:A74bZ3aGXgCY0qaIC9Ahg6Lglin4AMAco8cIv9baba4=
|
||||
golang.org/x/oauth2 v0.16.0/go.mod h1:hqZ+0LWXsiVoZpeld6jVt06P3adbS2Uu911W1SsJv2o=
|
||||
golang.org/x/oauth2 v0.25.0 h1:CY4y7XT9v0cRI9oupztF8AgiIu99L/ksR/Xp/6jrZ70=
|
||||
golang.org/x/oauth2 v0.25.0/go.mod h1:XYTD2NtWslqkgxebSiOHnXEap4TF09sJSc7H1sXbhtI=
|
||||
golang.org/x/oauth2 v0.26.0 h1:afQXWNNaeC4nvZ0Ed9XvCCzXM6UHJG7iCg0W4fPqSBE=
|
||||
golang.org/x/oauth2 v0.26.0/go.mod h1:XYTD2NtWslqkgxebSiOHnXEap4TF09sJSc7H1sXbhtI=
|
||||
golang.org/x/sync v0.0.0-20180314180146-1d60e4601c6f/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
|
||||
golang.org/x/sync v0.0.0-20181108010431-42b317875d0f/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
|
||||
golang.org/x/sync v0.0.0-20181221193216-37e7f081c4d4/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
|
||||
@ -2889,8 +2889,8 @@ golang.org/x/sys v0.14.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA=
|
||||
golang.org/x/sys v0.15.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA=
|
||||
golang.org/x/sys v0.16.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA=
|
||||
golang.org/x/sys v0.17.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA=
|
||||
golang.org/x/sys v0.29.0 h1:TPYlXGxvx1MGTn2GiZDhnjPA9wZzZeGKHHmKhHYvgaU=
|
||||
golang.org/x/sys v0.29.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA=
|
||||
golang.org/x/sys v0.30.0 h1:QjkSwP/36a20jFYWkSue1YwXzLmsV5Gfq7Eiy72C1uc=
|
||||
golang.org/x/sys v0.30.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA=
|
||||
golang.org/x/term v0.0.0-20201126162022-7de9c90e9dd1/go.mod h1:bj7SfCRtBDWHUb9snDiAeCFNEtKQo2Wmx5Cou7ajbmo=
|
||||
golang.org/x/term v0.0.0-20210927222741-03fcf44c2211/go.mod h1:jbD1KX2456YbFQfuXm/mYQcufACuNUgVhRMnK/tPxf8=
|
||||
golang.org/x/term v0.1.0/go.mod h1:jbD1KX2456YbFQfuXm/mYQcufACuNUgVhRMnK/tPxf8=
|
||||
|
@ -1066,7 +1066,6 @@ github.com/IBM/ibm-cos-sdk-go v1.11.0/go.mod h1:FnWOym0CvrPM0nHoXvceClOEvGVXecPp
|
||||
github.com/IBM/sarama v1.43.1/go.mod h1:GG5q1RURtDNPz8xxJs3mgX6Ytak8Z9eLhAkJPObe2xE=
|
||||
github.com/IBM/sarama v1.43.2 h1:HABeEqRUh32z8yzY2hGB/j8mHSzC/HA9zlEjqFNCzSw=
|
||||
github.com/IBM/sarama v1.43.2/go.mod h1:Kyo4WkF24Z+1nz7xeVUFWIuKVV8RS3wM8mkvPKMdXFQ=
|
||||
github.com/JohnCGriffin/overflow v0.0.0-20211019200055-46fa312c352c h1:RGWPOewvKIROun94nF7v2cua9qP+thov/7M50KEoeSU=
|
||||
github.com/Joker/jade v1.1.3 h1:Qbeh12Vq6BxURXT1qZBRHsDxeURB8ztcL6f3EXSGeHk=
|
||||
github.com/Joker/jade v1.1.3/go.mod h1:T+2WLyt7VH6Lp0TRxQrUYEs64nRc83wkMQrfeIQKduM=
|
||||
github.com/KimMachineGun/automemlimit v0.6.0 h1:p/BXkH+K40Hax+PuWWPQ478hPjsp9h1CPDhLlA3Z37E=
|
||||
@ -1455,7 +1454,6 @@ github.com/go-viper/mapstructure/v2 v2.0.0-alpha.1/go.mod h1:oJDH3BJKyqBA2TXFhDs
|
||||
github.com/go-zookeeper/zk v1.0.2/go.mod h1:nOB03cncLtlp4t+UAkGSV+9beXP/akpekBwL+UX1Qcw=
|
||||
github.com/go-zookeeper/zk v1.0.3 h1:7M2kwOsc//9VeeFiPtf+uSJlVpU66x9Ba5+8XK7/TDg=
|
||||
github.com/go-zookeeper/zk v1.0.3/go.mod h1:nOB03cncLtlp4t+UAkGSV+9beXP/akpekBwL+UX1Qcw=
|
||||
github.com/gobs/pretty v0.0.0-20180724170744-09732c25a95b/go.mod h1:Xo4aNUOrJnVruqWQJBtW6+bTBDTniY8yZum5rF3b5jw=
|
||||
github.com/gobwas/httphead v0.1.0 h1:exrUm0f4YX0L7EBwZHuCF4GDp8aJfVeBrlLQrs6NqWU=
|
||||
github.com/gobwas/httphead v0.1.0/go.mod h1:O/RXo79gxV8G+RqlR/otEwx4Q36zl9rqC5u12GKvMCM=
|
||||
github.com/gobwas/pool v0.2.1 h1:xfeeEhW7pwmX8nuLVlqbzVc7udMDrwetjEv+TZIz1og=
|
||||
@ -1557,6 +1555,8 @@ github.com/grafana/go-gelf/v2 v2.0.1/go.mod h1:lexHie0xzYGwCgiRGcvZ723bSNyNI8ZRD
|
||||
github.com/grafana/grafana-app-sdk v0.29.0/go.mod h1:XLt308EmK6kvqPlzjUyXxbwZKEk2vur/eiypUNDay5I=
|
||||
github.com/grafana/grafana-azure-sdk-go/v2 v2.1.5/go.mod h1:i0uiuu9/sMFBJnpFbjvviH0KOZzdWkti9Q9Ck1HkFWM=
|
||||
github.com/grafana/grafana-plugin-sdk-go v0.262.0/go.mod h1:U43Cnrj/9DNYyvFcNdeUWNjMXTKNB0jcTcQGpWKd2gw=
|
||||
github.com/grafana/grafana-plugin-sdk-go v0.266.0 h1:YP+iEpXH3HRX9Xo4NHjsrJhN2W7uVTtkLNzMHYbmiLI=
|
||||
github.com/grafana/grafana-plugin-sdk-go v0.266.0/go.mod h1:bxkXrBQ4QSmOncsWdIOcpgP+M6wajQNMAPXlbWrqAWY=
|
||||
github.com/grafana/grafana/apps/advisor v0.0.0-20250121115006-c1eac9f9973f/go.mod h1:goSDiy3jtC2cp8wjpPZdUHRENcoSUHae1/Px/MDfddA=
|
||||
github.com/grafana/grafana/pkg/promlib v0.0.7/go.mod h1:rnwJXCA2xRwb7F27NB35iO/JsLL/H/+eVXECk/hrEhQ=
|
||||
github.com/grafana/regexp v0.0.0-20221122212121-6b5c0a4cb7fd/go.mod h1:M5qHK+eWfAv8VR/265dIuEpL3fNfeC21tXXp9itM24A=
|
||||
@ -2430,6 +2430,8 @@ golang.org/x/oauth2 v0.19.0/go.mod h1:vYi7skDa1x015PmRRYZ7+s1cWyPgrPiSYRe4rnsexc
|
||||
golang.org/x/oauth2 v0.21.0/go.mod h1:XYTD2NtWslqkgxebSiOHnXEap4TF09sJSc7H1sXbhtI=
|
||||
golang.org/x/oauth2 v0.22.0/go.mod h1:XYTD2NtWslqkgxebSiOHnXEap4TF09sJSc7H1sXbhtI=
|
||||
golang.org/x/oauth2 v0.23.0/go.mod h1:XYTD2NtWslqkgxebSiOHnXEap4TF09sJSc7H1sXbhtI=
|
||||
golang.org/x/oauth2 v0.26.0 h1:afQXWNNaeC4nvZ0Ed9XvCCzXM6UHJG7iCg0W4fPqSBE=
|
||||
golang.org/x/oauth2 v0.26.0/go.mod h1:XYTD2NtWslqkgxebSiOHnXEap4TF09sJSc7H1sXbhtI=
|
||||
golang.org/x/sync v0.4.0/go.mod h1:FU7BRWz2tNW+3quACPkgCx/L+uEAv1htQ0V83Z9Rj+Y=
|
||||
golang.org/x/sync v0.7.0/go.mod h1:Czt+wKu1gCyEFDUtn0jG5QVvpJ6rzVqr5aXyt9drQfk=
|
||||
golang.org/x/sync v0.8.0/go.mod h1:Czt+wKu1gCyEFDUtn0jG5QVvpJ6rzVqr5aXyt9drQfk=
|
||||
@ -2459,6 +2461,8 @@ golang.org/x/sys v0.24.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA=
|
||||
golang.org/x/sys v0.25.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA=
|
||||
golang.org/x/sys v0.26.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA=
|
||||
golang.org/x/sys v0.28.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA=
|
||||
golang.org/x/sys v0.30.0 h1:QjkSwP/36a20jFYWkSue1YwXzLmsV5Gfq7Eiy72C1uc=
|
||||
golang.org/x/sys v0.30.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA=
|
||||
golang.org/x/telemetry v0.0.0-20240208230135-b75ee8823808/go.mod h1:KG1lNk5ZFNssSZLrpVb4sMXKMpGwGXOxSG3rnu2gZQQ=
|
||||
golang.org/x/telemetry v0.0.0-20240228155512-f48c80bd79b2/go.mod h1:TeRTkGYfJXctD9OcfyVLyj2J3IxLnKwHJR8f4D8a3YE=
|
||||
golang.org/x/telemetry v0.0.0-20240521205824-bda55230c457 h1:zf5N6UOrA487eEFacMePxjXAJctxKmyjKUsjA11Uzuk=
|
||||
|
@ -247,7 +247,6 @@ export interface FeatureToggles {
|
||||
ABTestFeatureToggleA?: boolean;
|
||||
templateVariablesUsesCombobox?: boolean;
|
||||
ABTestFeatureToggleB?: boolean;
|
||||
queryLibraryDashboards?: boolean;
|
||||
grafanaAdvisor?: boolean;
|
||||
elasticsearchImprovedParsing?: boolean;
|
||||
exploreMetricsUseExternalAppPlugin?: boolean;
|
||||
@ -259,4 +258,5 @@ export interface FeatureToggles {
|
||||
newLogsPanel?: boolean;
|
||||
grafanaconThemes?: boolean;
|
||||
pluginsCDNSyncLoader?: boolean;
|
||||
alertingJiraIntegration?: boolean;
|
||||
}
|
||||
|
@ -16,6 +16,7 @@ export interface Options {
|
||||
dedupStrategy: common.LogsDedupStrategy;
|
||||
enableInfiniteScrolling?: boolean;
|
||||
enableLogDetails: boolean;
|
||||
onNewLogsReceived?: unknown;
|
||||
showTime: boolean;
|
||||
sortOrder: common.LogsSortOrder;
|
||||
wrapLogMessage: boolean;
|
||||
|
@ -0,0 +1,23 @@
|
||||
// Code generated - EDITING IS FUTILE. DO NOT EDIT.
|
||||
//
|
||||
// Generated by:
|
||||
// public/app/plugins/gen.go
|
||||
// Using jennies:
|
||||
// TSTypesJenny
|
||||
// PluginTsTypesJenny
|
||||
//
|
||||
// Run 'make gen-cue' from repository root to regenerate.
|
||||
|
||||
import * as common from '@grafana/schema';
|
||||
|
||||
export const pluginVersion = "11.6.0-pre";
|
||||
|
||||
export interface Options {
|
||||
dedupStrategy: common.LogsDedupStrategy;
|
||||
enableInfiniteScrolling?: boolean;
|
||||
enableLogDetails: boolean;
|
||||
onNewLogsReceived?: unknown;
|
||||
showTime: boolean;
|
||||
sortOrder: common.LogsSortOrder;
|
||||
wrapLogMessage: boolean;
|
||||
}
|
@ -12,7 +12,7 @@ import { SkeletonComponent, attachSkeleton } from '../../utils/skeleton';
|
||||
import { Icon } from '../Icon/Icon';
|
||||
import { Tooltip } from '../Tooltip/Tooltip';
|
||||
|
||||
export type BadgeColor = 'blue' | 'red' | 'green' | 'orange' | 'purple';
|
||||
export type BadgeColor = 'blue' | 'red' | 'green' | 'orange' | 'purple' | 'darkgrey';
|
||||
|
||||
export interface BadgeProps extends HTMLAttributes<HTMLDivElement> {
|
||||
text: React.ReactNode;
|
||||
|
@ -4,53 +4,66 @@ import { Combobox } from './Combobox';
|
||||
|
||||
<Meta title="MDX|Combobox" component={Combobox} />
|
||||
|
||||
## Usage
|
||||
# Combobox
|
||||
|
||||
**Do**
|
||||
A performant and accessible combobox component that supports both synchronous and asynchronous options loading. It provides type-ahead filtering, keyboard navigation, and virtual scrolling for handling large datasets efficiently.
|
||||
|
||||
- Use in inline query editors
|
||||
- Use when you require async calls from a select input
|
||||
**Use Combobox when you need:**
|
||||
|
||||
**Don't**
|
||||
- A searchable dropdown with keyboard navigation
|
||||
- Asynchronous loading of options (e.g., API calls)
|
||||
- Support for large datasets (1000+ items)
|
||||
- Type-ahead filtering functionality
|
||||
- Custom value creation
|
||||
- Inline form usage (e.g., query editors)
|
||||
|
||||
- Use the async functionality, when all items are only loaded on the initial load
|
||||
- Use when fewer than 4 items are needed, as a `RadioButtonGroup` may be more suitable (not for inline use cases)
|
||||
- Use this component if you need custom option styling
|
||||
**Consider alternatives when:**
|
||||
|
||||
## ComboboxOption
|
||||
- You have fewer than 4 options (consider `RadioButtonGroup` instead)
|
||||
- You need complex custom option styling
|
||||
|
||||
The `ComboboxOption` currently supports 3 properties:
|
||||
## Usage & Guidelines
|
||||
|
||||
- `label` - The text that is visible in the menu.
|
||||
- `value` (required) - The value that is selected.
|
||||
- `description` - A longer description that describes the choice.
|
||||
### Options
|
||||
|
||||
If no `label` is given, `value` will be used as a display value.
|
||||
Options are supplied through the `options` prop as either:
|
||||
|
||||
## Sizing
|
||||
- An array of options for synchronous usage
|
||||
- An async function that returns a promise resolving to options for user input.
|
||||
|
||||
The recommended way to set the width is by sizing the container element. This is so it may reflect a similar size as other inputs in the context.
|
||||
Options can be an array of objects with seperate label and values, or an array of strings which will be used as both the label and value.
|
||||
|
||||
If that is not possible, the width can be set directly on the component, by setting a number, which is a multiple of `8px`.
|
||||
While Combobox can handle large sets of options, you should consider both the user experience of searching through many options, and the performance of loading many options from an API.
|
||||
|
||||
For inline usage, such as in query editors, it may be useful to size the input based on the content. Set `width="auto"` to achieve this. In this case, it is also recommended to set `maxWidth` and `minWidth`.
|
||||
### Async behaviour
|
||||
|
||||
## Async Usage
|
||||
When using Combobox with options from a remote source as the user types, you can supply the `options` prop as an function that is called on each keypress with the current input value and returns a promise resolving to an array of options matching the input.
|
||||
|
||||
The `options` prop can accept an async function:
|
||||
Consider the following when implementing async behaviour:
|
||||
|
||||
- When the menu opens, the `options` function is called with `''`, to load all options.
|
||||
- When the user types, the `options` function is called with the current input value.
|
||||
- Consumers should return filtered options matching the input. This is bested suited for APIs that support filtering/search.
|
||||
- When the menu is opened with blank input (e.g. initial click with no selected value) the function will be called with an empty string.
|
||||
- Consumers should only ever load top-n options from APIs using this async function. If your API does not support filtering, consider loading options yourself and just passing the sync options array in
|
||||
- Combobox does not cache calls to the async function. If you need this, implement your own caching.
|
||||
- Calls to the async function are debounced, so consumers should not need to implement this themselves.
|
||||
|
||||
Note: The calls are debounced. Old calls are invalidated when a new call is made.
|
||||
### Value
|
||||
|
||||
## Unit testing
|
||||
The `value` prop is used to set the selected value of the combobox. A scalar value (the value of options) is preferred over a full option object.
|
||||
|
||||
Writing unit tests with Combobox requires mocking the `getBoundingClientRect` method because of [the virtual list library](https://github.com/TanStack/virtual/issues/29#issuecomment-657519522)
|
||||
When using async options with seperate labels and values, the `value` prop can be a full option object to ensure the correct label is displayed.
|
||||
|
||||
This code sets up the mocking before all tests:
|
||||
### Sizing
|
||||
|
||||
```js
|
||||
Combobox defaults to filling the width of its container to match other inputs. If that's not desired, set the `width` prop to control the exact input width.
|
||||
|
||||
For inline usage, such as in query editors, it may be useful to size the input based on the text content. Set width="auto" to achieve this. In this case, it is also recommended to set maxWidth and minWidth.
|
||||
|
||||
### Unit tests
|
||||
|
||||
The component requires mocking `getBoundingClientRect` because of virtualisation:
|
||||
|
||||
```typescript
|
||||
beforeAll(() => {
|
||||
const mockGetBoundingClientRect = jest.fn(() => ({
|
||||
width: 120,
|
||||
@ -67,13 +80,9 @@ beforeAll(() => {
|
||||
});
|
||||
```
|
||||
|
||||
### Selecting an option
|
||||
#### Select an option by mouse
|
||||
|
||||
To select an option, you can use any `*ByRole` methods, as Combobox has proper roles for accessibility.
|
||||
|
||||
#### Selecting option by clicking
|
||||
|
||||
```js
|
||||
```jsx
|
||||
render(<Combobox options={options} onChange={onChangeHandler} value={null} />);
|
||||
|
||||
const input = screen.getByRole('combobox');
|
||||
@ -84,9 +93,9 @@ await userEvent.click(item);
|
||||
expect(screen.getByDisplayValue('Option 1')).toBeInTheDocument();
|
||||
```
|
||||
|
||||
#### Selecting option by typing
|
||||
#### Select an option by keyboard
|
||||
|
||||
```js
|
||||
```jsx
|
||||
render(<Combobox options={options} value={null} onChange={onChangeHandler} />);
|
||||
|
||||
const input = screen.getByRole('combobox');
|
||||
@ -96,6 +105,32 @@ await userEvent.keyboard('{ArrowDown}{Enter}');
|
||||
expect(screen.getByDisplayValue('Option 3')).toBeInTheDocument();
|
||||
```
|
||||
|
||||
## Migrating from Select
|
||||
|
||||
Combobox's API is similar to Select, but is greatly simplified. Any workarounds you may have implemented to workaround Select's slow performance are no longer necessary.
|
||||
|
||||
Some differences to note:
|
||||
|
||||
- Virtualisation is built in, so no separate `VirtualizedSelect` component is needed.
|
||||
- Async behaviour is built in so a seperate `AsyncSelect` component is not needed
|
||||
- `isLoading: boolean` has been renamed to `loading: boolean`
|
||||
- `allowCustomValue` has been renamed to `createCustomValue`.
|
||||
- When specifying `width="auto"`, `minWidth` is also required.
|
||||
- Groups are not supported at this time.
|
||||
- Many props used to control subtle behaviour have been removed to simplify the API and improve performance.
|
||||
- Custom render props, or label as ReactNode is not supported at this time. Reach out if you have a hard requirement for this and we can discuss.
|
||||
|
||||
For all async behaviour, pass in a function that returns `Promise<ComboboxOption[]>` that will be called when the menu is opened, and on keypress.
|
||||
|
||||
```tsx
|
||||
const loadOptions = useCallback(async (input: string) => {
|
||||
const response = await fetch(`/api/options?query=${input}`);
|
||||
return response.json();
|
||||
}, []);
|
||||
|
||||
<Combobox options={loadOptions} />;
|
||||
```
|
||||
|
||||
## Props
|
||||
|
||||
<ArgTypes of={Combobox} />
|
||||
|
@ -1,9 +1,8 @@
|
||||
import { action } from '@storybook/addon-actions';
|
||||
import { useArgs } from '@storybook/preview-api';
|
||||
import { Meta, StoryFn, StoryObj } from '@storybook/react';
|
||||
import React, { useEffect, useState } from 'react';
|
||||
import { useEffect, useState } from 'react';
|
||||
|
||||
import { Alert } from '../Alert/Alert';
|
||||
import { Field } from '../Forms/Field';
|
||||
|
||||
import { Combobox, ComboboxProps } from './Combobox';
|
||||
@ -64,7 +63,6 @@ const meta: Meta<PropsAndCustomArgs> = {
|
||||
],
|
||||
value: 'banana',
|
||||
},
|
||||
decorators: [InDevDecorator],
|
||||
};
|
||||
export default meta;
|
||||
|
||||
@ -257,17 +255,3 @@ export const PositioningTest: Story = {
|
||||
);
|
||||
},
|
||||
};
|
||||
|
||||
function InDevDecorator(Story: React.ElementType) {
|
||||
return (
|
||||
<div>
|
||||
<Alert title="This component is still in development!" severity="info">
|
||||
Combobox is still in development and not able to be used externally.
|
||||
<br />
|
||||
Within the Grafana repo, it can be used by importing it from{' '}
|
||||
<span style={{ fontFamily: 'monospace' }}>@grafana/ui/src/unstable</span>
|
||||
</Alert>
|
||||
<Story />
|
||||
</div>
|
||||
);
|
||||
}
|
||||
|
@ -35,17 +35,32 @@ export interface ComboboxBaseProps<T extends string | number>
|
||||
* Allows the user to set a value which is not in the list of options.
|
||||
*/
|
||||
createCustomValue?: boolean;
|
||||
options: Array<ComboboxOption<T>> | ((inputValue: string) => Promise<Array<ComboboxOption<T>>>);
|
||||
onChange: (option: ComboboxOption<T>) => void;
|
||||
|
||||
/**
|
||||
* Most consumers should pass value in as a scalar string | number. However, sometimes with Async because we don't
|
||||
* have the full options loaded to match the value to, consumers may also pass in an Option with a label to display.
|
||||
* An array of options, or a function that returns a promise resolving to an array of options.
|
||||
* If a function, it will be called when the menu is opened and on keypress with the current search query.
|
||||
*/
|
||||
options: Array<ComboboxOption<T>> | ((inputValue: string) => Promise<Array<ComboboxOption<T>>>);
|
||||
|
||||
/**
|
||||
* onChange handler is called with the newly selected option.
|
||||
*/
|
||||
onChange: (option: ComboboxOption<T>) => void;
|
||||
|
||||
/**
|
||||
* Current selected value. Most consumers should pass a scalar value (string | number). However, sometimes with Async
|
||||
* it may be better to pass in an Option with a label to display.
|
||||
*/
|
||||
value?: T | ComboboxOption<T> | null;
|
||||
|
||||
/**
|
||||
* Defaults to 100%. Number is a multiple of 8px. 'auto' will size the input to the content.
|
||||
* Defaults to full width of container. Number is a multiple of the spacing unit. 'auto' will size the input to the content.
|
||||
* */
|
||||
width?: number | 'auto';
|
||||
|
||||
/**
|
||||
* Called when the input loses focus.
|
||||
*/
|
||||
onBlur?: () => void;
|
||||
}
|
||||
|
||||
@ -53,6 +68,9 @@ const RECOMMENDED_ITEMS_AMOUNT = 100_000;
|
||||
|
||||
type ClearableConditionals<T extends number | string> =
|
||||
| {
|
||||
/**
|
||||
* Allow the user to clear the selected value. `null` is emitted from the onChange handler
|
||||
*/
|
||||
isClearable: true;
|
||||
/**
|
||||
* The onChange handler is called with `null` when clearing the Combobox.
|
||||
|
@ -7,6 +7,7 @@ import { useState } from 'react';
|
||||
import { SelectableValue, toIconName } from '@grafana/data';
|
||||
|
||||
import { getAvailableIcons } from '../../types';
|
||||
import { Alert } from '../Alert/Alert';
|
||||
import { Icon } from '../Icon/Icon';
|
||||
|
||||
import { AsyncMultiSelect, AsyncSelect, MultiSelect, Select } from './Select';
|
||||
@ -92,6 +93,7 @@ const meta: Meta = {
|
||||
},
|
||||
},
|
||||
},
|
||||
decorators: [DeprecatedDecorator],
|
||||
};
|
||||
|
||||
const loadAsyncOptions = () => {
|
||||
@ -383,7 +385,7 @@ export const AutoMenuPlacement: StoryFn = (args) => {
|
||||
|
||||
return (
|
||||
<>
|
||||
<div style={{ width: '100%', height: '95vh', display: 'flex', alignItems: 'flex-end' }}>
|
||||
<div style={{ width: '100%', height: 'calc(95vh - 118px)', display: 'flex', alignItems: 'flex-end' }}>
|
||||
<Select
|
||||
options={generateOptions()}
|
||||
value={value}
|
||||
@ -455,3 +457,18 @@ CustomValueCreation.args = {
|
||||
};
|
||||
|
||||
export default meta;
|
||||
|
||||
function DeprecatedDecorator(Story: React.ElementType) {
|
||||
return (
|
||||
<div>
|
||||
<Alert title="Deprecated!" severity="warning">
|
||||
The Select component is deprecated.
|
||||
<br />
|
||||
Use Combobox instead - it supports most use cases, is performant by default, and can handle hundreds of
|
||||
thousands of options, and has a simpler API.
|
||||
</Alert>
|
||||
|
||||
<Story />
|
||||
</div>
|
||||
);
|
||||
}
|
||||
|
@ -10,6 +10,7 @@ import {
|
||||
VirtualizedSelectAsyncProps,
|
||||
} from './types';
|
||||
|
||||
/** @deprecated Use Combobox component instead */
|
||||
export function Select<T, Rest = {}>(props: SelectCommonProps<T> & Rest) {
|
||||
return <SelectBase {...props} />;
|
||||
}
|
||||
@ -24,14 +25,17 @@ export interface AsyncSelectProps<T> extends Omit<SelectCommonProps<T>, 'options
|
||||
value?: T | SelectableValue<T> | null;
|
||||
}
|
||||
|
||||
/** @deprecated Use Combobox component instead */
|
||||
export function AsyncSelect<T, Rest = {}>(props: AsyncSelectProps<T> & Rest) {
|
||||
return <SelectBase {...props} />;
|
||||
}
|
||||
|
||||
/** @deprecated Use Combobox component instead - it's virtualised by default! */
|
||||
export function VirtualizedSelect<T, Rest = {}>(props: VirtualizedSelectProps<T> & Rest) {
|
||||
return <SelectBase virtualized {...props} />;
|
||||
}
|
||||
|
||||
/** @deprecated Use Combobox component instead - it's virtualised by default! */
|
||||
export function AsyncVirtualizedSelect<T, Rest = {}>(props: VirtualizedSelectAsyncProps<T> & Rest) {
|
||||
return <SelectBase virtualized {...props} />;
|
||||
}
|
||||
|
@ -275,6 +275,7 @@ export { ButtonSelect } from './Dropdown/ButtonSelect';
|
||||
export { Dropdown } from './Dropdown/Dropdown';
|
||||
export { PluginSignatureBadge, type PluginSignatureBadgeProps } from './PluginSignatureBadge/PluginSignatureBadge';
|
||||
export { UserIcon, type UserIconProps } from './UsersIndicator/UserIcon';
|
||||
export { UsersIndicator, type UsersIndicatorProps } from './UsersIndicator/UsersIndicator';
|
||||
export { type UserView } from './UsersIndicator/types';
|
||||
export { Avatar } from './UsersIndicator/Avatar';
|
||||
// Export this until we've figured out a good approach to inline form styles.
|
||||
|
@ -6,7 +6,7 @@ toolchain go1.23.6
|
||||
|
||||
require (
|
||||
github.com/emicklei/go-restful/v3 v3.11.0
|
||||
github.com/grafana/grafana-plugin-sdk-go v0.265.0
|
||||
github.com/grafana/grafana-plugin-sdk-go v0.266.0
|
||||
github.com/grafana/grafana/pkg/apimachinery v0.0.0-20240808213237-f4d2e064f435
|
||||
github.com/grafana/grafana/pkg/semconv v0.0.0-20240808213237-f4d2e064f435
|
||||
github.com/mattbaird/jsonpatch v0.0.0-20240118010651-0ba75a80ca38
|
||||
@ -138,9 +138,9 @@ require (
|
||||
golang.org/x/exp v0.0.0-20240909161429-701f63a606c0 // indirect
|
||||
golang.org/x/mod v0.22.0 // indirect
|
||||
golang.org/x/net v0.34.0 // indirect
|
||||
golang.org/x/oauth2 v0.25.0 // indirect
|
||||
golang.org/x/oauth2 v0.26.0 // indirect
|
||||
golang.org/x/sync v0.11.0 // indirect
|
||||
golang.org/x/sys v0.29.0 // indirect
|
||||
golang.org/x/sys v0.30.0 // indirect
|
||||
golang.org/x/term v0.28.0 // indirect
|
||||
golang.org/x/text v0.21.0 // indirect
|
||||
golang.org/x/time v0.9.0 // indirect
|
||||
|
@ -134,8 +134,8 @@ github.com/gorilla/mux v1.8.1 h1:TuBL49tXwgrFYWhqrNgrUNEY92u81SPhu7sTdzQEiWY=
|
||||
github.com/gorilla/mux v1.8.1/go.mod h1:AKf9I4AEqPTmMytcMc0KkNouC66V3BtZ4qD5fmWSiMQ=
|
||||
github.com/gorilla/websocket v1.5.3 h1:saDtZ6Pbx/0u+bgYQ3q96pZgCzfhKXGPqt7kZ72aNNg=
|
||||
github.com/gorilla/websocket v1.5.3/go.mod h1:YR8l580nyteQvAITg2hZ9XVh4b55+EU/adAjf1fMHhE=
|
||||
github.com/grafana/grafana-plugin-sdk-go v0.265.0 h1:XshoH8R23Jm9jRreW9R3aOrIVr9vxhCWFyrMe7BFSks=
|
||||
github.com/grafana/grafana-plugin-sdk-go v0.265.0/go.mod h1:nkN6xI08YcX6CGsgvRA2+19nhXA/ZPuneLMUUElOD80=
|
||||
github.com/grafana/grafana-plugin-sdk-go v0.266.0 h1:YP+iEpXH3HRX9Xo4NHjsrJhN2W7uVTtkLNzMHYbmiLI=
|
||||
github.com/grafana/grafana-plugin-sdk-go v0.266.0/go.mod h1:bxkXrBQ4QSmOncsWdIOcpgP+M6wajQNMAPXlbWrqAWY=
|
||||
github.com/grafana/grafana/pkg/apimachinery v0.0.0-20240808213237-f4d2e064f435 h1:lmw60EW7JWlAEvgggktOyVkH4hF1m/+LSF/Ap0NCyi8=
|
||||
github.com/grafana/grafana/pkg/apimachinery v0.0.0-20240808213237-f4d2e064f435/go.mod h1:ORVFiW/KNRY52lNjkGwnFWCxNVfE97bJG2jr2fetq0I=
|
||||
github.com/grafana/grafana/pkg/semconv v0.0.0-20240808213237-f4d2e064f435 h1:SNEeqY22DrGr5E9kGF1mKSqlOom14W9+b1u4XEGJowA=
|
||||
@ -421,8 +421,8 @@ golang.org/x/net v0.0.0-20201021035429-f5854403a974/go.mod h1:sp8m0HH+o8qH0wwXwY
|
||||
golang.org/x/net v0.34.0 h1:Mb7Mrk043xzHgnRM88suvJFwzVrRfHEHJEl5/71CKw0=
|
||||
golang.org/x/net v0.34.0/go.mod h1:di0qlW3YNM5oh6GqDGQr92MyTozJPmybPK4Ev/Gm31k=
|
||||
golang.org/x/oauth2 v0.0.0-20180821212333-d2e6202438be/go.mod h1:N/0e6XlmueqKjAGxoOufVs8QHGRruUQn6yWY3a++T0U=
|
||||
golang.org/x/oauth2 v0.25.0 h1:CY4y7XT9v0cRI9oupztF8AgiIu99L/ksR/Xp/6jrZ70=
|
||||
golang.org/x/oauth2 v0.25.0/go.mod h1:XYTD2NtWslqkgxebSiOHnXEap4TF09sJSc7H1sXbhtI=
|
||||
golang.org/x/oauth2 v0.26.0 h1:afQXWNNaeC4nvZ0Ed9XvCCzXM6UHJG7iCg0W4fPqSBE=
|
||||
golang.org/x/oauth2 v0.26.0/go.mod h1:XYTD2NtWslqkgxebSiOHnXEap4TF09sJSc7H1sXbhtI=
|
||||
golang.org/x/sync v0.0.0-20180314180146-1d60e4601c6f/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
|
||||
golang.org/x/sync v0.0.0-20181108010431-42b317875d0f/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
|
||||
golang.org/x/sync v0.0.0-20190423024810-112230192c58/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
|
||||
@ -445,8 +445,8 @@ golang.org/x/sys v0.0.0-20220503163025-988cb79eb6c6/go.mod h1:oPkhp1MJrh7nUepCBc
|
||||
golang.org/x/sys v0.0.0-20220811171246-fbc7d0a398ab/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
|
||||
golang.org/x/sys v0.6.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
|
||||
golang.org/x/sys v0.14.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA=
|
||||
golang.org/x/sys v0.29.0 h1:TPYlXGxvx1MGTn2GiZDhnjPA9wZzZeGKHHmKhHYvgaU=
|
||||
golang.org/x/sys v0.29.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA=
|
||||
golang.org/x/sys v0.30.0 h1:QjkSwP/36a20jFYWkSue1YwXzLmsV5Gfq7Eiy72C1uc=
|
||||
golang.org/x/sys v0.30.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA=
|
||||
golang.org/x/term v0.28.0 h1:/Ts8HFuMR2E6IP/jlo7QVLZHggjKQbhu/7H0LJFr3Gg=
|
||||
golang.org/x/term v0.28.0/go.mod h1:Sw/lC2IAUZ92udQNf3WodGtn4k/XoLyZoh8v/8uiwek=
|
||||
golang.org/x/text v0.3.0/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ=
|
||||
|
@ -832,11 +832,11 @@ func getDashboardShouldReturn200WithConfig(t *testing.T, sc *scenarioContext, pr
|
||||
quotaService := quotatest.New(false, nil)
|
||||
folderSvc := folderimpl.ProvideService(
|
||||
fStore, ac, bus.ProvideBus(tracing.InitializeTracerForTest()), dashboardStore, folderStore,
|
||||
nil, db, features, supportbundlestest.NewFakeBundleService(), nil, cfg, nil, tracing.InitializeTracerForTest())
|
||||
nil, db, features, supportbundlestest.NewFakeBundleService(), nil, cfg, nil, tracing.InitializeTracerForTest(), nil)
|
||||
if dashboardService == nil {
|
||||
dashboardService, err = service.ProvideDashboardServiceImpl(
|
||||
cfg, dashboardStore, folderStore, features, folderPermissions,
|
||||
ac, folderSvc, fStore, nil, client.MockTestRestConfig{}, nil, quotaService, nil, nil,
|
||||
ac, folderSvc, fStore, nil, client.MockTestRestConfig{}, nil, quotaService, nil, nil, nil,
|
||||
)
|
||||
require.NoError(t, err)
|
||||
dashboardService.(dashboards.PermissionsRegistrationService).RegisterDashboardPermissions(dashboardPermissions)
|
||||
@ -844,7 +844,7 @@ func getDashboardShouldReturn200WithConfig(t *testing.T, sc *scenarioContext, pr
|
||||
|
||||
dashboardProvisioningService, err := service.ProvideDashboardServiceImpl(
|
||||
cfg, dashboardStore, folderStore, features, folderPermissions,
|
||||
ac, folderSvc, fStore, nil, client.MockTestRestConfig{}, nil, quotaService, nil, nil,
|
||||
ac, folderSvc, fStore, nil, client.MockTestRestConfig{}, nil, quotaService, nil, nil, nil,
|
||||
)
|
||||
require.NoError(t, err)
|
||||
|
||||
|
@ -462,10 +462,10 @@ func setupServer(b testing.TB, sc benchScenario, features featuremgmt.FeatureTog
|
||||
fStore := folderimpl.ProvideStore(sc.db)
|
||||
folderServiceWithFlagOn := folderimpl.ProvideService(
|
||||
fStore, ac, bus.ProvideBus(tracing.InitializeTracerForTest()), dashStore, folderStore,
|
||||
nil, sc.db, features, supportbundlestest.NewFakeBundleService(), nil, cfg, nil, tracing.InitializeTracerForTest())
|
||||
nil, sc.db, features, supportbundlestest.NewFakeBundleService(), nil, cfg, nil, tracing.InitializeTracerForTest(), nil)
|
||||
acSvc := acimpl.ProvideOSSService(
|
||||
sc.cfg, acdb.ProvideService(sc.db), actionSets, localcache.ProvideService(),
|
||||
features, tracing.InitializeTracerForTest(), sc.db, permreg.ProvidePermissionRegistry(), nil, folderServiceWithFlagOn,
|
||||
features, tracing.InitializeTracerForTest(), sc.db, permreg.ProvidePermissionRegistry(), nil,
|
||||
)
|
||||
folderPermissions, err := ossaccesscontrol.ProvideFolderPermissions(
|
||||
cfg, features, routing.NewRouteRegister(), sc.db, ac, license, folderServiceWithFlagOn, acSvc, sc.teamSvc, sc.userSvc, actionSets)
|
||||
@ -473,7 +473,7 @@ func setupServer(b testing.TB, sc benchScenario, features featuremgmt.FeatureTog
|
||||
dashboardSvc, err := dashboardservice.ProvideDashboardServiceImpl(
|
||||
sc.cfg, dashStore, folderStore,
|
||||
features, folderPermissions, ac,
|
||||
folderServiceWithFlagOn, fStore, nil, client.MockTestRestConfig{}, nil, quotaSrv, nil, nil,
|
||||
folderServiceWithFlagOn, fStore, nil, client.MockTestRestConfig{}, nil, quotaSrv, nil, nil, nil,
|
||||
)
|
||||
require.NoError(b, err)
|
||||
|
||||
|
@ -39,7 +39,7 @@ require (
|
||||
golang.org/x/crypto v0.32.0 // indirect
|
||||
golang.org/x/net v0.34.0 // indirect
|
||||
golang.org/x/sync v0.11.0 // indirect
|
||||
golang.org/x/sys v0.29.0 // indirect
|
||||
golang.org/x/sys v0.30.0 // indirect
|
||||
golang.org/x/text v0.21.0 // indirect
|
||||
google.golang.org/genproto/googleapis/rpc v0.0.0-20250115164207-1a7da9e5054f // indirect
|
||||
google.golang.org/grpc v1.70.0 // indirect
|
||||
|
@ -122,8 +122,8 @@ golang.org/x/sys v0.0.0-20220722155257-8c9f86f7a55f/go.mod h1:oPkhp1MJrh7nUepCBc
|
||||
golang.org/x/sys v0.5.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
|
||||
golang.org/x/sys v0.8.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
|
||||
golang.org/x/sys v0.17.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA=
|
||||
golang.org/x/sys v0.29.0 h1:TPYlXGxvx1MGTn2GiZDhnjPA9wZzZeGKHHmKhHYvgaU=
|
||||
golang.org/x/sys v0.29.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA=
|
||||
golang.org/x/sys v0.30.0 h1:QjkSwP/36a20jFYWkSue1YwXzLmsV5Gfq7Eiy72C1uc=
|
||||
golang.org/x/sys v0.30.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA=
|
||||
golang.org/x/term v0.0.0-20201126162022-7de9c90e9dd1/go.mod h1:bj7SfCRtBDWHUb9snDiAeCFNEtKQo2Wmx5Cou7ajbmo=
|
||||
golang.org/x/term v0.0.0-20210927222741-03fcf44c2211/go.mod h1:jbD1KX2456YbFQfuXm/mYQcufACuNUgVhRMnK/tPxf8=
|
||||
golang.org/x/term v0.5.0/go.mod h1:jMB1sMXY+tzblOD4FWmEbocvup2/aLOaQEp7JmGp78k=
|
||||
|
51
pkg/apis/dashboard/utils.go
Normal file
51
pkg/apis/dashboard/utils.go
Normal file
@ -0,0 +1,51 @@
|
||||
package dashboard
|
||||
|
||||
import (
|
||||
"strings"
|
||||
|
||||
"github.com/grafana/grafana/pkg/apimachinery/utils"
|
||||
"k8s.io/apimachinery/pkg/apis/meta/v1/unstructured"
|
||||
)
|
||||
|
||||
var PluginIDRepoName = "plugin"
|
||||
var fileProvisionedRepoPrefix = "file:"
|
||||
|
||||
// ProvisionedFileNameWithPrefix adds the `file:` prefix to the
|
||||
// provisioner name, to be used as the annotation for dashboards
|
||||
// provisioned from files
|
||||
func ProvisionedFileNameWithPrefix(name string) string {
|
||||
if name == "" {
|
||||
return ""
|
||||
}
|
||||
|
||||
return fileProvisionedRepoPrefix + name
|
||||
}
|
||||
|
||||
// GetProvisionedFileNameFromMeta returns the provisioner name
|
||||
// from a given annotation string, which is in the form file:<name>
|
||||
func GetProvisionedFileNameFromMeta(annotation string) (string, bool) {
|
||||
return strings.CutPrefix(annotation, fileProvisionedRepoPrefix)
|
||||
}
|
||||
|
||||
// SetPluginIDMeta sets the repo name to "plugin" and the path to the plugin ID
|
||||
func SetPluginIDMeta(obj unstructured.Unstructured, pluginID string) {
|
||||
if pluginID == "" {
|
||||
return
|
||||
}
|
||||
|
||||
annotations := obj.GetAnnotations()
|
||||
if annotations == nil {
|
||||
annotations = map[string]string{}
|
||||
}
|
||||
annotations[utils.AnnoKeyRepoName] = PluginIDRepoName
|
||||
annotations[utils.AnnoKeyRepoPath] = pluginID
|
||||
obj.SetAnnotations(annotations)
|
||||
}
|
||||
|
||||
// GetPluginIDFromMeta returns the plugin ID from the meta if the repo name is "plugin"
|
||||
func GetPluginIDFromMeta(obj utils.GrafanaMetaAccessor) string {
|
||||
if obj.GetRepositoryName() == PluginIDRepoName {
|
||||
return obj.GetRepositoryPath()
|
||||
}
|
||||
return ""
|
||||
}
|
@ -1,6 +0,0 @@
|
||||
// +k8s:deepcopy-gen=package
|
||||
// +k8s:openapi-gen=true
|
||||
// +k8s:defaulter-gen=TypeMeta
|
||||
// +groupName=peakq.grafana.app
|
||||
|
||||
package v0alpha1 // import "github.com/grafana/grafana/pkg/apis/peakq/v0alpha1"
|
@ -1,52 +0,0 @@
|
||||
package v0alpha1
|
||||
|
||||
import (
|
||||
metav1 "k8s.io/apimachinery/pkg/apis/meta/v1"
|
||||
"k8s.io/apimachinery/pkg/runtime"
|
||||
"k8s.io/apimachinery/pkg/runtime/schema"
|
||||
|
||||
"github.com/grafana/grafana/pkg/apimachinery/utils"
|
||||
)
|
||||
|
||||
const (
|
||||
GROUP = "peakq.grafana.app"
|
||||
VERSION = "v0alpha1"
|
||||
APIVERSION = GROUP + "/" + VERSION
|
||||
)
|
||||
|
||||
var QueryTemplateResourceInfo = utils.NewResourceInfo(GROUP, VERSION,
|
||||
"querytemplates", "querytemplate", "QueryTemplate",
|
||||
func() runtime.Object { return &QueryTemplate{} },
|
||||
func() runtime.Object { return &QueryTemplateList{} },
|
||||
utils.TableColumns{}, // default table converter
|
||||
)
|
||||
|
||||
var (
|
||||
// SchemeGroupVersion is group version used to register these objects
|
||||
SchemeGroupVersion = schema.GroupVersion{Group: GROUP, Version: VERSION}
|
||||
|
||||
// SchemaBuilder is used by standard codegen
|
||||
SchemeBuilder runtime.SchemeBuilder
|
||||
localSchemeBuilder = &SchemeBuilder
|
||||
AddToScheme = localSchemeBuilder.AddToScheme
|
||||
)
|
||||
|
||||
func init() {
|
||||
localSchemeBuilder.Register(addKnownTypes)
|
||||
}
|
||||
|
||||
// Adds the list of known types to the given scheme.
|
||||
func addKnownTypes(scheme *runtime.Scheme) error {
|
||||
scheme.AddKnownTypes(SchemeGroupVersion,
|
||||
&QueryTemplate{},
|
||||
&QueryTemplateList{},
|
||||
&RenderedQuery{},
|
||||
)
|
||||
metav1.AddToGroupVersion(scheme, SchemeGroupVersion)
|
||||
return nil
|
||||
}
|
||||
|
||||
// Resource takes an unqualified resource and returns a Group qualified GroupResource
|
||||
func Resource(resource string) schema.GroupResource {
|
||||
return SchemeGroupVersion.WithResource(resource).GroupResource()
|
||||
}
|
@ -1,32 +0,0 @@
|
||||
package v0alpha1
|
||||
|
||||
import (
|
||||
metav1 "k8s.io/apimachinery/pkg/apis/meta/v1"
|
||||
|
||||
"github.com/grafana/grafana/pkg/apis/query/v0alpha1/template"
|
||||
)
|
||||
|
||||
// +k8s:deepcopy-gen:interfaces=k8s.io/apimachinery/pkg/runtime.Object
|
||||
type QueryTemplate struct {
|
||||
metav1.TypeMeta `json:",inline"`
|
||||
metav1.ObjectMeta `json:"metadata,omitempty"`
|
||||
|
||||
Spec template.QueryTemplate `json:"spec,omitempty"`
|
||||
}
|
||||
|
||||
// +k8s:deepcopy-gen:interfaces=k8s.io/apimachinery/pkg/runtime.Object
|
||||
type QueryTemplateList struct {
|
||||
metav1.TypeMeta `json:",inline"`
|
||||
metav1.ListMeta `json:"metadata,omitempty"`
|
||||
|
||||
Items []QueryTemplate `json:"items,omitempty"`
|
||||
}
|
||||
|
||||
// Dummy object that represents a real query object
|
||||
// +k8s:deepcopy-gen:interfaces=k8s.io/apimachinery/pkg/runtime.Object
|
||||
type RenderedQuery struct {
|
||||
metav1.TypeMeta `json:",inline"`
|
||||
|
||||
// +listType=atomic
|
||||
Targets []template.Target `json:"targets,omitempty"`
|
||||
}
|
@ -1,105 +0,0 @@
|
||||
//go:build !ignore_autogenerated
|
||||
// +build !ignore_autogenerated
|
||||
|
||||
// SPDX-License-Identifier: AGPL-3.0-only
|
||||
|
||||
// Code generated by deepcopy-gen. DO NOT EDIT.
|
||||
|
||||
package v0alpha1
|
||||
|
||||
import (
|
||||
template "github.com/grafana/grafana/pkg/apis/query/v0alpha1/template"
|
||||
runtime "k8s.io/apimachinery/pkg/runtime"
|
||||
)
|
||||
|
||||
// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
|
||||
func (in *QueryTemplate) DeepCopyInto(out *QueryTemplate) {
|
||||
*out = *in
|
||||
out.TypeMeta = in.TypeMeta
|
||||
in.ObjectMeta.DeepCopyInto(&out.ObjectMeta)
|
||||
in.Spec.DeepCopyInto(&out.Spec)
|
||||
return
|
||||
}
|
||||
|
||||
// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new QueryTemplate.
|
||||
func (in *QueryTemplate) DeepCopy() *QueryTemplate {
|
||||
if in == nil {
|
||||
return nil
|
||||
}
|
||||
out := new(QueryTemplate)
|
||||
in.DeepCopyInto(out)
|
||||
return out
|
||||
}
|
||||
|
||||
// DeepCopyObject is an autogenerated deepcopy function, copying the receiver, creating a new runtime.Object.
|
||||
func (in *QueryTemplate) DeepCopyObject() runtime.Object {
|
||||
if c := in.DeepCopy(); c != nil {
|
||||
return c
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
|
||||
func (in *QueryTemplateList) DeepCopyInto(out *QueryTemplateList) {
|
||||
*out = *in
|
||||
out.TypeMeta = in.TypeMeta
|
||||
in.ListMeta.DeepCopyInto(&out.ListMeta)
|
||||
if in.Items != nil {
|
||||
in, out := &in.Items, &out.Items
|
||||
*out = make([]QueryTemplate, len(*in))
|
||||
for i := range *in {
|
||||
(*in)[i].DeepCopyInto(&(*out)[i])
|
||||
}
|
||||
}
|
||||
return
|
||||
}
|
||||
|
||||
// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new QueryTemplateList.
|
||||
func (in *QueryTemplateList) DeepCopy() *QueryTemplateList {
|
||||
if in == nil {
|
||||
return nil
|
||||
}
|
||||
out := new(QueryTemplateList)
|
||||
in.DeepCopyInto(out)
|
||||
return out
|
||||
}
|
||||
|
||||
// DeepCopyObject is an autogenerated deepcopy function, copying the receiver, creating a new runtime.Object.
|
||||
func (in *QueryTemplateList) DeepCopyObject() runtime.Object {
|
||||
if c := in.DeepCopy(); c != nil {
|
||||
return c
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
|
||||
func (in *RenderedQuery) DeepCopyInto(out *RenderedQuery) {
|
||||
*out = *in
|
||||
out.TypeMeta = in.TypeMeta
|
||||
if in.Targets != nil {
|
||||
in, out := &in.Targets, &out.Targets
|
||||
*out = make([]template.Target, len(*in))
|
||||
for i := range *in {
|
||||
(*in)[i].DeepCopyInto(&(*out)[i])
|
||||
}
|
||||
}
|
||||
return
|
||||
}
|
||||
|
||||
// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new RenderedQuery.
|
||||
func (in *RenderedQuery) DeepCopy() *RenderedQuery {
|
||||
if in == nil {
|
||||
return nil
|
||||
}
|
||||
out := new(RenderedQuery)
|
||||
in.DeepCopyInto(out)
|
||||
return out
|
||||
}
|
||||
|
||||
// DeepCopyObject is an autogenerated deepcopy function, copying the receiver, creating a new runtime.Object.
|
||||
func (in *RenderedQuery) DeepCopyObject() runtime.Object {
|
||||
if c := in.DeepCopy(); c != nil {
|
||||
return c
|
||||
}
|
||||
return nil
|
||||
}
|
@ -1,19 +0,0 @@
|
||||
//go:build !ignore_autogenerated
|
||||
// +build !ignore_autogenerated
|
||||
|
||||
// SPDX-License-Identifier: AGPL-3.0-only
|
||||
|
||||
// Code generated by defaulter-gen. DO NOT EDIT.
|
||||
|
||||
package v0alpha1
|
||||
|
||||
import (
|
||||
runtime "k8s.io/apimachinery/pkg/runtime"
|
||||
)
|
||||
|
||||
// RegisterDefaults adds defaulters functions to the given scheme.
|
||||
// Public to allow building arbitrary schemes.
|
||||
// All generated defaulters are covering - they call all nested defaulters.
|
||||
func RegisterDefaults(scheme *runtime.Scheme) error {
|
||||
return nil
|
||||
}
|
@ -1,155 +0,0 @@
|
||||
//go:build !ignore_autogenerated
|
||||
// +build !ignore_autogenerated
|
||||
|
||||
// SPDX-License-Identifier: AGPL-3.0-only
|
||||
|
||||
// Code generated by openapi-gen. DO NOT EDIT.
|
||||
|
||||
package v0alpha1
|
||||
|
||||
import (
|
||||
common "k8s.io/kube-openapi/pkg/common"
|
||||
spec "k8s.io/kube-openapi/pkg/validation/spec"
|
||||
)
|
||||
|
||||
func GetOpenAPIDefinitions(ref common.ReferenceCallback) map[string]common.OpenAPIDefinition {
|
||||
return map[string]common.OpenAPIDefinition{
|
||||
"github.com/grafana/grafana/pkg/apis/peakq/v0alpha1.QueryTemplate": schema_pkg_apis_peakq_v0alpha1_QueryTemplate(ref),
|
||||
"github.com/grafana/grafana/pkg/apis/peakq/v0alpha1.QueryTemplateList": schema_pkg_apis_peakq_v0alpha1_QueryTemplateList(ref),
|
||||
"github.com/grafana/grafana/pkg/apis/peakq/v0alpha1.RenderedQuery": schema_pkg_apis_peakq_v0alpha1_RenderedQuery(ref),
|
||||
}
|
||||
}
|
||||
|
||||
func schema_pkg_apis_peakq_v0alpha1_QueryTemplate(ref common.ReferenceCallback) common.OpenAPIDefinition {
|
||||
return common.OpenAPIDefinition{
|
||||
Schema: spec.Schema{
|
||||
SchemaProps: spec.SchemaProps{
|
||||
Type: []string{"object"},
|
||||
Properties: map[string]spec.Schema{
|
||||
"kind": {
|
||||
SchemaProps: spec.SchemaProps{
|
||||
Description: "Kind is a string value representing the REST resource this object represents. Servers may infer this from the endpoint the client submits requests to. Cannot be updated. In CamelCase. More info: https://git.k8s.io/community/contributors/devel/sig-architecture/api-conventions.md#types-kinds",
|
||||
Type: []string{"string"},
|
||||
Format: "",
|
||||
},
|
||||
},
|
||||
"apiVersion": {
|
||||
SchemaProps: spec.SchemaProps{
|
||||
Description: "APIVersion defines the versioned schema of this representation of an object. Servers should convert recognized schemas to the latest internal value, and may reject unrecognized values. More info: https://git.k8s.io/community/contributors/devel/sig-architecture/api-conventions.md#resources",
|
||||
Type: []string{"string"},
|
||||
Format: "",
|
||||
},
|
||||
},
|
||||
"metadata": {
|
||||
SchemaProps: spec.SchemaProps{
|
||||
Default: map[string]interface{}{},
|
||||
Ref: ref("k8s.io/apimachinery/pkg/apis/meta/v1.ObjectMeta"),
|
||||
},
|
||||
},
|
||||
"spec": {
|
||||
SchemaProps: spec.SchemaProps{
|
||||
Default: map[string]interface{}{},
|
||||
Ref: ref("github.com/grafana/grafana/pkg/apis/query/v0alpha1/template.QueryTemplate"),
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
Dependencies: []string{
|
||||
"github.com/grafana/grafana/pkg/apis/query/v0alpha1/template.QueryTemplate", "k8s.io/apimachinery/pkg/apis/meta/v1.ObjectMeta"},
|
||||
}
|
||||
}
|
||||
|
||||
func schema_pkg_apis_peakq_v0alpha1_QueryTemplateList(ref common.ReferenceCallback) common.OpenAPIDefinition {
|
||||
return common.OpenAPIDefinition{
|
||||
Schema: spec.Schema{
|
||||
SchemaProps: spec.SchemaProps{
|
||||
Type: []string{"object"},
|
||||
Properties: map[string]spec.Schema{
|
||||
"kind": {
|
||||
SchemaProps: spec.SchemaProps{
|
||||
Description: "Kind is a string value representing the REST resource this object represents. Servers may infer this from the endpoint the client submits requests to. Cannot be updated. In CamelCase. More info: https://git.k8s.io/community/contributors/devel/sig-architecture/api-conventions.md#types-kinds",
|
||||
Type: []string{"string"},
|
||||
Format: "",
|
||||
},
|
||||
},
|
||||
"apiVersion": {
|
||||
SchemaProps: spec.SchemaProps{
|
||||
Description: "APIVersion defines the versioned schema of this representation of an object. Servers should convert recognized schemas to the latest internal value, and may reject unrecognized values. More info: https://git.k8s.io/community/contributors/devel/sig-architecture/api-conventions.md#resources",
|
||||
Type: []string{"string"},
|
||||
Format: "",
|
||||
},
|
||||
},
|
||||
"metadata": {
|
||||
SchemaProps: spec.SchemaProps{
|
||||
Default: map[string]interface{}{},
|
||||
Ref: ref("k8s.io/apimachinery/pkg/apis/meta/v1.ListMeta"),
|
||||
},
|
||||
},
|
||||
"items": {
|
||||
SchemaProps: spec.SchemaProps{
|
||||
Type: []string{"array"},
|
||||
Items: &spec.SchemaOrArray{
|
||||
Schema: &spec.Schema{
|
||||
SchemaProps: spec.SchemaProps{
|
||||
Default: map[string]interface{}{},
|
||||
Ref: ref("github.com/grafana/grafana/pkg/apis/peakq/v0alpha1.QueryTemplate"),
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
Dependencies: []string{
|
||||
"github.com/grafana/grafana/pkg/apis/peakq/v0alpha1.QueryTemplate", "k8s.io/apimachinery/pkg/apis/meta/v1.ListMeta"},
|
||||
}
|
||||
}
|
||||
|
||||
func schema_pkg_apis_peakq_v0alpha1_RenderedQuery(ref common.ReferenceCallback) common.OpenAPIDefinition {
|
||||
return common.OpenAPIDefinition{
|
||||
Schema: spec.Schema{
|
||||
SchemaProps: spec.SchemaProps{
|
||||
Description: "Dummy object that represents a real query object",
|
||||
Type: []string{"object"},
|
||||
Properties: map[string]spec.Schema{
|
||||
"kind": {
|
||||
SchemaProps: spec.SchemaProps{
|
||||
Description: "Kind is a string value representing the REST resource this object represents. Servers may infer this from the endpoint the client submits requests to. Cannot be updated. In CamelCase. More info: https://git.k8s.io/community/contributors/devel/sig-architecture/api-conventions.md#types-kinds",
|
||||
Type: []string{"string"},
|
||||
Format: "",
|
||||
},
|
||||
},
|
||||
"apiVersion": {
|
||||
SchemaProps: spec.SchemaProps{
|
||||
Description: "APIVersion defines the versioned schema of this representation of an object. Servers should convert recognized schemas to the latest internal value, and may reject unrecognized values. More info: https://git.k8s.io/community/contributors/devel/sig-architecture/api-conventions.md#resources",
|
||||
Type: []string{"string"},
|
||||
Format: "",
|
||||
},
|
||||
},
|
||||
"targets": {
|
||||
VendorExtensible: spec.VendorExtensible{
|
||||
Extensions: spec.Extensions{
|
||||
"x-kubernetes-list-type": "atomic",
|
||||
},
|
||||
},
|
||||
SchemaProps: spec.SchemaProps{
|
||||
Type: []string{"array"},
|
||||
Items: &spec.SchemaOrArray{
|
||||
Schema: &spec.Schema{
|
||||
SchemaProps: spec.SchemaProps{
|
||||
Default: map[string]interface{}{},
|
||||
Ref: ref("github.com/grafana/grafana/pkg/apis/query/v0alpha1/template.Target"),
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
Dependencies: []string{
|
||||
"github.com/grafana/grafana/pkg/apis/query/v0alpha1/template.Target"},
|
||||
}
|
||||
}
|
@ -82,8 +82,8 @@ require (
|
||||
go.uber.org/multierr v1.11.0 // indirect
|
||||
go.uber.org/zap v1.27.0 // indirect
|
||||
golang.org/x/net v0.34.0 // indirect
|
||||
golang.org/x/oauth2 v0.25.0 // indirect
|
||||
golang.org/x/sys v0.29.0 // indirect
|
||||
golang.org/x/oauth2 v0.26.0 // indirect
|
||||
golang.org/x/sys v0.30.0 // indirect
|
||||
golang.org/x/term v0.28.0 // indirect
|
||||
golang.org/x/text v0.21.0 // indirect
|
||||
golang.org/x/time v0.9.0 // indirect
|
||||
|
@ -254,8 +254,8 @@ golang.org/x/net v0.0.0-20201021035429-f5854403a974/go.mod h1:sp8m0HH+o8qH0wwXwY
|
||||
golang.org/x/net v0.34.0 h1:Mb7Mrk043xzHgnRM88suvJFwzVrRfHEHJEl5/71CKw0=
|
||||
golang.org/x/net v0.34.0/go.mod h1:di0qlW3YNM5oh6GqDGQr92MyTozJPmybPK4Ev/Gm31k=
|
||||
golang.org/x/oauth2 v0.0.0-20180821212333-d2e6202438be/go.mod h1:N/0e6XlmueqKjAGxoOufVs8QHGRruUQn6yWY3a++T0U=
|
||||
golang.org/x/oauth2 v0.25.0 h1:CY4y7XT9v0cRI9oupztF8AgiIu99L/ksR/Xp/6jrZ70=
|
||||
golang.org/x/oauth2 v0.25.0/go.mod h1:XYTD2NtWslqkgxebSiOHnXEap4TF09sJSc7H1sXbhtI=
|
||||
golang.org/x/oauth2 v0.26.0 h1:afQXWNNaeC4nvZ0Ed9XvCCzXM6UHJG7iCg0W4fPqSBE=
|
||||
golang.org/x/oauth2 v0.26.0/go.mod h1:XYTD2NtWslqkgxebSiOHnXEap4TF09sJSc7H1sXbhtI=
|
||||
golang.org/x/sync v0.0.0-20180314180146-1d60e4601c6f/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
|
||||
golang.org/x/sync v0.0.0-20181108010431-42b317875d0f/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
|
||||
golang.org/x/sync v0.0.0-20190423024810-112230192c58/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
|
||||
@ -267,8 +267,8 @@ golang.org/x/sys v0.0.0-20190412213103-97732733099d/go.mod h1:h1NjWce9XRLGQEsW7w
|
||||
golang.org/x/sys v0.0.0-20190422165155-953cdadca894/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
|
||||
golang.org/x/sys v0.0.0-20200930185726-fdedc70b468f/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
|
||||
golang.org/x/sys v0.0.0-20211025201205-69cdffdb9359/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
|
||||
golang.org/x/sys v0.29.0 h1:TPYlXGxvx1MGTn2GiZDhnjPA9wZzZeGKHHmKhHYvgaU=
|
||||
golang.org/x/sys v0.29.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA=
|
||||
golang.org/x/sys v0.30.0 h1:QjkSwP/36a20jFYWkSue1YwXzLmsV5Gfq7Eiy72C1uc=
|
||||
golang.org/x/sys v0.30.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA=
|
||||
golang.org/x/term v0.28.0 h1:/Ts8HFuMR2E6IP/jlo7QVLZHggjKQbhu/7H0LJFr3Gg=
|
||||
golang.org/x/term v0.28.0/go.mod h1:Sw/lC2IAUZ92udQNf3WodGtn4k/XoLyZoh8v/8uiwek=
|
||||
golang.org/x/text v0.3.0/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ=
|
||||
|
@ -30,7 +30,7 @@ require (
|
||||
golang.org/x/crypto v0.32.0 // indirect; @grafana/grafana-backend-group
|
||||
golang.org/x/mod v0.22.0 // @grafana/grafana-backend-group
|
||||
golang.org/x/net v0.34.0 // indirect; @grafana/oss-big-tent @grafana/partner-datasources
|
||||
golang.org/x/oauth2 v0.25.0 // @grafana/identity-access-team
|
||||
golang.org/x/oauth2 v0.26.0 // @grafana/identity-access-team
|
||||
golang.org/x/sync v0.11.0 // indirect; @grafana/alerting-backend
|
||||
golang.org/x/text v0.21.0 // indirect; @grafana/grafana-backend-group
|
||||
golang.org/x/time v0.9.0 // indirect; @grafana/grafana-backend-group
|
||||
@ -75,7 +75,7 @@ require (
|
||||
go.opentelemetry.io/contrib/instrumentation/net/http/otelhttp v0.59.0 // indirect
|
||||
go.opentelemetry.io/otel/metric v1.34.0 // indirect
|
||||
go.starlark.net v0.0.0-20230525235612-a134d8f9ddca // indirect
|
||||
golang.org/x/sys v0.29.0 // indirect
|
||||
golang.org/x/sys v0.30.0 // indirect
|
||||
google.golang.org/genproto v0.0.0-20241021214115-324edc3d5d38 // indirect; @grafana/grafana-backend-group
|
||||
google.golang.org/genproto/googleapis/api v0.0.0-20250115164207-1a7da9e5054f // indirect
|
||||
google.golang.org/genproto/googleapis/rpc v0.0.0-20250115164207-1a7da9e5054f // indirect
|
||||
|
@ -293,8 +293,8 @@ golang.org/x/net v0.0.0-20201110031124-69a78807bb2b/go.mod h1:sp8m0HH+o8qH0wwXwY
|
||||
golang.org/x/net v0.34.0 h1:Mb7Mrk043xzHgnRM88suvJFwzVrRfHEHJEl5/71CKw0=
|
||||
golang.org/x/net v0.34.0/go.mod h1:di0qlW3YNM5oh6GqDGQr92MyTozJPmybPK4Ev/Gm31k=
|
||||
golang.org/x/oauth2 v0.0.0-20180821212333-d2e6202438be/go.mod h1:N/0e6XlmueqKjAGxoOufVs8QHGRruUQn6yWY3a++T0U=
|
||||
golang.org/x/oauth2 v0.25.0 h1:CY4y7XT9v0cRI9oupztF8AgiIu99L/ksR/Xp/6jrZ70=
|
||||
golang.org/x/oauth2 v0.25.0/go.mod h1:XYTD2NtWslqkgxebSiOHnXEap4TF09sJSc7H1sXbhtI=
|
||||
golang.org/x/oauth2 v0.26.0 h1:afQXWNNaeC4nvZ0Ed9XvCCzXM6UHJG7iCg0W4fPqSBE=
|
||||
golang.org/x/oauth2 v0.26.0/go.mod h1:XYTD2NtWslqkgxebSiOHnXEap4TF09sJSc7H1sXbhtI=
|
||||
golang.org/x/sync v0.0.0-20180314180146-1d60e4601c6f/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
|
||||
golang.org/x/sync v0.0.0-20181108010431-42b317875d0f/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
|
||||
golang.org/x/sync v0.0.0-20190423024810-112230192c58/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
|
||||
@ -312,8 +312,8 @@ golang.org/x/sys v0.0.0-20210615035016-665e8c7367d1/go.mod h1:oPkhp1MJrh7nUepCBc
|
||||
golang.org/x/sys v0.0.0-20210616094352-59db8d763f22/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
|
||||
golang.org/x/sys v0.0.0-20211025201205-69cdffdb9359/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
|
||||
golang.org/x/sys v0.0.0-20220715151400-c0bba94af5f8/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
|
||||
golang.org/x/sys v0.29.0 h1:TPYlXGxvx1MGTn2GiZDhnjPA9wZzZeGKHHmKhHYvgaU=
|
||||
golang.org/x/sys v0.29.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA=
|
||||
golang.org/x/sys v0.30.0 h1:QjkSwP/36a20jFYWkSue1YwXzLmsV5Gfq7Eiy72C1uc=
|
||||
golang.org/x/sys v0.30.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA=
|
||||
golang.org/x/term v0.0.0-20220526004731-065cf7ba2467/go.mod h1:jbD1KX2456YbFQfuXm/mYQcufACuNUgVhRMnK/tPxf8=
|
||||
golang.org/x/text v0.3.0/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ=
|
||||
golang.org/x/text v0.3.3/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ=
|
||||
|
@ -187,14 +187,15 @@ func promptYesNo(prompt string) (bool, error) {
|
||||
}
|
||||
|
||||
func newUnifiedClient(cfg *setting.Cfg, sqlStore db.DB) (resource.ResourceClient, error) {
|
||||
return unified.ProvideUnifiedStorageClient(cfg,
|
||||
featuremgmt.WithFeatures(), // none??
|
||||
sqlStore,
|
||||
tracing.NewNoopTracerService(),
|
||||
prometheus.NewPedanticRegistry(),
|
||||
authlib.FixedAccessClient(true), // always true!
|
||||
nil, // document supplier
|
||||
)
|
||||
return unified.ProvideUnifiedStorageClient(&unified.Options{
|
||||
Cfg: cfg,
|
||||
Features: featuremgmt.WithFeatures(), // none??
|
||||
DB: sqlStore,
|
||||
Tracer: tracing.NewNoopTracerService(),
|
||||
Reg: prometheus.NewPedanticRegistry(),
|
||||
Authzc: authlib.FixedAccessClient(true), // always true!
|
||||
Docs: nil, // document supplier
|
||||
})
|
||||
}
|
||||
|
||||
func newParquetClient(file *os.File) (resource.BatchStoreClient, error) {
|
||||
|
@ -92,6 +92,23 @@ func TestSQLService(t *testing.T) {
|
||||
require.Error(t, rsp.Responses["B"].Error, "should return invalid sql error")
|
||||
require.ErrorContains(t, rsp.Responses["B"].Error, "blocked function load_file")
|
||||
})
|
||||
|
||||
t.Run("parse error should be returned", func(t *testing.T) {
|
||||
s, req := newMockQueryService(resp,
|
||||
newABSQLQueries(`SELECT * FROM A LIMIT sloth`),
|
||||
)
|
||||
|
||||
s.features = featuremgmt.WithFeatures(featuremgmt.FlagSqlExpressions)
|
||||
|
||||
pl, err := s.BuildPipeline(req)
|
||||
require.NoError(t, err)
|
||||
|
||||
rsp, err := s.ExecutePipeline(context.Background(), time.Now(), pl)
|
||||
require.NoError(t, err)
|
||||
|
||||
require.Error(t, rsp.Responses["B"].Error, "should return sql error on parsing")
|
||||
require.ErrorContains(t, rsp.Responses["B"].Error, "limit expression expected to be numeric")
|
||||
})
|
||||
}
|
||||
|
||||
func jsonEscape(input string) (string, error) {
|
||||
|
@ -28,7 +28,7 @@ func TestQueryFrames(t *testing.T) {
|
||||
expected: data.NewFrame(
|
||||
"sqlExpressionRefId",
|
||||
data.NewField("n", nil, []string{"1"}),
|
||||
),
|
||||
).SetRefID("sqlExpressionRefId"),
|
||||
},
|
||||
{
|
||||
name: "valid query with no input frames, one row two columns",
|
||||
@ -38,7 +38,7 @@ func TestQueryFrames(t *testing.T) {
|
||||
"sqlExpressionRefId",
|
||||
data.NewField("name", nil, []string{"sam"}),
|
||||
data.NewField("age", nil, []int8{40}),
|
||||
),
|
||||
).SetRefID("sqlExpressionRefId"),
|
||||
},
|
||||
{
|
||||
// TODO: Also ORDER BY to ensure the order is preserved
|
||||
@ -54,7 +54,7 @@ func TestQueryFrames(t *testing.T) {
|
||||
expected: data.NewFrame(
|
||||
"sqlExpressionRefId",
|
||||
data.NewField("OSS Projects with Typos", nil, []string{"Garfana"}),
|
||||
),
|
||||
).SetRefID("sqlExpressionRefId"),
|
||||
},
|
||||
}
|
||||
|
||||
@ -62,13 +62,9 @@ func TestQueryFrames(t *testing.T) {
|
||||
t.Run(tt.name, func(t *testing.T) {
|
||||
frame, err := db.QueryFrames(context.Background(), "sqlExpressionRefId", tt.query, tt.input_frames)
|
||||
require.NoError(t, err)
|
||||
require.NotNil(t, frame.Fields)
|
||||
|
||||
require.Equal(t, tt.expected.Name, frame.RefID)
|
||||
require.Equal(t, len(tt.expected.Fields), len(frame.Fields))
|
||||
for i := range tt.expected.Fields {
|
||||
require.Equal(t, tt.expected.Fields[i].Name, frame.Fields[i].Name)
|
||||
require.Equal(t, tt.expected.Fields[i].At(0), frame.Fields[i].At(0))
|
||||
if diff := cmp.Diff(tt.expected, frame, data.FrameTestCompareOptions()...); diff != "" {
|
||||
require.FailNowf(t, "Result mismatch (-want +got):%s\n", diff)
|
||||
}
|
||||
})
|
||||
}
|
||||
@ -79,20 +75,47 @@ func TestQueryFramesInOut(t *testing.T) {
|
||||
RefID: "a",
|
||||
Name: "a",
|
||||
Fields: []*data.Field{
|
||||
data.NewField("time", nil, []time.Time{time.Now(), time.Now()}),
|
||||
data.NewField("time_nullable", nil, []*time.Time{p(time.Now()), nil}),
|
||||
data.NewField("time", nil, []time.Time{time.Date(2025, 1, 2, 3, 4, 5, 0, time.UTC), time.Date(2025, 1, 2, 3, 4, 5, 0, time.UTC)}),
|
||||
data.NewField("time_nullable", nil, []*time.Time{p(time.Date(2025, 1, 2, 3, 4, 5, 0, time.UTC)), nil}),
|
||||
|
||||
data.NewField("string", nil, []string{"cat", "dog"}),
|
||||
data.NewField("null_nullable", nil, []*string{p("cat"), nil}),
|
||||
|
||||
data.NewField("bool", nil, []bool{true, false}),
|
||||
data.NewField("bool_nullable", nil, []*bool{p(true), nil}),
|
||||
|
||||
// Floats
|
||||
data.NewField("float32", nil, []float32{1, 3}),
|
||||
data.NewField("float32_nullable", nil, []*float32{p(float32(2.0)), nil}),
|
||||
|
||||
data.NewField("float64", nil, []float64{1, 3}),
|
||||
data.NewField("float64_nullable", nil, []*float64{p(2.0), nil}),
|
||||
data.NewField("float64_nullable", nil, []*float64{p(float64(2.0)), nil}),
|
||||
|
||||
// Ints
|
||||
data.NewField("int8", nil, []int8{1, 3}),
|
||||
data.NewField("int8_nullable", nil, []*int8{p(int8(2)), nil}),
|
||||
|
||||
data.NewField("int16", nil, []int16{1, 3}),
|
||||
data.NewField("int16_nullable", nil, []*int16{p(int16(2)), nil}),
|
||||
|
||||
data.NewField("int32", nil, []int32{1, 3}),
|
||||
data.NewField("int32_nullable", nil, []*int32{p(int32(2)), nil}),
|
||||
|
||||
data.NewField("int64", nil, []int64{1, 3}),
|
||||
data.NewField("int64_nullable", nil, []*int64{p(int64(2)), nil}),
|
||||
|
||||
data.NewField("bool", nil, []bool{true, false}),
|
||||
data.NewField("bool_nullable", nil, []*bool{p(true), nil}),
|
||||
// Unsigned Ints
|
||||
data.NewField("uint8", nil, []uint8{1, 3}),
|
||||
data.NewField("uint8_nullable", nil, []*uint8{p(uint8(2)), nil}),
|
||||
|
||||
data.NewField("uint16", nil, []uint16{1, 3}),
|
||||
data.NewField("uint16_nullable", nil, []*uint16{p(uint16(2)), nil}),
|
||||
|
||||
data.NewField("uint32", nil, []uint32{1, 3}),
|
||||
data.NewField("uint32_nullable", nil, []*uint32{p(uint32(2)), nil}),
|
||||
|
||||
data.NewField("uint64", nil, []uint64{1, 3}),
|
||||
data.NewField("uint64_nullable", nil, []*uint64{p(uint64(2)), nil}),
|
||||
},
|
||||
}
|
||||
|
||||
|
@ -23,7 +23,6 @@ func convertToDataFrame(ctx *mysql.Context, iter mysql.RowIter, schema mysql.Sch
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
field := data.NewFieldFromFieldType(fT, 0)
|
||||
field.Name = col.Name
|
||||
f.Fields = append(f.Fields, field)
|
||||
@ -40,11 +39,22 @@ func convertToDataFrame(ctx *mysql.Context, iter mysql.RowIter, schema mysql.Sch
|
||||
}
|
||||
|
||||
for i, val := range row {
|
||||
v, err := fieldValFromRowVal(f.Fields[i].Type(), val)
|
||||
// Run val through mysql.Type.Convert to normalize underlying value
|
||||
// of the interface
|
||||
nV, _, err := schema[i].Type.Convert(val)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
// Run the normalized value through fieldValFromRowVal to normalize
|
||||
// the interface type to the dataframe value type, and make nullable
|
||||
// values pointers as dataframe expects.
|
||||
fV, err := fieldValFromRowVal(f.Fields[i].Type(), nV)
|
||||
if err != nil {
|
||||
return nil, fmt.Errorf("unexpected type for column %s: %w", schema[i].Name, err)
|
||||
}
|
||||
f.Fields[i].Append(v)
|
||||
|
||||
f.Fields[i].Append(fV)
|
||||
}
|
||||
}
|
||||
|
||||
@ -72,11 +82,10 @@ func MySQLColToFieldType(col *mysql.Column) (data.FieldType, error) {
|
||||
fT = data.FieldTypeInt64
|
||||
case types.Uint64:
|
||||
fT = data.FieldTypeUint64
|
||||
case types.Float32:
|
||||
fT = data.FieldTypeFloat32
|
||||
case types.Float64:
|
||||
fT = data.FieldTypeFloat64
|
||||
// StringType represents all string types, including VARCHAR and BLOB.
|
||||
case types.Text, types.LongText:
|
||||
fT = data.FieldTypeString
|
||||
case types.Timestamp:
|
||||
fT = data.FieldTypeTime
|
||||
case types.Datetime:
|
||||
@ -84,9 +93,12 @@ func MySQLColToFieldType(col *mysql.Column) (data.FieldType, error) {
|
||||
case types.Boolean:
|
||||
fT = data.FieldTypeBool
|
||||
default:
|
||||
if types.IsDecimal(col.Type) {
|
||||
switch {
|
||||
case types.IsDecimal(col.Type):
|
||||
fT = data.FieldTypeFloat64
|
||||
} else {
|
||||
case types.IsText(col.Type):
|
||||
fT = data.FieldTypeString
|
||||
default:
|
||||
return fT, fmt.Errorf("unsupported type for column %s of type %v", col.Name, col.Type)
|
||||
}
|
||||
}
|
||||
@ -98,315 +110,96 @@ func MySQLColToFieldType(col *mysql.Column) (data.FieldType, error) {
|
||||
return fT, nil
|
||||
}
|
||||
|
||||
// Helper function to convert data.FieldType to types.Type
|
||||
func convertDataType(fieldType data.FieldType) mysql.Type {
|
||||
switch fieldType {
|
||||
case data.FieldTypeInt8, data.FieldTypeNullableInt8:
|
||||
return types.Int8
|
||||
case data.FieldTypeUint8, data.FieldTypeNullableUint8:
|
||||
return types.Uint8
|
||||
case data.FieldTypeInt16, data.FieldTypeNullableInt16:
|
||||
return types.Int16
|
||||
case data.FieldTypeUint16, data.FieldTypeNullableUint16:
|
||||
return types.Uint16
|
||||
case data.FieldTypeInt32, data.FieldTypeNullableInt32:
|
||||
return types.Int32
|
||||
case data.FieldTypeUint32, data.FieldTypeNullableUint32:
|
||||
return types.Uint32
|
||||
case data.FieldTypeInt64, data.FieldTypeNullableInt64:
|
||||
return types.Int64
|
||||
case data.FieldTypeUint64, data.FieldTypeNullableUint64:
|
||||
return types.Uint64
|
||||
case data.FieldTypeFloat32, data.FieldTypeNullableFloat32:
|
||||
return types.Float32
|
||||
case data.FieldTypeFloat64, data.FieldTypeNullableFloat64:
|
||||
return types.Float64
|
||||
case data.FieldTypeString, data.FieldTypeNullableString:
|
||||
return types.Text
|
||||
case data.FieldTypeBool, data.FieldTypeNullableBool:
|
||||
return types.Boolean
|
||||
case data.FieldTypeTime, data.FieldTypeNullableTime:
|
||||
return types.Timestamp
|
||||
default:
|
||||
fmt.Printf("------- Unsupported field type: %v", fieldType)
|
||||
return types.JSON
|
||||
}
|
||||
}
|
||||
|
||||
// fieldValFromRowVal converts a go-mysql-server row value to a data.field value
|
||||
//
|
||||
//nolint:gocyclo
|
||||
func fieldValFromRowVal(fieldType data.FieldType, val interface{}) (interface{}, error) {
|
||||
// the input val may be nil, it also may not be a pointer even if the fieldtype is a nullable pointer type
|
||||
// if the input interface is nil, we can return an untyped nil
|
||||
if val == nil {
|
||||
return nil, nil
|
||||
}
|
||||
|
||||
nullable := fieldType.Nullable()
|
||||
|
||||
switch fieldType {
|
||||
// ----------------------------
|
||||
// Int8 / Nullable Int8
|
||||
// ----------------------------
|
||||
case data.FieldTypeInt8:
|
||||
v, ok := val.(int8)
|
||||
if !ok {
|
||||
return nil, fmt.Errorf("unexpected value type for interface %v of type %T, expected int8", val, val)
|
||||
}
|
||||
return v, nil
|
||||
case data.FieldTypeInt8, data.FieldTypeNullableInt8:
|
||||
return parseVal[int8](val, "int8", nullable)
|
||||
|
||||
case data.FieldTypeNullableInt8:
|
||||
vP, ok := val.(*int8)
|
||||
if ok {
|
||||
return vP, nil
|
||||
}
|
||||
v, ok := val.(int8)
|
||||
if ok {
|
||||
return &v, nil
|
||||
}
|
||||
return nil, fmt.Errorf("unexpected value type for interface %v of type %T, expected int8 or *int8", val, val)
|
||||
case data.FieldTypeUint8, data.FieldTypeNullableUint8:
|
||||
return parseVal[uint8](val, "uint8", nullable)
|
||||
|
||||
// ----------------------------
|
||||
// Uint8 / Nullable Uint8
|
||||
// ----------------------------
|
||||
case data.FieldTypeUint8:
|
||||
v, ok := val.(uint8)
|
||||
if !ok {
|
||||
return nil, fmt.Errorf("unexpected value type for interface %v of type %T, expected uint8", val, val)
|
||||
}
|
||||
return v, nil
|
||||
case data.FieldTypeInt16, data.FieldTypeNullableInt16:
|
||||
return parseVal[int16](val, "int16", nullable)
|
||||
|
||||
case data.FieldTypeNullableUint8:
|
||||
vP, ok := val.(*uint8)
|
||||
if ok {
|
||||
return vP, nil
|
||||
}
|
||||
v, ok := val.(uint8)
|
||||
if ok {
|
||||
return &v, nil
|
||||
}
|
||||
return nil, fmt.Errorf("unexpected value type for interface %v of type %T, expected uint8 or *uint8", val, val)
|
||||
case data.FieldTypeUint16, data.FieldTypeNullableUint16:
|
||||
return parseVal[uint16](val, "uint16", nullable)
|
||||
|
||||
// ----------------------------
|
||||
// Int16 / Nullable Int16
|
||||
// ----------------------------
|
||||
case data.FieldTypeInt16:
|
||||
v, ok := val.(int16)
|
||||
if !ok {
|
||||
return nil, fmt.Errorf("unexpected value type for interface %v of type %T, expected int16", val, val)
|
||||
}
|
||||
return v, nil
|
||||
case data.FieldTypeInt32, data.FieldTypeNullableInt32:
|
||||
return parseVal[int32](val, "int32", nullable)
|
||||
|
||||
case data.FieldTypeNullableInt16:
|
||||
vP, ok := val.(*int16)
|
||||
if ok {
|
||||
return vP, nil
|
||||
}
|
||||
v, ok := val.(int16)
|
||||
if ok {
|
||||
return &v, nil
|
||||
}
|
||||
return nil, fmt.Errorf("unexpected value type for interface %v of type %T, expected int16 or *int16", val, val)
|
||||
case data.FieldTypeUint32, data.FieldTypeNullableUint32:
|
||||
return parseVal[uint32](val, "uint32", nullable)
|
||||
|
||||
// ----------------------------
|
||||
// Uint16 / Nullable Uint16
|
||||
// ----------------------------
|
||||
case data.FieldTypeUint16:
|
||||
v, ok := val.(uint16)
|
||||
if !ok {
|
||||
return nil, fmt.Errorf("unexpected value type for interface %v of type %T, expected uint16", val, val)
|
||||
}
|
||||
return v, nil
|
||||
case data.FieldTypeInt64, data.FieldTypeNullableInt64:
|
||||
return parseVal[int64](val, "int64", nullable)
|
||||
|
||||
case data.FieldTypeNullableUint16:
|
||||
vP, ok := val.(*uint16)
|
||||
if ok {
|
||||
return vP, nil
|
||||
}
|
||||
v, ok := val.(uint16)
|
||||
if ok {
|
||||
return &v, nil
|
||||
}
|
||||
return nil, fmt.Errorf("unexpected value type for interface %v of type %T, expected uint16 or *uint16", val, val)
|
||||
case data.FieldTypeUint64, data.FieldTypeNullableUint64:
|
||||
return parseVal[uint64](val, "uint64", nullable)
|
||||
|
||||
// ----------------------------
|
||||
// Int32 / Nullable Int32
|
||||
// ----------------------------
|
||||
case data.FieldTypeInt32:
|
||||
v, ok := val.(int32)
|
||||
if !ok {
|
||||
return nil, fmt.Errorf("unexpected value type for interface %v of type %T, expected int32", val, val)
|
||||
}
|
||||
return v, nil
|
||||
case data.FieldTypeFloat32, data.FieldTypeNullableFloat32:
|
||||
return parseVal[float32](val, "float32", nullable)
|
||||
|
||||
case data.FieldTypeNullableInt32:
|
||||
vP, ok := val.(*int32)
|
||||
if ok {
|
||||
return vP, nil
|
||||
}
|
||||
v, ok := val.(int32)
|
||||
if ok {
|
||||
return &v, nil
|
||||
}
|
||||
return nil, fmt.Errorf("unexpected value type for interface %v of type %T, expected int32 or *int32", val, val)
|
||||
case data.FieldTypeFloat64, data.FieldTypeNullableFloat64:
|
||||
return parseFloat64OrDecimal(val, nullable)
|
||||
|
||||
// ----------------------------
|
||||
// Uint32 / Nullable Uint32
|
||||
// ----------------------------
|
||||
case data.FieldTypeUint32:
|
||||
v, ok := val.(uint32)
|
||||
if !ok {
|
||||
return nil, fmt.Errorf("unexpected value type for interface %v of type %T, expected uint32", val, val)
|
||||
}
|
||||
return v, nil
|
||||
case data.FieldTypeTime, data.FieldTypeNullableTime:
|
||||
return parseVal[time.Time](val, "time.Time", nullable)
|
||||
|
||||
case data.FieldTypeNullableUint32:
|
||||
vP, ok := val.(*uint32)
|
||||
if ok {
|
||||
return vP, nil
|
||||
}
|
||||
v, ok := val.(uint32)
|
||||
if ok {
|
||||
return &v, nil
|
||||
}
|
||||
return nil, fmt.Errorf("unexpected value type for interface %v of type %T, expected uint32 or *uint32", val, val)
|
||||
case data.FieldTypeString, data.FieldTypeNullableString:
|
||||
return parseVal[string](val, "string", nullable)
|
||||
|
||||
// ----------------------------
|
||||
// Int64 / Nullable Int64
|
||||
// ----------------------------
|
||||
case data.FieldTypeInt64:
|
||||
v, ok := val.(int64)
|
||||
if !ok {
|
||||
return nil, fmt.Errorf("unexpected value type for interface %v of type %T, expected int64", val, val)
|
||||
}
|
||||
return v, nil
|
||||
case data.FieldTypeBool, data.FieldTypeNullableBool:
|
||||
return parseBoolFromInt8(val, nullable)
|
||||
|
||||
case data.FieldTypeNullableInt64:
|
||||
vP, ok := val.(*int64)
|
||||
if ok {
|
||||
return vP, nil
|
||||
}
|
||||
v, ok := val.(int64)
|
||||
if ok {
|
||||
return &v, nil
|
||||
}
|
||||
return nil, fmt.Errorf("unexpected value type for interface %v of type %T, expected int64 or *int64", val, val)
|
||||
|
||||
// ----------------------------
|
||||
// Uint64 / Nullable Uint64
|
||||
// ----------------------------
|
||||
case data.FieldTypeUint64:
|
||||
v, ok := val.(uint64)
|
||||
if !ok {
|
||||
return nil, fmt.Errorf("unexpected value type for interface %v of type %T, expected uint64", val, val)
|
||||
}
|
||||
return v, nil
|
||||
|
||||
case data.FieldTypeNullableUint64:
|
||||
vP, ok := val.(*uint64)
|
||||
if ok {
|
||||
return vP, nil
|
||||
}
|
||||
v, ok := val.(uint64)
|
||||
if ok {
|
||||
return &v, nil
|
||||
}
|
||||
return nil, fmt.Errorf("unexpected value type for interface %v of type %T, expected uint64 or *uint64", val, val)
|
||||
|
||||
// ----------------------------
|
||||
// Float64 / Nullable Float64
|
||||
// ----------------------------
|
||||
case data.FieldTypeFloat64:
|
||||
// Accept float64 or decimal.Decimal, convert decimal.Decimal -> float64
|
||||
if v, ok := val.(float64); ok {
|
||||
return v, nil
|
||||
}
|
||||
if d, ok := val.(decimal.Decimal); ok {
|
||||
return d.InexactFloat64(), nil
|
||||
}
|
||||
return nil, fmt.Errorf("unexpected value type for interface %v of type %T, expected float64 or decimal.Decimal", val, val)
|
||||
|
||||
case data.FieldTypeNullableFloat64:
|
||||
// Possibly already *float64
|
||||
if vP, ok := val.(*float64); ok {
|
||||
return vP, nil
|
||||
}
|
||||
// Possibly float64
|
||||
if v, ok := val.(float64); ok {
|
||||
return &v, nil
|
||||
}
|
||||
// Possibly decimal.Decimal
|
||||
if d, ok := val.(decimal.Decimal); ok {
|
||||
f := d.InexactFloat64()
|
||||
return &f, nil
|
||||
}
|
||||
return nil, fmt.Errorf("unexpected value type for interface %v of type %T, expected float64, *float64, or decimal.Decimal", val, val)
|
||||
|
||||
// ----------------------------
|
||||
// Time / Nullable Time
|
||||
// ----------------------------
|
||||
case data.FieldTypeTime:
|
||||
v, ok := val.(time.Time)
|
||||
if !ok {
|
||||
return nil, fmt.Errorf("unexpected value type for interface %v of type %T, expected time.Time", val, val)
|
||||
}
|
||||
return v, nil
|
||||
|
||||
case data.FieldTypeNullableTime:
|
||||
vP, ok := val.(*time.Time)
|
||||
if ok {
|
||||
return vP, nil
|
||||
}
|
||||
v, ok := val.(time.Time)
|
||||
if ok {
|
||||
return &v, nil
|
||||
}
|
||||
return nil, fmt.Errorf("unexpected value type for interface %v of type %T, expected time.Time or *time.Time", val, val)
|
||||
|
||||
// ----------------------------
|
||||
// String / Nullable String
|
||||
// ----------------------------
|
||||
case data.FieldTypeString:
|
||||
v, ok := val.(string)
|
||||
if !ok {
|
||||
return nil, fmt.Errorf("unexpected value type for interface %v of type %T, expected string", val, val)
|
||||
}
|
||||
return v, nil
|
||||
|
||||
case data.FieldTypeNullableString:
|
||||
vP, ok := val.(*string)
|
||||
if ok {
|
||||
return vP, nil
|
||||
}
|
||||
v, ok := val.(string)
|
||||
if ok {
|
||||
return &v, nil
|
||||
}
|
||||
return nil, fmt.Errorf("unexpected value type for interface %v of type %T, expected string or *string", val, val)
|
||||
|
||||
// ----------------------------
|
||||
// Bool / Nullable Bool
|
||||
// ----------------------------
|
||||
case data.FieldTypeBool:
|
||||
v, ok := val.(bool)
|
||||
if !ok {
|
||||
return nil, fmt.Errorf("unexpected value type for interface %v of type %T, expected bool", val, val)
|
||||
}
|
||||
return v, nil
|
||||
|
||||
case data.FieldTypeNullableBool:
|
||||
vP, ok := val.(*bool)
|
||||
if ok {
|
||||
return vP, nil
|
||||
}
|
||||
v, ok := val.(bool)
|
||||
if ok {
|
||||
return &v, nil
|
||||
}
|
||||
return nil, fmt.Errorf("unexpected value type for interface %v of type %T, expected bool or *bool", val, val)
|
||||
|
||||
// ----------------------------
|
||||
// Fallback / Unsupported
|
||||
// ----------------------------
|
||||
default:
|
||||
return nil, fmt.Errorf("unsupported field type %s for val %v", fieldType, val)
|
||||
}
|
||||
}
|
||||
|
||||
// parseVal attempts to assert `val` as type T. If successful, it returns either
|
||||
// the value or a pointer, depending on `isNullable`. If not, returns an error.
|
||||
func parseVal[T any](val interface{}, typeName string, isNullable bool) (interface{}, error) {
|
||||
v, ok := val.(T)
|
||||
if !ok {
|
||||
return nil, fmt.Errorf("unexpected value type %v of type %T, expected %s", val, val, typeName)
|
||||
}
|
||||
return ptrIfNull(v, isNullable), nil
|
||||
}
|
||||
|
||||
// parseFloat64OrDecimal handles the special case where val can be float64 or decimal.Decimal.
|
||||
func parseFloat64OrDecimal(val interface{}, isNullable bool) (interface{}, error) {
|
||||
if fv, ok := val.(float64); ok {
|
||||
return ptrIfNull(fv, isNullable), nil
|
||||
}
|
||||
if d, ok := val.(decimal.Decimal); ok {
|
||||
return ptrIfNull(d.InexactFloat64(), isNullable), nil
|
||||
}
|
||||
return nil, fmt.Errorf("unexpected value type %v of type %T, expected float64 or decimal.Decimal", val, val)
|
||||
}
|
||||
|
||||
// parseBoolFromInt8 asserts val as an int8, converts non-zero to true.
|
||||
// Returns pointer if isNullable, otherwise the bool value.
|
||||
func parseBoolFromInt8(val interface{}, isNullable bool) (interface{}, error) {
|
||||
v, ok := val.(int8)
|
||||
if !ok {
|
||||
return nil, fmt.Errorf("unexpected value type %v of type %T, expected int8 (for bool)", val, val)
|
||||
}
|
||||
b := (v != 0)
|
||||
return ptrIfNull(b, isNullable), nil
|
||||
}
|
||||
|
||||
// ptrIfNull returns a pointer to val if isNullable is true; otherwise, returns val.
|
||||
func ptrIfNull[T any](val T, isNullable bool) interface{} {
|
||||
if isNullable {
|
||||
return &val
|
||||
}
|
||||
return val
|
||||
}
|
||||
|
@ -3,10 +3,12 @@
|
||||
package sql
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
"io"
|
||||
"strings"
|
||||
|
||||
mysql "github.com/dolthub/go-mysql-server/sql"
|
||||
"github.com/dolthub/go-mysql-server/sql/types"
|
||||
"github.com/grafana/grafana-plugin-sdk-go/data"
|
||||
)
|
||||
|
||||
@ -124,3 +126,38 @@ type partition []byte
|
||||
func (p partition) Key() []byte {
|
||||
return p
|
||||
}
|
||||
|
||||
// Helper function to convert data.FieldType to types.Type
|
||||
func convertDataType(fieldType data.FieldType) mysql.Type {
|
||||
switch fieldType {
|
||||
case data.FieldTypeInt8, data.FieldTypeNullableInt8:
|
||||
return types.Int8
|
||||
case data.FieldTypeUint8, data.FieldTypeNullableUint8:
|
||||
return types.Uint8
|
||||
case data.FieldTypeInt16, data.FieldTypeNullableInt16:
|
||||
return types.Int16
|
||||
case data.FieldTypeUint16, data.FieldTypeNullableUint16:
|
||||
return types.Uint16
|
||||
case data.FieldTypeInt32, data.FieldTypeNullableInt32:
|
||||
return types.Int32
|
||||
case data.FieldTypeUint32, data.FieldTypeNullableUint32:
|
||||
return types.Uint32
|
||||
case data.FieldTypeInt64, data.FieldTypeNullableInt64:
|
||||
return types.Int64
|
||||
case data.FieldTypeUint64, data.FieldTypeNullableUint64:
|
||||
return types.Uint64
|
||||
case data.FieldTypeFloat32, data.FieldTypeNullableFloat32:
|
||||
return types.Float32
|
||||
case data.FieldTypeFloat64, data.FieldTypeNullableFloat64:
|
||||
return types.Float64
|
||||
case data.FieldTypeString, data.FieldTypeNullableString:
|
||||
return types.Text
|
||||
case data.FieldTypeBool, data.FieldTypeNullableBool:
|
||||
return types.Boolean
|
||||
case data.FieldTypeTime, data.FieldTypeNullableTime:
|
||||
return types.Timestamp
|
||||
default:
|
||||
fmt.Printf("------- Unsupported field type: %v", fieldType)
|
||||
return types.JSON
|
||||
}
|
||||
}
|
||||
|
@ -31,7 +31,7 @@ func NewSQLCommand(refID, rawSQL string) (*SQLCommand, error) {
|
||||
if err != nil {
|
||||
logger.Warn("invalid sql query", "sql", rawSQL, "error", err)
|
||||
return nil, errutil.BadRequest("sql-invalid-sql",
|
||||
errutil.WithPublicMessage("error reading SQL command"),
|
||||
errutil.WithPublicMessage(fmt.Sprintf("invalid SQL query: %s", err)),
|
||||
)
|
||||
}
|
||||
if len(tables) == 0 {
|
||||
|
@ -111,7 +111,8 @@ func (s *SocialGenericOAuth) Validate(ctx context.Context, newSettings ssoModels
|
||||
return err
|
||||
}
|
||||
|
||||
if info.Extra[teamIdsKey] != "" && (info.TeamIdsAttributePath == "" || info.TeamsUrl == "") {
|
||||
teamIds := util.SplitString(info.Extra[teamIdsKey])
|
||||
if len(teamIds) > 0 && (info.TeamIdsAttributePath == "" || info.TeamsUrl == "") {
|
||||
return ssosettings.ErrInvalidOAuthConfig("If Team Ids are configured then Team Ids attribute path and Teams URL must be configured.")
|
||||
}
|
||||
|
||||
|
@ -1000,6 +1000,34 @@ func TestSocialGenericOAuth_Validate(t *testing.T) {
|
||||
},
|
||||
wantErr: nil,
|
||||
},
|
||||
{
|
||||
name: "passes when team_ids is an empty array and teams_id_attribute_path and teams_url are empty",
|
||||
settings: ssoModels.SSOSettings{
|
||||
Settings: map[string]any{
|
||||
"client_id": "client-id",
|
||||
"team_ids_attribute_path": "",
|
||||
"teams_url": "",
|
||||
"auth_url": "https://example.com/auth",
|
||||
"token_url": "https://example.com/token",
|
||||
"team_ids": "[]",
|
||||
},
|
||||
},
|
||||
wantErr: nil,
|
||||
},
|
||||
{
|
||||
name: "passes when team_ids is set and teams_id_attribute_path and teams_url are not empty",
|
||||
settings: ssoModels.SSOSettings{
|
||||
Settings: map[string]any{
|
||||
"client_id": "client-id",
|
||||
"team_ids_attribute_path": "teams",
|
||||
"teams_url": "https://example.com/teams",
|
||||
"auth_url": "https://example.com/auth",
|
||||
"token_url": "https://example.com/token",
|
||||
"team_ids": "[\"123\"]",
|
||||
},
|
||||
},
|
||||
wantErr: nil,
|
||||
},
|
||||
{
|
||||
name: "fails if settings map contains an invalid field",
|
||||
settings: ssoModels.SSOSettings{
|
||||
@ -1116,6 +1144,34 @@ func TestSocialGenericOAuth_Validate(t *testing.T) {
|
||||
},
|
||||
wantErr: ssosettings.ErrBaseInvalidOAuthConfig,
|
||||
},
|
||||
{
|
||||
name: "fails when team_ids is a valid string and teams_id_attribute_path and teams_url are empty",
|
||||
settings: ssoModels.SSOSettings{
|
||||
Settings: map[string]any{
|
||||
"client_id": "client-id",
|
||||
"team_ids_attribute_path": "",
|
||||
"teams_url": "",
|
||||
"auth_url": "https://example.com/auth",
|
||||
"token_url": "https://example.com/token",
|
||||
"team_ids": "123",
|
||||
},
|
||||
},
|
||||
wantErr: ssosettings.ErrBaseInvalidOAuthConfig,
|
||||
},
|
||||
{
|
||||
name: "fails when team_ids is a valid array and teams_id_attribute_path and teams_url are empty",
|
||||
settings: ssoModels.SSOSettings{
|
||||
Settings: map[string]any{
|
||||
"client_id": "client-id",
|
||||
"team_ids_attribute_path": "",
|
||||
"teams_url": "",
|
||||
"auth_url": "https://example.com/auth",
|
||||
"token_url": "https://example.com/token",
|
||||
"team_ids": "[\"123\",\"456\",\"789\"]",
|
||||
},
|
||||
},
|
||||
wantErr: ssosettings.ErrBaseInvalidOAuthConfig,
|
||||
},
|
||||
}
|
||||
|
||||
for _, tc := range testCases {
|
||||
|
@ -6,7 +6,7 @@ toolchain go1.23.6
|
||||
|
||||
require (
|
||||
github.com/grafana/dskit v0.0.0-20241105154643-a6b453a88040
|
||||
github.com/grafana/grafana-plugin-sdk-go v0.265.0
|
||||
github.com/grafana/grafana-plugin-sdk-go v0.266.0
|
||||
github.com/json-iterator/go v1.1.12
|
||||
github.com/prometheus/client_golang v1.20.5
|
||||
github.com/prometheus/common v0.62.0
|
||||
@ -111,7 +111,7 @@ require (
|
||||
golang.org/x/mod v0.22.0 // indirect
|
||||
golang.org/x/net v0.34.0 // indirect
|
||||
golang.org/x/sync v0.11.0 // indirect
|
||||
golang.org/x/sys v0.29.0 // indirect
|
||||
golang.org/x/sys v0.30.0 // indirect
|
||||
golang.org/x/text v0.21.0 // indirect
|
||||
golang.org/x/tools v0.29.0 // indirect
|
||||
golang.org/x/xerrors v0.0.0-20240903120638-7835f813f4da // indirect
|
||||
|
@ -120,8 +120,8 @@ github.com/gorilla/mux v1.8.1 h1:TuBL49tXwgrFYWhqrNgrUNEY92u81SPhu7sTdzQEiWY=
|
||||
github.com/gorilla/mux v1.8.1/go.mod h1:AKf9I4AEqPTmMytcMc0KkNouC66V3BtZ4qD5fmWSiMQ=
|
||||
github.com/grafana/dskit v0.0.0-20241105154643-a6b453a88040 h1:IR+UNYHqaU31t8/TArJk8K/GlDwOyxMpGNkWCXeZ28g=
|
||||
github.com/grafana/dskit v0.0.0-20241105154643-a6b453a88040/go.mod h1:SPLNCARd4xdjCkue0O6hvuoveuS1dGJjDnfxYe405YQ=
|
||||
github.com/grafana/grafana-plugin-sdk-go v0.265.0 h1:XshoH8R23Jm9jRreW9R3aOrIVr9vxhCWFyrMe7BFSks=
|
||||
github.com/grafana/grafana-plugin-sdk-go v0.265.0/go.mod h1:nkN6xI08YcX6CGsgvRA2+19nhXA/ZPuneLMUUElOD80=
|
||||
github.com/grafana/grafana-plugin-sdk-go v0.266.0 h1:YP+iEpXH3HRX9Xo4NHjsrJhN2W7uVTtkLNzMHYbmiLI=
|
||||
github.com/grafana/grafana-plugin-sdk-go v0.266.0/go.mod h1:bxkXrBQ4QSmOncsWdIOcpgP+M6wajQNMAPXlbWrqAWY=
|
||||
github.com/grafana/otel-profiling-go v0.5.1 h1:stVPKAFZSa7eGiqbYuG25VcqYksR6iWvF3YH66t4qL8=
|
||||
github.com/grafana/otel-profiling-go v0.5.1/go.mod h1:ftN/t5A/4gQI19/8MoWurBEtC6gFw8Dns1sJZ9W4Tls=
|
||||
github.com/grafana/pyroscope-go/godeltaprof v0.1.8 h1:iwOtYXeeVSAeYefJNaxDytgjKtUuKQbJqgAIjlnicKg=
|
||||
@ -344,8 +344,8 @@ golang.org/x/net v0.0.0-20200226121028-0de0cce0169b/go.mod h1:z5CRVTTTmAJ677TzLL
|
||||
golang.org/x/net v0.0.0-20201021035429-f5854403a974/go.mod h1:sp8m0HH+o8qH0wwXwYZr8TS3Oi6o0r6Gce1SSxlDquU=
|
||||
golang.org/x/net v0.34.0 h1:Mb7Mrk043xzHgnRM88suvJFwzVrRfHEHJEl5/71CKw0=
|
||||
golang.org/x/net v0.34.0/go.mod h1:di0qlW3YNM5oh6GqDGQr92MyTozJPmybPK4Ev/Gm31k=
|
||||
golang.org/x/oauth2 v0.25.0 h1:CY4y7XT9v0cRI9oupztF8AgiIu99L/ksR/Xp/6jrZ70=
|
||||
golang.org/x/oauth2 v0.25.0/go.mod h1:XYTD2NtWslqkgxebSiOHnXEap4TF09sJSc7H1sXbhtI=
|
||||
golang.org/x/oauth2 v0.26.0 h1:afQXWNNaeC4nvZ0Ed9XvCCzXM6UHJG7iCg0W4fPqSBE=
|
||||
golang.org/x/oauth2 v0.26.0/go.mod h1:XYTD2NtWslqkgxebSiOHnXEap4TF09sJSc7H1sXbhtI=
|
||||
golang.org/x/sync v0.0.0-20190423024810-112230192c58/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
|
||||
golang.org/x/sync v0.0.0-20190911185100-cd5d95a43a6e/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
|
||||
golang.org/x/sync v0.0.0-20201020160332-67f06af15bc9/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
|
||||
@ -363,8 +363,8 @@ golang.org/x/sys v0.0.0-20220503163025-988cb79eb6c6/go.mod h1:oPkhp1MJrh7nUepCBc
|
||||
golang.org/x/sys v0.0.0-20220811171246-fbc7d0a398ab/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
|
||||
golang.org/x/sys v0.6.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
|
||||
golang.org/x/sys v0.14.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA=
|
||||
golang.org/x/sys v0.29.0 h1:TPYlXGxvx1MGTn2GiZDhnjPA9wZzZeGKHHmKhHYvgaU=
|
||||
golang.org/x/sys v0.29.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA=
|
||||
golang.org/x/sys v0.30.0 h1:QjkSwP/36a20jFYWkSue1YwXzLmsV5Gfq7Eiy72C1uc=
|
||||
golang.org/x/sys v0.30.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA=
|
||||
golang.org/x/text v0.3.0/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ=
|
||||
golang.org/x/text v0.3.3/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ=
|
||||
golang.org/x/text v0.21.0 h1:zyQAAkrwaneQ066sspRyJaG9VNi/YJ1NfzcGB3hZ/qo=
|
||||
|
@ -11,7 +11,6 @@ import (
|
||||
"github.com/grafana/grafana/pkg/registry/apis/featuretoggle"
|
||||
"github.com/grafana/grafana/pkg/registry/apis/folders"
|
||||
"github.com/grafana/grafana/pkg/registry/apis/iam"
|
||||
"github.com/grafana/grafana/pkg/registry/apis/peakq"
|
||||
"github.com/grafana/grafana/pkg/registry/apis/provisioning"
|
||||
"github.com/grafana/grafana/pkg/registry/apis/query"
|
||||
"github.com/grafana/grafana/pkg/registry/apis/scope"
|
||||
@ -31,7 +30,6 @@ func ProvideRegistryServiceSink(
|
||||
_ *featuretoggle.FeatureFlagAPIBuilder,
|
||||
_ *datasource.DataSourceAPIBuilder,
|
||||
_ *folders.FolderAPIBuilder,
|
||||
_ *peakq.PeakQAPIBuilder,
|
||||
_ *iam.IdentityAccessManagementAPIBuilder,
|
||||
_ *scope.ScopeAPIBuilder,
|
||||
_ *query.QueryAPIBuilder,
|
||||
|
@ -23,7 +23,6 @@ import (
|
||||
"github.com/grafana/grafana/pkg/services/apiserver/endpoints/request"
|
||||
gapiutil "github.com/grafana/grafana/pkg/services/apiserver/utils"
|
||||
"github.com/grafana/grafana/pkg/services/dashboards"
|
||||
"github.com/grafana/grafana/pkg/services/dashboards/service"
|
||||
"github.com/grafana/grafana/pkg/services/provisioning"
|
||||
"github.com/grafana/grafana/pkg/storage/legacysql"
|
||||
"github.com/grafana/grafana/pkg/storage/unified/resource"
|
||||
@ -312,7 +311,7 @@ func (a *dashboardSqlAccess) scanRow(rows *sql.Rows, history bool) (*dashboardRo
|
||||
ts := time.Unix(origin_ts.Int64, 0)
|
||||
|
||||
repo := &utils.ResourceRepositoryInfo{
|
||||
Name: origin_name.String,
|
||||
Name: dashboard.ProvisionedFileNameWithPrefix(origin_name.String),
|
||||
Hash: origin_hash.String,
|
||||
Timestamp: &ts,
|
||||
}
|
||||
@ -331,7 +330,7 @@ func (a *dashboardSqlAccess) scanRow(rows *sql.Rows, history bool) (*dashboardRo
|
||||
meta.SetRepositoryInfo(repo)
|
||||
} else if plugin_id.String != "" {
|
||||
meta.SetRepositoryInfo(&utils.ResourceRepositoryInfo{
|
||||
Name: "plugin",
|
||||
Name: dashboard.PluginIDRepoName,
|
||||
Path: plugin_id.String,
|
||||
})
|
||||
}
|
||||
@ -427,7 +426,7 @@ func (a *dashboardSqlAccess) SaveDashboard(ctx context.Context, orgId int64, das
|
||||
out, err := a.dashStore.SaveDashboard(ctx, dashboards.SaveDashboardCommand{
|
||||
OrgID: orgId,
|
||||
Message: meta.GetMessage(),
|
||||
PluginID: service.GetPluginIDFromMeta(meta),
|
||||
PluginID: dashboard.GetPluginIDFromMeta(meta),
|
||||
Dashboard: simplejson.NewFromAny(dash.Spec.UnstructuredContent()),
|
||||
FolderUID: meta.GetFolder(),
|
||||
Overwrite: true, // already passed the revisionVersion checks!
|
||||
|
110
pkg/registry/apis/dashboard/legacy/sql_dashboards_test.go
Normal file
110
pkg/registry/apis/dashboard/legacy/sql_dashboards_test.go
Normal file
@ -0,0 +1,110 @@
|
||||
package legacy
|
||||
|
||||
import (
|
||||
"context"
|
||||
"testing"
|
||||
"time"
|
||||
|
||||
"github.com/DATA-DOG/go-sqlmock"
|
||||
"github.com/grafana/grafana/pkg/apimachinery/apis/common/v0alpha1"
|
||||
"github.com/grafana/grafana/pkg/apimachinery/utils"
|
||||
"github.com/grafana/grafana/pkg/services/provisioning"
|
||||
"github.com/stretchr/testify/require"
|
||||
v1 "k8s.io/apimachinery/pkg/apis/meta/v1"
|
||||
)
|
||||
|
||||
// TestScanRow verifies dashboardSqlAccess.scanRow: that a raw SQL result row
// is converted into a dashboard resource with the expected spec, namespace,
// continue token, and k8s metadata annotations for the plain, file-provisioned,
// and plugin-provisioned cases. Rows are supplied via go-sqlmock, so the
// column order in `columns` must match what scanRow scans positionally.
func TestScanRow(t *testing.T) {
	mockDB, mock, err := sqlmock.New()
	require.NoError(t, err)
	defer mockDB.Close() // nolint:errcheck

	pathToFile := "path/to/file"
	// Provisioner mock resolves every provisioner name to the fixed path
	// "provisioner"; scanRow uses it to relativize origin_path (see below).
	provisioner := provisioning.NewProvisioningServiceMock(context.Background())
	provisioner.GetDashboardProvisionerResolvedPathFunc = func(name string) string { return "provisioner" }
	store := &dashboardSqlAccess{
		namespacer:   func(_ int64) string { return "default" },
		provisioning: provisioner,
	}

	// Positional schema expected by scanRow — keep in sync with the query it runs.
	columns := []string{"orgId", "dashboard_id", "name", "folder_uid", "deleted", "plugin_id", "origin_name", "origin_path", "origin_hash", "origin_ts", "created", "createdBy", "createdByID", "updated", "updatedBy", "updatedByID", "version", "message", "data"}
	id := int64(100)
	title := "Test Dashboard"
	folderUID := "folder123"
	timestamp := time.Now()
	k8sTimestamp := v1.Time{Time: timestamp}
	version := int64(2)
	message := "updated message"
	createdUser := "creator"
	updatedUser := "updator"

	t.Run("Should scan a valid row correctly", func(t *testing.T) {
		// No plugin_id and no origin_* values: an ordinary, unprovisioned dashboard.
		rows := sqlmock.NewRows(columns).AddRow(1, id, title, folderUID, nil, "", "", "", "", 0, timestamp, createdUser, 0, timestamp, updatedUser, 0, version, message, []byte(`{"key": "value"}`))
		mock.ExpectQuery("SELECT *").WillReturnRows(rows)
		resultRows, err := mockDB.Query("SELECT *")
		require.NoError(t, err)
		defer resultRows.Close() // nolint:errcheck
		resultRows.Next()

		row, err := store.scanRow(resultRows, false)
		require.NoError(t, err)
		require.NotNil(t, row)
		require.Equal(t, "Test Dashboard", row.Dash.Name)
		require.Equal(t, version, row.RV) // rv should be the dashboard version
		// The raw JSON "data" column becomes the unstructured spec.
		require.Equal(t, v0alpha1.Unstructured{
			Object: map[string]interface{}{"key": "value"},
		}, row.Dash.Spec)
		require.Equal(t, "default", row.Dash.Namespace)
		// Pagination token carries the org id and internal dashboard id.
		require.Equal(t, &continueToken{orgId: int64(1), id: id}, row.token)

		meta, err := utils.MetaAccessor(row.Dash)
		require.NoError(t, err)
		require.Equal(t, id, meta.GetDeprecatedInternalID()) // nolint:staticcheck
		require.Equal(t, version, meta.GetGeneration())      // generation should be dash version
		require.Equal(t, k8sTimestamp, meta.GetCreationTimestamp())
		require.Equal(t, "user:"+createdUser, meta.GetCreatedBy()) // should be prefixed by user:
		require.Equal(t, "user:"+updatedUser, meta.GetUpdatedBy()) // should be prefixed by user:
		require.Equal(t, message, meta.GetMessage())
		require.Equal(t, folderUID, meta.GetFolder())
	})

	t.Run("File provisioned dashboard should have annotations", func(t *testing.T) {
		// origin_name/origin_path/origin_hash/origin_ts populated: file provisioning.
		rows := sqlmock.NewRows(columns).AddRow(1, id, title, folderUID, nil, "", "provisioner", pathToFile, "hashing", 100000, timestamp, createdUser, 0, timestamp, updatedUser, 0, version, message, []byte(`{"key": "value"}`))
		mock.ExpectQuery("SELECT *").WillReturnRows(rows)
		resultRows, err := mockDB.Query("SELECT *")
		require.NoError(t, err)
		defer resultRows.Close() // nolint:errcheck
		resultRows.Next()

		row, err := store.scanRow(resultRows, false)
		require.NoError(t, err)
		require.NotNil(t, row)

		meta, err := utils.MetaAccessor(row.Dash)
		require.NoError(t, err)
		require.Equal(t, "file:provisioner", meta.GetRepositoryName()) // should be prefixed by file:
		require.Equal(t, "../"+pathToFile, meta.GetRepositoryPath())   // relative to provisioner
		require.Equal(t, "hashing", meta.GetRepositoryHash())
		ts, err := meta.GetRepositoryTimestamp()
		require.NoError(t, err)
		require.Equal(t, int64(100000), ts.Unix())
	})

	t.Run("Plugin provisioned dashboard should have annotations", func(t *testing.T) {
		// plugin_id set ("slo") and no origin_* values: plugin provisioning.
		rows := sqlmock.NewRows(columns).AddRow(1, id, title, folderUID, nil, "slo", "", "", "", 0, timestamp, createdUser, 0, timestamp, updatedUser, 0, version, message, []byte(`{"key": "value"}`))
		mock.ExpectQuery("SELECT *").WillReturnRows(rows)
		resultRows, err := mockDB.Query("SELECT *")
		require.NoError(t, err)
		defer resultRows.Close() // nolint:errcheck
		resultRows.Next()

		row, err := store.scanRow(resultRows, false)
		require.NoError(t, err)
		require.NotNil(t, row)

		meta, err := utils.MetaAccessor(row.Dash)
		require.NoError(t, err)
		require.Equal(t, "plugin", meta.GetRepositoryName())
		require.Equal(t, "slo", meta.GetRepositoryPath()) // the ID of the plugin
		require.Equal(t, "", meta.GetRepositoryHash())    // hash is not used on plugins
	})
}
|
@ -18,6 +18,7 @@ import (
|
||||
"github.com/grafana/grafana/pkg/services/sqlstore/searchstore"
|
||||
"github.com/grafana/grafana/pkg/storage/unified/resource"
|
||||
"google.golang.org/grpc"
|
||||
"k8s.io/apimachinery/pkg/selection"
|
||||
)
|
||||
|
||||
type DashboardSearchClient struct {
|
||||
@ -29,6 +30,7 @@ func NewDashboardSearchClient(dashboardStore dashboards.Store) *DashboardSearchC
|
||||
return &DashboardSearchClient{dashboardStore: dashboardStore}
|
||||
}
|
||||
|
||||
// nolint:gocyclo
|
||||
func (c *DashboardSearchClient) Search(ctx context.Context, req *resource.ResourceSearchRequest, opts ...grpc.CallOption) (*resource.ResourceSearchResponse, error) {
|
||||
user, err := identity.GetRequester(ctx)
|
||||
if err != nil {
|
||||
@ -103,6 +105,8 @@ func (c *DashboardSearchClient) Search(ctx context.Context, req *resource.Resour
|
||||
}
|
||||
|
||||
for _, field := range req.Options.Fields {
|
||||
vals := field.GetValues()
|
||||
|
||||
switch field.Key {
|
||||
case resource.SEARCH_FIELD_TAGS:
|
||||
query.Tags = field.GetValues()
|
||||
@ -110,7 +114,6 @@ func (c *DashboardSearchClient) Search(ctx context.Context, req *resource.Resour
|
||||
query.DashboardUIDs = field.GetValues()
|
||||
query.DashboardIds = nil
|
||||
case resource.SEARCH_FIELD_FOLDER:
|
||||
vals := field.GetValues()
|
||||
folders := make([]string, len(vals))
|
||||
|
||||
for i, val := range vals {
|
||||
@ -122,12 +125,28 @@ func (c *DashboardSearchClient) Search(ctx context.Context, req *resource.Resour
|
||||
}
|
||||
|
||||
query.FolderUIDs = folders
|
||||
}
|
||||
}
|
||||
case resource.SEARCH_FIELD_REPOSITORY_PATH:
|
||||
// only one value is supported in legacy search
|
||||
if len(vals) != 1 {
|
||||
return nil, fmt.Errorf("only one repo path query is supported")
|
||||
}
|
||||
query.ProvisionedPath = vals[0]
|
||||
case resource.SEARCH_FIELD_REPOSITORY_NAME:
|
||||
if field.Operator == string(selection.NotIn) {
|
||||
for _, val := range vals {
|
||||
name, _ := dashboard.GetProvisionedFileNameFromMeta(val)
|
||||
query.ProvisionedReposNotIn = append(query.ProvisionedReposNotIn, name)
|
||||
}
|
||||
continue
|
||||
}
|
||||
|
||||
res, err := c.dashboardStore.FindDashboards(ctx, query)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
// only one value is supported in legacy search
|
||||
if len(vals) != 1 {
|
||||
return nil, fmt.Errorf("only one repo name is supported")
|
||||
}
|
||||
|
||||
query.ProvisionedRepo, _ = dashboard.GetProvisionedFileNameFromMeta(vals[0])
|
||||
}
|
||||
}
|
||||
|
||||
searchFields := resource.StandardSearchFields()
|
||||
@ -141,6 +160,41 @@ func (c *DashboardSearchClient) Search(ctx context.Context, req *resource.Resour
|
||||
},
|
||||
}
|
||||
|
||||
// if we are querying for provisioning information, we need to use a different
|
||||
// legacy sql query, since legacy search does not support this
|
||||
if query.ProvisionedRepo != "" || len(query.ProvisionedReposNotIn) > 0 {
|
||||
var dashes []*dashboards.Dashboard
|
||||
if query.ProvisionedRepo == dashboard.PluginIDRepoName {
|
||||
dashes, err = c.dashboardStore.GetDashboardsByPluginID(ctx, &dashboards.GetDashboardsByPluginIDQuery{
|
||||
PluginID: query.ProvisionedPath,
|
||||
OrgID: user.GetOrgID(),
|
||||
})
|
||||
} else if query.ProvisionedRepo != "" {
|
||||
dashes, err = c.dashboardStore.GetProvisionedDashboardsByName(ctx, query.ProvisionedRepo)
|
||||
} else if len(query.ProvisionedReposNotIn) > 0 {
|
||||
dashes, err = c.dashboardStore.GetOrphanedProvisionedDashboards(ctx, query.ProvisionedReposNotIn)
|
||||
}
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
for _, dashboard := range dashes {
|
||||
list.Results.Rows = append(list.Results.Rows, &resource.ResourceTableRow{
|
||||
Key: getResourceKey(&dashboards.DashboardSearchProjection{
|
||||
UID: dashboard.UID,
|
||||
}, req.Options.Key.Namespace),
|
||||
Cells: [][]byte{[]byte(dashboard.Title), []byte(dashboard.FolderUID), []byte{}},
|
||||
})
|
||||
}
|
||||
|
||||
return list, nil
|
||||
}
|
||||
|
||||
res, err := c.dashboardStore.FindDashboards(ctx, query)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
hits := formatQueryResult(res)
|
||||
|
||||
for _, dashboard := range hits {
|
||||
@ -155,6 +209,8 @@ func (c *DashboardSearchClient) Search(ctx context.Context, req *resource.Resour
|
||||
})
|
||||
}
|
||||
|
||||
list.TotalHits = int64(len(list.Results.Rows))
|
||||
|
||||
return list, nil
|
||||
}
|
||||
|
||||
|
264
pkg/registry/apis/dashboard/legacysearcher/search_client_test.go
Normal file
264
pkg/registry/apis/dashboard/legacysearcher/search_client_test.go
Normal file
@ -0,0 +1,264 @@
|
||||
package legacysearcher
|
||||
|
||||
import (
|
||||
"context"
|
||||
"encoding/json"
|
||||
"testing"
|
||||
|
||||
"github.com/grafana/grafana/pkg/apimachinery/identity"
|
||||
"github.com/grafana/grafana/pkg/apimachinery/utils"
|
||||
"github.com/grafana/grafana/pkg/apis/dashboard"
|
||||
"github.com/grafana/grafana/pkg/services/dashboards"
|
||||
"github.com/grafana/grafana/pkg/services/user"
|
||||
"github.com/grafana/grafana/pkg/storage/unified/resource"
|
||||
"github.com/stretchr/testify/mock"
|
||||
"github.com/stretchr/testify/require"
|
||||
"k8s.io/apimachinery/pkg/selection"
|
||||
)
|
||||
|
||||
// TestDashboardSearchClient_Search verifies that DashboardSearchClient.Search
// translates a unified-storage ResourceSearchRequest into the matching legacy
// store call (FindDashboards, GetDashboardsByPluginID,
// GetProvisionedDashboardsByName, or GetOrphanedProvisionedDashboards) and
// formats the results back into a gRPC ResourceSearchResponse. The fake store
// is mock-based, so each subtest pins the exact query struct Search must build.
func TestDashboardSearchClient_Search(t *testing.T) {
	mockStore := dashboards.NewFakeDashboardStore(t)
	client := NewDashboardSearchClient(mockStore)
	ctx := context.Background()
	user := &user.SignedInUser{OrgID: 2}
	ctx = identity.WithRequester(ctx, user)

	dashboardKey := &resource.ResourceKey{
		Name:     "uid",
		Resource: dashboard.DASHBOARD_RESOURCE,
	}

	t.Run("Should parse results into GRPC", func(t *testing.T) {
		mockStore.On("FindDashboards", mock.Anything, &dashboards.FindPersistedDashboardsQuery{
			SignedInUser: user,      // user from context should be used
			Type:         "dash-db", // should set type based off of key
		}).Return([]dashboards.DashboardSearchProjection{
			{UID: "uid", Title: "Test Dashboard", FolderUID: "folder1", Term: "term"},
			{UID: "uid2", Title: "Test Dashboard2", FolderUID: "folder2"},
		}, nil).Once()

		req := &resource.ResourceSearchRequest{
			Options: &resource.ListOptions{
				Key: dashboardKey,
			},
		}
		resp, err := client.Search(ctx, req)
		require.NoError(t, err)

		// Tags are serialized to JSON in the tags cell; a row with no terms
		// still gets an (empty) JSON array rather than a nil cell.
		tags, err := json.Marshal([]string{"term"})
		require.NoError(t, err)

		emptyTags, err := json.Marshal([]string{})
		require.NoError(t, err)

		require.NotNil(t, resp)
		searchFields := resource.StandardSearchFields()
		require.Equal(t, &resource.ResourceSearchResponse{
			TotalHits: 2,
			Results: &resource.ResourceTable{
				Columns: []*resource.ResourceTableColumnDefinition{
					searchFields.Field(resource.SEARCH_FIELD_TITLE),
					searchFields.Field(resource.SEARCH_FIELD_FOLDER),
					searchFields.Field(resource.SEARCH_FIELD_TAGS),
				},
				Rows: []*resource.ResourceTableRow{
					{
						Key: &resource.ResourceKey{
							Name:     "uid",
							Group:    dashboard.GROUP,
							Resource: dashboard.DASHBOARD_RESOURCE,
						},
						Cells: [][]byte{
							[]byte("Test Dashboard"),
							[]byte("folder1"),
							tags,
						},
					},
					{
						Key: &resource.ResourceKey{
							Name:     "uid2",
							Group:    dashboard.GROUP,
							Resource: dashboard.DASHBOARD_RESOURCE,
						},
						Cells: [][]byte{
							[]byte("Test Dashboard2"),
							[]byte("folder2"),
							emptyTags,
						},
					},
				},
			},
		}, resp)
		mockStore.AssertExpectations(t)
	})

	t.Run("Query should be set as the title, and * should be removed", func(t *testing.T) {
		// Wildcard markers in the free-text query are stripped before the
		// legacy title filter is applied.
		mockStore.On("FindDashboards", mock.Anything, &dashboards.FindPersistedDashboardsQuery{
			Title:        "test",
			SignedInUser: user,      // user from context should be used
			Type:         "dash-db", // should set type based off of key
		}).Return([]dashboards.DashboardSearchProjection{
			{UID: "uid", Title: "Test Dashboard", FolderUID: "folder1"},
		}, nil).Once()

		req := &resource.ResourceSearchRequest{
			Options: &resource.ListOptions{
				Key: dashboardKey,
			},
			Query: "*test*",
		}
		resp, err := client.Search(ctx, req)

		require.NoError(t, err)
		require.NotNil(t, resp)
		mockStore.AssertExpectations(t)
	})

	t.Run("Should read labels for the dashboard ids", func(t *testing.T) {
		// The deprecated internal-ID label selector becomes a DashboardIds filter.
		mockStore.On("FindDashboards", mock.Anything, &dashboards.FindPersistedDashboardsQuery{
			DashboardIds: []int64{1, 2},
			SignedInUser: user,      // user from context should be used
			Type:         "dash-db", // should set type based off of key
		}).Return([]dashboards.DashboardSearchProjection{
			{UID: "uid", Title: "Test Dashboard", FolderUID: "folder1"},
		}, nil).Once()

		req := &resource.ResourceSearchRequest{
			Options: &resource.ListOptions{
				Key: dashboardKey,
				Labels: []*resource.Requirement{
					{
						Key:      utils.LabelKeyDeprecatedInternalID,
						Operator: "in",
						Values:   []string{"1", "2"},
					},
				},
			},
		}
		resp, err := client.Search(ctx, req)

		require.NoError(t, err)
		require.NotNil(t, resp)
		mockStore.AssertExpectations(t)
	})

	t.Run("Should modify fields to legacy compatible queries", func(t *testing.T) {
		// tags -> Tags, name -> DashboardUIDs, folder -> FolderUIDs (with ""
		// rewritten to the legacy "general" folder).
		mockStore.On("FindDashboards", mock.Anything, &dashboards.FindPersistedDashboardsQuery{
			DashboardUIDs: []string{"uid1", "uid2"},
			Tags:          []string{"tag1", "tag2"},
			FolderUIDs:    []string{"general", "folder1"},
			SignedInUser:  user,      // user from context should be used
			Type:          "dash-db", // should set type based off of key
		}).Return([]dashboards.DashboardSearchProjection{
			{UID: "uid", Title: "Test Dashboard", FolderUID: "folder1"},
		}, nil).Once()

		req := &resource.ResourceSearchRequest{
			Options: &resource.ListOptions{
				Key: dashboardKey,
				Fields: []*resource.Requirement{
					{
						Key:      resource.SEARCH_FIELD_TAGS,
						Operator: "in",
						Values:   []string{"tag1", "tag2"},
					},
					{
						Key:      resource.SEARCH_FIELD_NAME, // name should be used as uid
						Operator: "in",
						Values:   []string{"uid1", "uid2"},
					},
					{
						Key:      resource.SEARCH_FIELD_FOLDER,
						Operator: "in",
						Values:   []string{"", "folder1"}, // empty folder should be general
					},
				},
			},
		}
		resp, err := client.Search(ctx, req)

		require.NoError(t, err)
		require.NotNil(t, resp)
		mockStore.AssertExpectations(t)
	})

	t.Run("Should retrieve dashboards by plugin through a different function", func(t *testing.T) {
		// repo name "plugin" routes to GetDashboardsByPluginID instead of FindDashboards.
		mockStore.On("GetDashboardsByPluginID", mock.Anything, &dashboards.GetDashboardsByPluginIDQuery{
			PluginID: "slo",
			OrgID:    2, // retrieved from the signed in user
		}).Return([]*dashboards.Dashboard{
			{UID: "uid", Title: "Test Dashboard", FolderUID: "folder1"},
		}, nil).Once()

		req := &resource.ResourceSearchRequest{
			Options: &resource.ListOptions{
				Key: dashboardKey,
				Fields: []*resource.Requirement{
					{
						Key:      resource.SEARCH_FIELD_REPOSITORY_PATH,
						Operator: "in",
						Values:   []string{"slo"},
					},
					{
						Key:      resource.SEARCH_FIELD_REPOSITORY_NAME,
						Operator: "in",
						Values:   []string{"plugin"},
					},
				},
			},
		}
		resp, err := client.Search(ctx, req)

		require.NoError(t, err)
		require.NotNil(t, resp)
		mockStore.AssertExpectations(t)
	})

	t.Run("Should retrieve dashboards by provisioner name through a different function", func(t *testing.T) {
		mockStore.On("GetProvisionedDashboardsByName", mock.Anything, "test").Return([]*dashboards.Dashboard{
			{UID: "uid", Title: "Test Dashboard", FolderUID: "folder1"},
		}, nil).Once()

		req := &resource.ResourceSearchRequest{
			Options: &resource.ListOptions{
				Key: dashboardKey,
				Fields: []*resource.Requirement{
					{
						Key:      resource.SEARCH_FIELD_REPOSITORY_NAME,
						Operator: "in",
						Values:   []string{"file:test"}, // file prefix should be removed before going to legacy
					},
				},
			},
		}
		resp, err := client.Search(ctx, req)

		require.NoError(t, err)
		require.NotNil(t, resp)
		mockStore.AssertExpectations(t)
	})

	t.Run("Should retrieve orphaned dashboards if provisioner not in is specified", func(t *testing.T) {
		// A NotIn operator on repo name routes to GetOrphanedProvisionedDashboards.
		mockStore.On("GetOrphanedProvisionedDashboards", mock.Anything, []string{"test", "test2"}).Return([]*dashboards.Dashboard{
			{UID: "uid", Title: "Test Dashboard", FolderUID: "folder1"},
		}, nil).Once()

		req := &resource.ResourceSearchRequest{
			Options: &resource.ListOptions{
				Key: dashboardKey,
				Fields: []*resource.Requirement{
					{
						Key:      resource.SEARCH_FIELD_REPOSITORY_NAME,
						Operator: string(selection.NotIn),
						Values:   []string{"file:test", "file:test2"}, // file prefix should be removed before going to legacy
					},
				},
			},
		}
		resp, err := client.Search(ctx, req)

		require.NoError(t, err)
		require.NotNil(t, resp)
		mockStore.AssertExpectations(t)
	})
}
|
@ -3,6 +3,7 @@ package dashboard
|
||||
import (
|
||||
"context"
|
||||
"encoding/json"
|
||||
"fmt"
|
||||
"net/http"
|
||||
"net/url"
|
||||
"slices"
|
||||
@ -10,8 +11,6 @@ import (
|
||||
"strconv"
|
||||
"strings"
|
||||
|
||||
"github.com/grafana/grafana/pkg/storage/unified"
|
||||
"github.com/grafana/grafana/pkg/storage/unified/search"
|
||||
"go.opentelemetry.io/otel/trace"
|
||||
apierrors "k8s.io/apimachinery/pkg/api/errors"
|
||||
v1 "k8s.io/apimachinery/pkg/apis/meta/v1"
|
||||
@ -19,13 +18,18 @@ import (
|
||||
"k8s.io/kube-openapi/pkg/spec3"
|
||||
"k8s.io/kube-openapi/pkg/validation/spec"
|
||||
|
||||
"github.com/grafana/grafana/pkg/storage/unified/search"
|
||||
|
||||
"github.com/grafana/grafana/pkg/apimachinery/identity"
|
||||
"github.com/grafana/grafana/pkg/apis/dashboard"
|
||||
dashboardv0alpha1 "github.com/grafana/grafana/pkg/apis/dashboard/v0alpha1"
|
||||
folderv0alpha1 "github.com/grafana/grafana/pkg/apis/folder/v0alpha1"
|
||||
"github.com/grafana/grafana/pkg/infra/log"
|
||||
"github.com/grafana/grafana/pkg/services/apiserver/builder"
|
||||
"github.com/grafana/grafana/pkg/services/dashboards"
|
||||
dashboardsearch "github.com/grafana/grafana/pkg/services/dashboards/service/search"
|
||||
"github.com/grafana/grafana/pkg/services/featuremgmt"
|
||||
foldermodel "github.com/grafana/grafana/pkg/services/folder"
|
||||
"github.com/grafana/grafana/pkg/setting"
|
||||
"github.com/grafana/grafana/pkg/storage/unified/resource"
|
||||
"github.com/grafana/grafana/pkg/util/errhttp"
|
||||
@ -33,17 +37,19 @@ import (
|
||||
|
||||
// The DTO returns everything the UI needs in a single request
|
||||
type SearchHandler struct {
|
||||
log log.Logger
|
||||
client func(context.Context) resource.ResourceIndexClient
|
||||
tracer trace.Tracer
|
||||
log log.Logger
|
||||
client resource.ResourceIndexClient
|
||||
tracer trace.Tracer
|
||||
features featuremgmt.FeatureToggles
|
||||
}
|
||||
|
||||
func NewSearchHandler(tracer trace.Tracer, cfg *setting.Cfg, legacyDashboardSearcher resource.ResourceIndexClient) *SearchHandler {
|
||||
searchClient := resource.NewSearchClient(cfg, setting.UnifiedStorageConfigKeyDashboard, unified.GetResourceClient, legacyDashboardSearcher)
|
||||
func NewSearchHandler(tracer trace.Tracer, cfg *setting.Cfg, legacyDashboardSearcher resource.ResourceIndexClient, resourceClient resource.ResourceClient, features featuremgmt.FeatureToggles) *SearchHandler {
|
||||
searchClient := resource.NewSearchClient(cfg, setting.UnifiedStorageConfigKeyDashboard, resourceClient, legacyDashboardSearcher)
|
||||
return &SearchHandler{
|
||||
client: searchClient,
|
||||
log: log.New("grafana-apiserver.dashboards.search"),
|
||||
tracer: tracer,
|
||||
client: searchClient,
|
||||
log: log.New("grafana-apiserver.dashboards.search"),
|
||||
tracer: tracer,
|
||||
features: features,
|
||||
}
|
||||
}
|
||||
|
||||
@ -252,19 +258,6 @@ func (s *SearchHandler) DoSearch(w http.ResponseWriter, r *http.Request) {
|
||||
}
|
||||
searchRequest.Fields = fields
|
||||
|
||||
// Add the folder constraint. Note this does not do recursive search
|
||||
folder := queryParams.Get("folder")
|
||||
if folder != "" {
|
||||
if folder == rootFolder {
|
||||
folder = "" // root folder is empty in the search index
|
||||
}
|
||||
searchRequest.Options.Fields = []*resource.Requirement{{
|
||||
Key: "folder",
|
||||
Operator: "=",
|
||||
Values: []string{folder},
|
||||
}}
|
||||
}
|
||||
|
||||
types := queryParams["type"]
|
||||
var federate *resource.ResourceKey
|
||||
switch len(types) {
|
||||
@ -329,7 +322,33 @@ func (s *SearchHandler) DoSearch(w http.ResponseWriter, r *http.Request) {
|
||||
}
|
||||
|
||||
// The names filter
|
||||
if names, ok := queryParams["name"]; ok {
|
||||
names := queryParams["name"]
|
||||
|
||||
// Add the folder constraint. Note this does not do recursive search
|
||||
folder := queryParams.Get("folder")
|
||||
if folder == foldermodel.SharedWithMeFolderUID {
|
||||
dashboardUIDs, err := s.getDashboardsUIDsSharedWithUser(ctx, user)
|
||||
if err != nil {
|
||||
errhttp.Write(ctx, err, w)
|
||||
return
|
||||
}
|
||||
|
||||
// hijacks the "name" query param to only search for shared dashboard UIDs
|
||||
if len(dashboardUIDs) > 0 {
|
||||
names = append(names, dashboardUIDs...)
|
||||
}
|
||||
} else if folder != "" {
|
||||
if folder == rootFolder {
|
||||
folder = "" // root folder is empty in the search index
|
||||
}
|
||||
searchRequest.Options.Fields = []*resource.Requirement{{
|
||||
Key: "folder",
|
||||
Operator: "=",
|
||||
Values: []string{folder},
|
||||
}}
|
||||
}
|
||||
|
||||
if len(names) > 0 {
|
||||
if searchRequest.Options.Fields == nil {
|
||||
searchRequest.Options.Fields = []*resource.Requirement{}
|
||||
}
|
||||
@ -341,7 +360,7 @@ func (s *SearchHandler) DoSearch(w http.ResponseWriter, r *http.Request) {
|
||||
searchRequest.Options.Fields = append(searchRequest.Options.Fields, namesFilter...)
|
||||
}
|
||||
|
||||
result, err := s.client(ctx).Search(ctx, searchRequest)
|
||||
result, err := s.client.Search(ctx, searchRequest)
|
||||
if err != nil {
|
||||
errhttp.Write(ctx, err, w)
|
||||
return
|
||||
@ -378,3 +397,108 @@ func asResourceKey(ns string, k string) (*resource.ResourceKey, error) {
|
||||
|
||||
return key, nil
|
||||
}
|
||||
|
||||
func (s *SearchHandler) getDashboardsUIDsSharedWithUser(ctx context.Context, user identity.Requester) ([]string, error) {
|
||||
if !s.features.IsEnabledGlobally(featuremgmt.FlagUnifiedStorageSearchPermissionFiltering) {
|
||||
return []string{}, nil
|
||||
}
|
||||
|
||||
// gets dashboards that the user was granted read access to
|
||||
permissions := user.GetPermissions()
|
||||
dashboardPermissions := permissions[dashboards.ActionDashboardsRead]
|
||||
dashboardUids := make([]string, 0)
|
||||
sharedDashboards := make([]string, 0)
|
||||
|
||||
for _, dashboardPermission := range dashboardPermissions {
|
||||
if dashboardUid, found := strings.CutPrefix(dashboardPermission, dashboards.ScopeDashboardsPrefix); found {
|
||||
if !slices.Contains(dashboardUids, dashboardUid) {
|
||||
dashboardUids = append(dashboardUids, dashboardUid)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
if len(dashboardUids) == 0 {
|
||||
return sharedDashboards, nil
|
||||
}
|
||||
|
||||
key, err := asResourceKey(user.GetNamespace(), dashboard.DASHBOARD_RESOURCE)
|
||||
if err != nil {
|
||||
return sharedDashboards, err
|
||||
}
|
||||
|
||||
dashboardSearchRequest := &resource.ResourceSearchRequest{
|
||||
Fields: []string{"folder"},
|
||||
Limit: int64(len(dashboardUids)),
|
||||
Options: &resource.ListOptions{
|
||||
Key: key,
|
||||
Fields: []*resource.Requirement{{
|
||||
Key: "name",
|
||||
Operator: "in",
|
||||
Values: dashboardUids,
|
||||
}},
|
||||
},
|
||||
}
|
||||
// get all dashboards user has access to, along with their parent folder uid
|
||||
dashboardResult, err := s.client.Search(ctx, dashboardSearchRequest)
|
||||
if err != nil {
|
||||
return sharedDashboards, err
|
||||
}
|
||||
|
||||
folderUidIdx := -1
|
||||
for i, col := range dashboardResult.Results.Columns {
|
||||
if col.Name == "folder" {
|
||||
folderUidIdx = i
|
||||
}
|
||||
}
|
||||
|
||||
if folderUidIdx == -1 {
|
||||
return sharedDashboards, fmt.Errorf("Error retrieving folder information")
|
||||
}
|
||||
|
||||
// populate list of unique folder UIDs in the list of dashboards user has read permissions
|
||||
allFolders := make([]string, 0)
|
||||
for _, dash := range dashboardResult.Results.Rows {
|
||||
folderUid := string(dash.Cells[folderUidIdx])
|
||||
if folderUid != "" && !slices.Contains(allFolders, folderUid) {
|
||||
allFolders = append(allFolders, folderUid)
|
||||
}
|
||||
}
|
||||
|
||||
// only folders the user has access to will be returned here
|
||||
folderKey, err := asResourceKey(user.GetNamespace(), folderv0alpha1.RESOURCE)
|
||||
if err != nil {
|
||||
return sharedDashboards, err
|
||||
}
|
||||
|
||||
folderSearchRequest := &resource.ResourceSearchRequest{
|
||||
Fields: []string{"folder"},
|
||||
Limit: int64(len(allFolders)),
|
||||
Options: &resource.ListOptions{
|
||||
Key: folderKey,
|
||||
Fields: []*resource.Requirement{{
|
||||
Key: "name",
|
||||
Operator: "in",
|
||||
Values: allFolders,
|
||||
}},
|
||||
},
|
||||
}
|
||||
foldersResult, err := s.client.Search(ctx, folderSearchRequest)
|
||||
if err != nil {
|
||||
return sharedDashboards, err
|
||||
}
|
||||
|
||||
foldersWithAccess := make([]string, 0, len(foldersResult.Results.Rows))
|
||||
for _, fold := range foldersResult.Results.Rows {
|
||||
foldersWithAccess = append(foldersWithAccess, fold.Key.Name)
|
||||
}
|
||||
|
||||
// add to sharedDashboards dashboards user has access to, but does NOT have access to it's parent folder
|
||||
for _, dash := range dashboardResult.Results.Rows {
|
||||
dashboardUid := dash.Key.Name
|
||||
folderUid := string(dash.Cells[folderUidIdx])
|
||||
if folderUid != "" && !slices.Contains(foldersWithAccess, folderUid) {
|
||||
sharedDashboards = append(sharedDashboards, dashboardUid)
|
||||
}
|
||||
}
|
||||
return sharedDashboards, nil
|
||||
}
|
||||
|
@ -7,23 +7,25 @@ import (
|
||||
"net/http/httptest"
|
||||
"testing"
|
||||
|
||||
"github.com/stretchr/testify/assert"
|
||||
"github.com/stretchr/testify/require"
|
||||
"google.golang.org/grpc"
|
||||
|
||||
"github.com/grafana/grafana/pkg/apimachinery/identity"
|
||||
"github.com/grafana/grafana/pkg/apis/dashboard/v0alpha1"
|
||||
"github.com/grafana/grafana/pkg/apiserver/rest"
|
||||
"github.com/grafana/grafana/pkg/infra/log"
|
||||
"github.com/grafana/grafana/pkg/infra/tracing"
|
||||
"github.com/grafana/grafana/pkg/services/dashboards"
|
||||
"github.com/grafana/grafana/pkg/services/featuremgmt"
|
||||
"github.com/grafana/grafana/pkg/services/user"
|
||||
"github.com/grafana/grafana/pkg/setting"
|
||||
"github.com/grafana/grafana/pkg/storage/unified/resource"
|
||||
"github.com/stretchr/testify/assert"
|
||||
"github.com/stretchr/testify/require"
|
||||
"google.golang.org/grpc"
|
||||
)
|
||||
|
||||
func TestSearchFallback(t *testing.T) {
|
||||
t.Run("should hit legacy search handler on mode 0", func(t *testing.T) {
|
||||
mockClient := &MockClient{}
|
||||
mockUnifiedCtxclient := func(context.Context) resource.ResourceClient { return mockClient }
|
||||
mockLegacyClient := &MockClient{}
|
||||
|
||||
cfg := &setting.Cfg{
|
||||
@ -31,8 +33,8 @@ func TestSearchFallback(t *testing.T) {
|
||||
"dashboards.dashboard.grafana.app": {DualWriterMode: rest.Mode0},
|
||||
},
|
||||
}
|
||||
searchHandler := NewSearchHandler(tracing.NewNoopTracerService(), cfg, mockLegacyClient)
|
||||
searchHandler.client = resource.NewSearchClient(cfg, setting.UnifiedStorageConfigKeyDashboard, mockUnifiedCtxclient, mockLegacyClient)
|
||||
searchHandler := NewSearchHandler(tracing.NewNoopTracerService(), cfg, mockLegacyClient, mockClient, nil)
|
||||
searchHandler.client = resource.NewSearchClient(cfg, setting.UnifiedStorageConfigKeyDashboard, mockClient, mockLegacyClient)
|
||||
|
||||
rr := httptest.NewRecorder()
|
||||
req := httptest.NewRequest("GET", "/search", nil)
|
||||
@ -51,7 +53,6 @@ func TestSearchFallback(t *testing.T) {
|
||||
|
||||
t.Run("should hit legacy search handler on mode 1", func(t *testing.T) {
|
||||
mockClient := &MockClient{}
|
||||
mockUnifiedCtxclient := func(context.Context) resource.ResourceClient { return mockClient }
|
||||
mockLegacyClient := &MockClient{}
|
||||
|
||||
cfg := &setting.Cfg{
|
||||
@ -59,8 +60,8 @@ func TestSearchFallback(t *testing.T) {
|
||||
"dashboards.dashboard.grafana.app": {DualWriterMode: rest.Mode1},
|
||||
},
|
||||
}
|
||||
searchHandler := NewSearchHandler(tracing.NewNoopTracerService(), cfg, mockLegacyClient)
|
||||
searchHandler.client = resource.NewSearchClient(cfg, setting.UnifiedStorageConfigKeyDashboard, mockUnifiedCtxclient, mockLegacyClient)
|
||||
searchHandler := NewSearchHandler(tracing.NewNoopTracerService(), cfg, mockLegacyClient, mockClient, nil)
|
||||
searchHandler.client = resource.NewSearchClient(cfg, setting.UnifiedStorageConfigKeyDashboard, mockClient, mockLegacyClient)
|
||||
|
||||
rr := httptest.NewRecorder()
|
||||
req := httptest.NewRequest("GET", "/search", nil)
|
||||
@ -79,7 +80,6 @@ func TestSearchFallback(t *testing.T) {
|
||||
|
||||
t.Run("should hit legacy search handler on mode 2", func(t *testing.T) {
|
||||
mockClient := &MockClient{}
|
||||
mockUnifiedCtxclient := func(context.Context) resource.ResourceClient { return mockClient }
|
||||
mockLegacyClient := &MockClient{}
|
||||
|
||||
cfg := &setting.Cfg{
|
||||
@ -87,8 +87,8 @@ func TestSearchFallback(t *testing.T) {
|
||||
"dashboards.dashboard.grafana.app": {DualWriterMode: rest.Mode2},
|
||||
},
|
||||
}
|
||||
searchHandler := NewSearchHandler(tracing.NewNoopTracerService(), cfg, mockLegacyClient)
|
||||
searchHandler.client = resource.NewSearchClient(cfg, setting.UnifiedStorageConfigKeyDashboard, mockUnifiedCtxclient, mockLegacyClient)
|
||||
searchHandler := NewSearchHandler(tracing.NewNoopTracerService(), cfg, mockLegacyClient, mockClient, nil)
|
||||
searchHandler.client = resource.NewSearchClient(cfg, setting.UnifiedStorageConfigKeyDashboard, mockClient, mockLegacyClient)
|
||||
|
||||
rr := httptest.NewRecorder()
|
||||
req := httptest.NewRequest("GET", "/search", nil)
|
||||
@ -107,7 +107,6 @@ func TestSearchFallback(t *testing.T) {
|
||||
|
||||
t.Run("should hit unified storage search handler on mode 3", func(t *testing.T) {
|
||||
mockClient := &MockClient{}
|
||||
mockUnifiedCtxclient := func(context.Context) resource.ResourceClient { return mockClient }
|
||||
mockLegacyClient := &MockClient{}
|
||||
|
||||
cfg := &setting.Cfg{
|
||||
@ -115,8 +114,8 @@ func TestSearchFallback(t *testing.T) {
|
||||
"dashboards.dashboard.grafana.app": {DualWriterMode: rest.Mode3},
|
||||
},
|
||||
}
|
||||
searchHandler := NewSearchHandler(tracing.NewNoopTracerService(), cfg, mockLegacyClient)
|
||||
searchHandler.client = resource.NewSearchClient(cfg, setting.UnifiedStorageConfigKeyDashboard, mockUnifiedCtxclient, mockLegacyClient)
|
||||
searchHandler := NewSearchHandler(tracing.NewNoopTracerService(), cfg, mockLegacyClient, mockClient, nil)
|
||||
searchHandler.client = resource.NewSearchClient(cfg, setting.UnifiedStorageConfigKeyDashboard, mockClient, mockLegacyClient)
|
||||
|
||||
rr := httptest.NewRecorder()
|
||||
req := httptest.NewRequest("GET", "/search", nil)
|
||||
@ -135,7 +134,6 @@ func TestSearchFallback(t *testing.T) {
|
||||
|
||||
t.Run("should hit unified storage search handler on mode 4", func(t *testing.T) {
|
||||
mockClient := &MockClient{}
|
||||
mockUnifiedCtxclient := func(context.Context) resource.ResourceClient { return mockClient }
|
||||
mockLegacyClient := &MockClient{}
|
||||
|
||||
cfg := &setting.Cfg{
|
||||
@ -143,8 +141,8 @@ func TestSearchFallback(t *testing.T) {
|
||||
"dashboards.dashboard.grafana.app": {DualWriterMode: rest.Mode4},
|
||||
},
|
||||
}
|
||||
searchHandler := NewSearchHandler(tracing.NewNoopTracerService(), cfg, mockLegacyClient)
|
||||
searchHandler.client = resource.NewSearchClient(cfg, setting.UnifiedStorageConfigKeyDashboard, mockUnifiedCtxclient, mockLegacyClient)
|
||||
searchHandler := NewSearchHandler(tracing.NewNoopTracerService(), cfg, mockLegacyClient, mockClient, nil)
|
||||
searchHandler.client = resource.NewSearchClient(cfg, setting.UnifiedStorageConfigKeyDashboard, mockClient, mockLegacyClient)
|
||||
|
||||
rr := httptest.NewRecorder()
|
||||
req := httptest.NewRequest("GET", "/search", nil)
|
||||
@ -163,7 +161,6 @@ func TestSearchFallback(t *testing.T) {
|
||||
|
||||
t.Run("should hit unified storage search handler on mode 5", func(t *testing.T) {
|
||||
mockClient := &MockClient{}
|
||||
mockUnifiedCtxclient := func(context.Context) resource.ResourceClient { return mockClient }
|
||||
mockLegacyClient := &MockClient{}
|
||||
|
||||
cfg := &setting.Cfg{
|
||||
@ -171,8 +168,8 @@ func TestSearchFallback(t *testing.T) {
|
||||
"dashboards.dashboard.grafana.app": {DualWriterMode: rest.Mode5},
|
||||
},
|
||||
}
|
||||
searchHandler := NewSearchHandler(tracing.NewNoopTracerService(), cfg, mockLegacyClient)
|
||||
searchHandler.client = resource.NewSearchClient(cfg, setting.UnifiedStorageConfigKeyDashboard, mockUnifiedCtxclient, mockLegacyClient)
|
||||
searchHandler := NewSearchHandler(tracing.NewNoopTracerService(), cfg, mockLegacyClient, mockClient, nil)
|
||||
searchHandler.client = resource.NewSearchClient(cfg, setting.UnifiedStorageConfigKeyDashboard, mockClient, mockLegacyClient)
|
||||
|
||||
rr := httptest.NewRecorder()
|
||||
req := httptest.NewRequest("GET", "/search", nil)
|
||||
@ -191,17 +188,19 @@ func TestSearchFallback(t *testing.T) {
|
||||
}
|
||||
|
||||
func TestSearchHandler(t *testing.T) {
|
||||
// Create a mock client
|
||||
mockClient := &MockClient{}
|
||||
|
||||
// Initialize the search handler with the mock client
|
||||
searchHandler := SearchHandler{
|
||||
log: log.New("test", "test"),
|
||||
client: func(context.Context) resource.ResourceIndexClient { return mockClient },
|
||||
tracer: tracing.NewNoopTracerService(),
|
||||
}
|
||||
|
||||
t.Run("Multiple comma separated fields will be appended to default dashboard search fields", func(t *testing.T) {
|
||||
// Create a mock client
|
||||
mockClient := &MockClient{}
|
||||
|
||||
features := featuremgmt.WithFeatures()
|
||||
// Initialize the search handler with the mock client
|
||||
searchHandler := SearchHandler{
|
||||
log: log.New("test", "test"),
|
||||
client: mockClient,
|
||||
tracer: tracing.NewNoopTracerService(),
|
||||
features: features,
|
||||
}
|
||||
|
||||
rr := httptest.NewRecorder()
|
||||
req := httptest.NewRequest("GET", "/search?field=field1&field=field2&field=field3", nil)
|
||||
req.Header.Add("content-type", "application/json")
|
||||
@ -219,6 +218,18 @@ func TestSearchHandler(t *testing.T) {
|
||||
})
|
||||
|
||||
t.Run("Single field will be appended to default dashboard search fields", func(t *testing.T) {
|
||||
// Create a mock client
|
||||
mockClient := &MockClient{}
|
||||
|
||||
features := featuremgmt.WithFeatures()
|
||||
// Initialize the search handler with the mock client
|
||||
searchHandler := SearchHandler{
|
||||
log: log.New("test", "test"),
|
||||
client: mockClient,
|
||||
tracer: tracing.NewNoopTracerService(),
|
||||
features: features,
|
||||
}
|
||||
|
||||
rr := httptest.NewRecorder()
|
||||
req := httptest.NewRequest("GET", "/search?field=field1", nil)
|
||||
req.Header.Add("content-type", "application/json")
|
||||
@ -236,6 +247,18 @@ func TestSearchHandler(t *testing.T) {
|
||||
})
|
||||
|
||||
t.Run("Passing no fields will search using default dashboard fields", func(t *testing.T) {
|
||||
// Create a mock client
|
||||
mockClient := &MockClient{}
|
||||
|
||||
features := featuremgmt.WithFeatures()
|
||||
// Initialize the search handler with the mock client
|
||||
searchHandler := SearchHandler{
|
||||
log: log.New("test", "test"),
|
||||
client: mockClient,
|
||||
tracer: tracing.NewNoopTracerService(),
|
||||
features: features,
|
||||
}
|
||||
|
||||
rr := httptest.NewRecorder()
|
||||
req := httptest.NewRequest("GET", "/search", nil)
|
||||
req.Header.Add("content-type", "application/json")
|
||||
@ -253,6 +276,41 @@ func TestSearchHandler(t *testing.T) {
|
||||
})
|
||||
|
||||
t.Run("Sort - default sort by resource then title", func(t *testing.T) {
|
||||
rows := make([]*resource.ResourceTableRow, len(mockResults))
|
||||
for i, r := range mockResults {
|
||||
rows[i] = &resource.ResourceTableRow{
|
||||
Key: &resource.ResourceKey{
|
||||
Name: r.Name,
|
||||
Resource: r.Resource,
|
||||
},
|
||||
Cells: [][]byte{
|
||||
[]byte(r.Value),
|
||||
},
|
||||
}
|
||||
}
|
||||
|
||||
mockResponse := &resource.ResourceSearchResponse{
|
||||
Results: &resource.ResourceTable{
|
||||
Columns: []*resource.ResourceTableColumnDefinition{
|
||||
{Name: resource.SEARCH_FIELD_TITLE},
|
||||
},
|
||||
Rows: rows,
|
||||
},
|
||||
}
|
||||
// Create a mock client
|
||||
mockClient := &MockClient{
|
||||
MockResponses: []*resource.ResourceSearchResponse{mockResponse},
|
||||
}
|
||||
|
||||
features := featuremgmt.WithFeatures()
|
||||
// Initialize the search handler with the mock client
|
||||
searchHandler := SearchHandler{
|
||||
log: log.New("test", "test"),
|
||||
client: mockClient,
|
||||
tracer: tracing.NewNoopTracerService(),
|
||||
features: features,
|
||||
}
|
||||
|
||||
rr := httptest.NewRecorder()
|
||||
req := httptest.NewRequest("GET", "/search", nil)
|
||||
req.Header.Add("content-type", "application/json")
|
||||
@ -280,6 +338,125 @@ func TestSearchHandler(t *testing.T) {
|
||||
})
|
||||
}
|
||||
|
||||
func TestSearchHandlerSharedDashboards(t *testing.T) {
|
||||
t.Run("should bail out if FlagUnifiedStorageSearchPermissionFiltering is not enabled globally", func(t *testing.T) {
|
||||
mockClient := &MockClient{}
|
||||
|
||||
features := featuremgmt.WithFeatures()
|
||||
searchHandler := SearchHandler{
|
||||
log: log.New("test", "test"),
|
||||
client: mockClient,
|
||||
tracer: tracing.NewNoopTracerService(),
|
||||
features: features,
|
||||
}
|
||||
rr := httptest.NewRecorder()
|
||||
req := httptest.NewRequest("GET", "/search?folder=sharedwithme", nil)
|
||||
req.Header.Add("content-type", "application/json")
|
||||
req = req.WithContext(identity.WithRequester(req.Context(), &user.SignedInUser{Namespace: "test"}))
|
||||
|
||||
searchHandler.DoSearch(rr, req)
|
||||
|
||||
assert.Equal(t, mockClient.CallCount, 1)
|
||||
})
|
||||
|
||||
t.Run("should return the dashboards shared with the user", func(t *testing.T) {
|
||||
// dashboardSearchRequest
|
||||
mockResponse1 := &resource.ResourceSearchResponse{
|
||||
Results: &resource.ResourceTable{
|
||||
Columns: []*resource.ResourceTableColumnDefinition{
|
||||
{
|
||||
Name: "folder",
|
||||
},
|
||||
},
|
||||
Rows: []*resource.ResourceTableRow{
|
||||
{
|
||||
Key: &resource.ResourceKey{
|
||||
Name: "dashboardinroot",
|
||||
Resource: "dashboard",
|
||||
},
|
||||
Cells: [][]byte{[]byte("")}, // root folder doesn't have uid
|
||||
},
|
||||
{
|
||||
Key: &resource.ResourceKey{
|
||||
Name: "dashboardinprivatefolder",
|
||||
Resource: "dashboard",
|
||||
},
|
||||
Cells: [][]byte{
|
||||
[]byte("privatefolder"), // folder uid
|
||||
},
|
||||
},
|
||||
{
|
||||
Key: &resource.ResourceKey{
|
||||
Name: "dashboardinpublicfolder",
|
||||
Resource: "dashboard",
|
||||
},
|
||||
Cells: [][]byte{
|
||||
[]byte("publicfolder"), // folder uid
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
}
|
||||
|
||||
// folderSearchRequest
|
||||
mockResponse2 := &resource.ResourceSearchResponse{
|
||||
Results: &resource.ResourceTable{
|
||||
Columns: []*resource.ResourceTableColumnDefinition{
|
||||
{
|
||||
Name: "folder",
|
||||
},
|
||||
},
|
||||
Rows: []*resource.ResourceTableRow{
|
||||
{
|
||||
Key: &resource.ResourceKey{
|
||||
Name: "publicfolder",
|
||||
Resource: "folder",
|
||||
},
|
||||
Cells: [][]byte{
|
||||
[]byte(""), // root folder uid
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
}
|
||||
|
||||
mockClient := &MockClient{
|
||||
MockResponses: []*resource.ResourceSearchResponse{mockResponse1, mockResponse2},
|
||||
}
|
||||
|
||||
features := featuremgmt.WithFeatures(featuremgmt.FlagUnifiedStorageSearchPermissionFiltering)
|
||||
searchHandler := SearchHandler{
|
||||
log: log.New("test", "test"),
|
||||
client: mockClient,
|
||||
tracer: tracing.NewNoopTracerService(),
|
||||
features: features,
|
||||
}
|
||||
rr := httptest.NewRecorder()
|
||||
req := httptest.NewRequest("GET", "/search?folder=sharedwithme", nil)
|
||||
req.Header.Add("content-type", "application/json")
|
||||
allPermissions := make(map[int64]map[string][]string)
|
||||
permissions := make(map[string][]string)
|
||||
permissions[dashboards.ActionDashboardsRead] = []string{"dashboards:uid:dashboardinroot", "dashboards:uid:dashboardinprivatefolder", "dashboards:uid:dashboardinpublicfolder"}
|
||||
allPermissions[1] = permissions
|
||||
req = req.WithContext(identity.WithRequester(req.Context(), &user.SignedInUser{Namespace: "test", OrgID: 1, Permissions: allPermissions}))
|
||||
|
||||
searchHandler.DoSearch(rr, req)
|
||||
|
||||
assert.Equal(t, mockClient.CallCount, 3)
|
||||
|
||||
// first call gets all dashboards user has permission for
|
||||
firstCall := mockClient.MockCalls[0]
|
||||
assert.Equal(t, firstCall.Options.Fields[0].Values, []string{"dashboardinroot", "dashboardinprivatefolder", "dashboardinpublicfolder"})
|
||||
// second call gets folders associated with the previous dashboards
|
||||
secondCall := mockClient.MockCalls[1]
|
||||
assert.Equal(t, secondCall.Options.Fields[0].Values, []string{"privatefolder", "publicfolder"})
|
||||
// lastly, search ONLY for dashboards user has permission to read that are within folders the user does NOT have
|
||||
// permission to read
|
||||
thirdCall := mockClient.MockCalls[2]
|
||||
assert.Equal(t, thirdCall.Options.Fields[0].Values, []string{"dashboardinprivatefolder"})
|
||||
})
|
||||
}
|
||||
|
||||
// MockClient implements the ResourceIndexClient interface for testing
|
||||
type MockClient struct {
|
||||
resource.ResourceIndexClient
|
||||
@ -287,6 +464,10 @@ type MockClient struct {
|
||||
|
||||
// Capture the last SearchRequest for assertions
|
||||
LastSearchRequest *resource.ResourceSearchRequest
|
||||
|
||||
MockResponses []*resource.ResourceSearchResponse
|
||||
MockCalls []*resource.ResourceSearchRequest
|
||||
CallCount int
|
||||
}
|
||||
|
||||
type MockResult struct {
|
||||
@ -320,28 +501,16 @@ var mockResults = []MockResult{
|
||||
|
||||
func (m *MockClient) Search(ctx context.Context, in *resource.ResourceSearchRequest, opts ...grpc.CallOption) (*resource.ResourceSearchResponse, error) {
|
||||
m.LastSearchRequest = in
|
||||
m.MockCalls = append(m.MockCalls, in)
|
||||
|
||||
rows := make([]*resource.ResourceTableRow, len(mockResults))
|
||||
for i, r := range mockResults {
|
||||
rows[i] = &resource.ResourceTableRow{
|
||||
Key: &resource.ResourceKey{
|
||||
Name: r.Name,
|
||||
Resource: r.Resource,
|
||||
},
|
||||
Cells: [][]byte{
|
||||
[]byte(r.Value),
|
||||
},
|
||||
}
|
||||
var response *resource.ResourceSearchResponse
|
||||
if m.CallCount < len(m.MockResponses) {
|
||||
response = m.MockResponses[m.CallCount]
|
||||
}
|
||||
|
||||
return &resource.ResourceSearchResponse{
|
||||
Results: &resource.ResourceTable{
|
||||
Columns: []*resource.ResourceTableColumnDefinition{
|
||||
{Name: resource.SEARCH_FIELD_TITLE},
|
||||
},
|
||||
Rows: rows,
|
||||
},
|
||||
}, nil
|
||||
m.CallCount = m.CallCount + 1
|
||||
|
||||
return response, nil
|
||||
}
|
||||
func (m *MockClient) GetStats(ctx context.Context, in *resource.ResourceStatsRequest, opts ...grpc.CallOption) (*resource.ResourceStatsResponse, error) {
|
||||
return nil, nil
|
||||
|
@ -125,7 +125,7 @@ func (r *DTOConnector) Connect(ctx context.Context, name string, opts runtime.Ob
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
if repo != nil && repo.Name == "plugin" {
|
||||
if repo != nil && repo.Name == dashboard.PluginIDRepoName {
|
||||
dto.PluginID = repo.Path
|
||||
}
|
||||
|
||||
|
@ -82,7 +82,7 @@ func RegisterAPIService(cfg *setting.Cfg, features featuremgmt.FeatureToggles,
|
||||
features: features,
|
||||
accessControl: accessControl,
|
||||
unified: unified,
|
||||
search: dashboard.NewSearchHandler(tracing, cfg, legacyDashboardSearcher),
|
||||
search: dashboard.NewSearchHandler(tracing, cfg, legacyDashboardSearcher, unified, features),
|
||||
|
||||
legacy: &dashboard.DashboardStorage{
|
||||
Resource: dashboardv0alpha1.DashboardResourceInfo,
|
||||
|
@ -1,175 +0,0 @@
|
||||
package peakq
|
||||
|
||||
import (
|
||||
"github.com/prometheus/client_golang/prometheus"
|
||||
metav1 "k8s.io/apimachinery/pkg/apis/meta/v1"
|
||||
"k8s.io/apimachinery/pkg/runtime"
|
||||
"k8s.io/apimachinery/pkg/runtime/schema"
|
||||
"k8s.io/apiserver/pkg/authorization/authorizer"
|
||||
"k8s.io/apiserver/pkg/registry/rest"
|
||||
genericapiserver "k8s.io/apiserver/pkg/server"
|
||||
"k8s.io/kube-openapi/pkg/common"
|
||||
"k8s.io/kube-openapi/pkg/spec3"
|
||||
"k8s.io/kube-openapi/pkg/validation/spec"
|
||||
|
||||
peakq "github.com/grafana/grafana/pkg/apis/peakq/v0alpha1"
|
||||
grafanaregistry "github.com/grafana/grafana/pkg/apiserver/registry/generic"
|
||||
"github.com/grafana/grafana/pkg/services/apiserver/builder"
|
||||
"github.com/grafana/grafana/pkg/services/featuremgmt"
|
||||
)
|
||||
|
||||
var _ builder.APIGroupBuilder = (*PeakQAPIBuilder)(nil)
|
||||
|
||||
// This is used just so wire has something unique to return
|
||||
type PeakQAPIBuilder struct{}
|
||||
|
||||
func NewPeakQAPIBuilder() *PeakQAPIBuilder {
|
||||
return &PeakQAPIBuilder{}
|
||||
}
|
||||
|
||||
func RegisterAPIService(features featuremgmt.FeatureToggles, apiregistration builder.APIRegistrar, reg prometheus.Registerer) *PeakQAPIBuilder {
|
||||
if !featuremgmt.AnyEnabled(features,
|
||||
featuremgmt.FlagQueryService,
|
||||
featuremgmt.FlagQueryLibrary,
|
||||
featuremgmt.FlagGrafanaAPIServerWithExperimentalAPIs) {
|
||||
return nil // skip registration unless explicitly added (or all experimental are added)
|
||||
}
|
||||
builder := NewPeakQAPIBuilder()
|
||||
apiregistration.RegisterAPI(builder)
|
||||
return builder
|
||||
}
|
||||
|
||||
func (b *PeakQAPIBuilder) GetAuthorizer() authorizer.Authorizer {
|
||||
return nil // default authorizer is fine
|
||||
}
|
||||
|
||||
func (b *PeakQAPIBuilder) GetGroupVersion() schema.GroupVersion {
|
||||
return peakq.SchemeGroupVersion
|
||||
}
|
||||
|
||||
func (b *PeakQAPIBuilder) InstallSchema(scheme *runtime.Scheme) error {
|
||||
gv := peakq.SchemeGroupVersion
|
||||
err := peakq.AddToScheme(scheme)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
// Link this version to the internal representation.
|
||||
// This is used for server-side-apply (PATCH), and avoids the error:
|
||||
// "no kind is registered for the type"
|
||||
// addKnownTypes(scheme, schema.GroupVersion{
|
||||
// Group: peakq.GROUP,
|
||||
// Version: runtime.APIVersionInternal,
|
||||
// })
|
||||
metav1.AddToGroupVersion(scheme, gv)
|
||||
return scheme.SetVersionPriority(gv)
|
||||
}
|
||||
|
||||
func (b *PeakQAPIBuilder) UpdateAPIGroupInfo(apiGroupInfo *genericapiserver.APIGroupInfo, opts builder.APIGroupOptions) error {
|
||||
resourceInfo := peakq.QueryTemplateResourceInfo
|
||||
storage := map[string]rest.Storage{}
|
||||
|
||||
peakqStorage, err := grafanaregistry.NewRegistryStore(opts.Scheme, resourceInfo, opts.OptsGetter)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
storage[resourceInfo.StoragePath()] = peakqStorage
|
||||
storage[resourceInfo.StoragePath("render")] = &renderREST{
|
||||
getter: peakqStorage,
|
||||
}
|
||||
|
||||
apiGroupInfo.VersionedResourcesStorageMap[peakq.VERSION] = storage
|
||||
return nil
|
||||
}
|
||||
|
||||
func (b *PeakQAPIBuilder) GetOpenAPIDefinitions() common.GetOpenAPIDefinitions {
|
||||
return peakq.GetOpenAPIDefinitions
|
||||
}
|
||||
|
||||
// NOT A GREAT APPROACH... BUT will make a UI for statically defined
|
||||
func (b *PeakQAPIBuilder) GetAPIRoutes() *builder.APIRoutes {
|
||||
defs := peakq.GetOpenAPIDefinitions(func(path string) spec.Ref { return spec.Ref{} })
|
||||
renderedQuerySchema := defs["github.com/grafana/grafana/pkg/apis/peakq/v0alpha1.RenderedQuery"].Schema
|
||||
queryTemplateSpecSchema := defs["github.com/grafana/grafana/pkg/apis/peakq/v0alpha1.QueryTemplateSpec"].Schema
|
||||
|
||||
params := []*spec3.Parameter{
|
||||
{
|
||||
ParameterProps: spec3.ParameterProps{
|
||||
// Arbitrary name. It won't appear in the request URL,
|
||||
// but will be used in code generated from this OAS spec
|
||||
Name: "variables",
|
||||
In: "query",
|
||||
Schema: spec.MapProperty(spec.ArrayProperty(spec.StringProperty())),
|
||||
Style: "form",
|
||||
Explode: true,
|
||||
Description: "Each variable is prefixed with var-{variable}={value}",
|
||||
Example: map[string][]string{
|
||||
"var-metricName": {"up"},
|
||||
"var-another": {"first", "second"},
|
||||
},
|
||||
},
|
||||
},
|
||||
}
|
||||
return &builder.APIRoutes{
|
||||
Root: []builder.APIRouteHandler{
|
||||
{
|
||||
Path: "render",
|
||||
Spec: &spec3.PathProps{
|
||||
Summary: "an example at the root level",
|
||||
Description: "longer description here?",
|
||||
Post: &spec3.Operation{
|
||||
OperationProps: spec3.OperationProps{
|
||||
Parameters: params,
|
||||
RequestBody: &spec3.RequestBody{
|
||||
RequestBodyProps: spec3.RequestBodyProps{
|
||||
Content: map[string]*spec3.MediaType{
|
||||
"application/json": {
|
||||
MediaTypeProps: spec3.MediaTypeProps{
|
||||
Schema: &queryTemplateSpecSchema,
|
||||
// Example: basicTemplateSpec,
|
||||
Examples: map[string]*spec3.Example{
|
||||
"test": {
|
||||
ExampleProps: spec3.ExampleProps{
|
||||
Summary: "hello",
|
||||
Value: basicTemplateSpec,
|
||||
},
|
||||
},
|
||||
"test2": {
|
||||
ExampleProps: spec3.ExampleProps{
|
||||
Summary: "hello2",
|
||||
Value: basicTemplateSpec,
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
Responses: &spec3.Responses{
|
||||
ResponsesProps: spec3.ResponsesProps{
|
||||
StatusCodeResponses: map[int]*spec3.Response{
|
||||
200: {
|
||||
ResponseProps: spec3.ResponseProps{
|
||||
Description: "OK",
|
||||
Content: map[string]*spec3.MediaType{
|
||||
"application/json": {
|
||||
MediaTypeProps: spec3.MediaTypeProps{
|
||||
Schema: &renderedQuerySchema,
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
Handler: renderPOSTHandler,
|
||||
},
|
||||
},
|
||||
}
|
||||
}
|
@ -1,107 +0,0 @@
|
||||
package peakq
|
||||
|
||||
import (
|
||||
"context"
|
||||
"encoding/json"
|
||||
"fmt"
|
||||
"net/http"
|
||||
"net/url"
|
||||
"strings"
|
||||
|
||||
v1 "k8s.io/apimachinery/pkg/apis/meta/v1"
|
||||
"k8s.io/apimachinery/pkg/runtime"
|
||||
"k8s.io/apiserver/pkg/registry/rest"
|
||||
|
||||
peakq "github.com/grafana/grafana/pkg/apis/peakq/v0alpha1"
|
||||
"github.com/grafana/grafana/pkg/apis/query/v0alpha1/template"
|
||||
)
|
||||
|
||||
type renderREST struct {
|
||||
getter rest.Getter
|
||||
}
|
||||
|
||||
var _ = rest.Connecter(&renderREST{})
|
||||
|
||||
func (r *renderREST) New() runtime.Object {
|
||||
return &peakq.RenderedQuery{}
|
||||
}
|
||||
|
||||
func (r *renderREST) Destroy() {
|
||||
}
|
||||
|
||||
func (r *renderREST) ConnectMethods() []string {
|
||||
return []string{"GET"}
|
||||
}
|
||||
|
||||
func (r *renderREST) NewConnectOptions() (runtime.Object, bool, string) {
|
||||
return nil, false, "" // true means you can use the trailing path as a variable
|
||||
}
|
||||
|
||||
func (r *renderREST) Connect(ctx context.Context, name string, opts runtime.Object, responder rest.Responder) (http.Handler, error) {
|
||||
obj, err := r.getter.Get(ctx, name, &v1.GetOptions{})
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
t, ok := obj.(*peakq.QueryTemplate)
|
||||
if !ok {
|
||||
return nil, fmt.Errorf("expected template")
|
||||
}
|
||||
|
||||
return http.HandlerFunc(func(w http.ResponseWriter, req *http.Request) {
|
||||
input, err := makeVarMapFromParams(req.URL.Query())
|
||||
if err != nil {
|
||||
responder.Error(err)
|
||||
return
|
||||
}
|
||||
out, err := template.RenderTemplate(t.Spec, input)
|
||||
if err != nil {
|
||||
responder.Error(fmt.Errorf("failed to render: %w", err))
|
||||
return
|
||||
}
|
||||
responder.Object(http.StatusOK, &peakq.RenderedQuery{
|
||||
Targets: out,
|
||||
})
|
||||
}), nil
|
||||
}
|
||||
|
||||
func renderPOSTHandler(w http.ResponseWriter, req *http.Request) {
|
||||
input, err := makeVarMapFromParams(req.URL.Query())
|
||||
if err != nil {
|
||||
_, _ = w.Write([]byte("ERROR: " + err.Error()))
|
||||
w.WriteHeader(500)
|
||||
return
|
||||
}
|
||||
|
||||
var qT peakq.QueryTemplate
|
||||
err = json.NewDecoder(req.Body).Decode(&qT.Spec)
|
||||
if err != nil {
|
||||
_, _ = w.Write([]byte("ERROR: " + err.Error()))
|
||||
w.WriteHeader(500)
|
||||
return
|
||||
}
|
||||
results, err := template.RenderTemplate(qT.Spec, input)
|
||||
if err != nil {
|
||||
_, _ = w.Write([]byte("ERROR: " + err.Error()))
|
||||
w.WriteHeader(500)
|
||||
return
|
||||
}
|
||||
|
||||
w.Header().Set("Content-Type", "application/json")
|
||||
w.WriteHeader(http.StatusOK)
|
||||
_ = json.NewEncoder(w).Encode(peakq.RenderedQuery{
|
||||
Targets: results,
|
||||
})
|
||||
}
|
||||
|
||||
// Replicate the grafana dashboard URL syntax
|
||||
// &var-abc=1&var=abc=2&var-xyz=3...
|
||||
func makeVarMapFromParams(v url.Values) (map[string][]string, error) {
|
||||
input := make(map[string][]string, len(v))
|
||||
for key, vals := range v {
|
||||
if !strings.HasPrefix(key, "var-") {
|
||||
continue
|
||||
}
|
||||
input[key[4:]] = vals
|
||||
}
|
||||
return input, nil
|
||||
}
|
@ -1,74 +0,0 @@
|
||||
package peakq
|
||||
|
||||
import (
|
||||
"github.com/grafana/grafana-plugin-sdk-go/data"
|
||||
apidata "github.com/grafana/grafana-plugin-sdk-go/experimental/apis/data/v0alpha1"
|
||||
|
||||
"github.com/grafana/grafana/pkg/apis/query/v0alpha1/template"
|
||||
)
|
||||
|
||||
var basicTemplateSpec = template.QueryTemplate{
|
||||
Title: "Test",
|
||||
Variables: []template.TemplateVariable{
|
||||
{
|
||||
Key: "metricName",
|
||||
DefaultValues: []string{`down`},
|
||||
},
|
||||
},
|
||||
Targets: []template.Target{
|
||||
{
|
||||
DataType: data.FrameTypeUnknown,
|
||||
//DataTypeVersion: data.FrameTypeVersion{0, 0},
|
||||
Variables: map[string][]template.VariableReplacement{
|
||||
"metricName": {
|
||||
{
|
||||
Path: "$.expr",
|
||||
Position: &template.Position{
|
||||
Start: 0,
|
||||
End: 10,
|
||||
},
|
||||
},
|
||||
{
|
||||
Path: "$.expr",
|
||||
Position: &template.Position{
|
||||
Start: 13,
|
||||
End: 23,
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
|
||||
Properties: apidata.NewDataQuery(map[string]any{
|
||||
"refId": "A", // TODO: Set when Where?
|
||||
"datasource": map[string]any{
|
||||
"type": "prometheus",
|
||||
"uid": "foo", // TODO: Probably a default templating thing to set this.
|
||||
},
|
||||
"editorMode": "builder",
|
||||
"expr": "metricName + metricName + 42",
|
||||
"instant": true,
|
||||
"range": false,
|
||||
"exemplar": false,
|
||||
}),
|
||||
},
|
||||
},
|
||||
}
|
||||
|
||||
var basicTemplateRenderedTargets = []template.Target{
|
||||
{
|
||||
DataType: data.FrameTypeUnknown,
|
||||
//DataTypeVersion: data.FrameTypeVersion{0, 0},
|
||||
Properties: apidata.NewDataQuery(map[string]any{
|
||||
"refId": "A", // TODO: Set when Where?
|
||||
"datasource": map[string]any{
|
||||
"type": "prometheus",
|
||||
"uid": "foo", // TODO: Probably a default templating thing to set this.
|
||||
},
|
||||
"editorMode": "builder",
|
||||
"expr": "up + up + 42",
|
||||
"instant": true,
|
||||
"range": false,
|
||||
"exemplar": false,
|
||||
}),
|
||||
},
|
||||
}
|
@ -1,21 +0,0 @@
|
||||
package peakq
|
||||
|
||||
import (
|
||||
"encoding/json"
|
||||
"fmt"
|
||||
"testing"
|
||||
|
||||
"github.com/stretchr/testify/require"
|
||||
|
||||
"github.com/grafana/grafana/pkg/apis/query/v0alpha1/template"
|
||||
)
|
||||
|
||||
func TestRender(t *testing.T) {
|
||||
rT, err := template.RenderTemplate(basicTemplateSpec, map[string][]string{"metricName": {"up"}})
|
||||
require.NoError(t, err)
|
||||
require.Equal(t,
|
||||
basicTemplateRenderedTargets[0].Properties.GetString("expr"),
|
||||
rT[0].Properties.GetString("expr"))
|
||||
b, _ := json.MarshalIndent(basicTemplateSpec, "", " ")
|
||||
fmt.Println(string(b))
|
||||
}
|
@ -104,9 +104,8 @@ func filterAndAppendItem(item scope.ScopeNode, parent string, query string, resu
|
||||
return // Someday this will have an index in raw storage on parentName
|
||||
}
|
||||
|
||||
// skip if query is passed and title doesn't match.
|
||||
// HasPrefix is not the end goal but something that that gets us started.
|
||||
if query != "" && !strings.HasPrefix(item.Spec.Title, query) {
|
||||
// skip if query is passed and title doesn't contain the query.
|
||||
if query != "" && !strings.Contains(item.Spec.Title, query) {
|
||||
return
|
||||
}
|
||||
|
||||
|
@ -99,7 +99,7 @@ func (f *findScopeDashboardsREST) Connect(ctx context.Context, name string, opts
|
||||
return strings.Compare(i.Status.DashboardTitle, j.Status.DashboardTitle)
|
||||
})
|
||||
|
||||
logger.FromContext(req.Context()).Debug("find scopedashboardbinding", "raw", len(all.Items), "filtered", len(results.Items))
|
||||
logger.FromContext(req.Context()).Debug("find scopedashboardbinding", "raw", len(all.Items), "filtered", len(results.Items), "scopeQueryParams", strings.Join(scopes, ","))
|
||||
|
||||
responder.Object(200, results)
|
||||
}), nil
|
||||
|
@ -13,7 +13,6 @@ import (
|
||||
"github.com/grafana/grafana/pkg/registry/apis/featuretoggle"
|
||||
"github.com/grafana/grafana/pkg/registry/apis/folders"
|
||||
"github.com/grafana/grafana/pkg/registry/apis/iam"
|
||||
"github.com/grafana/grafana/pkg/registry/apis/peakq"
|
||||
"github.com/grafana/grafana/pkg/registry/apis/provisioning"
|
||||
"github.com/grafana/grafana/pkg/registry/apis/query"
|
||||
"github.com/grafana/grafana/pkg/registry/apis/scope"
|
||||
@ -40,7 +39,6 @@ var WireSet = wire.NewSet(
|
||||
datasource.RegisterAPIService,
|
||||
folders.RegisterAPIService,
|
||||
iam.RegisterAPIService,
|
||||
peakq.RegisterAPIService,
|
||||
provisioning.RegisterAPIService,
|
||||
service.RegisterAPIService,
|
||||
query.RegisterAPIService,
|
||||
|
@ -8,6 +8,7 @@ import (
|
||||
advisorapp "github.com/grafana/grafana/apps/advisor/pkg/app"
|
||||
"github.com/grafana/grafana/apps/advisor/pkg/app/checkregistry"
|
||||
"github.com/grafana/grafana/pkg/services/apiserver/builder/runner"
|
||||
"github.com/grafana/grafana/pkg/setting"
|
||||
)
|
||||
|
||||
type AdvisorAppProvider struct {
|
||||
@ -16,13 +17,19 @@ type AdvisorAppProvider struct {
|
||||
|
||||
func RegisterApp(
|
||||
checkRegistry checkregistry.CheckService,
|
||||
cfg *setting.Cfg,
|
||||
) *AdvisorAppProvider {
|
||||
provider := &AdvisorAppProvider{}
|
||||
pluginConfig := cfg.PluginSettings["grafana-advisor-app"]
|
||||
specificConfig := checkregistry.AdvisorAppConfig{
|
||||
CheckRegistry: checkRegistry,
|
||||
PluginConfig: pluginConfig,
|
||||
}
|
||||
appCfg := &runner.AppBuilderConfig{
|
||||
OpenAPIDefGetter: advisorv0alpha1.GetOpenAPIDefinitions,
|
||||
ManagedKinds: advisorapp.GetKinds(),
|
||||
Authorizer: advisorapp.GetAuthorizer(),
|
||||
CustomConfig: any(checkRegistry),
|
||||
CustomConfig: any(specificConfig),
|
||||
}
|
||||
provider.Provider = simple.NewAppProvider(apis.LocalManifest(), appCfg, advisorapp.New)
|
||||
return provider
|
||||
|
@ -158,7 +158,6 @@ import (
|
||||
"github.com/grafana/grafana/pkg/services/user"
|
||||
"github.com/grafana/grafana/pkg/services/user/userimpl"
|
||||
"github.com/grafana/grafana/pkg/setting"
|
||||
"github.com/grafana/grafana/pkg/storage/unified"
|
||||
unifiedsearch "github.com/grafana/grafana/pkg/storage/unified/search"
|
||||
"github.com/grafana/grafana/pkg/tsdb/azuremonitor"
|
||||
cloudmonitoring "github.com/grafana/grafana/pkg/tsdb/cloud-monitoring"
|
||||
@ -214,7 +213,6 @@ var wireBasicSet = wire.NewSet(
|
||||
mysql.ProvideService,
|
||||
mssql.ProvideService,
|
||||
store.ProvideEntityEventsService,
|
||||
unified.ProvideUnifiedStorageClient,
|
||||
httpclientprovider.New,
|
||||
wire.Bind(new(httpclient.Provider), new(*sdkhttpclient.Provider)),
|
||||
serverlock.ProvideService,
|
||||
|
@ -7,6 +7,7 @@ package server
|
||||
import (
|
||||
"github.com/google/wire"
|
||||
|
||||
"github.com/grafana/grafana/pkg/storage/unified"
|
||||
search2 "github.com/grafana/grafana/pkg/storage/unified/search"
|
||||
|
||||
"github.com/grafana/grafana/pkg/infra/metrics"
|
||||
@ -116,6 +117,8 @@ var wireExtsBasicSet = wire.NewSet(
|
||||
search2.ProvideDocumentBuilders,
|
||||
sandbox.ProvideService,
|
||||
wire.Bind(new(sandbox.Sandbox), new(*sandbox.Service)),
|
||||
wire.Struct(new(unified.Options), "*"),
|
||||
unified.ProvideUnifiedStorageClient,
|
||||
)
|
||||
|
||||
var wireExtsSet = wire.NewSet(
|
||||
|
@ -54,7 +54,7 @@ func ProvideService(
|
||||
cfg *setting.Cfg, db db.DB, routeRegister routing.RouteRegister, cache *localcache.CacheService,
|
||||
accessControl accesscontrol.AccessControl, userService user.Service, actionResolver accesscontrol.ActionResolver,
|
||||
features featuremgmt.FeatureToggles, tracer tracing.Tracer, permRegistry permreg.PermissionRegistry,
|
||||
lock *serverlock.ServerLockService, folderService folder.Service,
|
||||
lock *serverlock.ServerLockService,
|
||||
) (*Service, error) {
|
||||
service := ProvideOSSService(
|
||||
cfg,
|
||||
@ -66,7 +66,6 @@ func ProvideService(
|
||||
db,
|
||||
permRegistry,
|
||||
lock,
|
||||
folderService,
|
||||
)
|
||||
|
||||
api.NewAccessControlAPI(routeRegister, accessControl, service, userService, features).RegisterAPIEndpoints()
|
||||
@ -89,7 +88,6 @@ func ProvideOSSService(
|
||||
cfg *setting.Cfg, store accesscontrol.Store, actionResolver accesscontrol.ActionResolver,
|
||||
cache *localcache.CacheService, features featuremgmt.FeatureToggles, tracer tracing.Tracer,
|
||||
db db.DB, permRegistry permreg.PermissionRegistry, lock *serverlock.ServerLockService,
|
||||
folderService folder.Service,
|
||||
) *Service {
|
||||
s := &Service{
|
||||
actionResolver: actionResolver,
|
||||
|
@ -73,7 +73,6 @@ func TestUsageMetrics(t *testing.T) {
|
||||
nil,
|
||||
permreg.ProvidePermissionRegistry(),
|
||||
nil,
|
||||
nil,
|
||||
)
|
||||
assert.Equal(t, tt.expectedValue, s.GetUsageStats(context.Background())["stats.oss.accesscontrol.enabled.count"])
|
||||
})
|
||||
|
@ -47,12 +47,12 @@ func ProvideFolderPermissions(
|
||||
folderStore := folderimpl.ProvideDashboardFolderStore(sqlStore)
|
||||
fService := folderimpl.ProvideService(
|
||||
fStore, ac, bus.ProvideBus(tracing.InitializeTracerForTest()), dashboardStore, folderStore,
|
||||
nil, sqlStore, features, supportbundlestest.NewFakeBundleService(), nil, cfg, nil, tracing.InitializeTracerForTest())
|
||||
nil, sqlStore, features, supportbundlestest.NewFakeBundleService(), nil, cfg, nil, tracing.InitializeTracerForTest(), nil)
|
||||
|
||||
acSvc := acimpl.ProvideOSSService(
|
||||
cfg, acdb.ProvideService(sqlStore), actionSets, localcache.ProvideService(),
|
||||
features, tracing.InitializeTracerForTest(), sqlStore, permreg.ProvidePermissionRegistry(),
|
||||
nil, fService,
|
||||
nil,
|
||||
)
|
||||
|
||||
orgService, err := orgimpl.ProvideService(sqlStore, cfg, quotaService)
|
||||
|
@ -50,9 +50,9 @@ func TestIntegrationAuthorize(t *testing.T) {
|
||||
ac := acimpl.ProvideAccessControl(featuremgmt.WithFeatures())
|
||||
folderSvc := folderimpl.ProvideService(
|
||||
fStore, accesscontrolmock.New(), bus.ProvideBus(tracing.InitializeTracerForTest()), dashStore, folderStore,
|
||||
nil, sql, featuremgmt.WithFeatures(), supportbundlestest.NewFakeBundleService(), nil, cfg, nil, tracing.InitializeTracerForTest())
|
||||
nil, sql, featuremgmt.WithFeatures(), supportbundlestest.NewFakeBundleService(), nil, cfg, nil, tracing.InitializeTracerForTest(), nil)
|
||||
dashSvc, err := dashboardsservice.ProvideDashboardServiceImpl(cfg, dashStore, folderStore, featuremgmt.WithFeatures(), accesscontrolmock.NewMockedPermissionsService(),
|
||||
ac, folderSvc, fStore, nil, client.MockTestRestConfig{}, nil, quotatest.New(false, nil), nil, nil)
|
||||
ac, folderSvc, fStore, nil, client.MockTestRestConfig{}, nil, quotatest.New(false, nil), nil, nil, nil)
|
||||
require.NoError(t, err)
|
||||
dashSvc.RegisterDashboardPermissions(accesscontrolmock.NewMockedPermissionsService())
|
||||
|
||||
|
@ -62,9 +62,9 @@ func TestIntegrationAnnotationListingWithRBAC(t *testing.T) {
|
||||
ac := acimpl.ProvideAccessControl(featuremgmt.WithFeatures())
|
||||
folderSvc := folderimpl.ProvideService(
|
||||
fStore, accesscontrolmock.New(), bus.ProvideBus(tracing.InitializeTracerForTest()), dashStore, folderStore,
|
||||
nil, sql, featuremgmt.WithFeatures(), supportbundlestest.NewFakeBundleService(), nil, cfg, nil, tracing.InitializeTracerForTest())
|
||||
nil, sql, featuremgmt.WithFeatures(), supportbundlestest.NewFakeBundleService(), nil, cfg, nil, tracing.InitializeTracerForTest(), nil)
|
||||
dashSvc, err := dashboardsservice.ProvideDashboardServiceImpl(cfg, dashStore, folderStore, featuremgmt.WithFeatures(), accesscontrolmock.NewMockedPermissionsService(),
|
||||
ac, folderSvc, fStore, nil, client.MockTestRestConfig{}, nil, quotatest.New(false, nil), nil, nil)
|
||||
ac, folderSvc, fStore, nil, client.MockTestRestConfig{}, nil, quotatest.New(false, nil), nil, nil, nil)
|
||||
require.NoError(t, err)
|
||||
dashSvc.RegisterDashboardPermissions(accesscontrolmock.NewMockedPermissionsService())
|
||||
repo := ProvideService(sql, cfg, features, tagService, tracing.InitializeTracerForTest(), ruleStore, dashSvc)
|
||||
@ -245,9 +245,9 @@ func TestIntegrationAnnotationListingWithInheritedRBAC(t *testing.T) {
|
||||
folderStore := folderimpl.ProvideDashboardFolderStore(sql)
|
||||
folderSvc := folderimpl.ProvideService(
|
||||
fStore, ac, bus.ProvideBus(tracing.InitializeTracerForTest()), dashStore, folderStore,
|
||||
nil, sql, features, supportbundlestest.NewFakeBundleService(), nil, cfg, nil, tracing.InitializeTracerForTest())
|
||||
nil, sql, features, supportbundlestest.NewFakeBundleService(), nil, cfg, nil, tracing.InitializeTracerForTest(), nil)
|
||||
dashSvc, err := dashboardsservice.ProvideDashboardServiceImpl(cfg, dashStore, folderStore, features, accesscontrolmock.NewMockedPermissionsService(),
|
||||
ac, folderSvc, fStore, nil, client.MockTestRestConfig{}, nil, quotatest.New(false, nil), nil, nil)
|
||||
ac, folderSvc, fStore, nil, client.MockTestRestConfig{}, nil, quotatest.New(false, nil), nil, nil, nil)
|
||||
require.NoError(t, err)
|
||||
dashSvc.RegisterDashboardPermissions(accesscontrolmock.NewMockedPermissionsService())
|
||||
cfg.AnnotationMaximumTagsLength = 60
|
||||
|
@ -14,6 +14,9 @@ import (
|
||||
"k8s.io/client-go/dynamic"
|
||||
"k8s.io/client-go/rest"
|
||||
|
||||
k8sUser "k8s.io/apiserver/pkg/authentication/user"
|
||||
k8sRequest "k8s.io/apiserver/pkg/endpoints/request"
|
||||
|
||||
"github.com/grafana/grafana/pkg/apimachinery/identity"
|
||||
"github.com/grafana/grafana/pkg/infra/log"
|
||||
"github.com/grafana/grafana/pkg/registry/apis/dashboard/legacysearcher"
|
||||
@ -21,10 +24,7 @@ import (
|
||||
"github.com/grafana/grafana/pkg/services/dashboards"
|
||||
"github.com/grafana/grafana/pkg/services/user"
|
||||
"github.com/grafana/grafana/pkg/setting"
|
||||
"github.com/grafana/grafana/pkg/storage/unified"
|
||||
"github.com/grafana/grafana/pkg/storage/unified/resource"
|
||||
k8sUser "k8s.io/apiserver/pkg/authentication/user"
|
||||
k8sRequest "k8s.io/apiserver/pkg/endpoints/request"
|
||||
)
|
||||
|
||||
type K8sHandler interface {
|
||||
@ -46,15 +46,15 @@ type k8sHandler struct {
|
||||
namespacer request.NamespaceMapper
|
||||
gvr schema.GroupVersionResource
|
||||
restConfig func(context.Context) *rest.Config
|
||||
searcher func(context.Context) resource.ResourceIndexClient
|
||||
searcher resource.ResourceIndexClient
|
||||
userService user.Service
|
||||
}
|
||||
|
||||
func NewK8sHandler(cfg *setting.Cfg, namespacer request.NamespaceMapper, gvr schema.GroupVersionResource,
|
||||
restConfig func(context.Context) *rest.Config, dashStore dashboards.Store, userSvc user.Service) K8sHandler {
|
||||
restConfig func(context.Context) *rest.Config, dashStore dashboards.Store, userSvc user.Service, resourceClient resource.ResourceClient) K8sHandler {
|
||||
legacySearcher := legacysearcher.NewDashboardSearchClient(dashStore)
|
||||
key := gvr.Resource + "." + gvr.Group // the unified storage key in the config.ini is resource + group
|
||||
searchClient := resource.NewSearchClient(cfg, key, unified.GetResourceClient, legacySearcher)
|
||||
searchClient := resource.NewSearchClient(cfg, key, resourceClient, legacySearcher)
|
||||
|
||||
return &k8sHandler{
|
||||
namespacer: namespacer,
|
||||
@ -191,12 +191,12 @@ func (h *k8sHandler) Search(ctx context.Context, orgID int64, in *resource.Resou
|
||||
}
|
||||
}
|
||||
|
||||
return h.searcher(ctx).Search(ctx, in)
|
||||
return h.searcher.Search(ctx, in)
|
||||
}
|
||||
|
||||
func (h *k8sHandler) GetStats(ctx context.Context, orgID int64) (*resource.ResourceStatsResponse, error) {
|
||||
// goes directly through grpc, so doesn't need the new context
|
||||
return h.searcher(ctx).GetStats(ctx, &resource.ResourceStatsRequest{
|
||||
return h.searcher.GetStats(ctx, &resource.ResourceStatsRequest{
|
||||
Namespace: h.GetNamespace(orgID),
|
||||
Kinds: []string{
|
||||
h.gvr.Group + "/" + h.gvr.Resource,
|
||||
|
@ -81,6 +81,8 @@ type Store interface {
|
||||
GetProvisionedDashboardData(ctx context.Context, name string) ([]*DashboardProvisioning, error)
|
||||
GetProvisionedDataByDashboardID(ctx context.Context, dashboardID int64) (*DashboardProvisioning, error)
|
||||
GetProvisionedDataByDashboardUID(ctx context.Context, orgID int64, dashboardUID string) (*DashboardProvisioning, error)
|
||||
GetProvisionedDashboardsByName(ctx context.Context, name string) ([]*Dashboard, error)
|
||||
GetOrphanedProvisionedDashboards(ctx context.Context, notIn []string) ([]*Dashboard, error)
|
||||
SaveDashboard(ctx context.Context, cmd SaveDashboardCommand) (*Dashboard, error)
|
||||
SaveProvisionedDashboard(ctx context.Context, dash *Dashboard, provisioning *DashboardProvisioning) error
|
||||
UnprovisionDashboard(ctx context.Context, id int64) error
|
||||
|
@ -1,4 +1,4 @@
|
||||
// Code generated by mockery v2.32.0. DO NOT EDIT.
|
||||
// Code generated by mockery v2.52.2. DO NOT EDIT.
|
||||
|
||||
package dashboards
|
||||
|
||||
@ -18,6 +18,10 @@ type FakeDashboardProvisioning struct {
|
||||
func (_m *FakeDashboardProvisioning) DeleteOrphanedProvisionedDashboards(ctx context.Context, cmd *DeleteOrphanedProvisionedDashboardsCommand) error {
|
||||
ret := _m.Called(ctx, cmd)
|
||||
|
||||
if len(ret) == 0 {
|
||||
panic("no return value specified for DeleteOrphanedProvisionedDashboards")
|
||||
}
|
||||
|
||||
var r0 error
|
||||
if rf, ok := ret.Get(0).(func(context.Context, *DeleteOrphanedProvisionedDashboardsCommand) error); ok {
|
||||
r0 = rf(ctx, cmd)
|
||||
@ -32,6 +36,10 @@ func (_m *FakeDashboardProvisioning) DeleteOrphanedProvisionedDashboards(ctx con
|
||||
func (_m *FakeDashboardProvisioning) DeleteProvisionedDashboard(ctx context.Context, dashboardID int64, orgID int64) error {
|
||||
ret := _m.Called(ctx, dashboardID, orgID)
|
||||
|
||||
if len(ret) == 0 {
|
||||
panic("no return value specified for DeleteProvisionedDashboard")
|
||||
}
|
||||
|
||||
var r0 error
|
||||
if rf, ok := ret.Get(0).(func(context.Context, int64, int64) error); ok {
|
||||
r0 = rf(ctx, dashboardID, orgID)
|
||||
@ -46,6 +54,10 @@ func (_m *FakeDashboardProvisioning) DeleteProvisionedDashboard(ctx context.Cont
|
||||
func (_m *FakeDashboardProvisioning) GetProvisionedDashboardData(ctx context.Context, name string) ([]*DashboardProvisioning, error) {
|
||||
ret := _m.Called(ctx, name)
|
||||
|
||||
if len(ret) == 0 {
|
||||
panic("no return value specified for GetProvisionedDashboardData")
|
||||
}
|
||||
|
||||
var r0 []*DashboardProvisioning
|
||||
var r1 error
|
||||
if rf, ok := ret.Get(0).(func(context.Context, string) ([]*DashboardProvisioning, error)); ok {
|
||||
@ -72,6 +84,10 @@ func (_m *FakeDashboardProvisioning) GetProvisionedDashboardData(ctx context.Con
|
||||
func (_m *FakeDashboardProvisioning) GetProvisionedDashboardDataByDashboardID(ctx context.Context, dashboardID int64) (*DashboardProvisioning, error) {
|
||||
ret := _m.Called(ctx, dashboardID)
|
||||
|
||||
if len(ret) == 0 {
|
||||
panic("no return value specified for GetProvisionedDashboardDataByDashboardID")
|
||||
}
|
||||
|
||||
var r0 *DashboardProvisioning
|
||||
var r1 error
|
||||
if rf, ok := ret.Get(0).(func(context.Context, int64) (*DashboardProvisioning, error)); ok {
|
||||
@ -98,6 +114,10 @@ func (_m *FakeDashboardProvisioning) GetProvisionedDashboardDataByDashboardID(ct
|
||||
func (_m *FakeDashboardProvisioning) GetProvisionedDashboardDataByDashboardUID(ctx context.Context, orgID int64, dashboardUID string) (*DashboardProvisioning, error) {
|
||||
ret := _m.Called(ctx, orgID, dashboardUID)
|
||||
|
||||
if len(ret) == 0 {
|
||||
panic("no return value specified for GetProvisionedDashboardDataByDashboardUID")
|
||||
}
|
||||
|
||||
var r0 *DashboardProvisioning
|
||||
var r1 error
|
||||
if rf, ok := ret.Get(0).(func(context.Context, int64, string) (*DashboardProvisioning, error)); ok {
|
||||
@ -124,6 +144,10 @@ func (_m *FakeDashboardProvisioning) GetProvisionedDashboardDataByDashboardUID(c
|
||||
func (_m *FakeDashboardProvisioning) SaveFolderForProvisionedDashboards(_a0 context.Context, _a1 *folder.CreateFolderCommand) (*folder.Folder, error) {
|
||||
ret := _m.Called(_a0, _a1)
|
||||
|
||||
if len(ret) == 0 {
|
||||
panic("no return value specified for SaveFolderForProvisionedDashboards")
|
||||
}
|
||||
|
||||
var r0 *folder.Folder
|
||||
var r1 error
|
||||
if rf, ok := ret.Get(0).(func(context.Context, *folder.CreateFolderCommand) (*folder.Folder, error)); ok {
|
||||
@ -150,6 +174,10 @@ func (_m *FakeDashboardProvisioning) SaveFolderForProvisionedDashboards(_a0 cont
|
||||
func (_m *FakeDashboardProvisioning) SaveProvisionedDashboard(ctx context.Context, dto *SaveDashboardDTO, provisioning *DashboardProvisioning) (*Dashboard, error) {
|
||||
ret := _m.Called(ctx, dto, provisioning)
|
||||
|
||||
if len(ret) == 0 {
|
||||
panic("no return value specified for SaveProvisionedDashboard")
|
||||
}
|
||||
|
||||
var r0 *Dashboard
|
||||
var r1 error
|
||||
if rf, ok := ret.Get(0).(func(context.Context, *SaveDashboardDTO, *DashboardProvisioning) (*Dashboard, error)); ok {
|
||||
@ -176,6 +204,10 @@ func (_m *FakeDashboardProvisioning) SaveProvisionedDashboard(ctx context.Contex
|
||||
func (_m *FakeDashboardProvisioning) UnprovisionDashboard(ctx context.Context, dashboardID int64) error {
|
||||
ret := _m.Called(ctx, dashboardID)
|
||||
|
||||
if len(ret) == 0 {
|
||||
panic("no return value specified for UnprovisionDashboard")
|
||||
}
|
||||
|
||||
var r0 error
|
||||
if rf, ok := ret.Get(0).(func(context.Context, int64) error); ok {
|
||||
r0 = rf(ctx, dashboardID)
|
||||
|
@ -1,4 +1,4 @@
|
||||
// Code generated by mockery v2.42.2. DO NOT EDIT.
|
||||
// Code generated by mockery v2.52.2. DO NOT EDIT.
|
||||
|
||||
package dashboards
|
||||
|
||||
@ -74,6 +74,34 @@ func (_m *FakeDashboardService) CleanUpDeletedDashboards(ctx context.Context) (i
|
||||
return r0, r1
|
||||
}
|
||||
|
||||
// CountDashboardsInOrg provides a mock function with given fields: ctx, orgID
|
||||
func (_m *FakeDashboardService) CountDashboardsInOrg(ctx context.Context, orgID int64) (int64, error) {
|
||||
ret := _m.Called(ctx, orgID)
|
||||
|
||||
if len(ret) == 0 {
|
||||
panic("no return value specified for CountDashboardsInOrg")
|
||||
}
|
||||
|
||||
var r0 int64
|
||||
var r1 error
|
||||
if rf, ok := ret.Get(0).(func(context.Context, int64) (int64, error)); ok {
|
||||
return rf(ctx, orgID)
|
||||
}
|
||||
if rf, ok := ret.Get(0).(func(context.Context, int64) int64); ok {
|
||||
r0 = rf(ctx, orgID)
|
||||
} else {
|
||||
r0 = ret.Get(0).(int64)
|
||||
}
|
||||
|
||||
if rf, ok := ret.Get(1).(func(context.Context, int64) error); ok {
|
||||
r1 = rf(ctx, orgID)
|
||||
} else {
|
||||
r1 = ret.Error(1)
|
||||
}
|
||||
|
||||
return r0, r1
|
||||
}
|
||||
|
||||
// CountInFolders provides a mock function with given fields: ctx, orgID, folderUIDs, user
|
||||
func (_m *FakeDashboardService) CountInFolders(ctx context.Context, orgID int64, folderUIDs []string, user identity.Requester) (int64, error) {
|
||||
ret := _m.Called(ctx, orgID, folderUIDs, user)
|
||||
@ -102,6 +130,24 @@ func (_m *FakeDashboardService) CountInFolders(ctx context.Context, orgID int64,
|
||||
return r0, r1
|
||||
}
|
||||
|
||||
// DeleteAllDashboards provides a mock function with given fields: ctx, orgID
|
||||
func (_m *FakeDashboardService) DeleteAllDashboards(ctx context.Context, orgID int64) error {
|
||||
ret := _m.Called(ctx, orgID)
|
||||
|
||||
if len(ret) == 0 {
|
||||
panic("no return value specified for DeleteAllDashboards")
|
||||
}
|
||||
|
||||
var r0 error
|
||||
if rf, ok := ret.Get(0).(func(context.Context, int64) error); ok {
|
||||
r0 = rf(ctx, orgID)
|
||||
} else {
|
||||
r0 = ret.Error(0)
|
||||
}
|
||||
|
||||
return r0
|
||||
}
|
||||
|
||||
// DeleteDashboard provides a mock function with given fields: ctx, dashboardId, dashboardUID, orgId
|
||||
func (_m *FakeDashboardService) DeleteDashboard(ctx context.Context, dashboardId int64, dashboardUID string, orgId int64) error {
|
||||
ret := _m.Called(ctx, dashboardId, dashboardUID, orgId)
|
||||
@ -120,24 +166,6 @@ func (_m *FakeDashboardService) DeleteDashboard(ctx context.Context, dashboardId
|
||||
return r0
|
||||
}
|
||||
|
||||
// DeleteAllDashboards provides a mock function with given fields: ctx, orgID
|
||||
func (_m *FakeDashboardService) DeleteAllDashboards(ctx context.Context, orgID int64) error {
|
||||
ret := _m.Called(ctx, orgID)
|
||||
|
||||
if len(ret) == 0 {
|
||||
panic("no return value specified for DeleteDashboard")
|
||||
}
|
||||
|
||||
var r0 error
|
||||
if rf, ok := ret.Get(0).(func(context.Context, int64) error); ok {
|
||||
r0 = rf(ctx, orgID)
|
||||
} else {
|
||||
r0 = ret.Error(0)
|
||||
}
|
||||
|
||||
return r0
|
||||
}
|
||||
|
||||
// FindDashboards provides a mock function with given fields: ctx, query
|
||||
func (_m *FakeDashboardService) FindDashboards(ctx context.Context, query *FindPersistedDashboardsQuery) ([]DashboardSearchProjection, error) {
|
||||
ret := _m.Called(ctx, query)
|
||||
@ -168,36 +196,6 @@ func (_m *FakeDashboardService) FindDashboards(ctx context.Context, query *FindP
|
||||
return r0, r1
|
||||
}
|
||||
|
||||
// CountDashboardsInOrg provides a mock function with given fields: ctx, orgID
|
||||
func (_m *FakeDashboardService) CountDashboardsInOrg(ctx context.Context, orgID int64) (int64, error) {
|
||||
ret := _m.Called(ctx, orgID)
|
||||
|
||||
if len(ret) == 0 {
|
||||
panic("no return value specified for CountDashboardsInOrg")
|
||||
}
|
||||
|
||||
var r0 int64
|
||||
var r1 error
|
||||
if rf, ok := ret.Get(0).(func(context.Context, int64) (int64, error)); ok {
|
||||
return rf(ctx, orgID)
|
||||
}
|
||||
if rf, ok := ret.Get(0).(func(context.Context, int64) int64); ok {
|
||||
r0 = rf(ctx, orgID)
|
||||
} else {
|
||||
if ret.Get(0) != nil {
|
||||
r0 = ret.Get(0).(int64)
|
||||
}
|
||||
}
|
||||
|
||||
if rf, ok := ret.Get(1).(func(context.Context, int64) error); ok {
|
||||
r1 = rf(ctx, orgID)
|
||||
} else {
|
||||
r1 = ret.Error(1)
|
||||
}
|
||||
|
||||
return r0, r1
|
||||
}
|
||||
|
||||
// GetAllDashboards provides a mock function with given fields: ctx
|
||||
func (_m *FakeDashboardService) GetAllDashboards(ctx context.Context) ([]*Dashboard, error) {
|
||||
ret := _m.Called(ctx)
|
||||
@ -228,6 +226,7 @@ func (_m *FakeDashboardService) GetAllDashboards(ctx context.Context) ([]*Dashbo
|
||||
return r0, r1
|
||||
}
|
||||
|
||||
// GetAllDashboardsByOrgId provides a mock function with given fields: ctx, orgID
|
||||
func (_m *FakeDashboardService) GetAllDashboardsByOrgId(ctx context.Context, orgID int64) ([]*Dashboard, error) {
|
||||
ret := _m.Called(ctx, orgID)
|
||||
|
||||
|
@ -146,6 +146,38 @@ func (d *dashboardStore) GetProvisionedDashboardData(ctx context.Context, name s
|
||||
return result, err
|
||||
}
|
||||
|
||||
func (d *dashboardStore) GetProvisionedDashboardsByName(ctx context.Context, name string) ([]*dashboards.Dashboard, error) {
|
||||
ctx, span := tracer.Start(ctx, "dashboards.database.GetProvisionedDashboardsByName")
|
||||
defer span.End()
|
||||
|
||||
dashes := []*dashboards.Dashboard{}
|
||||
err := d.store.WithDbSession(ctx, func(sess *db.Session) error {
|
||||
return sess.Table(`dashboard`).
|
||||
Join(`INNER`, `dashboard_provisioning`, `dashboard.id = dashboard_provisioning.dashboard_id`).
|
||||
Where(`dashboard_provisioning.name = ?`, name).Find(&dashes)
|
||||
})
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
return dashes, nil
|
||||
}
|
||||
|
||||
func (d *dashboardStore) GetOrphanedProvisionedDashboards(ctx context.Context, notIn []string) ([]*dashboards.Dashboard, error) {
|
||||
ctx, span := tracer.Start(ctx, "dashboards.database.GetOrphanedProvisionedDashboards")
|
||||
defer span.End()
|
||||
|
||||
dashes := []*dashboards.Dashboard{}
|
||||
err := d.store.WithDbSession(ctx, func(sess *db.Session) error {
|
||||
return sess.Table(`dashboard`).
|
||||
Join(`INNER`, `dashboard_provisioning`, `dashboard.id = dashboard_provisioning.dashboard_id`).
|
||||
NotIn(`dashboard_provisioning.name`, notIn).Find(&dashes)
|
||||
})
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
return dashes, nil
|
||||
}
|
||||
|
||||
func (d *dashboardStore) SaveProvisionedDashboard(ctx context.Context, dash *dashboards.Dashboard, provisioning *dashboards.DashboardProvisioning) error {
|
||||
ctx, span := tracer.Start(ctx, "dashboards.database.SaveProvisionedDashboard")
|
||||
defer span.End()
|
||||
|
@ -302,7 +302,7 @@ func TestIntegrationDashboardInheritedFolderRBAC(t *testing.T) {
|
||||
folderStore := folderimpl.ProvideStore(sqlStore)
|
||||
folderSvc := folderimpl.ProvideService(
|
||||
folderStore, mock.New(), bus.ProvideBus(tracer), dashboardWriteStore, folderimpl.ProvideDashboardFolderStore(sqlStore),
|
||||
nil, sqlStore, features, supportbundlestest.NewFakeBundleService(), nil, cfg, nil, tracing.InitializeTracerForTest())
|
||||
nil, sqlStore, features, supportbundlestest.NewFakeBundleService(), nil, cfg, nil, tracing.InitializeTracerForTest(), nil)
|
||||
|
||||
parentUID := ""
|
||||
for i := 0; ; i++ {
|
||||
|
Some files were not shown because too many files have changed in this diff Show More
Loading…
Reference in New Issue
Block a user