Mirror of https://github.com/grafana/grafana.git, synced 2025-02-25 18:55:37 -06:00
cleanup against smaller upstream

commit 12257d9dfe (parent 07b804457e)
@@ -15,7 +15,6 @@ const (
	NamespaceAnonymous     Namespace = "anonymous"
	NamespaceRenderService Namespace = "render"
	NamespaceAccessPolicy  Namespace = "access-policy"
	NamespaceProvisioning  Namespace = "provisioning"
	NamespaceEmpty         Namespace = ""
)
409 pkg/registry/apis/dashboard/access/sql_dashboards.go Normal file
@@ -0,0 +1,409 @@
package access

import (
	"context"
	"database/sql"
	"fmt"
	"path/filepath"
	"strings"
	"time"

	v1 "k8s.io/apimachinery/pkg/apis/meta/v1"
	"k8s.io/apimachinery/pkg/labels"

	"github.com/grafana/grafana/pkg/apimachinery/utils"
	dashboardsV0 "github.com/grafana/grafana/pkg/apis/dashboard/v0alpha1"
	"github.com/grafana/grafana/pkg/components/simplejson"
	"github.com/grafana/grafana/pkg/infra/appcontext"
	"github.com/grafana/grafana/pkg/infra/db"
	"github.com/grafana/grafana/pkg/services/accesscontrol"
	"github.com/grafana/grafana/pkg/services/apiserver/endpoints/request"
	gapiutil "github.com/grafana/grafana/pkg/services/apiserver/utils"
	"github.com/grafana/grafana/pkg/services/dashboards"
	"github.com/grafana/grafana/pkg/services/provisioning"
	"github.com/grafana/grafana/pkg/services/sqlstore/session"
)

var (
	_ DashboardAccess = (*dashboardSqlAccess)(nil)
)

type dashboardRow struct {
	// Dashboard resource
	Dash *dashboardsV0.Dashboard

	// Title -- this may come from saved metadata rather than the body
	Title string

	// The folder UID (needed for access control checks)
	FolderUID string

	// Needed for fast summary access
	Tags []string

	// Size (in bytes) of the dashboard payload
	Bytes int

	// The token we can use that will start a new connection that includes
	// this same dashboard
	token *continueToken
}

type dashboardSqlAccess struct {
	sql          db.DB
	sess         *session.SessionDB
	namespacer   request.NamespaceMapper
	dashStore    dashboards.Store
	provisioning provisioning.ProvisioningService
}

func NewDashboardAccess(sql db.DB, namespacer request.NamespaceMapper, dashStore dashboards.Store, provisioning provisioning.ProvisioningService) DashboardAccess {
	return &dashboardSqlAccess{
		sql:          sql,
		sess:         sql.GetSqlxSession(),
		namespacer:   namespacer,
		dashStore:    dashStore,
		provisioning: provisioning,
	}
}

const selector = `SELECT
	dashboard.org_id, dashboard.id,
	dashboard.uid,slug,
	dashboard.folder_uid,
	dashboard.created,dashboard.created_by,CreatedUSER.login,
	dashboard.updated,dashboard.updated_by,UpdatedUSER.login,
	plugin_id,
	dashboard_provisioning.name as origin_name,
	dashboard_provisioning.external_id as origin_path,
	dashboard_provisioning.check_sum as origin_key,
	dashboard_provisioning.updated as origin_ts,
	dashboard.version,
	title,
	dashboard.data
FROM dashboard
LEFT OUTER JOIN dashboard_provisioning ON dashboard.id = dashboard_provisioning.dashboard_id
LEFT OUTER JOIN user AS CreatedUSER ON dashboard.created_by = CreatedUSER.id
LEFT OUTER JOIN user AS UpdatedUSER ON dashboard.updated_by = UpdatedUSER.id
WHERE is_folder = false`

func (a *dashboardSqlAccess) getRows(ctx context.Context, query *DashboardQuery, onlySummary bool) (*rowsWrapper, int, error) {
	if !query.Labels.Empty() {
		return nil, 0, fmt.Errorf("label selection not yet supported")
	}
	if len(query.Requirements.SortBy) > 0 {
		return nil, 0, fmt.Errorf("sorting not yet supported")
	}
	if query.Requirements.ListHistory != "" {
		return nil, 0, fmt.Errorf("ListHistory not yet supported")
	}
	if query.Requirements.ListDeleted {
		return nil, 0, fmt.Errorf("ListDeleted not yet supported")
	}

	token, err := readContinueToken(query)
	if err != nil {
		return nil, 0, err
	}

	limit := query.Limit
	if limit < 1 {
		limit = 15
	}
	args := []any{query.OrgID}

	sqlcmd := selector

	// We cannot do this yet because title + tags are in the body
	if onlySummary && false {
		sqlcmd = strings.Replace(sqlcmd, "dashboard.data", `"{}"`, 1)
	}

	sqlcmd = fmt.Sprintf("%s AND dashboard.org_id=$%d", sqlcmd, len(args))
	if query.UID != "" {
		args = append(args, query.UID)
		sqlcmd = fmt.Sprintf("%s AND dashboard.uid=$%d", sqlcmd, len(args))
	} else {
		args = append(args, token.id)
		sqlcmd = fmt.Sprintf("%s AND dashboard.id>=$%d", sqlcmd, len(args))
	}

	if query.Requirements.Folder != nil {
		args = append(args, *query.Requirements.Folder)
		sqlcmd = fmt.Sprintf("%s AND dashboard.folder_uid=$%d", sqlcmd, len(args))
	}

	args = append(args, (limit + 2)) // add more so we can include a next token
	sqlcmd = fmt.Sprintf("%s ORDER BY dashboard.id asc LIMIT $%d", sqlcmd, len(args))

	rows, err := a.doQuery(ctx, sqlcmd, args...)
	if err != nil {
		if rows != nil {
			_ = rows.Close()
		}
		rows = nil
	}
	return rows, limit, err
}

// GetDashboards implements DashboardAccess.
func (a *dashboardSqlAccess) GetDashboards(ctx context.Context, query *DashboardQuery) (*dashboardsV0.DashboardList, error) {
	rows, limit, err := a.getRows(ctx, query, false)
	if err != nil {
		return nil, err
	}
	defer func() { _ = rows.Close() }()

	totalSize := 0
	list := &dashboardsV0.DashboardList{}
	for {
		row, err := rows.Next()
		if err != nil || row == nil {
			return list, err
		}

		totalSize += row.Bytes
		if len(list.Items) > 0 && (totalSize > query.MaxBytes || len(list.Items) >= limit) {
			if query.Requirements.Folder != nil {
				row.token.folder = *query.Requirements.Folder
			}
			list.Continue = row.token.String() // will skip this one but start here next time
			return list, err
		}
		list.Items = append(list.Items, *row.Dash)
	}
}

func (a *dashboardSqlAccess) GetDashboard(ctx context.Context, orgId int64, uid string) (*dashboardsV0.Dashboard, error) {
	r, err := a.GetDashboards(ctx, &DashboardQuery{
		OrgID:  orgId,
		UID:    uid,
		Labels: labels.Everything(),
	})
	if err != nil {
		return nil, err
	}
	if len(r.Items) > 0 {
		return &r.Items[0], nil
	}
	return nil, fmt.Errorf("not found")
}

func (a *dashboardSqlAccess) doQuery(ctx context.Context, query string, args ...any) (*rowsWrapper, error) {
	user, err := appcontext.User(ctx)
	if err != nil {
		return nil, err
	}
	rows, err := a.sess.Query(ctx, query, args...)
	return &rowsWrapper{
		rows: rows,
		a:    a,
		// This looks up rules from the permissions on a user
		canReadDashboard: accesscontrol.Checker(user, dashboards.ActionDashboardsRead),
	}, err
}

type rowsWrapper struct {
	a     *dashboardSqlAccess
	rows  *sql.Rows
	idx   int
	total int64

	canReadDashboard func(scopes ...string) bool
}

func (r *rowsWrapper) Close() error {
	return r.rows.Close()
}

func (r *rowsWrapper) Next() (*dashboardRow, error) {
	// breaks after the first readable value
	for r.rows.Next() {
		r.idx++
		d, err := r.a.scanRow(r.rows)
		if d != nil {
			// Access control checker
			scopes := []string{dashboards.ScopeDashboardsProvider.GetResourceScopeUID(d.Dash.Name)}
			if d.FolderUID != "" { // Copied from searchV2... not sure the logic is right
				scopes = append(scopes, dashboards.ScopeFoldersProvider.GetResourceScopeUID(d.FolderUID))
			}
			if !r.canReadDashboard(scopes...) {
				continue
			}
			d.token.size = r.total // size before next!
			r.total += int64(d.Bytes)
		}

		// return the first readable value
		return d, err
	}
	return nil, nil
}

func (a *dashboardSqlAccess) scanRow(rows *sql.Rows) (*dashboardRow, error) {
	dash := &dashboardsV0.Dashboard{
		TypeMeta:   dashboardsV0.DashboardResourceInfo.TypeMeta(),
		ObjectMeta: v1.ObjectMeta{Annotations: make(map[string]string)},
	}
	row := &dashboardRow{Dash: dash}

	var dashboard_id int64
	var orgId int64
	var slug string
	var folder_uid sql.NullString
	var updated time.Time
	var updatedByID int64
	var updatedByName sql.NullString

	var created time.Time
	var createdByID int64
	var createdByName sql.NullString

	var plugin_id string
	var origin_name sql.NullString
	var origin_path sql.NullString
	var origin_ts sql.NullInt64
	var origin_hash sql.NullString
	var data []byte // the dashboard JSON
	var version int64

	err := rows.Scan(&orgId, &dashboard_id, &dash.Name,
		&slug, &folder_uid,
		&created, &createdByID, &createdByName,
		&updated, &updatedByID, &updatedByName,
		&plugin_id,
		&origin_name, &origin_path, &origin_hash, &origin_ts,
		&version,
		&row.Title, &data,
	)

	row.token = &continueToken{orgId: orgId, id: dashboard_id}
	if err == nil {
		dash.ResourceVersion = fmt.Sprintf("%d", created.UnixMilli())
		dash.Namespace = a.namespacer(orgId)
		dash.UID = gapiutil.CalculateClusterWideUID(dash)
		dash.SetCreationTimestamp(v1.NewTime(created))
		meta, err := utils.MetaAccessor(dash)
		if err != nil {
			return nil, err
		}
		meta.SetUpdatedTimestamp(&updated)
		meta.SetSlug(slug)
		if createdByID > 0 {
			meta.SetCreatedBy(fmt.Sprintf("user:%d/%s", createdByID, createdByName.String))
		}
		if updatedByID > 0 {
			meta.SetUpdatedBy(fmt.Sprintf("user:%d/%s", updatedByID, updatedByName.String))
		}
		if folder_uid.Valid {
			meta.SetFolder(folder_uid.String)
			row.FolderUID = folder_uid.String
		}

		if origin_name.Valid {
			ts := time.Unix(origin_ts.Int64, 0)

			resolvedPath := a.provisioning.GetDashboardProvisionerResolvedPath(origin_name.String)
			originPath, err := filepath.Rel(
				resolvedPath,
				origin_path.String,
			)
			if err != nil {
				return nil, err
			}

			meta.SetOriginInfo(&utils.ResourceOriginInfo{
				Name:      origin_name.String,
				Path:      originPath,
				Hash:      origin_hash.String,
				Timestamp: &ts,
			})
		} else if plugin_id != "" {
			meta.SetOriginInfo(&utils.ResourceOriginInfo{
				Name: "plugin",
				Path: plugin_id,
			})
		}

		row.Bytes = len(data)
		if row.Bytes > 0 {
			err = dash.Spec.UnmarshalJSON(data)
			if err != nil {
				return row, err
			}
			dash.Spec.Set("id", dashboard_id) // add it so we can get it from the body later
			row.Title = dash.Spec.GetNestedString("title")
			row.Tags = dash.Spec.GetNestedStringSlice("tags")
		}
	}
	return row, err
}

// DeleteDashboard implements DashboardAccess.
func (a *dashboardSqlAccess) DeleteDashboard(ctx context.Context, orgId int64, uid string) (*dashboardsV0.Dashboard, bool, error) {
	dash, err := a.GetDashboard(ctx, orgId, uid)
	if err != nil {
		return nil, false, err
	}

	id := dash.Spec.GetNestedInt64("id")
	if id == 0 {
		return nil, false, fmt.Errorf("could not find id in saved body")
	}

	err = a.dashStore.DeleteDashboard(ctx, &dashboards.DeleteDashboardCommand{
		OrgID: orgId,
		ID:    id,
	})
	if err != nil {
		return nil, false, err
	}
	return dash, true, nil
}

// SaveDashboard implements DashboardAccess.
func (a *dashboardSqlAccess) SaveDashboard(ctx context.Context, orgId int64, dash *dashboardsV0.Dashboard) (*dashboardsV0.Dashboard, bool, error) {
	created := false
	user, err := appcontext.User(ctx)
	if err != nil {
		return nil, created, err
	}
	if dash.Name != "" {
		dash.Spec.Set("uid", dash.Name)

		// Get the previous version to set the internal ID
		old, _ := a.dashStore.GetDashboard(ctx, &dashboards.GetDashboardQuery{
			OrgID: orgId,
			UID:   dash.Name,
		})
		if old != nil {
			dash.Spec.Set("id", old.ID)
		} else {
			dash.Spec.Remove("id") // existence of "id" makes it an update
			created = true
		}
	} else {
		dash.Spec.Remove("id")
		dash.Spec.Remove("uid")
	}

	meta, err := utils.MetaAccessor(dash)
	if err != nil {
		return nil, false, err
	}
	out, err := a.dashStore.SaveDashboard(ctx, dashboards.SaveDashboardCommand{
		OrgID:     orgId,
		Dashboard: simplejson.NewFromAny(dash.Spec.UnstructuredContent()),
		FolderUID: meta.GetFolder(),
		Overwrite: true, // already passed the revisionVersion checks!
		UserID:    user.UserID,
	})
	if err != nil {
		return nil, false, err
	}
	if out != nil {
		created = (out.Created.Unix() == out.Updated.Unix()) // and now?
	}
	dash, err = a.GetDashboard(ctx, orgId, out.UID)
	return dash, created, err
}
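Aside, not part of the commit: getRows pages with a keyset scan rather than OFFSET. The continue token records the last internal id, and each page re-enters the primary-key index at `dashboard.id >= $id ORDER BY dashboard.id ASC`, fetching a couple of extra rows so the next token can be built. A minimal, self-contained sketch of the same pattern, assuming a hypothetical `dashboard(id BIGINT PRIMARY KEY, title TEXT)` table and Postgres-style placeholders:

```go
package pagination

import "database/sql"

// fetchPage returns up to limit titles starting at startID, plus the id to
// resume from (0 when there are no more pages).
func fetchPage(db *sql.DB, startID int64, limit int) (titles []string, nextID int64, err error) {
	// Ask for one extra row so we can tell whether another page exists.
	rows, err := db.Query(
		`SELECT id, title FROM dashboard WHERE id >= $1 ORDER BY id ASC LIMIT $2`,
		startID, limit+1)
	if err != nil {
		return nil, 0, err
	}
	defer rows.Close()

	for rows.Next() {
		var id int64
		var title string
		if err := rows.Scan(&id, &title); err != nil {
			return nil, 0, err
		}
		if len(titles) >= limit {
			// The first row past the page becomes the resume point,
			// mirroring how GetDashboards sets list.Continue above.
			nextID = id
			break
		}
		titles = append(titles, title)
	}
	return titles, nextID, rows.Err()
}
```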
67 pkg/registry/apis/dashboard/access/token.go Normal file
@@ -0,0 +1,67 @@
package access

import (
	"fmt"
	"strconv"
	"strings"

	"github.com/grafana/grafana/pkg/util"
)

type continueToken struct {
	orgId  int64
	id     int64  // the internal id (sort by!)
	folder string // from the query
	size   int64
}

func readContinueToken(q *DashboardQuery) (continueToken, error) {
	var err error
	token := continueToken{}
	if q.ContinueToken == "" {
		return token, nil
	}
	parts := strings.Split(q.ContinueToken, "/")
	if len(parts) < 3 {
		return token, fmt.Errorf("invalid continue token (too few parts)")
	}
	sub := strings.Split(parts[0], ":")
	if sub[0] != "org" {
		return token, fmt.Errorf("expected org in first slug")
	}
	token.orgId, err = strconv.ParseInt(sub[1], 10, 64)
	if err != nil {
		return token, fmt.Errorf("error parsing orgid")
	}

	sub = strings.Split(parts[1], ":")
	if sub[0] != "start" {
		return token, fmt.Errorf("expected internal ID in second slug")
	}
	token.id, err = strconv.ParseInt(sub[1], 10, 64)
	if err != nil {
		return token, fmt.Errorf("error parsing start id")
	}

	sub = strings.Split(parts[2], ":")
	if sub[0] != "folder" {
		return token, fmt.Errorf("expected folder UID in third slug")
	}
	token.folder = sub[1]

	// Check if the folder filter is the same as in the previous query
	if q.Requirements.Folder == nil {
		if token.folder != "" {
			return token, fmt.Errorf("invalid token, the folder must match previous query")
		}
	} else if token.folder != *q.Requirements.Folder {
		return token, fmt.Errorf("invalid token, the folder must match previous query")
	}

	return token, err
}

func (r *continueToken) String() string {
	return fmt.Sprintf("org:%d/start:%d/folder:%s/%s",
		r.orgId, r.id, r.folder, util.ByteCountSI(r.size))
}
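For reference, not from the commit: the token String produces and readContinueToken consumes round-trips like this (illustrative values; the trailing size slug is emitted for observability and never parsed back):

```go
package main

import "fmt"

func main() {
	// What continueToken.String() emits for org 2, internal id 25,
	// folder "general", and ~1.2 MB already returned:
	token := fmt.Sprintf("org:%d/start:%d/folder:%s/%s", 2, 25, "general", "1.2 MB")
	fmt.Println(token) // org:2/start:25/folder:general/1.2 MB

	// readContinueToken splits on "/" and requires at least three
	// "key:value" slugs -- org, start, folder -- in that order.
}
```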
35 pkg/registry/apis/dashboard/access/types.go Normal file
@@ -0,0 +1,35 @@
package access

import (
	"context"

	"k8s.io/apimachinery/pkg/labels"

	dashboardsV0 "github.com/grafana/grafana/pkg/apis/dashboard/v0alpha1"
	"github.com/grafana/grafana/pkg/services/apiserver/storage/entity"
)

// This does not check if you have permissions!

type DashboardQuery struct {
	OrgID    int64
	UID      string // to select a single dashboard
	Limit    int
	MaxBytes int

	// FolderUID etc
	Requirements entity.Requirements
	// Post processing label filter
	Labels labels.Selector

	// The token from previous query
	ContinueToken string
}

type DashboardAccess interface {
	GetDashboard(ctx context.Context, orgId int64, uid string) (*dashboardsV0.Dashboard, error)
	GetDashboards(ctx context.Context, query *DashboardQuery) (*dashboardsV0.DashboardList, error)

	SaveDashboard(ctx context.Context, orgId int64, dash *dashboardsV0.Dashboard) (*dashboardsV0.Dashboard, bool, error)
	DeleteDashboard(ctx context.Context, orgId int64, uid string) (*dashboardsV0.Dashboard, bool, error)
}
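A sketch, not part of the commit, of how a caller could drain every page through this interface if the helper lived in this same package: `a` is any implementation (for example the one returned by NewDashboardAccess), and `list.Continue` from one call feeds the `ContinueToken` of the next.

```go
// listAll pages through GetDashboards until the server stops returning a
// continue token. The Limit and MaxBytes values here are arbitrary examples.
func listAll(ctx context.Context, a DashboardAccess, orgID int64) ([]dashboardsV0.Dashboard, error) {
	var all []dashboardsV0.Dashboard
	token := ""
	for {
		list, err := a.GetDashboards(ctx, &DashboardQuery{
			OrgID:         orgID,
			Limit:         100,
			MaxBytes:      2 * 1024 * 1024,
			Labels:        labels.Everything(),
			ContinueToken: token,
		})
		if err != nil {
			return nil, err
		}
		all = append(all, list.Items...)
		if list.Continue == "" {
			return all, nil // no more pages
		}
		token = list.Continue
	}
}
```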
@@ -1,71 +1,170 @@
package dashboard

import (
	"context"
	"fmt"
	"strings"
	"time"

	"k8s.io/apimachinery/pkg/apis/meta/internalversion"
	metav1 "k8s.io/apimachinery/pkg/apis/meta/v1"
	"k8s.io/apimachinery/pkg/runtime"
	"k8s.io/apiserver/pkg/registry/generic"
	genericregistry "k8s.io/apiserver/pkg/registry/generic/registry"
	"k8s.io/apiserver/pkg/registry/rest"

	common "github.com/grafana/grafana/pkg/apimachinery/apis/common/v0alpha1"
	grafanaregistry "github.com/grafana/grafana/pkg/apiserver/registry/generic"
	grafanarest "github.com/grafana/grafana/pkg/apiserver/rest"
	"github.com/grafana/grafana/pkg/registry/apis/dashboard/legacy"
	"github.com/grafana/grafana/pkg/storage/unified/apistore"
	"github.com/grafana/grafana/pkg/storage/unified/resource"
	"github.com/grafana/grafana/pkg/apis/dashboard/v0alpha1"
	"github.com/grafana/grafana/pkg/registry/apis/dashboard/access"
	"github.com/grafana/grafana/pkg/services/apiserver/endpoints/request"
	"github.com/grafana/grafana/pkg/services/apiserver/storage/entity"
)

var (
	_ rest.Storage              = (*dashboardStorage)(nil)
	_ rest.Scoper               = (*dashboardStorage)(nil)
	_ rest.SingularNameProvider = (*dashboardStorage)(nil)
	_ rest.Getter               = (*dashboardStorage)(nil)
	_ rest.Lister               = (*dashboardStorage)(nil)
	_ rest.Creater              = (*dashboardStorage)(nil)
	_ rest.Updater              = (*dashboardStorage)(nil)
	_ rest.GracefulDeleter      = (*dashboardStorage)(nil)
)

type dashboardStorage struct {
	resource common.ResourceInfo
	access legacy.DashboardAccess
	access access.DashboardAccess
	tableConverter rest.TableConvertor

	server resource.ResourceServer
}

func (s *dashboardStorage) newStore(scheme *runtime.Scheme, defaultOptsGetter generic.RESTOptionsGetter) (grafanarest.LegacyStorage, error) {
	server, err := resource.NewResourceServer(resource.ResourceServerOptions{
		Backend: s.access,
		Search:  s.access,
		Blob:    s.access,
		// WriteAccess: resource.WriteAccessHooks{
		// 	Folder: func(ctx context.Context, user identity.Requester, uid string) bool {
		// 		// ???
		// 	},
		// },
	})
func (s *dashboardStorage) New() runtime.Object {
	return s.resource.NewFunc()
}

func (s *dashboardStorage) Destroy() {}

func (s *dashboardStorage) NamespaceScoped() bool {
	return true
}

func (s *dashboardStorage) GetSingularName() string {
	return s.resource.GetSingularName()
}

func (s *dashboardStorage) NewList() runtime.Object {
	return s.resource.NewListFunc()
}

func (s *dashboardStorage) ConvertToTable(ctx context.Context, object runtime.Object, tableOptions runtime.Object) (*metav1.Table, error) {
	return s.tableConverter.ConvertToTable(ctx, object, tableOptions)
}

func (s *dashboardStorage) Create(ctx context.Context,
	obj runtime.Object,
	createValidation rest.ValidateObjectFunc,
	options *metav1.CreateOptions,
) (runtime.Object, error) {
	info, err := request.NamespaceInfoFrom(ctx, true)
	if err != nil {
		return nil, err
	}
	s.server = server

	resourceInfo := s.resource
	defaultOpts, err := defaultOptsGetter.GetRESTOptions(resourceInfo.GroupResource())
	p, ok := obj.(*v0alpha1.Dashboard)
	if !ok {
		return nil, fmt.Errorf("expected dashboard?")
	}

	// HACK to simplify unique name testing from kubectl
	t := p.Spec.GetNestedString("title")
	if strings.Contains(t, "${NOW}") {
		t = strings.ReplaceAll(t, "${NOW}", fmt.Sprintf("%d", time.Now().Unix()))
		p.Spec.Set("title", t)
	}

	dash, _, err := s.access.SaveDashboard(ctx, info.OrgID, p)
	return dash, err
}

func (s *dashboardStorage) Update(ctx context.Context,
	name string,
	objInfo rest.UpdatedObjectInfo,
	createValidation rest.ValidateObjectFunc,
	updateValidation rest.ValidateObjectUpdateFunc,
	forceAllowCreate bool,
	options *metav1.UpdateOptions,
) (runtime.Object, bool, error) {
	info, err := request.NamespaceInfoFrom(ctx, true)
	if err != nil {
		return nil, false, err
	}

	created := false
	old, err := s.Get(ctx, name, nil)
	if err != nil {
		return old, created, err
	}

	obj, err := objInfo.UpdatedObject(ctx, old)
	if err != nil {
		return old, created, err
	}
	p, ok := obj.(*v0alpha1.Dashboard)
	if !ok {
		return nil, created, fmt.Errorf("expected dashboard after update")
	}

	_, created, err = s.access.SaveDashboard(ctx, info.OrgID, p)
	if err == nil {
		r, err := s.Get(ctx, name, nil)
		return r, created, err
	}
	return nil, created, err
}

// GracefulDeleter
func (s *dashboardStorage) Delete(ctx context.Context, name string, deleteValidation rest.ValidateObjectFunc, options *metav1.DeleteOptions) (runtime.Object, bool, error) {
	info, err := request.NamespaceInfoFrom(ctx, true)
	if err != nil {
		return nil, false, err
	}

	return s.access.DeleteDashboard(ctx, info.OrgID, name)
}

func (s *dashboardStorage) List(ctx context.Context, options *internalversion.ListOptions) (runtime.Object, error) {
	orgId, err := request.OrgIDForList(ctx)
	if err != nil {
		return nil, err
	}
	client := resource.NewLocalResourceStoreClient(server)
	optsGetter := apistore.NewRESTOptionsGetter(client,
		defaultOpts.StorageConfig.Codec,
	)

	strategy := grafanaregistry.NewStrategy(scheme)
	store := &genericregistry.Store{
		NewFunc:                   resourceInfo.NewFunc,
		NewListFunc:               resourceInfo.NewListFunc,
		KeyRootFunc:               grafanaregistry.KeyRootFunc(resourceInfo.GroupResource()),
		KeyFunc:                   grafanaregistry.NamespaceKeyFunc(resourceInfo.GroupResource()),
		PredicateFunc:             grafanaregistry.Matcher,
		DefaultQualifiedResource:  resourceInfo.GroupResource(),
		SingularQualifiedResource: resourceInfo.SingularGroupResource(),
		CreateStrategy:            strategy,
		UpdateStrategy:            strategy,
		DeleteStrategy:            strategy,
		TableConvertor:            s.tableConverter,
	}
	// fmt.Printf("LIST: %s\n", options.Continue)

	options := &generic.StoreOptions{RESTOptions: optsGetter}
	if err := store.CompleteWithOptions(options); err != nil {
	// translate grafana.app/* label selectors into field requirements
	requirements, newSelector, err := entity.ReadLabelSelectors(options.LabelSelector)
	if err != nil {
		return nil, err
	}
	return store, err

	query := &access.DashboardQuery{
		OrgID:         orgId,
		Limit:         int(options.Limit),
		MaxBytes:      2 * 1024 * 1024, // 2MB,
		ContinueToken: options.Continue,
		Requirements:  requirements,
		Labels:        newSelector,
	}

	return s.access.GetDashboards(ctx, query)
}

func (s *dashboardStorage) Get(ctx context.Context, name string, options *metav1.GetOptions) (runtime.Object, error) {
	info, err := request.NamespaceInfoFrom(ctx, true)
	if err != nil {
		return nil, err
	}

	return s.access.GetDashboard(ctx, info.OrgID, name)
}

// GracefulDeleter
func (s *dashboardStorage) DeleteCollection(ctx context.Context, deleteValidation rest.ValidateObjectFunc, options *metav1.DeleteOptions, listOptions *internalversion.ListOptions) (runtime.Object, error) {
	return nil, fmt.Errorf("DeleteCollection for dashboards not implemented")
}
@@ -1,10 +1,6 @@
package dashboard

import (
	"fmt"
	"time"

	"github.com/prometheus/client_golang/prometheus"
	metav1 "k8s.io/apimachinery/pkg/apis/meta/v1"
	"k8s.io/apimachinery/pkg/runtime"
	"k8s.io/apimachinery/pkg/runtime/schema"
@@ -15,22 +11,22 @@ import (
	common "k8s.io/kube-openapi/pkg/common"
	"k8s.io/kube-openapi/pkg/spec3"

	dashboard "github.com/grafana/grafana/pkg/apis/dashboard/v0alpha1"
	"github.com/prometheus/client_golang/prometheus"

	"github.com/grafana/grafana/pkg/apis/dashboard/v0alpha1"
	grafanaregistry "github.com/grafana/grafana/pkg/apiserver/registry/generic"
	grafanarest "github.com/grafana/grafana/pkg/apiserver/rest"
	"github.com/grafana/grafana/pkg/infra/db"
	"github.com/grafana/grafana/pkg/infra/log"
	"github.com/grafana/grafana/pkg/infra/tracing"
	"github.com/grafana/grafana/pkg/registry/apis/dashboard/legacy"
	"github.com/grafana/grafana/pkg/registry/apis/dashboard/access"
	"github.com/grafana/grafana/pkg/services/accesscontrol"
	"github.com/grafana/grafana/pkg/services/apiserver/builder"
	"github.com/grafana/grafana/pkg/services/apiserver/endpoints/request"
	gapiutil "github.com/grafana/grafana/pkg/services/apiserver/utils"
	"github.com/grafana/grafana/pkg/services/dashboards"
	dashver "github.com/grafana/grafana/pkg/services/dashboardversion"
	"github.com/grafana/grafana/pkg/services/featuremgmt"
	"github.com/grafana/grafana/pkg/services/provisioning"
	"github.com/grafana/grafana/pkg/setting"
	"github.com/grafana/grafana/pkg/storage/unified/apistore"
)

var _ builder.APIGroupBuilder = (*DashboardsAPIBuilder)(nil)
@@ -39,8 +35,11 @@ var _ builder.APIGroupBuilder = (*DashboardsAPIBuilder)(nil)
type DashboardsAPIBuilder struct {
	dashboardService dashboards.DashboardService

	accessControl accesscontrol.AccessControl
	legacy *dashboardStorage
	dashboardVersionService dashver.Service
	accessControl           accesscontrol.AccessControl
	namespacer              request.NamespaceMapper
	access                  access.DashboardAccess
	dashStore               dashboards.Store

	log log.Logger
}
@@ -48,12 +47,12 @@ type DashboardsAPIBuilder struct {
func RegisterAPIService(cfg *setting.Cfg, features featuremgmt.FeatureToggles,
	apiregistration builder.APIRegistrar,
	dashboardService dashboards.DashboardService,
	dashboardVersionService dashver.Service,
	accessControl accesscontrol.AccessControl,
	provisioning provisioning.ProvisioningService,
	dashStore dashboards.Store,
	reg prometheus.Registerer,
	sql db.DB,
	tracing *tracing.TracingService,
) *DashboardsAPIBuilder {
	if !features.IsEnabledGlobally(featuremgmt.FlagGrafanaAPIServerWithExperimentalAPIs) {
		return nil // skip registration unless opting into experimental apis
@@ -61,42 +60,20 @@ func RegisterAPIService(cfg *setting.Cfg, features featuremgmt.FeatureToggles,

	namespacer := request.GetNamespaceMapper(cfg)
	builder := &DashboardsAPIBuilder{
		log: log.New("grafana-apiserver.dashboards"),

		dashboardService: dashboardService,
		accessControl:    accessControl,

		legacy: &dashboardStorage{
			resource: dashboard.DashboardResourceInfo,
			access:   legacy.NewDashboardAccess(sql, namespacer, dashStore, provisioning),
			tableConverter: gapiutil.NewTableConverter(
				dashboard.DashboardResourceInfo.GroupResource(),
				[]metav1.TableColumnDefinition{
					{Name: "Name", Type: "string", Format: "name"},
					{Name: "Title", Type: "string", Format: "string", Description: "The dashboard name"},
					{Name: "Created At", Type: "date"},
				},
				func(obj any) ([]interface{}, error) {
					dash, ok := obj.(*dashboard.Dashboard)
					if ok {
						if dash != nil {
							return []interface{}{
								dash.Name,
								dash.Spec.GetNestedString("title"),
								dash.CreationTimestamp.UTC().Format(time.RFC3339),
							}, nil
						}
					}
					return nil, fmt.Errorf("expected dashboard or summary")
				}),
		},
		dashboardService:        dashboardService,
		dashboardVersionService: dashboardVersionService,
		dashStore:               dashStore,
		accessControl:           accessControl,
		namespacer:              namespacer,
		access:                  access.NewDashboardAccess(sql, namespacer, dashStore, provisioning),
		log:                     log.New("grafana-apiserver.dashboards"),
	}
	apiregistration.RegisterAPI(builder)
	return builder
}

func (b *DashboardsAPIBuilder) GetGroupVersion() schema.GroupVersion {
	return dashboard.DashboardResourceInfo.GroupVersion()
	return v0alpha1.DashboardResourceInfo.GroupVersion()
}

func (b *DashboardsAPIBuilder) GetDesiredDualWriterMode(dualWrite bool, modeMap map[string]grafanarest.DualWriterMode) grafanarest.DualWriterMode {
@@ -106,18 +83,16 @@ func (b *DashboardsAPIBuilder) GetDesiredDualWriterMode(dualWrite bool, modeMap

func addKnownTypes(scheme *runtime.Scheme, gv schema.GroupVersion) {
	scheme.AddKnownTypes(gv,
		&dashboard.Dashboard{},
		&dashboard.DashboardList{},
		&dashboard.DashboardWithAccessInfo{},
		&dashboard.DashboardVersionList{},
		&dashboard.VersionsQueryOptions{},
		&metav1.PartialObjectMetadata{},
		&metav1.PartialObjectMetadataList{},
		&v0alpha1.Dashboard{},
		&v0alpha1.DashboardList{},
		&v0alpha1.DashboardWithAccessInfo{},
		&v0alpha1.DashboardVersionList{},
		&v0alpha1.VersionsQueryOptions{},
	)
}

func (b *DashboardsAPIBuilder) InstallSchema(scheme *runtime.Scheme) error {
	resourceInfo := dashboard.DashboardResourceInfo
	resourceInfo := v0alpha1.DashboardResourceInfo
	addKnownTypes(scheme, resourceInfo.GroupVersion())

	// Link this version to the internal representation.
@@ -143,44 +118,44 @@ func (b *DashboardsAPIBuilder) GetAPIGroupInfo(
	desiredMode grafanarest.DualWriterMode,
	reg prometheus.Registerer,
) (*genericapiserver.APIGroupInfo, error) {
	apiGroupInfo := genericapiserver.NewDefaultAPIGroupInfo(dashboard.GROUP, scheme, metav1.ParameterCodec, codecs)
	apiGroupInfo := genericapiserver.NewDefaultAPIGroupInfo(v0alpha1.GROUP, scheme, metav1.ParameterCodec, codecs)

	dash := b.legacy.resource
	legacyStore, err := b.legacy.newStore(scheme, optsGetter)
	resourceInfo := v0alpha1.DashboardResourceInfo
	store, err := newStorage(scheme)
	if err != nil {
		return nil, err
	}

	legacyStore := &dashboardStorage{
		resource:       resourceInfo,
		access:         b.access,
		tableConverter: store.TableConvertor,
	}

	storage := map[string]rest.Storage{}
	storage[dash.StoragePath()] = legacyStore
	storage[dash.StoragePath("dto")] = &DTOConnector{
	storage[resourceInfo.StoragePath()] = legacyStore
	storage[resourceInfo.StoragePath("dto")] = &DTOConnector{
		builder: b,
	}
	storage[resourceInfo.StoragePath("versions")] = &VersionsREST{
		builder: b,
	}
	storage[dash.StoragePath("history")] = apistore.NewHistoryConnector(
		b.legacy.server, // as client???
		dashboard.DashboardResourceInfo.GroupResource(),
	)

	// Dual writes if a RESTOptionsGetter is provided
	if desiredMode != grafanarest.Mode0 && optsGetter != nil {
		store, err := newStorage(scheme)
		if err != nil {
			return nil, err
		}

		options := &generic.StoreOptions{RESTOptions: optsGetter, AttrFunc: grafanaregistry.GetAttrs}
		if err := store.CompleteWithOptions(options); err != nil {
			return nil, err
		}
		storage[dash.StoragePath()] = grafanarest.NewDualWriter(grafanarest.Mode1, legacyStore, store, reg)
		storage[resourceInfo.StoragePath()] = grafanarest.NewDualWriter(grafanarest.Mode1, legacyStore, store, reg)
	}

	apiGroupInfo.VersionedResourcesStorageMap[dashboard.VERSION] = storage
	apiGroupInfo.VersionedResourcesStorageMap[v0alpha1.VERSION] = storage
	return &apiGroupInfo, nil
}

func (b *DashboardsAPIBuilder) GetOpenAPIDefinitions() common.GetOpenAPIDefinitions {
	return dashboard.GetOpenAPIDefinitions
	return v0alpha1.GetOpenAPIDefinitions
}

func (b *DashboardsAPIBuilder) PostProcessOpenAPI(oas *spec3.OpenAPI) (*spec3.OpenAPI, error) {
@@ -191,8 +166,8 @@ func (b *DashboardsAPIBuilder) PostProcessOpenAPI(oas *spec3.OpenAPI) (*spec3.Op
	root := "/apis/" + b.GetGroupVersion().String() + "/"

	// Hide the ability to list or watch across all tenants
	delete(oas.Paths.Paths, root+dashboard.DashboardResourceInfo.GroupResource().Resource)
	delete(oas.Paths.Paths, root+"watch/"+dashboard.DashboardResourceInfo.GroupResource().Resource)
	delete(oas.Paths.Paths, root+v0alpha1.DashboardResourceInfo.GroupResource().Resource)
	delete(oas.Paths.Paths, root+"watch/"+v0alpha1.DashboardResourceInfo.GroupResource().Resource)

	// The root API discovery list
	sub := oas.Paths.Paths[root]
@@ -2,7 +2,6 @@ package dashboard

import (
	"context"
	"encoding/json"
	"fmt"
	"net/http"

@@ -10,7 +9,6 @@
	"k8s.io/apiserver/pkg/registry/rest"

	"github.com/grafana/grafana/pkg/apimachinery/identity"
	"github.com/grafana/grafana/pkg/apimachinery/utils"
	dashboard "github.com/grafana/grafana/pkg/apis/dashboard/v0alpha1"
	"github.com/grafana/grafana/pkg/infra/appcontext"
	"github.com/grafana/grafana/pkg/infra/slugify"
@@ -18,7 +16,6 @@
	"github.com/grafana/grafana/pkg/services/apiserver/endpoints/request"
	"github.com/grafana/grafana/pkg/services/dashboards"
	"github.com/grafana/grafana/pkg/services/guardian"
	"github.com/grafana/grafana/pkg/storage/unified/resource"
)

// The DTO returns everything the UI needs in a single request
@@ -26,10 +23,8 @@ type DTOConnector struct {
	builder *DashboardsAPIBuilder
}

var (
	_ rest.Connecter       = (*DTOConnector)(nil)
	_ rest.StorageMetadata = (*DTOConnector)(nil)
)
var _ = rest.Connecter(&DTOConnector{})
var _ = rest.StorageMetadata(&DTOConnector{})

func (r *DTOConnector) New() runtime.Object {
	return &dashboard.DashboardWithAccessInfo{}
@@ -93,35 +88,10 @@ func (r *DTOConnector) Connect(ctx context.Context, name string, opts runtime.Ob
	r.getAnnotationPermissionsByScope(ctx, user, &access.AnnotationsPermissions.Dashboard, accesscontrol.ScopeAnnotationsTypeDashboard)
	r.getAnnotationPermissionsByScope(ctx, user, &access.AnnotationsPermissions.Organization, accesscontrol.ScopeAnnotationsTypeOrganization)

	key := &resource.ResourceKey{
		Namespace: info.Value,
		Group:     dashboard.GROUP,
		Resource:  dashboard.DashboardResourceInfo.GroupResource().Resource,
		Name:      name,
	}
	store := r.builder.legacy.access
	rsp, err := store.Read(ctx, &resource.ReadRequest{Key: key})
	dash, err := r.builder.access.GetDashboard(ctx, info.OrgID, name)
	if err != nil {
		return nil, err
	}
	dash := &dashboard.Dashboard{}
	err = json.Unmarshal(rsp.Value, dash)
	if err != nil {
		return nil, err
	}

	// TODO, load the full spec from blob storage
	if false {
		blob, err := store.GetBlob(ctx, key, &utils.BlobInfo{UID: "dto"}, true)
		if err != nil {
			return nil, err
		}
		err = json.Unmarshal(blob.Value, &dash.Spec)
		if err != nil {
			return nil, err
		}
	}

	access.Slug = slugify.Slugify(dash.Spec.GetNestedString("title"))
	access.Url = dashboards.GetDashboardFolderURL(false, name, access.Slug)

117 pkg/registry/apis/dashboard/sub_versions.go Normal file
@@ -0,0 +1,117 @@
package dashboard

import (
	"context"
	"fmt"
	"net/http"
	"strconv"
	"strings"

	metav1 "k8s.io/apimachinery/pkg/apis/meta/v1"
	"k8s.io/apimachinery/pkg/runtime"
	"k8s.io/apiserver/pkg/registry/rest"

	common "github.com/grafana/grafana/pkg/apimachinery/apis/common/v0alpha1"
	dashboard "github.com/grafana/grafana/pkg/apis/dashboard/v0alpha1"
	"github.com/grafana/grafana/pkg/services/apiserver/endpoints/request"
	dashver "github.com/grafana/grafana/pkg/services/dashboardversion"
)

type VersionsREST struct {
	builder *DashboardsAPIBuilder
}

var _ = rest.Connecter(&VersionsREST{})
var _ = rest.StorageMetadata(&VersionsREST{})

func (r *VersionsREST) New() runtime.Object {
	return &dashboard.DashboardVersionList{}
}

func (r *VersionsREST) Destroy() {
}

func (r *VersionsREST) ConnectMethods() []string {
	return []string{"GET"}
}

func (r *VersionsREST) ProducesMIMETypes(verb string) []string {
	return nil
}

func (r *VersionsREST) ProducesObject(verb string) interface{} {
	return &dashboard.DashboardVersionList{}
}

func (r *VersionsREST) NewConnectOptions() (runtime.Object, bool, string) {
	return nil, true, ""
}

func (r *VersionsREST) Connect(ctx context.Context, uid string, opts runtime.Object, responder rest.Responder) (http.Handler, error) {
	info, err := request.NamespaceInfoFrom(ctx, true)
	if err != nil {
		return nil, err
	}

	return http.HandlerFunc(func(w http.ResponseWriter, req *http.Request) {
		path := req.URL.Path
		idx := strings.LastIndex(path, "/versions/")
		if idx > 0 {
			key := path[strings.LastIndex(path, "/")+1:]
			version, err := strconv.Atoi(key)
			if err != nil {
				responder.Error(err)
				return
			}

			dto, err := r.builder.dashboardVersionService.Get(ctx, &dashver.GetDashboardVersionQuery{
				DashboardUID: uid,
				OrgID:        info.OrgID,
				Version:      version,
			})
			if err != nil {
				responder.Error(err)
				return
			}

			data, _ := dto.Data.Map()

			// Convert the version to a regular dashboard
			dash := &dashboard.Dashboard{
				ObjectMeta: metav1.ObjectMeta{
					Name:              uid,
					CreationTimestamp: metav1.NewTime(dto.Created),
				},
				Spec: common.Unstructured{Object: data},
			}
			responder.Object(http.StatusOK, dash)
			return
		}

		// Or list versions
		rsp, err := r.builder.dashboardVersionService.List(ctx, &dashver.ListDashboardVersionsQuery{
			DashboardUID: uid,
			OrgID:        info.OrgID,
		})
		if err != nil {
			responder.Error(err)
			return
		}
		versions := &dashboard.DashboardVersionList{}
		for _, v := range rsp {
			info := dashboard.DashboardVersionInfo{
				Version: v.Version,
				Created: v.Created.UnixMilli(),
				Message: v.Message,
			}
			if v.ParentVersion != v.Version {
				info.ParentVersion = v.ParentVersion
			}
			if v.CreatedBy > 0 {
				info.CreatedBy = fmt.Sprintf("%d", v.CreatedBy)
			}
			versions.Items = append(versions.Items, info)
		}
		responder.Object(http.StatusOK, versions)
	}), nil
}
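Illustration, not from the commit: the Connect handler above distinguishes a single-version request from a list request purely by inspecting the URL path. With a hypothetical request path, the same parsing looks like:

```go
package main

import (
	"fmt"
	"strconv"
	"strings"
)

func main() {
	// Hypothetical path; the handler only cares about what follows the
	// last "/" once "/versions/" is present.
	path := "/apis/dashboard.grafana.app/v0alpha1/namespaces/default/dashboards/abc/versions/4"
	if idx := strings.LastIndex(path, "/versions/"); idx > 0 {
		key := path[strings.LastIndex(path, "/")+1:]
		version, err := strconv.Atoi(key)
		fmt.Println(version, err) // 4 <nil>
	}
}
```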
@@ -9,7 +9,6 @@ import (
	"github.com/grafana/grafana/pkg/services/sqlstore/migrations/ssosettings"
	"github.com/grafana/grafana/pkg/services/sqlstore/migrations/ualert"
	. "github.com/grafana/grafana/pkg/services/sqlstore/migrator"
	basicResourceMigrations "github.com/grafana/grafana/pkg/storage/unified/basic/migrations"
)

// --- Migration Guide line ---
@@ -124,8 +123,6 @@ func (oss *OSSMigrations) AddMigration(mg *Migrator) {
	accesscontrol.AddManagedFolderAlertingSilencesActionsMigrator(mg)

	ualert.AddRecordingRuleColumns(mg)

	basicResourceMigrations.AddBasicResourceMigrations(mg)
}

func addStarMigrations(mg *Migrator) {
@@ -1,30 +0,0 @@
This includes four packages

## resource

This is a Go module that can be imported into external projects.

This includes the protobuf based client+server and all the logic required to convert requests into write events.

Protobuf TODO?
* can/should we use upstream k8s proto for query object?
* starting a project today... should we use proto3?

## apistore

The apiserver storage.Interface that links the storage to kubernetes

Mostly a copy of the

## entitybridge

Implements a resource store using the existing entity service. This will let us evolve the
kubernetes interface.Store using existing system structures while we explore better options.

## sqlnext

VERY early stub exploring an alternative SQL structure... really just a stub right now
@@ -1,103 +0,0 @@
package apistore

import (
	"context"
	"encoding/json"
	"net/http"
	"strconv"

	metav1 "k8s.io/apimachinery/pkg/apis/meta/v1"
	"k8s.io/apimachinery/pkg/runtime"
	"k8s.io/apimachinery/pkg/runtime/schema"
	"k8s.io/apiserver/pkg/registry/rest"

	"github.com/grafana/grafana/pkg/services/apiserver/endpoints/request"
	"github.com/grafana/grafana/pkg/storage/unified/resource"
)

type HistoryConnector interface {
	rest.Storage
	rest.Connecter
	rest.StorageMetadata
}

func NewHistoryConnector(search resource.ResourceIndexServer, gr schema.GroupResource) HistoryConnector {
	return &historyREST{
		search: search,
		gr:     gr,
	}
}

type historyREST struct {
	search resource.ResourceIndexServer // should be a client!
	gr     schema.GroupResource
}

func (r *historyREST) New() runtime.Object {
	return &metav1.PartialObjectMetadataList{}
}

func (r *historyREST) Destroy() {
}

func (r *historyREST) ConnectMethods() []string {
	return []string{"GET"}
}

func (r *historyREST) ProducesMIMETypes(verb string) []string {
	return nil
}

func (r *historyREST) ProducesObject(verb string) interface{} {
	return &metav1.PartialObjectMetadataList{}
}

func (r *historyREST) NewConnectOptions() (runtime.Object, bool, string) {
	return nil, false, ""
}

func (r *historyREST) Connect(ctx context.Context, uid string, opts runtime.Object, responder rest.Responder) (http.Handler, error) {
	info, err := request.NamespaceInfoFrom(ctx, true)
	if err != nil {
		return nil, err
	}

	key := &resource.ResourceKey{
		Namespace: info.Value,
		Group:     r.gr.Group,
		Resource:  r.gr.Resource,
		Name:      uid,
	}

	return http.HandlerFunc(func(w http.ResponseWriter, req *http.Request) {
		query := req.URL.Query()
		rsp, err := r.search.History(ctx, &resource.HistoryRequest{
			NextPageToken: query.Get("token"),
			Limit:         100, // TODO, from query
			Key:           key,
		})
		if err != nil {
			responder.Error(err)
			return
		}

		list := &metav1.PartialObjectMetadataList{
			ListMeta: metav1.ListMeta{
				Continue: rsp.NextPageToken,
			},
		}
		if rsp.ResourceVersion > 0 {
			list.ResourceVersion = strconv.FormatInt(rsp.ResourceVersion, 10)
		}
		for _, v := range rsp.Items {
			partial := metav1.PartialObjectMetadata{}
			err = json.Unmarshal(v.PartialObjectMeta, &partial)
			if err != nil {
				responder.Error(err)
				return
			}
			list.Items = append(list.Items, partial)
		}
		responder.Object(http.StatusOK, list)
	}), nil
}
@@ -86,9 +86,9 @@ func getKey(val string) (*resource.ResourceKey, error) {
	if err != nil {
		return nil, err
	}
	if k.Group == "" {
		return nil, apierrors.NewInternalError(fmt.Errorf("missing group in request"))
	}
	// if k.Group == "" {
	// 	return nil, apierrors.NewInternalError(fmt.Errorf("missing group in request"))
	// }
	if k.Resource == "" {
		return nil, apierrors.NewInternalError(fmt.Errorf("missing resource in request"))
	}
@@ -164,9 +164,9 @@ func (s *Storage) Delete(ctx context.Context, key string, out runtime.Object, pr
		return err
	}

	if validateDeletion != nil {
		return fmt.Errorf("not supported (validate deletion)")
	}
	// if validateDeletion != nil {
	// 	return fmt.Errorf("not supported (validate deletion)")
	// }

	cmd := &resource.DeleteRequest{Key: k}
	if preconditions != nil {
@@ -1,227 +0,0 @@
package basic

import (
	"context"
	"database/sql"
	"fmt"
	"log/slog"
	"time"

	"go.opentelemetry.io/otel/trace"
	"go.opentelemetry.io/otel/trace/noop"
	"k8s.io/apimachinery/pkg/runtime/schema"

	"github.com/grafana/grafana/pkg/infra/db"
	"github.com/grafana/grafana/pkg/services/sqlstore/session"
	"github.com/grafana/grafana/pkg/storage/unified/resource"
)

const trace_prefix = "basic.sql.resource."
const table_name = "basic_resource"

type ResourceServerOptions struct {
	DB            db.DB
	GroupResource schema.GroupResource
	Tracer        trace.Tracer
	MaxItems      int
}

// This storage engine is not designed to support large collections.
// The goal with this package is a production ready implementation that
// can support modest requirements. By design, this will scan all
// results on all list operations, so we do not want this to grow too big.
func NewResourceServer(opts ResourceServerOptions) (resource.ResourceServer, error) {
	if opts.Tracer == nil {
		opts.Tracer = noop.NewTracerProvider().Tracer("resource-server")
	}

	store := &basicSQLBackend{
		db:     opts.DB,
		gr:     opts.GroupResource,
		tracer: opts.Tracer,
		log:    slog.Default().With("logger", "basic-sql-resource"),
	}

	return resource.NewResourceServer(resource.ResourceServerOptions{
		Tracer:      opts.Tracer,
		Backend:     store,
		Diagnostics: store,
		Lifecycle:   store,
	})
}

type basicSQLBackend struct {
	log      *slog.Logger
	db       db.DB
	gr       schema.GroupResource
	maxItems int
	tracer   trace.Tracer

	// Simple watch stream -- NOTE, this only works for single tenant!
	broadcaster resource.Broadcaster[*resource.WrittenEvent]

	stream chan<- *resource.WrittenEvent
}

func (s *basicSQLBackend) Init() (err error) {
	s.broadcaster, err = resource.NewBroadcaster(context.Background(), func(c chan<- *resource.WrittenEvent) error {
		s.stream = c
		return nil
	})
	return
}

func (s *basicSQLBackend) IsHealthy(ctx context.Context, r *resource.HealthCheckRequest) (*resource.HealthCheckResponse, error) {
	return &resource.HealthCheckResponse{Status: resource.HealthCheckResponse_SERVING}, nil
}

func (s *basicSQLBackend) Stop() {
	if s.stream != nil {
		close(s.stream)
	}
}

func (s *basicSQLBackend) validateKey(key *resource.ResourceKey) error {
	if s.gr.Group != "" && s.gr.Group != key.Group {
		return fmt.Errorf("expected group: %s, found: %s", s.gr.Group, key.Group)
	}
	if s.gr.Resource != "" && s.gr.Resource != key.Resource {
		return fmt.Errorf("expected resource: %s, found: %s", s.gr.Resource, key.Resource)
	}
	return nil
}

func (s *basicSQLBackend) WriteEvent(ctx context.Context, event resource.WriteEvent) (rv int64, err error) {
	_, span := s.tracer.Start(ctx, trace_prefix+"WriteEvent")
	defer span.End()

	key := event.Key
	err = s.validateKey(key)
	if err != nil {
		return
	}
	gvk := event.Object.GetGroupVersionKind()

	// This delegates resource version creation to auto-increment.
	// At scale, this is not a great strategy since everything is locked across all resources while this executes
	appender := func(tx *session.SessionTx) (int64, error) {
		return tx.ExecWithReturningId(ctx,
			`INSERT INTO `+table_name+` (api_group,api_version,namespace,resource,name,value) VALUES($1,$2,$3,$4,$5,$6)`,
			key.Group, gvk.Version, key.Namespace, key.Resource, key.Name, event.Value)
	}

	wiper := func(tx *session.SessionTx) (sql.Result, error) {
		return tx.Exec(ctx, `DELETE FROM `+table_name+` WHERE `+
			`api_group=$1 AND `+
			`namespace=$2 AND `+
			`resource=$3 AND `+
			`name=$4`,
			key.Group, key.Namespace, key.Resource, key.Name)
	}

	err = s.db.GetSqlxSession().WithTransaction(ctx, func(tx *session.SessionTx) error {
		switch event.Type {
		case resource.WatchEvent_ADDED:
			count := 0
			err = tx.Get(ctx, &count, `SELECT count(*) FROM `+table_name+` WHERE api_group=$1 AND resource=$2`, key.Group, key.Resource)
			if err != nil {
				return err
			}
			if count >= s.maxItems {
				return fmt.Errorf("the storage backend only supports %d items", s.maxItems)
			}
			rv, err = appender(tx)

		case resource.WatchEvent_MODIFIED:
			_, err = wiper(tx)
			if err == nil {
				rv, err = appender(tx)
			}
		case resource.WatchEvent_DELETED:
			_, err = wiper(tx)
		default:
			return fmt.Errorf("unsupported event type")
		}
		return err
	})

	// Async notify all subscribers
	if s.stream != nil {
		go func() {
			write := &resource.WrittenEvent{
				WriteEvent:      event,
				Timestamp:       time.Now().UnixMilli(),
				ResourceVersion: rv,
			}
			s.stream <- write
		}()
	}
	return
}

func (s *basicSQLBackend) WatchWriteEvents(ctx context.Context) (<-chan *resource.WrittenEvent, error) {
	return s.broadcaster.Subscribe(ctx)
}

func (s *basicSQLBackend) Read(ctx context.Context, req *resource.ReadRequest) (*resource.ReadResponse, error) {
	_, span := s.tracer.Start(ctx, trace_prefix+"Read")
	defer span.End()

	key := req.Key
	err := s.validateKey(key)
	if err != nil {
		return nil, err
	}

	rows, err := s.db.GetSqlxSession().Query(ctx, "SELECT rv,value FROM "+table_name+" WHERE api_group=$1 AND namespace=$2 AND resource=$3 AND name=$4",
		key.Group, key.Namespace, key.Resource, key.Name)
	if err != nil {
		return nil, err
	}
	if rows.Next() {
		rsp := &resource.ReadResponse{}
		err = rows.Scan(&rsp.ResourceVersion, &rsp.Value)
		if err == nil && rows.Next() {
			return nil, fmt.Errorf("unexpected multiple results found") // should not be possible with the index strategy
		}
		return rsp, err
	}
	return nil, fmt.Errorf("NOT FOUND ERROR")
}

// This implementation is only ever called from inside single tenant grafana, so there is no need to decode
// the value and try filtering first -- that will happen one layer up anyway
func (s *basicSQLBackend) PrepareList(ctx context.Context, req *resource.ListRequest) (*resource.ListResponse, error) {
	if req.NextPageToken != "" {
		return nil, fmt.Errorf("this storage backend does not support paging")
	}
	_, span := s.tracer.Start(ctx, trace_prefix+"PrepareList")
	defer span.End()

	key := req.Options.Key
	err := s.validateKey(key)
	if err != nil {
		return nil, err
	}
	rsp := &resource.ListResponse{}
	rows, err := s.db.GetSqlxSession().Query(ctx,
		"SELECT rv,value FROM "+table_name+
			" WHERE api_group=$1 AND namespace=$2 AND resource=$3 "+
			" ORDER BY name asc LIMIT $4",
		key.Group, key.Namespace, key.Resource, s.maxItems+1)
	if err != nil {
		return nil, err
	}
	for rows.Next() {
		wrapper := &resource.ResourceWrapper{}
		err = rows.Scan(&wrapper.ResourceVersion, &wrapper.Value)
		if err != nil {
			break
		}
		rsp.Items = append(rsp.Items, wrapper)
	}
	if len(rsp.Items) > s.maxItems {
		err = fmt.Errorf("more values than are supported by this storage engine")
	}
	return rsp, err
}
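Aside: this (now deleted) backend derives each resource version from the table's auto-increment key: a modify is a delete plus a fresh insert inside one transaction, so the inserted row's id becomes the new RV. A stripped-down sketch of that idea against a hypothetical obj table (SQLite-style schema; every name here is invented for illustration):

```go
package versioning

import (
	"context"
	"database/sql"
)

// upsertWithNewRV replaces the current row for (namespace, name) and returns
// the auto-increment id of the inserted row, which doubles as the new
// resource version. Assumes:
//
//	CREATE TABLE obj(
//	  rv INTEGER PRIMARY KEY AUTOINCREMENT,
//	  namespace TEXT, name TEXT, value BLOB);
func upsertWithNewRV(ctx context.Context, db *sql.DB, ns, name string, value []byte) (int64, error) {
	tx, err := db.BeginTx(ctx, nil)
	if err != nil {
		return 0, err
	}
	defer func() { _ = tx.Rollback() }() // no-op after a successful Commit

	if _, err := tx.ExecContext(ctx,
		`DELETE FROM obj WHERE namespace=? AND name=?`, ns, name); err != nil {
		return 0, err
	}
	res, err := tx.ExecContext(ctx,
		`INSERT INTO obj(namespace, name, value) VALUES(?, ?, ?)`, ns, name, value)
	if err != nil {
		return 0, err
	}
	rv, err := res.LastInsertId()
	if err != nil {
		return 0, err
	}
	return rv, tx.Commit()
}
```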
@ -1,26 +0,0 @@
|
||||
package migrations
|
||||
|
||||
import "github.com/grafana/grafana/pkg/services/sqlstore/migrator"
|
||||
|
||||
func AddBasicResourceMigrations(mg *migrator.Migrator) {
|
||||
mg.AddMigration("create unified storage basic resource table", migrator.NewAddTableMigration(migrator.Table{
|
||||
Name: "basic_resource",
|
||||
Columns: []*migrator.Column{
|
||||
// Sequential resource version
|
||||
{Name: "rv", Type: migrator.DB_BigInt, Nullable: false, IsPrimaryKey: true, IsAutoIncrement: true},
|
||||
|
||||
// Properties that exist in path/key (and duplicated in the json value)
|
||||
{Name: "api_group", Type: migrator.DB_NVarchar, Length: 190, Nullable: false}, // avoid "group" so escaping is easier :)
|
||||
{Name: "api_version", Type: migrator.DB_NVarchar, Length: 32, Nullable: false}, // informational
|
||||
{Name: "namespace", Type: migrator.DB_NVarchar, Length: 63, Nullable: true}, // namespace is not required (cluster scope)
|
||||
{Name: "resource", Type: migrator.DB_NVarchar, Length: 190, Nullable: false},
|
||||
{Name: "name", Type: migrator.DB_NVarchar, Length: 190, Nullable: false},
|
||||
|
||||
// The k8s resource JSON text (without the resourceVersion populated)
|
||||
{Name: "value", Type: migrator.DB_MediumText, Nullable: false},
|
||||
},
|
||||
Indices: []*migrator.Index{
|
||||
{Cols: []string{"api_group", "resource", "namespace", "name"}, Type: migrator.UniqueIndex},
|
||||
},
|
||||
}))
|
||||
}
@ -3,101 +3,35 @@ package entitybridge
import (
"context"
"fmt"
"os"
"path/filepath"
"time"

"gocloud.dev/blob/fileblob"
"k8s.io/apimachinery/pkg/selection"
"k8s.io/klog/v2"

grafanaregistry "github.com/grafana/grafana/pkg/apiserver/registry/generic"
"github.com/grafana/grafana/pkg/infra/db"
"github.com/grafana/grafana/pkg/infra/tracing"
"github.com/grafana/grafana/pkg/services/featuremgmt"
"github.com/grafana/grafana/pkg/services/store/entity"
"github.com/grafana/grafana/pkg/services/store/entity/db/dbimpl"
"github.com/grafana/grafana/pkg/services/store/entity/sqlstash"
"github.com/grafana/grafana/pkg/setting"
"github.com/grafana/grafana/pkg/storage/unified/resource"
)

// Creates a ResourceServer using the existing entity tables
// NOTE: most of the field values are ignored
func ProvideResourceServer(db db.DB, cfg *setting.Cfg, features featuremgmt.FeatureToggles, tracer tracing.Tracer) (resource.ResourceServer, error) {
opts := resource.ResourceServerOptions{
Tracer: tracer,
// NOTE: the server is optional and only used to pass init+close functions
func EntityAsResourceServer(client entity.EntityStoreClient, server sqlstash.SqlEntityServer, tracer tracing.Tracer) (resource.ResourceServer, error) {
if client == nil {
return nil, fmt.Errorf("client must be defined")
}

supportBlobs := true
useEntitySQL := true

// Create a local blob filesystem blob store
if supportBlobs {
dir := filepath.Join(cfg.DataPath, "unistore", "blobs")
if err := os.MkdirAll(dir, 0o750); err != nil {
return nil, err
}

bucket, err := fileblob.OpenBucket(dir, &fileblob.Options{
CreateDir: true,
Metadata: fileblob.MetadataDontWrite, // skip
})
if err != nil {
return nil, err
}
opts.Blob, err = resource.NewCDKBlobStore(context.Background(), resource.CDKBlobStoreOptions{
Tracer: tracer,
Bucket: bucket,
URLExpiration: time.Minute * 20,
})
if err != nil {
return nil, err
}
// Use this bridge as the resource store
bridge := &entityBridge{
client: client,
server: server,
}

if useEntitySQL {
eDB, err := dbimpl.ProvideEntityDB(db, cfg, features, tracer)
if err != nil {
return nil, err
}

server, err := sqlstash.ProvideSQLEntityServer(eDB, tracer)
if err != nil {
return nil, err
}
client := entity.NewEntityStoreClientLocal(server)

// Use this bridge as the resource store
bridge := &entityBridge{
server: server,
client: client,
}
opts.Backend = bridge
opts.Diagnostics = bridge
opts.Lifecycle = bridge
} else {
dir := filepath.Join(cfg.DataPath, "unistore", "resource")
if err := os.MkdirAll(dir, 0o750); err != nil {
return nil, err
}

bucket, err := fileblob.OpenBucket(dir, &fileblob.Options{
CreateDir: true,
Metadata: fileblob.MetadataDontWrite, // skip
})
if err != nil {
return nil, err
}
opts.Backend, err = resource.NewCDKBackend(context.Background(), resource.CDKBackendOptions{
Tracer: tracer,
Bucket: bucket,
})
if err != nil {
return nil, err
}
}
return resource.NewResourceServer(opts)
return resource.NewResourceServer(resource.ResourceServerOptions{
Tracer: tracer,
Backend: bridge,
Diagnostics: bridge,
Lifecycle: bridge,
})
}

// This is only created if we use the entity implementation
@ -9,6 +9,7 @@ import (
"strconv"
"strings"
"sync"
"sync/atomic"
"time"

"go.opentelemetry.io/otel/trace"
@ -25,8 +26,6 @@ type CDKBackendOptions struct {
Tracer trace.Tracer
Bucket *blob.Bucket
RootFolder string

NextResourceVersion NextResourceVersion
}

func NewCDKBackend(ctx context.Context, opts CDKBackendOptions) (StorageBackend, error) {
@ -49,25 +48,22 @@ func NewCDKBackend(ctx context.Context, opts CDKBackendOptions) (StorageBackend,
return nil, fmt.Errorf("the root folder does not exist")
}

// This is not safe when running in HA!
if opts.NextResourceVersion == nil {
opts.NextResourceVersion = newResourceVersionCounter(time.Now().UnixMilli())
}

return &cdkBackend{
backend := &cdkBackend{
tracer: opts.Tracer,
bucket: opts.Bucket,
root: opts.RootFolder,
nextRV: opts.NextResourceVersion,
}, nil
}
backend.rv.Swap(time.Now().UnixMilli())
return backend, nil
}
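Editor's note: the constructor change above replaces the injectable NextResourceVersion counter with an atomic counter seeded from the wall clock; WriteEvent later bumps it with s.rv.Add(1). A minimal standalone sketch of that scheme (not HA-safe, exactly as the comment above warns):

package main

import (
	"fmt"
	"sync/atomic"
	"time"
)

type rvCounter struct{ rv atomic.Int64 }

func newRVCounter() *rvCounter {
	c := &rvCounter{}
	// Seed with wall-clock millis so versions stay monotonic across restarts
	// (as long as the clock does not go backwards).
	c.rv.Swap(time.Now().UnixMilli())
	return c
}

func (c *rvCounter) next() int64 { return c.rv.Add(1) }

func main() {
	c := newRVCounter()
	fmt.Println(c.next(), c.next()) // strictly increasing within one process
}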

type cdkBackend struct {
tracer trace.Tracer
bucket *blob.Bucket
root string
nextRV NextResourceVersion
mutex sync.Mutex

mutex sync.Mutex
rv atomic.Int64

// Simple watch stream -- NOTE, this only works for single tenant!
broadcaster Broadcaster[*WrittenEvent]
@ -117,7 +113,7 @@ func (s *cdkBackend) WriteEvent(ctx context.Context, event WriteEvent) (rv int64
s.mutex.Lock()
defer s.mutex.Unlock()

rv = s.nextRV()
rv = s.rv.Add(1)
err = s.bucket.WriteAll(ctx, s.getPath(event.Key, rv), event.Value, &blob.WriterOptions{
ContentType: "application/json",
})
@ -163,7 +159,7 @@ func (s *cdkBackend) Read(ctx context.Context, req *ReadRequest) (*ReadResponse,
}

raw, err := s.bucket.ReadAll(ctx, path)
if err == nil && isDeletedMarker(raw) {
if raw == nil || (err == nil && isDeletedMarker(raw)) {
return nil, apierrors.NewNotFound(schema.GroupResource{
Group: req.Key.Group,
Resource: req.Key.Resource,
@ -193,7 +189,9 @@ func (s *cdkBackend) PrepareList(ctx context.Context, req *ListRequest) (*ListRe
return nil, err
}

rsp := &ListResponse{}
rsp := &ListResponse{
ResourceVersion: s.rv.Load(),
}
for _, item := range resources {
latest := item.versions[0]
raw, err := s.bucket.ReadAll(ctx, latest.key)
@ -1,176 +0,0 @@
package resource

import (
"bytes"
context "context"
"crypto/md5"
"encoding/hex"
"fmt"
"mime"
"time"

"github.com/google/uuid"
"go.opentelemetry.io/otel/trace"
"go.opentelemetry.io/otel/trace/noop"
"gocloud.dev/blob"
_ "gocloud.dev/blob/fileblob"
_ "gocloud.dev/blob/memblob"

"github.com/grafana/grafana/pkg/apimachinery/utils"
)

type CDKBlobStoreOptions struct {
Tracer trace.Tracer
Bucket *blob.Bucket
RootFolder string
URLExpiration time.Duration
}

func NewCDKBlobStore(ctx context.Context, opts CDKBlobStoreOptions) (BlobStore, error) {
if opts.Tracer == nil {
opts.Tracer = noop.NewTracerProvider().Tracer("cdk-blob-store")
}

if opts.Bucket == nil {
return nil, fmt.Errorf("missing bucket")
}
if opts.URLExpiration < 1 {
opts.URLExpiration = time.Minute * 10 // 10 min default
}

found, _, err := opts.Bucket.ListPage(ctx, blob.FirstPageToken, 1, &blob.ListOptions{
Prefix: opts.RootFolder,
Delimiter: "/",
})
if err != nil {
return nil, err
}
if found == nil {
return nil, fmt.Errorf("the root folder does not exist")
}

return &cdkBlobStore{
tracer: opts.Tracer,
bucket: opts.Bucket,
root: opts.RootFolder,
cansignurls: false, // TODO depends on the implementation
expiration: opts.URLExpiration,
}, nil
}

type cdkBlobStore struct {
tracer trace.Tracer
bucket *blob.Bucket
root string
cansignurls bool
expiration time.Duration
}

func (s *cdkBlobStore) getBlobPath(key *ResourceKey, info *utils.BlobInfo) (string, error) {
var buffer bytes.Buffer
buffer.WriteString(s.root)

if key.Namespace == "" {
buffer.WriteString("__cluster__/")
} else {
buffer.WriteString(key.Namespace)
buffer.WriteString("/")
}

if key.Group == "" {
return "", fmt.Errorf("missing group")
}
buffer.WriteString(key.Group)
buffer.WriteString("/")

if key.Resource == "" {
return "", fmt.Errorf("missing resource")
}
buffer.WriteString(key.Resource)
buffer.WriteString("/")

if key.Name == "" {
return "", fmt.Errorf("missing name")
}
buffer.WriteString(key.Name)
buffer.WriteString("/")
buffer.WriteString(info.UID)

ext, err := mime.ExtensionsByType(info.MimeType)
if err != nil {
return "", err
}
if len(ext) > 0 {
buffer.WriteString(ext[0])
}
return buffer.String(), nil
}
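Editor's note: given the layout built above, a key of namespace default, group playlist.grafana.app, resource playlists, name x, with blob UID u and MIME type application/json, comes out roughly as <root>default/playlist.grafana.app/playlists/x/u.json. The extension is whatever mime.ExtensionsByType returns first, which can vary by platform; a quick standalone check:

package main

import (
	"fmt"
	"mime"
)

func main() {
	// ExtensionsByType returns extensions with a leading dot; the set (and
	// order) can vary by platform because the system mime table is consulted.
	ext, err := mime.ExtensionsByType("application/json")
	if err != nil {
		panic(err)
	}
	fmt.Println(ext) // e.g. [.json]
}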

func (s *cdkBlobStore) SupportsSignedURLs() bool {
return s.cansignurls
}

func (s *cdkBlobStore) PutBlob(ctx context.Context, req *PutBlobRequest) (*PutBlobResponse, error) {
info := &utils.BlobInfo{
UID: uuid.New().String(),
}
info.SetContentType(req.ContentType)
path, err := s.getBlobPath(req.Resource, info)
if err != nil {
return nil, err
}

rsp := &PutBlobResponse{Uid: info.UID, MimeType: info.MimeType, Charset: info.Charset}
if req.Method == PutBlobRequest_HTTP {
rsp.Url, err = s.bucket.SignedURL(ctx, path, &blob.SignedURLOptions{
Method: "PUT",
Expiry: s.expiration,
ContentType: req.ContentType,
})
return rsp, err
}
if len(req.Value) < 1 {
return nil, fmt.Errorf("missing content value")
}

// Write the value
err = s.bucket.WriteAll(ctx, path, req.Value, &blob.WriterOptions{
ContentType: req.ContentType,
})
if err != nil {
return nil, err
}

attrs, err := s.bucket.Attributes(ctx, path)
if err != nil {
return nil, err
}
rsp.Size = attrs.Size

// Set the MD5 hash if missing
if len(attrs.MD5) == 0 {
h := md5.New()
_, _ = h.Write(req.Value)
attrs.MD5 = h.Sum(nil)
}
rsp.Hash = hex.EncodeToString(attrs.MD5[:])
return rsp, err
}

func (s *cdkBlobStore) GetBlob(ctx context.Context, resource *ResourceKey, info *utils.BlobInfo, mustProxy bool) (*GetBlobResponse, error) {
path, err := s.getBlobPath(resource, info)
if err != nil {
return nil, err
}
rsp := &GetBlobResponse{ContentType: info.ContentType()}
if mustProxy || !s.cansignurls {
rsp.Value, err = s.bucket.ReadAll(ctx, path)
return rsp, err
}
rsp.Url, err = s.bucket.SignedURL(ctx, path, &blob.SignedURLOptions{
Method: "GET",
Expiry: s.expiration,
ContentType: rsp.ContentType,
})
return rsp, err
}
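Editor's note: the read path above either proxies the bytes or hands back a presigned URL. A condensed sketch of that decision against a gocloud.dev bucket (the function and parameter names are mine; only ReadAll and SignedURL are real gocloud.dev/blob calls):

package sketch

import (
	"context"
	"time"

	"gocloud.dev/blob"
)

// serveBlob mirrors the branch in GetBlob above: proxy the bytes when the
// caller demands it or the bucket cannot sign URLs, otherwise presign a GET.
func serveBlob(ctx context.Context, bucket *blob.Bucket, path string, canSign, mustProxy bool, expiry time.Duration) (value []byte, url string, err error) {
	if mustProxy || !canSign {
		value, err = bucket.ReadAll(ctx, path) // bounded by protobuf message size in practice
		return value, "", err
	}
	url, err = bucket.SignedURL(ctx, path, &blob.SignedURLOptions{Method: "GET", Expiry: expiry})
	return nil, url, err
}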
@ -1,67 +0,0 @@
package resource

import (
"context"
"fmt"
"os"
"testing"

"github.com/stretchr/testify/require"
"gocloud.dev/blob/fileblob"
"gocloud.dev/blob/memblob"

"github.com/grafana/grafana/pkg/apimachinery/utils"
)

func TestCDKBlobStore(t *testing.T) {
bucket := memblob.OpenBucket(nil)
if false {
tmp, err := os.MkdirTemp("", "xxx-*")
require.NoError(t, err)

bucket, err = fileblob.OpenBucket(tmp, &fileblob.Options{
CreateDir: true,
Metadata: fileblob.MetadataDontWrite, // skip
})
require.NoError(t, err)

fmt.Printf("ROOT: %s\n\n", tmp)
}
ctx := context.Background()

store, err := NewCDKBlobStore(ctx, CDKBlobStoreOptions{
Bucket: bucket,
//RootFolder: "xyz",
})
require.NoError(t, err)

t.Run("can write then read a blob", func(t *testing.T) {
raw := testdata(t, "01_create_playlist.json")
key := &ResourceKey{
Group: "playlist.grafana.app",
Resource: "rrrr", // can be anything
Namespace: "default",
Name: "fdgsv37qslr0ga",
}

rsp, err := store.PutBlob(ctx, &PutBlobRequest{
Resource: key,
Method: PutBlobRequest_GRPC,
ContentType: "application/json",
Value: raw,
})
require.NoError(t, err)
require.Equal(t, "4933beea0c6d6dfd73150451098c70f0", rsp.Hash)

found, err := store.GetBlob(ctx, key, &utils.BlobInfo{
UID: rsp.Uid,
Size: rsp.Size,
Hash: rsp.Hash,
MimeType: rsp.MimeType,
Charset: rsp.Charset,
}, false)
require.NoError(t, err)
require.Equal(t, raw, found.Value)
require.Equal(t, "application/json", found.ContentType)
})
}
@ -25,22 +25,6 @@ func NewLocalResourceStoreClient(server ResourceStoreServer) ResourceStoreClient
return NewResourceStoreClient(grpchan.InterceptClientConn(channel, grpcUtils.UnaryClientInterceptor, grpcUtils.StreamClientInterceptor))
}

func NewLocalResourceSearchClient(server ResourceStoreServer) ResourceIndexClient {
channel := &inprocgrpc.Channel{}

auth := &grpcUtils.Authenticator{}

channel.RegisterService(
grpchan.InterceptServer(
&ResourceStore_ServiceDesc,
grpcAuth.UnaryServerInterceptor(auth.Authenticate),
grpcAuth.StreamServerInterceptor(auth.Authenticate),
),
server,
)
return NewResourceIndexClient(grpchan.InterceptClientConn(channel, grpcUtils.UnaryClientInterceptor, grpcUtils.StreamClientInterceptor))
}

func NewResourceStoreClientGRPC(channel *grpc.ClientConn) ResourceStoreClient {
return NewResourceStoreClient(grpchan.InterceptClientConn(channel, grpcUtils.UnaryClientInterceptor, grpcUtils.StreamClientInterceptor))
}
@ -80,8 +80,9 @@ google.golang.org/api v0.176.0 h1:dHj1/yv5Dm/eQTXiP9hNCRT3xzJHWXeNdRq29XbMxoE=
google.golang.org/genproto v0.0.0-20240227224415-6ceb2ff114de h1:F6qOa9AZTYJXOUEr4jDysRDLrm4PHePlge4v4TGAlxY=
google.golang.org/genproto/googleapis/api v0.0.0-20240604185151-ef581f913117 h1:+rdxYoE3E5htTEWIe15GlN6IfvbURM//Jt0mmkmm6ZU=
google.golang.org/genproto/googleapis/rpc v0.0.0-20240604185151-ef581f913117 h1:1GBuWVLM/KMVUv1t1En5Gs+gFZCNd360GGb4sSxtrhU=
google.golang.org/genproto/googleapis/rpc v0.0.0-20240701130421-f6361c86f094 h1:BwIjyKYGsK9dMCBOorzRri8MQwmi7mT9rGHsCEinZkA=
google.golang.org/grpc v1.64.0 h1:KH3VH9y/MgNQg1dE7b3XfVK0GsPSIzJwdF617gUSbvY=
google.golang.org/protobuf v1.34.1 h1:9ddQBjfCyZPOHPUiPxpYESBLc+T8P3E+Vo4IbKZgFWg=
google.golang.org/protobuf v1.34.2 h1:6xV6lTsCfpGD21XK49h7MhtcApnLqkfYgPcdHftf6hg=
gopkg.in/check.v1 v1.0.0-20201130134442-10cb98267c6c h1:Hei/4ADfdWqJk1ZMxUNpqntNwaWcugrBjAiHlqqRiVk=
gopkg.in/inf.v0 v0.9.1 h1:73M5CoZyi3ZLMOyDlQh031Cx6N9NDJ2Vvfl76EDAgDc=
gopkg.in/yaml.v2 v2.4.0 h1:D8xgwECY7CYvx+Y2n4sBz93Jn9JRvxdiyyo8CTfuKaY=
@ -2,14 +2,11 @@ package resource

import (
"context"

"github.com/grafana/grafana/pkg/apimachinery/utils"
)

var (
_ ResourceIndexServer = &noopService{}
_ DiagnosticsServer = &noopService{}
_ LifecycleHooks = &noopService{}
_ DiagnosticsServer = &noopService{}
_ LifecycleHooks = &noopService{}
)

// noopService is a helper implementation to simplify tests
@ -42,25 +39,3 @@ func (n *noopService) Read(context.Context, *ReadRequest) (*ReadResponse, error)
func (n *noopService) List(context.Context, *ListRequest) (*ListResponse, error) {
return nil, ErrNotImplementedYet
}

// History implements ResourceServer.
func (n *noopService) History(context.Context, *HistoryRequest) (*HistoryResponse, error) {
return nil, ErrNotImplementedYet
}

// Origin implements ResourceServer.
func (n *noopService) Origin(context.Context, *OriginRequest) (*OriginResponse, error) {
return nil, ErrNotImplementedYet
}

func (n *noopService) SupportsSignedURLs() bool {
return false
}

func (n *noopService) PutBlob(context.Context, *PutBlobRequest) (*PutBlobResponse, error) {
return nil, ErrNotImplementedYet
}

func (n *noopService) GetBlob(ctx context.Context, resource *ResourceKey, info *utils.BlobInfo, mustProxy bool) (*GetBlobResponse, error) {
return nil, ErrNotImplementedYet
}

File diff suppressed because it is too large
@ -122,11 +122,8 @@ message DeleteResponse {
// Status code
StatusResult status = 1;

// The new resource version
// The resource version for the deletion marker
int64 resource_version = 2;

// The deleted payload
bytes value = 3;
}

message ReadRequest {
@ -159,14 +156,6 @@ message Requirement {
repeated string values = 3; // typically one value, but depends on the operator
}

message Sort {
enum Order {
ASC = 0;
DESC = 1;
}
string field = 1;
Order order = 2;
}

message ListOptions {
// Group+Namespace+Resource (not name)
@ -270,77 +259,6 @@ message WatchEvent {
Resource previous = 4;
}

message HistoryRequest {
// Starting from the requested page (other query parameters must match!)
string next_page_token = 1;

// Maximum number of items to return
int64 limit = 2;

// Resource identifier
ResourceKey key = 3;

// List the deleted values (eg, show trash)
bool show_deleted = 4;
}

message HistoryResponse {
repeated ResourceMeta items = 1;

// More results exist... pass this in the next request
string next_page_token = 2;

// ResourceVersion of the list response
int64 resource_version = 3;
}

message OriginRequest {
// Starting from the requested page (other query parameters must match!)
string next_page_token = 1;

// Maximum number of items to return
int64 limit = 2;

// Resource identifier
ResourceKey key = 3;

// Filter by the origin name
string origin = 4;
}

message ResourceOriginInfo {
// The resource
ResourceKey key = 1;

// Size of the full resource body
int32 resource_size = 2;

// Hash for the resource
string resource_hash = 3;

// The origin name
string origin = 4;

// Path on the origin
string path = 5;

// Verification hash from the origin
string hash = 6;

// Change time from the origin
int64 timestamp = 7;
}

message OriginResponse {
repeated ResourceOriginInfo items = 1;

// More results exist... pass this in the next request
string next_page_token = 2;

// ResourceVersion of the list response
int64 resource_version = 3;
}

message HealthCheckRequest {
string service = 1;
}
@ -356,84 +274,6 @@ message HealthCheckResponse {
}


//----------------------------
// Blob Support
//----------------------------

message PutBlobRequest {
enum Method {
// Use the inline raw []byte
GRPC = 0;

// Get a signed URL and PUT the value
HTTP = 1;
}

// The resource that will use this blob
// NOTE: the name may not yet exist, but group+resource are required
ResourceKey resource = 1;

// How to upload
Method method = 2;

// Content type header
string content_type = 3;

// Raw value to write
// Not valid when method == HTTP
bytes value = 4;
}

message PutBlobResponse {
// Status code
StatusResult status = 1;

// The blob uid. This must be saved into the resource to support access
string uid = 2;

// The URL where this value can be PUT
string url = 3;

// Size of the uploaded blob
int64 size = 4;

// Content hash used for an etag
string hash = 5;

// Validated mimetype (from content_type)
string mime_type = 6;

// Validated charset (from content_type)
string charset = 7;
}

message GetBlobRequest {
ResourceKey resource = 1;

// The new resource version
int64 resource_version = 2;

// Do not return a pre-signed URL (when possible)
bool must_proxy_bytes = 3;
}

message GetBlobResponse {
// Status code sent on errors
StatusResult status = 1;

// (optional) When possible, the system will return a presigned URL
// that can be used to actually read the full blob+metadata
// When this is set, neither info nor value will be set
string url = 2;

// Content type
string content_type = 3;

// The raw object value
bytes value = 4;
}


// This provides the CRUD+List+Watch support needed for a k8s apiserver
// The semantics and behaviors of this service are constrained by kubernetes
// This does not understand the resource schemas, only deals with json bytes
@ -455,32 +295,6 @@ service ResourceStore {
rpc Watch(WatchRequest) returns (stream WatchEvent);
}

// Unlike the ResourceStore, this service can be exposed to clients directly
// It should be implemented with efficient indexes and does not need read-after-write semantics
service ResourceIndex {
// TODO: rpc Search(...) ... eventually a typed response

rpc Read(ReadRequest) returns (ReadResponse); // Duplicated -- for client read only usage

// Show resource history (and trash)
rpc History(HistoryRequest) returns (HistoryResponse);

// Used for efficient provisioning
rpc Origin(OriginRequest) returns (OriginResponse);
}

service BlobStore {
// Upload a blob that will be saved in a resource
rpc PutBlob(PutBlobRequest) returns (PutBlobResponse);

// Get blob contents. When possible, this will return a signed URL
// For large payloads, signed URLs are required to avoid protobuf message size limits
rpc GetBlob(GetBlobRequest) returns (GetBlobResponse);

// NOTE: there is no direct access to delete blobs
// >> cleanup will be managed via garbage collection or direct access to the underlying storage
}
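Editor's note: a minimal sketch of driving the BlobStore service from Go, assuming the generated bindings shown later in this diff; the import path, listener address, and insecure local connection are illustrative assumptions, not part of the change:

package main

import (
	"context"
	"log"

	// Assumed import path for the generated bindings in this diff.
	"github.com/grafana/grafana/pkg/storage/unified/resource"

	"google.golang.org/grpc"
	"google.golang.org/grpc/credentials/insecure"
)

func main() {
	conn, err := grpc.Dial("localhost:10000", grpc.WithTransportCredentials(insecure.NewCredentials()))
	if err != nil {
		log.Fatal(err)
	}
	defer conn.Close()

	blobs := resource.NewBlobStoreClient(conn)
	put, err := blobs.PutBlob(context.Background(), &resource.PutBlobRequest{
		Resource:    &resource.ResourceKey{Group: "playlist.grafana.app", Resource: "playlists", Namespace: "default", Name: "x"},
		Method:      resource.PutBlobRequest_GRPC,
		ContentType: "application/json",
		Value:       []byte(`{}`),
	})
	if err != nil {
		log.Fatal(err)
	}
	// The returned uid must be saved on the resource to read the blob back later.
	log.Println(put.Uid, put.Hash)
}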

// Clients can use this service directly
// NOTE: This is read only, with no read-after-write guarantees
service Diagnostics {
@ -347,314 +347,6 @@ var ResourceStore_ServiceDesc = grpc.ServiceDesc{
Metadata: "resource.proto",
}

const (
ResourceIndex_Read_FullMethodName = "/resource.ResourceIndex/Read"
ResourceIndex_History_FullMethodName = "/resource.ResourceIndex/History"
ResourceIndex_Origin_FullMethodName = "/resource.ResourceIndex/Origin"
)

// ResourceIndexClient is the client API for ResourceIndex service.
//
// For semantics around ctx use and closing/ending streaming RPCs, please refer to https://pkg.go.dev/google.golang.org/grpc/?tab=doc#ClientConn.NewStream.
//
// Unlike the ResourceStore, this service can be exposed to clients directly
// It should be implemented with efficient indexes and does not need read-after-write semantics
type ResourceIndexClient interface {
Read(ctx context.Context, in *ReadRequest, opts ...grpc.CallOption) (*ReadResponse, error)
// Show resource history (and trash)
History(ctx context.Context, in *HistoryRequest, opts ...grpc.CallOption) (*HistoryResponse, error)
// Used for efficient provisioning
Origin(ctx context.Context, in *OriginRequest, opts ...grpc.CallOption) (*OriginResponse, error)
}

type resourceIndexClient struct {
cc grpc.ClientConnInterface
}

func NewResourceIndexClient(cc grpc.ClientConnInterface) ResourceIndexClient {
return &resourceIndexClient{cc}
}

func (c *resourceIndexClient) Read(ctx context.Context, in *ReadRequest, opts ...grpc.CallOption) (*ReadResponse, error) {
cOpts := append([]grpc.CallOption{grpc.StaticMethod()}, opts...)
out := new(ReadResponse)
err := c.cc.Invoke(ctx, ResourceIndex_Read_FullMethodName, in, out, cOpts...)
if err != nil {
return nil, err
}
return out, nil
}

func (c *resourceIndexClient) History(ctx context.Context, in *HistoryRequest, opts ...grpc.CallOption) (*HistoryResponse, error) {
cOpts := append([]grpc.CallOption{grpc.StaticMethod()}, opts...)
out := new(HistoryResponse)
err := c.cc.Invoke(ctx, ResourceIndex_History_FullMethodName, in, out, cOpts...)
if err != nil {
return nil, err
}
return out, nil
}

func (c *resourceIndexClient) Origin(ctx context.Context, in *OriginRequest, opts ...grpc.CallOption) (*OriginResponse, error) {
cOpts := append([]grpc.CallOption{grpc.StaticMethod()}, opts...)
out := new(OriginResponse)
err := c.cc.Invoke(ctx, ResourceIndex_Origin_FullMethodName, in, out, cOpts...)
if err != nil {
return nil, err
}
return out, nil
}

// ResourceIndexServer is the server API for ResourceIndex service.
// All implementations should embed UnimplementedResourceIndexServer
// for forward compatibility
//
// Unlike the ResourceStore, this service can be exposed to clients directly
// It should be implemented with efficient indexes and does not need read-after-write semantics
type ResourceIndexServer interface {
Read(context.Context, *ReadRequest) (*ReadResponse, error)
// Show resource history (and trash)
History(context.Context, *HistoryRequest) (*HistoryResponse, error)
// Used for efficient provisioning
Origin(context.Context, *OriginRequest) (*OriginResponse, error)
}

// UnimplementedResourceIndexServer should be embedded to have forward compatible implementations.
type UnimplementedResourceIndexServer struct {
}

func (UnimplementedResourceIndexServer) Read(context.Context, *ReadRequest) (*ReadResponse, error) {
return nil, status.Errorf(codes.Unimplemented, "method Read not implemented")
}
func (UnimplementedResourceIndexServer) History(context.Context, *HistoryRequest) (*HistoryResponse, error) {
return nil, status.Errorf(codes.Unimplemented, "method History not implemented")
}
func (UnimplementedResourceIndexServer) Origin(context.Context, *OriginRequest) (*OriginResponse, error) {
return nil, status.Errorf(codes.Unimplemented, "method Origin not implemented")
}

// UnsafeResourceIndexServer may be embedded to opt out of forward compatibility for this service.
// Use of this interface is not recommended, as added methods to ResourceIndexServer will
// result in compilation errors.
type UnsafeResourceIndexServer interface {
mustEmbedUnimplementedResourceIndexServer()
}

func RegisterResourceIndexServer(s grpc.ServiceRegistrar, srv ResourceIndexServer) {
s.RegisterService(&ResourceIndex_ServiceDesc, srv)
}

func _ResourceIndex_Read_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) {
in := new(ReadRequest)
if err := dec(in); err != nil {
return nil, err
}
if interceptor == nil {
return srv.(ResourceIndexServer).Read(ctx, in)
}
info := &grpc.UnaryServerInfo{
Server: srv,
FullMethod: ResourceIndex_Read_FullMethodName,
}
handler := func(ctx context.Context, req interface{}) (interface{}, error) {
return srv.(ResourceIndexServer).Read(ctx, req.(*ReadRequest))
}
return interceptor(ctx, in, info, handler)
}

func _ResourceIndex_History_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) {
in := new(HistoryRequest)
if err := dec(in); err != nil {
return nil, err
}
if interceptor == nil {
return srv.(ResourceIndexServer).History(ctx, in)
}
info := &grpc.UnaryServerInfo{
Server: srv,
FullMethod: ResourceIndex_History_FullMethodName,
}
handler := func(ctx context.Context, req interface{}) (interface{}, error) {
return srv.(ResourceIndexServer).History(ctx, req.(*HistoryRequest))
}
return interceptor(ctx, in, info, handler)
}

func _ResourceIndex_Origin_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) {
in := new(OriginRequest)
if err := dec(in); err != nil {
return nil, err
}
if interceptor == nil {
return srv.(ResourceIndexServer).Origin(ctx, in)
}
info := &grpc.UnaryServerInfo{
Server: srv,
FullMethod: ResourceIndex_Origin_FullMethodName,
}
handler := func(ctx context.Context, req interface{}) (interface{}, error) {
return srv.(ResourceIndexServer).Origin(ctx, req.(*OriginRequest))
}
return interceptor(ctx, in, info, handler)
}

// ResourceIndex_ServiceDesc is the grpc.ServiceDesc for ResourceIndex service.
// It's only intended for direct use with grpc.RegisterService,
// and not to be introspected or modified (even as a copy)
var ResourceIndex_ServiceDesc = grpc.ServiceDesc{
ServiceName: "resource.ResourceIndex",
HandlerType: (*ResourceIndexServer)(nil),
Methods: []grpc.MethodDesc{
{
MethodName: "Read",
Handler: _ResourceIndex_Read_Handler,
},
{
MethodName: "History",
Handler: _ResourceIndex_History_Handler,
},
{
MethodName: "Origin",
Handler: _ResourceIndex_Origin_Handler,
},
},
Streams: []grpc.StreamDesc{},
Metadata: "resource.proto",
}

const (
BlobStore_PutBlob_FullMethodName = "/resource.BlobStore/PutBlob"
BlobStore_GetBlob_FullMethodName = "/resource.BlobStore/GetBlob"
)

// BlobStoreClient is the client API for BlobStore service.
//
// For semantics around ctx use and closing/ending streaming RPCs, please refer to https://pkg.go.dev/google.golang.org/grpc/?tab=doc#ClientConn.NewStream.
type BlobStoreClient interface {
// Upload a blob that will be saved in a resource
PutBlob(ctx context.Context, in *PutBlobRequest, opts ...grpc.CallOption) (*PutBlobResponse, error)
// Get blob contents. When possible, this will return a signed URL
// For large payloads, signed URLs are required to avoid protobuf message size limits
GetBlob(ctx context.Context, in *GetBlobRequest, opts ...grpc.CallOption) (*GetBlobResponse, error)
}

type blobStoreClient struct {
cc grpc.ClientConnInterface
}

func NewBlobStoreClient(cc grpc.ClientConnInterface) BlobStoreClient {
return &blobStoreClient{cc}
}

func (c *blobStoreClient) PutBlob(ctx context.Context, in *PutBlobRequest, opts ...grpc.CallOption) (*PutBlobResponse, error) {
cOpts := append([]grpc.CallOption{grpc.StaticMethod()}, opts...)
out := new(PutBlobResponse)
err := c.cc.Invoke(ctx, BlobStore_PutBlob_FullMethodName, in, out, cOpts...)
if err != nil {
return nil, err
}
return out, nil
}

func (c *blobStoreClient) GetBlob(ctx context.Context, in *GetBlobRequest, opts ...grpc.CallOption) (*GetBlobResponse, error) {
cOpts := append([]grpc.CallOption{grpc.StaticMethod()}, opts...)
out := new(GetBlobResponse)
err := c.cc.Invoke(ctx, BlobStore_GetBlob_FullMethodName, in, out, cOpts...)
if err != nil {
return nil, err
}
return out, nil
}

// BlobStoreServer is the server API for BlobStore service.
// All implementations should embed UnimplementedBlobStoreServer
// for forward compatibility
type BlobStoreServer interface {
// Upload a blob that will be saved in a resource
PutBlob(context.Context, *PutBlobRequest) (*PutBlobResponse, error)
// Get blob contents. When possible, this will return a signed URL
// For large payloads, signed URLs are required to avoid protobuf message size limits
GetBlob(context.Context, *GetBlobRequest) (*GetBlobResponse, error)
}

// UnimplementedBlobStoreServer should be embedded to have forward compatible implementations.
type UnimplementedBlobStoreServer struct {
}

func (UnimplementedBlobStoreServer) PutBlob(context.Context, *PutBlobRequest) (*PutBlobResponse, error) {
return nil, status.Errorf(codes.Unimplemented, "method PutBlob not implemented")
}
func (UnimplementedBlobStoreServer) GetBlob(context.Context, *GetBlobRequest) (*GetBlobResponse, error) {
return nil, status.Errorf(codes.Unimplemented, "method GetBlob not implemented")
}

// UnsafeBlobStoreServer may be embedded to opt out of forward compatibility for this service.
// Use of this interface is not recommended, as added methods to BlobStoreServer will
// result in compilation errors.
type UnsafeBlobStoreServer interface {
mustEmbedUnimplementedBlobStoreServer()
}

func RegisterBlobStoreServer(s grpc.ServiceRegistrar, srv BlobStoreServer) {
s.RegisterService(&BlobStore_ServiceDesc, srv)
}

func _BlobStore_PutBlob_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) {
in := new(PutBlobRequest)
if err := dec(in); err != nil {
return nil, err
}
if interceptor == nil {
return srv.(BlobStoreServer).PutBlob(ctx, in)
}
info := &grpc.UnaryServerInfo{
Server: srv,
FullMethod: BlobStore_PutBlob_FullMethodName,
}
handler := func(ctx context.Context, req interface{}) (interface{}, error) {
return srv.(BlobStoreServer).PutBlob(ctx, req.(*PutBlobRequest))
}
return interceptor(ctx, in, info, handler)
}

func _BlobStore_GetBlob_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) {
in := new(GetBlobRequest)
if err := dec(in); err != nil {
return nil, err
}
if interceptor == nil {
return srv.(BlobStoreServer).GetBlob(ctx, in)
}
info := &grpc.UnaryServerInfo{
Server: srv,
FullMethod: BlobStore_GetBlob_FullMethodName,
}
handler := func(ctx context.Context, req interface{}) (interface{}, error) {
return srv.(BlobStoreServer).GetBlob(ctx, req.(*GetBlobRequest))
}
return interceptor(ctx, in, info, handler)
}

// BlobStore_ServiceDesc is the grpc.ServiceDesc for BlobStore service.
// It's only intended for direct use with grpc.RegisterService,
// and not to be introspected or modified (even as a copy)
var BlobStore_ServiceDesc = grpc.ServiceDesc{
ServiceName: "resource.BlobStore",
HandlerType: (*BlobStoreServer)(nil),
Methods: []grpc.MethodDesc{
{
MethodName: "PutBlob",
Handler: _BlobStore_PutBlob_Handler,
},
{
MethodName: "GetBlob",
Handler: _BlobStore_GetBlob_Handler,
},
},
Streams: []grpc.StreamDesc{},
Metadata: "resource.proto",
}

const (
Diagnostics_IsHealthy_FullMethodName = "/resource.Diagnostics/IsHealthy"
)
@ -22,7 +22,7 @@ import (

// Package-level errors.
var (
ErrNotFound = errors.New("resource not found")
ErrNotFound = errors.New("entity not found")
ErrOptimisticLockingFailed = errors.New("optimistic locking failed")
ErrUserNotFoundInContext = errors.New("user not found in context")
ErrUnableToReadResourceJSON = errors.New("unable to read resource json")
@ -32,8 +32,6 @@ var (
// ResourceServer implements all services
type ResourceServer interface {
ResourceStoreServer
ResourceIndexServer
BlobStoreServer
DiagnosticsServer
LifecycleHooks
}
@ -62,23 +60,6 @@ type StorageBackend interface {
WatchWriteEvents(ctx context.Context) (<-chan *WrittenEvent, error)
}

// This interface is not exposed to end users directly
// Access to this interface is already gated by access control
type BlobStore interface {
// Indicates if storage layer supports signed urls
SupportsSignedURLs() bool

// Get the raw blob bytes and metadata -- limited to protobuf message size
// For larger payloads, we should use presigned URLs to upload from the client
PutBlob(context.Context, *PutBlobRequest) (*PutBlobResponse, error)

// Get blob contents. When possible, this will return a signed URL
// For large payloads, signed URLs are required to avoid protobuf message size limits
GetBlob(ctx context.Context, resource *ResourceKey, info *utils.BlobInfo, mustProxy bool) (*GetBlobResponse, error)

// TODO? List+Delete? This is for admin access
}

type ResourceServerOptions struct {
// OTel tracer
Tracer trace.Tracer
@ -86,12 +67,6 @@ type ResourceServerOptions struct {
// Real storage backend
Backend StorageBackend

// The blob storage engine
Blob BlobStore

// The search index
Search ResourceIndexServer

// Diagnostics
Diagnostics DiagnosticsServer

@ -114,12 +89,6 @@ func NewResourceServer(opts ResourceServerOptions) (ResourceServer, error) {
if opts.Backend == nil {
return nil, fmt.Errorf("missing Backend implementation")
}
if opts.Search == nil {
opts.Search = &noopService{}
}
if opts.Blob == nil {
opts.Blob = &noopService{}
}
if opts.Diagnostics == nil {
opts.Diagnostics = &noopService{}
}
@ -141,7 +110,6 @@ func NewResourceServer(opts ResourceServerOptions) (ResourceServer, error) {
tracer: opts.Tracer,
log: slog.Default().With("logger", "resource-server"),
backend: opts.Backend,
search: opts.Search,
diagnostics: opts.Diagnostics,
access: opts.WriteAccess,
lifecycle: opts.Lifecycle,
@ -157,8 +125,6 @@ type server struct {
tracer trace.Tracer
log *slog.Logger
backend StorageBackend
search ResourceIndexServer
blob BlobStore
diagnostics DiagnosticsServer
access WriteAccessHooks
lifecycle LifecycleHooks
@ -276,24 +242,13 @@ func (s *server) newEventBuilder(ctx context.Context, key *ResourceKey, value, o
}
obj.SetOriginInfo(origin)

// Make sure old values do not mutate things they should not
// Ensure old values do not mutate things they should not
if event.OldMeta != nil {
old := event.OldMeta

if obj.GetUID() != event.OldMeta.GetUID() {
return nil, apierrors.NewBadRequest(
fmt.Sprintf("UIDs do not match (old: %s, new: %s)", old.GetUID(), obj.GetUID()))
}

// Can not change creation timestamps+user
if obj.GetCreatedBy() != event.OldMeta.GetCreatedBy() {
return nil, apierrors.NewBadRequest(
fmt.Sprintf("created by changed (old: %s, new: %s)", old.GetCreatedBy(), obj.GetCreatedBy()))
}
if obj.GetCreationTimestamp() != event.OldMeta.GetCreationTimestamp() {
return nil, apierrors.NewBadRequest(
fmt.Sprintf("creation timestamp changed (old:%v, new:%v)", old.GetCreationTimestamp(), obj.GetCreationTimestamp()))
}
obj.SetUID(old.GetUID())
obj.SetCreatedBy(old.GetCreatedBy())
obj.SetCreationTimestamp(old.GetCreationTimestamp())
}
return event, nil
}
@ -431,7 +386,7 @@ func (s *server) Delete(ctx context.Context, req *DeleteRequest) (*DeleteRespons
if err != nil {
return nil, err
}
if latest.ResourceVersion != req.ResourceVersion {
if req.ResourceVersion > 0 && latest.ResourceVersion != req.ResourceVersion {
return nil, ErrOptimisticLockingFailed
}
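Editor's note: the changed guard makes a zero request version mean an unconditional delete, while any positive version must still match the stored one. A tiny standalone illustration of that rule:

package main

import (
	"errors"
	"fmt"
)

var errOptimisticLockingFailed = errors.New("optimistic locking failed")

// checkDelete mirrors the guard introduced above: a zero requested version
// skips the comparison entirely (unconditional delete).
func checkDelete(latestRV, requestedRV int64) error {
	if requestedRV > 0 && latestRV != requestedRV {
		return errOptimisticLockingFailed
	}
	return nil
}

func main() {
	fmt.Println(checkDelete(7, 0)) // <nil> -- unconditional
	fmt.Println(checkDelete(7, 6)) // optimistic locking failed
	fmt.Println(checkDelete(7, 7)) // <nil>
}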

@ -481,17 +436,15 @@ func (s *server) Read(ctx context.Context, req *ReadRequest) (*ReadResponse, err
return nil, err
}

if req.Key.Group == "" {
status, _ := errToStatus(apierrors.NewBadRequest("missing group"))
return &ReadResponse{Status: status}, nil
}
// if req.Key.Group == "" {
// status, _ := errToStatus(apierrors.NewBadRequest("missing group"))
// return &ReadResponse{Status: status}, nil
// }
if req.Key.Resource == "" {
status, _ := errToStatus(apierrors.NewBadRequest("missing resource"))
return &ReadResponse{Status: status}, nil
}

// TODO: shall we also check for the namespace and Name ? Or is that a backend concern?

rsp, err := s.backend.Read(ctx, req)
if err != nil {
if rsp == nil {
@ -536,8 +489,6 @@ func (s *server) Watch(req *WatchRequest, srv ResourceStore_WatchServer) error {
return err
}

fmt.Printf("WATCH %v\n", req.Options.Key)

ctx := srv.Context()

// Start listening -- this will buffer any changes that happen while we backfill
@ -589,76 +540,6 @@
}
}

// PutBlob implements ResourceServer.
func (s *server) PutBlob(ctx context.Context, req *PutBlobRequest) (*PutBlobResponse, error) {
if err := s.Init(); err != nil {
return nil, err
}
rsp, err := s.blob.PutBlob(ctx, req)
rsp.Status, err = errToStatus(err)
return rsp, err
}

func (s *server) getPartialObject(ctx context.Context, key *ResourceKey, rv int64) (utils.GrafanaMetaAccessor, *StatusResult) {
rsp, err := s.backend.Read(ctx, &ReadRequest{
Key: key,
ResourceVersion: rv,
})
if err != nil {
rsp.Status, _ = errToStatus(err)
}
if rsp.Status != nil {
return nil, rsp.Status
}

partial := &metav1.PartialObjectMetadata{}
err = json.Unmarshal(rsp.Value, partial)
if err != nil {
rsp.Status, _ = errToStatus(fmt.Errorf("error reading body %w", err))
return nil, rsp.Status
}
obj, err := utils.MetaAccessor(partial)
if err != nil {
rsp.Status, _ = errToStatus(fmt.Errorf("error getting accessor %w", err))
return nil, rsp.Status
}
return obj, nil
}

// GetBlob implements ResourceServer.
func (s *server) GetBlob(ctx context.Context, req *GetBlobRequest) (*GetBlobResponse, error) {
if err := s.Init(); err != nil {
return nil, err
}

// NOTE: in SQL... this could be simple select rather than a full fetch and extract
obj, status := s.getPartialObject(ctx, req.Resource, req.ResourceVersion)
if status != nil {
return &GetBlobResponse{Status: status}, nil
}

info := obj.GetBlob()
if info == nil || info.UID == "" {
return &GetBlobResponse{Status: &StatusResult{
Status: "Failure",
Message: "Resource does not have a linked blob",
Code: 404,
}}, nil
}

rsp, err := s.blob.GetBlob(ctx, req.Resource, info, req.MustProxyBytes)
rsp.Status, err = errToStatus(err)
return rsp, err
}

// History implements ResourceServer.
func (s *server) History(ctx context.Context, req *HistoryRequest) (*HistoryResponse, error) {
if err := s.Init(); err != nil {
return nil, err
}
return s.search.History(ctx, req)
}

// IsHealthy implements ResourceServer.
func (s *server) IsHealthy(ctx context.Context, req *HealthCheckRequest) (*HealthCheckResponse, error) {
if err := s.Init(); err != nil {
@ -666,11 +547,3 @@ func (s *server) IsHealthy(ctx context.Context, req *HealthCheckRequest) (*Healt
}
return s.diagnostics.IsHealthy(ctx, req)
}

// Origin implements ResourceServer.
func (s *server) Origin(ctx context.Context, req *OriginRequest) (*OriginResponse, error) {
if err := s.Init(); err != nil {
return nil, err
}
return s.search.Origin(ctx, req)
}
@ -38,10 +38,6 @@ func TestSimpleServer(t *testing.T) {
Metadata: fileblob.MetadataDontWrite, // skip
})
require.NoError(t, err)
<<<<<<< HEAD

=======
>>>>>>> origin/resource-store-bridge
fmt.Printf("ROOT: %s\n\n", tmp)
}
store, err := NewCDKBackend(ctx, CDKBackendOptions{
@ -1,13 +1,6 @@
package sql

import (
"context"
"os"
"path/filepath"
"time"

"gocloud.dev/blob/fileblob"

"github.com/grafana/grafana/pkg/infra/db"
"github.com/grafana/grafana/pkg/infra/tracing"
"github.com/grafana/grafana/pkg/services/featuremgmt"
@ -22,32 +15,6 @@ func ProvideResourceServer(db db.DB, cfg *setting.Cfg, features featuremgmt.Feat
Tracer: tracer,
}

supportBlobs := true

// Create a local blob filesystem blob store
if supportBlobs {
dir := filepath.Join(cfg.DataPath, "unistore", "blobs")
if err := os.MkdirAll(dir, 0o750); err != nil {
return nil, err
}

bucket, err := fileblob.OpenBucket(dir, &fileblob.Options{
CreateDir: true,
Metadata: fileblob.MetadataDontWrite, // skip
})
if err != nil {
return nil, err
}
opts.Blob, err = resource.NewCDKBlobStore(context.Background(), resource.CDKBlobStoreOptions{
Tracer: tracer,
Bucket: bucket,
URLExpiration: time.Minute * 20,
})
if err != nil {
return nil, err
}
}

eDB, err := dbimpl.ProvideResourceDB(db, cfg, features, tracer)
if err != nil {
return nil, err
@ -1,61 +0,0 @@
{{/*
This is the list of all the fields in *entity.Entity, in a way that is
suitable to be imported by other templates that need to select these fields
from either the "entity" or the "entity_history" tables.

Example usage:

SELECT {{ template "common_entity_select_into" . }}
FROM {{ .Ident "entity" }} AS e

*/}}
{{ define "common_entity_select_into" }}

e.{{ .Ident "guid" | .Into .Entity.Guid }},
e.{{ .Ident "resource_version" | .Into .Entity.ResourceVersion }},

e.{{ .Ident "key" | .Into .Entity.Key }},

e.{{ .Ident "group" | .Into .Entity.Group }},
e.{{ .Ident "group_version" | .Into .Entity.GroupVersion }},
e.{{ .Ident "resource" | .Into .Entity.Resource }},
e.{{ .Ident "namespace" | .Into .Entity.Namespace }},
e.{{ .Ident "name" | .Into .Entity.Name }},

e.{{ .Ident "folder" | .Into .Entity.Folder }},

e.{{ .Ident "meta" | .Into .Entity.Meta }},
e.{{ .Ident "body" | .Into .Entity.Body }},
e.{{ .Ident "status" | .Into .Entity.Status }},

e.{{ .Ident "size" | .Into .Entity.Size }},
e.{{ .Ident "etag" | .Into .Entity.ETag }},

e.{{ .Ident "created_at" | .Into .Entity.CreatedAt }},
e.{{ .Ident "created_by" | .Into .Entity.CreatedBy }},
e.{{ .Ident "updated_at" | .Into .Entity.UpdatedAt }},
e.{{ .Ident "updated_by" | .Into .Entity.UpdatedBy }},

e.{{ .Ident "origin" | .Into .Entity.Origin.Source }},
e.{{ .Ident "origin_key" | .Into .Entity.Origin.Key }},
e.{{ .Ident "origin_ts" | .Into .Entity.Origin.Time }},

e.{{ .Ident "title" | .Into .Entity.Title }},
e.{{ .Ident "slug" | .Into .Entity.Slug }},
e.{{ .Ident "description" | .Into .Entity.Description }},

e.{{ .Ident "message" | .Into .Entity.Message }},
e.{{ .Ident "labels" | .Into .Entity.Labels }},
e.{{ .Ident "fields" | .Into .Entity.Fields }},
e.{{ .Ident "errors" | .Into .Entity.Errors }},

e.{{ .Ident "action" | .Into .Entity.Action }}
{{ end }}

{{/* Build an ORDER BY clause from a []SortBy contained in a .Sort field */}}
{{ define "common_order_by" }}
{{ $comma := listSep ", " }}
{{ range .Sort }}
{{- call $comma -}} {{ $.Ident .Field }} {{ .Direction.String }}
{{ end }}
{{ end }}
@ -1,34 +0,0 @@
SELECT
{{ .Ident "rv" | .Into .Resource.Version }},
{{ .Ident "value" | .Into .Resource.Value }},
{{ .Ident "blob" | .Into .Resource.Blob }},

FROM "resource"

WHERE 1 = 1
AND {{ .Ident "namespace" }} = {{ .Arg .Key.Namespace }}
AND {{ .Ident "group" }} = {{ .Arg .Key.Group }}
AND {{ .Ident "resource" }} = {{ .Arg .Key.Resource }}
AND {{ .Ident "name" }} = {{ .Arg .Key.Name }}

{{/*
Resource versions work like snapshots at the kind level. Thus, a request
to retrieve a specific resource version should be interpreted as asking
for a resource as of how it existed at that point in time. This is why we
request matching entities with at most the provided resource version, and
return only the one with the highest resource version. In the case of not
specifying a resource version (i.e. resource version zero), it is
interpreted as the latest version of the given entity, thus we instead
query the "entity" table (which holds only the latest version of
non-deleted entities) and we don't need to specify anything else. The
"entity" table has a unique constraint on (namespace, group, resource,
name), so we're guaranteed to have at most one matching row.
*/}}
{{ if gt .ResourceVersion 0 }}
AND {{ .Ident "rv" }} <= {{ .Arg .ResourceVersion }}
ORDER BY {{ .Ident "rv" }} DESC
LIMIT 1
{{ else }}
AND {{ .Ident "is_current" }} = true
{{ end }}
;
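Editor's note: the template above compiles to one of two query shapes depending on the requested version. A sketch in Go of those shapes (identifiers simplified; the real template quotes them per dialect and binds the arguments):

package main

import "fmt"

// buildReadQuery sketches the two query forms produced by the template above.
func buildReadQuery(resourceVersion int64) string {
	q := `SELECT rv, value, blob FROM resource WHERE namespace=? AND "group"=? AND resource=? AND name=?`
	if resourceVersion > 0 {
		// "As of" semantics: newest row at or before the requested version.
		return q + ` AND rv <= ? ORDER BY rv DESC LIMIT 1`
	}
	// Latest version: exactly one row is current per unique key.
	return q + ` AND is_current = true`
}

func main() {
	fmt.Println(buildReadQuery(0))
	fmt.Println(buildReadQuery(42))
}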
@ -1,31 +0,0 @@
INSERT INTO "resource"
{{/* Explicitly specify fields that will be set */}}
(
{{ .Ident "event" }},
{{ .Ident "group" }},
{{ .Ident "api_version" }},
{{ .Ident "namespace" }},
{{ .Ident "resource" }},
{{ .Ident "name" }},
{{ .Ident "operation" }},
{{ .Ident "message" }},
{{ .Ident "value" }},
{{ .Ident "hash" }},
{{ .Ident "blob" }},
)

{{/* Provide the values */}}
VALUES (
{{ .Arg .Event.ID }},
{{ .Arg .Event.Group }},
{{ .Arg .Event.ApiVersion }},
{{ .Arg .Event.Namespace }},
{{ .Arg .Event.Resource }},
{{ .Arg .Event.Name }},
{{ .Arg .Event.Operation }},
{{ .Arg .Event.Message }},
{{ .Arg .Event.Value }},
{{ .Arg .Event.Hash }},
{{ .Arg .Event.Blob }},
)
;
@ -1,8 +0,0 @@
SELECT
{{ .Ident "rv" | .Into .ResourceVersion }}

FROM {{ .Ident "resource_version" }}
WHERE 1 = 1
AND {{ .Ident "group" }} = {{ .Arg .Group }}
AND {{ .Ident "resource" }} = {{ .Arg .Resource }}
;
@ -1,9 +0,0 @@
UPDATE {{ .Ident "resource_version" }}
SET
{{ .Ident "rv" }} = {{ .Arg .ResourceVersion }} + 1,

WHERE 1 = 1
AND {{ .Ident "group" }} = {{ .Arg .Group }}
AND {{ .Ident "resource" }} = {{ .Arg .Resource }}
AND {{ .Ident "rv" }} = {{ .Arg .ResourceVersion }}
;
@ -1,13 +0,0 @@
INSERT INTO {{ .Ident "resource_version" }}
(
{{ .Ident "group" }},
{{ .Ident "resource" }},
{{ .Ident "rv" }},
)

VALUES (
{{ .Arg .Group }},
{{ .Arg .Resource }},
1,
)
;
@ -1,7 +0,0 @@
SELECT {{ .Ident "rv" | .Into .ResourceVersion }}
FROM {{ .Ident "resource_version" }}
WHERE 1 = 1
AND {{ .Ident "group" }} = {{ .Arg .Group }}
AND {{ .Ident "resource" }} = {{ .Arg .Resource }}
{{ .SelectFor "UPDATE" }}
;
@ -1,161 +0,0 @@
package sqlnext

import (
"fmt"

"github.com/grafana/grafana/pkg/services/sqlstore/migrator"
)

func InitResourceTables(mg *migrator.Migrator) string {
marker := "Initialize resource tables (vX)" // changing this key will wipe and rewrite everything
mg.AddMigration(marker, &migrator.RawSQLMigration{})

tables := []migrator.Table{}

// This table helps support incrementing the resource version within a group+resource
tables = append(tables, migrator.Table{
Name: "resource_version",
Columns: []*migrator.Column{
{Name: "group", Type: migrator.DB_NVarchar, Length: 190, Nullable: false},
{Name: "resource", Type: migrator.DB_NVarchar, Length: 190, Nullable: false},
{Name: "rv", Type: migrator.DB_BigInt, Nullable: false}, // resource version
},
Indices: []*migrator.Index{
{Cols: []string{"group", "resource"}, Type: migrator.UniqueIndex},
},
})

tables = append(tables, migrator.Table{
Name: "resource", // write only log? all events
Columns: []*migrator.Column{
// SnowflakeID -- Each Create/Update/Delete call is an event
// Using snowflake ID doubles this field as an approximate timestamp
{Name: "event", Type: migrator.DB_BigInt, Nullable: false, IsPrimaryKey: true},

// This will be null on insert, and then updated once we are ready to commit the transaction
{Name: "rv", Type: migrator.DB_BigInt, Nullable: true},
{Name: "previous_rv", Type: migrator.DB_BigInt, Nullable: true}, // needed?

// Allows fast search for the first page in any query.
// Subsequent pages must use MAX(rv) AND is_compacted=false GROUP ...
{Name: "is_current", Type: migrator.DB_Bool, Nullable: false},

// Indicates that this is no longer the current version
// This value is updated every few minutes and makes the paged queries more efficient
{Name: "is_compacted", Type: migrator.DB_Bool, Nullable: false},

// Properties that exist in path/key (and duplicated in the json value)
{Name: "group", Type: migrator.DB_NVarchar, Length: 190, Nullable: false},
{Name: "api_version", Type: migrator.DB_NVarchar, Length: 32, Nullable: false},
{Name: "namespace", Type: migrator.DB_NVarchar, Length: 63, Nullable: true}, // namespace is not required (cluster scope)
{Name: "resource", Type: migrator.DB_NVarchar, Length: 190, Nullable: false},
{Name: "name", Type: migrator.DB_NVarchar, Length: 190, Nullable: false},

// The operation that wrote this resource version
// 1: created, 2: updated, 3: deleted
{Name: "operation", Type: migrator.DB_Int, Nullable: false},

// Optional Commit message (currently only used for dashboards)
{Name: "message", Type: migrator.DB_Text, Nullable: false}, // defaults to empty string

// The k8s resource JSON text (without the resourceVersion populated)
{Name: "value", Type: migrator.DB_MediumText, Nullable: false},

// Content hash -- this is appropriate to use for an etag value
{Name: "hash", Type: migrator.DB_NVarchar, Length: 32, Nullable: false},

// Path to linked blob (or null). This blob may be saved in SQL, or in an object store
{Name: "blob_uid", Type: migrator.DB_NVarchar, Length: 60, Nullable: true},
},
Indices: []*migrator.Index{
{Cols: []string{"rv"}, Type: migrator.UniqueIndex},
{Cols: []string{"is_current"}, Type: migrator.IndexType},
{Cols: []string{"is_compacted"}, Type: migrator.IndexType},
{Cols: []string{"operation"}, Type: migrator.IndexType},
{Cols: []string{"namespace"}, Type: migrator.IndexType},
{Cols: []string{"group", "resource", "name"}, Type: migrator.IndexType},
{Cols: []string{"blob_uid"}, Type: migrator.IndexType},
},
})
|
||||
|
||||
// The values in this table are created by parsing the the value JSON and writing these as searchable columns
|
||||
// These *could* be in the same table, but this structure allows us to replace the table by first
|
||||
// building a parallel structure, then swapping them... maybe :)
|
||||
tables = append(tables, migrator.Table{
|
||||
Name: "resource_meta", // write only log? all events
|
||||
Columns: []*migrator.Column{
|
||||
{Name: "event", Type: migrator.DB_BigInt, Nullable: false, IsPrimaryKey: true},
|
||||
|
||||
// Hashed label set
|
||||
{Name: "label_set", Type: migrator.DB_NVarchar, Length: 64, Nullable: true}, // null is no labels
|
||||
|
||||
// Helpful filters
|
||||
{Name: "folder", Type: migrator.DB_NVarchar, Length: 190, Nullable: true}, // uid of folder
|
||||
|
||||
// For sorting values come from metadata.annotations#grafana.app/*
|
||||
{Name: "created_at", Type: migrator.DB_BigInt, Nullable: false},
|
||||
{Name: "updated_at", Type: migrator.DB_BigInt, Nullable: false},
|
||||
|
||||
// Origin metadata helps implement efficient provisioning checks
|
||||
{Name: "origin", Type: migrator.DB_NVarchar, Length: 64, Nullable: true}, // The origin name
|
||||
{Name: "origin_path", Type: migrator.DB_Text, Nullable: true}, // Path to resource
|
||||
{Name: "origin_hash", Type: migrator.DB_NVarchar, Length: 128, Nullable: true}, // Origin hash
|
||||
{Name: "origin_ts", Type: migrator.DB_BigInt, Nullable: true}, // Origin timestamp
|
||||
},
|
||||
Indices: []*migrator.Index{
|
||||
{Cols: []string{"event"}, Type: migrator.IndexType},
|
||||
{Cols: []string{"folder"}, Type: migrator.IndexType},
|
||||
{Cols: []string{"created_at"}, Type: migrator.IndexType},
|
||||
{Cols: []string{"updated_at"}, Type: migrator.IndexType},
|
||||
{Cols: []string{"origin"}, Type: migrator.IndexType},
|
||||
},
|
||||
})
|
||||
|
||||
// This table is optional, blobs can also be saved to object store or disk
|
||||
// This is an append only store
|
||||
tables = append(tables, migrator.Table{
|
||||
Name: "resource_blob", // even things that failed?
|
||||
Columns: []*migrator.Column{
|
||||
{Name: "uid", Type: migrator.DB_NVarchar, Length: 60, Nullable: false, IsPrimaryKey: true},
|
||||
{Name: "value", Type: migrator.DB_Blob, Nullable: true},
|
||||
{Name: "etag", Type: migrator.DB_NVarchar, Length: 64, Nullable: false},
|
||||
{Name: "size", Type: migrator.DB_BigInt, Nullable: false},
|
||||
{Name: "content_type", Type: migrator.DB_NVarchar, Length: 255, Nullable: false},
|
||||
|
||||
// These is used for auditing and cleanup (could be path?)
|
||||
{Name: "namespace", Type: migrator.DB_NVarchar, Length: 63, Nullable: true},
|
||||
{Name: "group", Type: migrator.DB_NVarchar, Length: 190, Nullable: false},
|
||||
{Name: "resource", Type: migrator.DB_NVarchar, Length: 190, Nullable: false},
|
||||
{Name: "name", Type: migrator.DB_NVarchar, Length: 190, Nullable: false},
|
||||
},
|
||||
Indices: []*migrator.Index{
|
||||
{Cols: []string{"uid"}, Type: migrator.UniqueIndex},
|
||||
|
||||
// Used for auditing
|
||||
{Cols: []string{"namespace", "group", "resource", "name"}, Type: migrator.IndexType},
|
||||
},
|
||||
})
|
||||
|
||||
tables = append(tables, migrator.Table{
|
||||
Name: "resource_label_set",
|
||||
Columns: []*migrator.Column{
|
||||
{Name: "label_set", Type: migrator.DB_NVarchar, Length: 64, Nullable: false},
|
||||
{Name: "label", Type: migrator.DB_NVarchar, Length: 190, Nullable: false},
|
||||
{Name: "value", Type: migrator.DB_Text, Nullable: false},
|
||||
},
|
||||
Indices: []*migrator.Index{
|
||||
{Cols: []string{"label_set", "label"}, Type: migrator.UniqueIndex},
|
||||
},
|
||||
})
|
||||
|
||||
// Initialize all tables
|
||||
for t := range tables {
|
||||
mg.AddMigration("drop table "+tables[t].Name, migrator.NewDropTableMigration(tables[t].Name))
|
||||
mg.AddMigration("create table "+tables[t].Name, migrator.NewAddTableMigration(tables[t]))
|
||||
for i := range tables[t].Indices {
|
||||
mg.AddMigration(fmt.Sprintf("create table %s, index: %d", tables[t].Name, i), migrator.NewAddIndexMigration(tables[t], tables[t].Indices[i]))
|
||||
}
|
||||
}
|
||||
|
||||
return marker
|
||||
}
|
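The is_current / is_compacted columns above exist to keep paged list queries cheap. A rough sketch of the query shapes the comments describe, under the assumption that pages are cursored by rv -- illustrative SQL only, not part of this commit:

package sqlnext

// Illustrative only: the list-query shapes implied by the is_current /
// is_compacted comments above. Table and column names come from the
// migration; the exact SQL is an assumption, not part of this commit.
const (
	// First page: is_current lets the database return the newest version of
	// every matching resource straight from an indexed flag.
	listFirstPage = `
	  SELECT * FROM resource
	   WHERE is_current = true AND "group" = $1 AND resource = $2
	   ORDER BY rv DESC
	   LIMIT $3`

	// Later pages: the cursor rv is in the past, so is_current no longer
	// applies; take the newest not-yet-compacted event per resource name
	// below the cursor (the MAX(rv) ... GROUP BY the comment alludes to).
	listNextPage = `
	  SELECT r.* FROM resource r
	   JOIN (SELECT name, MAX(rv) AS rv
	           FROM resource
	          WHERE is_compacted = false AND rv < $1 AND "group" = $2 AND resource = $3
	          GROUP BY name) latest
	     ON r.name = latest.name AND r.rv = latest.rv
	   ORDER BY r.rv DESC
	   LIMIT $4`
)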
@ -1,160 +0,0 @@
package sqlnext

import (
	"context"
	"errors"
	"fmt"
	"strings"

	"github.com/prometheus/client_golang/prometheus"
	"go.opentelemetry.io/otel/trace"

	"github.com/grafana/grafana/pkg/infra/log"
	"github.com/grafana/grafana/pkg/infra/tracing"
	"github.com/grafana/grafana/pkg/services/sqlstore/migrator"
	"github.com/grafana/grafana/pkg/services/sqlstore/session"
	"github.com/grafana/grafana/pkg/services/store/entity/db"
	"github.com/grafana/grafana/pkg/services/store/entity/sqlstash"
	"github.com/grafana/grafana/pkg/storage/unified/resource"
	"github.com/grafana/grafana/pkg/storage/unified/sql/sqltemplate"
)

// Package-level errors.
var (
	ErrNotImplementedYet = errors.New("not implemented yet (sqlnext)")
)

func ProvideSQLResourceServer(db db.EntityDBInterface, tracer tracing.Tracer) (resource.ResourceServer, error) {
	ctx, cancel := context.WithCancel(context.Background())

	store := &sqlResourceStore{
		db:     db,
		log:    log.New("sql-resource-server"),
		ctx:    ctx,
		cancel: cancel,
		tracer: tracer,
	}

	if err := prometheus.Register(sqlstash.NewStorageMetrics()); err != nil {
		return nil, err
	}

	return resource.NewResourceServer(resource.ResourceServerOptions{
		Tracer:      tracer,
		Backend:     store,
		Diagnostics: store,
		Lifecycle:   store,
	})
}

type sqlResourceStore struct {
	log     log.Logger
	db      db.EntityDBInterface // needed to keep xorm engine in scope
	sess    *session.SessionDB
	dialect migrator.Dialect
	ctx     context.Context // TODO: remove
	cancel  context.CancelFunc
	tracer  trace.Tracer

	//broadcaster sqlstash.Broadcaster[*resource.WatchEvent]
	//stream chan *resource.WatchEvent

	sqlDB      db.DB
	sqlDialect sqltemplate.Dialect
}

func (s *sqlResourceStore) Init() error {
	if s.sess != nil {
		return nil
	}

	if s.db == nil {
		return errors.New("missing db")
	}

	err := s.db.Init()
	if err != nil {
		return err
	}

	sqlDB, err := s.db.GetDB()
	if err != nil {
		return err
	}
	s.sqlDB = sqlDB

	driverName := sqlDB.DriverName()
	driverName = strings.TrimSuffix(driverName, "WithHooks")
	switch driverName {
	case db.DriverMySQL:
		s.sqlDialect = sqltemplate.MySQL
	case db.DriverPostgres:
		s.sqlDialect = sqltemplate.PostgreSQL
	case db.DriverSQLite, db.DriverSQLite3:
		s.sqlDialect = sqltemplate.SQLite
	default:
		return fmt.Errorf("no dialect for driver %q", driverName)
	}

	sess, err := s.db.GetSession()
	if err != nil {
		return err
	}

	engine, err := s.db.GetEngine()
	if err != nil {
		return err
	}

	s.sess = sess
	s.dialect = migrator.NewDialect(engine.DriverName())

	// TODO.... set up the broadcaster

	return nil
}

func (s *sqlResourceStore) IsHealthy(ctx context.Context, r *resource.HealthCheckRequest) (*resource.HealthCheckResponse, error) {
	// ctxLogger := s.log.FromContext(log.WithContextualAttributes(ctx, []any{"method", "isHealthy"}))

	if err := s.sqlDB.PingContext(ctx); err != nil {
		return nil, err
	}
	// TODO: check the status of the watcher implementation as well
	return &resource.HealthCheckResponse{Status: resource.HealthCheckResponse_SERVING}, nil
}

func (s *sqlResourceStore) Stop() {
	s.cancel()
}

func (s *sqlResourceStore) WriteEvent(ctx context.Context, event resource.WriteEvent) (int64, error) {
	_, span := s.tracer.Start(ctx, "storage_server.WriteEvent")
	defer span.End()

	// TODO... actually write the event!

	return 0, ErrNotImplementedYet
}

func (s *sqlResourceStore) WatchWriteEvents(ctx context.Context) (<-chan *resource.WrittenEvent, error) {
	return nil, ErrNotImplementedYet
}

func (s *sqlResourceStore) Read(ctx context.Context, req *resource.ReadRequest) (*resource.ReadResponse, error) {
	_, span := s.tracer.Start(ctx, "storage_server.GetResource")
	defer span.End()

	fmt.Printf("TODO, GET: %+v", req.Key)

	return nil, ErrNotImplementedYet
}

func (s *sqlResourceStore) PrepareList(ctx context.Context, req *resource.ListRequest) (*resource.ListResponse, error) {
	_, span := s.tracer.Start(ctx, "storage_server.List")
	defer span.End()

	fmt.Printf("TODO, LIST: %+v", req.Options.Key)

	return nil, ErrNotImplementedYet
}
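WriteEvent is still a stub above. One plausible shape for it under this schema, shown as a standalone sketch: newSnowflakeID is a stand-in, incrementRV is the helper sketched after the resource_version templates, and none of this is the commit's actual code.

package main

import (
	"context"
	"database/sql"
	"time"
)

// Stand-in for a real snowflake generator; the real field doubles as an
// approximate timestamp, which UnixNano loosely mimics.
func newSnowflakeID() int64 { return time.Now().UnixNano() }

// writeEventSketch is illustrative only: append the event, bump the
// group+resource counter, then stamp the new row before committing.
func writeEventSketch(ctx context.Context, db *sql.DB, group, res, name, value string) (int64, error) {
	tx, err := db.BeginTx(ctx, nil)
	if err != nil {
		return 0, err
	}
	defer func() { _ = tx.Rollback() }() // no-op once Commit has succeeded

	event := newSnowflakeID()

	// rv starts NULL and is filled in just before commit, per the
	// "null on insert" comment on the resource table above.
	_, err = tx.ExecContext(ctx,
		`INSERT INTO resource (event, "group", resource, name, value, is_current, is_compacted, operation, api_version, message, hash)
		 VALUES ($1, $2, $3, $4, $5, true, false, 1, 'v0alpha1', '', '')`,
		event, group, res, name, value)
	if err != nil {
		return 0, err
	}

	rv, err := incrementRV(ctx, tx, group, res) // sketched earlier
	if err != nil {
		return 0, err
	}

	// Stamp the new version and retire the previously current row.
	if _, err = tx.ExecContext(ctx, `UPDATE resource SET rv = $1 WHERE event = $2`, rv, event); err != nil {
		return 0, err
	}
	if _, err = tx.ExecContext(ctx,
		`UPDATE resource SET is_current = false WHERE "group" = $1 AND resource = $2 AND name = $3 AND event <> $4`,
		group, res, name, event); err != nil {
		return 0, err
	}
	return rv, tx.Commit()
}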
@ -1,16 +1,19 @@
import { css } from '@emotion/css';
import { DOMAttributes } from '@react-types/shared';
import { memo, forwardRef } from 'react';
import { memo, forwardRef, useCallback } from 'react';
import { useLocation } from 'react-router-dom';

import { GrafanaTheme2 } from '@grafana/data';
import { selectors } from '@grafana/e2e-selectors';
import { config, reportInteraction } from '@grafana/runtime';
import { CustomScrollbar, Icon, IconButton, useStyles2, Stack } from '@grafana/ui';
import { useGrafana } from 'app/core/context/GrafanaContext';
import { t } from 'app/core/internationalization';
import { usePatchUserPreferencesMutation } from 'app/features/preferences/api/index';
import { useSelector } from 'app/types';

import { MegaMenuItem } from './MegaMenuItem';
import { usePinnedItems } from './hooks';
import { enrichWithInteractionTracking, getActiveItem } from './utils';

export const MENU_WIDTH = '300px';
@ -26,6 +29,8 @@ export const MegaMenu = memo(
    const location = useLocation();
    const { chrome } = useGrafana();
    const state = chrome.useState();
    const [patchPreferences] = usePatchUserPreferencesMutation();
    const pinnedItems = usePinnedItems();

    // Remove profile + help from tree
    const navItems = navTree
@ -46,6 +51,35 @@
      });
    };

    const isPinned = useCallback(
      (id?: string) => {
        if (!id || !pinnedItems?.length) {
          return false;
        }
        return pinnedItems?.includes(id);
      },
      [pinnedItems]
    );

    const onPinItem = (id?: string) => {
      if (id && config.featureToggles.pinNavItems) {
        const navItem = navTree.find((item) => item.id === id);
        const isSaved = isPinned(id);
        const newItems = isSaved ? pinnedItems.filter((i) => id !== i) : [...pinnedItems, id];
        const interactionName = isSaved ? 'grafana_nav_item_unpinned' : 'grafana_nav_item_pinned';
        reportInteraction(interactionName, {
          path: navItem?.url ?? id,
        });
        patchPreferences({
          patchPrefsCmd: {
            navbar: {
              savedItemIds: newItems,
            },
          },
        });
      }
    };

    return (
      <div data-testid={selectors.components.NavMenu.Menu} ref={ref} {...restProps}>
        <div className={styles.mobileHeader}>
@ -79,8 +113,10 @@
                  )}
                  <MegaMenuItem
                    link={link}
                    isPinned={isPinned}
                    onClick={state.megaMenuDocked ? undefined : onClose}
                    activeItem={activeItem}
                    onPin={onPinItem}
                  />
                </Stack>
              ))}
@ -19,11 +19,13 @@ interface Props {
  activeItem?: NavModelItem;
  onClick?: () => void;
  level?: number;
  onPin: (id?: string) => void;
  isPinned: (id?: string) => boolean;
}

const MAX_DEPTH = 2;

export function MegaMenuItem({ link, activeItem, level = 0, onClick }: Props) {
export function MegaMenuItem({ link, activeItem, level = 0, onClick, onPin, isPinned }: Props) {
  const { chrome } = useGrafana();
  const state = chrome.useState();
  const menuIsDocked = state.megaMenuDocked;
@ -102,6 +104,9 @@ export function MegaMenuItem({ link, activeItem, level = 0, onClick }: Props) {
        }}
        target={link.target}
        url={link.url}
        id={link.id}
        onPin={onPin}
        isPinned={isPinned(link.id)}
      >
        <div
          className={cx(styles.labelWrapper, {
@ -127,6 +132,8 @@ export function MegaMenuItem({ link, activeItem, level = 0, onClick }: Props) {
            activeItem={activeItem}
            onClick={onClick}
            level={level + 1}
            onPin={onPin}
            isPinned={isPinned}
          />
        ))
      ) : (
@ -3,6 +3,7 @@ import * as React from 'react';

import { GrafanaTheme2 } from '@grafana/data';
import { selectors } from '@grafana/e2e-selectors';
import { config } from '@grafana/runtime';
import { Icon, Link, useTheme2 } from '@grafana/ui';

export interface Props {
@ -11,9 +12,12 @@ export interface Props {
  onClick?: () => void;
  target?: HTMLAnchorElement['target'];
  url: string;
  id?: string;
  onPin: (id?: string) => void;
  isPinned?: boolean;
}

export function MegaMenuItemText({ children, isActive, onClick, target, url }: Props) {
export function MegaMenuItemText({ children, isActive, onClick, target, url, id, onPin, isPinned }: Props) {
  const theme = useTheme2();
  const styles = getStyles(theme, isActive);
  const LinkComponent = !target && url.startsWith('/') ? Link : 'a';
@ -26,6 +30,17 @@ export function MegaMenuItemText({ children, isActive, onClick, target, url }: P
          // As nav links are supposed to link to internal urls this option should be used with caution
          target === '_blank' && <Icon data-testid="external-link-icon" name="external-link-alt" />
        }
        {config.featureToggles.pinNavItems && (
          <Icon
            name={isPinned ? 'favorite' : 'star'}
            className={'pin-icon'}
            onClick={(e) => {
              e.preventDefault();
              e.stopPropagation();
              onPin(id);
            }}
          />
        )}
      </div>
    );

@ -90,5 +105,17 @@ const getStyles = (theme: GrafanaTheme2, isActive: Props['isActive']) => ({
    gap: '0.5rem',
    height: '100%',
    width: '100%',
    justifyContent: 'space-between',
    '.pin-icon': {
      display: 'none',
      padding: theme.spacing(0.5),
      width: theme.spacing(3),
      height: theme.spacing(3),
    },
    '&:hover': {
      '.pin-icon': {
        display: 'block',
      },
    },
  }),
});
14
public/app/core/components/AppChrome/MegaMenu/hooks.ts
Normal file
@ -0,0 +1,14 @@
import { useMemo } from 'react';

import { config } from '@grafana/runtime';
import { useGetUserPreferencesQuery } from 'app/features/preferences/api';

export const usePinnedItems = () => {
  const preferences = useGetUserPreferencesQuery();
  const pinnedItems = useMemo(() => preferences.data?.navbar?.savedItemIds || [], [preferences]);

  if (config.featureToggles.pinNavItems) {
    return pinnedItems;
  }
  return [];
};
@ -2,7 +2,7 @@ import { css } from '@emotion/css';
import { MouseEvent } from 'react';

import { selectors } from '@grafana/e2e-selectors';
import { Button, CallToActionCard, Icon, IconName, LinkButton } from '@grafana/ui';
import { Alert, Button, CallToActionCard, Icon, IconName, LinkButton } from '@grafana/ui';

export interface Props {
  title: string;
@ -59,10 +59,9 @@ const EmptyListCTA = ({
        ''
      )}
      {infoBox ? (
        <div key="infoBoxHtml" className={`grafana-info-box ${infoBoxStyles}`}>
          {infoBoxTitle && <h5>{infoBoxTitle}</h5>}
        <Alert severity="info" title={infoBoxTitle ?? ''} className={infoBoxStyles}>
          <div dangerouslySetInnerHTML={infoBox} />
        </div>
        </Alert>
      ) : (
        ''
      )}
@ -1,6 +1,8 @@
import { css } from '@emotion/css';
import * as React from 'react';

import { getTagColorsFromName, Icon } from '@grafana/ui';
import { GrafanaTheme2 } from '@grafana/data';
import { getTagColorsFromName, Icon, useStyles2 } from '@grafana/ui';

export interface Props {
  label: string;
@ -9,26 +11,39 @@ export interface Props {
  onClick?: React.MouseEventHandler<SVGElement>;
}

export class TagBadge extends React.Component<Props> {
  constructor(props: Props) {
    super(props);
  }
export const TagBadge = ({ count, label, onClick, removeIcon }: Props) => {
  const { color } = getTagColorsFromName(label);
  const styles = useStyles2(getStyles);

  render() {
    const { label, removeIcon, count, onClick } = this.props;
    const { color } = getTagColorsFromName(label);
  const countLabel = count !== 0 && <span style={{ marginLeft: '3px' }}>{`(${count})`}</span>;

    const tagStyle = {
      backgroundColor: color,
    };
  return (
    <span
      className={styles.badge}
      style={{
        backgroundColor: color,
      }}
    >
      {removeIcon && <Icon onClick={onClick} name="times" />}
      {label} {countLabel}
    </span>
  );
};

    const countLabel = count !== 0 && <span style={{ marginLeft: '3px' }}>{`(${count})`}</span>;

    return (
      <span className={`label label-tag`} style={tagStyle}>
        {removeIcon && <Icon onClick={onClick} name="times" />}
        {label} {countLabel}
      </span>
    );
  }
}
export const getStyles = (theme: GrafanaTheme2) => ({
  badge: css({
    ...theme.typography.bodySmall,
    backgroundColor: theme.v1.palette.gray1,
    borderRadius: theme.shape.radius.default,
    color: theme.v1.palette.white,
    display: 'inline-block',
    height: '20px',
    lineHeight: '20px',
    padding: theme.spacing(0, 0.75),
    verticalAlign: 'baseline',
    whiteSpace: 'nowrap',
    '&:hover': {
      opacity: 0.85,
    },
  }),
});
@ -6,7 +6,7 @@ import { escapeStringForRegex, GrafanaTheme2 } from '@grafana/data';
import { Icon, MultiSelect, useStyles2 } from '@grafana/ui';
import { t } from 'app/core/internationalization';

import { TagBadge } from './TagBadge';
import { TagBadge, getStyles as getTagBadgeStyles } from './TagBadge';
import { TagOption, TagSelectOption } from './TagOption';

export interface TermCount {
@ -167,31 +167,35 @@ export const TagFilter = ({

TagFilter.displayName = 'TagFilter';

const getStyles = (theme: GrafanaTheme2) => ({
  tagFilter: css`
    position: relative;
    min-width: 180px;
    flex-grow: 1;
const getStyles = (theme: GrafanaTheme2) => {
  const tagBadgeStyles = getTagBadgeStyles(theme);

    .label-tag {
      margin-left: 6px;
      cursor: pointer;
    }
  `,
  clear: css`
    background: none;
    border: none;
    text-decoration: underline;
    font-size: 12px;
    padding: none;
    position: absolute;
    top: -17px;
    right: 0;
    cursor: pointer;
    color: ${theme.colors.text.secondary};
  return {
    tagFilter: css({
      position: 'relative',
      minWidth: '180px',
      flexGrow: 1,

    &:hover {
      color: ${theme.colors.text.primary};
    }
  `,
});
      [`.${tagBadgeStyles.badge}`]: {
        marginLeft: '6px',
        cursor: 'pointer',
      },
    }),
    clear: css({
      background: 'none',
      border: 'none',
      textDecoration: 'underline',
      fontSize: '12px',
      padding: 'none',
      position: 'absolute',
      top: '-17px',
      right: 0,
      cursor: 'pointer',
      color: theme.colors.text.secondary,

      '&:hover': {
        color: theme.colors.text.primary,
      },
    }),
  };
};
@ -28,6 +28,7 @@ import usersReducers from 'app/features/users/state/reducers';
import templatingReducers from 'app/features/variables/state/keyedVariablesReducer';

import { alertingApi } from '../../features/alerting/unified/api/alertingApi';
import { userPreferencesAPI } from '../../features/preferences/api';
import { queryLibraryApi } from '../../features/query-library/api/factory';
import { cleanUpAction } from '../actions/cleanUp';

@ -59,6 +60,7 @@ const rootReducers = {
  [browseDashboardsAPI.reducerPath]: browseDashboardsAPI.reducer,
  [cloudMigrationAPI.reducerPath]: cloudMigrationAPI.reducer,
  [queryLibraryApi.reducerPath]: queryLibraryApi.reducer,
  [userPreferencesAPI.reducerPath]: userPreferencesAPI.reducer,
};

const addedReducers = {};
@ -1,6 +1,6 @@
import { omitBy } from 'lodash';

import { deprecationWarning } from '@grafana/data';
import { deprecationWarning, safeStringifyValue } from '@grafana/data';
import { BackendSrvRequest } from '@grafana/runtime';

export const parseInitFromOptions = (options: BackendSrvRequest): RequestInit => {
@ -93,7 +93,7 @@ export const parseBody = (options: BackendSrvRequest, isAppJson: boolean) => {
    return options.data;
  }

  return isAppJson ? JSON.stringify(options.data) : new URLSearchParams(options.data);
  return isAppJson ? safeStringifyValue(options.data) : new URLSearchParams(options.data);
};

export async function parseResponseBody<T>(
@ -1,6 +1,7 @@
import { useAsync } from 'react-use';

import { getBackendSrv } from '@grafana/runtime';
import { Alert } from '@grafana/ui';
import { Page } from 'app/core/components/Page/Page';

import { AdminSettingsTable } from './AdminSettingsTable';
@ -13,10 +14,10 @@ function AdminSettings() {
  return (
    <Page navId="server-settings">
      <Page.Contents>
        <div className="grafana-info-box span8" style={{ margin: '20px 0 25px 0' }}>
        <Alert severity="info" title="">
          These system settings are defined in grafana.ini or custom.ini (or overridden in ENV variables). To change
          these you currently need to restart Grafana.
        </div>
        </Alert>

        {loading && <AdminSettingsTable.Skeleton />}

@ -5,6 +5,8 @@ import { Button, LinkButton } from '@grafana/ui';
import { contextSrv } from 'app/core/core';
import { AccessControlAction, SyncInfo, UserDTO } from 'app/types';

import { TagBadge } from '../../core/components/TagFilter/TagBadge';

interface Props {
  ldapSyncInfo: SyncInfo;
  user: UserDTO;
@ -40,7 +42,7 @@ export class UserLdapSyncInfo extends PureComponent<Props, State> {
              <td>External sync</td>
              <td>User synced via LDAP. Some changes must be done in LDAP or mappings.</td>
              <td>
                <span className="label label-tag">LDAP</span>
                <TagBadge label="LDAP" removeIcon={false} count={0} onClick={undefined} />
              </td>
            </tr>
            <tr>
@ -14,8 +14,9 @@ import { useCombinedRuleNamespaces } from './hooks/useCombinedRuleNamespaces';
import { usePagination } from './hooks/usePagination';
import { useURLSearchParams } from './hooks/useURLSearchParams';
import { fetchPromRulesAction, fetchRulerRulesAction } from './state/actions';
import { combineMatcherStrings, labelsMatchMatchers, parseMatchers } from './utils/alertmanager';
import { combineMatcherStrings, labelsMatchMatchers } from './utils/alertmanager';
import { GRAFANA_RULES_SOURCE_NAME } from './utils/datasource';
import { parsePromQLStyleMatcherLooseSafe } from './utils/matchers';
import { createViewLink } from './utils/misc';

interface Props {
@ -168,7 +169,7 @@ function filterAndSortRules(
  labelFilter: string,
  sortOrder: SortOrder
) {
  const matchers = parseMatchers(labelFilter);
  const matchers = parsePromQLStyleMatcherLooseSafe(labelFilter);
  let rules = originalRules.filter(
    (rule) => rule.name.toLowerCase().includes(nameFilter.toLowerCase()) && labelsMatchMatchers(rule.labels, matchers)
  );
@ -1,6 +1,6 @@
import { render } from '@testing-library/react';
import { TestProvider } from 'test/helpers/TestProvider';
import { byTestId } from 'testing-library-selector';
import { byTestId, byText } from 'testing-library-selector';

import { DataSourceApi } from '@grafana/data';
import { PromOptions, PrometheusDatasource } from '@grafana/prometheus';
@ -25,6 +25,7 @@ import {
  mockRulerAlertingRule,
  mockRulerRuleGroup,
} from './mocks';
import { captureRequests } from './mocks/server/events';
import { RuleFormValues } from './types/rule-form';
import * as config from './utils/config';
import { Annotation } from './utils/constants';
@ -183,6 +184,7 @@ const panel = new PanelModel({
const ui = {
  row: byTestId('row'),
  createButton: byTestId<HTMLAnchorElement>('create-alert-rule-button'),
  notSavedYet: byText('Dashboard not saved'),
};

const server = setupMswServer();
@ -281,6 +283,29 @@ describe('PanelAlertTabContent', () => {
    });
  });

  it('should not make requests for unsaved dashboard', async () => {
    const capture = captureRequests();

    const unsavedDashboard = {
      ...dashboard,
      uid: null,
    } as DashboardModel;

    renderAlertTabContent(
      unsavedDashboard,
      new PanelModel({
        ...panel,
        datasource: undefined,
        maxDataPoints: 100,
        interval: '10s',
      })
    );

    expect(await ui.notSavedYet.find()).toBeInTheDocument();
    const requests = await capture;
    expect(requests.length).toBe(0);
  });

  it('Will take into account datasource minInterval', async () => {
    (getDatasourceSrv() as unknown as MockDataSourceSrv).datasources[dataSources.prometheus.uid].interval = '7m';

@ -6,7 +6,7 @@ import { GrafanaTheme2 } from '@grafana/data';
import { Field, Icon, Input, Label, Stack, Tooltip, useStyles2 } from '@grafana/ui';

import { logInfo, LogMessages } from '../../Analytics';
import { parseMatchers } from '../../utils/alertmanager';
import { parsePromQLStyleMatcherLoose } from '../../utils/matchers';

interface Props {
  defaultQueryString?: string;
@ -28,13 +28,22 @@ export const MatcherFilter = ({ onFilterChange, defaultQueryString }: Props) =>
  );

  const searchIcon = <Icon name={'search'} />;
  const inputInvalid = defaultQueryString ? parseMatchers(defaultQueryString).length === 0 : false;
  let inputValid = Boolean(defaultQueryString && defaultQueryString.length >= 3);
  try {
    if (!defaultQueryString) {
      inputValid = true;
    } else {
      parsePromQLStyleMatcherLoose(defaultQueryString);
    }
  } catch (err) {
    inputValid = false;
  }

  return (
    <Field
      className={styles.fixMargin}
      invalid={inputInvalid || undefined}
      error={inputInvalid ? 'Query must use valid matcher syntax. See the examples in the help tooltip.' : null}
      invalid={!inputValid}
      error={!inputValid ? 'Query must use valid matcher syntax. See the examples in the help tooltip.' : null}
      label={
        <Label>
          <Stack gap={0.5} alignItems="center">
@ -87,7 +87,7 @@ export const AmRootRouteForm = ({
      </Field>
      <Field
        label="Group by"
        description="Group alerts when you receive a notification based on labels."
        description="Combine multiple alerts into a single notification by grouping them by the same label values."
        data-testid="am-group-select"
      >
        <Controller
@ -119,7 +119,7 @@ export const AmRootRouteForm = ({
        <div className={styles.timingFormContainer}>
          <Field
            label="Group wait"
            description="The waiting time until the initial notification is sent for a new group created by an incoming alert. Default 30 seconds."
            description="The waiting time before sending the first notification for a new group of alerts. Default 30 seconds."
            invalid={!!errors.groupWaitValue}
            error={errors.groupWaitValue?.message}
            data-testid="am-group-wait"
@ -133,7 +133,7 @@ export const AmRootRouteForm = ({
          </Field>
          <Field
            label="Group interval"
            description="The waiting time to send a batch of new alerts for that group after the first notification was sent. Default 5 minutes."
            description="The wait time before sending a notification about changes in the alert group after the first notification has been sent. Default is 5 minutes."
            invalid={!!errors.groupIntervalValue}
            error={errors.groupIntervalValue?.message}
            data-testid="am-group-interval"
@ -147,7 +147,7 @@ export const AmRootRouteForm = ({
          </Field>
          <Field
            label="Repeat interval"
            description="The waiting time to resend an alert after they have successfully been sent. Default 4 hours. Should be a multiple of Group interval."
            description="The wait time before resending a notification that has already been sent successfully. Default is 4 hours. Should be a multiple of Group interval."
            invalid={!!errors.repeatIntervalValue}
            error={errors.repeatIntervalValue?.message}
            data-testid="am-repeat-interval"
@ -191,7 +191,7 @@ export const AmRoutesExpandedForm = ({
        {watch().overrideGrouping && (
          <Field
            label="Group by"
            description="Group alerts when you receive a notification based on labels. If empty it will be inherited from the parent policy."
            description="Combine multiple alerts into a single notification by grouping them by the same label values. If empty, it is inherited from the parent policy."
          >
            <Controller
              rules={{
@ -7,8 +7,12 @@ import { Button, Field, Icon, Input, Label, Select, Stack, Text, Tooltip, useSty
import { ObjectMatcher, Receiver, RouteWithID } from 'app/plugins/datasource/alertmanager/types';

import { useURLSearchParams } from '../../hooks/useURLSearchParams';
import { matcherToObjectMatcher, parseMatchers } from '../../utils/alertmanager';
import { normalizeMatchers } from '../../utils/matchers';
import { matcherToObjectMatcher } from '../../utils/alertmanager';
import {
  normalizeMatchers,
  parsePromQLStyleMatcherLoose,
  parsePromQLStyleMatcherLooseSafe,
} from '../../utils/matchers';

interface NotificationPoliciesFilterProps {
  receivers: Receiver[];
@ -35,7 +39,7 @@ const NotificationPoliciesFilter = ({
  }, [contactPoint, onChangeReceiver]);

  useEffect(() => {
    const matchers = parseMatchers(queryString ?? '').map(matcherToObjectMatcher);
    const matchers = parsePromQLStyleMatcherLooseSafe(queryString ?? '').map(matcherToObjectMatcher);
    handleChangeLabels()(matchers);
  }, [handleChangeLabels, queryString]);

@ -50,7 +54,17 @@ const NotificationPoliciesFilter = ({
  const selectedContactPoint = receiverOptions.find((option) => option.value === contactPoint) ?? null;

  const hasFilters = queryString || contactPoint;
  const inputInvalid = queryString && queryString.length > 3 ? parseMatchers(queryString).length === 0 : false;

  let inputValid = Boolean(queryString && queryString.length > 3);
  try {
    if (!queryString) {
      inputValid = true;
    } else {
      parsePromQLStyleMatcherLoose(queryString);
    }
  } catch (err) {
    inputValid = false;
  }

  return (
    <Stack direction="row" alignItems="flex-end" gap={1}>
@ -73,8 +87,8 @@ const NotificationPoliciesFilter = ({
          </Stack>
        </Label>
      }
      invalid={inputInvalid}
      error={inputInvalid ? 'Query must use valid matcher syntax' : null}
      invalid={!inputValid}
      error={!inputValid ? 'Query must use valid matcher syntax' : null}
    >
      <Input
        ref={searchInputRef}
@ -2,18 +2,18 @@ export const routeTimingsFields = {
  groupWait: {
    label: 'Group wait',
    description:
      'The waiting time until the initial notification is sent for a new group created by an incoming alert. If empty it will be inherited from the parent policy.',
      'The wait time before sending the first notification for a new group of alerts. If empty, it is inherited from the parent policy.',
    ariaLabel: 'Group wait value',
  },
  groupInterval: {
    label: 'Group interval',
    description:
      'The waiting time to send a batch of new alerts for that group after the first notification was sent. If empty it will be inherited from the parent policy.',
      'The wait time before sending a notification about changes in the alert group after the first notification has been sent. If empty, it is inherited from the parent policy.',
    ariaLabel: 'Group interval value',
  },
  repeatInterval: {
    label: 'Repeat interval',
    description: 'The waiting time to resend an alert after they have successfully been sent.',
    description: 'The wait time before resending a notification that has already been sent successfully.',
    ariaLabel: 'Repeat interval value',
  },
};
@ -173,20 +173,9 @@ function NeedHelpInfoForNotificationPolicy() {
      <Stack gap={1} direction="column">
        <Stack direction="column" gap={0}>
          <>
            Firing alert rule instances are routed to notification policies based on matching labels. All alert rules
            and instances, irrespective of their labels, match the default notification policy. If there are no nested
            policies, or no nested policies match the labels in the alert rule or alert instance, then the default
            notification policy is the matching policy.
            Firing alert instances are routed to notification policies based on matching labels. The default
            notification policy matches all alert instances.
          </>
          <a
            href={`https://grafana.com/docs/grafana/latest/alerting/fundamentals/notification-policies/notifications/`}
            target="_blank"
            rel="noreferrer"
          >
            <Text color="link">
              Read about notification routing. <Icon name="external-link-alt" />
            </Text>
          </a>
        </Stack>
        <Stack direction="column" gap={0}>
          <>
@ -194,12 +183,12 @@ function NeedHelpInfoForNotificationPolicy() {
            connect them to your notification policy by adding label matchers.
          </>
          <a
            href={`https://grafana.com/docs/grafana/latest/alerting/fundamentals/annotation-label/`}
            href={`https://grafana.com/docs/grafana/latest/alerting/fundamentals/notifications/notification-policies/`}
            target="_blank"
            rel="noreferrer"
          >
            <Text color="link">
              Read about Labels and annotations. <Icon name="external-link-alt" />
              Read about notification policies. <Icon name="external-link-alt" />
            </Text>
          </a>
        </Stack>
@ -220,20 +209,18 @@ function NeedHelpInfoForContactpoint() {
          <br />
          Notifications for firing alert instances are grouped based on folder and alert rule name.
          <br />
          The waiting time until the initial notification is sent for a new group created by an incoming alert is 30
          seconds.
          The wait time before sending the first notification for a new group of alerts is 30 seconds.
          <br />
          The waiting time to send a batch of new alerts for that group after the first notification was sent is 5
          minutes.
          The waiting time before sending a notification about changes in the alert group after the first notification
          has been sent is 5 minutes.
          <br />
          The waiting time to resend an alert after they have successfully been sent is 4 hours.
          The wait time before resending a notification that has already been sent successfully is 4 hours.
          <br />
          Grouping and wait time values are defined in your default notification policy.
        </>
      }
      // todo: update the link with the new documentation about simplified routing
      externalLink="`https://grafana.com/docs/grafana/latest/alerting/fundamentals/notification-policies/notifications/`"
      linkText="Read more about notifiying contact points"
      externalLink="https://grafana.com/docs/grafana/latest/alerting/fundamentals/notifications/"
      linkText="Read more about notifications"
      title="Notify contact points"
    />
  );
@ -5,6 +5,7 @@ import { DragDropContext, Droppable, DropResult } from 'react-beautiful-dnd';
import {
  DataQuery,
  DataSourceInstanceSettings,
  getDataSourceRef,
  LoadingState,
  PanelData,
  rangeUtil,
@ -226,10 +227,7 @@ function copyModel(item: AlertQuery, settings: DataSourceInstanceSettings): Omit
    ...item,
    model: {
      ...omit(item.model, 'datasource'),
      datasource: {
        type: settings.type,
        uid: settings.uid,
      },
      datasource: getDataSourceRef(settings),
    },
    datasourceUid: settings.uid,
  };
@ -244,10 +242,7 @@ function newModel(item: AlertQuery, settings: DataSourceInstanceSettings): Omit<
    model: {
      refId: item.refId,
      hide: false,
      datasource: {
        type: settings.type,
        uid: settings.uid,
      },
      datasource: getDataSourceRef(settings),
    },
  };
}
@ -68,7 +68,7 @@ export const RoutingSettings = ({ alertManager }: RoutingSettingsProps) => {
      {overrideGrouping && (
        <Field
          label="Group by"
          description="Group alerts when you receive a notification based on labels. If empty it will be inherited from the default notification policy."
          description="Combine multiple alerts into a single notification by grouping them by the same label values. If empty, it is inherited from the default notification policy."
          {...register(`contactPoints.${alertManager}.groupBy`)}
          invalid={!!errors.contactPoints?.[alertManager]?.groupBy}
          className={styles.optionalContent}
@ -1,6 +1,13 @@
import { createAction, createReducer } from '@reduxjs/toolkit';

import { DataQuery, getDefaultRelativeTimeRange, getNextRefId, rangeUtil, RelativeTimeRange } from '@grafana/data';
import {
  DataQuery,
  getDataSourceRef,
  getDefaultRelativeTimeRange,
  getNextRefId,
  rangeUtil,
  RelativeTimeRange,
} from '@grafana/data';
import { findDataSourceFromExpressionRecursive } from 'app/features/alerting/unified/utils/dataSourceFromExpression';
import { dataSource as expressionDatasource } from 'app/features/expressions/ExpressionDatasource';
import { isExpressionQuery } from 'app/features/expressions/guards';
@ -64,10 +71,7 @@ export const queriesAndExpressionsReducer = createReducer(initialState, (builder
      datasourceUid: datasource.uid,
      model: {
        refId: '',
        datasource: {
          type: datasource.type,
          uid: datasource.uid,
        },
        datasource: getDataSourceRef(datasource),
      },
    });
  })
@ -140,7 +140,7 @@ interface AnnotationValueProps {
  value: string;
}

function AnnotationValue({ value }: AnnotationValueProps) {
export function AnnotationValue({ value }: AnnotationValueProps) {
  const needsExternalLink = value && value.startsWith('http');
  const tokenizeValue = <Tokenize input={value} delimiter={['{{', '}}']} />;

@ -1,6 +1,6 @@
import { css } from '@emotion/css';

import { GrafanaTheme2 } from '@grafana/data/src/themes';
import { GrafanaTheme2 } from '@grafana/data';
import { CallToActionCard, useStyles2, Stack } from '@grafana/ui';
import EmptyListCTA from 'app/core/components/EmptyListCTA/EmptyListCTA';

@ -10,13 +10,14 @@ import {
  AlertInstanceStateFilter,
  InstanceStateFilter,
} from 'app/features/alerting/unified/components/rules/AlertInstanceStateFilter';
import { labelsMatchMatchers, parseMatchers } from 'app/features/alerting/unified/utils/alertmanager';
import { labelsMatchMatchers } from 'app/features/alerting/unified/utils/alertmanager';
import { createViewLink, sortAlerts } from 'app/features/alerting/unified/utils/misc';
import { SortOrder } from 'app/plugins/panel/alertlist/types';
import { Alert, CombinedRule, PaginationProps } from 'app/types/unified-alerting';
import { mapStateWithReasonToBaseState } from 'app/types/unified-alerting-dto';

import { GRAFANA_RULES_SOURCE_NAME, isGrafanaRulesSource } from '../../utils/datasource';
import { parsePromQLStyleMatcherLooseSafe } from '../../utils/matchers';
import { isAlertingRule } from '../../utils/rules';

import { AlertInstancesTable } from './AlertInstancesTable';
@ -148,7 +149,7 @@ function filterAlerts(
): Alert[] {
  let filteredAlerts = [...alerts];
  if (alertInstanceLabel) {
    const matchers = parseMatchers(alertInstanceLabel || '');
    const matchers = alertInstanceLabel ? parsePromQLStyleMatcherLooseSafe(alertInstanceLabel) : [];
    filteredAlerts = filteredAlerts.filter(({ labels }) => labelsMatchMatchers(labels, matchers));
  }
  if (alertInstanceState) {
@ -64,7 +64,12 @@ export const CentralAlertHistoryScene = () => {
        new SceneTimePicker({}),
        new SceneRefreshPicker({}),
      ],
      $timeRange: new SceneTimeRange({}), // needed for using the time range sync in the url
      // Use a default time range of the last hour: the history API limit is 5000 events,
      // and a wider range could leave gaps in the events list and the chart.
      $timeRange: new SceneTimeRange({
        from: 'now-1h',
        to: 'now',
      }),
      $variables: new SceneVariableSet({
        variables: [filterVariable],
      }),
@ -170,16 +175,16 @@ export const FilterInfo = () => {
    <Tooltip
      content={
        <div>
          <Trans i18nKey="central-alert-history.filter.info.label1">
          <Trans i18nKey="alerting.central-alert-history.filter.info.label1">
            Filter events using label querying without spaces, ex:
          </Trans>
          <pre>{`{severity="critical", instance=~"cluster-us-.+"}`}</pre>
          <Trans i18nKey="central-alert-history.filter.info.label2">Invalid use of spaces:</Trans>
          <pre>{`{severity= "critical"}`}</pre>
          <Trans i18nKey="alerting.central-alert-history.filter.info.label2">Invalid use of spaces:</Trans>
          <pre>{`{severity= "alerting.critical"}`}</pre>
          <pre>{`{severity ="critical"}`}</pre>
          <Trans i18nKey="central-alert-history.filter.info.label3">Valid use of spaces:</Trans>
          <Trans i18nKey="alerting.central-alert-history.filter.info.label3">Valid use of spaces:</Trans>
          <pre>{`{severity=" critical"}`}</pre>
          <Trans i18nKey="central-alert-history.filter.info.label4">
          <Trans i18nKey="alerting.central-alert-history.filter.info.label4">
            Filter alerts using label querying without braces, ex:
          </Trans>
          <pre>{`severity="critical", instance=~"cluster-us-.+"`}</pre>
@ -0,0 +1,236 @@
import { css } from '@emotion/css';
import { max, min, uniqBy } from 'lodash';
import { useMemo } from 'react';

import { FieldType, GrafanaTheme2, LoadingState, PanelData, dateTime, makeTimeRange } from '@grafana/data';
import { Icon, Stack, Text, useStyles2 } from '@grafana/ui';
import { Trans, t } from 'app/core/internationalization';
import { CombinedRule } from 'app/types/unified-alerting';

import { useCombinedRule } from '../../../hooks/useCombinedRule';
import { parse } from '../../../utils/rule-id';
import { isGrafanaRulerRule } from '../../../utils/rules';
import { MetaText } from '../../MetaText';
import { VizWrapper } from '../../rule-editor/VizWrapper';
import { AnnotationValue } from '../../rule-viewer/tabs/Details';
import { LogRecord } from '../state-history/common';

import { EventState } from './EventListSceneObject';

interface EventDetailsProps {
  record: LogRecord;
  logRecords: LogRecord[];
}
export function EventDetails({ record, logRecords }: EventDetailsProps) {
  // get the rule from the ruleUID
  const ruleUID = record.line?.ruleUID ?? '';
  const identifier = useMemo(() => {
    return parse(ruleUID, true);
  }, [ruleUID]);
  const { error, loading, result: rule } = useCombinedRule({ ruleIdentifier: identifier });

  if (error) {
    return (
      <Text>
        <Trans i18nKey="alerting.central-alert-history.details.error">Error loading rule for this event.</Trans>
      </Text>
    );
  }
  if (loading) {
    return (
      <Text>
        <Trans i18nKey="alerting.central-alert-history.details.loading">Loading...</Trans>
      </Text>
    );
  }

  if (!rule) {
    return (
      <Text>
        <Trans i18nKey="alerting.central-alert-history.details.not-found">Rule not found for this event.</Trans>
      </Text>
    );
  }

  const getTransitionsCountByRuleUID = (ruleUID: string) => {
    return logRecords.filter((record) => record.line.ruleUID === ruleUID).length;
  };

  return (
    <Stack direction="column" gap={0.5}>
      <Stack direction={'row'} gap={6}>
        <StateTransition record={record} />
        <ValueInTransition record={record} />
        <NumberTransitions transitions={ruleUID ? getTransitionsCountByRuleUID(ruleUID) : 0} />
      </Stack>
      <Annotations rule={rule} />
      <QueryVizualization rule={rule} ruleUID={ruleUID} logRecords={logRecords} />
    </Stack>
  );
}

interface StateTransitionProps {
  record: LogRecord;
}
function StateTransition({ record }: StateTransitionProps) {
  return (
    <Stack gap={0.5} direction={'column'}>
      <Text variant="body" weight="light" color="secondary">
        <Trans i18nKey="alerting.central-alert-history.details.state-transitions">State transition</Trans>
      </Text>
      <Stack gap={0.5} direction={'row'} alignItems="center">
        <EventState state={record.line.previous} showLabel />
        <Icon name="arrow-right" size="lg" />
        <EventState state={record.line.current} showLabel />
      </Stack>
    </Stack>
  );
}

interface AnnotationsProps {
  rule: CombinedRule;
}
const Annotations = ({ rule }: AnnotationsProps) => {
  const styles = useStyles2(getStyles);
  const annotations = rule.annotations;
  if (!annotations) {
    return null;
  }
  return (
    <>
      <Text variant="body" color="secondary" weight="light">
        <Trans i18nKey="alerting.central-alert-history.details.annotations">Annotations</Trans>
      </Text>
      {Object.keys(annotations).length === 0 ? (
        <Text variant="body" weight="light" italic>
          <Trans i18nKey="alerting.central-alert-history.details.no-annotations">No annotations</Trans>
        </Text>
      ) : (
        <div className={styles.metadataWrapper}>
          {Object.entries(annotations).map(([name, value]) => (
            <MetaText direction="column" key={name}>
              {name}
              <AnnotationValue value={value} />
            </MetaText>
          ))}
        </div>
      )}
    </>
  );
};

/**
 *
 * This component renders the visualization for the rule condition values over the selected time range.
 * The visualization is a time series graph with the condition values on the y-axis and time on the x-axis.
 * The values are extracted from the log records already fetched from the history api.
 * The graph is rendered only if the rule is a Grafana rule.
 *
 */
interface QueryVizualizationProps {
  ruleUID: string;
  rule: CombinedRule;
  logRecords: LogRecord[];
}
const QueryVizualization = ({ ruleUID, rule, logRecords }: QueryVizualizationProps) => {
  if (!isGrafanaRulerRule(rule?.rulerRule)) {
    return (
      <Text>
        <Trans i18nKey="alerting.central-alert-history.details.not-grafana-rule">Rule is not a Grafana rule</Trans>
      </Text>
    );
  }
  // get the condition from the rule
  const condition = rule?.rulerRule.grafana_alert?.condition ?? 'A';
  // get the panel data for the rule
  const panelData = getPanelDataForRule(ruleUID, logRecords, condition);
  // render the visualization
  return <VizWrapper data={panelData} thresholds={undefined} thresholdsType={undefined} />;
};

/**
 * This function returns the time series panel data for the condition values of the rule, within the selected time range.
 * The values are extracted from the log records already fetched from the history api.
 * @param ruleUID
 * @param logRecords
 * @param condition
 * @returns PanelData
 */
export function getPanelDataForRule(ruleUID: string, logRecords: LogRecord[], condition: string) {
  const ruleLogRecords = logRecords
    .filter((record) => record.line.ruleUID === ruleUID)
    // sort by timestamp as time series data is expected to be sorted by time
    .sort((a, b) => a.timestamp - b.timestamp);

  // get unique records by timestamp, as timeseries data should have unique timestamps, and it might be possible to have multiple records with the same timestamp
  const uniqueRecords = uniqBy(ruleLogRecords, (record) => record.timestamp);

  const timestamps = uniqueRecords.map((record) => record.timestamp);
  const values = uniqueRecords.map((record) => (record.line.values ? record.line.values[condition] : 0));
  const minTimestamp = min(timestamps);
  const maxTimestamp = max(timestamps);

  const PanelDataObj: PanelData = {
    series: [
      {
        name: 'Rule condition history',
        fields: [
          { name: 'Time', values: timestamps, config: {}, type: FieldType.time },
          { name: 'values', values: values, type: FieldType.number, config: {} },
        ],
        length: timestamps.length,
      },
    ],
    state: LoadingState.Done,
    timeRange: makeTimeRange(dateTime(minTimestamp), dateTime(maxTimestamp)),
  };
  return PanelDataObj;
}

interface ValueInTransitionProps {
  record: LogRecord;
}
function ValueInTransition({ record }: ValueInTransitionProps) {
  const values = record?.line?.values
    ? JSON.stringify(record.line.values)
    : t('alerting.central-alert-history.details.no-values', 'No values');
  return (
    <Stack gap={0.5} direction={'column'}>
      <Text variant="body" weight="light" color="secondary">
        <Trans i18nKey="alerting.central-alert-history.details.value-in-transition">Value in transition</Trans>
      </Text>
      <Stack gap={0.5} direction={'row'} alignItems="center">
        <Text variant="body" weight="light">
          {values}
        </Text>
      </Stack>
    </Stack>
  );
}
interface NumberTransitionsProps {
  transitions: number;
}
function NumberTransitions({ transitions }: NumberTransitionsProps) {
  return (
    <Stack gap={0.5} direction={'column'} alignItems="flex-start" justifyContent={'center'}>
      <Text variant="body" weight="light" color="secondary">
        <Trans i18nKey="alerting.central-alert-history.details.number-transitions">
          State transitions for selected period
        </Trans>
      </Text>
      <Text variant="body" weight="light">
        {transitions}
      </Text>
    </Stack>
  );
}
const getStyles = (theme: GrafanaTheme2) => {
  return {
    metadataWrapper: css({
      display: 'grid',
      gridTemplateColumns: 'auto auto',
      rowGap: theme.spacing(3),
      columnGap: theme.spacing(12),
    }),
  };
};
@ -1,13 +1,13 @@
|
||||
import { css } from '@emotion/css';
|
||||
import { useMemo, useState } from 'react';
|
||||
import { css, cx } from '@emotion/css';
|
||||
import { ReactElement, useMemo, useState } from 'react';
|
||||
import { useMeasure } from 'react-use';
|
||||
|
||||
import { DataFrameJSON, GrafanaTheme2, TimeRange } from '@grafana/data';
|
||||
import { DataFrameJSON, GrafanaTheme2, IconName, TimeRange } from '@grafana/data';
|
||||
import { isFetchError } from '@grafana/runtime';
|
||||
import { SceneComponentProps, SceneObjectBase, TextBoxVariable, VariableValue, sceneGraph } from '@grafana/scenes';
|
||||
import { Alert, Icon, LoadingBar, Stack, Text, Tooltip, useStyles2, withErrorBoundary } from '@grafana/ui';
|
||||
import { Alert, Icon, LoadingBar, Pagination, Stack, Text, Tooltip, useStyles2, withErrorBoundary } from '@grafana/ui';
|
||||
import { EntityNotFound } from 'app/core/components/PageNotFound/EntityNotFound';
|
||||
import { t } from 'app/core/internationalization';
|
||||
import { Trans, t } from 'app/core/internationalization';
|
||||
import {
|
||||
GrafanaAlertStateWithReason,
|
||||
isAlertStateWithReason,
|
||||
@ -17,8 +17,10 @@ import {
|
||||
} from 'app/types/unified-alerting-dto';
|
||||
|
||||
import { stateHistoryApi } from '../../../api/stateHistoryApi';
|
||||
import { labelsMatchMatchers, parseMatchers } from '../../../utils/alertmanager';
|
||||
import { usePagination } from '../../../hooks/usePagination';
|
||||
import { labelsMatchMatchers } from '../../../utils/alertmanager';
|
||||
import { GRAFANA_RULES_SOURCE_NAME } from '../../../utils/datasource';
|
||||
import { parsePromQLStyleMatcherLooseSafe } from '../../../utils/matchers';
|
||||
import { stringifyErrorLike } from '../../../utils/misc';
|
||||
import { AlertLabels } from '../../AlertLabels';
|
||||
import { CollapseToggle } from '../../CollapseToggle';
|
||||
@ -26,8 +28,10 @@ import { LogRecord } from '../state-history/common';
|
||||
import { isLine, isNumbers } from '../state-history/useRuleHistoryRecords';
|
||||
|
||||
import { LABELS_FILTER } from './CentralAlertHistoryScene';
|
||||
import { EventDetails } from './EventDetails';
|
||||
|
||||
export const LIMIT_EVENTS = 5000; // limit is hard-capped at 5000 at the BE level.
|
||||
const PAGE_SIZE = 100;
|
||||
|
||||
/**
|
||||
*
|
||||
@ -35,13 +39,11 @@ export const LIMIT_EVENTS = 5000; // limit is hard-capped at 5000 at the BE leve
|
||||
* It fetches the events from the history api and displays them in a list.
|
||||
* The list is filtered by the labels in the filter variable and by the time range variable in the scene graph.
|
||||
*/
|
||||
export const HistoryEventsList = ({
|
||||
timeRange,
|
||||
valueInfilterTextBox,
|
||||
}: {
|
||||
interface HistoryEventsListProps {
|
||||
timeRange?: TimeRange;
|
||||
valueInfilterTextBox: VariableValue;
|
||||
}) => {
|
||||
}
|
||||
export const HistoryEventsList = ({ timeRange, valueInfilterTextBox }: HistoryEventsListProps) => {
|
||||
const from = timeRange?.from.unix();
|
||||
const to = timeRange?.to.unix();
|
||||
|
||||
@ -85,12 +87,23 @@ interface HistoryLogEventsProps {
|
||||
logRecords: LogRecord[];
|
||||
}
|
||||
function HistoryLogEvents({ logRecords }: HistoryLogEventsProps) {
|
||||
const { page, pageItems, numberOfPages, onPageChange } = usePagination(logRecords, 1, PAGE_SIZE);
|
||||
return (
|
||||
<ul>
|
||||
{logRecords.map((record) => {
|
||||
return <EventRow key={record.timestamp + (record.line.fingerprint ?? '')} record={record} />;
|
||||
})}
|
||||
</ul>
|
||||
<Stack direction="column" gap={0}>
|
||||
<ul>
|
||||
{pageItems.map((record) => {
|
||||
return (
|
||||
<EventRow
|
||||
key={record.timestamp + (record.line.fingerprint ?? '')}
|
||||
record={record}
|
||||
logRecords={logRecords}
|
||||
/>
|
||||
);
|
||||
})}
|
||||
</ul>
|
||||
{/* This paginations improves the performance considerably , making the page load faster */}
|
||||
<Pagination currentPage={page} numberOfPages={numberOfPages} onNavigate={onPageChange} hideWhenSinglePage />
|
||||
</Stack>
|
||||
);
|
||||
}
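
// Editor's sketch (not part of the original diff): the slicing that the usePagination hook
// above is assumed to perform. The signature and return shape are inferred from the call
// site, usePagination(logRecords, 1, PAGE_SIZE) giving { page, pageItems, numberOfPages,
// onPageChange }; the hook itself lives in ../../../hooks/usePagination.
function paginate<T>(items: T[], page: number, pageSize: number): T[] {
  // pages are 1-indexed at the call site
  return items.slice((page - 1) * pageSize, page * pageSize);
}
const pageCount = (totalItems: number, pageSize: number) => Math.ceil(totalItems / pageSize);
// paginate(logRecords, 1, 100) returns records 0..99; pageCount(250, 100) === 3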

@ -102,17 +115,25 @@ function HistoryErrorMessage({ error }: HistoryErrorMessageProps) {
  if (isFetchError(error) && error.status === 404) {
    return <EntityNotFound entity="History" />;
  }
  const title = t('central-alert-history.error', 'Something went wrong loading the alert state history');
  const title = t('alerting.central-alert-history.error', 'Something went wrong loading the alert state history');
  const errorStr = stringifyErrorLike(error);

  return <Alert title={title}>{stringifyErrorLike(error)}</Alert>;
  return <Alert title={title}>{errorStr}</Alert>;
}

function EventRow({ record }: { record: LogRecord }) {
interface EventRowProps {
  record: LogRecord;
  logRecords: LogRecord[];
}
function EventRow({ record, logRecords }: EventRowProps) {
  const styles = useStyles2(getStyles);
  const [isCollapsed, setIsCollapsed] = useState(true);
  return (
    <div>
      <div className={styles.header} data-testid="event-row-header">
    <Stack direction="column" gap={0}>
      <div
        className={cx(styles.header, isCollapsed ? styles.collapsedHeader : styles.notCollapsedHeader)}
        data-testid="event-row-header"
      >
        <CollapseToggle
          size="sm"
          className={styles.collapseToggle}
@ -134,15 +155,29 @@ function EventRow({ record }: { record: LogRecord }) {
        </div>
      </Stack>
      </div>
    </div>
      {!isCollapsed && (
        <div className={styles.expandedRow}>
          <EventDetails record={record} logRecords={logRecords} />
        </div>
      )}
    </Stack>
  );
}

function AlertRuleName({ labels, ruleUID }: { labels: Record<string, string>; ruleUID?: string }) {
interface AlertRuleNameProps {
  labels: Record<string, string>;
  ruleUID?: string;
}
function AlertRuleName({ labels, ruleUID }: AlertRuleNameProps) {
  const styles = useStyles2(getStyles);
  const alertRuleName = labels['alertname'];
  if (!ruleUID) {
    return <Text>{alertRuleName}</Text>;
    return (
      <Text>
        <Trans i18nKey="alerting.central-alert-history.details.unknown-rule">Unknown</Trans>
        {alertRuleName}
      </Text>
    );
  }
  return (
    <Tooltip content={alertRuleName ?? ''}>
@ -170,55 +205,90 @@ function EventTransition({ previous, current }: EventTransitionProps) {
  );
}

function EventState({ state }: { state: GrafanaAlertStateWithReason }) {
  const styles = useStyles2(getStyles);
interface StateIconProps {
  iconName: IconName;
  iconColor: string;
  tooltipContent: string;
  labelText: ReactElement;
  showLabel: boolean;
}
const StateIcon = ({ iconName, iconColor, tooltipContent, labelText, showLabel }: StateIconProps) => (
  <Tooltip content={tooltipContent}>
    <Stack gap={0.5} direction={'row'} alignItems="center">
      <Icon name={iconName} size="md" className={iconColor} />
      {showLabel && (
        <Text variant="body" weight="light">
          {labelText}
        </Text>
      )}
    </Stack>
  </Tooltip>
);

interface EventStateProps {
  state: GrafanaAlertStateWithReason;
  showLabel?: boolean;
}
export function EventState({ state, showLabel = false }: EventStateProps) {
  const styles = useStyles2(getStyles);
  const toolTip = t('alerting.central-alert-history.details.no-recognized-state', 'No recognized state');
  if (!isGrafanaAlertState(state) && !isAlertStateWithReason(state)) {
    return (
      <Tooltip content={'No recognized state'}>
        <Icon name="exclamation-triangle" size="md" />
      </Tooltip>
      <StateIcon
        iconName="exclamation-triangle"
        tooltipContent={toolTip}
        labelText={<Trans i18nKey="alerting.central-alert-history.details.unknown-event-state">Unknown</Trans>}
        showLabel={Boolean(showLabel)}
        iconColor={styles.warningColor}
      />
    );
  }
  const baseState = mapStateWithReasonToBaseState(state);
  const reason = mapStateWithReasonToReason(state);

  switch (baseState) {
    case 'Normal':
      return (
        <Tooltip content={Boolean(reason) ? `Normal (${reason})` : 'Normal'}>
          <Icon name="check-circle" size="md" className={Boolean(reason) ? styles.warningColor : styles.normalColor} />
        </Tooltip>
      );
    case 'Alerting':
      return (
        <Tooltip content={'Alerting'}>
          <Icon name="exclamation-circle" size="md" className={styles.alertingColor} />
        </Tooltip>
      );
    case 'NoData': //todo:change icon
      return (
        <Tooltip content={'Insufficient data'}>
          <Icon name="exclamation-triangle" size="md" className={styles.warningColor} />
          {/* no idea which icon to use */}
        </Tooltip>
      );
    case 'Error':
      return (
        <Tooltip content={'Error'}>
          <Icon name="exclamation-circle" size="md" />
        </Tooltip>
      );

    case 'Pending':
      return (
        <Tooltip content={Boolean(reason) ? `Pending (${reason})` : 'Pending'}>
          <Icon name="circle" size="md" className={styles.warningColor} />
        </Tooltip>
      );
    default:
      return <Icon name="exclamation-triangle" size="md" />;
  interface StateConfig {
    iconName: IconName;
    iconColor: string;
    tooltipContent: string;
    labelText: ReactElement;
  }
  interface StateConfigMap {
    [key: string]: StateConfig;
  }
  const stateConfig: StateConfigMap = {
    Normal: {
      iconName: 'check-circle',
      iconColor: Boolean(reason) ? styles.warningColor : styles.normalColor,
      tooltipContent: Boolean(reason) ? `Normal (${reason})` : 'Normal',
      labelText: <Trans i18nKey="alerting.central-alert-history.details.state.normal">Normal</Trans>,
    },
    Alerting: {
      iconName: 'exclamation-circle',
      iconColor: styles.alertingColor,
      tooltipContent: 'Alerting',
      labelText: <Trans i18nKey="alerting.central-alert-history.details.state.alerting">Alerting</Trans>,
    },
    NoData: {
      iconName: 'exclamation-triangle',
      iconColor: styles.warningColor,
      tooltipContent: 'Insufficient data',
      labelText: <Trans i18nKey="alerting.central-alert-history.details.state.no-data">No data</Trans>,
    },
    Error: {
      iconName: 'exclamation-circle',
      tooltipContent: 'Error',
      iconColor: styles.warningColor,
      labelText: <Trans i18nKey="alerting.central-alert-history.details.state.error">Error</Trans>,
    },
    Pending: {
      iconName: 'circle',
      iconColor: styles.warningColor,
      tooltipContent: Boolean(reason) ? `Pending (${reason})` : 'Pending',
      labelText: <Trans i18nKey="alerting.central-alert-history.details.state.pending">Pending</Trans>,
    },
  };

  const config = stateConfig[baseState] || { iconName: 'exclamation-triangle', tooltipContent: 'Unknown State' };
  return <StateIcon {...config} showLabel={showLabel} />;
}

interface TimestampProps {
@ -253,12 +323,16 @@ export const getStyles = (theme: GrafanaTheme2) => {
    alignItems: 'center',
    padding: `${theme.spacing(1)} ${theme.spacing(1)} ${theme.spacing(1)} 0`,
    flexWrap: 'nowrap',
    borderBottom: `1px solid ${theme.colors.border.weak}`,

    '&:hover': {
      backgroundColor: theme.components.table.rowHoverBackground,
    },
  }),
  collapsedHeader: css({
    borderBottom: `1px solid ${theme.colors.border.weak}`,
  }),
  notCollapsedHeader: css({
    borderBottom: 'none',
  }),

  collapseToggle: css({
    background: 'none',
@ -303,6 +377,11 @@ export const getStyles = (theme: GrafanaTheme2) => {
    display: 'block',
    color: theme.colors.text.link,
  }),
  expandedRow: css({
    padding: theme.spacing(2),
    marginLeft: theme.spacing(2),
    borderLeft: `1px solid ${theme.colors.border.weak}`,
  }),
  };
};

@ -334,7 +413,7 @@ function useRuleHistoryRecords(stateHistory?: DataFrameJSON, filter?: string) {
    return { historyRecords: [] };
  }

  const filterMatchers = filter ? parseMatchers(filter) : [];
  const filterMatchers = filter ? parsePromQLStyleMatcherLooseSafe(filter) : [];

  const [tsValues, lines] = stateHistory.data.values;
  const timestamps = isNumbers(tsValues) ? tsValues : [];
@ -0,0 +1,53 @@
import { dateTime } from '@grafana/data';

import { LogRecord } from '../state-history/common';

import { getPanelDataForRule } from './EventDetails';

const initialTimeStamp = 1000000;
const instanceLabels = { foo: 'bar', severity: 'critical', cluster: 'dev-us' }; // the label values themselves don't matter for this test
const records: LogRecord[] = [
  {
    timestamp: initialTimeStamp,
    line: { previous: 'Normal', current: 'Alerting', labels: instanceLabels, ruleUID: 'ruleUID1', values: { C: 1 } },
  },
  {
    timestamp: initialTimeStamp + 1000,
    line: { previous: 'Alerting', current: 'Normal', labels: instanceLabels, ruleUID: 'ruleUID2' },
  },
  {
    timestamp: initialTimeStamp + 2000,
    line: { previous: 'Normal', current: 'Alerting', labels: instanceLabels, ruleUID: 'ruleUID3' },
  },
  // not sorted by timestamp
  {
    timestamp: initialTimeStamp + 4000,
    line: { previous: 'Normal', current: 'Alerting', labels: instanceLabels, ruleUID: 'ruleUID1', values: { C: 8 } },
  },
  {
    timestamp: initialTimeStamp + 3000,
    line: { previous: 'Alerting', current: 'Normal', labels: instanceLabels, ruleUID: 'ruleUID1', values: { C: 0 } },
  },
  // duplicate record with the same timestamp
  {
    timestamp: initialTimeStamp + 3000,
    line: { previous: 'Alerting', current: 'Normal', labels: instanceLabels, ruleUID: 'ruleUID1', values: { C: 0 } },
  },
  {
    timestamp: initialTimeStamp + 5000,
    line: { previous: 'Alerting', current: 'Normal', labels: instanceLabels, ruleUID: 'ruleUID1', values: { C: 0 } },
  },
];
describe('getPanelDataForRule', () => {
  it('should return correct panel data for a given rule (sorted by time and unique)', () => {
    const result = getPanelDataForRule('ruleUID1', records, 'C');

    expect(result.series[0].fields[0].values).toEqual([1000000, 1003000, 1004000, 1005000]);
    expect(result.series[0].fields[1].values).toEqual([1, 0, 8, 0]);
    expect(result.series[0].fields[0].type).toEqual('time');
    expect(result.series[0].fields[1].type).toEqual('number');
    expect(result.state).toEqual('Done');
    expect(result.timeRange.from).toEqual(dateTime(1000000));
    expect(result.timeRange.to).toEqual(dateTime(1005000));
  });
});
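
// Editor's note (not part of the original diff): getPanelDataForRule dedupes with
// uniqBy(ruleLogRecords, (record) => record.timestamp); lodash's uniqBy keeps the first
// occurrence per key, so only one of the two identical records at initialTimeStamp + 3000
// above contributes a data point, which is what the [1, 0, 8, 0] expectation encodes.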

@ -3,7 +3,8 @@ import { groupBy } from 'lodash';
import { DataFrame, Field as DataFrameField, DataFrameJSON, Field, FieldType } from '@grafana/data';
import { fieldIndexComparer } from '@grafana/data/src/field/fieldComparers';

import { labelsMatchMatchers, parseMatchers } from '../../../utils/alertmanager';
import { labelsMatchMatchers } from '../../../utils/alertmanager';
import { parsePromQLStyleMatcherLooseSafe } from '../../../utils/matchers';
import { LogRecord } from '../state-history/common';
import { isLine, isNumbers } from '../state-history/useRuleHistoryRecords';

@ -61,7 +62,7 @@ function groupDataFramesByTimeAndFilterByLabels(dataFrames: DataFrame[]): DataFr
  const filterValue = getFilterInQueryParams();
  const dataframesFiltered = dataFrames.filter((frame) => {
    const labels = JSON.parse(frame.name ?? ''); // the labels are stored stringified in the frame name
    const matchers = Boolean(filterValue) ? parseMatchers(filterValue) : [];
    const matchers = Boolean(filterValue) ? parsePromQLStyleMatcherLooseSafe(filterValue) : [];
    return labelsMatchMatchers(labels, matchers);
  });
  // Extract time fields from filtered data frames

@ -13,7 +13,8 @@ import { fieldIndexComparer } from '@grafana/data/src/field/fieldComparers';
import { MappingType, ThresholdsMode } from '@grafana/schema';
import { useTheme2 } from '@grafana/ui';

import { labelsMatchMatchers, parseMatchers } from '../../../utils/alertmanager';
import { labelsMatchMatchers } from '../../../utils/alertmanager';
import { parsePromQLStyleMatcherLooseSafe } from '../../../utils/matchers';

import { extractCommonLabels, Line, LogRecord, omitLabels } from './common';

@ -50,7 +51,7 @@ export function useRuleHistoryRecords(stateHistory?: DataFrameJSON, filter?: str

  const commonLabels = extractCommonLabels(groupLabelsArray);

  const filterMatchers = filter ? parseMatchers(filter) : [];
  const filterMatchers = filter ? parsePromQLStyleMatcherLooseSafe(filter) : [];
  const filteredGroupedLines = Object.entries(logRecordsByInstance).filter(([key]) => {
    const labels = JSON.parse(key);
    return labelsMatchMatchers(labels, filterMatchers);

@ -6,7 +6,7 @@ import { GrafanaTheme2 } from '@grafana/data';
import { Button, Field, Icon, Input, Label, Tooltip, useStyles2, Stack } from '@grafana/ui';
import { useQueryParams } from 'app/core/hooks/useQueryParams';

import { parseMatchers } from '../../utils/alertmanager';
import { parsePromQLStyleMatcherLoose } from '../../utils/matchers';
import { getSilenceFiltersFromUrlParams } from '../../utils/misc';

const getQueryStringKey = () => uniqueId('query-string-');
@ -30,7 +30,16 @@ export const SilencesFilter = () => {
    setTimeout(() => setQueryStringKey(getQueryStringKey()));
  };

  const inputInvalid = queryString && queryString.length > 3 ? parseMatchers(queryString).length === 0 : false;
  let inputValid = queryString && queryString.length > 3;
  try {
    if (!queryString) {
      inputValid = true;
    } else {
      parsePromQLStyleMatcherLoose(queryString);
    }
  } catch (err) {
    inputValid = false;
  }

  return (
    <div className={styles.flexRow}>
@ -53,8 +62,8 @@ export const SilencesFilter = () => {
          </Stack>
        </Label>
      }
      invalid={inputInvalid}
      error={inputInvalid ? 'Query must use valid matcher syntax' : null}
      invalid={!inputValid}
      error={!inputValid ? 'Query must use valid matcher syntax' : null}
    >
      <Input
        key={queryStringKey}

@ -23,7 +23,7 @@ import { AlertmanagerAlert, Silence, SilenceState } from 'app/plugins/datasource

import { alertmanagerApi } from '../../api/alertmanagerApi';
import { AlertmanagerAction, useAlertmanagerAbility } from '../../hooks/useAbilities';
import { parseMatchers } from '../../utils/alertmanager';
import { parsePromQLStyleMatcherLooseSafe } from '../../utils/matchers';
import { getSilenceFiltersFromUrlParams, makeAMLink, stringifyErrorLike } from '../../utils/misc';
import { Authorize } from '../Authorize';
import { DynamicTable, DynamicTableColumnProps, DynamicTableItemProps } from '../DynamicTable';
@ -220,7 +220,7 @@ const useFilteredSilences = (silences: Silence[], expired = false) => {
      }
    }
    if (queryString) {
      const matchers = parseMatchers(queryString);
      const matchers = parsePromQLStyleMatcherLooseSafe(queryString);
      const matchersMatch = matchers.every((matcher) =>
        silence.matchers?.some(
          ({ name, value, isEqual, isRegex }) =>

@ -468,7 +468,7 @@ function hashQuery(query: string) {
  This hook returns combined Grafana rules. Optionally, it can filter rules by dashboard UID and panel ID.
*/
export function useCombinedRules(
  dashboardUID?: string,
  dashboardUID?: string | null,
  panelId?: number,
  poll?: boolean
): {
@ -483,10 +483,12 @@ export function useCombinedRules(
  } = alertRuleApi.endpoints.prometheusRuleNamespaces.useQuery(
    {
      ruleSourceName: GRAFANA_RULES_SOURCE_NAME,
      dashboardUid: dashboardUID,
      dashboardUid: dashboardUID ?? undefined,
      panelId,
    },
    {
      // "null" means the dashboard isn't saved yet, as opposed to "undefined" which means we don't want to filter by dashboard UID
      skip: dashboardUID === null,
      pollingInterval: poll ? RULE_LIST_POLL_INTERVAL_MS : undefined,
    }
  );
@ -498,10 +500,11 @@ export function useCombinedRules(
  } = alertRuleApi.endpoints.rulerRules.useQuery(
    {
      rulerConfig: grafanaRulerConfig,
      filter: { dashboardUID, panelId },
      filter: { dashboardUID: dashboardUID ?? undefined, panelId },
    },
    {
      pollingInterval: poll ? RULE_LIST_POLL_INTERVAL_MS : undefined,
      skip: dashboardUID === null,
    }
  );
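
// Editor's sketch (not part of the original diff): the null/undefined convention the two
// queries above share, written out as a single predicate. null means "dashboard not saved
// yet, skip fetching"; undefined means "no dashboard filter at all".
function shouldSkipRuleQueries(dashboardUID: string | null | undefined): boolean {
  return dashboardUID === null; // undefined still runs the query, just unfiltered
}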

@ -3,19 +3,21 @@ import { useMemo } from 'react';
import { useQueryParams } from 'app/core/hooks/useQueryParams';
import { AlertmanagerGroup } from 'app/plugins/datasource/alertmanager/types';

import { labelsMatchMatchers, parseMatchers } from '../utils/alertmanager';
import { labelsMatchMatchers } from '../utils/alertmanager';
import { parsePromQLStyleMatcherLooseSafe } from '../utils/matchers';
import { getFiltersFromUrlParams } from '../utils/misc';

export const useFilteredAmGroups = (groups: AlertmanagerGroup[]) => {
  const [queryParams] = useQueryParams();
  const filters = getFiltersFromUrlParams(queryParams);
  const matchers = parseMatchers(filters.queryString || '');
  const { queryString, alertState } = getFiltersFromUrlParams(queryParams);

  return useMemo(() => {
    const matchers = queryString ? parsePromQLStyleMatcherLooseSafe(queryString) : [];

    return groups.reduce((filteredGroup: AlertmanagerGroup[], group) => {
      const alerts = group.alerts.filter(({ labels, status }) => {
        const labelsMatch = labelsMatchMatchers(labels, matchers);
        const filtersMatch = filters.alertState ? status.state === filters.alertState : true;
        const filtersMatch = alertState ? status.state === alertState : true;
        return labelsMatch && filtersMatch;
      });
      if (alerts.length > 0) {
@ -28,5 +30,5 @@ export const useFilteredAmGroups = (groups: AlertmanagerGroup[]) => {
    }
    return filteredGroup;
  }, []);
  }, [groups, filters, matchers]);
  }, [queryString, groups, alertState]);
};

@ -9,10 +9,10 @@ import { CombinedRuleGroup, CombinedRuleNamespace, Rule } from 'app/types/unifie
import { isPromAlertingRuleState, PromRuleType, RulerGrafanaRuleDTO } from 'app/types/unified-alerting-dto';

import { applySearchFilterToQuery, getSearchFilterFromQuery, RulesFilter } from '../search/rulesSearchParser';
import { labelsMatchMatchers, matcherToMatcherField, parseMatchers } from '../utils/alertmanager';
import { labelsMatchMatchers, matcherToMatcherField } from '../utils/alertmanager';
import { Annotation } from '../utils/constants';
import { isCloudRulesSource } from '../utils/datasource';
import { parseMatcher } from '../utils/matchers';
import { parseMatcher, parsePromQLStyleMatcherLoose } from '../utils/matchers';
import {
  getRuleHealth,
  isAlertingRule,
@ -71,7 +71,7 @@ export function useRulesFilter() {
    dataSource: queryParams.get('dataSource') ?? undefined,
    alertState: queryParams.get('alertState') ?? undefined,
    ruleType: queryParams.get('ruleType') ?? undefined,
    labels: parseMatchers(queryParams.get('queryString') ?? '').map(matcherToMatcherField),
    labels: parsePromQLStyleMatcherLoose(queryParams.get('queryString') ?? '').map(matcherToMatcherField),
  };

  const hasLegacyFilters = Object.values(legacyFilters).some((legacyFilter) => !isEmpty(legacyFilter));

@ -3,7 +3,7 @@ import { CombinedRule } from 'app/types/unified-alerting';
import { useCombinedRules } from './useCombinedRuleNamespaces';

interface Options {
  dashboardUID: string;
  dashboardUID: string | null;
  panelId: number;

  poll?: boolean;

@ -1,6 +1,6 @@
import { css } from '@emotion/css';

import { GrafanaTheme2 } from '@grafana/data/src/themes';
import { GrafanaTheme2 } from '@grafana/data';
import { useStyles2 } from '@grafana/ui';

import { DataSourceInformation } from '../home/Insights';

@ -1,7 +1,7 @@
import { css } from '@emotion/css';
import { useState } from 'react';

import { GrafanaTheme2 } from '@grafana/data/src/themes';
import { GrafanaTheme2 } from '@grafana/data';
import { Button, Dropdown, Icon, IconButton, Menu, Modal, useStyles2 } from '@grafana/ui';

import { trackInsightsFeedback } from '../Analytics';

@ -1,7 +1,7 @@
import { css } from '@emotion/css';
import * as React from 'react';

import { GrafanaTheme2 } from '@grafana/data/src/themes';
import { GrafanaTheme2 } from '@grafana/data';
import { useStyles2 } from '@grafana/ui';

export function SectionFooter({ children }: React.PropsWithChildren<{}>) {

@ -1,7 +1,7 @@
import { css } from '@emotion/css';
import * as React from 'react';

import { GrafanaTheme2 } from '@grafana/data/src/themes';
import { GrafanaTheme2 } from '@grafana/data';
import { useStyles2 } from '@grafana/ui';

import { DataSourceInformation } from '../home/Insights';

@ -21,7 +21,6 @@ import { DataSourceSrv, GetDataSourceListFilters, config } from '@grafana/runtim
import { defaultDashboard } from '@grafana/schema';
import { contextSrv } from 'app/core/services/context_srv';
import { MOCK_GRAFANA_ALERT_RULE_TITLE } from 'app/features/alerting/unified/mocks/server/handlers/alertRules';
import { parseMatchers } from 'app/features/alerting/unified/utils/alertmanager';
import { DatasourceSrv } from 'app/features/plugins/datasource_srv';
import {
  AlertManagerCortexConfig,
@ -64,6 +63,8 @@ import {

import { DashboardSearchItem, DashboardSearchItemType } from '../../search/types';

import { parsePromQLStyleMatcherLooseSafe } from './utils/matchers';

let nextDataSourceId = 1;

export function mockDataSource<T extends DataSourceJsonData = DataSourceJsonData>(
@ -328,12 +329,12 @@ export const mockSilences = [
  mockSilence({ id: MOCK_SILENCE_ID_EXISTING, comment: 'Happy path silence' }),
  mockSilence({
    id: 'ce031625-61c7-47cd-9beb-8760bccf0ed7',
    matchers: parseMatchers('foo!=bar'),
    matchers: parsePromQLStyleMatcherLooseSafe('foo!=bar'),
    comment: 'Silence with negated matcher',
  }),
  mockSilence({
    id: MOCK_SILENCE_ID_EXISTING_ALERT_RULE_UID,
    matchers: parseMatchers(`__alert_rule_uid__=${MOCK_SILENCE_ID_EXISTING_ALERT_RULE_UID}`),
    matchers: parsePromQLStyleMatcherLooseSafe(`__alert_rule_uid__=${MOCK_SILENCE_ID_EXISTING_ALERT_RULE_UID}`),
    comment: 'Silence with alert rule UID matcher',
    metadata: {
      rule_title: MOCK_GRAFANA_ALERT_RULE_TITLE,
@ -341,7 +342,7 @@
  }),
  mockSilence({
    id: MOCK_SILENCE_ID_LACKING_PERMISSIONS,
    matchers: parseMatchers('something=else'),
    matchers: parsePromQLStyleMatcherLooseSafe('something=else'),
    comment: 'Silence without permissions to edit',
    accessControl: {},
  }),

@ -1,8 +1,8 @@
import { Matcher, MatcherOperator, Route } from 'app/plugins/datasource/alertmanager/types';
import { Labels } from 'app/types/unified-alerting-dto';

import { parseMatchers, labelsMatchMatchers, removeMuteTimingFromRoute, matchersToString } from './alertmanager';
import { parseMatcher } from './matchers';
import { labelsMatchMatchers, removeMuteTimingFromRoute, matchersToString } from './alertmanager';
import { parseMatcher, parsePromQLStyleMatcherLooseSafe } from './matchers';

describe('Alertmanager utils', () => {
  describe('parseMatcher', () => {
@ -64,57 +64,6 @@ describe('Alertmanager utils', () => {
    });
  });

  describe('parseMatchers', () => {
    it('should parse all operators', () => {
      expect(parseMatchers('foo=bar, bar=~ba.+, severity!=warning, email!~@grafana.com')).toEqual<Matcher[]>([
        { name: 'foo', value: 'bar', isRegex: false, isEqual: true },
        { name: 'bar', value: 'ba.+', isEqual: true, isRegex: true },
        { name: 'severity', value: 'warning', isRegex: false, isEqual: false },
        { name: 'email', value: '@grafana.com', isRegex: true, isEqual: false },
      ]);
    });

    it('should parse with spaces and brackets', () => {
      expect(parseMatchers('{ foo=bar }')).toEqual<Matcher[]>([
        {
          name: 'foo',
          value: 'bar',
          isRegex: false,
          isEqual: true,
        },
      ]);
    });

    it('should parse with spaces in the value', () => {
      expect(parseMatchers('foo=bar bazz')).toEqual<Matcher[]>([
        {
          name: 'foo',
          value: 'bar bazz',
          isRegex: false,
          isEqual: true,
        },
      ]);
    });

    it('should return nothing for invalid operator', () => {
      expect(parseMatchers('foo=!bar')).toEqual([]);
    });

    it('should parse matchers with or without quotes', () => {
      expect(parseMatchers('foo="bar",bar=bazz')).toEqual<Matcher[]>([
        { name: 'foo', value: 'bar', isRegex: false, isEqual: true },
        { name: 'bar', value: 'bazz', isEqual: true, isRegex: false },
      ]);
    });

    it('should parse matchers for key with special characters', () => {
      expect(parseMatchers('foo.bar-baz="bar",baz-bar.foo=bazz')).toEqual<Matcher[]>([
        { name: 'foo.bar-baz', value: 'bar', isRegex: false, isEqual: true },
        { name: 'baz-bar.foo', value: 'bazz', isEqual: true, isRegex: false },
      ]);
    });
  });

  describe('labelsMatchMatchers', () => {
    it('should return true for matching labels', () => {
      const labels: Labels = {
@ -123,7 +72,7 @@ describe('Alertmanager utils', () => {
        bazz: 'buzz',
      };

      const matchers = parseMatchers('foo=bar,bar=bazz');
      const matchers = parsePromQLStyleMatcherLooseSafe('foo=bar,bar=bazz');
      expect(labelsMatchMatchers(labels, matchers)).toBe(true);
    });
    it('should return false for no matching labels', () => {
@ -131,7 +80,7 @@ describe('Alertmanager utils', () => {
        foo: 'bar',
        bar: 'bazz',
      };
      const matchers = parseMatchers('foo=buzz');
      const matchers = parsePromQLStyleMatcherLooseSafe('foo=buzz');
      expect(labelsMatchMatchers(labels, matchers)).toBe(false);
    });
    it('should match with different operators', () => {
@ -140,7 +89,7 @@ describe('Alertmanager utils', () => {
        bar: 'bazz',
        email: 'admin@grafana.com',
      };
      const matchers = parseMatchers('foo!=bazz,bar=~ba.+');
      const matchers = parsePromQLStyleMatcherLooseSafe('foo!=bazz,bar=~ba.+');
      expect(labelsMatchMatchers(labels, matchers)).toBe(true);
    });
  });
@ -198,7 +147,7 @@ describe('Alertmanager utils', () => {

    const matchersString = matchersToString(matchers);

    expect(matchersString).toBe('{severity="critical",resource=~"cpu",rule_uid!="2Otf8canzz",cluster!~"prom"}');
    expect(matchersString).toBe('{ severity="critical", resource=~"cpu", rule_uid!="2Otf8canzz", cluster!~"prom" }');
  });
});
});

@ -16,7 +16,7 @@ import { MatcherFieldValue } from '../types/silence-form';

import { getAllDataSources } from './config';
import { DataSourceType, GRAFANA_RULES_SOURCE_NAME } from './datasource';
import { MatcherFormatter, unquoteWithUnescape } from './matchers';
import { MatcherFormatter, parsePromQLStyleMatcherLooseSafe, unquoteWithUnescape } from './matchers';

export function addDefaultsToAlertmanagerConfig(config: AlertManagerCortexConfig): AlertManagerCortexConfig {
  // add default receiver if it does not exist
@ -106,10 +106,10 @@ export function matchersToString(matchers: Matcher[]) {

  const combinedMatchers = matcherFields.reduce((acc, current) => {
    const currentMatcherString = `${current.name}${current.operator}"${current.value}"`;
    return acc ? `${acc},${currentMatcherString}` : currentMatcherString;
    return acc ? `${acc}, ${currentMatcherString}` : currentMatcherString;
  }, '');

  return `{${combinedMatchers}}`;
  return `{ ${combinedMatchers} }`;
}
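
// Editor's sketch (not part of the original diff): what the spacing change above does to the
// rendered matcher string, using hypothetical matcher fields:
const exampleFields = [
  { name: 'severity', operator: '=', value: 'critical' },
  { name: 'team', operator: '=~', value: 'ops.*' },
];
const combined = exampleFields.map(({ name, operator, value }) => `${name}${operator}"${value}"`).join(', ');
const rendered = `{ ${combined} }`;
// before: '{severity="critical",team=~"ops.*"}'
// after:  '{ severity="critical", team=~"ops.*" }'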

export const matcherFieldOptions: SelectableValue[] = [
@ -124,35 +124,6 @@ export function matcherToObjectMatcher(matcher: Matcher): ObjectMatcher {
  return [matcher.name, operator, matcher.value];
}

export function parseMatchers(matcherQueryString: string): Matcher[] {
  const matcherRegExp = /\b([\w.-]+)(=~|!=|!~|=(?="?\w))"?([^"\n,}]*)"?/g;
  const matchers: Matcher[] = [];

  matcherQueryString.replace(matcherRegExp, (_, key, operator, value) => {
    const isEqual = operator === MatcherOperator.equal || operator === MatcherOperator.regex;
    const isRegex = operator === MatcherOperator.regex || operator === MatcherOperator.notRegex;
    matchers.push({
      name: key,
      value: isRegex ? getValidRegexString(value.trim()) : value.trim(),
      isEqual,
      isRegex,
    });
    return '';
  });

  return matchers;
}

function getValidRegexString(regex: string): string {
  // Regexes provided by users might be invalid, so we need to catch the error
  try {
    new RegExp(regex);
    return regex;
  } catch (error) {
    return '';
  }
}

export function labelsMatchMatchers(labels: Labels, matchers: Matcher[]): boolean {
  return matchers.every(({ name, value, isRegex, isEqual }) => {
    return Object.entries(labels).some(([labelKey, labelValue]) => {
@ -177,7 +148,7 @@ export function labelsMatchMatchers(labels: Labels, matchers: Matcher[]): boolea
}

export function combineMatcherStrings(...matcherStrings: string[]): string {
  const matchers = matcherStrings.map(parseMatchers).flat();
  const matchers = matcherStrings.map(parsePromQLStyleMatcherLooseSafe).flat();
  const uniqueMatchers = uniqWith(matchers, isEqual);
  return matchersToString(uniqueMatchers);
}

@ -72,10 +72,10 @@ describe('formAmRouteToAmRoute', () => {

    // Assert
    expect(amRoute.matchers).toStrictEqual([
      '"foo"="bar"',
      '"foo"="bar\\"baz"',
      '"foo"="bar\\\\baz"',
      '"foo"="\\\\bar\\\\baz\\"\\\\"',
      'foo="bar"',
      'foo="bar\\"baz"',
      'foo="bar\\\\baz"',
      'foo="\\\\bar\\\\baz\\"\\\\"',
    ]);
  });

@ -97,7 +97,7 @@ describe('formAmRouteToAmRoute', () => {

    // Assert
    expect(amRoute.matchers).toStrictEqual([
      '"foo"="bar"',
      'foo="bar"',
      '"foo with spaces"="bar"',
      '"foo\\\\slash"="bar"',
      '"foo\\"quote"="bar"',
@ -116,7 +116,7 @@ describe('formAmRouteToAmRoute', () => {
    const amRoute = formAmRouteToAmRoute('mimir-am', route, { id: 'root' });

    // Assert
    expect(amRoute.matchers).toStrictEqual(['"foo"=""']);
    expect(amRoute.matchers).toStrictEqual(['foo=""']);
  });

  it('should allow matchers with empty values for Grafana AM', () => {

@ -8,7 +8,7 @@ import { MatcherFieldValue } from '../types/silence-form';

import { matcherToMatcherField } from './alertmanager';
import { GRAFANA_RULES_SOURCE_NAME } from './datasource';
import { normalizeMatchers, parseMatcherToArray, quoteWithEscape, unquoteWithUnescape } from './matchers';
import { encodeMatcher, normalizeMatchers, parseMatcherToArray, unquoteWithUnescape } from './matchers';
import { findExistingRoute } from './routeTree';
import { isValidPrometheusDuration, safeParsePrometheusDuration } from './time';

@ -189,9 +189,8 @@ export const formAmRouteToAmRoute = (
  // Grafana maintains a fork of AM to support all utf-8 characters in the "object_matchers" property values but this
  // does not exist in upstream AlertManager
  if (alertManagerSourceName !== GRAFANA_RULES_SOURCE_NAME) {
    amRoute.matchers = formAmRoute.object_matchers?.map(
      ({ name, operator, value }) => `${quoteWithEscape(name)}${operator}${quoteWithEscape(value)}`
    );
    // to support UTF-8 characters we must wrap label keys and values with double quotes if they contain reserved characters.
    amRoute.matchers = formAmRoute.object_matchers?.map(encodeMatcher);
    amRoute.object_matchers = undefined;
  } else {
    amRoute.object_matchers = normalizeMatchers(amRoute);

@ -1,14 +1,18 @@
import { MatcherOperator, Route } from '../../../../plugins/datasource/alertmanager/types';
import { Matcher, MatcherOperator, Route } from '../../../../plugins/datasource/alertmanager/types';

import {
  encodeMatcher,
  getMatcherQueryParams,
  isPromQLStyleMatcher,
  matcherToObjectMatcher,
  normalizeMatchers,
  parseMatcher,
  parsePromQLStyleMatcher,
  parsePromQLStyleMatcherLoose,
  parsePromQLStyleMatcherLooseSafe,
  parseQueryParamMatchers,
  quoteWithEscape,
  quoteWithEscapeIfRequired,
  unquoteWithUnescape,
} from './matchers';

@ -175,4 +179,102 @@ describe('parsePromQLStyleMatcher', () => {
  it('should throw when not using correct syntax', () => {
    expect(() => parsePromQLStyleMatcher('foo="bar"')).toThrow();
  });

  it('should only encode matchers if the label key contains reserved characters', () => {
    expect(quoteWithEscapeIfRequired('foo')).toBe('foo');
    expect(quoteWithEscapeIfRequired('foo bar')).toBe('"foo bar"');
    expect(quoteWithEscapeIfRequired('foo{}bar')).toBe('"foo{}bar"');
    expect(quoteWithEscapeIfRequired('foo\\bar')).toBe('"foo\\\\bar"');
  });

  it('should properly encode a matcher field', () => {
    expect(encodeMatcher({ name: 'foo', operator: MatcherOperator.equal, value: 'baz' })).toBe('foo="baz"');
    expect(encodeMatcher({ name: 'foo bar', operator: MatcherOperator.equal, value: 'baz' })).toBe('"foo bar"="baz"');
    expect(encodeMatcher({ name: 'foo{}bar', operator: MatcherOperator.equal, value: 'baz qux' })).toBe(
      '"foo{}bar"="baz qux"'
    );
  });
});

describe('parsePromQLStyleMatcherLooseSafe', () => {
  it('should parse all operators', () => {
    expect(parsePromQLStyleMatcherLooseSafe('foo=bar, bar=~ba.+, severity!=warning, email!~@grafana.com')).toEqual<
      Matcher[]
    >([
      { name: 'foo', value: 'bar', isRegex: false, isEqual: true },
      { name: 'bar', value: 'ba.+', isEqual: true, isRegex: true },
      { name: 'severity', value: 'warning', isRegex: false, isEqual: false },
      { name: 'email', value: '@grafana.com', isRegex: true, isEqual: false },
    ]);
  });

  it('should parse with spaces and brackets', () => {
    expect(parsePromQLStyleMatcherLooseSafe('{ foo=bar }')).toEqual<Matcher[]>([
      {
        name: 'foo',
        value: 'bar',
        isRegex: false,
        isEqual: true,
      },
    ]);
  });

  it('should parse with spaces in the value', () => {
    expect(parsePromQLStyleMatcherLooseSafe('foo=bar bazz')).toEqual<Matcher[]>([
      {
        name: 'foo',
        value: 'bar bazz',
        isRegex: false,
        isEqual: true,
      },
    ]);
  });

  it('should treat an invalid operator as part of the value', () => {
    expect(parsePromQLStyleMatcherLooseSafe('foo=!bar')).toEqual([
      {
        name: 'foo',
        value: '!bar',
        isRegex: false,
        isEqual: true,
      },
    ]);
  });

  it('should parse matchers with or without quotes', () => {
    expect(parsePromQLStyleMatcherLooseSafe('foo="bar",bar=bazz')).toEqual<Matcher[]>([
      { name: 'foo', value: 'bar', isRegex: false, isEqual: true },
      { name: 'bar', value: 'bazz', isEqual: true, isRegex: false },
    ]);
  });

  it('should parse matchers for key with special characters', () => {
    expect(parsePromQLStyleMatcherLooseSafe('foo.bar-baz="bar",baz-bar.foo=bazz')).toEqual<Matcher[]>([
      { name: 'foo.bar-baz', value: 'bar', isRegex: false, isEqual: true },
      { name: 'baz-bar.foo', value: 'bazz', isEqual: true, isRegex: false },
    ]);
  });
});

describe('parsePromQLStyleMatcherLoose', () => {
  it('should throw on invalid matcher', () => {
    expect(() => {
      parsePromQLStyleMatcherLoose('foo');
    }).toThrow();

    expect(() => {
      parsePromQLStyleMatcherLoose('foo;bar');
    }).toThrow();
  });

  it('should return empty array for empty input', () => {
    expect(parsePromQLStyleMatcherLoose('')).toStrictEqual([]);
  });

  it('should also accept { } syntax', () => {
    expect(parsePromQLStyleMatcherLoose('{ foo=bar, bar=baz }')).toStrictEqual([
      { isEqual: true, isRegex: false, name: 'foo', value: 'bar' },
      { isEqual: true, isRegex: false, name: 'bar', value: 'baz' },
    ]);
  });
});

@ -10,6 +10,7 @@ import { compact, uniqBy } from 'lodash';
import { Matcher, MatcherOperator, ObjectMatcher, Route } from 'app/plugins/datasource/alertmanager/types';

import { Labels } from '../../../../types/unified-alerting-dto';
import { MatcherFieldValue } from '../types/silence-form';

import { isPrivateLabelKey } from './labels';

@ -57,6 +58,8 @@ export function parseMatcher(matcher: string): Matcher {

/**
 * This function combines parseMatcher and parsePromQLStyleMatcher, always returning an array of Matcher[] regardless of input syntax:
 * 1. { foo=bar, bar=baz }
 * 2. foo=bar
 */
export function parseMatcherToArray(matcher: string): Matcher[] {
  return isPromQLStyleMatcher(matcher) ? parsePromQLStyleMatcher(matcher) : [parseMatcher(matcher)];
@ -70,6 +73,15 @@ export function parsePromQLStyleMatcher(matcher: string): Matcher[] {
    throw new Error('not a PromQL style matcher');
  }

  return parsePromQLStyleMatcherLoose(matcher);
}

/**
 * This function behaves the same as "parsePromQLStyleMatcher" but does not check that the matcher is wrapped in { }.
 * In other words, it accepts both "{ foo=bar, bar=baz }" and "foo=bar,bar=baz".
 * @throws
 */
export function parsePromQLStyleMatcherLoose(matcher: string): Matcher[] {
  // split by `,` but not when it's used as a label value
  const commaUnlessQuoted = /,(?=(?:[^"]*"[^"]*")*[^"]*$)/;
  const parts = matcher.replace(/^\{/, '').replace(/\}$/, '').trim().split(commaUnlessQuoted);
@ -83,6 +95,18 @@ export function parsePromQLStyleMatcher(matcher: string): Matcher[] {
  }));
}

/**
 * This function behaves the same as "parsePromQLStyleMatcherLoose" but, instead of throwing on incorrect syntax,
 * it returns an empty array of matchers.
 */
export function parsePromQLStyleMatcherLooseSafe(matcher: string): Matcher[] {
  try {
    return parsePromQLStyleMatcherLoose(matcher);
  } catch {
    return [];
  }
}
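
// Editor's note (not part of the original diff): the contract difference between the two
// variants above, as exercised by the test file earlier in this commit:
//
//   parsePromQLStyleMatcherLoose('foo');          // throws: no key/operator/value
//   parsePromQLStyleMatcherLooseSafe('foo');      // [] for the same input, error swallowed
//   parsePromQLStyleMatcherLooseSafe('foo=bar');  // [{ name: 'foo', value: 'bar', isEqual: true, isRegex: false }]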

// Parses a list of entries like "['foo=bar', 'baz=~bad*']" into SilenceMatcher[]
export function parseQueryParamMatchers(matcherPairs: string[]): Matcher[] {
  const parsedMatchers = matcherPairs.filter((x) => !!x.trim()).map((x) => parseMatcher(x));
@ -144,6 +168,27 @@ export function quoteWithEscape(input: string) {
  return `"${escaped}"`;
}

// The list of reserved characters that indicate we should be escaping the label key / value are
// { } ! = ~ , \ " ' ` and any whitespace (\s), encoded in the regular expression below
//
// See Alertmanager PR: https://github.com/prometheus/alertmanager/pull/3453
const RESERVED_CHARACTERS = /[\{\}\!\=\~\,\\\"\'\`\s]+/;

/**
 * Quotes a string only when it contains reserved characters
 */
export function quoteWithEscapeIfRequired(input: string) {
  const shouldQuote = RESERVED_CHARACTERS.test(input);
  return shouldQuote ? quoteWithEscape(input) : input;
}

export const encodeMatcher = ({ name, operator, value }: MatcherFieldValue) => {
  const encodedLabelName = quoteWithEscapeIfRequired(name);
  const encodedLabelValue = quoteWithEscape(value);

  return `${encodedLabelName}${operator}${encodedLabelValue}`;
};
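
// Editor's note (not part of the original diff): encodeMatcher output, taken from the test
// expectations earlier in this commit. Keys are quoted only when they contain reserved
// characters; values are always quoted:
//
//   encodeMatcher({ name: 'foo', operator: MatcherOperator.equal, value: 'baz' });     // 'foo="baz"'
//   encodeMatcher({ name: 'foo bar', operator: MatcherOperator.equal, value: 'baz' }); // '"foo bar"="baz"'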

/**
 * Unquotes and unescapes a string **if it has been quoted**
 */

35
public/app/features/apiserver/client.test.ts
Normal file
@ -0,0 +1,35 @@
import { getBackendSrv } from '@grafana/runtime';

import { DatasourceAPIVersions } from './client';

jest.mock('@grafana/runtime', () => ({
  getBackendSrv: jest.fn().mockReturnValue({
    get: jest.fn(),
  }),
  config: {},
}));

describe('DatasourceAPIVersions', () => {
  beforeEach(() => {
    jest.clearAllMocks();
  });

  it('get', async () => {
    const getMock = jest.fn().mockResolvedValue({
      groups: [
        { name: 'testdata.datasource.grafana.app', preferredVersion: { version: 'v1' } },
        { name: 'prometheus.datasource.grafana.app', preferredVersion: { version: 'v2' } },
        { name: 'myorg-myplugin.datasource.grafana.app', preferredVersion: { version: 'v3' } },
      ],
    });
    getBackendSrv().get = getMock;
    const apiVersions = new DatasourceAPIVersions();
    expect(await apiVersions.get('testdata')).toBe('v1');
    expect(await apiVersions.get('grafana-testdata-datasource')).toBe('v1');
    expect(await apiVersions.get('prometheus')).toBe('v2');
    expect(await apiVersions.get('graphite')).toBeUndefined();
    expect(await apiVersions.get('myorg-myplugin-datasource')).toBe('v3');
    expect(getMock).toHaveBeenCalledTimes(1);
    expect(getMock).toHaveBeenCalledWith('/apis');
  });
});

@ -14,6 +14,7 @@ import {
  AnnoKeyOriginPath,
  AnnoKeyOriginHash,
  AnnoKeyOriginName,
  K8sAPIGroupList,
} from './types';

export interface GroupVersionResource {
@ -110,3 +111,34 @@ function setOriginAsUI(meta: Partial<ObjectMeta>) {
  meta.annotations[AnnoKeyOriginPath] = window.location.pathname;
  meta.annotations[AnnoKeyOriginHash] = config.buildInfo.versionString;
}

export class DatasourceAPIVersions {
  private apiVersions?: { [pluginID: string]: string };

  async get(pluginID: string): Promise<string | undefined> {
    if (this.apiVersions) {
      return this.apiVersions[pluginID];
    }
    const apis = await getBackendSrv().get<K8sAPIGroupList>('/apis');
    const apiVersions: { [pluginID: string]: string } = {};
    apis.groups.forEach((group) => {
      if (group.name.includes('datasource.grafana.app')) {
        const id = group.name.split('.')[0];
        apiVersions[id] = group.preferredVersion.version;
        // workaround for plugins that don't append '-datasource' to the group name
        // e.g. org-plugin-datasource uses org-plugin.datasource.grafana.app
        if (!id.endsWith('-datasource')) {
          if (!id.includes('-')) {
            // workaround for Grafana plugins that don't include the org either
            // e.g. testdata uses testdata.datasource.grafana.app
            apiVersions[`grafana-${id}-datasource`] = group.preferredVersion.version;
          } else {
            apiVersions[`${id}-datasource`] = group.preferredVersion.version;
          }
        }
      }
    });
    this.apiVersions = apiVersions;
    return apiVersions[pluginID];
  }
}
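
// Editor's sketch (not part of the original diff): how the class above resolves plugin IDs,
// mirroring the test in this commit. /apis is fetched once, then cached lookups apply the
// '-datasource' and 'grafana-' naming workarounds:
//
//   const versions = new DatasourceAPIVersions();
//   await versions.get('prometheus');                  // 'v2' via prometheus.datasource.grafana.app
//   await versions.get('grafana-testdata-datasource'); // 'v1' via the org-less workaround
//   await versions.get('graphite');                    // undefined, no matching API group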

@ -150,3 +150,13 @@ export interface ResourceClient<T = object, K = string> {
  update(obj: ResourceForCreate<T, K>): Promise<Resource<T, K>>;
  delete(name: string): Promise<MetaStatus>;
}

export interface K8sAPIGroup {
  name: string;
  versions: Array<{ groupVersion: string; version: string }>;
  preferredVersion: { groupVersion: string; version: string };
}
export interface K8sAPIGroupList {
  kind: 'APIGroupList';
  groups: K8sAPIGroup[];
}

@ -57,9 +57,18 @@ const testConfig: SSOProvider = {
    allowedDomains: '',
    allowedGroups: '',
    scopes: '',
    orgMapping: '',
  },
};

jest.mock('app/core/core', () => {
  return {
    contextSrv: {
      isGrafanaAdmin: true,
    },
  };
});

const emptyConfig = {
  ...testConfig,
  settings: { ...testConfig.settings, enabled: false, clientId: '', clientSecret: '' },
@ -120,6 +129,8 @@ describe('ProviderConfigForm', () => {
    await user.click(screen.getByText('User mapping'));
    await user.type(screen.getByRole('textbox', { name: /Role attribute path/i }), 'new-attribute-path');
    await user.click(screen.getByRole('checkbox', { name: /Role attribute strict mode/i }));
    await user.type(screen.getByRole('combobox', { name: /Organization mapping/i }), 'Group A:1:Editor{enter}');
    await user.type(screen.getByRole('combobox', { name: /Organization mapping/i }), 'Group B:2:Admin{enter}');

    await user.click(screen.getByText('Extra security measures'));
    await user.type(screen.getByRole('combobox', { name: /Allowed domains/i }), 'grafana.com{enter}');
@ -143,6 +154,7 @@ describe('ProviderConfigForm', () => {
        clientSecret: 'test-client-secret',
        enabled: true,
        name: 'GitHub',
        orgMapping: '["Group A:1:Editor","Group B:2:Admin"]',
        roleAttributePath: 'new-attribute-path',
        roleAttributeStrict: true,
        scopes: 'user:email',
@ -203,6 +215,7 @@ describe('ProviderConfigForm', () => {
          tlsClientKey: '',
          usePkce: false,
          useRefreshToken: false,
          orgMapping: '',
        },
      },
      { showErrorAlert: false }

@ -38,7 +38,7 @@ export const sectionFields: Section = {

  {
    name: 'User mapping',
    id: 'user',
    fields: ['roleAttributePath', 'roleAttributeStrict', 'allowAssignGrafanaAdmin', 'skipOrgRoleSync'],
    fields: ['roleAttributePath', 'roleAttributeStrict', 'orgMapping', 'allowAssignGrafanaAdmin', 'skipOrgRoleSync'],
  },
  {
    name: 'Extra security measures',
@ -86,6 +86,8 @@ export const sectionFields: Section = {
      'idTokenAttributeName',
      'roleAttributePath',
      'roleAttributeStrict',
      'orgMapping',
      'orgAttributePath',
      'allowAssignGrafanaAdmin',
      'skipOrgRoleSync',
    ],
@ -121,7 +123,7 @@ export const sectionFields: Section = {
  {
    name: 'User mapping',
    id: 'user',
    fields: ['roleAttributePath', 'roleAttributeStrict', 'allowAssignGrafanaAdmin', 'skipOrgRoleSync'],
    fields: ['roleAttributePath', 'roleAttributeStrict', 'orgMapping', 'allowAssignGrafanaAdmin', 'skipOrgRoleSync'],
  },
  {
    name: 'Extra security measures',
@ -149,7 +151,7 @@ export const sectionFields: Section = {
  {
    name: 'User mapping',
    id: 'user',
    fields: ['roleAttributePath', 'roleAttributeStrict', 'allowAssignGrafanaAdmin', 'skipOrgRoleSync'],
    fields: ['roleAttributePath', 'roleAttributeStrict', 'orgMapping', 'allowAssignGrafanaAdmin', 'skipOrgRoleSync'],
  },
  {
    name: 'Extra security measures',
@ -176,7 +178,7 @@ export const sectionFields: Section = {
  {
    name: 'User mapping',
    id: 'user',
    fields: ['roleAttributePath', 'roleAttributeStrict', 'allowAssignGrafanaAdmin', 'skipOrgRoleSync'],
    fields: ['roleAttributePath', 'roleAttributeStrict', 'orgMapping', 'allowAssignGrafanaAdmin', 'skipOrgRoleSync'],
  },
  {
    name: 'Extra security measures',
@ -213,7 +215,14 @@ export const sectionFields: Section = {
  {
    name: 'User mapping',
    id: 'user',
    fields: ['roleAttributePath', 'roleAttributeStrict', 'allowAssignGrafanaAdmin', 'skipOrgRoleSync'],
    fields: [
      'roleAttributePath',
      'roleAttributeStrict',
      'orgMapping',
      'orgAttributePath',
      'allowAssignGrafanaAdmin',
      'skipOrgRoleSync',
    ],
  },
  {
    name: 'Extra security measures',
@ -448,6 +457,22 @@ export function fieldMap(provider: string): Record<string, FieldData> {
    description: 'Prevent synchronizing users’ organization roles from your IdP.',
    type: 'switch',
  },
  orgMapping: {
    label: 'Organization mapping',
    description: orgMappingDescription(provider),
    type: 'select',
    hidden: !contextSrv.isGrafanaAdmin,
    multi: true,
    allowCustomValue: true,
    options: [],
    placeholder: 'Enter mappings (my-team:1:Viewer...) and press Enter to add',
  },
  orgAttributePath: {
    label: 'Organization attribute path',
    description: 'JMESPath expression to use for organization lookup.',
    type: 'text',
    hidden: !['generic_oauth', 'okta'].includes(provider),
  },
  defineAllowedGroups: {
    label: 'Define allowed groups',
    type: 'switch',
@ -602,3 +627,19 @@ export function fieldMap(provider: string): Record<string, FieldData> {
function isNumeric(value: string) {
  return /^-?\d+$/.test(value);
}

function orgMappingDescription(provider: string): string {
  switch (provider) {
    case 'azuread':
      return 'List of "<GroupID>:<OrgIdOrName>:<Role>" mappings.';
    case 'github':
      return 'List of "<GitHubTeamName>:<OrgIdOrName>:<Role>" mappings.';
    case 'gitlab':
      return 'List of "<GitlabGroupName>:<OrgIdOrName>:<Role>" mappings.';
    case 'google':
      return 'List of "<GoogleGroupName>:<OrgIdOrName>:<Role>" mappings.';
    default:
      // Generic OAuth, Okta
      return 'List of "<ExternalName>:<OrgIdOrName>:<Role>" mappings.';
  }
}

@ -36,6 +36,7 @@ export type SSOProviderSettingsBase = {

  roleAttributeStrict?: boolean;
  signoutRedirectUrl?: string;
  skipOrgRoleSync?: boolean;
  orgAttributePath?: string;
  teamIdsAttributePath?: string;
  teamsUrl?: string;
  tlsClientCa?: string;
@ -70,6 +71,7 @@ export type SSOProvider = {
    allowedDomains?: string;
    allowedGroups?: string;
    scopes?: string;
    orgMapping?: string;
  };
};

@ -80,6 +82,7 @@ export type SSOProviderDTO = Partial<SSOProviderSettingsBase> & {
  allowedDomains?: Array<SelectableValue<string>>;
  allowedGroups?: Array<SelectableValue<string>>;
  scopes?: Array<SelectableValue<string>>;
  orgMapping?: Array<SelectableValue<string>>;
};

export interface AuthConfigState {

@ -51,6 +51,11 @@ const strToValue = (val: string | string[]): SelectableValue[] => {
  if (Array.isArray(val)) {
    return val.map((v) => ({ label: v, value: v }));
  }
  // Stored as JSON Array
  if (val.startsWith('[') && val.endsWith(']')) {
    return JSON.parse(val).map((v: string) => ({ label: v, value: v }));
  }

  return val.split(/[\s,]/).map((s) => ({ label: s, value: s }));
};

@ -70,7 +75,11 @@ export function dataToDTO(data?: SSOProvider): SSOProviderDTO {
}

const valuesToString = (values: Array<SelectableValue<string>>) => {
  return values.map(({ value }) => value).join(',');
  if (values.length <= 1) {
    return values.map(({ value }) => value).join(',');
  }
  // Store as JSON array if there are multiple values
  return JSON.stringify(values.map(({ value }) => value));
};
|
||||
|
||||
const getFieldsForProvider = (provider: string) => {
|
||||
|
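Taken together, the two hunks above make multi-value fields round-trip safely: a single value is stored as a plain string, while multiple values are serialized as a JSON array that strToValue recognizes by its [ ... ] delimiters. A minimal sketch of the expected behavior, with hypothetical sample values:

// Round-trip sketch for the serialization introduced above.
const one = valuesToString([{ value: 'my-team:1:Viewer' }]);
// one === 'my-team:1:Viewer'  -> parsed back by the split(/[\s,]/) branch

const many = valuesToString([{ value: 'a:1:Viewer' }, { value: 'b:2:Editor' }]);
// many === '["a:1:Viewer","b:2:Editor"]'  -> parsed back by the JSON.parse branch,
// which keeps values containing spaces or commas intact, something the plain
// comma-join could not guarantee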
@ -1,9 +1,10 @@
import { css } from '@emotion/css';
import { memo, useEffect, useMemo } from 'react';
import { useLocation } from 'react-router-dom';
import AutoSizer from 'react-virtualized-auto-sizer';

import { GrafanaTheme2 } from '@grafana/data';
import { locationService, reportInteraction } from '@grafana/runtime';
import { reportInteraction } from '@grafana/runtime';
import { FilterInput, useStyles2 } from '@grafana/ui';
import { Page } from 'app/core/components/Page/Page';
import { GrafanaRouteComponentProps } from 'app/core/navigation/types';

@ -39,7 +40,8 @@ const BrowseDashboardsPage = memo(({ match }: Props) => {
  const styles = useStyles2(getStyles);
  const [searchState, stateManager] = useSearchStateManager();
  const isSearching = stateManager.hasSearchFilters();
  const search = locationService.getSearch();
  const location = useLocation();
  const search = useMemo(() => new URLSearchParams(location.search), [location.search]);

  useEffect(() => {
    stateManager.initStateFromUrl(folderUID);
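The change above replaces the imperative locationService.getSearch() with React Router's useLocation hook, so the component re-renders when the URL query string changes instead of reading a possibly stale snapshot. The pattern in isolation, as a sketch with a hypothetical hook name:

import { useMemo } from 'react';
import { useLocation } from 'react-router-dom';

// Hypothetical helper showing the same pattern as the hunk above.
function useQueryParam(name: string): string | null {
  const location = useLocation();
  // useMemo avoids re-parsing the query string on unrelated re-renders
  const params = useMemo(() => new URLSearchParams(location.search), [location.search]);
  return params.get(name);
}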
@ -15,6 +15,7 @@ import { VizPanel } from '@grafana/scenes';
import { GrafanaQueryType } from 'app/plugins/datasource/grafana/types';

import { DashboardGridItem } from '../../scene/DashboardGridItem';
import { DashboardScene } from '../../scene/DashboardScene';
import { LibraryVizPanel } from '../../scene/LibraryVizPanel';
import { gridItemToPanel, vizPanelToPanel } from '../../serialization/transformSceneToSaveModel';
import { getQueryRunnerFor } from '../../utils/utils';

@ -62,15 +63,28 @@ export function getGithubMarkdown(panel: VizPanel, snapshot: string): string {
}

export async function getDebugDashboard(panel: VizPanel, rand: Randomize, timeRange: TimeRange) {
  let saveModel;
  let saveModel: ReturnType<typeof gridItemToPanel> = { type: '' };
  const isLibraryPanel = panel.parent instanceof LibraryVizPanel;
  const gridItem = (isLibraryPanel ? panel.parent.parent : panel.parent) as DashboardGridItem;
  const scene = panel.getRoot() as DashboardScene;

  if (isLibraryPanel) {
    saveModel = {
      ...gridItemToPanel(gridItem),
      ...vizPanelToPanel(panel),
    };
  } else if (scene.state.editPanel) {
    // If panel edit mode is open when the user chooses the "get help" panel menu option
    // we want the debug dashboard to include the panel with any changes that were made while
    // in panel edit mode.
    const sourcePanel = scene.state.editPanel.state.vizManager.state.sourcePanel.resolve();
    const dashGridItem = sourcePanel.parent instanceof LibraryVizPanel ? sourcePanel.parent.parent : sourcePanel.parent;
    if (dashGridItem instanceof DashboardGridItem) {
      saveModel = {
        ...gridItemToPanel(dashGridItem),
        ...vizPanelToPanel(scene.state.editPanel.state.vizManager.state.panel.clone()),
      };
    }
  } else {
    saveModel = gridItemToPanel(gridItem);
  }
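The typing change above gives saveModel a concrete type and a safe default for the branch where dashGridItem turns out not to be a DashboardGridItem. The same idiom in isolation, as a sketch where producer is a stand-in for gridItemToPanel:

// Sketch: ReturnType keeps a local in sync with whatever a helper returns.
function producer() {
  return { type: '', title: undefined as string | undefined };
}

let model: ReturnType<typeof producer> = { type: '', title: undefined };
// If producer's return shape changes, this initializer fails to compile,
// which is the point of deriving the type instead of writing it by hand.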
@ -25,7 +25,7 @@ export class InspectDataTab extends SceneObjectBase<InspectDataTabState> {
    super({
      ...state,
      options: {
        withTransforms: true,
        withTransforms: false,
        withFieldConfig: true,
      },
    });
@ -1,6 +1,6 @@
import * as React from 'react';

import { CoreApp, DataSourceApi, DataSourceInstanceSettings, IconName } from '@grafana/data';
import { CoreApp, DataSourceApi, DataSourceInstanceSettings, IconName, getDataSourceRef } from '@grafana/data';
import { selectors } from '@grafana/e2e-selectors';
import { config, getDataSourceSrv } from '@grafana/runtime';
import { SceneObjectBase, SceneComponentProps, sceneGraph, SceneQueryRunner } from '@grafana/scenes';

@ -81,8 +81,9 @@ export class PanelDataQueriesTab extends SceneObjectBase<PanelDataQueriesTabStat
        : undefined,
      dataSource: {
        default: panelManager.state.dsSettings?.isDefault,
        type: panelManager.state.dsSettings?.type,
        uid: panelManager.state.dsSettings?.uid,
        ...(panelManager.state.dsSettings
          ? getDataSourceRef(panelManager.state.dsSettings)
          : { type: undefined, uid: undefined }),
      },
      queries,
      maxDataPoints: queryRunner.state.maxDataPoints,

@ -145,7 +146,9 @@ export class PanelDataQueriesTab extends SceneObjectBase<PanelDataQueriesTabStat
  onAddQuery = (query: Partial<DataQuery>) => {
    const queries = this.getQueries();
    const dsSettings = this._panelManager.state.dsSettings;
    this.onQueriesChange(addQuery(queries, query, { type: dsSettings?.type, uid: dsSettings?.uid }));
    this.onQueriesChange(
      addQuery(queries, query, dsSettings ? getDataSourceRef(dsSettings) : { type: undefined, uid: undefined })
    );
  };

  isExpressionsSupported(dsSettings: DataSourceInstanceSettings): boolean {
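The recurring change in this commit swaps hand-built { type, uid } literals for getDataSourceRef from @grafana/data, so a DataSourceRef is always derived the same way from DataSourceInstanceSettings. Conceptually it behaves roughly like the sketch below; this is not the library source, and depending on the version the real helper may carry extra fields and the DataSourceRef type may come from @grafana/schema:

import { DataSourceInstanceSettings, DataSourceRef } from '@grafana/data';

// Rough sketch of the centralization, not the actual library implementation.
function toRef(settings: DataSourceInstanceSettings): DataSourceRef {
  return { type: settings.type, uid: settings.uid };
}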
@ -9,6 +9,7 @@ import {
  GrafanaTheme2,
  PanelModel,
  filterFieldConfigOverrides,
  getDataSourceRef,
  isStandardFieldProp,
  restoreCustomOverrideRules,
} from '@grafana/data';

@ -159,8 +160,8 @@ export class VizPanelManager extends SceneObjectBase<VizPanelManagerState> {

    this.queryRunner.setState({
      datasource: {
        ...getDataSourceRef(dsSettings),
        uid: lastUsedDatasource?.datasourceUid,
        type: dsSettings.type,
      },
    });
  }

@ -173,12 +174,7 @@ export class VizPanelManager extends SceneObjectBase<VizPanelManagerState> {
      if (datasource && dsSettings) {
        this.setState({ datasource, dsSettings });

        storeLastUsedDataSourceInLocalStorage(
          {
            type: dsSettings.type,
            uid: dsSettings.uid,
          } || { default: true }
        );
        storeLastUsedDataSourceInLocalStorage(getDataSourceRef(dsSettings) || { default: true });
      }
    } catch (err) {
      //set default datasource if we fail to load the datasource

@ -192,10 +188,7 @@ export class VizPanelManager extends SceneObjectBase<VizPanelManagerState> {
      });

      this.queryRunner.setState({
        datasource: {
          uid: dsSettings.uid,
          type: dsSettings.type,
        },
        datasource: getDataSourceRef(dsSettings),
      });
    }

@ -296,10 +289,7 @@ export class VizPanelManager extends SceneObjectBase<VizPanelManagerState> {
    const queries = defaultQueries || (await updateQueries(nextDS, newSettings.uid, currentQueries, currentDS));

    queryRunner.setState({
      datasource: {
        type: newSettings.type,
        uid: newSettings.uid,
      },
      datasource: getDataSourceRef(newSettings),
      queries,
    });
    if (defaultQueries) {
@ -1,6 +1,7 @@
import { getPanelPlugin } from '@grafana/data/test/__mocks__/pluginMocks';
import { setPluginImportUtils } from '@grafana/runtime';
import { SceneGridLayout, VizPanel } from '@grafana/scenes';
import { SceneGridLayout, TestVariable, VizPanel } from '@grafana/scenes';
import { ALL_VARIABLE_TEXT, ALL_VARIABLE_VALUE } from 'app/features/variables/constants';

import { activateFullSceneTree, buildPanelRepeaterScene } from '../utils/test-utils';

@ -40,7 +41,7 @@ describe('PanelRepeaterGridItem', () => {
    expect(repeater.state.repeatedPanels?.length).toBe(5);
  });

  it('Should adjust container height to fit panels direction is horizontal', async () => {
  it('Should adjust container height to fit panels if direction is horizontal', async () => {
    const { scene, repeater } = buildPanelRepeaterScene({ variableQueryTime: 0, maxPerRow: 2, itemHeight: 10 });

    const layoutForceRender = jest.fn();

@ -144,4 +145,56 @@ describe('PanelRepeaterGridItem', () => {

    expect(gridItem.getClassName()).toBe('');
  });

  it('should have correct height after repeat is performed', () => {
    const { scene, repeater } = buildPanelRepeaterScene({
      variableQueryTime: 0,
      height: 4,
      maxPerRow: 4,
      repeatDirection: 'h',
      numberOfOptions: 5,
    });

    activateFullSceneTree(scene);

    expect(repeater.state.height).toBe(4);
  });

  it('should have same item height if number of repetitions changes', async () => {
    const { scene, repeater } = buildPanelRepeaterScene({
      variableQueryTime: 0,
      height: 4,
      maxPerRow: 4,
      repeatDirection: 'h',
      numberOfOptions: 5,
    });
    activateFullSceneTree(scene);

    scene.state.$variables!.setState({
      variables: [
        new TestVariable({
          name: 'server',
          query: 'A.*',
          value: ALL_VARIABLE_VALUE,
          text: ALL_VARIABLE_TEXT,
          isMulti: true,
          includeAll: true,
          delayMs: 0,
          optionsToReturn: [
            { label: 'A', value: '1' },
            { label: 'B', value: '2' },
            { label: 'C', value: '3' },
            { label: 'D', value: '4' },
            { label: 'E', value: '5' },
            { label: 'F', value: '6' },
            { label: 'G', value: '7' },
            { label: 'H', value: '8' },
            { label: 'I', value: '9' },
            { label: 'J', value: '10' },
          ],
        }),
      ],
    });
    expect(repeater.state.height).toBe(6);
  });
});
@ -163,6 +163,8 @@ export class DashboardGridItem extends SceneObjectBase<DashboardGridItemState> i
      return;
    }

    // Needed to calculate item height
    const prevRepeatCount = this._prevRepeatValues?.length ?? values.length;
    this._prevRepeatValues = values;
    const panelToRepeat = this.state.body instanceof LibraryVizPanel ? this.state.body.state.panel! : this.state.body;
    const repeatedPanels: VizPanel[] = [];

@ -188,16 +190,15 @@ export class DashboardGridItem extends SceneObjectBase<DashboardGridItemState> i

    const direction = this.getRepeatDirection();
    const stateChange: Partial<DashboardGridItemState> = { repeatedPanels: repeatedPanels };
    const itemHeight = this.state.itemHeight ?? 10;
    const prevHeight = this.state.height;
    const maxPerRow = this.getMaxPerRow();
    const prevHeight = this.state.height ?? 0;
    const maxPerRow = direction === 'h' ? this.getMaxPerRow() : 1;
    const prevRowCount = Math.ceil(prevRepeatCount / maxPerRow);
    const newRowCount = Math.ceil(repeatedPanels.length / maxPerRow);

    if (direction === 'h') {
      const rowCount = Math.ceil(repeatedPanels.length / maxPerRow);
      stateChange.height = rowCount * itemHeight;
    } else {
      stateChange.height = repeatedPanels.length * itemHeight;
    }
    // If item height is not defined, calculate based on total height and row count
    const itemHeight = this.state.itemHeight ?? prevHeight / prevRowCount;
    stateChange.itemHeight = itemHeight;
    stateChange.height = Math.ceil(newRowCount * itemHeight);

    this.setState(stateChange);
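The new height logic derives a per-row item height from the previous total height and row count, then scales it by the new row count, so each item keeps its height when the repeat count changes. A worked example under the new test's settings (height 4, maxPerRow 4, horizontal repeat): 5 initial values give ceil(5/4) = 2 rows, so itemHeight = 4/2 = 2; once the variable's All value expands to 10 options, ceil(10/4) = 3 rows give height = ceil(3 * 2) = 6, matching the test's expectation. The same arithmetic in isolation, as a sketch:

// Sketch of the row/height arithmetic introduced above.
function nextHeight(prevHeight: number, prevCount: number, newCount: number, maxPerRow: number): number {
  const prevRowCount = Math.ceil(prevCount / maxPerRow);
  const newRowCount = Math.ceil(newCount / maxPerRow);
  const itemHeight = prevHeight / prevRowCount; // per-row height carried over
  return Math.ceil(newRowCount * itemHeight);
}

// nextHeight(4, 5, 10, 4) === 6, as asserted in the repeater test above.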
@ -1,4 +1,4 @@
import { CoreApp, LoadingState, getDefaultTimeRange } from '@grafana/data';
import { CoreApp, LoadingState, getDefaultTimeRange, store } from '@grafana/data';
import { locationService } from '@grafana/runtime';
import {
  sceneGraph,

@ -14,6 +14,7 @@ import {
} from '@grafana/scenes';
import { Dashboard, DashboardCursorSync, LibraryPanel } from '@grafana/schema';
import appEvents from 'app/core/app_events';
import { LS_PANEL_COPY_KEY } from 'app/core/constants';
import { getDashboardSrv } from 'app/features/dashboard/services/DashboardSrv';
import { VariablesChanged } from 'app/features/variables/types';

@ -25,7 +26,7 @@ import {
  transformSaveModelToScene,
} from '../serialization/transformSaveModelToScene';
import { DecoratedRevisionModel } from '../settings/VersionsEditView';
import { getHistorySrv } from '../settings/version-history/HistorySrv';
import { historySrv } from '../settings/version-history/HistorySrv';
import { dashboardSceneGraph } from '../utils/dashboardSceneGraph';
import { djb2Hash } from '../utils/djb2Hash';
import { findVizPanelByKey } from '../utils/utils';

@ -620,7 +621,7 @@ describe('DashboardScene', () => {

    scene.copyPanel(vizPanel);

    expect(scene.state.hasCopiedPanel).toBe(false);
    expect(store.exists(LS_PANEL_COPY_KEY)).toBe(false);
  });

  it('Should fail to copy a library panel if it does not have a grid item parent', () => {

@ -638,14 +639,14 @@ describe('DashboardScene', () => {

    scene.copyPanel(libVizPanel.state.panel as VizPanel);

    expect(scene.state.hasCopiedPanel).toBe(false);
    expect(store.exists(LS_PANEL_COPY_KEY)).toBe(false);
  });

  it('Should copy a panel', () => {
    const vizPanel = ((scene.state.body as SceneGridLayout).state.children[0] as DashboardGridItem).state.body;
    scene.copyPanel(vizPanel as VizPanel);

    expect(scene.state.hasCopiedPanel).toBe(true);
    expect(store.exists(LS_PANEL_COPY_KEY)).toBe(true);
  });

  it('Should copy a library viz panel', () => {

@ -654,11 +655,11 @@ describe('DashboardScene', () => {

    scene.copyPanel(libVizPanel.state.panel as VizPanel);

    expect(scene.state.hasCopiedPanel).toBe(true);
    expect(store.exists(LS_PANEL_COPY_KEY)).toBe(true);
  });

  it('Should paste a panel', () => {
    scene.setState({ hasCopiedPanel: true });
    store.set(LS_PANEL_COPY_KEY, JSON.stringify({ key: 'panel-7' }));
    jest.spyOn(JSON, 'parse').mockReturnThis();
    jest.mocked(buildGridItemForPanel).mockReturnValue(
      new DashboardGridItem({

@ -680,11 +681,11 @@ describe('DashboardScene', () => {
    expect(body.state.children.length).toBe(6);
    expect(gridItem.state.body!.state.key).toBe('panel-7');
    expect(gridItem.state.y).toBe(0);
    expect(scene.state.hasCopiedPanel).toBe(false);
    expect(store.exists(LS_PANEL_COPY_KEY)).toBe(false);
  });

  it('Should paste a library viz panel', () => {
    scene.setState({ hasCopiedPanel: true });
    store.set(LS_PANEL_COPY_KEY, JSON.stringify({ key: 'panel-7' }));
    jest.spyOn(JSON, 'parse').mockReturnValue({ libraryPanel: { uid: 'uid', name: 'libraryPanel' } });
    jest.mocked(buildGridItemForLibPanel).mockReturnValue(
      new DashboardGridItem({

@ -709,7 +710,7 @@ describe('DashboardScene', () => {
    expect(libVizPanel.state.panelKey).toBe('panel-7');
    expect(libVizPanel.state.panel?.state.key).toBe('panel-7');
    expect(gridItem.state.y).toBe(0);
    expect(scene.state.hasCopiedPanel).toBe(false);
    expect(store.exists(LS_PANEL_COPY_KEY)).toBe(false);
  });

  it('Should remove a panel', () => {

@ -1137,7 +1138,7 @@ describe('DashboardScene', () => {
      version: 4,
    });

    jest.mocked(getHistorySrv().restoreDashboard).mockResolvedValue({ version: newVersion });
    jest.mocked(historySrv.restoreDashboard).mockResolvedValue({ version: newVersion });
    jest.mocked(transformSaveModelToScene).mockReturnValue(mockScene);

    return scene.onRestore(getVersionMock()).then((res) => {

@ -1150,7 +1151,7 @@ describe('DashboardScene', () => {

  it('should return early if historySrv does not return a valid version number', () => {
    jest
      .mocked(getHistorySrv().restoreDashboard)
      .mocked(historySrv.restoreDashboard)
      .mockResolvedValueOnce({ version: null })
      .mockResolvedValueOnce({ version: undefined })
      .mockResolvedValueOnce({ version: Infinity })
@ -49,7 +49,7 @@ import {
import { gridItemToPanel } from '../serialization/transformSceneToSaveModel';
import { DecoratedRevisionModel } from '../settings/VersionsEditView';
import { DashboardEditView } from '../settings/utils';
import { getHistorySrv } from '../settings/version-history';
import { historySrv } from '../settings/version-history';
import { DashboardModelCompatibilityWrapper } from '../utils/DashboardModelCompatibilityWrapper';
import { dashboardSceneGraph, getLibraryVizPanelFromVizPanel } from '../utils/dashboardSceneGraph';
import { djb2Hash } from '../utils/djb2Hash';

@ -121,8 +121,6 @@ export interface DashboardSceneState extends SceneObjectState {
  editPanel?: PanelEditor;
  /** Scene object that handles the current drawer or modal */
  overlay?: SceneObject;
  /** True when a user copies a panel in the dashboard */
  hasCopiedPanel?: boolean;
  /** The dashboard doesn't have panels */
  isEmpty?: boolean;
  /** Scene object that handles the scopes selector */

@ -172,7 +170,6 @@ export class DashboardScene extends SceneObjectBase<DashboardSceneState> {
      editable: true,
      body: state.body ?? new SceneFlexLayout({ children: [] }),
      links: state.links ?? [],
      hasCopiedPanel: store.exists(LS_PANEL_COPY_KEY),
      scopes: state.uid && config.featureToggles.scopeFilters ? new ScopesScene() : undefined,
      ...state,
    });

@ -357,20 +354,19 @@ export class DashboardScene extends SceneObjectBase<DashboardSceneState> {
  }

  public onRestore = async (version: DecoratedRevisionModel): Promise<boolean> => {
    const versionRsp = await getHistorySrv().restoreDashboard(version.uid, version.version);
    const versionRsp = await historySrv.restoreDashboard(version.uid, version.version);

    const rev = (versionRsp as SaveDashboardResponseDTO).version;
    if (!Number.isInteger(version)) {
    if (!Number.isInteger(versionRsp.version)) {
      return false;
    }

    const dashboardDTO: DashboardDTO = {
      dashboard: new DashboardModel(version.data!),
      dashboard: new DashboardModel(version.data),
      meta: this.state.meta,
    };
    const dashScene = transformSaveModelToScene(dashboardDTO);
    const newState = sceneUtils.cloneSceneObjectState(dashScene.state);
    newState.version = rev;
    newState.version = versionRsp.version;

    this.setState(newState);
    this.exitEditMode({ skipConfirm: true, restoreInitialState: false });

@ -649,7 +645,6 @@ export class DashboardScene extends SceneObjectBase<DashboardSceneState> {

    store.set(LS_PANEL_COPY_KEY, JSON.stringify(jsonData));
    appEvents.emit(AppEvents.alertSuccess, ['Panel copied. Use **Paste panel** toolbar action to paste.']);
    this.setState({ hasCopiedPanel: true });
  }

  public pastePanel() {

@ -704,7 +699,6 @@ export class DashboardScene extends SceneObjectBase<DashboardSceneState> {
      children: [gridItem, ...sceneGridLayout.state.children],
    });

    this.setState({ hasCopiedPanel: false });
    store.delete(LS_PANEL_COPY_KEY);
  }
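The recurring theme in the last two hunks, and in the test changes above, is dropping the duplicated hasCopiedPanel flag and treating the localStorage entry, accessed through the store helper from @grafana/data, as the single source of truth for whether a panel is on the clipboard. A minimal sketch of that pattern; the key is the real LS_PANEL_COPY_KEY constant, the diff itself only shows set/exists/delete, and the store.get call plus the payload shape are assumptions for illustration:

import { store } from '@grafana/data';
import { LS_PANEL_COPY_KEY } from 'app/core/constants';

// Copy: persist once; no extra state flag to keep in sync.
function copyPanelJson(json: unknown) {
  store.set(LS_PANEL_COPY_KEY, JSON.stringify(json));
}

// Query: derive the flag whenever it is needed.
const hasCopiedPanel = () => store.exists(LS_PANEL_COPY_KEY);

// Paste: consume and clear, so the derived flag flips back automatically.
function consumeCopiedPanel(): unknown | undefined {
  const raw = store.get(LS_PANEL_COPY_KEY); // assumes a get() accessor alongside exists()
  if (!raw) {
    return undefined;
  }
  store.delete(LS_PANEL_COPY_KEY);
  return JSON.parse(raw);
}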