Search: add feature flag and basic service (#45112)
Co-authored-by: Artur Wierzbicki <artur@arturwierzbicki.com>
Parent: 11433cba97
Commit: d665306ad1
.github/CODEOWNERS (vendored): 1 line changed

@@ -55,6 +55,7 @@ go.sum @grafana/backend-platform
 
 # Grafana live
 /pkg/services/live/ @grafana/grafana-edge-squad
+/pkg/services/searchV2/ @grafana/grafana-edge-squad
 
 # Alerting
 /pkg/services/ngalert @grafana/alerting-squad-backend
@@ -27,6 +27,7 @@ export interface FeatureToggles {
   ['live-pipeline']?: boolean;
   ['live-service-web-worker']?: boolean;
   queryOverLive?: boolean;
+  panelTitleSearch?: boolean;
   tempoSearch?: boolean;
   tempoBackendSearch?: boolean;
   tempoServiceGraph?: boolean;
@@ -19,6 +19,8 @@ import (
 	"github.com/grafana/grafana/pkg/plugins/manager/signature"
 	"github.com/grafana/grafana/pkg/services/featuremgmt"
 	"github.com/grafana/grafana/pkg/services/licensing"
+	"github.com/grafana/grafana/pkg/services/searchV2"
+	"github.com/grafana/grafana/pkg/services/sqlstore"
 	"github.com/grafana/grafana/pkg/setting"
 	"github.com/grafana/grafana/pkg/tsdb/azuremonitor"
 	"github.com/grafana/grafana/pkg/tsdb/cloudmonitoring"
@@ -86,7 +88,8 @@ func TestPluginManager_int_init(t *testing.T) {
 	pg := postgres.ProvideService(cfg)
 	my := mysql.ProvideService(cfg, hcp)
 	ms := mssql.ProvideService(cfg)
-	graf := grafanads.ProvideService(cfg)
+	sv2 := searchV2.ProvideService(sqlstore.InitTestDB(t))
+	graf := grafanads.ProvideService(cfg, sv2)
 
 	coreRegistry := coreplugin.ProvideCoreRegistry(am, cw, cm, es, grap, idb, lk, otsdb, pr, tmpo, td, pg, my, ms, graf)
 
@@ -60,6 +60,7 @@ import (
 	"github.com/grafana/grafana/pkg/services/rendering"
 	"github.com/grafana/grafana/pkg/services/schemaloader"
 	"github.com/grafana/grafana/pkg/services/search"
+	"github.com/grafana/grafana/pkg/services/searchV2"
 	"github.com/grafana/grafana/pkg/services/secrets"
 	secretsDatabase "github.com/grafana/grafana/pkg/services/secrets/database"
 	secretsManager "github.com/grafana/grafana/pkg/services/secrets/manager"
@@ -150,6 +151,7 @@ var wireBasicSet = wire.NewSet(
 	wire.Bind(new(login.Store), new(*authinfodatabase.AuthInfoStore)),
 	datasourceproxy.ProvideService,
 	search.ProvideService,
+	searchV2.ProvideService,
 	live.ProvideService,
 	pushhttp.ProvideService,
 	plugincontext.ProvideService,
@@ -60,6 +60,12 @@ var (
 		State:        FeatureStateAlpha,
 		FrontendOnly: true,
 	},
+	{
+		Name:            "panelTitleSearch",
+		Description:     "Search for dashboards using panel title",
+		State:           FeatureStateAlpha,
+		RequiresDevMode: true, // only supported in dev mode right now
+	},
 	{
 		Name:        "tempoSearch",
 		Description: "Enable searching in tempo datasources",
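For orientation (not part of this commit): a backend component that wants to gate behavior on the new toggle would normally take the feature service as a dependency and check the flag. A minimal sketch, assuming the featuremgmt.FeatureToggles interface with its IsEnabled(flag string) method as used elsewhere in the codebase at this point; note that RequiresDevMode means the flag only takes effect when Grafana runs in development mode.

package example

import "github.com/grafana/grafana/pkg/services/featuremgmt"

// searchGate is a hypothetical consumer of the new flag, shown only to
// illustrate how the generated constant (next hunk) would be used.
type searchGate struct {
	features featuremgmt.FeatureToggles
}

func (g *searchGate) panelTitleSearchEnabled() bool {
	// FlagPanelTitleSearch is generated into toggles_gen.go below.
	return g.features.IsEnabled(featuremgmt.FlagPanelTitleSearch)
}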
@@ -47,6 +47,10 @@ const (
 	// Use grafana live websocket to execute backend queries
 	FlagQueryOverLive = "queryOverLive"
 
+	// FlagPanelTitleSearch
+	// Search for dashboards using panel title
+	FlagPanelTitleSearch = "panelTitleSearch"
+
 	// FlagTempoSearch
 	// Enable searching in tempo datasources
 	FlagTempoSearch = "tempoSearch"
pkg/services/searchV2/extract/dashboard.go: new file, 206 lines

@@ -0,0 +1,206 @@
package extract

import (
	"fmt"
	"io"

	jsoniter "github.com/json-iterator/go"
)

func logf(format string, a ...interface{}) {
	fmt.Printf(format, a...)
}

// nolint:gocyclo
// ReadDashboard will take a byte stream and return dashboard info
func ReadDashboard(stream io.Reader, datasource DatasourceLookup) *DashboardInfo {
	iter := jsoniter.Parse(jsoniter.ConfigDefault, stream, 1024)
	dash := &DashboardInfo{}

	for l1Field := iter.ReadObject(); l1Field != ""; l1Field = iter.ReadObject() {
		// Skip null values so we don't need special int handling
		if iter.WhatIsNext() == jsoniter.NilValue {
			iter.Skip()
			continue
		}

		switch l1Field {
		case "id":
			dash.ID = iter.ReadInt64()

		case "uid":
			dash.UID = iter.ReadString()

		case "title":
			dash.Title = iter.ReadString()

		case "description":
			dash.Description = iter.ReadString()

		case "schemaVersion":
			dash.SchemaVersion = iter.ReadInt64()

		case "timezone":
			dash.TimeZone = iter.ReadString()

		case "editable":
			dash.ReadOnly = !iter.ReadBool()

		case "refresh":
			nxt := iter.WhatIsNext()
			if nxt == jsoniter.StringValue {
				dash.Refresh = iter.ReadString()
			} else {
				iter.Skip()
			}

		case "tags":
			for iter.ReadArray() {
				dash.Tags = append(dash.Tags, iter.ReadString())
			}

		case "links":
			for iter.ReadArray() {
				iter.Skip()
				dash.LinkCount++
			}

		case "time":
			obj, ok := iter.Read().(map[string]interface{})
			if ok {
				dash.TimeFrom, _ = obj["from"].(string)
				dash.TimeTo, _ = obj["to"].(string)
			}

		case "panels":
			for iter.ReadArray() {
				dash.Panels = append(dash.Panels, readPanelInfo(iter))
			}

		case "rows":
			for iter.ReadArray() {
				v := iter.Read()
				logf("[DASHBOARD.ROW???] id=%s // %v\n", dash.UID, v)
			}

		case "annotations":
			for sub := iter.ReadObject(); sub != ""; sub = iter.ReadObject() {
				if sub == "list" {
					for iter.ReadArray() {
						v := iter.Read()
						logf("[dash.anno] %v\n", v)
					}
				} else {
					iter.Skip()
				}
			}

		case "templating":
			for sub := iter.ReadObject(); sub != ""; sub = iter.ReadObject() {
				if sub == "list" {
					for iter.ReadArray() {
						for k := iter.ReadObject(); k != ""; k = iter.ReadObject() {
							if k == "name" {
								dash.TemplateVars = append(dash.TemplateVars, iter.ReadString())
							} else {
								iter.Skip()
							}
						}
					}
				} else {
					iter.Skip()
				}
			}

		// Ignore these properties
		case "timepicker":
			fallthrough
		case "version":
			fallthrough
		case "iteration":
			iter.Skip()

		default:
			v := iter.Read()
			logf("[DASHBOARD] support key: %s / %v\n", l1Field, v)
		}
	}

	if dash.UID == "" {
		logf("All dashboards should have a UID defined")
	}

	return dash
}

// will always return strings for now
func readPanelInfo(iter *jsoniter.Iterator) PanelInfo {
	panel := PanelInfo{}

	for l1Field := iter.ReadObject(); l1Field != ""; l1Field = iter.ReadObject() {
		// Skip null values so we don't need special int handling
		if iter.WhatIsNext() == jsoniter.NilValue {
			iter.Skip()
			continue
		}

		switch l1Field {
		case "id":
			panel.ID = iter.ReadInt64()

		case "type":
			panel.Type = iter.ReadString()

		case "title":
			panel.Title = iter.ReadString()

		case "description":
			panel.Description = iter.ReadString()

		case "pluginVersion":
			panel.PluginVersion = iter.ReadString() // since 7x (the saved version for the plugin model)

		case "datasource":
			v := iter.Read()
			logf(">>Panel.datasource = %v\n", v) // string or object!!!

		case "targets":
			for iter.ReadArray() {
				v := iter.Read()
				logf("[Panel.TARGET] %v\n", v)
			}

		case "transformations":
			for iter.ReadArray() {
				for sub := iter.ReadObject(); sub != ""; sub = iter.ReadObject() {
					if sub == "id" {
						panel.Transformations = append(panel.Transformations, iter.ReadString())
					} else {
						iter.Skip()
					}
				}
			}

		// Rows have nested panels
		case "panels":
			for iter.ReadArray() {
				panel.Collapsed = append(panel.Collapsed, readPanelInfo(iter))
			}

		case "options":
			fallthrough

		case "gridPos":
			fallthrough

		case "fieldConfig":
			iter.Skip()

		default:
			v := iter.Read()
			logf("[PANEL] support key: %s / %v\n", l1Field, v)
		}
	}

	return panel
}
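A minimal usage sketch for the extractor above (not part of the commit); the inline JSON and the nil datasource lookup are placeholder values, since datasource resolution is still a TODO at this point.

package main

import (
	"bytes"
	"fmt"

	"github.com/grafana/grafana/pkg/services/searchV2/extract"
)

func main() {
	raw := []byte(`{"uid":"abc","title":"Demo","panels":[{"id":1,"type":"timeseries","title":"CPU"}]}`)

	// DatasourceLookup is an alias for func(key string) *extract.DatasourceInfo.
	lookup := func(key string) *extract.DatasourceInfo { return nil }

	dash := extract.ReadDashboard(bytes.NewReader(raw), lookup)
	fmt.Println(dash.UID, dash.Title, len(dash.Panels)) // abc Demo 1
}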
pkg/services/searchV2/extract/dashboard_test.go: new file, 48 lines

@@ -0,0 +1,48 @@
package extract

import (
	"encoding/json"
	"os"
	"path/filepath"
	"testing"

	"github.com/stretchr/testify/assert"
	"github.com/stretchr/testify/require"
)

func TestReadDashboard(t *testing.T) {
	inputs := []string{
		"all-panels.json",
		"panel-graph/graph-shared-tooltips.json",
	}

	// key will allow name or uid
	ds := func(key string) *DatasourceInfo {
		return nil // TODO!
	}

	for _, input := range inputs {
		// nolint:gosec
		// We can ignore the gosec G304 warning because this is a test with hardcoded input values
		f, err := os.Open("../../../../devenv/dev-dashboards/" + input)
		require.NoError(t, err)

		dash := ReadDashboard(f, ds)
		out, err := json.MarshalIndent(dash, "", "  ")
		require.NoError(t, err)

		update := false
		savedPath := "testdata/" + filepath.Base(input)
		saved, err := os.ReadFile(savedPath)
		if err != nil {
			update = true
			assert.NoError(t, err)
		} else if !assert.JSONEq(t, string(saved), string(out)) {
			update = true
		}

		if update {
			_ = os.WriteFile(savedPath, out, 0600)
		}
	}
}
pkg/services/searchV2/extract/testdata/all-panels.json (vendored): new file, 128 lines

@@ -0,0 +1,128 @@
{
  "uid": "n1jR8vnnz",
  "title": "Panel tests - All panels",
  "tags": [
    "gdev",
    "panel-tests",
    "all-panels"
  ],
  "templateVars": [
    "query0",
    "query1",
    "text"
  ],
  "panels": [
    {
      "id": 34,
      "title": "",
      "type": "text",
      "pluginVersion": "8.1.0-pre"
    },
    {
      "id": 35,
      "title": "",
      "type": "text",
      "pluginVersion": "8.1.0-pre"
    },
    {
      "id": 32,
      "title": "Row title",
      "type": "row"
    },
    {
      "id": 41,
      "title": "State timeline",
      "type": "state-timeline"
    },
    {
      "id": 62,
      "title": "Size, color mapped to different fields + share view",
      "type": "geomap"
    },
    {
      "id": 4,
      "title": "Histogram",
      "type": "histogram"
    },
    {
      "id": 28,
      "title": "Logs",
      "type": "logs"
    },
    {
      "id": 8,
      "title": "Dashboard list",
      "type": "dashlist",
      "pluginVersion": "8.1.0-pre"
    },
    {
      "id": 30,
      "title": "Panel list",
      "type": "pluginlist"
    },
    {
      "id": 6,
      "title": "Alert list",
      "type": "alertlist"
    },
    {
      "id": 26,
      "title": "Heatmap",
      "type": "heatmap"
    },
    {
      "id": 20,
      "title": "Bar gauge",
      "type": "bargauge",
      "pluginVersion": "8.1.0-pre"
    },
    {
      "id": 24,
      "title": "Pie chart",
      "type": "piechart"
    },
    {
      "id": 18,
      "title": "Gauge",
      "type": "gauge",
      "pluginVersion": "8.1.0-pre"
    },
    {
      "id": 22,
      "title": "Tabel",
      "type": "table",
      "pluginVersion": "8.1.0-pre"
    },
    {
      "id": 10,
      "title": "Annotation list",
      "type": "annolist"
    },
    {
      "id": 16,
      "title": "Stat",
      "type": "stat",
      "pluginVersion": "8.1.0-pre"
    },
    {
      "id": 2,
      "title": "Graph NG",
      "type": "timeseries"
    },
    {
      "id": 14,
      "title": "Bar chart",
      "type": "barchart"
    },
    {
      "id": 12,
      "title": "News panel",
      "type": "news"
    }
  ],
  "schemaVersion": 33,
  "linkCount": 2,
  "timeFrom": "now-6h",
  "timeTo": "now",
  "timezone": ""
}
pkg/services/searchV2/extract/testdata/graph-shared-tooltips.json (vendored): new file, 68 lines

@@ -0,0 +1,68 @@
{
  "uid": "TX2VU59MZ",
  "title": "Panel Tests - shared tooltips",
  "tags": [
    "gdev",
    "panel-tests",
    "graph-ng"
  ],
  "panels": [
    {
      "id": 4,
      "title": "two units",
      "type": "timeseries",
      "pluginVersion": "7.5.0-pre"
    },
    {
      "id": 13,
      "title": "Speed vs Temperature (XY)",
      "type": "xychart",
      "pluginVersion": "7.5.0-pre",
      "transformations": [
        "seriesToColumns",
        "organize"
      ]
    },
    {
      "id": 2,
      "title": "Cursor info",
      "type": "debug",
      "pluginVersion": "7.5.0-pre"
    },
    {
      "id": 5,
      "title": "Only temperature",
      "type": "timeseries",
      "pluginVersion": "7.5.0-pre"
    },
    {
      "id": 9,
      "title": "Only Speed",
      "type": "timeseries",
      "pluginVersion": "7.5.0-pre"
    },
    {
      "id": 11,
      "title": "Panel Title",
      "type": "timeseries",
      "pluginVersion": "7.5.0-pre"
    },
    {
      "id": 8,
      "title": "flot panel (temperature)",
      "type": "graph",
      "pluginVersion": "7.5.0-pre"
    },
    {
      "id": 10,
      "title": "flot panel (no units)",
      "type": "graph",
      "pluginVersion": "7.5.0-pre"
    }
  ],
  "schemaVersion": 28,
  "linkCount": 0,
  "timeFrom": "2020-09-14T16:13:20.000Z",
  "timeTo": "2020-09-15T20:00:00.000Z",
  "timezone": ""
}
pkg/services/searchV2/extract/types.go: new file, 45 lines

@@ -0,0 +1,45 @@
package extract

type DatasourceLookup = func(key string) *DatasourceInfo

type DatasourceInfo struct {
	UID     string `json:"uid"`
	Name    string `json:"name"`
	Type    string `json:"type"` // plugin name
	Version string `json:"version"`
	Access  string `json:"access,omitempty"` // proxy, direct, or empty
}

type PanelInfo struct {
	ID              int64    `json:"id"`
	Title           string   `json:"title"`
	Description     string   `json:"description,omitempty"`
	Type            string   `json:"type,omitempty"` // PluginID
	PluginVersion   string   `json:"pluginVersion,omitempty"`
	Datasource      []string `json:"datasource,omitempty"`      // UIDs
	DatasourceType  []string `json:"datasourceType,omitempty"`  // PluginIDs
	Transformations []string `json:"transformations,omitempty"` // ids of the transformation steps

	// Rows define panels as sub objects
	Collapsed []PanelInfo `json:"collapsed,omitempty"`
}

type DashboardInfo struct {
	ID             int64       `json:"id,omitempty"`
	UID            string      `json:"uid,omitempty"`
	Path           string      `json:"path,omitempty"`
	Title          string      `json:"title"`
	Description    string      `json:"description,omitempty"`
	Tags           []string    `json:"tags"` // UIDs
	Datasource     []string    `json:"datasource,omitempty"`     // UIDs
	DatasourceType []string    `json:"datasourceType,omitempty"` // PluginIDs
	TemplateVars   []string    `json:"templateVars,omitempty"`   // the keys used
	Panels         []PanelInfo `json:"panels"`                   // nested documents
	SchemaVersion  int64       `json:"schemaVersion"`
	LinkCount      int64       `json:"linkCount"`
	TimeFrom       string      `json:"timeFrom"`
	TimeTo         string      `json:"timeTo"`
	TimeZone       string      `json:"timezone"`
	Refresh        string      `json:"refresh,omitempty"`
	ReadOnly       bool        `json:"readOnly,omitempty"` // editable = false
}
pkg/services/searchV2/service.go: new file, 234 lines

@@ -0,0 +1,234 @@
package searchV2

import (
	"bytes"
	"context"
	"encoding/json"
	"fmt"
	"strconv"
	"time"

	"github.com/grafana/grafana-plugin-sdk-go/backend"
	"github.com/grafana/grafana-plugin-sdk-go/data"
	"github.com/grafana/grafana/pkg/models"
	"github.com/grafana/grafana/pkg/services/searchV2/extract"
	"github.com/grafana/grafana/pkg/services/sqlstore"
)

type StandardSearchService struct {
	sql *sqlstore.SQLStore
}

func ProvideService(sql *sqlstore.SQLStore) SearchService {
	return &StandardSearchService{
		sql: sql,
	}
}

type dashMeta struct {
	id        int64
	is_folder bool
	folder_id int64
	created   time.Time
	updated   time.Time
	dash      *extract.DashboardInfo
}

func (s *StandardSearchService) DoDashboardQuery(ctx context.Context, user *backend.User, orgId int64, query DashboardQuery) *backend.DataResponse {
	rsp := &backend.DataResponse{}

	if user == nil || user.Role != string(models.ROLE_ADMIN) {
		rsp.Error = fmt.Errorf("search is only supported for admin users while in early development")
		return rsp
	}

	// Load and parse all dashboards for given orgId
	dash, err := loadDashboards(ctx, orgId, s.sql)
	if err != nil {
		rsp.Error = err
		return rsp
	}

	rsp.Frames = metaToFrame(dash)

	return rsp
}

type dashDataQueryResult struct {
	Id       int64
	IsFolder bool  `xorm:"is_folder"`
	FolderID int64 `xorm:"folder_id"`
	Data     []byte
	Created  time.Time
	Updated  time.Time
}

func loadDashboards(ctx context.Context, orgID int64, sql *sqlstore.SQLStore) ([]dashMeta, error) {
	meta := make([]dashMeta, 0, 200)

	// key will allow name or uid
	lookup := func(key string) *extract.DatasourceInfo {
		return nil // TODO!
	}

	err := sql.WithDbSession(ctx, func(sess *sqlstore.DBSession) error {
		rows := make([]*dashDataQueryResult, 0)

		sess.Table("dashboard").
			Where("org_id = ?", orgID).
			Cols("id", "is_folder", "folder_id", "data", "created", "updated")

		err := sess.Find(&rows)
		if err != nil {
			return err
		}

		for _, row := range rows {
			dash := extract.ReadDashboard(bytes.NewReader(row.Data), lookup)

			meta = append(meta, dashMeta{
				id:        row.Id,
				is_folder: row.IsFolder,
				folder_id: row.FolderID,
				created:   row.Created,
				updated:   row.Updated,
				dash:      dash,
			})
		}

		return nil
	})

	return meta, err
}

type simpleCounter struct {
	values map[string]int64
}

func (c *simpleCounter) add(key string) {
	v, ok := c.values[key]
	if !ok {
		v = 0
	}
	c.values[key] = v + 1
}

func (c *simpleCounter) toFrame(name string) *data.Frame {
	key := data.NewFieldFromFieldType(data.FieldTypeString, 0)
	val := data.NewFieldFromFieldType(data.FieldTypeInt64, 0)
	for k, v := range c.values {
		key.Append(k)
		val.Append(v)
	}
	return data.NewFrame(name, key, val)
}

// UGLY... but helpful for now
func metaToFrame(meta []dashMeta) data.Frames {
	folderID := data.NewFieldFromFieldType(data.FieldTypeInt64, 0)
	folderUID := data.NewFieldFromFieldType(data.FieldTypeString, 0)
	folderName := data.NewFieldFromFieldType(data.FieldTypeString, 0)

	folderID.Name = "ID"
	folderUID.Name = "UID"
	folderName.Name = "Name"

	dashID := data.NewFieldFromFieldType(data.FieldTypeInt64, 0)
	dashUID := data.NewFieldFromFieldType(data.FieldTypeString, 0)
	dashFolderID := data.NewFieldFromFieldType(data.FieldTypeInt64, 0)
	dashName := data.NewFieldFromFieldType(data.FieldTypeString, 0)
	dashDescr := data.NewFieldFromFieldType(data.FieldTypeString, 0)
	dashCreated := data.NewFieldFromFieldType(data.FieldTypeTime, 0)
	dashUpdated := data.NewFieldFromFieldType(data.FieldTypeTime, 0)
	dashSchemaVersion := data.NewFieldFromFieldType(data.FieldTypeInt64, 0)
	dashTags := data.NewFieldFromFieldType(data.FieldTypeNullableString, 0)

	dashID.Name = "ID"
	dashUID.Name = "UID"
	dashFolderID.Name = "FolderID"
	dashName.Name = "Name"
	dashDescr.Name = "Description"
	dashTags.Name = "Tags"
	dashSchemaVersion.Name = "SchemaVersion"
	dashCreated.Name = "Created"
	dashUpdated.Name = "Updated"

	dashTags.Config = &data.FieldConfig{
		Custom: map[string]interface{}{
			// Table panel default styling
			"displayMode": "json-view",
		},
	}

	panelDashID := data.NewFieldFromFieldType(data.FieldTypeInt64, 0)
	panelID := data.NewFieldFromFieldType(data.FieldTypeInt64, 0)
	panelName := data.NewFieldFromFieldType(data.FieldTypeString, 0)
	panelDescr := data.NewFieldFromFieldType(data.FieldTypeString, 0)
	panelType := data.NewFieldFromFieldType(data.FieldTypeString, 0)

	panelDashID.Name = "DashboardID"
	panelID.Name = "ID"
	panelName.Name = "Name"
	panelDescr.Name = "Description"
	panelType.Name = "Type"

	panelTypeCounter := simpleCounter{
		values: make(map[string]int64, 30),
	}

	schemaVersionCounter := simpleCounter{
		values: make(map[string]int64, 30),
	}

	var tags *string
	for _, row := range meta {
		if row.is_folder {
			folderID.Append(row.id)
			folderUID.Append(row.dash.UID)
			folderName.Append(row.dash.Title)
			continue
		}

		dashID.Append(row.id)
		dashUID.Append(row.dash.UID)
		dashFolderID.Append(row.folder_id)
		dashName.Append(row.dash.Title)
		dashDescr.Append(row.dash.Description)
		dashSchemaVersion.Append(row.dash.SchemaVersion)
		dashCreated.Append(row.created)
		dashUpdated.Append(row.updated)

		// stats
		schemaVersionCounter.add(strconv.FormatInt(row.dash.SchemaVersion, 10))

		// Send tags as JSON array
		tags = nil
		if len(row.dash.Tags) > 0 {
			b, err := json.Marshal(row.dash.Tags)
			if err == nil {
				s := string(b)
				tags = &s
			}
		}
		dashTags.Append(tags)

		// Row for each panel
		for _, panel := range row.dash.Panels {
			panelDashID.Append(row.id)
			panelID.Append(panel.ID)
			panelName.Append(panel.Title)
			panelDescr.Append(panel.Description)
			panelType.Append(panel.Type)
			panelTypeCounter.add(panel.Type)
		}
	}

	return data.Frames{
		data.NewFrame("folders", folderID, folderUID, folderName),
		data.NewFrame("dashboards", dashID, dashUID, dashFolderID, dashName, dashDescr, dashTags, dashSchemaVersion, dashCreated, dashUpdated),
		data.NewFrame("panels", panelDashID, panelID, panelName, panelDescr, panelType),
		panelTypeCounter.toFrame("panel-type-counts"),
		schemaVersionCounter.toFrame("schema-version-counts"),
	}
}
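A rough sketch of how a caller consumes this service and the frames it returns (not part of the commit; the user, org id, and query text are placeholder values, and the admin-only restriction above still applies).

package example

import (
	"context"
	"fmt"

	"github.com/grafana/grafana-plugin-sdk-go/backend"

	"github.com/grafana/grafana/pkg/services/searchV2"
)

func runSearch(ctx context.Context, svc searchV2.SearchService) error {
	user := &backend.User{Login: "admin", Role: "Admin"} // early version only answers admins
	rsp := svc.DoDashboardQuery(ctx, user, 1, searchV2.DashboardQuery{Query: ""})
	if rsp.Error != nil {
		return rsp.Error
	}
	for _, frame := range rsp.Frames {
		// Expect "folders", "dashboards", "panels" and the two counter frames.
		fmt.Println(frame.Name, frame.Rows(), "rows")
	}
	return nil
}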
pkg/services/searchV2/stub.go: new file, 30 lines

@@ -0,0 +1,30 @@
package searchV2

import (
	"context"

	"github.com/grafana/grafana-plugin-sdk-go/backend"
	"github.com/grafana/grafana-plugin-sdk-go/data"
)

type stubSearchService struct {
}

func NewStubSearchService() SearchService {
	return &stubSearchService{}
}

func (s *stubSearchService) DoDashboardQuery(ctx context.Context, user *backend.User, orgId int64, query DashboardQuery) *backend.DataResponse {
	rsp := &backend.DataResponse{}

	// dashboards
	fid := data.NewFieldFromFieldType(data.FieldTypeInt64, 0)
	uid := data.NewFieldFromFieldType(data.FieldTypeString, 0)

	fid.Append(int64(2))
	uid.Append("hello")

	rsp.Frames = append(rsp.Frames, data.NewFrame("dashboards", fid, uid))

	return rsp
}
pkg/services/searchV2/types.go: new file, 15 lines

@@ -0,0 +1,15 @@
package searchV2

import (
	"context"

	"github.com/grafana/grafana-plugin-sdk-go/backend"
)

type DashboardQuery struct {
	Query string
}

type SearchService interface {
	DoDashboardQuery(ctx context.Context, user *backend.User, orgId int64, query DashboardQuery) *backend.DataResponse
}
@@ -14,6 +14,7 @@ import (
 	"github.com/grafana/grafana/pkg/components/simplejson"
 	"github.com/grafana/grafana/pkg/infra/log"
 	"github.com/grafana/grafana/pkg/models"
+	"github.com/grafana/grafana/pkg/services/searchV2"
 	"github.com/grafana/grafana/pkg/setting"
 	"github.com/grafana/grafana/pkg/tsdb/testdatasource"
 )
@@ -39,11 +40,11 @@ var (
 	logger = log.New("tsdb.grafana")
 )
 
-func ProvideService(cfg *setting.Cfg) *Service {
-	return newService(cfg)
+func ProvideService(cfg *setting.Cfg, search searchV2.SearchService) *Service {
+	return newService(cfg, search)
 }
 
-func newService(cfg *setting.Cfg) *Service {
+func newService(cfg *setting.Cfg, search searchV2.SearchService) *Service {
 	s := &Service{
 		staticRootPath: cfg.StaticRootPath,
 		roots: []string{
@@ -54,6 +55,7 @@ func newService(cfg *setting.Cfg) *Service {
 			"maps",
 			"upload", // does not exist yet
 		},
+		search: search,
 	}
 
 	return s
@@ -64,6 +66,7 @@ type Service struct {
 	// path to the public folder
 	staticRootPath string
 	roots          []string
+	search         searchV2.SearchService
 }
 
 func DataSourceModel(orgId int64) *models.DataSource {
@@ -78,7 +81,7 @@ func DataSourceModel(orgId int64) *models.DataSource {
 	}
 }
 
-func (s *Service) QueryData(_ context.Context, req *backend.QueryDataRequest) (*backend.QueryDataResponse, error) {
+func (s *Service) QueryData(ctx context.Context, req *backend.QueryDataRequest) (*backend.QueryDataResponse, error) {
 	response := backend.NewQueryDataResponse()
 
 	for _, q := range req.Queries {
@@ -89,6 +92,8 @@ func (s *Service) QueryData(_ context.Context, req *backend.QueryDataRequest) (*backend.QueryDataResponse, error) {
 			response.Responses[q.RefID] = s.doListQuery(q)
 		case queryTypeRead:
 			response.Responses[q.RefID] = s.doReadQuery(q)
+		case queryTypeSearch:
+			response.Responses[q.RefID] = s.doSearchQuery(ctx, req, q)
 		default:
 			response.Responses[q.RefID] = backend.DataResponse{
 				Error: fmt.Errorf("unknown query type"),
@@ -216,3 +221,15 @@ func (s *Service) doRandomWalk(query backend.DataQuery) backend.DataResponse {
 
 	return response
 }
+
+func (s *Service) doSearchQuery(ctx context.Context, req *backend.QueryDataRequest, query backend.DataQuery) backend.DataResponse {
+	q := searchV2.DashboardQuery{}
+	err := json.Unmarshal(query.JSON, &q)
+	if err != nil {
+		return backend.DataResponse{
+			Error: err,
+		}
+	}
+
+	return *s.search.DoDashboardQuery(ctx, req.PluginContext.User, req.PluginContext.OrgID, q)
+}
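For reference, the new branch above is reached by sending a data query whose QueryType is "search" and whose JSON body unmarshals into searchV2.DashboardQuery. A sketch of such a request (not part of the commit; the refID and query text are placeholders):

package example

import (
	"encoding/json"

	"github.com/grafana/grafana-plugin-sdk-go/backend"
)

func searchDataQuery() backend.DataQuery {
	body, _ := json.Marshal(map[string]string{"query": "cpu"}) // becomes searchV2.DashboardQuery{Query: "cpu"}
	return backend.DataQuery{
		RefID:     "A",
		QueryType: "search", // matches the queryTypeSearch constant added later in this diff
		JSON:      body,
	}
}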
@@ -5,6 +5,7 @@ import (
 	"path"
 	"testing"
 
+	"github.com/grafana/grafana/pkg/services/searchV2"
 	"github.com/grafana/grafana/pkg/setting"
 
 	"github.com/grafana/grafana-plugin-sdk-go/backend"
@@ -18,7 +19,7 @@ func asJSON(v interface{}) json.RawMessage {
 }
 
 func TestReadFolderListing(t *testing.T) {
-	ds := newService(&setting.Cfg{StaticRootPath: "../../../public"})
+	ds := newService(&setting.Cfg{StaticRootPath: "../../../public"}, searchV2.NewStubSearchService())
 	dr := ds.doListQuery(backend.DataQuery{
 		QueryType: "x",
 		JSON: asJSON(listQueryModel{
@@ -30,7 +31,7 @@ func TestReadFolderListing(t *testing.T) {
 }
 
 func TestReadCSVFile(t *testing.T) {
-	ds := newService(&setting.Cfg{StaticRootPath: "../../../public"})
+	ds := newService(&setting.Cfg{StaticRootPath: "../../../public"}, searchV2.NewStubSearchService())
 	dr := ds.doReadQuery(backend.DataQuery{
 		QueryType: "x",
 		JSON: asJSON(readQueryModel{
@@ -4,6 +4,9 @@ const (
 	// QueryTypeRandomWalk returns a random walk series
 	queryTypeRandomWalk = "randomWalk"
 
+	// QueryTypeSearch will search for grafana resources
+	queryTypeSearch = "search"
+
 	// QueryTypeList will list the files in a folder
 	queryTypeList = "list"
 
@@ -10,7 +10,8 @@ import {
 } from '@grafana/data';
 import { GrafanaDatasource } from '../datasource';
 import { defaultQuery, GrafanaQuery, GrafanaQueryType } from '../types';
-import { getBackendSrv, getDataSourceSrv } from '@grafana/runtime';
+import { config, getBackendSrv, getDataSourceSrv } from '@grafana/runtime';
+import { contextSrv } from 'app/core/services/context_srv';
 
 type Props = QueryEditorProps<GrafanaDatasource, GrafanaQuery>;
 
@@ -43,6 +44,18 @@ export class QueryEditor extends PureComponent<Props, State> {
     },
   ];
 
+  constructor(props: Props) {
+    super(props);
+
+    if (config.featureToggles.panelTitleSearch && contextSrv.isGrafanaAdmin) {
+      this.queryTypes.push({
+        label: 'Search',
+        value: GrafanaQueryType.Search,
+        description: 'Search for grafana resources',
+      });
+    }
+  }
+
   loadChannelInfo() {
     getBackendSrv()
       .fetch({ url: 'api/live/list' })
@@ -146,20 +159,27 @@ export class QueryEditor extends PureComponent<Props, State> {
     onRunQuery();
   };
 
-  checkAndUpdateBuffer = (txt: string) => {
+  checkAndUpdateValue = (key: keyof GrafanaQuery, txt: string) => {
     const { onChange, query, onRunQuery } = this.props;
-    let buffer: number | undefined;
-    if (txt) {
-      try {
-        buffer = rangeUtil.intervalToSeconds(txt) * 1000;
-      } catch (err) {
-        console.warn('ERROR', err);
+    if (key === 'buffer') {
+      let buffer: number | undefined;
+      if (txt) {
+        try {
+          buffer = rangeUtil.intervalToSeconds(txt) * 1000;
+        } catch (err) {
+          console.warn('ERROR', err);
+        }
       }
+      onChange({
+        ...query,
+        buffer,
+      });
+    } else {
+      onChange({
+        ...query,
+        [key]: txt,
+      });
     }
-    onChange({
-      ...query,
-      buffer,
-    });
     onRunQuery();
   };
 
@@ -167,11 +187,11 @@ export class QueryEditor extends PureComponent<Props, State> {
     if (e.key !== 'Enter') {
       return;
     }
-    this.checkAndUpdateBuffer((e.target as any).value);
+    this.checkAndUpdateValue('buffer', (e.target as any).value);
   };
 
   handleBlur = (e: React.FocusEvent<HTMLInputElement>) => {
-    this.checkAndUpdateBuffer(e.target.value);
+    this.checkAndUpdateValue('buffer', e.target.value);
   };
 
   renderMeasurementsQuery() {
@@ -322,6 +342,34 @@ export class QueryEditor extends PureComponent<Props, State> {
     );
   }
 
+  handleSearchEnterKey = (e: React.KeyboardEvent<HTMLInputElement>) => {
+    if (e.key !== 'Enter') {
+      return;
+    }
+    this.checkAndUpdateValue('query', (e.target as any).value);
+  };
+
+  handleSearchBlur = (e: React.FocusEvent<HTMLInputElement>) => {
+    this.checkAndUpdateValue('query', e.target.value);
+  };
+
+  renderSearch() {
+    let { query } = this.props.query;
+    return (
+      <InlineFieldRow>
+        <InlineField label="Query" grow={true} labelWidth={labelWidth}>
+          <Input
+            placeholder="Everything"
+            defaultValue={query ?? ''}
+            onKeyDown={this.handleSearchEnterKey}
+            onBlur={this.handleSearchBlur}
+            spellCheck={false}
+          />
+        </InlineField>
+      </InlineFieldRow>
+    );
+  }
+
   render() {
     const query = {
       ...defaultQuery,
@@ -342,6 +390,7 @@ export class QueryEditor extends PureComponent<Props, State> {
         </InlineFieldRow>
         {query.queryType === GrafanaQueryType.LiveMeasurements && this.renderMeasurementsQuery()}
         {query.queryType === GrafanaQueryType.List && this.renderListPublicFiles()}
+        {query.queryType === GrafanaQueryType.Search && this.renderSearch()}
       </>
     );
   }
@@ -13,6 +13,7 @@ export enum GrafanaQueryType {
   RandomWalk = 'randomWalk',
   List = 'list',
   Read = 'read',
+  Search = 'search',
 }
 
 export interface GrafanaQuery extends DataQuery {
@@ -21,6 +22,7 @@ export interface GrafanaQuery extends DataQuery {
   filter?: LiveDataFilter;
   buffer?: number;
   path?: string; // for list and read
+  query?: string; // for query endpoint
 }
 
 export const defaultQuery: GrafanaQuery = {