Export: include alerts, thumbnails, usage stats, and short urls (#51938)

This commit is contained in:
Ryan McKinley 2022-07-11 16:25:40 -07:00 committed by GitHub
parent 0e066dd5f8
commit 3003a48dc6
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23
15 changed files with 454 additions and 211 deletions

View File

@ -0,0 +1,51 @@
package export
import (
"encoding/json"
"fmt"
"path"
"time"
"github.com/grafana/grafana/pkg/services/sqlstore"
)
// exportAlerts writes every alert rule in the current org as a
// pretty-printed JSON file under <orgDir>/alerts, committing one file
// per rule with the rule's last-updated time as the commit timestamp.
func exportAlerts(helper *commitHelper, job *gitExportJob) error {
	alertDir := path.Join(helper.orgDir, "alerts")

	return job.sql.WithDbSession(helper.ctx, func(sess *sqlstore.DBSession) error {
		// ruleResult mirrors the subset of alert_rule columns we export.
		// Updated is excluded from the JSON body; it only drives the
		// commit timestamp.
		type ruleResult struct {
			Title        string          `xorm:"title"`
			UID          string          `xorm:"uid"`
			NamespaceUID string          `xorm:"namespace_uid"`
			RuleGroup    string          `xorm:"rule_group"`
			Condition    json.RawMessage `xorm:"data"`
			DashboardUID string          `xorm:"dashboard_uid"`
			PanelID      int64           `xorm:"panel_id"`
			Updated      time.Time       `xorm:"updated" json:"-"`
		}

		rows := make([]*ruleResult, 0)

		sess.Table("alert_rule").Where("org_id = ?", helper.orgID)
		if err := sess.Find(&rows); err != nil {
			return err
		}

		for _, row := range rows {
			err := helper.add(commitOptions{
				body: []commitBody{{
					body:  prettyJSON(row),
					fpath: path.Join(alertDir, row.UID+".json"), // must be JSON files
				}},
				comment: fmt.Sprintf("Alert: %s", row.Title),
				when:    row.Updated,
			})
			if err != nil {
				return err
			}
		}
		// All rows committed successfully; the old trailing `return err`
		// was always nil here and obscured that.
		return nil
	})
}

View File

@ -105,24 +105,26 @@ func exportAnnotations(helper *commitHelper, job *gitExportJob) error {
}
}
frame := data.NewFrame("", f_ID, f_DashboardID, f_PanelID, f_Epoch, f_EpochEnd, f_Text, f_Tags)
js, err := jsoniter.ConfigCompatibleWithStandardLibrary.MarshalIndent(frame, "", " ")
if err != nil {
return err
}
if f_ID.Len() > 0 {
frame := data.NewFrame("", f_ID, f_DashboardID, f_PanelID, f_Epoch, f_EpochEnd, f_Text, f_Tags)
js, err := jsoniter.ConfigCompatibleWithStandardLibrary.MarshalIndent(frame, "", " ")
if err != nil {
return err
}
err = helper.add(commitOptions{
body: []commitBody{
{
fpath: filepath.Join(helper.orgDir, "annotations", "annotations.json"),
body: js, // TODO, pretty?
err = helper.add(commitOptions{
body: []commitBody{
{
fpath: filepath.Join(helper.orgDir, "annotations", "annotations.json"),
body: js, // TODO, pretty?
},
},
},
when: time.Now(),
comment: "Exported annotations",
})
if err != nil {
return err
when: time.Now(),
comment: "Exported annotations",
})
if err != nil {
return err
}
}
return err
})

View File

@ -1,9 +1,12 @@
package export
import (
"fmt"
"path"
"strconv"
"strings"
"github.com/grafana/grafana-plugin-sdk-go/data"
"github.com/grafana/grafana-plugin-sdk-go/data/sqlutil"
"github.com/grafana/grafana/pkg/services/sqlstore"
)
@ -13,58 +16,119 @@ func dumpAuthTables(helper *commitHelper, job *gitExportJob) error {
comment: "auth tables dump",
}
tables := []string{
"user", // joined with "org_user" to get the role
"user_role",
"builtin_role",
"api_key",
"team", "team_group", "team_role", "team_member",
"role",
"temp_user",
"user_auth_token", // no org_id... is it temporary?
"permission",
type statsTables struct {
table string
sql string
converters []sqlutil.Converter
drop []string
}
for _, table := range tables {
switch table {
case "permission":
sess.Table(table).
Join("left", "role", "permission.role_id = role.id").
Cols("permission.*").
Where("org_id = ?", helper.orgID).
Asc("permission.id")
case "user":
sess.Table(table).
Join("inner", "org_user", "user.id = org_user.user_id").
Cols("user.*", "org_user.role").
Where("org_user.org_id = ?", helper.orgID).
Asc("user.id")
case "user_auth_token":
sess.Table(table).
Join("inner", "org_user", "user_auth_token.id = org_user.user_id").
Cols("user_auth_token.*").
Where("org_user.org_id = ?", helper.orgID).
Asc("user_auth_token.id")
default:
sess.Table(table).Where("org_id = ?", helper.orgID).Asc("id")
dump := []statsTables{
{
table: "user",
sql: `
SELECT user.*, org_user.role
FROM user
JOIN org_user ON user.id = org_user.user_id
WHERE org_user.org_id =` + strconv.FormatInt(helper.orgID, 10),
converters: []sqlutil.Converter{{Dynamic: true}},
drop: []string{
"id", "version",
"password", // UMMMMM... for now
"org_id",
},
},
{
table: "user_role",
sql: `
SELECT * FROM user_role
WHERE org_id =` + strconv.FormatInt(helper.orgID, 10),
},
{
table: "builtin_role",
sql: `
SELECT * FROM builtin_role
WHERE org_id =` + strconv.FormatInt(helper.orgID, 10),
},
{
table: "api_key",
sql: `
SELECT * FROM api_key
WHERE org_id =` + strconv.FormatInt(helper.orgID, 10),
},
{
table: "permission",
sql: `
SELECT permission.*
FROM permission
JOIN role ON permission.role_id = role.id
WHERE org_id =` + strconv.FormatInt(helper.orgID, 10),
},
{
table: "user_auth_token",
sql: `
SELECT user_auth_token.*
FROM user_auth_token
JOIN org_user ON user_auth_token.id = org_user.user_id
WHERE org_user.org_id =` + strconv.FormatInt(helper.orgID, 10),
},
{table: "team"},
{table: "team_group"},
{table: "team_role"},
{table: "team_member"},
{table: "temp_user"},
{table: "role"},
}
for _, auth := range dump {
if auth.sql == "" {
auth.sql = `
SELECT * FROM ` + auth.table + `
WHERE org_id =` + strconv.FormatInt(helper.orgID, 10)
}
if auth.converters == nil {
auth.converters = []sqlutil.Converter{{Dynamic: true}}
}
if auth.drop == nil {
auth.drop = []string{
"id",
"org_id",
}
}
raw, err := sess.QueryInterface()
rows, err := sess.DB().QueryContext(helper.ctx, auth.sql)
if err != nil {
return fmt.Errorf("unable to read: %s // %s", table, err.Error())
if strings.HasPrefix(err.Error(), "no such table") {
continue
}
return err
}
if len(raw) < 1 {
continue // don't write empty files
}
frame, err := queryResultToDataFrame(raw, frameOpts{
skip: []string{"org_id", "version", "help_flags1", "theme"},
})
frame, err := sqlutil.FrameFromRows(rows.Rows, -1, auth.converters...)
if err != nil {
return err
}
frame.Name = table
if frame.Fields[0].Len() < 1 {
continue // do not write empty structures
}
if len(auth.drop) > 0 {
lookup := make(map[string]bool, len(auth.drop))
for _, v := range auth.drop {
lookup[v] = true
}
fields := make([]*data.Field, 0, len(frame.Fields))
for _, f := range frame.Fields {
if lookup[f.Name] {
continue
}
fields = append(fields, f)
}
frame.Fields = fields
}
frame.Name = auth.table
commit.body = append(commit.body, commitBody{
fpath: path.Join(helper.orgDir, "auth", "sql.dump", table+".json"),
fpath: path.Join(helper.orgDir, "auth", "sql.dump", auth.table+".json"),
frame: frame,
})
}

View File

@ -151,7 +151,7 @@ func exportDashboards(helper *commitHelper, job *gitExportJob) error {
if job.cfg.KeepHistory {
sess.Table("dashboard_version").
Join("INNER", "dashboard", "dashboard.id = dashboard_version.dashboard_id").
Where("org_id = ?", job.orgID).
Where("org_id = ?", helper.orgID).
Cols("dashboard.id",
"dashboard_version.version",
"dashboard_version.created",
@ -161,7 +161,7 @@ func exportDashboards(helper *commitHelper, job *gitExportJob) error {
Asc("dashboard_version.created")
} else {
sess.Table("dashboard").
Where("org_id = ?", job.orgID).
Where("org_id = ?", helper.orgID).
Cols("id",
"version",
"created",

View File

@ -0,0 +1,80 @@
package export
import (
"encoding/json"
"fmt"
"io/ioutil"
"path/filepath"
"strings"
"time"
"github.com/grafana/grafana/pkg/services/sqlstore"
)
// exportDashboardThumbnails writes each dashboard thumbnail image to
// <orgDir>/thumbs. It uses the root-alias.json file (written by the
// dashboard exporter) to choose a stable, human-readable path when one
// exists, falling back to a uid-based path otherwise.
func exportDashboardThumbnails(helper *commitHelper, job *gitExportJob) error {
	alias := make(map[string]string, 100)
	aliasLookup, err := ioutil.ReadFile(filepath.Join(helper.orgDir, "root-alias.json"))
	if err != nil {
		// Wrap the underlying error so the real cause (file missing vs.
		// unreadable) is preserved for the caller.
		return fmt.Errorf("missing dashboard alias files (must export dashboards first): %w", err)
	}
	err = json.Unmarshal(aliasLookup, &alias)
	if err != nil {
		return err
	}

	return job.sql.WithDbSession(helper.ctx, func(sess *sqlstore.DBSession) error {
		// dashboardThumb mirrors the joined thumbnail row we export.
		type dashboardThumb struct {
			UID      string `xorm:"uid"`
			Image    []byte `xorm:"image"`
			Theme    string `xorm:"theme"`
			Kind     string `xorm:"kind"`
			MimeType string `xorm:"mime_type"`
			Updated  time.Time
		}

		rows := make([]*dashboardThumb, 0)

		// SELECT uid,image,theme,kind,mime_type,dashboard_thumbnail.updated
		// FROM dashboard_thumbnail
		// JOIN dashboard ON dashboard.id = dashboard_thumbnail.dashboard_id
		// WHERE org_id = 2; //dashboard.uid = '2VVbg06nz';
		sess.Table("dashboard_thumbnail").
			Join("INNER", "dashboard", "dashboard.id = dashboard_thumbnail.dashboard_id").
			Cols("uid", "image", "theme", "kind", "mime_type", "dashboard_thumbnail.updated").
			Where("dashboard.org_id = ?", helper.orgID)

		err := sess.Find(&rows)
		if err != nil {
			// The thumbnail table may not exist on this instance; treat
			// that as "nothing to export" rather than a hard failure.
			if strings.HasPrefix(err.Error(), "no such table") {
				return nil
			}
			return err
		}

		// Write one PNG per thumbnail, named after the aliased dashboard
		// path when the alias file knows this UID.
		for _, row := range rows {
			p, ok := alias[row.UID]
			if !ok {
				p = "uid/" + row.UID
			} else {
				p = strings.TrimSuffix(p, "-dash.json")
			}

			err := helper.add(commitOptions{
				body: []commitBody{
					{
						fpath: filepath.Join(helper.orgDir, "thumbs", fmt.Sprintf("%s.thumb-%s.png", p, row.Theme)),
						body:  row.Image,
					},
				},
				when:    row.Updated,
				comment: "Thumbnail",
			})
			if err != nil {
				return err
			}
		}
		return nil
	})
}

View File

@ -10,7 +10,7 @@ import (
func exportDataSources(helper *commitHelper, job *gitExportJob) error {
cmd := &datasources.GetDataSourcesQuery{
OrgId: job.orgID,
OrgId: helper.orgID,
}
err := job.sql.GetDataSources(helper.ctx, cmd)
if err != nil {

View File

@ -3,6 +3,7 @@ package export
import (
"fmt"
"path"
"strings"
"time"
"github.com/grafana/grafana/pkg/services/sqlstore"
@ -25,6 +26,9 @@ func exportLive(helper *commitHelper, job *gitExportJob) error {
err := sess.Find(&rows)
if err != nil {
if strings.HasPrefix(err.Error(), "no such table") {
return nil
}
return err
}

View File

@ -10,7 +10,7 @@ import (
func exportSnapshots(helper *commitHelper, job *gitExportJob) error {
cmd := &dashboardsnapshots.GetDashboardSnapshotsQuery{
OrgId: job.orgID,
OrgId: helper.orgID,
Limit: 500000,
SignedInUser: nil,
}

View File

@ -10,7 +10,7 @@ import (
func exportSystemPlaylists(helper *commitHelper, job *gitExportJob) error {
cmd := &models.GetPlaylistsQuery{
OrgId: job.orgID,
OrgId: helper.orgID,
Limit: 500000,
}
err := job.sql.SearchPlaylists(helper.ctx, cmd)

View File

@ -0,0 +1,72 @@
package export
import (
"fmt"
"path/filepath"
"time"
"github.com/grafana/grafana/pkg/services/sqlstore"
)
// exportSystemShortURL writes every short URL in the current org as a
// JSON file under <orgDir>/system/short_url/uid, then commits a single
// summary file of last-seen timestamps dated at the most recent one.
func exportSystemShortURL(helper *commitHelper, job *gitExportJob) error {
	var newest int64
	seen := make(map[string]int64, 50)
	dir := filepath.Join(helper.orgDir, "system", "short_url")

	err := job.sql.WithDbSession(helper.ctx, func(sess *sqlstore.DBSession) error {
		// urlResult mirrors the short_url columns we read; only path is
		// serialized into the per-UID JSON body.
		type urlResult struct {
			UID        string    `xorm:"uid" json:"-"`
			Path       string    `xorm:"path" json:"path"`
			CreatedBy  int64     `xorm:"created_by" json:"-"`
			CreatedAt  time.Time `xorm:"created_at" json:"-"`
			LastSeenAt int64     `xorm:"last_seen_at" json:"-"`
		}

		results := make([]*urlResult, 0)
		sess.Table("short_url").Where("org_id = ?", helper.orgID)
		if err := sess.Find(&results); err != nil {
			return err
		}

		for _, item := range results {
			// Track last-seen values for the summary file, remembering
			// the most recent timestamp across all URLs.
			if item.LastSeenAt > 0 {
				seen[item.UID] = item.LastSeenAt
				if item.LastSeenAt > newest {
					newest = item.LastSeenAt
				}
			}

			opts := commitOptions{
				body: []commitBody{
					{
						fpath: filepath.Join(dir, "uid", fmt.Sprintf("%s.json", item.UID)),
						body:  prettyJSON(item),
					},
				},
				when:    item.CreatedAt,
				comment: "short URL",
				userID:  item.CreatedBy,
			}
			if err := helper.add(opts); err != nil {
				return err
			}
		}
		return nil
	})
	// Skip the summary commit on error or when nothing was ever seen.
	if err != nil || len(seen) < 1 {
		return err
	}

	return helper.add(commitOptions{
		body: []commitBody{
			{
				fpath: filepath.Join(dir, "last_seen_at.json"),
				body:  prettyJSON(seen),
			},
		},
		when:    time.UnixMilli(newest),
		comment: "short URL",
	})
}

View File

@ -0,0 +1,85 @@
package export
import (
"path"
"strconv"
"strings"
"github.com/grafana/grafana-plugin-sdk-go/data/sqlutil"
"github.com/grafana/grafana/pkg/services/sqlstore"
)
// exportUsage dumps the usage-statistics tables (per-day data source and
// dashboard usage, plus dashboard usage totals) for the current org as
// one data frame per table, committed together under <orgDir>/usage.
func exportUsage(helper *commitHelper, job *gitExportJob) error {
	return job.sql.WithDbSession(helper.ctx, func(sess *sqlstore.DBSession) error {
		commit := commitOptions{
			comment: "usage stats",
		}

		// statsTables pairs a table name with the SQL used to dump it and
		// the converters used to build a frame from the raw rows.
		type statsTables struct {
			table      string
			sql        string
			converters []sqlutil.Converter
		}

		dump := []statsTables{
			{
				table: "data_source_usage_by_day",
				sql: `SELECT day,uid,queries,errors,load_duration_ms
					FROM data_source_usage_by_day 
					JOIN data_source ON data_source.id = data_source_usage_by_day.data_source_id
					WHERE org_id =` + strconv.FormatInt(helper.orgID, 10),
				converters: []sqlutil.Converter{{Dynamic: true}},
			},
			{
				table: "dashboard_usage_by_day",
				sql: `SELECT uid,day,views,queries,errors,load_duration
					FROM dashboard_usage_by_day
					JOIN dashboard ON dashboard_usage_by_day.dashboard_id = dashboard.id
					WHERE org_id =` + strconv.FormatInt(helper.orgID, 10),
				converters: []sqlutil.Converter{{Dynamic: true}},
			},
			{
				table: "dashboard_usage_sums",
				sql: `SELECT uid,
					views_last_1_days,
					views_last_7_days,
					views_last_30_days,
					views_total,
					queries_last_1_days,
					queries_last_7_days,
					queries_last_30_days,
					queries_total,
					errors_last_1_days,
					errors_last_7_days,
					errors_last_30_days,
					errors_total
					FROM dashboard_usage_sums
					JOIN dashboard ON dashboard_usage_sums.dashboard_id = dashboard.id
					WHERE org_id =` + strconv.FormatInt(helper.orgID, 10),
				converters: []sqlutil.Converter{{Dynamic: true}},
			},
		}

		for _, usage := range dump {
			rows, err := sess.DB().QueryContext(helper.ctx, usage.sql)
			if err != nil {
				// Usage tables only exist on instrumented builds; skip
				// tables that are simply absent.
				if strings.HasPrefix(err.Error(), "no such table") {
					continue
				}
				return err
			}

			frame, err := sqlutil.FrameFromRows(rows.Rows, -1, usage.converters...)
			// Close the cursor before the next iteration — the original
			// code leaked one open result set per table.
			closeErr := rows.Close()
			if err != nil {
				return err
			}
			if closeErr != nil {
				return closeErr
			}

			frame.Name = usage.table
			commit.body = append(commit.body, commitBody{
				fpath: path.Join(helper.orgDir, "usage", usage.table+".json"),
				frame: frame,
			})
		}

		return helper.add(commit)
	})
}

View File

@ -1,138 +0,0 @@
package export
import (
"encoding/json"
"fmt"
"strings"
"github.com/grafana/grafana-plugin-sdk-go/data"
)
// fieldInfo describes one output column: the field name plus the
// converter used to map raw DB values into a data.Frame field.
type fieldInfo struct {
	Name string
	Conv data.FieldConverter
}

// frameOpts configures queryResultToDataFrame: an optional explicit
// schema, plus column names to drop when the schema is inferred.
type frameOpts struct {
	schema []fieldInfo
	skip   []string
}
// prettyJSON marshals v as indented JSON. Marshal errors are
// deliberately ignored; a failure yields a nil slice.
func prettyJSON(v interface{}) []byte {
	out, _ := json.MarshalIndent(v, "", " ")
	return out
}
// queryResultToDataFrame converts generic query rows (column-name ->
// value maps) into a data.Frame. When opts.schema is empty, the schema
// is inferred from the first row (skipping opts.skip columns), password
// and salt values are masked, and the resulting fields are reordered
// into a stable, human-friendly order. Returns (nil, nil) for no rows.
func queryResultToDataFrame(rows []map[string]interface{}, opts frameOpts) (*data.Frame, error) {
	count := len(rows)
	if count < 1 {
		return nil, nil // empty frame
	}

	schema := opts.schema
	if len(schema) < 1 {
		// No explicit schema: infer one from the keys of the first row.
		// NOTE(review): assumes every row has the same columns as row 0 —
		// columns absent from row 0 are silently dropped.
		skip := make(map[string]bool, len(opts.skip))
		for _, k := range opts.skip {
			skip[k] = true
		}
		for k, v := range rows[0] {
			if skip[k] {
				continue
			}
			field := fieldInfo{
				Name: k,
				Conv: data.FieldConverter{
					OutputFieldType: data.FieldTypeFor(v),
				},
			}
			// Columns whose Go type has no frame equivalent are dropped
			// (logged to stdout rather than treated as an error).
			if field.Conv.OutputFieldType == data.FieldTypeUnknown {
				fmt.Printf("UNKNOWN type: %s / %v\n", k, v)
				continue
			}
			// Don't write passwords to disk for now!!!!
			// Masked as "<password>" / "<salt>" placeholders.
			if k == "password" || k == "salt" {
				field.Conv.Converter = func(v interface{}) (interface{}, error) {
					return fmt.Sprintf("<%s>", k), nil
				}
			}
			schema = append(schema, field)
		}
	}

	// Allocate one field per schema entry, pre-sized for all rows.
	fields := make([]*data.Field, len(schema))
	for i, s := range schema {
		fields[i] = data.NewFieldFromFieldType(s.Conv.OutputFieldType, count)
		fields[i].Name = s.Name
	}

	// Fill the fields row by row; missing or nil cells stay at the
	// field's zero value.
	var err error
	for i, row := range rows {
		for j, s := range schema {
			v, ok := row[s.Name]
			if ok && v != nil {
				if s.Conv.Converter != nil {
					v, err = s.Conv.Converter(v)
					if err != nil {
						return nil, fmt.Errorf("converting field: %s // %s", s.Name, err.Error())
					}
				}
				fields[j].Set(i, v)
			}
		}
	}

	// Fields are in random order
	// (map iteration order): when the schema was inferred, reorder into
	// id, well-known name/identity columns, *_id columns, everything
	// else, then is_* flags and created/updated timestamps last.
	if len(opts.schema) < 1 {
		last := []*data.Field{}
		frame := data.NewFrame("")
		lookup := make(map[string]*data.Field, len(fields))
		for _, f := range fields {
			if f.Name == "id" {
				frame.Fields = append(frame.Fields, f) // first
				continue
			}
			lookup[f.Name] = f
		}

		// First items
		for _, name := range []string{"name", "login", "email", "role", "description", "uid"} {
			f, ok := lookup[name]
			if ok {
				frame.Fields = append(frame.Fields, f) // first
				delete(lookup, name)
			}
		}

		// IDs
		for k, f := range lookup {
			if strings.HasSuffix(k, "_id") {
				frame.Fields = append(frame.Fields, f) // first
				delete(lookup, k)
			} else if strings.HasPrefix(k, "is_") {
				last = append(last, f) // first
				delete(lookup, k)
			}
		}

		// Last items
		for _, name := range []string{"created", "updated"} {
			f, ok := lookup[name]
			if ok {
				last = append(last, f) // first
				delete(lookup, name)
			}
		}

		// Rest
		// NOTE(review): remaining fields keep map iteration order, so the
		// middle of the frame is still nondeterministic between runs.
		for _, f := range lookup {
			frame.Fields = append(frame.Fields, f)
		}
		frame.Fields = append(frame.Fields, last...)
		return frame, nil
	}

	return data.NewFrame("", fields...), nil
}

View File

@ -2,6 +2,7 @@ package export
import (
"context"
"encoding/json"
"fmt"
"path"
"sync"
@ -22,7 +23,6 @@ type gitExportJob struct {
logger log.Logger
sql *sqlstore.SQLStore
dashboardsnapshotsService dashboardsnapshots.Service
orgID int64
rootDir string
statusMu sync.Mutex
@ -40,7 +40,6 @@ func startGitExportJob(cfg ExportConfig, sql *sqlstore.SQLStore, dashboardsnapsh
cfg: cfg,
sql: sql,
dashboardsnapshotsService: dashboardsnapshotsService,
orgID: orgID,
rootDir: rootDir,
broadcaster: broadcaster,
status: ExportStatus{
@ -176,6 +175,14 @@ func (e *gitExportJob) doOrgExportWithHistory(helper *commitHelper) error {
exporters := []simpleExporter{}
if include.Dash {
exporters = append(exporters, exportDashboards)
if include.DashThumbs {
exporters = append(exporters, exportDashboardThumbnails)
}
}
if include.Alerts {
exporters = append(exporters, exportAlerts)
}
if include.DS {
@ -186,12 +193,17 @@ func (e *gitExportJob) doOrgExportWithHistory(helper *commitHelper) error {
exporters = append(exporters, dumpAuthTables)
}
if include.Usage {
exporters = append(exporters, exportUsage)
}
if include.Services {
exporters = append(exporters, exportFiles,
exportSystemPreferences,
exportSystemStars,
exportSystemPlaylists,
exportKVStore,
exportSystemShortURL,
exportLive)
}
@ -212,6 +224,11 @@ func (e *gitExportJob) doOrgExportWithHistory(helper *commitHelper) error {
return nil
}
// prettyJSON marshals v as indented JSON; marshal errors are ignored
// and produce a nil slice.
func prettyJSON(v interface{}) []byte {
	b, _ := json.MarshalIndent(v, "", " ")
	return b
}
/**
git remote add origin git@github.com:ryantxu/test-dash-repo.git

View File

@ -20,13 +20,15 @@ type ExportConfig struct {
KeepHistory bool `json:"history"`
Include struct {
Auth bool `json:"auth"`
DS bool `json:"ds"`
Dash bool `json:"dash"`
Services bool `json:"services"`
Usage bool `json:"usage"`
Anno bool `json:"anno"`
Snapshots bool `json:"snapshots"`
Auth bool `json:"auth"`
DS bool `json:"ds"`
Dash bool `json:"dash"`
DashThumbs bool `json:"dash_thumbs"`
Alerts bool `json:"alerts"`
Services bool `json:"services"`
Usage bool `json:"usage"`
Anno bool `json:"anno"`
Snapshots bool `json:"snapshots"`
} `json:"include"`
// Depends on the format

View File

@ -25,6 +25,8 @@ interface ExportInclude {
auth: boolean;
ds: boolean;
dash: boolean;
dash_thumbs: boolean;
alerts: boolean;
services: boolean;
usage: boolean;
anno: boolean;
@ -44,6 +46,8 @@ const includAll: ExportInclude = {
auth: true,
ds: true,
dash: true,
dash_thumbs: true,
alerts: true,
services: true,
usage: true,
anno: true,