Mirror of https://github.com/grafana/grafana.git (synced 2025-02-25 18:55:37 -06:00)

Alerting: Remove datasource (name) from migration (#33544)

No longer needed as of https://github.com/grafana/grafana/pull/33416 for https://github.com/grafana/alerting-squad/issues/126

This commit is contained in:
parent c0d28d9ed7
commit 713260f6fa
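For orientation before the diff, here is a minimal, self-contained sketch (not part of the commit) of the data-structure change it makes: the migration previously carried a map of [orgID, dataSourceId] -> [UID, Name], and since only the UID is still needed after https://github.com/grafana/grafana/pull/33416, the value collapses to a single string behind the new dsUIDLookup type. The org ID, datasource ID, and UID values below are made up for illustration.

package main

import "fmt"

// dsUIDLookup maps [orgID, dataSourceId] to a datasource UID, as introduced by this commit.
type dsUIDLookup map[[2]int64]string

// GetUID fetches the datasource UID for orgID+datasourceID; a missing entry yields "".
func (d dsUIDLookup) GetUID(orgID, datasourceID int64) string {
	return d[[2]int64{orgID, datasourceID}]
}

func main() {
	lookup := dsUIDLookup{
		{1, 42}: "P8045C56BDA891CB2", // hypothetical org 1, datasource 42
	}
	fmt.Println(lookup.GetUID(1, 42)) // P8045C56BDA891CB2
	fmt.Println(lookup.GetUID(1, 99)) // empty string: no such datasource
}

A lookup miss simply returns the empty string (the map zero value), which lines up with the "can not require match" behavior visible in the diff below.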
@@ -8,7 +8,7 @@ import (
 	"time"
 )
 
-func transConditions(set dashAlertSettings, orgID int64, dsIDMap map[[2]int64][2]string) (*condition, error) {
+func transConditions(set dashAlertSettings, orgID int64, dsUIDMap dsUIDLookup) (*condition, error) {
 	refIDtoCondIdx := make(map[string][]int) // a map of original refIds to their corresponding condition index
 	for i, cond := range set.Conditions {
 		if len(cond.Query.Params) != 3 {
@@ -56,7 +56,6 @@ func transConditions(set dashAlertSettings, orgID int64, dsIDMap map[[2]int64][2
 		}
 
 		// This referenced query/refID has different time ranges, so new queries are needed for each unique time range.
 
 		timeRanges := make([][2]string, 0, len(timeRangesToCondIdx)) // a sorted list of unique time ranges for the query
 		for tr := range timeRangesToCondIdx {
 			timeRanges = append(timeRanges, tr)
@@ -119,8 +118,7 @@ func transConditions(set dashAlertSettings, orgID int64, dsIDMap map[[2]int64][2
 		}
 
 		// one could have an alert saved but datasource deleted, so can not require match.
-		dsInfo := dsIDMap[[2]int64{orgID, set.Conditions[condIdx].Query.DatasourceID}]
-		queryObj["datasource"] = dsInfo[1] // name is needed for UI to load query editor
+		dsUID := dsUIDMap.GetUID(orgID, set.Conditions[condIdx].Query.DatasourceID)
 		queryObj["refId"] = refID
 
 		encodedObj, err := json.Marshal(queryObj)
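To isolate the effect of the removed queryObj["datasource"] line above, here is a hedged sketch with a made-up model payload (the real migration unmarshals the stored dashboard-alert query model first): only the refId is written back into the model, while the datasource reference now travels as a UID on the alert query (next hunk) instead of as a name inside the model JSON.

package main

import (
	"encoding/json"
	"fmt"
)

func main() {
	// queryObj stands in for the unmarshalled per-condition query model.
	queryObj := map[string]interface{}{
		"expr": "up", // hypothetical pre-existing model content
	}

	// After this commit only the refId is set; queryObj["datasource"] = <name> is gone.
	queryObj["refId"] = "A"

	encodedObj, err := json.Marshal(queryObj)
	if err != nil {
		panic(err)
	}
	fmt.Println(string(encodedObj)) // {"expr":"up","refId":"A"}
}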
@@ -140,7 +138,7 @@ func transConditions(set dashAlertSettings, orgID int64, dsIDMap map[[2]int64][2
 			RefID:             refID,
 			Model:             encodedObj,
 			RelativeTimeRange: *rTR,
-			DatasourceUID:     dsInfo[0],
+			DatasourceUID:     dsUID,
 			QueryType:         queryType,
 		}
 		newCond.Data = append(newCond.Data, alertQuery)
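The hunk above only swaps dsInfo[0] for dsUID, but the surrounding struct literal is easier to read in one piece. The sketch below uses hypothetical stand-in types (the real alertQuery and relative-time-range types are defined elsewhere in the migration package and are not shown in this diff); it mirrors the fields visible in the hunk and shows the query carrying the datasource UID directly.

package main

import (
	"encoding/json"
	"fmt"
)

// relativeTimeRange is a stand-in for the migration's relative time range type.
type relativeTimeRange struct {
	From int64 `json:"from"`
	To   int64 `json:"to"`
}

// alertQuerySketch mirrors the fields set in the hunk above; the type and JSON tags are assumptions.
type alertQuerySketch struct {
	RefID             string            `json:"refId"`
	Model             json.RawMessage   `json:"model"`
	RelativeTimeRange relativeTimeRange `json:"relativeTimeRange"`
	DatasourceUID     string            `json:"datasourceUid"`
	QueryType         string            `json:"queryType"`
}

func main() {
	dsUID := "P8045C56BDA891CB2" // hypothetical UID, as returned by dsUIDMap.GetUID
	q := alertQuerySketch{
		RefID:             "A",
		Model:             json.RawMessage(`{"refId":"A"}`),
		RelativeTimeRange: relativeTimeRange{From: 600, To: 0},
		DatasourceUID:     dsUID,
		QueryType:         "",
	}
	out, _ := json.Marshal(q)
	fmt.Println(string(out))
}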
@@ -172,12 +170,10 @@ func transConditions(set dashAlertSettings, orgID int64, dsIDMap map[[2]int64][2
 	exprModel := struct {
 		Type       string                 `json:"type"`
 		RefID      string                 `json:"refId"`
-		Datasource string                 `json:"datasource"`
 		Conditions []classicConditionJSON `json:"conditions"`
 	}{
 		"classic_conditions",
 		ccRefID,
-		"__expr__",
 		conditions,
 	}
 
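The two removals above strip the hard-coded Datasource field (the "__expr__" value) out of the classic-conditions expression model. A hedged sketch of the resulting model shape follows, with a stripped-down stand-in for classicConditionJSON (the real type is defined elsewhere in the migration package) and an empty, made-up condition list.

package main

import (
	"encoding/json"
	"fmt"
)

// classicConditionSketch is a minimal stand-in for classicConditionJSON.
type classicConditionSketch struct {
	Operator struct {
		Type string `json:"type"`
	} `json:"operator"`
}

func main() {
	conditions := []classicConditionSketch{} // hypothetical: no conditions

	// The anonymous struct mirrors exprModel after this commit: no Datasource field.
	exprModel := struct {
		Type       string                   `json:"type"`
		RefID      string                   `json:"refId"`
		Conditions []classicConditionSketch `json:"conditions"`
	}{
		"classic_conditions",
		"B", // hypothetical ccRefID
		conditions,
	}

	out, _ := json.Marshal(exprModel)
	fmt.Println(string(out)) // {"type":"classic_conditions","refId":"B","conditions":[]}
}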
@@ -1,24 +1,30 @@
 package ualert
 
-// slurpDSIDs returns a map of [orgID, dataSourceId] -> [UID, Name].
-func (m *migration) slurpDSIDs() (map[[2]int64][2]string, error) {
+type dsUIDLookup map[[2]int64]string
+
+// GetUID fetches the datasource UID based on orgID+datasourceID
+func (d dsUIDLookup) GetUID(orgID, datasourceID int64) string {
+	return d[[2]int64{orgID, datasourceID}]
+}
+
+// slurpDSIDs returns a map of [orgID, dataSourceId] -> UID.
+func (m *migration) slurpDSIDs() (dsUIDLookup, error) {
 	dsIDs := []struct {
 		OrgID int64  `xorm:"org_id"`
 		ID    int64  `xorm:"id"`
 		UID   string `xorm:"uid"`
-		Name  string
 	}{}
 
-	err := m.sess.SQL(`SELECT org_id, id, uid, name FROM data_source`).Find(&dsIDs)
+	err := m.sess.SQL(`SELECT org_id, id, uid FROM data_source`).Find(&dsIDs)
 
 	if err != nil {
 		return nil, err
 	}
 
-	idToUID := make(map[[2]int64][2]string, len(dsIDs))
+	idToUID := make(dsUIDLookup, len(dsIDs))
 
 	for _, ds := range dsIDs {
-		idToUID[[2]int64{ds.OrgID, ds.ID}] = [2]string{ds.UID, ds.Name}
+		idToUID[[2]int64{ds.OrgID, ds.ID}] = ds.UID
 	}
 
 	return idToUID, nil
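The new slurpDSIDs above still runs its query through the xorm session; the sketch below mimics the same fold into a dsUIDLookup with an in-memory slice standing in for the rows of SELECT org_id, id, uid FROM data_source. The rows are fabricated for illustration.

package main

import "fmt"

type dsUIDLookup map[[2]int64]string

func main() {
	// dsIDs stands in for the rows scanned by the xorm query in slurpDSIDs.
	dsIDs := []struct {
		OrgID int64
		ID    int64
		UID   string
	}{
		{OrgID: 1, ID: 2, UID: "uid-prom"},     // hypothetical row
		{OrgID: 1, ID: 3, UID: "uid-graphite"}, // hypothetical row
	}

	// Fold the rows into the lookup the same way the migration does.
	idToUID := make(dsUIDLookup, len(dsIDs))
	for _, ds := range dsIDs {
		idToUID[[2]int64{ds.OrgID, ds.ID}] = ds.UID
	}

	fmt.Println(idToUID[[2]int64{1, 2}]) // uid-prom
	fmt.Println(idToUID[[2]int64{1, 9}]) // empty string, e.g. a datasource deleted after the alert was saved
}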
@@ -50,7 +50,7 @@ func (m *migration) Exec(sess *xorm.Session, mg *migrator.Migrator) error {
 		return err
 	}
 
-	// [orgID, dataSourceId] -> [UID, Name]
+	// [orgID, dataSourceId] -> UID
 	dsIDMap, err := m.slurpDSIDs()
 	if err != nil {
 		return err
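Finally, a hedged end-to-end sketch of how the pieces fit after this commit, using heavily simplified stand-ins (slurpDSIDsFake replaces the xorm-backed slurpDSIDs, and the condition translation is reduced to a single UID lookup): Exec-style code builds the lookup once, and the per-alert translation only ever asks it for UIDs, never for names.

package main

import "fmt"

type dsUIDLookup map[[2]int64]string

func (d dsUIDLookup) GetUID(orgID, datasourceID int64) string {
	return d[[2]int64{orgID, datasourceID}]
}

// slurpDSIDsFake stands in for (*migration).slurpDSIDs, which reads data_source rows.
func slurpDSIDsFake() (dsUIDLookup, error) {
	return dsUIDLookup{{1, 2}: "uid-prom"}, nil // hypothetical single datasource
}

// transConditionsSketch stands in for transConditions: it now needs only the UID lookup.
func transConditionsSketch(datasourceID, orgID int64, dsUIDMap dsUIDLookup) string {
	// A missing datasource (deleted after the alert was saved) yields "",
	// so the migration does not require a match.
	return dsUIDMap.GetUID(orgID, datasourceID)
}

func main() {
	dsIDMap, err := slurpDSIDsFake() // [orgID, dataSourceId] -> UID
	if err != nil {
		panic(err)
	}
	fmt.Println(transConditionsSketch(2, 1, dsIDMap)) // uid-prom
	fmt.Println(transConditionsSketch(7, 1, dsIDMap)) // empty string for a deleted datasource
}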
|