mirror of
https://github.com/grafana/grafana.git
synced 2025-02-25 18:55:37 -06:00
Query History: Use a search index on new queries to filter in mixed data sources (#88979)
* Add search index table * Stab a test * Add more tests * Add basic index * Switch to UID and add a test for the index * Improve tests coverage * Remove redundant whitespaces * Load all data source APIs when query history is loaded * Fix column type * Fix migration * Clean-up the index * Fix linting * Fix migrations * Fix migrations * Fix migrations * Rename index to details
This commit is contained in:
parent
6d9b8225aa
commit
6750e881e3
@ -9,6 +9,16 @@ import (
|
||||
"github.com/grafana/grafana/pkg/util"
|
||||
)
|
||||
|
||||
// Datasource is the minimal shape of the "datasource" property of a stored
// query; only the UID is decoded when building the query-history search index.
type Datasource struct {
	UID string `json:"uid"`
}
|
||||
|
||||
// QueryHistoryDetails is a row of the query_history_details table: a search
// index entry linking one query-history item (by its UID) to one data source
// UID referenced by that item's queries. A mixed-data-source item produces
// one row per distinct data source.
type QueryHistoryDetails struct {
	ID                  int64  `xorm:"pk autoincr 'id'"`
	DatasourceUID       string `xorm:"datasource_uid"`
	QueryHistoryItemUID string `xorm:"query_history_item_uid"`
}
|
||||
|
||||
// createQuery adds a query into query history
|
||||
func (s QueryHistoryService) createQuery(ctx context.Context, user *user.SignedInUser, cmd CreateQueryInQueryHistoryCommand) (QueryHistoryDTO, error) {
|
||||
queryHistory := QueryHistory{
|
||||
@ -29,6 +39,25 @@ func (s QueryHistoryService) createQuery(ctx context.Context, user *user.SignedI
|
||||
return QueryHistoryDTO{}, err
|
||||
}
|
||||
|
||||
dsUids, err := FindDataSourceUIDs(cmd.Queries)
|
||||
|
||||
if err == nil {
|
||||
var queryHistoryDetailsItems []QueryHistoryDetails
|
||||
for _, uid := range dsUids {
|
||||
queryHistoryDetailsItems = append(queryHistoryDetailsItems, QueryHistoryDetails{
|
||||
QueryHistoryItemUID: queryHistory.UID,
|
||||
DatasourceUID: uid,
|
||||
})
|
||||
}
|
||||
|
||||
err = s.store.WithDbSession(ctx, func(session *db.Session) error {
|
||||
for _, queryHistoryDetailsItem := range queryHistoryDetailsItems {
|
||||
_, err = session.Insert(queryHistoryDetailsItem)
|
||||
}
|
||||
return nil
|
||||
})
|
||||
}
|
||||
|
||||
dto := QueryHistoryDTO{
|
||||
UID: queryHistory.UID,
|
||||
DatasourceUID: queryHistory.DatasourceUID,
|
||||
@ -116,6 +145,12 @@ func (s QueryHistoryService) deleteQuery(ctx context.Context, user *user.SignedI
|
||||
s.log.Error("Failed to unstar query while deleting it from query history", "query", UID, "user", user.UserID, "error", err)
|
||||
}
|
||||
|
||||
// remove the details
|
||||
_, err = session.Table("query_history_details").Where("query_history_item_uid = ?", UID).Delete(QueryHistoryDetails{})
|
||||
if err != nil {
|
||||
s.log.Error("Failed to remove the details for the query item", "query", UID, "user", user.UserID, "error", err)
|
||||
}
|
||||
|
||||
// Then delete it
|
||||
id, err := session.Where("org_id = ? AND created_by = ? AND uid = ?", user.OrgID, user.UserID, UID).Delete(QueryHistory{})
|
||||
if err != nil {
|
||||
@ -274,19 +309,28 @@ func (s QueryHistoryService) deleteStaleQueries(ctx context.Context, olderThan i
|
||||
var rowsCount int64
|
||||
|
||||
err := s.store.WithDbSession(ctx, func(session *db.Session) error {
|
||||
sql := `DELETE
|
||||
FROM query_history
|
||||
WHERE uid IN (
|
||||
SELECT uid FROM (
|
||||
SELECT uid FROM query_history
|
||||
LEFT JOIN query_history_star
|
||||
ON query_history_star.query_uid = query_history.uid
|
||||
WHERE query_history_star.query_uid IS NULL
|
||||
AND query_history.created_at <= ?
|
||||
ORDER BY query_history.id ASC
|
||||
LIMIT 10000
|
||||
) AS q
|
||||
)`
|
||||
uids_sql := `SELECT uid FROM (
|
||||
SELECT uid FROM query_history
|
||||
LEFT JOIN query_history_star
|
||||
ON query_history_star.query_uid = query_history.uid
|
||||
WHERE query_history_star.query_uid IS NULL
|
||||
AND query_history.created_at <= ?
|
||||
ORDER BY query_history.id ASC
|
||||
LIMIT 10000
|
||||
) AS q`
|
||||
|
||||
details_sql := `DELETE
|
||||
FROM query_history_details
|
||||
WHERE query_history_item_uid IN (` + uids_sql + `)`
|
||||
|
||||
sql := `DELETE
|
||||
FROM query_history
|
||||
WHERE uid IN (` + uids_sql + `)`
|
||||
|
||||
_, err := session.Exec(details_sql, strconv.FormatInt(olderThan, 10))
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
res, err := session.Exec(sql, strconv.FormatInt(olderThan, 10))
|
||||
if err != nil {
|
||||
@ -329,17 +373,17 @@ func (s QueryHistoryService) enforceQueryHistoryRowLimit(ctx context.Context, li
|
||||
if countRowsToDelete > 0 {
|
||||
var sql string
|
||||
if starredQueries {
|
||||
sql = `DELETE FROM query_history_star
|
||||
sql = `DELETE FROM query_history_star
|
||||
WHERE id IN (
|
||||
SELECT id FROM (
|
||||
SELECT id FROM query_history_star
|
||||
ORDER BY id ASC
|
||||
ORDER BY id ASC
|
||||
LIMIT ?
|
||||
) AS q
|
||||
)`
|
||||
} else {
|
||||
sql = `DELETE
|
||||
FROM query_history
|
||||
sql = `DELETE
|
||||
FROM query_history
|
||||
WHERE uid IN (
|
||||
SELECT uid FROM (
|
||||
SELECT uid FROM query_history
|
||||
|
@ -6,6 +6,7 @@ import (
|
||||
|
||||
"github.com/stretchr/testify/require"
|
||||
|
||||
"github.com/grafana/grafana/pkg/infra/db"
|
||||
"github.com/grafana/grafana/pkg/web"
|
||||
)
|
||||
|
||||
@ -44,9 +45,27 @@ func TestIntegrationDeleteStaleQueryFromQueryHistory(t *testing.T) {
|
||||
// In this scenario we have 2 starred queries and 1 not starred query
|
||||
testScenarioWithMultipleQueriesInQueryHistory(t, "Stale starred query history can not be deleted",
|
||||
func(t *testing.T, sc scenarioContext) {
|
||||
// all indices are added
|
||||
err := sc.sqlStore.WithDbSession(context.Background(), func(dbSession *db.Session) error {
|
||||
count, err := dbSession.Table("query_history_details").Count()
|
||||
require.NoError(t, err)
|
||||
require.Equal(t, int64(3), count)
|
||||
return err
|
||||
})
|
||||
require.NoError(t, err)
|
||||
|
||||
olderThan := sc.service.now().Unix() + 60
|
||||
rowsDeleted, err := sc.service.DeleteStaleQueriesInQueryHistory(context.Background(), olderThan)
|
||||
require.NoError(t, err)
|
||||
require.Equal(t, 1, rowsDeleted)
|
||||
|
||||
// only one details row is removed
|
||||
err = sc.sqlStore.WithDbSession(context.Background(), func(dbSession *db.Session) error {
|
||||
count, err := dbSession.Table("query_history_details").Count()
|
||||
require.NoError(t, err)
|
||||
require.Equal(t, int64(2), count)
|
||||
return err
|
||||
})
|
||||
require.NoError(t, err)
|
||||
})
|
||||
}
|
||||
|
36
pkg/services/queryhistory/queryhistory_details.go
Normal file
36
pkg/services/queryhistory/queryhistory_details.go
Normal file
@ -0,0 +1,36 @@
|
||||
package queryhistory
|
||||
|
||||
import (
|
||||
"encoding/json"
|
||||
"slices"
|
||||
|
||||
"github.com/grafana/grafana/pkg/components/simplejson"
|
||||
)
|
||||
|
||||
// DataQuery is the minimal shape of a single query stored in query history;
// only the datasource reference is decoded (all other fields are ignored).
type DataQuery struct {
	Datasource Datasource `json:"datasource"`
}
|
||||
|
||||
func FindDataSourceUIDs(queriesJSON *simplejson.Json) ([]string, error) {
|
||||
uids := make([]string, 0)
|
||||
queries := []DataQuery{}
|
||||
bytes, err := queriesJSON.ToDB()
|
||||
|
||||
if err != nil {
|
||||
return uids, err
|
||||
}
|
||||
|
||||
err = json.Unmarshal(bytes, &queries)
|
||||
|
||||
if err != nil {
|
||||
return uids, err
|
||||
}
|
||||
|
||||
for _, query := range queries {
|
||||
if !slices.Contains(uids, query.Datasource.UID) {
|
||||
uids = append(uids, query.Datasource.UID)
|
||||
}
|
||||
}
|
||||
|
||||
return uids, nil
|
||||
}
|
61
pkg/services/queryhistory/queryhistory_details_test.go
Normal file
61
pkg/services/queryhistory/queryhistory_details_test.go
Normal file
@ -0,0 +1,61 @@
|
||||
package queryhistory
|
||||
|
||||
import (
|
||||
"testing"
|
||||
|
||||
"github.com/grafana/grafana/pkg/components/simplejson"
|
||||
"github.com/stretchr/testify/require"
|
||||
)
|
||||
|
||||
// Test payloads mirroring the JSON stored for query-history items: an array
// of query objects, each optionally carrying a "datasource" ref with a "uid".
var (
	// Two queries against two different data sources (elasticsearch + loki).
	mixedQuery = []byte(`[{"alias":"","bucketAggs":[{"field":"@timestamp","id":"2","settings":{"interval":"auto"},"type":"date_histogram"}],"datasource":{"type":"elasticsearch","uid":"gdev-elasticsearch"},"key":"Q-15d3139f-a942-491c-8fb6-46982475ab07-0","metrics":[{"id":"1","type":"count"}],"query":"test","refId":"A","timeField":"@timestamp"},{"datasource":{"type":"loki","uid":"gdev-loki"},"editorMode":"code","expr":"test","key":"Q-67b4a119-37dd-4276-beac-4ec76b089f10-0","queryType":"range","refId":"B"}]`)
	// Three queries, two of which share the same loki data source UID.
	mixedQueryWithDuplicates = []byte(`[{"alias":"","bucketAggs":[{"field":"@timestamp","id":"2","settings":{"interval":"auto"},"type":"date_histogram"}],"datasource":{"type":"elasticsearch","uid":"gdev-elasticsearch"},"key":"Q-15d3139f-a942-491c-8fb6-46982475ab07-0","metrics":[{"id":"1","type":"count"}],"query":"test","refId":"A","timeField":"@timestamp"},{"datasource":{"type":"loki","uid":"gdev-loki"},"editorMode":"code","expr":"test","key":"Q-67b4a119-37dd-4276-beac-4ec76b089f10-0","queryType":"range","refId":"B"},{"datasource":{"type":"loki","uid":"gdev-loki"},"editorMode":"code","expr":"test2","key":"Q-28825351-32c3-4c46-98b4-b8cfffae0b42-2","queryType":"range","refId":"C"}]`)
	// Two queries against the same single loki data source.
	nonMixedQuery = []byte(`[{"datasource":{"type":"loki","uid":"gdev-loki"},"editorMode":"code","expr":"test","key":"Q-95a6d6ef-7a5e-4e2a-a212-f1f2df4a349a-0","queryType":"range","refId":"A"},{"datasource":{"type":"loki","uid":"gdev-loki"},"editorMode":"code","expr":"test 2","key":"Q-b838fc6b-bcd7-4538-962a-d35209a8783f-1","queryType":"range","refId":"B"}]`)
	// First query has no "datasource" property at all.
	mixedQueryMissingDatasource = []byte(`[{"alias":"","bucketAggs":[{"field":"@timestamp","id":"2","settings":{"interval":"auto"},"type":"date_histogram"}],"key":"Q-15d3139f-a942-491c-8fb6-46982475ab07-0","metrics":[{"id":"1","type":"count"}],"query":"test","refId":"A","timeField":"@timestamp"},{"datasource":{"type":"loki","uid":"PDDA8E780A17E7EF1"},"editorMode":"code","expr":"test","key":"Q-67b4a119-37dd-4276-beac-4ec76b089f10-0","queryType":"range","refId":"B"}]`)
	// First query's "datasource" object lacks a "uid" field.
	mixedQueryMissingUID = []byte(`[{"alias":"","bucketAggs":[{"field":"@timestamp","id":"2","settings":{"interval":"auto"},"type":"date_histogram"}],"datasource":{"type":"elasticsearch"},"key":"Q-15d3139f-a942-491c-8fb6-46982475ab07-0","metrics":[{"id":"1","type":"count"}],"query":"test","refId":"A","timeField":"@timestamp"},{"datasource":{"type":"loki","uid":"PDDA8E780A17E7EF1"},"editorMode":"code","expr":"test","key":"Q-67b4a119-37dd-4276-beac-4ec76b089f10-0","queryType":"range","refId":"B"}]`)
	// Malformed JSON used to exercise the error path.
	invalidJSON = []byte(`{`)
)
|
||||
|
||||
func TestQueryHistoryDetails(t *testing.T) {
|
||||
t.Run("should return error when json is invalid", func(t *testing.T) {
|
||||
json, _ := simplejson.NewJson(invalidJSON)
|
||||
_, err := FindDataSourceUIDs(json)
|
||||
require.Error(t, err)
|
||||
})
|
||||
|
||||
t.Run("should return no error when data source uid is missing", func(t *testing.T) {
|
||||
json, _ := simplejson.NewJson(mixedQueryMissingUID)
|
||||
_, err := FindDataSourceUIDs(json)
|
||||
require.NoError(t, err)
|
||||
})
|
||||
|
||||
t.Run("should return no error when data source property is missing", func(t *testing.T) {
|
||||
json, _ := simplejson.NewJson(mixedQueryMissingDatasource)
|
||||
_, err := FindDataSourceUIDs(json)
|
||||
require.NoError(t, err)
|
||||
})
|
||||
|
||||
t.Run("should find data source uids in mixed queries", func(t *testing.T) {
|
||||
json, _ := simplejson.NewJson(mixedQuery)
|
||||
uids, err := FindDataSourceUIDs(json)
|
||||
require.NoError(t, err)
|
||||
require.Len(t, uids, 2)
|
||||
require.Equal(t, uids[0], "gdev-elasticsearch")
|
||||
require.Equal(t, uids[1], "gdev-loki")
|
||||
})
|
||||
|
||||
t.Run("should find data source uids in non-mixed queries", func(t *testing.T) {
|
||||
json, _ := simplejson.NewJson(nonMixedQuery)
|
||||
uids, err := FindDataSourceUIDs(json)
|
||||
require.NoError(t, err)
|
||||
require.Len(t, uids, 1)
|
||||
require.Equal(t, uids[0], "gdev-loki")
|
||||
})
|
||||
|
||||
t.Run("should remove duplicated uids", func(t *testing.T) {
|
||||
json, _ := simplejson.NewJson(mixedQueryWithDuplicates)
|
||||
uids, err := FindDataSourceUIDs(json)
|
||||
require.NoError(t, err)
|
||||
require.Len(t, uids, 2)
|
||||
require.Equal(t, uids[0], "gdev-elasticsearch")
|
||||
require.Equal(t, uids[1], "gdev-loki")
|
||||
})
|
||||
}
|
@ -261,4 +261,28 @@ func TestIntegrationGetQueriesFromQueryHistory(t *testing.T) {
|
||||
require.Equal(t, 200, resp.Status())
|
||||
require.Equal(t, 0, response.Result.TotalCount)
|
||||
})
|
||||
|
||||
testScenarioWithMixedQueriesInQueryHistory(t, "When users tries to get queries with mixed data source it should return correct queries",
|
||||
func(t *testing.T, sc scenarioContext) {
|
||||
sc.reqContext.Req.Form.Add("to", strconv.FormatInt(sc.service.now().UnixMilli()-60, 10))
|
||||
sc.reqContext.Req.Form.Add("from", strconv.FormatInt(sc.service.now().UnixMilli()+60, 10))
|
||||
sc.reqContext.Req.Form.Add("datasourceUid", testDsUID1)
|
||||
|
||||
resp := sc.service.searchHandler(sc.reqContext)
|
||||
var response QueryHistorySearchResponse
|
||||
err := json.Unmarshal(resp.Body(), &response)
|
||||
require.NoError(t, err)
|
||||
|
||||
require.Equal(t, 200, resp.Status())
|
||||
require.Equal(t, 1, response.Result.TotalCount)
|
||||
|
||||
sc.reqContext.Req.Form.Set("datasourceUid", testDsUID2)
|
||||
|
||||
resp = sc.service.searchHandler(sc.reqContext)
|
||||
err = json.Unmarshal(resp.Body(), &response)
|
||||
require.NoError(t, err)
|
||||
|
||||
require.Equal(t, 200, resp.Status())
|
||||
require.Equal(t, 2, response.Result.TotalCount)
|
||||
})
|
||||
}
|
||||
|
@ -108,8 +108,10 @@ func testScenarioWithQueryInQueryHistory(t *testing.T, desc string, fn func(t *t
|
||||
testScenario(t, desc, func(t *testing.T, sc scenarioContext) {
|
||||
command := CreateQueryInQueryHistoryCommand{
|
||||
DatasourceUID: testDsUID1,
|
||||
Queries: simplejson.NewFromAny(map[string]any{
|
||||
"expr": "test",
|
||||
Queries: simplejson.NewFromAny([]interface{}{
|
||||
map[string]any{
|
||||
"expr": "test",
|
||||
},
|
||||
}),
|
||||
}
|
||||
sc.reqContext.Req.Body = mockRequestBody(command)
|
||||
@ -127,8 +129,10 @@ func testScenarioWithMultipleQueriesInQueryHistory(t *testing.T, desc string, fn
|
||||
sc.service.now = func() time.Time { return start }
|
||||
command1 := CreateQueryInQueryHistoryCommand{
|
||||
DatasourceUID: testDsUID1,
|
||||
Queries: simplejson.NewFromAny(map[string]any{
|
||||
"expr": "test",
|
||||
Queries: simplejson.NewFromAny([]interface{}{
|
||||
map[string]any{
|
||||
"expr": "test",
|
||||
},
|
||||
}),
|
||||
}
|
||||
sc.reqContext.Req.Body = mockRequestBody(command1)
|
||||
@ -144,8 +148,10 @@ func testScenarioWithMultipleQueriesInQueryHistory(t *testing.T, desc string, fn
|
||||
sc.service.now = func() time.Time { return start.Add(time.Second) }
|
||||
command2 := CreateQueryInQueryHistoryCommand{
|
||||
DatasourceUID: testDsUID1,
|
||||
Queries: simplejson.NewFromAny(map[string]any{
|
||||
"expr": "test2",
|
||||
Queries: simplejson.NewFromAny([]interface{}{
|
||||
map[string]any{
|
||||
"expr": "test2",
|
||||
},
|
||||
}),
|
||||
}
|
||||
sc.reqContext.Req.Body = mockRequestBody(command2)
|
||||
@ -157,8 +163,10 @@ func testScenarioWithMultipleQueriesInQueryHistory(t *testing.T, desc string, fn
|
||||
sc.service.now = func() time.Time { return start.Add(2 * time.Second) }
|
||||
command3 := CreateQueryInQueryHistoryCommand{
|
||||
DatasourceUID: testDsUID2,
|
||||
Queries: simplejson.NewFromAny(map[string]any{
|
||||
"expr": "test2",
|
||||
Queries: simplejson.NewFromAny([]interface{}{
|
||||
map[string]any{
|
||||
"expr": "test2",
|
||||
},
|
||||
}),
|
||||
}
|
||||
sc.reqContext.Req.Body = mockRequestBody(command3)
|
||||
@ -171,6 +179,48 @@ func testScenarioWithMultipleQueriesInQueryHistory(t *testing.T, desc string, fn
|
||||
})
|
||||
}
|
||||
|
||||
// Creates a scenario where 2 queries are added to the db
|
||||
// mixed with testDs1 & testDs2
|
||||
// non-mixed with testDs2
|
||||
func testScenarioWithMixedQueriesInQueryHistory(t *testing.T, desc string, fn func(t *testing.T, sc scenarioContext)) {
|
||||
t.Helper()
|
||||
|
||||
testScenario(t, desc, func(t *testing.T, sc scenarioContext) {
|
||||
start := time.Now()
|
||||
sc.service.now = func() time.Time { return start }
|
||||
command1 := CreateQueryInQueryHistoryCommand{
|
||||
DatasourceUID: "-- Mixed --",
|
||||
Queries: simplejson.NewFromAny([]interface{}{
|
||||
map[string]any{
|
||||
"datasource": map[string]any{"uid": testDsUID1},
|
||||
"expr": "test",
|
||||
},
|
||||
map[string]any{
|
||||
"datasource": map[string]any{"uid": testDsUID2},
|
||||
"expr": "test",
|
||||
},
|
||||
}),
|
||||
}
|
||||
sc.reqContext.Req.Body = mockRequestBody(command1)
|
||||
resp1 := sc.service.createHandler(sc.reqContext)
|
||||
sc.initialResult = validateAndUnMarshalResponse(t, resp1)
|
||||
|
||||
command2 := CreateQueryInQueryHistoryCommand{
|
||||
DatasourceUID: testDsUID2,
|
||||
Queries: simplejson.NewFromAny([]interface{}{
|
||||
map[string]any{
|
||||
"datasource": map[string]any{"uid": testDsUID2},
|
||||
"expr": "test2",
|
||||
},
|
||||
}),
|
||||
}
|
||||
sc.reqContext.Req.Body = mockRequestBody(command2)
|
||||
sc.service.createHandler(sc.reqContext)
|
||||
|
||||
fn(t, sc)
|
||||
})
|
||||
}
|
||||
|
||||
func mockRequestBody(v any) io.ReadCloser {
|
||||
b, _ := json.Marshal(v)
|
||||
return io.NopCloser(bytes.NewReader(b))
|
||||
|
@ -39,10 +39,18 @@ func writeFiltersSQL(query SearchInQueryHistoryQuery, user *user.SignedInUser, s
|
||||
sql.WriteString(" WHERE query_history.org_id = ? AND query_history.created_by = ? AND query_history.created_at >= ? AND query_history.created_at <= ? AND (query_history.queries " + sqlStore.GetDialect().LikeStr() + " ? OR query_history.comment " + sqlStore.GetDialect().LikeStr() + " ?) ")
|
||||
|
||||
if len(query.DatasourceUIDs) > 0 {
|
||||
q := "?" + strings.Repeat(",?", len(query.DatasourceUIDs)-1)
|
||||
for _, uid := range query.DatasourceUIDs {
|
||||
params = append(params, uid)
|
||||
}
|
||||
sql.WriteString(" AND query_history.datasource_uid IN (? " + strings.Repeat(",?", len(query.DatasourceUIDs)-1) + ") ")
|
||||
for _, uid := range query.DatasourceUIDs {
|
||||
params = append(params, uid)
|
||||
}
|
||||
sql.WriteString(" AND (")
|
||||
sql.WriteString("(query_history.datasource_uid IN (" + q + "))")
|
||||
sql.WriteString(" OR ")
|
||||
sql.WriteString("(query_history.uid IN (SELECT i.query_history_item_uid from query_history_details i WHERE i.datasource_uid IN (" + q + ")))")
|
||||
sql.WriteString(")")
|
||||
}
|
||||
builder.Write(sql.String(), params...)
|
||||
}
|
||||
|
@ -29,4 +29,14 @@ func addQueryHistoryMigrations(mg *Migrator) {
|
||||
mg.AddMigration("alter table query_history alter column created_by type to bigint", NewRawSQLMigration("").
|
||||
Mysql("ALTER TABLE query_history MODIFY created_by BIGINT;").
|
||||
Postgres("ALTER TABLE query_history ALTER COLUMN created_by TYPE BIGINT;"))
|
||||
|
||||
queryHistoryDetailsV1 := Table{
|
||||
Name: "query_history_details",
|
||||
Columns: []*Column{
|
||||
{Name: "id", Type: DB_BigInt, Nullable: false, IsPrimaryKey: true, IsAutoIncrement: true},
|
||||
{Name: "query_history_item_uid", Type: DB_NVarchar, Length: 40, Nullable: false},
|
||||
{Name: "datasource_uid", Type: DB_NVarchar, Length: 40, Nullable: false},
|
||||
},
|
||||
}
|
||||
mg.AddMigration("create query_history_details table v1", NewAddTableMigration(queryHistoryDetailsV1))
|
||||
}
|
||||
|
@ -156,13 +156,9 @@ export function RichHistoryQueriesTab(props: RichHistoryQueriesTabProps) {
|
||||
// eslint-disable-next-line react-hooks/exhaustive-deps
|
||||
}, []);
|
||||
|
||||
// whenever the filter changes, get all datasource information for the filtered datasources
|
||||
const { value: datasourceFilterApis, loading: loadingDs } = useAsync(async () => {
|
||||
const datasourcesToGet =
|
||||
richHistorySearchFilters?.datasourceFilters && richHistorySearchFilters?.datasourceFilters.length > 0
|
||||
? richHistorySearchFilters?.datasourceFilters
|
||||
: listOfDatasources.map((ds) => ds.uid);
|
||||
const dsGetProm = await datasourcesToGet.map(async (dsf) => {
|
||||
const datasourcesToGet = listOfDatasources.map((ds) => ds.uid);
|
||||
const dsGetProm = datasourcesToGet.map(async (dsf) => {
|
||||
try {
|
||||
// this get works off datasource names
|
||||
return getDataSourceSrv().get(dsf);
|
||||
|
Loading…
Reference in New Issue
Block a user