SSE: Put data source query grouping behind feature flag (#74551)

The change was originally merged in commit 720d716 via PR https://github.com/grafana/grafana/pull/72935 with no flag.
The flag is: sseGroupByDatasource
This commit is contained in:
Kyle Brandt 2023-09-07 16:02:07 -04:00 committed by GitHub
parent ebdd2a72a8
commit 5cc737bb24
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23
7 changed files with 84 additions and 11 deletions

View File

@ -133,6 +133,7 @@ Experimental features might be changed or removed without prior notice.
| `noBasicRole` | Enables a new role that has no permissions by default |
| `angularDeprecationUI` | Display new Angular deprecation-related UI features |
| `dashgpt` | Enable AI powered features in dashboards |
| `sseGroupByDatasource` | Send query to the same datasource in a single request when using server side expressions |
## Development feature toggles

View File

@ -123,4 +123,5 @@ export interface FeatureToggles {
dashgpt?: boolean;
reportingRetries?: boolean;
newBrowseDashboards?: boolean;
sseGroupByDatasource?: boolean;
}

View File

@ -56,21 +56,24 @@ type DataPipeline []Node
func (dp *DataPipeline) execute(c context.Context, now time.Time, s *Service) (mathexp.Vars, error) {
vars := make(mathexp.Vars)
groupByDSFlag := s.features.IsEnabled(featuremgmt.FlagSseGroupByDatasource)
// Execute datasource nodes first, and grouped by datasource.
dsNodes := []*DSNode{}
for _, node := range *dp {
if node.NodeType() != TypeDatasourceNode {
continue
if groupByDSFlag {
dsNodes := []*DSNode{}
for _, node := range *dp {
if node.NodeType() != TypeDatasourceNode {
continue
}
dsNodes = append(dsNodes, node.(*DSNode))
}
dsNodes = append(dsNodes, node.(*DSNode))
}
if err := executeDSNodesGrouped(c, now, vars, s, dsNodes); err != nil {
return nil, err
if err := executeDSNodesGrouped(c, now, vars, s, dsNodes); err != nil {
return nil, err
}
}
for _, node := range *dp {
if node.NodeType() == TypeDatasourceNode {
if groupByDSFlag && node.NodeType() == TypeDatasourceNode {
continue // already executed via executeDSNodesGrouped
}
c, span := s.tracer.Start(c, "SSE.ExecuteNode")

View File

@ -290,8 +290,65 @@ func executeDSNodesGrouped(ctx context.Context, now time.Time, vars mathexp.Vars
// other nodes they must have already been executed and their results must
// already be in vars.
func (dn *DSNode) Execute(ctx context.Context, now time.Time, _ mathexp.Vars, s *Service) (r mathexp.Results, e error) {
panic("Execute called on DSNode and should not be")
// Datasource queries are sent as a group to the datasource, see executeDSNodesGrouped.
logger := logger.FromContext(ctx).New("datasourceType", dn.datasource.Type, "queryRefId", dn.refID, "datasourceUid", dn.datasource.UID, "datasourceVersion", dn.datasource.Version)
ctx, span := s.tracer.Start(ctx, "SSE.ExecuteDatasourceQuery")
defer span.End()
pCtx, err := s.pCtxProvider.GetWithDataSource(ctx, dn.datasource.Type, dn.request.User, dn.datasource)
if err != nil {
return mathexp.Results{}, err
}
span.SetAttributes("datasource.type", dn.datasource.Type, attribute.Key("datasource.type").String(dn.datasource.Type))
span.SetAttributes("datasource.uid", dn.datasource.UID, attribute.Key("datasource.uid").String(dn.datasource.UID))
req := &backend.QueryDataRequest{
PluginContext: pCtx,
Queries: []backend.DataQuery{
{
RefID: dn.refID,
MaxDataPoints: dn.maxDP,
Interval: time.Duration(int64(time.Millisecond) * dn.intervalMS),
JSON: dn.query,
TimeRange: dn.timeRange.AbsoluteTime(now),
QueryType: dn.queryType,
},
},
Headers: dn.request.Headers,
}
responseType := "unknown"
respStatus := "success"
defer func() {
if e != nil {
responseType = "error"
respStatus = "failure"
span.AddEvents([]string{"error", "message"},
[]tracing.EventValue{
{Str: fmt.Sprintf("%v", err)},
{Str: "failed to query data source"},
})
}
logger.Debug("Data source queried", "responseType", responseType)
useDataplane := strings.HasPrefix(responseType, "dataplane-")
s.metrics.dsRequests.WithLabelValues(respStatus, fmt.Sprintf("%t", useDataplane), dn.datasource.Type).Inc()
}()
resp, err := s.dataService.QueryData(ctx, req)
if err != nil {
return mathexp.Results{}, MakeQueryError(dn.refID, dn.datasource.UID, err)
}
dataFrames, err := getResponseFrame(resp, dn.refID)
if err != nil {
return mathexp.Results{}, MakeQueryError(dn.refID, dn.datasource.UID, err)
}
var result mathexp.Results
responseType, result, err = convertDataFramesToResults(ctx, dataFrames, dn.datasource.Type, s, logger)
if err != nil {
err = MakeConversionError(dn.refID, err)
}
return result, err
}
func getResponseFrame(resp *backend.QueryDataResponse, refID string) (data.Frames, error) {

View File

@ -731,5 +731,11 @@ var (
Owner: grafanaFrontendPlatformSquad,
FrontendOnly: true,
},
{
Name: "sseGroupByDatasource",
Description: "Send query to the same datasource in a single request when using server side expressions",
Stage: FeatureStageExperimental,
Owner: grafanaObservabilityMetricsSquad,
},
}
)

View File

@ -104,3 +104,4 @@ angularDeprecationUI,experimental,@grafana/plugins-platform-backend,false,false,
dashgpt,experimental,@grafana/dashboards-squad,false,false,false,true
reportingRetries,preview,@grafana/sharing-squad,false,false,true,false
newBrowseDashboards,preview,@grafana/grafana-frontend-platform,false,false,false,true
sseGroupByDatasource,experimental,@grafana/observability-metrics,false,false,false,false

1 Name Stage Owner requiresDevMode RequiresLicense RequiresRestart FrontendOnly
104 dashgpt experimental @grafana/dashboards-squad false false false true
105 reportingRetries preview @grafana/sharing-squad false false true false
106 newBrowseDashboards preview @grafana/grafana-frontend-platform false false false true
107 sseGroupByDatasource experimental @grafana/observability-metrics false false false false

View File

@ -426,4 +426,8 @@ const (
// FlagNewBrowseDashboards
// New browse/manage dashboards UI
FlagNewBrowseDashboards = "newBrowseDashboards"
// FlagSseGroupByDatasource
// Send query to the same datasource in a single request when using server side expressions
FlagSseGroupByDatasource = "sseGroupByDatasource"
)