package expr

import (
	"context"
	"encoding/json"
	"fmt"
	"time"

	"github.com/grafana/grafana-plugin-sdk-go/backend"
	"github.com/grafana/grafana-plugin-sdk-go/data"
	"go.opentelemetry.io/otel/attribute"
	"gonum.org/v1/gonum/graph/simple"

	"github.com/grafana/grafana/pkg/expr/classic"
	"github.com/grafana/grafana/pkg/expr/mathexp"
	"github.com/grafana/grafana/pkg/infra/log"
	"github.com/grafana/grafana/pkg/services/datasources"
	"github.com/grafana/grafana/pkg/services/featuremgmt"
	"github.com/grafana/grafana/pkg/services/pluginsintegration/adapters"
)

var (
	logger = log.New("expr")
)

// QueryError is an error returned when the data source query identified by
// RefID fails.
type QueryError struct {
	RefID string
	Err   error
}

func (e QueryError) Error() string {
	return fmt.Sprintf("failed to execute query %s: %s", e.RefID, e.Err)
}

func (e QueryError) Unwrap() error {
	return e.Err
}
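
// Hedged usage sketch (not part of this file): because QueryError implements
// Unwrap, callers outside this package can pull out the failing refID and the
// underlying error with the standard errors package, e.g.
//
//	var qe expr.QueryError
//	if errors.As(err, &qe) {
//		log.Printf("query %s failed: %v", qe.RefID, qe.Err)
//	}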

// baseNode includes common properties used across DPNodes.
type baseNode struct {
	id    int64
	refID string
}

type rawNode struct {
	RefID      string `json:"refId"`
	Query      map[string]interface{}
	QueryType  string
	TimeRange  TimeRange
	DataSource *datasources.DataSource
}

// GetCommandType reads the "type" field of the raw query and parses it into a
// CommandType.
func (rn *rawNode) GetCommandType() (c CommandType, err error) {
	rawType, ok := rn.Query["type"]
	if !ok {
		return c, fmt.Errorf("no expression command type in query for refId %v", rn.RefID)
	}
	typeString, ok := rawType.(string)
	if !ok {
		return c, fmt.Errorf("expected expression command type to be a string, got type %T", rawType)
	}
	return ParseCommandType(typeString)
}
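
// Hedged sketch (illustrative only): GetCommandType inspects nothing but the
// "type" key of the raw query map; the other keys shown here are assumptions
// for the example and are interpreted later by the command's unmarshaler.
//
//	rn := &rawNode{
//		RefID: "B",
//		Query: map[string]interface{}{
//			"type":       "math",
//			"expression": "$A > 3",
//		},
//	}
//	ct, err := rn.GetCommandType() // TypeMath, assuming "math" parses to that type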

// String returns a string representation of the node. In particular for
// %v formatting in error messages.
func (b *baseNode) String() string {
	return b.refID
}

// CMDNode is a DPNode that holds an expression command.
type CMDNode struct {
	baseNode
	CMDType CommandType
	Command Command
}

// ID returns the id of the node so it can fulfill gonum's graph Node interface.
func (b *baseNode) ID() int64 {
	return b.id
}

// RefID returns the refId of the node.
func (b *baseNode) RefID() string {
	return b.refID
}

// NodeType returns the data pipeline node type.
func (gn *CMDNode) NodeType() NodeType {
	return TypeCMDNode
}

// Execute runs the node and adds the results to vars. If the node requires
// other nodes they must have already been executed and their results must
// already be in vars.
func (gn *CMDNode) Execute(ctx context.Context, now time.Time, vars mathexp.Vars, s *Service) (mathexp.Results, error) {
	return gn.Command.Execute(ctx, now, vars, s.tracer)
}

// buildCMDNode creates a CMDNode from a rawNode, registering a new graph node
// and unmarshaling the raw query into the matching expression command.
func buildCMDNode(dp *simple.DirectedGraph, rn *rawNode) (*CMDNode, error) {
	commandType, err := rn.GetCommandType()
	if err != nil {
		return nil, fmt.Errorf("invalid command type in expression '%v': %w", rn.RefID, err)
	}

	node := &CMDNode{
		baseNode: baseNode{
			id:    dp.NewNode().ID(),
			refID: rn.RefID,
		},
		CMDType: commandType,
	}

	switch commandType {
	case TypeMath:
		node.Command, err = UnmarshalMathCommand(rn)
	case TypeReduce:
		node.Command, err = UnmarshalReduceCommand(rn)
	case TypeResample:
		node.Command, err = UnmarshalResampleCommand(rn)
	case TypeClassicConditions:
		node.Command, err = classic.UnmarshalConditionsCmd(rn.Query, rn.RefID)
	case TypeThreshold:
		node.Command, err = UnmarshalThresholdCommand(rn)
	default:
		return nil, fmt.Errorf("expression command type '%v' in expression '%v' not implemented", commandType, rn.RefID)
	}
	if err != nil {
		return nil, fmt.Errorf("failed to parse expression '%v': %w", rn.RefID, err)
	}

	return node, nil
}

const (
	defaultIntervalMS = int64(64)
	defaultMaxDP      = int64(5000)
)

// DSNode is a DPNode that holds a datasource request.
type DSNode struct {
	baseNode
	query      json.RawMessage
	datasource *datasources.DataSource

	orgID      int64
	queryType  string
	timeRange  TimeRange
	intervalMS int64
	maxDP      int64
	request    Request
}

// NodeType returns the data pipeline node type.
func (dn *DSNode) NodeType() NodeType {
	return TypeDatasourceNode
}

// buildDSNode creates a DSNode from a rawNode, validating the time range and
// reading optional intervalMs and maxDataPoints values from the raw query.
func (s *Service) buildDSNode(dp *simple.DirectedGraph, rn *rawNode, req *Request) (*DSNode, error) {
	if rn.TimeRange == nil {
		return nil, fmt.Errorf("time range must be specified for refID %s", rn.RefID)
	}
	encodedQuery, err := json.Marshal(rn.Query)
	if err != nil {
		return nil, err
	}

	dsNode := &DSNode{
		baseNode: baseNode{
			id:    dp.NewNode().ID(),
			refID: rn.RefID,
		},
		orgID:      req.OrgId,
		query:      json.RawMessage(encodedQuery),
		queryType:  rn.QueryType,
		intervalMS: defaultIntervalMS,
		maxDP:      defaultMaxDP,
		timeRange:  rn.TimeRange,
		request:    *req,
		datasource: rn.DataSource,
	}

	var floatIntervalMS float64
	if rawIntervalMS, ok := rn.Query["intervalMs"]; ok {
		if floatIntervalMS, ok = rawIntervalMS.(float64); !ok {
			return nil, fmt.Errorf("expected intervalMs to be a float64, got type %T for refId %v", rawIntervalMS, rn.RefID)
		}
		dsNode.intervalMS = int64(floatIntervalMS)
	}

	var floatMaxDP float64
	if rawMaxDP, ok := rn.Query["maxDataPoints"]; ok {
		if floatMaxDP, ok = rawMaxDP.(float64); !ok {
			return nil, fmt.Errorf("expected maxDataPoints to be a float64, got type %T for refId %v", rawMaxDP, rn.RefID)
		}
		dsNode.maxDP = int64(floatMaxDP)
	}

	return dsNode, nil
}
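
// Hedged sketch (illustrative only): the defaults above are only overridden
// when the raw query carries numeric "intervalMs" and "maxDataPoints" values;
// JSON numbers decode to float64, which is what the assertions expect.
//
//	rn.Query = map[string]interface{}{
//		"intervalMs":    float64(30000),
//		"maxDataPoints": float64(1000),
//	}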

// Execute runs the node and adds the results to vars. If the node requires
// other nodes they must have already been executed and their results must
// already be in vars.
func (dn *DSNode) Execute(ctx context.Context, now time.Time, _ mathexp.Vars, s *Service) (r mathexp.Results, e error) {
	logger := logger.FromContext(ctx).New("datasourceType", dn.datasource.Type, "queryRefId", dn.refID, "datasourceUid", dn.datasource.UID, "datasourceVersion", dn.datasource.Version)
	ctx, span := s.tracer.Start(ctx, "SSE.ExecuteDatasourceQuery")
	defer span.End()

	dsInstanceSettings, err := adapters.ModelToInstanceSettings(dn.datasource, s.decryptSecureJsonDataFn(ctx))
	if err != nil {
		return mathexp.Results{}, fmt.Errorf("%v: %w", "failed to convert datasource instance settings", err)
	}
	pc := backend.PluginContext{
		OrgID:                      dn.orgID,
		DataSourceInstanceSettings: dsInstanceSettings,
		PluginID:                   dn.datasource.Type,
		User:                       dn.request.User,
	}
	span.SetAttributes("datasource.type", dn.datasource.Type, attribute.Key("datasource.type").String(dn.datasource.Type))

	req := &backend.QueryDataRequest{
		PluginContext: pc,
		Queries: []backend.DataQuery{
			{
				RefID:         dn.refID,
				MaxDataPoints: dn.maxDP,
				Interval:      time.Duration(int64(time.Millisecond) * dn.intervalMS),
				JSON:          dn.query,
				TimeRange:     dn.timeRange.AbsoluteTime(now),
				QueryType:     dn.queryType,
			},
		},
		Headers: dn.request.Headers,
	}

	// Record the outcome for the debug log and the datasource request metric
	// once the query has finished.
	responseType := "unknown"
	respStatus := "success"
	var useDataplane bool
	defer func() {
		if e != nil {
			responseType = "error"
			respStatus = "failure"
		}
		logger.Debug("Data source queried", "responseType", responseType)

		s.metrics.dsRequests.WithLabelValues(respStatus, fmt.Sprintf("%t", useDataplane)).Inc()
	}()

	resp, err := s.dataService.QueryData(ctx, req)
	if err != nil {
		return mathexp.Results{}, err
	}

	vals := make([]mathexp.Value, 0)
	response, ok := resp.Responses[dn.refID]
	if !ok {
		if len(resp.Responses) > 0 {
			keys := make([]string, 0, len(resp.Responses))
			for refID := range resp.Responses {
				keys = append(keys, refID)
			}
			logger.Warn("Can't find response by refID. Return nodata", "responseRefIds", keys)
		}
		return mathexp.Results{Values: mathexp.Values{mathexp.NoData{}.New()}}, nil
	}

	if response.Error != nil {
		return mathexp.Results{}, QueryError{RefID: dn.refID, Err: response.Error}
	}

	var dt data.FrameType
	dt, useDataplane, _ = shouldUseDataplane(response.Frames, logger, s.features.IsEnabled(featuremgmt.FlagDisableSSEDataplane))
	if useDataplane {
		logger.Debug("Handling SSE data source query through dataplane", "datatype", dt)
		return handleDataplaneFrames(ctx, s.tracer, dt, response.Frames)
	}

	dataSource := dn.datasource.Type
	if isAllFrameVectors(dataSource, response.Frames) { // Prometheus Specific Handling
		vals, err = framesToNumbers(response.Frames)
		if err != nil {
			return mathexp.Results{}, fmt.Errorf("failed to read frames as numbers: %w", err)
		}
		responseType = "vector"
		return mathexp.Results{Values: vals}, nil
	}

	if len(response.Frames) == 1 {
		frame := response.Frames[0]
		// Handle Untyped NoData
		if len(frame.Fields) == 0 {
			return mathexp.Results{Values: mathexp.Values{mathexp.NoData{Frame: frame}}}, nil
		}

		// Handle Numeric Table
		if frame.TimeSeriesSchema().Type == data.TimeSeriesTypeNot && isNumberTable(frame) {
			numberSet, err := extractNumberSet(frame)
			if err != nil {
				return mathexp.Results{}, err
			}
			for _, n := range numberSet {
				vals = append(vals, n)
			}
			responseType = "number set"
			return mathexp.Results{
				Values: vals,
			}, nil
		}
	}

	for _, frame := range response.Frames {
		// Check for TimeSeriesTypeNot in InfluxDB queries. A data frame of this type will cause
		// the WideToMany() function to error out, which results in unhealthy alerts.
		// This check should be removed once inconsistencies in data source responses are solved.
		if frame.TimeSeriesSchema().Type == data.TimeSeriesTypeNot && dataSource == datasources.DS_INFLUXDB {
			logger.Warn("Ignoring InfluxDB data frame due to missing numeric fields")
			continue
		}
		series, err := WideToMany(frame)
		if err != nil {
			return mathexp.Results{}, err
		}
		for _, s := range series {
			vals = append(vals, s)
		}
	}

	responseType = "series set"
	return mathexp.Results{
		Values: vals, // TODO vals can be empty. Should we replace with no-data?
	}, nil
}

// isAllFrameVectors reports whether the frames of a Prometheus response are
// all instant-vector results, based on the resultType carried in each frame's
// custom metadata.
func isAllFrameVectors(datasourceType string, frames data.Frames) bool {
	if datasourceType != "prometheus" {
		return false
	}
	allVector := false
	for i, frame := range frames {
		if frame.Meta != nil && frame.Meta.Custom != nil {
			if sMap, ok := frame.Meta.Custom.(map[string]string); ok {
				if sMap != nil {
					if sMap["resultType"] == "vector" {
						if i != 0 && !allVector {
							break
						}
						allVector = true
					}
				}
			}
		}
	}
	return allVector
}
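
// Hedged sketch (assumed metadata shape, not part of this file): for this
// check to treat a frame as a vector result, its metadata would need to look
// roughly like the following.
//
//	frame.Meta = &data.FrameMeta{
//		Custom: map[string]string{"resultType": "vector"},
//	}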

// framesToNumbers converts frames that each hold a single numeric sample
// (such as Prometheus instant-vector results) into mathexp number values.
func framesToNumbers(frames data.Frames) ([]mathexp.Value, error) {
	vals := make([]mathexp.Value, 0, len(frames))
	for _, frame := range frames {
		if frame == nil {
			continue
		}
		if len(frame.Fields) == 2 && frame.Fields[0].Len() == 1 {
			// Can there be zero Len Field results that are being skipped?
			valueField := frame.Fields[1]
			if valueField.Type().Numeric() { // should be []float64
				val, err := valueField.FloatAt(0) // FloatAt should not err if numeric
				if err != nil {
					return nil, fmt.Errorf("failed to read value of frame [%v] (RefID %v) of type [%v] as float: %w", frame.Name, frame.RefID, valueField.Type(), err)
				}
				n := mathexp.NewNumber(frame.Name, valueField.Labels)
				n.SetValue(&val)
				vals = append(vals, n)
			}
		}
	}
	return vals, nil
}

// isNumberTable reports whether the frame is a numeric table: exactly one
// numeric field, with any remaining fields being strings.
func isNumberTable(frame *data.Frame) bool {
	if frame == nil || frame.Fields == nil {
		return false
	}
	numericCount := 0
	stringCount := 0
	otherCount := 0
	for _, field := range frame.Fields {
		fType := field.Type()
		switch {
		case fType.Numeric():
			numericCount++
		case fType == data.FieldTypeString || fType == data.FieldTypeNullableString:
			stringCount++
		default:
			otherCount++
		}
	}
	return numericCount == 1 && otherCount == 0
}
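
// Hedged sketch (illustrative only): a frame like the following counts as a
// number table, since it has exactly one numeric field plus string label
// columns.
//
//	frame := data.NewFrame("",
//		data.NewField("host", nil, []string{"a", "b"}),
//		data.NewField("value", nil, []float64{1, 2}),
//	)
//	// isNumberTable(frame) == true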

// extractNumberSet converts a numeric table frame into one mathexp Number per
// row, using the string columns as labels.
func extractNumberSet(frame *data.Frame) ([]mathexp.Number, error) {
	numericField := 0
	stringFieldIdxs := []int{}
	stringFieldNames := []string{}
	for i, field := range frame.Fields {
		fType := field.Type()
		switch {
		case fType.Numeric():
			numericField = i
		case fType == data.FieldTypeString || fType == data.FieldTypeNullableString:
			stringFieldIdxs = append(stringFieldIdxs, i)
			stringFieldNames = append(stringFieldNames, field.Name)
		}
	}
	numbers := make([]mathexp.Number, frame.Rows())

	for rowIdx := 0; rowIdx < frame.Rows(); rowIdx++ {
		val, _ := frame.FloatAt(numericField, rowIdx)
		var labels data.Labels
		for i := 0; i < len(stringFieldIdxs); i++ {
			if i == 0 {
				labels = make(data.Labels)
			}
			key := stringFieldNames[i] // TODO check for duplicate string column names
			val, _ := frame.ConcreteAt(stringFieldIdxs[i], rowIdx)
			labels[key] = val.(string) // TODO check assertion / return error
		}

		n := mathexp.NewNumber(frame.Fields[numericField].Name, labels)

		// The new value field's config points to the one in the original frame.
		n.Frame.Fields[0].Config = frame.Fields[numericField].Config
		n.SetValue(&val)

		numbers[rowIdx] = n
	}
	return numbers, nil
}

// WideToMany converts a data package wide type Frame to one or multiple Series. A series
// is created for each value type column of wide frame.
//
// This might not be a good idea long term, but works now as an adapter/shim.
func WideToMany(frame *data.Frame) ([]mathexp.Series, error) {
	tsSchema := frame.TimeSeriesSchema()
	if tsSchema.Type != data.TimeSeriesTypeWide {
		return nil, fmt.Errorf("input data must be a wide series but got type %s (input refid)", tsSchema.Type)
	}

	if len(tsSchema.ValueIndices) == 1 {
		s, err := mathexp.SeriesFromFrame(frame)
		if err != nil {
			return nil, err
		}
		return []mathexp.Series{s}, nil
	}

	series := []mathexp.Series{}
	for _, valIdx := range tsSchema.ValueIndices {
		l := frame.Rows()
		f := data.NewFrameOfFieldTypes(frame.Name, l, frame.Fields[tsSchema.TimeIndex].Type(), frame.Fields[valIdx].Type())
		f.Fields[0].Name = frame.Fields[tsSchema.TimeIndex].Name
		f.Fields[1].Name = frame.Fields[valIdx].Name

		// The new value field's config points to the one in the original frame.
		f.Fields[1].Config = frame.Fields[valIdx].Config

		if frame.Fields[valIdx].Labels != nil {
			f.Fields[1].Labels = frame.Fields[valIdx].Labels.Copy()
		}
		for i := 0; i < l; i++ {
			f.SetRow(i, frame.Fields[tsSchema.TimeIndex].CopyAt(i), frame.Fields[valIdx].CopyAt(i))
		}
		s, err := mathexp.SeriesFromFrame(f)
		if err != nil {
			return nil, err
		}
		series = append(series, s)
	}

	return series, nil
}
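
// Hedged sketch (illustrative only): a wide frame with one time field and two
// numeric value fields splits into two single-value series, one per value
// column, with labels and field configs carried over; t0 and t1 are assumed
// time.Time values.
//
//	wide := data.NewFrame("",
//		data.NewField("time", nil, []time.Time{t0, t1}),
//		data.NewField("cpu", data.Labels{"host": "a"}, []float64{1, 2}),
//		data.NewField("mem", data.Labels{"host": "a"}, []float64{3, 4}),
//	)
//	series, err := WideToMany(wide) // len(series) == 2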