Expressions: Move GEL into core as expressions (#29072)

* comes from grafana/gel-app
* remove transform plugin code
* move __expr__ and -100 constants to expr pkg
* set OrgID on request plugin context
* use gtime for resample duration
* in resample, rename "rule" to "window", use gtime for duration, parse duration before exec
* remove gel entry from plugins-bundled/external.json
which creates an empty array for plugins
This commit is contained in:
Kyle Brandt 2020-11-19 07:17:00 -05:00 committed by GitHub
parent f01c3f35e1
commit 0cb29d337a
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23
44 changed files with 5770 additions and 401 deletions

1
go.mod
View File

@ -86,6 +86,7 @@ require (
golang.org/x/sync v0.0.0-20201020160332-67f06af15bc9
golang.org/x/sys v0.0.0-20201022201747-fb209a7c41cd // indirect
golang.org/x/tools v0.0.0-20201023150057-2f4fa188d925 // indirect
gonum.org/v1/gonum v0.6.0
google.golang.org/api v0.33.0
google.golang.org/appengine v1.6.7 // indirect
google.golang.org/genproto v0.0.0-20201022181438-0ff5f38871d5 // indirect

3
go.sum
View File

@ -1338,6 +1338,7 @@ golang.org/x/exp v0.0.0-20191227195350-da58074b4299/go.mod h1:2RIsYlXP63K8oxa1u0
golang.org/x/exp v0.0.0-20200119233911-0405dc783f0a/go.mod h1:2RIsYlXP63K8oxa1u096TMicItID8zy7Y6sNkU49FU4=
golang.org/x/exp v0.0.0-20200207192155-f17229e696bd/go.mod h1:J/WKrq2StrnmMY6+EHIKF9dgMWnmCNThgcyBT1FY9mM=
golang.org/x/exp v0.0.0-20200224162631-6cc2880d07d6/go.mod h1:3jZMyOhIsHpP37uCMkUooju7aAi5cS1Q23tOzKc+0MU=
golang.org/x/exp v0.0.0-20200331195152-e8c3332aa8e5 h1:FR+oGxGfbQu1d+jglI3rCkjAjUnhRSZcUxr+DqlDLNo=
golang.org/x/exp v0.0.0-20200331195152-e8c3332aa8e5/go.mod h1:4M0jN8W1tt0AVLNr8HDosyJCDCDuyL9N9+3m7wDWgKw=
golang.org/x/image v0.0.0-20180708004352-c73c2afc3b81/go.mod h1:ux5Hcp/YLpHSI86hEcLt0YII63i6oz57MZXIpbrjZUs=
golang.org/x/image v0.0.0-20190227222117-0694c2d4d067/go.mod h1:kZ7UVZpmo3dzQBMxlp+ypCbDeSB+sBbTgSJuh5dn5js=
@ -1623,8 +1624,10 @@ golang.org/x/xerrors v0.0.0-20200804184101-5ec99f83aff1 h1:go1bK/D/BFZV2I8cIQd1N
golang.org/x/xerrors v0.0.0-20200804184101-5ec99f83aff1/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0=
gonum.org/v1/gonum v0.0.0-20180816165407-929014505bf4/go.mod h1:Y+Yx5eoAFn32cQvJDxZx5Dpnq+c3wtXuadVZAcxbbBo=
gonum.org/v1/gonum v0.0.0-20181121035319-3f7ecaa7e8ca/go.mod h1:Y+Yx5eoAFn32cQvJDxZx5Dpnq+c3wtXuadVZAcxbbBo=
gonum.org/v1/gonum v0.6.0 h1:DJy6UzXbahnGUf1ujUNkh/NEtK14qMo2nvlBPs4U5yw=
gonum.org/v1/gonum v0.6.0/go.mod h1:9mxDZsDKxgMAuccQkewq682L+0eCu4dCN2yonUJTCLU=
gonum.org/v1/netlib v0.0.0-20181029234149-ec6d1f5cefe6/go.mod h1:wa6Ws7BG/ESfp6dHfk7C6KdzKA7wR7u/rKwOGE66zvw=
gonum.org/v1/netlib v0.0.0-20190313105609-8cb42192e0e0 h1:OE9mWmgKkjJyEmDAAtGMPjXu+YNeGvK9VTSHY6+Qihc=
gonum.org/v1/netlib v0.0.0-20190313105609-8cb42192e0e0/go.mod h1:wa6Ws7BG/ESfp6dHfk7C6KdzKA7wR7u/rKwOGE66zvw=
gonum.org/v1/plot v0.0.0-20190515093506-e2840ee46a6b/go.mod h1:Wt8AAjI+ypCyYX3nZBvf6cAIx93T+c/OS2HFAYskSZc=
google.golang.org/api v0.3.1/go.mod h1:6wY9I6uQWHQ8EM57III9mq/AjF+i8G65rmVagqKMtkk=

View File

@ -5,8 +5,8 @@ import (
"errors"
"sort"
"github.com/grafana/grafana/pkg/expr"
"github.com/grafana/grafana/pkg/models"
"github.com/grafana/grafana/pkg/plugins"
"github.com/grafana/grafana/pkg/api/dtos"
"github.com/grafana/grafana/pkg/bus"
@ -29,13 +29,13 @@ func (hs *HTTPServer) QueryMetricsV2(c *models.ReqContext, reqDto dtos.MetricReq
User: c.SignedInUser,
}
expr := false
hasExpr := false
var ds *models.DataSource
for i, query := range reqDto.Queries {
hs.log.Debug("Processing metrics query", "query", query)
name := query.Get("datasource").MustString("")
if name == "__expr__" {
expr = true
if name == expr.DatasourceName {
hasExpr = true
}
datasourceID, err := query.Get("datasourceId").Int64()
@ -44,7 +44,7 @@ func (hs *HTTPServer) QueryMetricsV2(c *models.ReqContext, reqDto dtos.MetricReq
return Error(400, "Query missing data source ID", nil)
}
if i == 0 && !expr {
if i == 0 && !hasExpr {
ds, err = hs.DatasourceCache.GetDatasource(datasourceID, c.SignedInUser, c.SkipCache)
if err != nil {
hs.log.Debug("Encountered error getting data source", "err", err)
@ -70,7 +70,7 @@ func (hs *HTTPServer) QueryMetricsV2(c *models.ReqContext, reqDto dtos.MetricReq
var resp *tsdb.Response
var err error
if !expr {
if !hasExpr {
resp, err = tsdb.HandleRequest(c.Req.Context(), ds, request)
if err != nil {
return Error(500, "Metric request error", err)
@ -80,7 +80,7 @@ func (hs *HTTPServer) QueryMetricsV2(c *models.ReqContext, reqDto dtos.MetricReq
return Error(404, "Expressions feature toggle is not enabled", nil)
}
resp, err = plugins.Transform.Transform(c.Req.Context(), request)
resp, err = expr.WrapTransformData(c.Req.Context(), request)
if err != nil {
return Error(500, "Transform request error", err)
}

263
pkg/expr/commands.go Normal file
View File

@ -0,0 +1,263 @@
package expr
import (
"context"
"fmt"
"strings"
"time"
"github.com/grafana/grafana-plugin-sdk-go/backend"
"github.com/grafana/grafana/pkg/components/gtime"
"github.com/grafana/grafana/pkg/expr/mathexp"
)
// Command is an interface for all expression commands.
type Command interface {
	// NeedsVars returns the refIDs of the variables this command depends on.
	NeedsVars() []string
	// Execute runs the command against the supplied variables and returns its results.
	Execute(c context.Context, vars mathexp.Vars) (mathexp.Results, error)
}
// MathCommand is a command for a math expression such as "1 + $GA / 2"
type MathCommand struct {
	// RawExpression is the original, unparsed expression string.
	RawExpression string
	// Expression is the parsed form of RawExpression.
	Expression *mathexp.Expr
}
// NewMathCommand parses expr into a new MathCommand, returning an error
// if the expression does not parse.
func NewMathCommand(expr string) (*MathCommand, error) {
	tree, err := mathexp.New(expr)
	if err != nil {
		return nil, err
	}
	cmd := &MathCommand{
		RawExpression: expr,
		Expression:    tree,
	}
	return cmd, nil
}
// UnmarshalMathCommand creates a MathCommand from Grafana's frontend query.
func UnmarshalMathCommand(rn *rawNode) (*MathCommand, error) {
	raw, ok := rn.Query["expression"]
	if !ok {
		return nil, fmt.Errorf("math command for refId %v is missing an expression", rn.RefID)
	}
	text, ok := raw.(string)
	if !ok {
		return nil, fmt.Errorf("expected math command for refId %v expression to be a string, got %T", rn.RefID, raw)
	}
	cmd, err := NewMathCommand(text)
	if err != nil {
		return nil, fmt.Errorf("invalid math command type in '%v': %v", rn.RefID, err)
	}
	return cmd, nil
}
// NeedsVars returns the variable names (refIds) that are dependencies
// to execute the command and allows the command to fulfill the Command interface.
func (gm *MathCommand) NeedsVars() []string {
	return gm.Expression.VarNames
}

// Execute runs the command and returns the results or an error if the command
// failed to execute.
// Note: ctx is currently unused by the math expression evaluator.
func (gm *MathCommand) Execute(ctx context.Context, vars mathexp.Vars) (mathexp.Results, error) {
	return gm.Expression.Execute(vars)
}
// ReduceCommand is an expression command for reduction of a timeseries such as a min, mean, or max.
type ReduceCommand struct {
	// Reducer is the name of the reduction function (e.g. "min", "mean", "max").
	Reducer string
	// VarToReduce is the refId of the series variable to reduce.
	VarToReduce string
}
// NewReduceCommand creates a new ReduceCMD.
func NewReduceCommand(reducer, varToReduce string) *ReduceCommand {
	// TODO: validate reducer here, before execution
	cmd := &ReduceCommand{
		Reducer:     reducer,
		VarToReduce: varToReduce,
	}
	return cmd
}
// UnmarshalReduceCommand creates a ReduceCommand from Grafana's frontend query.
func UnmarshalReduceCommand(rn *rawNode) (*ReduceCommand, error) {
	rawVar, ok := rn.Query["expression"]
	if !ok {
		return nil, fmt.Errorf("no variable specified to reduce for refId %v", rn.RefID)
	}
	varName, ok := rawVar.(string)
	if !ok {
		return nil, fmt.Errorf("expected reduce variable to be a string, got %T for refId %v", rawVar, rn.RefID)
	}
	// Strip the leading "$" so the name matches the refId it references.
	varName = strings.TrimPrefix(varName, "$")

	rawReducer, ok := rn.Query["reducer"]
	if !ok {
		return nil, fmt.Errorf("no reducer specified for refId %v", rn.RefID)
	}
	reducer, ok := rawReducer.(string)
	if !ok {
		return nil, fmt.Errorf("expected reducer to be a string, got %T for refId %v", rawReducer, rn.RefID)
	}
	return NewReduceCommand(reducer, varName), nil
}
// NeedsVars returns the variable names (refIds) that are dependencies
// to execute the command and allows the command to fulfill the Command interface.
func (gr *ReduceCommand) NeedsVars() []string {
	deps := []string{gr.VarToReduce}
	return deps
}

// Execute reduces each series in the referenced variable to a single number
// using the configured reducer. Only series values can be reduced.
func (gr *ReduceCommand) Execute(ctx context.Context, vars mathexp.Vars) (mathexp.Results, error) {
	results := mathexp.Results{}
	for _, val := range vars[gr.VarToReduce].Values {
		series, ok := val.(mathexp.Series)
		if !ok {
			return results, fmt.Errorf("can only reduce type series, got type %v", val.Type())
		}
		reduced, err := series.Reduce(gr.Reducer)
		if err != nil {
			return results, err
		}
		results.Values = append(results.Values, reduced)
	}
	return results, nil
}
// ResampleCommand is an expression command for resampling of a timeseries.
type ResampleCommand struct {
	// Window is the parsed resampling interval.
	Window time.Duration
	// VarToResample is the refId of the series variable to resample.
	VarToResample string
	// Downsampler is the function used when reducing points into a window.
	Downsampler string
	// Upsampler is the function used when filling windows with no points.
	Upsampler string
	// TimeRange bounds the resampled output.
	TimeRange backend.TimeRange
}
// NewResampleCommand creates a new ResampleCMD. rawWindow is parsed as a
// duration (e.g. "1m"); an error is returned if it does not parse.
func NewResampleCommand(rawWindow, varToResample string, downsampler string, upsampler string, tr backend.TimeRange) (*ResampleCommand, error) {
	// TODO: validate downsampler/upsampler here, before execution
	window, err := gtime.ParseDuration(rawWindow)
	if err != nil {
		// Report the raw input string; `window` is the zero duration at this
		// point (the original code passed it to %q, printing
		// `%!q(time.Duration=0)` instead of the user's value).
		return nil, fmt.Errorf(`failed to parse resample "window" duration field %q: %w`, rawWindow, err)
	}
	return &ResampleCommand{
		Window:        window,
		VarToResample: varToResample,
		Downsampler:   downsampler,
		Upsampler:     upsampler,
		TimeRange:     tr,
	}, nil
}
// UnmarshalResampleCommand creates a ResampleCMD from Grafana's frontend query.
// It expects "expression" (the $var to resample), "window" (a duration string),
// "downsampler", and "upsampler" keys in the raw query.
func UnmarshalResampleCommand(rn *rawNode) (*ResampleCommand, error) {
	rawVar, ok := rn.Query["expression"]
	if !ok {
		return nil, fmt.Errorf("no variable to resample for refId %v", rn.RefID)
	}
	varToResample, ok := rawVar.(string)
	if !ok {
		return nil, fmt.Errorf("expected resample input variable to be type string, but got type %T for refId %v", rawVar, rn.RefID)
	}
	// Strip the leading "$" so the name matches the refId it references.
	varToResample = strings.TrimPrefix(varToResample, "$")

	rawWindow, ok := rn.Query["window"]
	if !ok {
		return nil, fmt.Errorf("no time duration specified for the window in resample command for refId %v", rn.RefID)
	}
	window, ok := rawWindow.(string)
	if !ok {
		return nil, fmt.Errorf("expected resample window to be a string, got %T for refId %v", rawWindow, rn.RefID)
	}

	rawDownsampler, ok := rn.Query["downsampler"]
	if !ok {
		return nil, fmt.Errorf("no downsampler function specified in resample command for refId %v", rn.RefID)
	}
	// Report the type of the raw value; the original passed the zero-value
	// string result of the failed assertion, so %T always printed "string".
	downsampler, ok := rawDownsampler.(string)
	if !ok {
		return nil, fmt.Errorf("expected resample downsampler to be a string, got type %T for refId %v", rawDownsampler, rn.RefID)
	}

	rawUpsampler, ok := rn.Query["upsampler"]
	if !ok {
		// Original message said "downsampler" here by copy-paste.
		return nil, fmt.Errorf("no upsampler specified in resample command for refId %v", rn.RefID)
	}
	upsampler, ok := rawUpsampler.(string)
	if !ok {
		return nil, fmt.Errorf("expected resample upsampler to be a string, got type %T for refId %v", rawUpsampler, rn.RefID)
	}
	return NewResampleCommand(window, varToResample, downsampler, upsampler, rn.TimeRange)
}
// NeedsVars returns the variable names (refIds) that are dependencies
// to execute the command and allows the command to fulfill the Command interface.
func (gr *ResampleCommand) NeedsVars() []string {
	deps := []string{gr.VarToResample}
	return deps
}

// Execute resamples each series in the referenced variable onto the command's
// window, using the configured downsampler/upsampler over the time range.
func (gr *ResampleCommand) Execute(ctx context.Context, vars mathexp.Vars) (mathexp.Results, error) {
	results := mathexp.Results{}
	for _, val := range vars[gr.VarToResample].Values {
		series, ok := val.(mathexp.Series)
		if !ok {
			return results, fmt.Errorf("can only resample type series, got type %v", val.Type())
		}
		resampled, err := series.Resample(gr.Window, gr.Downsampler, gr.Upsampler, gr.TimeRange)
		if err != nil {
			return results, err
		}
		results.Values = append(results.Values, resampled)
	}
	return results, nil
}
// CommandType is the type of the expression command.
type CommandType int

const (
	// TypeUnknown is the CMDType for an unrecognized expression type.
	TypeUnknown CommandType = iota
	// TypeMath is the CMDType for a math expression.
	TypeMath
	// TypeReduce is the CMDType for a reduction expression.
	TypeReduce
	// TypeResample is the CMDType for a resampling expression.
	TypeResample
)

// String returns the lower-case name of the command type, or "unknown".
func (gt CommandType) String() string {
	names := map[CommandType]string{
		TypeMath:     "math",
		TypeReduce:   "reduce",
		TypeResample: "resample",
	}
	if name, ok := names[gt]; ok {
		return name
	}
	return "unknown"
}

// ParseCommandType returns a CommandType from its string representation.
func ParseCommandType(s string) (CommandType, error) {
	types := map[string]CommandType{
		"math":     TypeMath,
		"reduce":   TypeReduce,
		"resample": TypeResample,
	}
	ct, ok := types[s]
	if !ok {
		return TypeUnknown, fmt.Errorf("'%v' is not a recognized expression type", s)
	}
	return ct, nil
}

183
pkg/expr/graph.go Normal file
View File

@ -0,0 +1,183 @@
package expr
import (
"context"
"encoding/json"
"fmt"
"github.com/grafana/grafana-plugin-sdk-go/backend"
"github.com/grafana/grafana/pkg/expr/mathexp"
"gonum.org/v1/gonum/graph"
"gonum.org/v1/gonum/graph/simple"
"gonum.org/v1/gonum/graph/topo"
)
// NodeType is the type of a DPNode. Currently either a expression command or datasource query.
type NodeType int

const (
	// TypeCMDNode is a NodeType for expression commands.
	TypeCMDNode NodeType = iota
	// TypeDatasourceNode is a NodeType for datasource queries.
	TypeDatasourceNode
)

// Node is a node in a Data Pipeline. Node is either a expression command or a datasource query.
type Node interface {
	ID() int64 // ID() allows the gonum graph node interface to be fulfilled
	// NodeType reports whether this node is a command or a datasource query.
	NodeType() NodeType
	// RefID returns the node's query reference ID.
	RefID() string
	// Execute runs the node's query/command against the accumulated variables.
	Execute(c context.Context, vars mathexp.Vars) (mathexp.Results, error)
	String() string
}
// DataPipeline is an ordered set of nodes returned from DPGraph processing.
type DataPipeline []Node

// execute runs every node in order, feeding earlier results into later
// nodes, and returns a map from each node's refId to its results.
func (dp *DataPipeline) execute(c context.Context) (mathexp.Vars, error) {
	results := make(mathexp.Vars)
	for _, n := range *dp {
		res, err := n.Execute(c, results)
		if err != nil {
			return nil, err
		}
		results[n.RefID()] = res
	}
	return results, nil
}
// buildPipeline builds a dependency graph from the queries and returns the
// nodes in an executable (topologically sorted) order.
func buildPipeline(queries []backend.DataQuery) (DataPipeline, error) {
	g, err := buildDependencyGraph(queries)
	if err != nil {
		return nil, err
	}
	return buildExecutionOrder(g)
}
// buildDependencyGraph returns a dependency graph for a set of queries:
// one node per query, with edges from each dependency to its dependent.
func buildDependencyGraph(queries []backend.DataQuery) (*simple.DirectedGraph, error) {
	g, err := buildGraph(queries)
	if err != nil {
		return nil, err
	}
	if err := buildGraphEdges(g, buildNodeRegistry(g)); err != nil {
		return nil, err
	}
	return g, nil
}
// buildExecutionOrder returns a sequence of nodes ordered by dependency
// (topological sort); it errors if the graph contains a cycle.
func buildExecutionOrder(graph *simple.DirectedGraph) ([]Node, error) {
	sorted, err := topo.Sort(graph)
	if err != nil {
		return nil, err
	}
	order := make([]Node, 0, len(sorted))
	for _, gn := range sorted {
		order = append(order, gn.(Node))
	}
	return order, nil
}
// buildNodeRegistry returns a lookup table from reference ID to its node.
func buildNodeRegistry(g *simple.DirectedGraph) map[string]Node {
	registry := make(map[string]Node)
	it := g.Nodes()
	for it.Next() {
		n, ok := it.Node().(Node)
		if !ok {
			continue
		}
		registry[n.RefID()] = n
	}
	return registry
}
// buildGraph creates a new graph populated with one node per query:
// a command node for expression queries, a datasource node otherwise.
func buildGraph(queries []backend.DataQuery) (*simple.DirectedGraph, error) {
	dp := simple.NewDirectedGraph()
	for _, query := range queries {
		props := map[string]interface{}{}
		if err := json.Unmarshal(query.JSON, &props); err != nil {
			return nil, err
		}
		rn := &rawNode{
			Query:     props,
			RefID:     query.RefID,
			TimeRange: query.TimeRange,
			QueryType: query.QueryType,
		}
		dsName, err := rn.GetDatasourceName()
		if err != nil {
			return nil, err
		}
		var node graph.Node
		if dsName == DatasourceName {
			node, err = buildCMDNode(dp, rn)
		} else {
			// Anything that is not an expression query is a datasource query.
			node, err = buildDSNode(dp, rn)
		}
		if err != nil {
			return nil, err
		}
		dp.AddNode(node)
	}
	return dp, nil
}
// buildGraphEdges generates graph edges based on each command node's
// variable dependencies.
func buildGraphEdges(dp *simple.DirectedGraph, registry map[string]Node) error {
	it := dp.Nodes()
	for it.Next() {
		n := it.Node().(Node)
		if n.NodeType() != TypeCMDNode {
			// Datasource node, nothing to do for now. Although if we want
			// expression results to be used as datasource query params some
			// day this will need change.
			continue
		}
		cmd := n.(*CMDNode)
		for _, dep := range cmd.Command.NeedsVars() {
			depNode, ok := registry[dep]
			if !ok {
				return fmt.Errorf("unable to find dependent node '%v'", dep)
			}
			if depNode.ID() == cmd.ID() {
				return fmt.Errorf("can not add self referencing node for var '%v' ", dep)
			}
			dp.SetEdge(dp.NewEdge(depNode, cmd))
		}
	}
	return nil
}

536
pkg/expr/mathexp/exp.go Normal file
View File

@ -0,0 +1,536 @@
package mathexp
import (
"fmt"
"math"
"reflect"
"runtime"
"time"
"github.com/grafana/grafana-plugin-sdk-go/data"
"github.com/grafana/grafana/pkg/expr/mathexp/parse"
)
// Expr holds a parsed math command expression.
type Expr struct {
	*parse.Tree
}

// State embeds a parsed Expr with variables and their results
// so the expression can act on them.
type State struct {
	*Expr
	// Vars are the inputs available to the expression during the walk.
	Vars Vars
	// Could hold more properties that change behavior around:
	// - Unions (How many result A and many Result B in case A + B are joined)
	// - NaN/Null behavior
}

// Vars holds the results of datasource queries or other expression commands,
// keyed by the variable (refId) name referenced in the expression.
type Vars map[string]Results
// New creates a new expression tree from expr. The builtin function table is
// always appended to any caller-supplied function maps.
func New(expr string, funcs ...map[string]parse.Func) (*Expr, error) {
	tree, err := parse.Parse(expr, append(funcs, builtins)...)
	if err != nil {
		return nil, err
	}
	return &Expr{Tree: tree}, nil
}
// Execute applies a parse expression to the context and executes it
func (e *Expr) Execute(vars Vars) (r Results, err error) {
	s := &State{
		Expr: e,
		Vars: vars,
	}
	return e.executeState(s)
}

// executeState walks the parsed tree rooted at e.Tree.Root; panics raised
// during the walk are converted into the returned error by errRecover.
func (e *Expr) executeState(s *State) (r Results, err error) {
	defer errRecover(&err, s)
	r, err = s.walk(e.Tree.Root)
	return
}
// errRecover is the handler that turns panics into returns from the top
// level of Parse. A panic carrying an ordinary error is stored in *errp;
// runtime errors (programmer bugs) and non-error panic values are re-raised.
func errRecover(errp *error, s *State) {
	e := recover()
	if e != nil {
		switch err := e.(type) {
		case runtime.Error:
			// Re-panic: runtime errors indicate bugs, not user input problems.
			panic(e)
		case error:
			*errp = err
		default:
			panic(e)
		}
	}
}
// walk is the top level function to walk a parsed expression
// with its associated variables, dispatching on the node type.
func (e *State) walk(node parse.Node) (res Results, err error) {
	switch n := node.(type) {
	case *parse.ScalarNode:
		return NewScalarResults(&n.Float64), nil
	case *parse.VarNode:
		return e.Vars[n.Name], nil
	case *parse.BinaryNode:
		return e.walkBinary(n)
	case *parse.UnaryNode:
		return e.walkUnary(n)
	case *parse.FuncNode:
		return e.walkFunc(n)
	default:
		return res, fmt.Errorf("expr: can not walk node type: %s", node.Type())
	}
}
// walkUnary applies the node's unary operator to every value in the operand's
// results. Scalars, Numbers, and Series are supported; any other value type
// is an error.
func (e *State) walkUnary(node *parse.UnaryNode) (Results, error) {
	a, err := e.walk(node.Arg)
	if err != nil {
		return Results{}, err
	}
	newResults := Results{}
	for _, val := range a.Values {
		var newVal Value
		switch rt := val.(type) {
		case Scalar:
			// A nil scalar stays nil; otherwise apply the operator to its float.
			newVal = NewScalar(nil)
			f := rt.GetFloat64Value()
			if f != nil {
				newF, err := unaryOp(node.OpStr, *f)
				if err != nil {
					return newResults, err
				}
				newVal = NewScalar(&newF)
			}
		case Number:
			newVal, err = unaryNumber(rt, node.OpStr)
		case Series:
			newVal, err = unarySeries(rt, node.OpStr)
		default:
			return newResults, fmt.Errorf("can not perform a unary operation on type %v", rt.Type())
		}
		if err != nil {
			return newResults, err
		}
		newResults.Values = append(newResults.Values, newVal)
	}
	return newResults, nil
}
// unarySeries applies a unary operator point-by-point, preserving the shape
// of the input series; null values remain null.
func unarySeries(s Series, op string) (Series, error) {
	out := NewSeries(s.GetName(), s.GetLabels(), s.TimeIdx, s.TimeIsNullable, s.ValueIdx, s.ValueIsNullabe, s.Len())
	for i := 0; i < s.Len(); i++ {
		t, f := s.GetPoint(i)
		var result *float64
		if f != nil {
			v, err := unaryOp(op, *f)
			if err != nil {
				return out, err
			}
			result = &v
		}
		if err := out.SetPoint(i, t, result); err != nil {
			return out, err
		}
	}
	return out, nil
}
// unaryNumber applies a unary operator to a Number; a nil value stays nil.
func unaryNumber(n Number, op string) (Number, error) {
	out := NewNumber(n.GetName(), n.GetLabels())
	f := n.GetFloat64Value()
	if f == nil {
		return out, nil
	}
	v, err := unaryOp(op, *f)
	if err != nil {
		return out, err
	}
	out.SetValue(&v)
	return out, nil
}
// unaryOp performs a unary operation on a float. NaN input propagates as
// NaN. "!" is logical negation (0 -> 1, non-zero -> 0); "-" is arithmetic
// negation. Any other operator is an error.
func unaryOp(op string, a float64) (float64, error) {
	if math.IsNaN(a) {
		return math.NaN(), nil
	}
	switch op {
	case "!":
		if a == 0 {
			return 1, nil
		}
		return 0, nil
	case "-":
		return -a, nil
	}
	return 0, fmt.Errorf("expr: unknown unary operator %s", op)
}
// Union holds two Values whose labels are compatible (TODO: define compatible).
// It is an intermediate container for binary operations (e.g. A + B).
type Union struct {
	Labels data.Labels
	A, B   Value
}

// union creates Union objects based on the Labels attached to each Series or Number
// within a collection of Series or Numbers. The Unions are used with binary
// operations. The labels of the Union will be taken from the result with a greater
// number of tags.
func union(aResults, bResults Results) []*Union {
	unions := []*Union{}
	// No pairs are possible when either side is empty.
	if len(aResults.Values) == 0 || len(bResults.Values) == 0 {
		return unions
	}
	for _, a := range aResults.Values {
		for _, b := range bResults.Values {
			var labels data.Labels
			aLabels := a.GetLabels()
			bLabels := b.GetLabels()
			switch {
			case aLabels.Equals(bLabels) || len(aLabels) == 0 || len(bLabels) == 0:
				// Equal label sets, or one side unlabeled: keep the non-empty set.
				l := aLabels
				if len(aLabels) == 0 {
					l = bLabels
				}
				labels = l
			case len(aLabels) == len(bLabels):
				continue // invalid union, drop for now
			case aLabels.Contains(bLabels):
				// A is a strict superset of B: keep the more specific labels.
				labels = aLabels
			case bLabels.Contains(aLabels):
				labels = bLabels
			default:
				continue
			}
			u := &Union{
				Labels: labels,
				A:      a,
				B:      b,
			}
			unions = append(unions, u)
		}
	}
	if len(unions) == 0 && len(aResults.Values) == 1 && len(bResults.Values) == 1 {
		// In the case of only 1 thing on each side of the operator, we combine them
		// and strip the tags.
		// This isn't ideal for understanding behavior, but will make more stuff work when
		// combining different datasources without munging.
		// This choice is highly questionable in the long term.
		unions = append(unions, &Union{
			A: aResults.Values[0],
			B: bResults.Values[0],
		})
	}
	return unions
}
// walkBinary evaluates both operands of a binary node, pairs their results
// into label-compatible Unions, then applies the operator to each pair,
// dispatching on the concrete (Scalar/Number/Series) type of each side.
func (e *State) walkBinary(node *parse.BinaryNode) (Results, error) {
	res := Results{Values{}}
	ar, err := e.walk(node.Args[0])
	if err != nil {
		return res, err
	}
	br, err := e.walk(node.Args[1])
	if err != nil {
		return res, err
	}
	unions := union(ar, br)
	for _, uni := range unions {
		name := uni.Labels.String()
		var value Value
		switch at := uni.A.(type) {
		case Scalar:
			aFloat := at.GetFloat64Value()
			switch bt := uni.B.(type) {
			// Scalar op Scalar
			case Scalar:
				bFloat := bt.GetFloat64Value()
				// Either side nil -> nil scalar result.
				if aFloat == nil || bFloat == nil {
					value = NewScalar(nil)
					break
				}
				f := math.NaN()
				if aFloat != nil && bFloat != nil {
					f, err = binaryOp(node.OpStr, *aFloat, *bFloat)
					if err != nil {
						return res, err
					}
				}
				value = NewScalar(&f)
			// Scalar op Number
			case Number:
				value, err = biScalarNumber(name, uni.Labels, node.OpStr, bt, aFloat, false)
			// Scalar op Series
			case Series:
				value, err = biSeriesNumber(name, uni.Labels, node.OpStr, bt, aFloat, false)
			default:
				return res, fmt.Errorf("not implemented: binary %v on %T and %T", node.OpStr, uni.A, uni.B)
			}
		case Series:
			switch bt := uni.B.(type) {
			// Series op Scalar
			case Scalar:
				bFloat := bt.GetFloat64Value()
				value, err = biSeriesNumber(name, uni.Labels, node.OpStr, at, bFloat, true)
			// Series op Number
			case Number:
				bFloat := bt.GetFloat64Value()
				value, err = biSeriesNumber(name, uni.Labels, node.OpStr, at, bFloat, true)
			// Series op Series
			case Series:
				value, err = biSeriesSeries(name, uni.Labels, node.OpStr, at, bt)
			default:
				return res, fmt.Errorf("not implemented: binary %v on %T and %T", node.OpStr, uni.A, uni.B)
			}
		case Number:
			aFloat := at.GetFloat64Value()
			switch bt := uni.B.(type) {
			// Number op Scalar
			case Scalar:
				bFloat := bt.GetFloat64Value()
				value, err = biScalarNumber(name, uni.Labels, node.OpStr, at, bFloat, true)
			// Number op Number
			case Number:
				bFloat := bt.GetFloat64Value()
				value, err = biScalarNumber(name, uni.Labels, node.OpStr, at, bFloat, true)
			// Number op Series
			case Series:
				value, err = biSeriesNumber(name, uni.Labels, node.OpStr, bt, aFloat, false)
			default:
				return res, fmt.Errorf("not implemented: binary %v on %T and %T", node.OpStr, uni.A, uni.B)
			}
		default:
			return res, fmt.Errorf("not implemented: binary %v on %T and %T", node.OpStr, uni.A, uni.B)
		}
		if err != nil {
			return res, err
		}
		res.Values = append(res.Values, value)
	}
	return res, nil
}
// binaryOp performs a binary operation (e.g. A+B or A>B) on two float
// values. Comparison and logical operators return 1 for true, 0 for false.
// "||" and "&&" short-circuit on the left operand before the NaN check;
// otherwise NaN in either operand yields NaN. Unknown operators error.
// nolint:gocyclo
func binaryOp(op string, a, b float64) (float64, error) {
	toFloat := func(cond bool) float64 {
		if cond {
			return 1
		}
		return 0
	}
	// Short-circuit before the NaN test so e.g. 1 || NaN is still 1.
	if op == "||" && a != 0 {
		return 1, nil
	}
	if op == "&&" && a == 0 {
		return 0, nil
	}
	if math.IsNaN(a) || math.IsNaN(b) {
		return math.NaN(), nil
	}
	switch op {
	case "+":
		return a + b, nil
	case "*":
		return a * b, nil
	case "-":
		return a - b, nil
	case "/":
		return a / b, nil
	case "**":
		return math.Pow(a, b), nil
	case "%":
		return math.Mod(a, b), nil
	case "==":
		return toFloat(a == b), nil
	case ">":
		return toFloat(a > b), nil
	case "!=":
		return toFloat(a != b), nil
	case "<":
		return toFloat(a < b), nil
	case ">=":
		return toFloat(a >= b), nil
	case "<=":
		return toFloat(a <= b), nil
	case "||":
		return toFloat(a != 0 || b != 0), nil
	case "&&":
		return toFloat(a != 0 && b != 0), nil
	}
	return 0, fmt.Errorf("expr: unknown operator %s", op)
}
// biScalarNumber applies op between a Number and a scalar. numberFirst
// selects the operand order (number op scalar vs scalar op number). A nil
// on either side yields a nil-valued Number.
func biScalarNumber(name string, labels data.Labels, op string, number Number, scalarVal *float64, numberFirst bool) (Number, error) {
	out := NewNumber(name, labels)
	f := number.GetFloat64Value()
	if f == nil || scalarVal == nil {
		out.SetValue(nil)
		return out, nil
	}
	left, right := *f, *scalarVal
	if !numberFirst {
		left, right = right, left
	}
	v, err := binaryOp(op, left, right)
	if err != nil {
		return out, err
	}
	out.SetValue(&v)
	return out, nil
}
// biSeriesNumber applies op between each point of a Series and a scalar.
// seriesFirst selects the operand order. A nil point or nil scalar yields
// a nil point in the result.
func biSeriesNumber(name string, labels data.Labels, op string, s Series, scalarVal *float64, seriesFirst bool) (Series, error) {
	out := NewSeries(name, labels, s.TimeIdx, s.TimeIsNullable, s.ValueIdx, s.ValueIsNullabe, s.Len())
	for i := 0; i < s.Len(); i++ {
		t, f := s.GetPoint(i)
		if f == nil || scalarVal == nil {
			if err := out.SetPoint(i, t, nil); err != nil {
				return out, err
			}
			continue
		}
		left, right := *f, *scalarVal
		if !seriesFirst {
			left, right = right, left
		}
		v, err := binaryOp(op, left, right)
		if err != nil {
			return out, err
		}
		if err := out.SetPoint(i, t, &v); err != nil {
			return out, err
		}
	}
	return out, nil
}
// ... if would you like some series with your series and then get some series, or is that enough series?
// biSeriesSeries performs the binary operation for each value in the two series where the times
// are equal. If there are datapoints in A or B that do not share a time, they will be dropped.
func biSeriesSeries(name string, labels data.Labels, op string, aSeries, bSeries Series) (Series, error) {
	// Index B's points by timestamp for O(1) lookup while walking A.
	bPoints := make(map[time.Time]*float64)
	for i := 0; i < bSeries.Len(); i++ {
		t, f := bSeries.GetPoint(i)
		if t != nil {
			bPoints[*t] = f
		}
	}
	newSeries := NewSeries(name, labels, aSeries.TimeIdx, aSeries.TimeIsNullable || bSeries.TimeIsNullable, aSeries.ValueIdx, aSeries.ValueIsNullabe || bSeries.ValueIsNullabe, 0)
	for aIdx := 0; aIdx < aSeries.Len(); aIdx++ {
		aTime, aF := aSeries.GetPoint(aIdx)
		// NOTE(review): aTime is dereferenced without a nil check, unlike the
		// guarded B loop above — a null timestamp in A would panic here.
		// Confirm A cannot carry null times at this point.
		bF, ok := bPoints[*aTime]
		if !ok {
			// No matching time in B: drop the point.
			continue
		}
		if aF == nil || bF == nil {
			// Either side null -> null result point.
			if err := newSeries.AppendPoint(aIdx, aTime, nil); err != nil {
				return newSeries, err
			}
			continue
		}
		nF, err := binaryOp(op, *aF, *bF)
		if err != nil {
			return newSeries, err
		}
		if err := newSeries.AppendPoint(aIdx, aTime, &nF); err != nil {
			return newSeries, err
		}
	}
	return newSeries, nil
}
// walkFunc evaluates a function call node: each argument is resolved to a
// concrete Go value (sub-expressions are walked recursively), then the
// registered implementation is invoked via reflection with the State as
// its first argument.
func (e *State) walkFunc(node *parse.FuncNode) (Results, error) {
	var res Results
	var err error
	var in []reflect.Value
	for _, a := range node.Args {
		var v interface{}
		switch t := a.(type) {
		case *parse.StringNode:
			v = t.Text
		case *parse.VarNode:
			v = e.Vars[t.Name]
		case *parse.ScalarNode:
			v = t.Float64
		case *parse.FuncNode:
			v, err = e.walkFunc(t)
		case *parse.UnaryNode:
			v, err = e.walkUnary(t)
		case *parse.BinaryNode:
			v, err = e.walkBinary(t)
		default:
			return res, fmt.Errorf("expr: unknown func arg type: %T", t)
		}
		if err != nil {
			return res, err
		}
		in = append(in, reflect.ValueOf(v))
	}
	// Call F(e, args...). The first return value is the Results; an
	// optional second return value carries an error.
	f := reflect.ValueOf(node.F.F)
	fr := f.Call(append([]reflect.Value{reflect.ValueOf(e)}, in...))
	res = fr[0].Interface().(Results)
	if len(fr) > 1 && !fr[1].IsNil() {
		err := fr[1].Interface().(error)
		if err != nil {
			return res, err
		}
	}
	return res, nil
}

View File

@ -0,0 +1,423 @@
package mathexp
import (
"math"
"testing"
"github.com/google/go-cmp/cmp"
"github.com/grafana/grafana-plugin-sdk-go/data"
"github.com/stretchr/testify/assert"
)
// TestNaN exercises NaN propagation through unary and binary operations,
// including the short-circuit behavior of || and && against NaN.
func TestNaN(t *testing.T) {
	var tests = []struct {
		name      string
		expr      string
		vars      Vars
		newErrIs  assert.ErrorAssertionFunc
		execErrIs assert.ErrorAssertionFunc
		results   Results
		willPanic bool
	}{
		{
			name:      "unary !: Op Number(NaN) is NaN",
			expr:      "! $A",
			vars:      Vars{"A": Results{[]Value{makeNumber("", nil, NaN)}}},
			newErrIs:  assert.NoError,
			execErrIs: assert.NoError,
			results:   Results{[]Value{makeNumber("", nil, NaN)}},
		},
		{
			name: "unary -: Op Number(NaN) is NaN",
			// Fixed: this case previously duplicated "! $A" from the case
			// above and never exercised unary minus.
			expr:      "- $A",
			vars:      Vars{"A": Results{[]Value{makeNumber("", nil, NaN)}}},
			newErrIs:  assert.NoError,
			execErrIs: assert.NoError,
			results:   Results{[]Value{makeNumber("", nil, NaN)}},
		},
		{
			name:      "binary: Scalar Op(Non-AND/OR) Number(NaN) is NaN",
			expr:      "1 * $A",
			vars:      Vars{"A": Results{[]Value{makeNumber("", nil, NaN)}}},
			newErrIs:  assert.NoError,
			execErrIs: assert.NoError,
			results:   Results{[]Value{makeNumber("", nil, NaN)}},
		},
		{
			name:      "binary: Scalar Op(AND/OR) Number(NaN) is 0/1",
			expr:      "1 || $A",
			vars:      Vars{"A": Results{[]Value{makeNumber("", nil, NaN)}}},
			newErrIs:  assert.NoError,
			execErrIs: assert.NoError,
			results:   Results{[]Value{makeNumber("", nil, float64Pointer(1))}},
		},
		{
			name: "binary: Scalar Op(Non-AND/OR) Series(with NaN value) is NaN)",
			expr: "1 - $A",
			vars: Vars{
				"A": Results{
					[]Value{
						makeSeriesNullableTime("temp", nil, nullTimeTP{
							unixTimePointer(5, 0), float64Pointer(2),
						}, nullTimeTP{
							unixTimePointer(10, 0), NaN,
						}),
					},
				},
			},
			newErrIs:  assert.NoError,
			execErrIs: assert.NoError,
			results: Results{
				[]Value{
					makeSeriesNullableTime("", nil, nullTimeTP{
						unixTimePointer(5, 0), float64Pointer(-1),
					}, nullTimeTP{
						unixTimePointer(10, 0), NaN,
					}),
				},
			},
		},
		{
			name: "binary: Number Op(Non-AND/OR) Series(with NaN value) is Series with NaN",
			expr: "$A == $B",
			vars: Vars{
				"A": Results{
					[]Value{
						makeSeriesNullableTime("temp", nil, nullTimeTP{
							unixTimePointer(5, 0), float64Pointer(2),
						}, nullTimeTP{
							unixTimePointer(10, 0), NaN,
						}),
					},
				},
				"B": Results{[]Value{makeNumber("", nil, float64Pointer(0))}},
			},
			newErrIs:  assert.NoError,
			execErrIs: assert.NoError,
			results: Results{
				[]Value{
					makeSeriesNullableTime("", nil, nullTimeTP{
						unixTimePointer(5, 0), float64Pointer(0),
					}, nullTimeTP{
						unixTimePointer(10, 0), NaN,
					}),
				},
			},
		},
		{
			name: "binary: Number(NaN) Op Series(with NaN value) is Series with NaN",
			expr: "$A + $B",
			vars: Vars{
				"A": Results{
					[]Value{
						makeSeriesNullableTime("temp", nil, nullTimeTP{
							unixTimePointer(5, 0), float64Pointer(2),
						}, nullTimeTP{
							unixTimePointer(10, 0), NaN,
						}),
					},
				},
				"B": Results{[]Value{makeNumber("", nil, NaN)}},
			},
			newErrIs:  assert.NoError,
			execErrIs: assert.NoError,
			results: Results{
				[]Value{
					makeSeriesNullableTime("", nil, nullTimeTP{
						unixTimePointer(5, 0), NaN,
					}, nullTimeTP{
						unixTimePointer(10, 0), NaN,
					}),
				},
			},
		},
	}
	// Treat NaN == NaN as equal when diffing expected vs. actual frames.
	opt := cmp.Comparer(func(x, y float64) bool {
		return (math.IsNaN(x) && math.IsNaN(y)) || x == y
	})
	options := append([]cmp.Option{opt}, data.FrameTestCompareOptions()...)
	for _, tt := range tests {
		t.Run(tt.name, func(t *testing.T) {
			testBlock := func() {
				e, err := New(tt.expr)
				tt.newErrIs(t, err)
				if e != nil {
					res, err := e.Execute(tt.vars)
					tt.execErrIs(t, err)
					if diff := cmp.Diff(res, tt.results, options...); diff != "" {
						assert.FailNow(t, tt.name, diff)
					}
				}
			}
			if tt.willPanic {
				assert.Panics(t, testBlock)
			} else {
				assert.NotPanics(t, testBlock)
			}
		})
	}
}
func TestNullValues(t *testing.T) {
var tests = []struct {
name string
expr string
vars Vars
newErrIs assert.ErrorAssertionFunc
execErrIs assert.ErrorAssertionFunc
results Results
willPanic bool
}{
{
name: "scalar: unary ! null(): is null",
expr: "! null()",
newErrIs: assert.NoError,
execErrIs: assert.NoError,
results: NewScalarResults(nil),
},
{
name: "scalar: binary null() + null(): is null",
expr: "null() + null()",
newErrIs: assert.NoError,
execErrIs: assert.NoError,
results: NewScalarResults(nil),
},
{
name: "scalar: binary 1 + null(): is null",
expr: "1 + null()",
newErrIs: assert.NoError,
execErrIs: assert.NoError,
results: NewScalarResults(nil),
},
{
name: "series: unary with a null value in it has a null value in result",
expr: "- $A",
vars: Vars{
"A": Results{
[]Value{
makeSeriesNullableTime("", nil, nullTimeTP{
unixTimePointer(5, 0), float64Pointer(1),
}, nullTimeTP{
unixTimePointer(10, 0), nil,
}),
},
},
},
newErrIs: assert.NoError,
execErrIs: assert.NoError,
results: Results{
[]Value{
makeSeriesNullableTime("", nil, nullTimeTP{
unixTimePointer(5, 0), float64Pointer(-1),
}, nullTimeTP{
unixTimePointer(10, 0), nil,
}),
},
},
},
{
name: "series: binary with a null value in it has a null value in result",
expr: "$A - $A",
vars: Vars{
"A": Results{
[]Value{
makeSeriesNullableTime("", nil, nullTimeTP{
unixTimePointer(5, 0), float64Pointer(1),
}, nullTimeTP{
unixTimePointer(10, 0), nil,
}),
},
},
},
newErrIs: assert.NoError,
execErrIs: assert.NoError,
results: Results{
[]Value{
makeSeriesNullableTime("", nil, nullTimeTP{
unixTimePointer(5, 0), float64Pointer(0),
}, nullTimeTP{
unixTimePointer(10, 0), nil,
}),
},
},
},
{
name: "series and scalar: binary with a null value in it has a nil value in result",
expr: "$A - 1",
vars: Vars{
"A": Results{
[]Value{
makeSeriesNullableTime("", nil, nullTimeTP{
unixTimePointer(5, 0), float64Pointer(1),
}, nullTimeTP{
unixTimePointer(10, 0), nil,
}),
},
},
},
newErrIs: assert.NoError,
execErrIs: assert.NoError,
results: Results{
[]Value{
makeSeriesNullableTime("", nil, nullTimeTP{
unixTimePointer(5, 0), float64Pointer(0),
}, nullTimeTP{
unixTimePointer(10, 0), nil,
}),
},
},
},
{
name: "number: unary ! null number: is null",
expr: "! $A",
vars: Vars{
"A": Results{
[]Value{
makeNumber("", nil, nil),
},
},
},
newErrIs: assert.NoError,
execErrIs: assert.NoError,
results: Results{
[]Value{
makeNumber("", nil, nil),
},
},
},
{
name: "number: binary null number and null number: is null",
expr: "$A + $A",
vars: Vars{
"A": Results{
[]Value{
makeNumber("", nil, nil),
},
},
},
newErrIs: assert.NoError,
execErrIs: assert.NoError,
results: Results{
[]Value{
makeNumber("", nil, nil),
},
},
},
{
name: "number: binary non-null number and null number: is null",
expr: "$A * $B",
vars: Vars{
"A": Results{
[]Value{
makeNumber("", nil, nil),
},
},
"B": Results{
[]Value{
makeNumber("", nil, float64Pointer(1)),
},
},
},
newErrIs: assert.NoError,
execErrIs: assert.NoError,
results: Results{
[]Value{
makeNumber("", nil, nil),
},
},
},
{
name: "number and series: binary non-null number and series with a null: is null",
expr: "$A * $B",
vars: Vars{
"A": Results{
[]Value{
makeNumber("", nil, float64Pointer(1)),
},
},
"B": Results{
[]Value{
makeSeriesNullableTime("", nil, nullTimeTP{
unixTimePointer(5, 0), float64Pointer(1),
}, nullTimeTP{
unixTimePointer(10, 0), nil,
}),
},
},
},
newErrIs: assert.NoError,
execErrIs: assert.NoError,
results: Results{
[]Value{
makeSeriesNullableTime("", nil, nullTimeTP{
unixTimePointer(5, 0), float64Pointer(1),
}, nullTimeTP{
unixTimePointer(10, 0), nil,
}),
},
},
},
{
name: "number and series: binary null number and series with non-null and null: is null and null",
expr: "$A * $B",
vars: Vars{
"A": Results{
[]Value{
makeNumber("", nil, nil),
},
},
"B": Results{
[]Value{
makeSeriesNullableTime("", nil, nullTimeTP{
unixTimePointer(5, 0), float64Pointer(1),
}, nullTimeTP{
unixTimePointer(10, 0), nil,
}),
},
},
},
newErrIs: assert.NoError,
execErrIs: assert.NoError,
results: Results{
[]Value{
makeSeriesNullableTime("", nil, nullTimeTP{
unixTimePointer(5, 0), nil,
}, nullTimeTP{
unixTimePointer(10, 0), nil,
}),
},
},
},
}
// go-cmp instead of testify assert is used to compare results here
// because it supports an option for NaN equality.
// https://github.com/stretchr/testify/pull/691#issuecomment-528457166
opt := cmp.Comparer(func(x, y float64) bool {
return (math.IsNaN(x) && math.IsNaN(y)) || x == y
})
options := append([]cmp.Option{opt}, data.FrameTestCompareOptions()...)
for _, tt := range tests {
t.Run(tt.name, func(t *testing.T) {
testBlock := func() {
e, err := New(tt.expr)
tt.newErrIs(t, err)
if e != nil {
res, err := e.Execute(tt.vars)
tt.execErrIs(t, err)
if diff := cmp.Diff(tt.results, res, options...); diff != "" {
t.Errorf("Result mismatch (-want +got):\n%s", diff)
}
}
}
if tt.willPanic {
assert.Panics(t, testBlock)
} else {
testBlock()
}
})
}
}

View File

@ -0,0 +1,144 @@
package mathexp
import (
"math"
"testing"
"github.com/stretchr/testify/assert"
)
func TestScalarExpr(t *testing.T) {
var tests = []struct {
name string
expr string
vars Vars
newErrIs assert.ErrorAssertionFunc
execErrIs assert.ErrorAssertionFunc
resultIs assert.ComparisonAssertionFunc
Results Results
}{
{
name: "a scalar",
expr: "1",
vars: Vars{},
newErrIs: assert.NoError,
execErrIs: assert.NoError,
resultIs: assert.Equal,
Results: Results{[]Value{NewScalar(float64Pointer(1.0))}},
},
{
name: "unary: scalar",
expr: "! 1.2",
vars: Vars{},
newErrIs: assert.NoError,
execErrIs: assert.NoError,
resultIs: assert.Equal,
Results: Results{[]Value{NewScalar(float64Pointer(0.0))}},
},
{
name: "binary: scalar Op scalar",
expr: "1 + 1",
vars: Vars{},
newErrIs: assert.NoError,
execErrIs: assert.NoError,
resultIs: assert.Equal,
Results: Results{[]Value{NewScalar(float64Pointer(2.0))}},
},
{
name: "binary: scalar Op scalar - divide by zero",
expr: "1 / 0",
vars: Vars{},
newErrIs: assert.NoError,
execErrIs: assert.NoError,
resultIs: assert.Equal,
Results: Results{[]Value{NewScalar(float64Pointer(math.Inf(1)))}},
},
{
name: "binary: scalar Op number",
expr: "1 + $A",
vars: Vars{"A": Results{[]Value{makeNumber("temp", nil, float64Pointer(2.0))}}},
newErrIs: assert.NoError,
execErrIs: assert.NoError,
resultIs: assert.Equal,
Results: Results{[]Value{makeNumber("", nil, float64Pointer(3.0))}},
},
{
name: "binary: number Op Scalar",
expr: "$A - 3",
vars: Vars{"A": Results{[]Value{makeNumber("temp", nil, float64Pointer(2.0))}}},
newErrIs: assert.NoError,
execErrIs: assert.NoError,
resultIs: assert.Equal,
Results: Results{[]Value{makeNumber("", nil, float64Pointer(-1))}},
},
}
for _, tt := range tests {
t.Run(tt.name, func(t *testing.T) {
e, err := New(tt.expr)
tt.newErrIs(t, err)
if e != nil {
res, err := e.Execute(tt.vars)
tt.execErrIs(t, err)
tt.resultIs(t, tt.Results, res)
}
})
}
}
// TestNumberExpr exercises expression evaluation where the operands are
// Number values, including one case with a nil-valued Number that is
// currently expected to panic.
func TestNumberExpr(t *testing.T) {
	var tests = []struct {
		name      string
		expr      string
		vars      Vars
		newErrIs  assert.ErrorAssertionFunc // expected error from New()
		execErrIs assert.ErrorAssertionFunc // expected error from Execute()
		resultIs  assert.ComparisonAssertionFunc
		results   Results
		willPanic bool // when true, execution is asserted to panic
	}{
		{
			name:      "binary: number Op Scalar",
			expr:      "$A / $A",
			vars:      Vars{"A": Results{[]Value{makeNumber("temp", nil, float64Pointer(2.0))}}},
			newErrIs:  assert.NoError,
			execErrIs: assert.NoError,
			resultIs:  assert.Equal,
			results:   Results{[]Value{makeNumber("", nil, float64Pointer(1))}},
		},
		{
			name:      "unary: number",
			expr:      "- $A",
			vars:      Vars{"A": Results{[]Value{makeNumber("temp", nil, float64Pointer(2.0))}}},
			newErrIs:  assert.NoError,
			execErrIs: assert.NoError,
			resultIs:  assert.Equal,
			results:   Results{[]Value{makeNumber("", nil, float64Pointer(-2.0))}},
		},
		{
			name:      "binary: Scalar Op Number (Number will nil val) - currently Panics",
			expr:      "1 + $A",
			vars:      Vars{"A": Results{[]Value{makeNumber("", nil, nil)}}},
			willPanic: true,
		},
	}
	for _, tt := range tests {
		t.Run(tt.name, func(t *testing.T) {
			// testBlock parses and executes the expression, checking the
			// expected errors and comparing results.
			testBlock := func() {
				e, err := New(tt.expr)
				tt.newErrIs(t, err)
				if e != nil {
					res, err := e.Execute(tt.vars)
					tt.execErrIs(t, err)
					tt.resultIs(t, tt.results, res)
				}
			}
			if tt.willPanic {
				assert.Panics(t, testBlock)
			} else {
				assert.NotPanics(t, testBlock)
			}
		})
	}
}

View File

@ -0,0 +1,383 @@
package mathexp
import (
"testing"
"time"
"github.com/google/go-cmp/cmp"
"github.com/grafana/grafana-plugin-sdk-go/data"
"github.com/stretchr/testify/assert"
)
func TestSeriesExpr(t *testing.T) {
var tests = []struct {
name string
expr string
vars Vars
newErrIs assert.ErrorAssertionFunc
execErrIs assert.ErrorAssertionFunc
results Results
}{
{
name: "unary series",
expr: "! ! $A",
vars: aSeriesNullableTime,
newErrIs: assert.NoError,
execErrIs: assert.NoError,
results: Results{
[]Value{
makeSeriesNullableTime("", nil, nullTimeTP{ // Not sure about preservering names...
unixTimePointer(5, 0), float64Pointer(1),
}, nullTimeTP{
unixTimePointer(10, 0), float64Pointer(1),
}),
},
},
},
{
name: "binary scalar Op series",
expr: "98 + $A",
vars: aSeriesNullableTime,
newErrIs: assert.NoError,
execErrIs: assert.NoError,
results: Results{
[]Value{
makeSeriesNullableTime("", nil, nullTimeTP{ // Not sure about preservering names...
unixTimePointer(5, 0), float64Pointer(100),
}, nullTimeTP{
unixTimePointer(10, 0), float64Pointer(99),
}),
},
},
},
{
name: "binary series Op scalar",
expr: "$A + 98",
vars: aSeriesNullableTime,
newErrIs: assert.NoError,
execErrIs: assert.NoError,
results: Results{
[]Value{
makeSeriesNullableTime("", nil, nullTimeTP{ // Not sure about preservering names...
unixTimePointer(5, 0), float64Pointer(100),
}, nullTimeTP{
unixTimePointer(10, 0), float64Pointer(99),
}),
},
},
},
{
name: "series Op series",
expr: "$A + $A",
vars: aSeriesNullableTime,
newErrIs: assert.NoError,
execErrIs: assert.NoError,
results: Results{
[]Value{
makeSeriesNullableTime("", nil, nullTimeTP{ // Not sure about preservering names...
unixTimePointer(5, 0), float64Pointer(4),
}, nullTimeTP{
unixTimePointer(10, 0), float64Pointer(2),
}),
},
},
},
{
name: "series Op number",
expr: "$A + $B",
vars: aSeriesbNumber,
newErrIs: assert.NoError,
execErrIs: assert.NoError,
results: Results{
[]Value{
makeSeriesNullableTime("id=1", data.Labels{"id": "1"}, nullTimeTP{
unixTimePointer(5, 0), float64Pointer(9),
}, nullTimeTP{
unixTimePointer(10, 0), float64Pointer(8),
}),
},
},
},
{
name: "number Op series",
expr: "$B + $A",
vars: aSeriesbNumber,
newErrIs: assert.NoError,
execErrIs: assert.NoError,
results: Results{
[]Value{
makeSeriesNullableTime("id=1", data.Labels{"id": "1"}, nullTimeTP{
unixTimePointer(5, 0), float64Pointer(9),
}, nullTimeTP{
unixTimePointer(10, 0), float64Pointer(8),
}),
},
},
},
{
name: "series Op series with label union",
expr: "$A * $B",
vars: twoSeriesSets,
newErrIs: assert.NoError,
execErrIs: assert.NoError,
results: Results{
[]Value{
makeSeriesNullableTime("sensor=a, turbine=1", data.Labels{"sensor": "a", "turbine": "1"}, nullTimeTP{
unixTimePointer(5, 0), float64Pointer(6 * .5),
}, nullTimeTP{
unixTimePointer(10, 0), float64Pointer(8 * .2),
}),
makeSeriesNullableTime("sensor=b, turbine=1", data.Labels{"sensor": "b", "turbine": "1"}, nullTimeTP{
unixTimePointer(5, 0), float64Pointer(10 * .5),
}, nullTimeTP{
unixTimePointer(10, 0), float64Pointer(16 * .2),
}),
},
},
},
// Length of resulting series is A when A + B. However, only points where the time matches
// for A and B are added to the result
{
name: "series Op series with sparse time join",
expr: "$A + $B",
vars: Vars{
"A": Results{
[]Value{
makeSeriesNullableTime("temp", data.Labels{}, nullTimeTP{
unixTimePointer(5, 0), float64Pointer(1),
}, nullTimeTP{
unixTimePointer(10, 0), float64Pointer(2),
}),
},
},
"B": Results{
[]Value{
makeSeriesNullableTime("efficiency", data.Labels{}, nullTimeTP{
unixTimePointer(5, 0), float64Pointer(3),
}, nullTimeTP{
unixTimePointer(9, 0), float64Pointer(4),
}),
},
},
},
newErrIs: assert.NoError,
execErrIs: assert.NoError,
results: Results{
[]Value{
makeSeriesNullableTime("", nil, nullTimeTP{ // Not sure about preserving names...
unixTimePointer(5, 0), float64Pointer(4),
}),
},
},
},
}
for _, tt := range tests {
t.Run(tt.name, func(t *testing.T) {
e, err := New(tt.expr)
tt.newErrIs(t, err)
if e != nil {
res, err := e.Execute(tt.vars)
tt.execErrIs(t, err)
if diff := cmp.Diff(tt.results, res, data.FrameTestCompareOptions()...); diff != "" {
t.Errorf("Result mismatch (-want +got):\n%s", diff)
}
}
})
}
}
func TestSeriesAlternateFormsExpr(t *testing.T) {
var tests = []struct {
name string
expr string
vars Vars
newErrIs assert.ErrorAssertionFunc
execErrIs assert.ErrorAssertionFunc
results Results
}{
{
name: "unary series: non-nullable time",
expr: "! ! $A",
vars: aSeries,
newErrIs: assert.NoError,
execErrIs: assert.NoError,
results: Results{
[]Value{
makeSeries("", nil, tp{ // Not sure about preservering names...
time.Unix(5, 0), float64Pointer(1),
}, tp{
time.Unix(10, 0), float64Pointer(1),
}),
},
},
},
{
name: "unary series: non-nullable time, time second",
expr: "! ! $A",
vars: aSeriesTimeSecond,
newErrIs: assert.NoError,
execErrIs: assert.NoError,
results: Results{
[]Value{
makeSeriesTimeSecond("", nil, timeSecondTP{ // Not sure about preservering names...
float64Pointer(1), time.Unix(5, 0),
}, timeSecondTP{
float64Pointer(1), time.Unix(10, 0),
}),
},
},
},
{
name: "unary series: non-nullable value",
expr: "! ! $A",
vars: aSeriesNoNull,
newErrIs: assert.NoError,
execErrIs: assert.NoError,
results: Results{
[]Value{
makeNoNullSeries("", nil, noNullTP{ // Not sure about preservering names...
time.Unix(5, 0), 1,
}, noNullTP{
time.Unix(10, 0), 1,
}),
},
},
},
{
name: "series Op series: nullable and non-nullable time",
expr: "$A + $B",
vars: Vars{
"A": Results{
[]Value{
makeSeries("temp", data.Labels{}, tp{
time.Unix(5, 0), float64Pointer(1),
}, tp{
time.Unix(10, 0), float64Pointer(2),
}),
},
},
"B": Results{
[]Value{
makeSeriesNullableTime("efficiency", data.Labels{}, nullTimeTP{
unixTimePointer(5, 0), float64Pointer(3),
}, nullTimeTP{
unixTimePointer(10, 0), float64Pointer(4),
}),
},
},
},
newErrIs: assert.NoError,
execErrIs: assert.NoError,
results: Results{
[]Value{
makeSeriesNullableTime("", nil, nullTimeTP{
unixTimePointer(5, 0), float64Pointer(4),
}, nullTimeTP{
unixTimePointer(10, 0), float64Pointer(6),
}),
},
},
},
{
name: "series Op series: nullable (time second) and non-nullable time (time first)",
expr: "$B + $A", // takes order from first operator
vars: Vars{
"A": Results{
[]Value{
makeSeriesTimeSecond("temp", data.Labels{}, timeSecondTP{
float64Pointer(1), time.Unix(5, 0),
}, timeSecondTP{
float64Pointer(2), time.Unix(10, 0),
}),
},
},
"B": Results{
[]Value{
makeSeriesNullableTime("efficiency", data.Labels{}, nullTimeTP{
unixTimePointer(5, 0), float64Pointer(3),
}, nullTimeTP{
unixTimePointer(10, 0), float64Pointer(4),
}),
},
},
},
newErrIs: assert.NoError,
execErrIs: assert.NoError,
results: Results{
[]Value{
makeSeriesNullableTime("", nil, nullTimeTP{
unixTimePointer(5, 0), float64Pointer(4),
}, nullTimeTP{
unixTimePointer(10, 0), float64Pointer(6),
}),
},
},
},
{
name: "series Op series: nullable and non-nullable values",
expr: "$A + $B",
vars: Vars{
"A": Results{
[]Value{
makeSeries("temp", data.Labels{}, tp{
time.Unix(5, 0), float64Pointer(1),
}, tp{
time.Unix(10, 0), float64Pointer(2),
}),
},
},
"B": Results{
[]Value{
makeNoNullSeries("efficiency", data.Labels{}, noNullTP{
time.Unix(5, 0), 3,
}, noNullTP{
time.Unix(10, 0), 4,
}),
},
},
},
newErrIs: assert.NoError,
execErrIs: assert.NoError,
results: Results{
[]Value{
makeSeries("", nil, tp{
time.Unix(5, 0), float64Pointer(4),
}, tp{
time.Unix(10, 0), float64Pointer(6),
}),
},
},
},
{
name: "binary scalar Op series: non-nullable time second",
expr: "98 + $A",
vars: aSeriesTimeSecond,
newErrIs: assert.NoError,
execErrIs: assert.NoError,
results: Results{
[]Value{
makeSeriesTimeSecond("", nil, timeSecondTP{ // Not sure about preservering names...
float64Pointer(100), time.Unix(5, 0),
}, timeSecondTP{
float64Pointer(99), time.Unix(10, 0),
}),
},
},
},
}
for _, tt := range tests {
t.Run(tt.name, func(t *testing.T) {
e, err := New(tt.expr)
tt.newErrIs(t, err)
if e != nil {
res, err := e.Execute(tt.vars)
tt.execErrIs(t, err)
if diff := cmp.Diff(tt.results, res, data.FrameTestCompareOptions()...); diff != "" {
t.Errorf("Result mismatch (-want +got):\n%s", diff)
}
}
})
}
}

View File

@ -0,0 +1,181 @@
package mathexp
import (
"math"
"time"
"github.com/grafana/grafana-plugin-sdk-go/data"
)
// Common Test Constructor Utils and Types
// nullTimeTP is a (time, value) test point where both columns are nullable.
type nullTimeTP struct {
	t *time.Time
	f *float64
}

// tp is a (time, value) test point with a non-nullable time column.
type tp struct {
	t time.Time
	f *float64
}

// timeSecondTP is a (value, time) test point where time is the second
// column of the frame rather than the first.
type timeSecondTP struct {
	f *float64
	t time.Time
}

// noNullTP is a (time, value) test point where neither column is nullable.
type noNullTP struct {
	t time.Time
	f float64
}
// makeSeriesNullableTime constructs a Series with a nullable time column
// (index 0) and a nullable value column (index 1) from the given points.
// It panics if a point cannot be set, matching the behavior of the other
// test constructors (makeSeries, makeNoNullSeries, makeSeriesTimeSecond),
// which previously differed by silently discarding the error here.
func makeSeriesNullableTime(name string, labels data.Labels, points ...nullTimeTP) Series {
	newSeries := NewSeries(name, labels, 0, true, 1, true, len(points))
	for idx, p := range points {
		if err := newSeries.SetPoint(idx, p.t, p.f); err != nil {
			panic(err)
		}
	}
	return newSeries
}
// makeSeries builds a Series with a non-nullable time column (index 0)
// and a nullable value column (index 1), panicking on any SetPoint error.
func makeSeries(name string, labels data.Labels, points ...tp) Series {
	s := NewSeries(name, labels, 0, false, 1, true, len(points))
	for idx, p := range points {
		if err := s.SetPoint(idx, &p.t, p.f); err != nil {
			panic(err)
		}
	}
	return s
}
// makeNoNullSeries builds a Series where neither the time column (index 0)
// nor the value column (index 1) is nullable, panicking on SetPoint error.
func makeNoNullSeries(name string, labels data.Labels, points ...noNullTP) Series {
	s := NewSeries(name, labels, 0, false, 1, false, len(points))
	for idx, p := range points {
		if err := s.SetPoint(idx, &p.t, &p.f); err != nil {
			panic(err)
		}
	}
	return s
}
// makeSeriesTimeSecond builds a Series whose time column is at index 1
// and whose nullable value column is at index 0 (i.e. value-first frames),
// panicking on any SetPoint error.
func makeSeriesTimeSecond(name string, labels data.Labels, points ...timeSecondTP) Series {
	s := NewSeries(name, labels, 1, false, 0, true, len(points))
	for idx, p := range points {
		if err := s.SetPoint(idx, &p.t, p.f); err != nil {
			panic(err)
		}
	}
	return s
}
// makeNumber builds a Number with the given name, labels, and (possibly
// nil) float value.
func makeNumber(name string, labels data.Labels, f *float64) Number {
	num := NewNumber(name, labels)
	num.SetValue(f)
	return num
}
func unixTimePointer(sec, nsec int64) *time.Time {
t := time.Unix(sec, nsec)
return &t
}
// float64Pointer returns a pointer to a fresh copy of f.
func float64Pointer(f float64) *float64 {
	v := f
	return &v
}
var aSeriesNullableTime = Vars{
"A": Results{
[]Value{
makeSeriesNullableTime("temp", nil, nullTimeTP{
unixTimePointer(5, 0), float64Pointer(2),
}, nullTimeTP{
unixTimePointer(10, 0), float64Pointer(1),
}),
},
},
}
var aSeries = Vars{
"A": Results{
[]Value{
makeSeries("temp", nil, tp{
time.Unix(5, 0), float64Pointer(2),
}, tp{
time.Unix(10, 0), float64Pointer(1),
}),
},
},
}
var aSeriesTimeSecond = Vars{
"A": Results{
[]Value{
makeSeriesTimeSecond("temp", nil, timeSecondTP{
float64Pointer(2), time.Unix(5, 0),
}, timeSecondTP{
float64Pointer(1), time.Unix(10, 0),
}),
},
},
}
var aSeriesNoNull = Vars{
"A": Results{
[]Value{
makeNoNullSeries("temp", nil, noNullTP{
time.Unix(5, 0), 2,
}, noNullTP{
time.Unix(10, 0), 1,
}),
},
},
}
var aSeriesbNumber = Vars{
"A": Results{
[]Value{
makeSeriesNullableTime("temp", nil, nullTimeTP{
unixTimePointer(5, 0), float64Pointer(2),
}, nullTimeTP{
unixTimePointer(10, 0), float64Pointer(1),
}),
},
},
"B": Results{
[]Value{
makeNumber("volt", data.Labels{"id": "1"}, float64Pointer(7)),
},
},
}
var twoSeriesSets = Vars{
"A": Results{
[]Value{
makeSeriesNullableTime("temp", data.Labels{"sensor": "a", "turbine": "1"}, nullTimeTP{
unixTimePointer(5, 0), float64Pointer(6),
}, nullTimeTP{
unixTimePointer(10, 0), float64Pointer(8),
}),
makeSeriesNullableTime("temp", data.Labels{"sensor": "b", "turbine": "1"}, nullTimeTP{
unixTimePointer(5, 0), float64Pointer(10),
}, nullTimeTP{
unixTimePointer(10, 0), float64Pointer(16),
}),
},
},
"B": Results{
[]Value{
makeSeriesNullableTime("efficiency", data.Labels{"turbine": "1"}, nullTimeTP{
unixTimePointer(5, 0), float64Pointer(.5),
}, nullTimeTP{
unixTimePointer(10, 0), float64Pointer(.2),
}),
},
},
}
// NaN is just to make the calls a little cleaner, the one
// call is not for any sort of equality side effect in tests.
// note: cmp.Equal must be used to test Equality for NaNs.
var NaN = float64Pointer(math.NaN())

112
pkg/expr/mathexp/funcs.go Normal file
View File

@ -0,0 +1,112 @@
package mathexp
import (
	"fmt"
	"math"

	"github.com/grafana/grafana/pkg/expr/mathexp/parse"
)
// builtins maps function names usable in expressions to their
// implementations. Functions with VariantReturn accept and return any of
// number set, series set, or scalar; the others return a fixed scalar.
var builtins = map[string]parse.Func{
	"abs": {
		Args:          []parse.ReturnType{parse.TypeVariantSet},
		VariantReturn: true,
		F:             abs,
	},
	"log": {
		Args:          []parse.ReturnType{parse.TypeVariantSet},
		VariantReturn: true,
		F:             log,
	},
	"nan": {
		Return: parse.TypeScalar,
		F:      nan,
	},
	"inf": {
		Return: parse.TypeScalar,
		F:      inf,
	},
	"null": {
		Return: parse.TypeScalar,
		F:      null,
	},
}
// abs returns the absolute value for each result in a NumberSet,
// SeriesSet, or Scalar.
func abs(e *State, varSet Results) (Results, error) {
	out := Results{}
	for _, v := range varSet.Values {
		mapped, err := perFloat(v, math.Abs)
		if err != nil {
			return out, err
		}
		out.Values = append(out.Values, mapped)
	}
	return out, nil
}
// log returns the natural logarithm for each result in a NumberSet,
// SeriesSet, or Scalar.
func log(e *State, varSet Results) (Results, error) {
	out := Results{}
	for _, v := range varSet.Values {
		mapped, err := perFloat(v, math.Log)
		if err != nil {
			return out, err
		}
		out.Values = append(out.Values, mapped)
	}
	return out, nil
}
// nan returns a scalar NaN value.
func nan(e *State) Results {
	v := math.NaN()
	return NewScalarResults(&v)
}
// inf returns a scalar positive-infinity value.
func inf(e *State) Results {
	v := math.Inf(0)
	return NewScalarResults(&v)
}
// null returns a null scalar value (a nil float pointer).
func null(e *State) Results {
	return NewScalarResults(nil)
}
// perFloat applies floatF to every float within val, returning a new
// Value of the same kind (NumberSet, Scalar, or SeriesSet). Null inputs
// are mapped to NaN before floatF is applied.
// Previously an unsupported Value type fell through the switch and
// returned (nil, nil); it now returns an explicit error.
func perFloat(val Value, floatF func(x float64) float64) (Value, error) {
	switch val.Type() {
	case parse.TypeNumberSet:
		n := NewNumber(val.GetName(), val.GetLabels())
		f := val.(Number).GetFloat64Value()
		nF := math.NaN()
		if f != nil {
			nF = floatF(*f)
		}
		n.SetValue(&nF)
		return n, nil
	case parse.TypeScalar:
		f := val.(Scalar).GetFloat64Value()
		nF := math.NaN()
		if f != nil {
			nF = floatF(*f)
		}
		return NewScalar(&nF), nil
	case parse.TypeSeriesSet:
		resSeries := val.(Series)
		newSeries := NewSeries(resSeries.GetName(), resSeries.GetLabels(), resSeries.TimeIdx, resSeries.TimeIsNullable, resSeries.ValueIdx, resSeries.ValueIsNullabe, resSeries.Len())
		for i := 0; i < resSeries.Len(); i++ {
			t, f := resSeries.GetPoint(i)
			nF := math.NaN()
			if f != nil {
				nF = floatF(*f)
			}
			if err := newSeries.SetPoint(i, t, &nF); err != nil {
				return newSeries, err
			}
		}
		return newSeries, nil
	default:
		return nil, fmt.Errorf("unsupported value type %v in function argument", val.Type())
	}
}

View File

@ -0,0 +1,88 @@
package mathexp
import (
"testing"
"github.com/stretchr/testify/assert"
)
func TestFunc(t *testing.T) {
var tests = []struct {
name string
expr string
vars Vars
newErrIs assert.ErrorAssertionFunc
execErrIs assert.ErrorAssertionFunc
resultIs assert.ComparisonAssertionFunc
results Results
}{
{
name: "abs on number",
expr: "abs($A)",
vars: Vars{
"A": Results{
[]Value{
makeNumber("", nil, float64Pointer(-7)),
},
},
},
newErrIs: assert.NoError,
execErrIs: assert.NoError,
resultIs: assert.Equal,
results: Results{[]Value{makeNumber("", nil, float64Pointer(7))}},
},
{
name: "abs on scalar",
expr: "abs(-1)",
vars: Vars{},
newErrIs: assert.NoError,
execErrIs: assert.NoError,
resultIs: assert.Equal,
results: Results{[]Value{NewScalar(float64Pointer(1.0))}},
},
{
name: "abs on series",
expr: "abs($A)",
vars: Vars{
"A": Results{
[]Value{
makeSeriesNullableTime("", nil, nullTimeTP{
unixTimePointer(5, 0), float64Pointer(-2),
}, nullTimeTP{
unixTimePointer(10, 0), float64Pointer(-1),
}),
},
},
},
newErrIs: assert.NoError,
execErrIs: assert.NoError,
resultIs: assert.Equal,
results: Results{
[]Value{
makeSeriesNullableTime("", nil, nullTimeTP{
unixTimePointer(5, 0), float64Pointer(2),
}, nullTimeTP{
unixTimePointer(10, 0), float64Pointer(1),
}),
},
},
},
{
name: "abs on string - should error",
expr: `abs("hi")`,
vars: Vars{},
newErrIs: assert.Error,
},
}
for _, tt := range tests {
t.Run(tt.name, func(t *testing.T) {
e, err := New(tt.expr)
tt.newErrIs(t, err)
if e != nil {
res, err := e.Execute(tt.vars)
tt.execErrIs(t, err)
tt.resultIs(t, tt.results, res)
}
})
}
}

View File

@ -0,0 +1,337 @@
// Copyright 2011 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
package parse
import (
"fmt"
"strings"
"unicode"
"unicode/utf8"
)
// item represents a token or text string returned from the scanner.
type item struct {
typ itemType // The type of this item.
pos Pos // The starting position, in bytes, of this item in the input string.
val string // The value of this item.
}
// String renders the item for debugging: EOF and errors specially,
// long values truncated to their first 10 characters.
func (i item) String() string {
	switch i.typ {
	case itemEOF:
		return "EOF"
	case itemError:
		return i.val
	}
	if len(i.val) > 10 {
		return fmt.Sprintf("%.10q...", i.val)
	}
	return fmt.Sprintf("%q", i.val)
}
// itemType identifies the type of lex items.
type itemType int
const (
itemError itemType = iota // error occurred; value is text of error
itemEOF
itemNot // '!'
itemAnd // '&&'
itemOr // '||'
itemGreater // '>'
itemLess // '<'
itemGreaterEq // '>='
itemLessEq // '<='
itemEq // '=='
itemNotEq // '!='
itemPlus // '+'
itemMinus // '-'
itemMult // '*'
itemDiv // '/'
itemMod // '%'
itemNumber // simple number
itemComma
itemLeftParen
itemRightParen
itemString
itemFunc
itemVar // e.g. $A
itemPow // '**'
)
const eof = -1
// stateFn represents the state of the scanner as a function that returns the next state.
type stateFn func(*lexer) stateFn
// lexer holds the state of the scanner.
type lexer struct {
input string // the string being scanned
state stateFn // the next lexing function to enter
pos Pos // current position in the input
start Pos // start position of this item
width Pos // width of last rune read from input
lastPos Pos // position of most recent item returned by nextItem
items chan item // channel of scanned items
}
// next consumes and returns the next rune in the input, advancing pos.
// It returns eof (and records zero width) when the input is exhausted.
func (l *lexer) next() rune {
	if int(l.pos) < len(l.input) {
		r, size := utf8.DecodeRuneInString(l.input[l.pos:])
		l.width = Pos(size)
		l.pos += l.width
		return r
	}
	l.width = 0
	return eof
}
// peek returns the next rune in the input without consuming it.
// nolint:unused
func (l *lexer) peek() rune {
	next := l.next()
	l.backup()
	return next
}
// backup steps back one rune. It may only be called once per call of
// next, since only the width of the most recent rune is recorded.
func (l *lexer) backup() {
	l.pos -= l.width
}
// emit sends the text accumulated since start as an item of type t to the
// client, then advances the start marker.
func (l *lexer) emit(t itemType) {
	i := item{t, l.start, l.input[l.start:l.pos]}
	l.items <- i
	l.start = l.pos
}
// accept consumes the next rune if it is in the valid set, reporting
// whether it did so.
func (l *lexer) accept(valid string) bool {
	r := l.next()
	if strings.ContainsRune(valid, r) {
		return true
	}
	l.backup()
	return false
}
// acceptRun consumes a contiguous run of runes drawn from the valid set,
// leaving the first rune not in the set unconsumed.
func (l *lexer) acceptRun(valid string) {
	r := l.next()
	for strings.ContainsRune(valid, r) {
		r = l.next()
	}
	l.backup()
}
// ignore discards the pending input before this point by advancing the
// start marker to the current position.
func (l *lexer) ignore() {
	l.start = l.pos
}
// lineNumber reports which line we're on, based on the position of the
// previous item returned by nextItem. Computing it lazily like this means
// next/peek never need to track newlines.
// nolint:unused
func (l *lexer) lineNumber() int {
	return strings.Count(l.input[:l.lastPos], "\n") + 1
}
// errorf emits an error item carrying the formatted message and
// terminates the scan by returning nil as the next state, which in turn
// ends l.nextItem's stream.
func (l *lexer) errorf(format string, args ...interface{}) stateFn {
	l.items <- item{itemError, l.start, fmt.Sprintf(format, args...)}
	return nil
}
// nextItem returns the next item from the input, recording its position
// for later line-number reporting.
func (l *lexer) nextItem() item {
	i := <-l.items
	l.lastPos = i.pos
	return i
}
// lex creates a new scanner for the input string and starts its state
// machine in a goroutine; items are delivered via nextItem.
func lex(input string) *lexer {
	l := &lexer{
		input: input,
		items: make(chan item),
	}
	go l.run()
	return l
}
// run drives the lexer's state machine until a state function returns nil.
func (l *lexer) run() {
	l.state = lexItem
	for l.state != nil {
		l.state = l.state(l)
	}
}
// state functions
// lexItem is the top-level lexing state: it dispatches on the next rune
// to the appropriate sub-lexer, emits parens, commas, and EOF itself,
// and silently skips whitespace.
func lexItem(l *lexer) stateFn {
Loop:
	for {
		switch r := l.next(); {
		case r == '$':
			// Variable reference such as $A.
			return lexVar
		case isSymbol(r):
			// Operator characters: ! < > = & | + - * / %
			return lexSymbol
		case isNumber(r):
			// Put the first digit/dot back so lexNumber scans the whole number.
			l.backup()
			return lexNumber
		case unicode.IsLetter(r):
			// Function name, e.g. abs(...).
			return lexFunc
		case r == '(':
			l.emit(itemLeftParen)
		case r == ')':
			l.emit(itemRightParen)
		case r == '"':
			return lexString
		case r == ',':
			l.emit(itemComma)
		case isSpace(r):
			// Whitespace separates tokens but is never emitted.
			l.ignore()
		case r == eof:
			l.emit(itemEOF)
			break Loop
		default:
			return l.errorf("invalid character: %s", string(r))
		}
	}
	return nil
}
// lexNumber scans a number: decimal, octal, hex, or float. The scanner is
// deliberately permissive (it accepts forms like "." and "0x0.2"); invalid
// numbers are rejected later by strconv in the parser.
func lexNumber(l *lexer) stateFn {
	if l.scanNumber() {
		l.emit(itemNumber)
		return lexItem
	}
	return l.errorf("bad number syntax: %q", l.input[l.start:l.pos])
}
// scanNumber consumes a number from the input: an optional 0x/0X hex
// prefix, digits, an optional fractional part, and an optional exponent.
// Note it currently always returns true; malformed numbers it lets
// through are caught by strconv during parsing.
func (l *lexer) scanNumber() bool {
	// Is it hex?
	digits := "0123456789"
	if l.accept("0") && l.accept("xX") {
		digits = "0123456789abcdefABCDEF"
	}
	l.acceptRun(digits)
	if l.accept(".") {
		l.acceptRun(digits)
	}
	if l.accept("eE") {
		l.accept("+-")
		l.acceptRun("0123456789")
	}
	return true
}
const symbols = "!<>=&|+-*/%"
// lexSymbol consumes a maximal run of operator characters and emits the
// single item matching the entire run. A run that is not a recognized
// operator (e.g. "=!") is emitted as itemError, whose value is the run
// text itself rather than an error message.
func lexSymbol(l *lexer) stateFn {
	l.acceptRun(symbols)
	s := l.input[l.start:l.pos]
	switch s {
	case "!":
		l.emit(itemNot)
	case "&&":
		l.emit(itemAnd)
	case "||":
		l.emit(itemOr)
	case ">":
		l.emit(itemGreater)
	case "<":
		l.emit(itemLess)
	case ">=":
		l.emit(itemGreaterEq)
	case "<=":
		l.emit(itemLessEq)
	case "==":
		l.emit(itemEq)
	case "!=":
		l.emit(itemNotEq)
	case "+":
		l.emit(itemPlus)
	case "-":
		l.emit(itemMinus)
	case "*":
		l.emit(itemMult)
	case "**":
		l.emit(itemPow)
	case "/":
		l.emit(itemDiv)
	case "%":
		l.emit(itemMod)
	default:
		l.emit(itemError)
	}
	return lexItem
}
// lexFunc consumes a run of letters (a function name) and emits it as
// itemFunc; the first letter was already consumed by lexItem.
func lexFunc(l *lexer) stateFn {
	r := l.next()
	for unicode.IsLetter(r) {
		r = l.next()
	}
	l.backup()
	l.emit(itemFunc)
	return lexItem
}
// lexVar consumes the letters following a '$' (already consumed by
// lexItem) and emits the whole "$name" as itemVar. A bare '$' with no
// letters is an error.
func lexVar(l *lexer) stateFn {
	seenLetter := false
	r := l.next()
	for unicode.IsLetter(r) {
		seenLetter = true
		r = l.next()
	}
	if !seenLetter {
		return l.errorf("incomplete variable")
	}
	l.backup()
	l.emit(itemVar)
	return lexItem
}
// lexString consumes input up to and including the closing double quote
// (the opening quote was consumed by lexItem) and emits itemString.
// Reaching EOF first is an unterminated-string error.
func lexString(l *lexer) stateFn {
	for {
		r := l.next()
		if r == '"' {
			l.emit(itemString)
			return lexItem
		}
		if r == eof {
			return l.errorf("unterminated string")
		}
	}
}
// isSpace reports whether r is a space character per Unicode's
// White Space property.
func isSpace(r rune) bool {
	return unicode.In(r, unicode.White_Space)
}
// isVarchar reports whether r may appear in a variable name: an
// underscore, letter, or digit. It could replace the unicode.IsLetter
// checks in lexVar, but that change is deferred.
// nolint:unused,deadcode
func isVarchar(r rune) bool {
	switch {
	case r == '_', unicode.IsLetter(r), unicode.IsDigit(r):
		return true
	}
	return false
}
// isSymbol reports whether r is one of the operator characters.
func isSymbol(r rune) bool {
	return strings.IndexRune(symbols, r) >= 0
}
// isNumber reports whether r can start a number: a digit or a dot.
func isNumber(r rune) bool {
	return r == '.' || unicode.IsDigit(r)
}

View File

@ -0,0 +1,156 @@
// Copyright 2011 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
package parse
import (
"fmt"
"testing"
)
// Make the types prettyprint.
var itemName = map[itemType]string{
itemError: "error",
itemEOF: "EOF",
itemNot: "!",
itemAnd: "&&",
itemOr: "||",
itemGreater: ">",
itemLess: "<",
itemGreaterEq: ">=",
itemLessEq: "<=",
itemEq: "==",
itemNotEq: "!=",
itemPlus: "+",
itemMinus: "-",
itemMult: "*",
itemDiv: "/",
itemMod: "%",
itemNumber: "number",
itemComma: ",",
itemLeftParen: "(",
itemRightParen: ")",
itemString: "string",
itemFunc: "func",
}
// String pretty-prints an itemType using the itemName table, falling back
// to "item<N>" for unnamed types.
func (i itemType) String() string {
	if s := itemName[i]; s != "" {
		return s
	}
	return fmt.Sprintf("item%d", int(i))
}
type lexTest struct {
name string
input string
items []item
}
// Shorthand expected items for the tables below. Positions are zero
// because the tests compare with checkPos=false.
var (
	tEOF   = item{itemEOF, 0, ""}
	tLt    = item{itemLess, 0, "<"}
	tGt    = item{itemGreater, 0, ">"}
	tOr    = item{itemOr, 0, "||"}
	tNot   = item{itemNot, 0, "!"}
	tAnd   = item{itemAnd, 0, "&&"}
	tLtEq  = item{itemLessEq, 0, "<="}
	tGtEq  = item{itemGreaterEq, 0, ">="}
	tNotEq = item{itemNotEq, 0, "!="}
	tEq    = item{itemEq, 0, "=="}
	tPlus  = item{itemPlus, 0, "+"}
	tMinus = item{itemMinus, 0, "-"}
	tMult  = item{itemMult, 0, "*"}
	tDiv   = item{itemDiv, 0, "/"}
	tMod   = item{itemMod, 0, "%"}
)

// lexTests drives TestLex: each input must produce exactly the listed
// item sequence, including the trailing EOF (or error) item.
var lexTests = []lexTest{
	{"empty", "", []item{tEOF}},
	{"spaces", " \t\n", []item{tEOF}},
	{"text", `"now is the time"`, []item{{itemString, 0, `"now is the time"`}, tEOF}},
	{"operators", "! && || < > <= >= == != + - * / %", []item{
		tNot,
		tAnd,
		tOr,
		tLt,
		tGt,
		tLtEq,
		tGtEq,
		tEq,
		tNotEq,
		tPlus,
		tMinus,
		tMult,
		tDiv,
		tMod,
		tEOF,
	}},
	{"numbers", "1 02 0x14 7.2 1e3 1.2e-4", []item{
		{itemNumber, 0, "1"},
		{itemNumber, 0, "02"},
		{itemNumber, 0, "0x14"},
		{itemNumber, 0, "7.2"},
		{itemNumber, 0, "1e3"},
		{itemNumber, 0, "1.2e-4"},
		tEOF,
	}},
	{"number plus var", "1 + $A", []item{
		{itemNumber, 0, "1"},
		tPlus,
		{itemVar, 0, "$A"},
		tEOF,
	}},
	// errors: the lexer stops at the first error item, with no EOF after.
	{"unclosed quote", "\"", []item{
		{itemError, 0, "unterminated string"},
	}},
	{"single quote", "'single quote is invalid'", []item{
		{itemError, 0, "invalid character: '"},
	}},
	{"invalid var", "$", []item{
		{itemError, 0, "incomplete variable"},
	}},
}
// collect runs the lexer over the test's input and gathers every emitted
// item, stopping after the first EOF or error item (inclusive).
func collect(t *lexTest) []item {
	var out []item
	l := lex(t.input)
	for {
		it := l.nextItem()
		out = append(out, it)
		if it.typ == itemEOF || it.typ == itemError {
			return out
		}
	}
}
// equal reports whether two item slices match on type and value; byte
// positions are compared only when checkPos is true.
func equal(i1, i2 []item, checkPos bool) bool {
	if len(i1) != len(i2) {
		return false
	}
	for k, a := range i1 {
		b := i2[k]
		if a.typ != b.typ || a.val != b.val {
			return false
		}
		if checkPos && a.pos != b.pos {
			return false
		}
	}
	return true
}
// TestLex lexes every table entry and compares the emitted items against
// the expected sequence, ignoring byte positions.
func TestLex(t *testing.T) {
	for i := range lexTests {
		test := &lexTests[i]
		items := collect(test)
		if !equal(items, test.items, false) {
			t.Errorf("%s: got\n\t%+v\nexpected\n\t%v", test.name, items, test.items)
		}
	}
}

View File

@ -0,0 +1,422 @@
// Copyright 2011 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
// Parse nodes.
package parse
import (
"fmt"
"strconv"
)
// A Node is an element in the parse tree. The interface is trivial.
// The interface contains an unexported method so that only
// types local to this package can satisfy it.
type Node interface {
	Type() NodeType
	String() string    // input-form rendering of the node
	StringAST() string // prefix/AST-form rendering of the node
	Position() Pos     // byte position of start of node in full original input string
	Check(*Tree) error // performs type checking for itself and sub-nodes
	Return() ReturnType
	// Make sure only functions in this package can create Nodes.
	unexported()
}

// NodeType identifies the type of a parse tree node.
type NodeType int

// Pos represents a byte position in the original input text from which
// this template was parsed.
type Pos int

// Position returns the integer Position of p.
func (p Pos) Position() Pos {
	return p
}

// unexported keeps Node implementations local to the package.
// All implementations embed Pos, so this takes care of it.
func (Pos) unexported() {
}

// Type returns itself and provides an easy default implementation
// for embedding in a Node. Embedded in all non-trivial Nodes.
func (t NodeType) Type() NodeType {
	return t
}

const (
	// NodeFunc is a function call.
	NodeFunc NodeType = iota
	// NodeBinary is a binary operator: math, logical, compare
	NodeBinary
	// NodeUnary is unary operator: !, -
	NodeUnary
	// NodeString is string constant.
	NodeString
	// NodeNumber is a numerical constant (Scalar).
	NodeNumber
	// NodeVar is variable: $A
	NodeVar
)
// String returns the string representation of the NodeType.
func (t NodeType) String() string {
	switch t {
	case NodeFunc:
		return "NodeFunc"
	case NodeBinary:
		return "NodeBinary"
	case NodeUnary:
		return "NodeUnary"
	case NodeString:
		return "NodeString"
	case NodeNumber:
		return "NodeNumber"
	case NodeVar:
		// Previously missing, which made variable nodes print as
		// "NodeUnknown".
		return "NodeVar"
	default:
		return "NodeUnknown"
	}
}
// Nodes.

// VarNode holds a variable reference, e.g. "$A".
type VarNode struct {
	NodeType
	Pos
	Name string // Without the $
	Text string // Raw
}

// newVar constructs a VarNode at pos for the raw token text (e.g. "$A")
// and its prefix-stripped name (e.g. "A").
func newVar(pos Pos, name, text string) *VarNode {
	return &VarNode{NodeType: NodeVar, Pos: pos, Name: name, Text: text}
}

// Type returns the Type of the VarNode so it fulfills the Node interface.
// (Shadows the embedded NodeType method; both return NodeVar.)
func (n *VarNode) Type() NodeType { return NodeVar }

// String returns the string representation of the VarNode so it fulfills the Node interface.
func (n *VarNode) String() string { return n.Text }

// StringAST returns the string representation of abstract syntax tree of the VarNode so it fulfills the Node interface.
func (n *VarNode) StringAST() string { return n.String() }

// Check performs parse time checking on the VarNode so it fulfills the Node interface.
// Variables are leaves and always valid at parse time.
func (n *VarNode) Check(*Tree) error {
	return nil
}

// Return returns the result type of the VarNode so it fulfills the Node interface.
func (n *VarNode) Return() ReturnType {
	return TypeSeriesSet // Vars are only time series for now I guess....
}
// FuncNode holds a function invocation.
type FuncNode struct {
	NodeType
	Pos
	Name   string // function name as written in the expression
	F      *Func  // resolved function definition (copy owned by this node)
	Args   []Node // parsed argument expressions, in call order
	Prefix string
}

// newFunc constructs a FuncNode for name at pos. Note f is received by
// value, so the node gets its own copy of the Func definition.
func newFunc(pos Pos, name string, f Func) *FuncNode {
	return &FuncNode{NodeType: NodeFunc, Pos: pos, Name: name, F: &f}
}

// append adds one parsed argument to the call.
func (f *FuncNode) append(arg Node) {
	f.Args = append(f.Args, arg)
}
// String returns the call in input form, "name(arg, arg, ...)", with each
// argument rendered via its String method.
func (f *FuncNode) String() string {
	out := f.Name + "("
	for i, a := range f.Args {
		if i != 0 {
			out += ", "
		}
		out += a.String()
	}
	return out + ")"
}

// StringAST returns the call in AST form, "name(arg, arg, ...)", with
// each argument rendered via its StringAST method.
func (f *FuncNode) StringAST() string {
	out := f.Name + "("
	for i, a := range f.Args {
		if i != 0 {
			out += ", "
		}
		out += a.StringAST()
	}
	return out + ")"
}
// Check performs parse time checking on the FuncNode so it fulfills the
// Node interface: the call's arity must match the definition exactly,
// every argument's return type must satisfy the declared parameter type
// (TypeVariantSet accepts number sets, series sets, and scalars),
// arguments are checked recursively, and the function's own Check hook,
// if any, runs last.
func (f *FuncNode) Check(t *Tree) error {
	if len(f.Args) < len(f.F.Args) {
		return fmt.Errorf("parse: not enough arguments for %s", f.Name)
	} else if len(f.Args) > len(f.F.Args) {
		return fmt.Errorf("parse: too many arguments for %s", f.Name)
	}
	for i, arg := range f.Args {
		funcType := f.F.Args[i]
		argType := arg.Return()
		if funcType == TypeVariantSet {
			if !(argType == TypeNumberSet || argType == TypeSeriesSet || argType == TypeScalar) {
				// Message now lists all three accepted types; it
				// previously omitted TypeScalar even though scalars
				// are accepted by the condition above.
				return fmt.Errorf("parse: expected %v, %v, or %v for argument %v, got %v", TypeNumberSet, TypeSeriesSet, TypeScalar, i, argType)
			}
		} else if funcType != argType {
			return fmt.Errorf("parse: expected %v, got %v for argument %v (%v)", funcType, argType, i, arg.String())
		}
		if err := arg.Check(t); err != nil {
			return err
		}
	}
	if f.F.Check != nil {
		return f.F.Check(t, f)
	}
	return nil
}
// Return returns the result type of the FuncNode so it fulfills the Node interface.
// For VariantReturn functions this value is rewritten by the parser from
// the first argument's type (see Tree.Func).
func (f *FuncNode) Return() ReturnType {
	return f.F.Return
}

// ScalarNode holds a number: signed or unsigned integer or float.
// The value is parsed and stored under all the types that can represent the value.
// This simulates in a small amount of code the behavior of Go's ideal constants.
type ScalarNode struct {
	NodeType
	Pos
	IsUint  bool    // Number has an unsigned integral value.
	IsFloat bool    // Number has a floating-point value.
	Uint64  uint64  // The unsigned integer value.
	Float64 float64 // The floating-point value.
	Text    string  // The original textual representation from the input.
}
// newNumber parses text into a ScalarNode, recording the value under
// every numeric representation that can hold it (mirroring Go's ideal
// constants). Integer parsing runs first so bases like 0x123 are kept;
// a successful float parse back-fills the integer form when the value is
// integral. Text that parses as neither is an error.
func newNumber(pos Pos, text string) (*ScalarNode, error) {
	n := &ScalarNode{NodeType: NodeNumber, Pos: pos, Text: text}
	// Do integer test first so we get 0x123 etc.
	u, err := strconv.ParseUint(text, 0, 64) // will fail for -0.
	if err == nil {
		n.IsUint = true
		n.Uint64 = u
	}
	// If an integer extraction succeeded, promote the float.
	if n.IsUint {
		n.IsFloat = true
		n.Float64 = float64(n.Uint64)
	} else {
		f, err := strconv.ParseFloat(text, 64)
		if err == nil {
			n.IsFloat = true
			n.Float64 = f
			// If a floating-point extraction succeeded, extract the int if needed.
			if !n.IsUint && float64(uint64(f)) == f {
				n.IsUint = true
				n.Uint64 = uint64(f)
			}
		}
	}
	if !n.IsUint && !n.IsFloat {
		return nil, fmt.Errorf("illegal number syntax: %q", text)
	}
	return n, nil
}
// String returns the string representation of the ScalarNode so it fulfills the Node interface.
func (n *ScalarNode) String() string {
	return n.Text
}

// StringAST returns the string representation of abstract syntax tree of the ScalarNode so it fulfills the Node interface.
func (n *ScalarNode) StringAST() string {
	return n.String()
}

// Check performs parse time checking on the ScalarNode so it fulfills the Node interface.
// Scalars are leaves and always valid at parse time.
func (n *ScalarNode) Check(*Tree) error {
	return nil
}

// Return returns the result type of the ScalarNode so it fulfills the Node interface.
func (n *ScalarNode) Return() ReturnType {
	return TypeScalar
}

// StringNode holds a string constant. The value has been "unquoted".
type StringNode struct {
	NodeType
	Pos
	Quoted string // The original text of the string, with quotes.
	Text   string // The string, after quote processing.
}

// newString constructs a StringNode from the original quoted token text
// and its unquoted value.
func newString(pos Pos, orig, text string) *StringNode {
	return &StringNode{NodeType: NodeString, Pos: pos, Quoted: orig, Text: text}
}

// String returns the string representation of the StringNode so it fulfills the Node interface.
func (s *StringNode) String() string {
	return s.Quoted
}

// StringAST returns the string representation of abstract syntax tree of the StringNode so it fulfills the Node interface.
func (s *StringNode) StringAST() string {
	return s.String()
}

// Check performs parse time checking on the StringNode so it fulfills the Node interface.
// String constants are leaves and always valid at parse time.
func (s *StringNode) Check(*Tree) error {
	return nil
}

// Return returns the result type of the TypeString so it fulfills the Node interface.
func (s *StringNode) Return() ReturnType {
	return TypeString
}
// BinaryNode holds two arguments and an operator.
type BinaryNode struct {
	NodeType
	Pos
	Args     [2]Node // left (0) and right (1) operands
	Operator item    // the operator token
	OpStr    string  // the operator's textual form (same as Operator.val)
}

// newBinary constructs a BinaryNode for operator applied to arg1 and
// arg2, positioned at the operator token.
func newBinary(operator item, arg1, arg2 Node) *BinaryNode {
	return &BinaryNode{NodeType: NodeBinary, Pos: operator.pos, Args: [2]Node{arg1, arg2}, Operator: operator, OpStr: operator.val}
}
// String returns the expression in infix input form, e.g. "$A + $B".
func (b *BinaryNode) String() string {
	return fmt.Sprintf("%s %s %s", b.Args[0], b.Operator.val, b.Args[1])
}

// StringAST returns the expression in prefix (AST) form, e.g. "+($A, $B)".
// Operands are rendered with StringAST — previously they were rendered
// with String, which produced mixed infix/prefix output for nested
// expressions, inconsistent with FuncNode.StringAST.
func (b *BinaryNode) StringAST() string {
	return fmt.Sprintf("%s(%s, %s)", b.Operator.val, b.Args[0].StringAST(), b.Args[1].StringAST())
}
// Check performs parse time checking on the BinaryNode so it fulfills the
// Node interface. The operator itself imposes no constraints here, but
// both operands are checked recursively — previously this returned nil
// without descending, so errors inside sub-expressions under a binary
// operator escaped parse-time checking.
func (b *BinaryNode) Check(t *Tree) error {
	if err := b.Args[0].Check(t); err != nil {
		return err
	}
	return b.Args[1].Check(t)
}
// Return returns the result type of the BinaryNode so it fulfills the
// Node interface: the "wider" of the two operand types (ReturnType
// values are ordered by generality).
func (b *BinaryNode) Return() ReturnType {
	left, right := b.Args[0].Return(), b.Args[1].Return()
	if left >= right {
		return left
	}
	return right
}
// UnaryNode holds one argument and an operator.
type UnaryNode struct {
	NodeType
	Pos
	Arg      Node // the operand
	Operator item // the operator token (! or -)
	OpStr    string
}

// newUnary constructs a UnaryNode for operator applied to arg, positioned
// at the operator token.
func newUnary(operator item, arg Node) *UnaryNode {
	return &UnaryNode{NodeType: NodeUnary, Pos: operator.pos, Arg: arg, Operator: operator, OpStr: operator.val}
}

// String returns the string representation of the UnaryNode so it fulfills the Node interface.
func (u *UnaryNode) String() string {
	return fmt.Sprintf("%s%s", u.Operator.val, u.Arg)
}
// StringAST returns the operator applied in prefix (AST) form, e.g.
// "!($A)". The operand is rendered with StringAST — previously String,
// which produced mixed infix/prefix output for nested expressions,
// inconsistent with FuncNode.StringAST.
func (u *UnaryNode) StringAST() string {
	return fmt.Sprintf("%s(%s)", u.Operator.val, u.Arg.StringAST())
}
// Check performs parse time checking on the UnaryNode so it fulfills the
// Node interface: the operand must be numeric (number set, series set,
// or scalar) and is itself checked recursively.
func (u *UnaryNode) Check(t *Tree) error {
	switch rt := u.Arg.Return(); rt {
	case TypeNumberSet, TypeSeriesSet, TypeScalar:
		return u.Arg.Check(t)
	default:
		return fmt.Errorf(`parse: type error in %s, expected "number", got %s`, u, rt)
	}
}

// Return returns the result type of the UnaryNode so it fulfills the Node interface.
// A unary operator preserves its operand's type.
func (u *UnaryNode) Return() ReturnType {
	return u.Arg.Return()
}
// Walk invokes f on n and then, depth-first, on each sub-node of n.
// It panics on an unknown Node implementation.
func Walk(n Node, f func(Node)) {
	f(n)
	switch n := n.(type) {
	case *BinaryNode:
		Walk(n.Args[0], f)
		Walk(n.Args[1], f)
	case *FuncNode:
		for _, a := range n.Args {
			Walk(a, f)
		}
	case *UnaryNode:
		Walk(n.Arg, f)
	case *ScalarNode, *StringNode, *VarNode:
		// Leaf nodes: no sub-nodes to visit. *VarNode was previously
		// missing here, so walking any expression containing a
		// variable (e.g. $A) hit the default case and panicked.
	default:
		panic(fmt.Errorf("other type: %T", n))
	}
}
// ReturnType represents the type that is returned from a node.
// The ordering of the constants matters: BinaryNode.Return picks the
// larger (more general) of its operands' types.
type ReturnType int

const (
	// TypeString is a single string.
	TypeString ReturnType = iota
	// TypeScalar is a unlabled number constant.
	TypeScalar
	// TypeNumberSet is a collection of labelled numbers.
	TypeNumberSet
	// TypeSeriesSet is a collection of labelled time series.
	TypeSeriesSet
	// TypeVariantSet is a collection of the same type Number, Series, or Scalar.
	TypeVariantSet
)
// String returns a string representation of the ReturnType, or "unknown"
// for values outside the defined constants.
func (f ReturnType) String() string {
	names := [...]string{
		TypeString:     "string",
		TypeScalar:     "scalar",
		TypeNumberSet:  "numberSet",
		TypeSeriesSet:  "seriesSet",
		TypeVariantSet: "variant",
	}
	if f < 0 || int(f) >= len(names) {
		return "unknown"
	}
	return names[f]
}

View File

@ -0,0 +1,353 @@
// Copyright 2011 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
// Package parse builds parse trees for expressions as defined by expr. Clients
// should use that package to construct expressions rather than this one, which
// provides shared internal data structures not intended for general use.
package parse
import (
"fmt"
"runtime"
"strconv"
"strings"
)
// Tree is the representation of a single parsed expression.
type Tree struct {
	Text     string   // text parsed to create the expression.
	Root     Node     // top-level root of the tree, returns a number.
	VarNames []string // names (without $) of every variable referenced, in parse order.

	funcs []map[string]Func // function tables consulted in order by GetFunction.

	// Parsing only; cleared after parse.
	lex       *lexer
	token     [1]item // one-token lookahead for parser.
	peekCount int
}

// Func holds the structure of a parsed function call.
type Func struct {
	Args          []ReturnType // declared parameter types, in order.
	Return        ReturnType   // declared result type.
	F             interface{}  // the implementation invoked at execution time.
	VariantReturn bool         // if true, Return is rewritten from the first argument's type.
	Check         func(*Tree, *FuncNode) error
}

// Parse returns a Tree, created by parsing the expression described in the
// argument string. If an error is encountered, parsing stops and an empty Tree
// is returned with the error.
func Parse(text string, funcs ...map[string]Func) (t *Tree, err error) {
	t = New()
	t.Text = text
	err = t.Parse(text, funcs...)
	return
}
// next returns the next token, consuming the buffered lookahead token
// first if backup/peek left one pending.
func (t *Tree) next() item {
	if t.peekCount > 0 {
		t.peekCount--
	} else {
		t.token[0] = t.lex.nextItem()
	}
	return t.token[t.peekCount]
}

// backup backs the input stream up one token. Only a single token of
// lookahead is supported.
func (t *Tree) backup() {
	t.peekCount++
}

// peek returns but does not consume the next token.
func (t *Tree) peek() item {
	if t.peekCount > 0 {
		return t.token[t.peekCount-1]
	}
	t.peekCount = 1
	t.token[0] = t.lex.nextItem()
	return t.token[0]
}
// Parsing.

// New allocates a new parse tree with the given name.
func New(funcs ...map[string]Func) *Tree {
	return &Tree{
		funcs: funcs,
	}
}

// errorf formats the error and terminates processing.
// The panic is converted back into an error return by Tree.recover.
func (t *Tree) errorf(format string, args ...interface{}) {
	t.Root = nil
	format = fmt.Sprintf("expr: %s", format)
	panic(fmt.Errorf(format, args...))
}

// error terminates processing.
func (t *Tree) error(err error) {
	t.errorf("%s", err)
}

// expect consumes the next token and guarantees it has the required type.
func (t *Tree) expect(expected itemType, context string) item {
	token := t.next()
	if token.typ != expected {
		t.unexpected(token, context)
	}
	return token
}

// expectOneOf consumes the next token and guarantees it has one of the required types.
// nolint:unused
func (t *Tree) expectOneOf(expected1, expected2 itemType, context string) item {
	token := t.next()
	if token.typ != expected1 && token.typ != expected2 {
		t.unexpected(token, context)
	}
	return token
}

// unexpected complains about the token and terminates processing.
func (t *Tree) unexpected(token item, context string) {
	t.errorf("unexpected %s in %s", token, context)
}
// recover is the handler that turns panics into returns from the top level of Parse.
// Runtime errors (genuine bugs) are re-panicked; everything else is
// assumed to be an error raised by errorf and is stored in *errp.
func (t *Tree) recover(errp *error) {
	e := recover()
	if e != nil {
		if _, ok := e.(runtime.Error); ok {
			panic(e)
		}
		if t != nil {
			t.stopParse()
		}
		*errp = e.(error)
	}
}
// startParse initializes the parser, using the lexer.
func (t *Tree) startParse(funcs []map[string]Func, lex *lexer) {
	t.Root = nil
	t.lex = lex
	t.funcs = funcs
}

// stopParse terminates parsing, releasing the lexer.
func (t *Tree) stopParse() {
	t.lex = nil
}

// Parse parses the expression definition string to construct a representation
// of the expression for execution. A non-nil error is delivered via the
// deferred recover handler when parsing or checking panics.
func (t *Tree) Parse(text string, funcs ...map[string]Func) (err error) {
	defer t.recover(&err)
	t.startParse(funcs, lex(text))
	t.Text = text
	t.parse()
	t.stopParse()
	return nil
}

// parse is the top-level parser for an expression.
// It runs to EOF and then type-checks the resulting tree.
func (t *Tree) parse() {
	t.Root = t.O()
	t.expect(itemEOF, "root input")
	if err := t.Root.Check(t); err != nil {
		t.error(err)
	}
}
/* Grammar:
O -> A {"||" A}
A -> C {"&&" C}
C -> P {( "==" | "!=" | ">" | ">=" | "<" | "<=") P}
P -> M {( "+" | "-" ) M}
M -> E {( "*" | "/" | "%" ) E}
E -> F {( "**" ) F}
F -> v | "(" O ")" | "!" O | "-" O
v -> number | func(..) | queryVar
Func -> name "(" param {"," param} ")"
param -> number | "string" | queryVar
*/
// expr:

// O parses the lowest-precedence production: A {"||" A}.
func (t *Tree) O() Node {
	n := t.A()
	for t.peek().typ == itemOr {
		n = newBinary(t.next(), n, t.A())
	}
	return n
}
// A parses logical-and chains: C {"&&" C}.
func (t *Tree) A() Node {
	n := t.C()
	for t.peek().typ == itemAnd {
		n = newBinary(t.next(), n, t.C())
	}
	return n
}
// C parses comparison chains: P {("==" | "!=" | ">" | ">=" | "<" | "<=") P}.
func (t *Tree) C() Node {
	n := t.P()
	for {
		typ := t.peek().typ
		if typ != itemEq && typ != itemNotEq && typ != itemGreater &&
			typ != itemGreaterEq && typ != itemLess && typ != itemLessEq {
			return n
		}
		n = newBinary(t.next(), n, t.P())
	}
}
// P parses additive chains: M {("+" | "-") M}.
func (t *Tree) P() Node {
	n := t.M()
	for {
		typ := t.peek().typ
		if typ != itemPlus && typ != itemMinus {
			return n
		}
		n = newBinary(t.next(), n, t.M())
	}
}
// M parses multiplicative chains: E {("*" | "/" | "%") E}.
func (t *Tree) M() Node {
	n := t.E()
	for {
		typ := t.peek().typ
		if typ != itemMult && typ != itemDiv && typ != itemMod {
			return n
		}
		n = newBinary(t.next(), n, t.E())
	}
}
// E parses exponentiation chains: F {"**" F}, left-associative.
func (t *Tree) E() Node {
	n := t.F()
	for t.peek().typ == itemPow {
		n = newBinary(t.next(), n, t.F())
	}
	return n
}
// F is v | "(" O ")" | "!" O | "-" O in the grammar: a value, a
// parenthesized sub-expression, or a unary operator applied to a factor.
func (t *Tree) F() Node {
	switch token := t.peek(); token.typ {
	case itemNumber, itemFunc, itemVar:
		return t.v()
	case itemNot, itemMinus:
		return newUnary(t.next(), t.F())
	case itemLeftParen:
		t.next()
		n := t.O()
		t.expect(itemRightParen, "input: F()")
		return n
	default:
		// unexpected panics via errorf, so this return is unreachable.
		t.unexpected(token, "input: F()")
	}
	return nil
}
// V is number | func(..) | queryVar in the grammar: dispatches on the
// next token to the appropriate leaf/call parser.
func (t *Tree) v() Node {
	switch token := t.next(); token.typ {
	case itemNumber:
		n, err := newNumber(token.pos, token.val)
		if err != nil {
			t.error(err)
		}
		return n
	case itemFunc:
		// Func and Var re-read the token themselves, so push it back.
		t.backup()
		return t.Func()
	case itemVar:
		t.backup()
		return t.Var()
	default:
		// unexpected panics via errorf, so this return is unreachable.
		t.unexpected(token, "input: v()")
	}
	return nil
}
// Var consumes a variable token (e.g. "$A"), records the prefix-stripped
// name in t.VarNames, and returns the corresponding VarNode.
func (t *Tree) Var() *VarNode {
	tok := t.next()
	name := strings.TrimPrefix(tok.val, "$")
	t.VarNames = append(t.VarNames, name)
	return newVar(tok.pos, name, tok.val)
}
// Func parses a FuncNode: the function name, "(", a comma-free list of
// argument expressions or string literals, and ")". The function name
// must resolve via GetFunction or parsing aborts.
func (t *Tree) Func() (f *FuncNode) {
	token := t.next()
	funcv, ok := t.GetFunction(token.val)
	if !ok {
		t.errorf("non existent function %s", token.val)
	}
	f = newFunc(token.pos, token.val, funcv)
	t.expect(itemLeftParen, "func")
	for {
		switch token = t.next(); token.typ {
		default:
			t.backup()
			node := t.O()
			f.append(node)
			if len(f.Args) == 1 && f.F.VariantReturn {
				// Variant functions take their return type from the
				// first argument. f.F is this node's own copy of the
				// Func (newFunc receives it by value), so mutating it
				// does not affect the registered definition.
				f.F.Return = node.Return()
			}
		case itemString:
			s, err := strconv.Unquote(token.val)
			if err != nil {
				t.errorf("Unquoting error: %s", err)
			}
			f.append(newString(token.pos, token.val, s))
		case itemRightParen:
			return
		}
	}
}
// GetFunction looks name up in each function map registered on the tree,
// in registration order, returning the first match.
func (t *Tree) GetFunction(name string) (Func, bool) {
	for _, m := range t.funcs {
		if m == nil {
			continue
		}
		if f, found := m[name]; found {
			return f, true
		}
	}
	return Func{}, false
}
// String returns a string representation of the parse tree.
// Note: panics if called before a successful Parse (Root is nil).
func (t *Tree) String() string {
	return t.Root.String()
}

101
pkg/expr/mathexp/reduce.go Normal file
View File

@ -0,0 +1,101 @@
package mathexp
import (
"fmt"
"math"
"github.com/grafana/grafana-plugin-sdk-go/data"
)
// Sum adds all *float64 values in the field. Any nil element
// short-circuits the result to NaN; elements that are not *float64 are
// silently skipped. An empty field sums to 0.
func Sum(v *data.Field) *float64 {
	var sum float64
	for i := 0; i < v.Len(); i++ {
		if f, ok := v.At(i).(*float64); ok {
			if f == nil {
				nan := math.NaN()
				return &nan
			}
			sum += *f
		}
	}
	return &sum
}

// Avg returns the mean of the field's values via Sum. An empty field
// yields 0/0 == NaN; a nil element yields NaN through Sum.
func Avg(v *data.Field) *float64 {
	sum := Sum(v)
	f := *sum / float64(v.Len())
	return &f
}
// Min returns the smallest *float64 value in the field. An empty field
// or any nil element yields NaN; non-*float64 elements are skipped.
func Min(fv *data.Field) *float64 {
	var f float64
	if fv.Len() == 0 {
		nan := math.NaN()
		return &nan
	}
	for i := 0; i < fv.Len(); i++ {
		if v, ok := fv.At(i).(*float64); ok {
			if v == nil {
				nan := math.NaN()
				return &nan
			}
			// i == 0 seeds the running minimum with the first value.
			if i == 0 || *v < f {
				f = *v
			}
		}
	}
	return &f
}

// Max returns the largest *float64 value in the field. An empty field
// or any nil element yields NaN; non-*float64 elements are skipped.
func Max(fv *data.Field) *float64 {
	var f float64
	if fv.Len() == 0 {
		nan := math.NaN()
		return &nan
	}
	for i := 0; i < fv.Len(); i++ {
		if v, ok := fv.At(i).(*float64); ok {
			if v == nil {
				nan := math.NaN()
				return &nan
			}
			// i == 0 seeds the running maximum with the first value.
			if i == 0 || *v > f {
				f = *v
			}
		}
	}
	return &f
}
// Count returns the number of elements in the field as a float64.
func Count(fv *data.Field) *float64 {
	f := float64(fv.Len())
	return &f
}
// Reduce turns the Series into a Number based on the given reduction
// function ("sum", "mean", "min", "max", or "count"). The result is
// named "<rFunc>_<seriesName>" and carries a copy of the series' labels;
// an unknown reducer returns an error.
func (s Series) Reduce(rFunc string) (Number, error) {
	var l data.Labels
	if s.GetLabels() != nil {
		l = s.GetLabels().Copy()
	}
	number := NewNumber(fmt.Sprintf("%v_%v", rFunc, s.GetName()), l)
	var f *float64
	// Use the series' recorded value column. The previous hard-coded
	// Fields[1] was wrong whenever SeriesFromFrame found the value
	// column at index 0 (value-first frames).
	fVec := s.Frame.Fields[s.ValueIdx]
	switch rFunc {
	case "sum":
		f = Sum(fVec)
	case "mean":
		f = Avg(fVec)
	case "min":
		f = Min(fVec)
	case "max":
		f = Max(fVec)
	case "count":
		f = Count(fVec)
	default:
		return number, fmt.Errorf("reduction %v not implemented", rFunc)
	}
	number.SetValue(f)
	return number, nil
}

View File

@ -0,0 +1,240 @@
package mathexp
import (
"math"
"testing"
"time"
"github.com/google/go-cmp/cmp"
"github.com/grafana/grafana-plugin-sdk-go/data"
"github.com/stretchr/testify/require"
)
// seriesWithNil is a two-point series whose second value is nil; every
// reducer except count should turn it into NaN.
var seriesWithNil = Vars{
	"A": Results{
		[]Value{
			makeSeries("temp", nil, tp{
				time.Unix(5, 0), float64Pointer(2),
			}, tp{
				time.Unix(10, 0), nil,
			}),
		},
	},
}

// seriesEmpty is a series with no points, exercising each reducer's
// empty-input behavior.
var seriesEmpty = Vars{
	"A": Results{
		[]Value{
			makeSeries("temp", nil),
		},
	},
}
// TestSeriesReduce is a table-driven test of Series.Reduce covering each
// reducer, nil-containing series (NaN results), empty series, labeled
// series, and the error path for an unknown reducer name.
func TestSeriesReduce(t *testing.T) {
	var tests = []struct {
		name        string
		red         string
		vars        Vars
		varToReduce string
		errIs       require.ErrorAssertionFunc
		resultsIs   require.ComparisonAssertionFunc
		results     Results
	}{
		{
			name:        "foo reduction will error",
			red:         "foo",
			varToReduce: "A",
			vars:        aSeriesNullableTime,
			errIs:       require.Error,
			resultsIs:   require.Equal,
		},
		{
			name:        "sum series",
			red:         "sum",
			varToReduce: "A",
			vars:        aSeriesNullableTime,
			errIs:       require.NoError,
			resultsIs:   require.Equal,
			results: Results{
				[]Value{
					makeNumber("sum_", nil, float64Pointer(3)),
				},
			},
		},
		{
			name:        "sum series with a nil value",
			red:         "sum",
			varToReduce: "A",
			vars:        seriesWithNil,
			errIs:       require.NoError,
			resultsIs:   require.Equal,
			results: Results{
				[]Value{
					makeNumber("sum_", nil, NaN),
				},
			},
		},
		{
			name:        "sum empty series",
			red:         "sum",
			varToReduce: "A",
			vars:        seriesEmpty,
			errIs:       require.NoError,
			resultsIs:   require.Equal,
			results: Results{
				[]Value{
					makeNumber("sum_", nil, float64Pointer(0)),
				},
			},
		},
		{
			name:        "mean series with a nil value",
			red:         "mean",
			varToReduce: "A",
			vars:        seriesWithNil,
			errIs:       require.NoError,
			resultsIs:   require.Equal,
			results: Results{
				[]Value{
					makeNumber("mean_", nil, NaN),
				},
			},
		},
		{
			name:        "mean empty series",
			red:         "mean",
			varToReduce: "A",
			vars:        seriesEmpty,
			errIs:       require.NoError,
			resultsIs:   require.Equal,
			results: Results{
				[]Value{
					makeNumber("mean_", nil, NaN),
				},
			},
		},
		{
			name:        "min series with a nil value",
			red:         "min",
			varToReduce: "A",
			vars:        seriesWithNil,
			errIs:       require.NoError,
			resultsIs:   require.Equal,
			results: Results{
				[]Value{
					makeNumber("min_", nil, NaN),
				},
			},
		},
		{
			name:        "min empty series",
			red:         "min",
			varToReduce: "A",
			vars:        seriesEmpty,
			errIs:       require.NoError,
			resultsIs:   require.Equal,
			results: Results{
				[]Value{
					makeNumber("min_", nil, NaN),
				},
			},
		},
		{
			name:        "max series with a nil value",
			red:         "max",
			varToReduce: "A",
			vars:        seriesWithNil,
			errIs:       require.NoError,
			resultsIs:   require.Equal,
			results: Results{
				[]Value{
					makeNumber("max_", nil, NaN),
				},
			},
		},
		{
			name:        "max empty series",
			red:         "max",
			varToReduce: "A",
			vars:        seriesEmpty,
			errIs:       require.NoError,
			resultsIs:   require.Equal,
			results: Results{
				[]Value{
					makeNumber("max_", nil, NaN),
				},
			},
		},
		{
			name:        "mean series",
			red:         "mean",
			varToReduce: "A",
			vars:        aSeriesNullableTime,
			errIs:       require.NoError,
			resultsIs:   require.Equal,
			results: Results{
				[]Value{
					makeNumber("mean_", nil, float64Pointer(1.5)),
				},
			},
		},
		{
			name:        "count empty series",
			red:         "count",
			varToReduce: "A",
			vars:        seriesEmpty,
			errIs:       require.NoError,
			resultsIs:   require.Equal,
			results: Results{
				[]Value{
					makeNumber("count_", nil, float64Pointer(0)),
				},
			},
		},
		{
			name:        "mean series with labels",
			red:         "mean",
			varToReduce: "A",
			vars: Vars{
				"A": Results{
					[]Value{
						makeSeriesNullableTime("temp", data.Labels{"host": "a"}, nullTimeTP{
							unixTimePointer(5, 0), float64Pointer(2),
						}, nullTimeTP{
							unixTimePointer(10, 0), float64Pointer(1),
						}),
					},
				},
			},
			errIs:     require.NoError,
			resultsIs: require.Equal,
			results: Results{
				[]Value{
					makeNumber("mean_", data.Labels{"host": "a"}, float64Pointer(1.5)),
				},
			},
		},
	}
	for _, tt := range tests {
		t.Run(tt.name, func(t *testing.T) {
			results := Results{}
			seriesSet := tt.vars[tt.varToReduce]
			for _, series := range seriesSet.Values {
				ns, err := series.Value().(*Series).Reduce(tt.red)
				tt.errIs(t, err)
				if err != nil {
					return
				}
				results.Values = append(results.Values, ns)
			}
			// Treat NaN == NaN so NaN-producing reductions compare equal.
			opt := cmp.Comparer(func(x, y float64) bool {
				return (math.IsNaN(x) && math.IsNaN(y)) || x == y
			})
			options := append([]cmp.Option{opt}, data.FrameTestCompareOptions()...)
			if diff := cmp.Diff(tt.results, results, options...); diff != "" {
				t.Errorf("Result mismatch (-want +got):\n%s", diff)
			}
		})
	}
}

View File

@ -0,0 +1,83 @@
package mathexp
import (
"fmt"
"time"
"github.com/grafana/grafana-plugin-sdk-go/backend"
"github.com/grafana/grafana-plugin-sdk-go/data"
)
// Resample projects the series onto a new time axis starting at tr.From
// and advancing in fixed steps of interval, returning the resampled
// Series. Windows containing source points are aggregated with
// downsampler ("sum", "mean", "min", "max"); empty windows are filled by
// upsampler ("pad" repeats the last seen value, "backfilling" takes the
// next source value, "fillna" leaves nil). It errors when the time range
// is shorter than the interval or a sampler name is unknown.
// (The previous comment, "turns the Series into a Number", was copied
// from Reduce and was wrong.)
func (s Series) Resample(interval time.Duration, downsampler string, upsampler string, tr backend.TimeRange) (Series, error) {
	newSeriesLength := int(float64(tr.To.Sub(tr.From).Nanoseconds()) / float64(interval.Nanoseconds()))
	if newSeriesLength <= 0 {
		return s, fmt.Errorf("the series cannot be sampled further; the time range is shorter than the interval")
	}
	resampled := NewSeries(s.GetName(), s.GetLabels(), s.TimeIdx, s.TimeIsNullable, s.ValueIdx, s.ValueIsNullabe, newSeriesLength+1)
	bookmark := 0         // index of the first source point not yet consumed
	var lastSeen *float64 // most recent source value, used by "pad"
	idx := 0
	t := tr.From
	for !t.After(tr.To) && idx <= newSeriesLength {
		// Collect every unconsumed source point with timestamp <= t.
		vals := make([]*float64, 0)
		sIdx := bookmark
		for {
			if sIdx == s.Len() {
				break
			}
			st, v := s.GetPoint(sIdx)
			if st.After(t) {
				break
			}
			bookmark++
			sIdx++
			lastSeen = v
			vals = append(vals, v)
		}
		var value *float64
		if len(vals) == 0 { // upsampling
			switch upsampler {
			case "pad":
				if lastSeen != nil {
					value = lastSeen
				} else {
					value = nil
				}
			case "backfilling":
				if sIdx == s.Len() { // no vals left
					value = nil
				} else {
					_, value = s.GetPoint(sIdx)
				}
			case "fillna":
				value = nil
			default:
				return s, fmt.Errorf("upsampling %v not implemented", upsampler)
			}
		} else { // downsampling
			fVec := data.NewField("", s.GetLabels(), vals)
			var tmp *float64
			switch downsampler {
			case "sum":
				tmp = Sum(fVec)
			case "mean":
				tmp = Avg(fVec)
			case "min":
				tmp = Min(fVec)
			case "max":
				tmp = Max(fVec)
			default:
				return s, fmt.Errorf("downsampling %v not implemented", downsampler)
			}
			value = tmp
		}
		tv := t // this copy is required, otherwise all points keep the latest timestamp; anything better?
		if err := resampled.SetPoint(idx, &tv, value); err != nil {
			return resampled, err
		}
		t = t.Add(interval)
		idx++
	}
	return resampled, nil
}

View File

@ -0,0 +1,260 @@
package mathexp
import (
"testing"
"time"
"github.com/grafana/grafana-plugin-sdk-go/backend"
"github.com/stretchr/testify/assert"
"github.com/stretchr/testify/require"
)
// TestResampleSeries is a table-driven test of Series.Resample. Cases
// with a nil expected series (Frame == nil) assert the error path (range
// shorter than interval, inverted range); the rest pin down exact output
// for each downsampler and upsampler combination.
func TestResampleSeries(t *testing.T) {
	var tests = []struct {
		name             string
		interval         time.Duration
		downsampler      string
		upsampler        string
		timeRange        backend.TimeRange
		seriesToResample Series
		series           Series
	}{
		{
			name:        "resample series: time range shorter than the rule interval",
			interval:    time.Second * 5,
			downsampler: "mean",
			upsampler:   "fillna",
			timeRange: backend.TimeRange{
				From: time.Unix(0, 0),
				To:   time.Unix(4, 0),
			},
			seriesToResample: makeSeriesNullableTime("", nil, nullTimeTP{
				unixTimePointer(2, 0), float64Pointer(2),
			}, nullTimeTP{
				unixTimePointer(7, 0), float64Pointer(1),
			}),
		},
		{
			name:        "resample series: invalid time range",
			interval:    time.Second * 5,
			downsampler: "mean",
			upsampler:   "fillna",
			timeRange: backend.TimeRange{
				From: time.Unix(11, 0),
				To:   time.Unix(0, 0),
			},
			seriesToResample: makeSeriesNullableTime("", nil, nullTimeTP{
				unixTimePointer(2, 0), float64Pointer(2),
			}, nullTimeTP{
				unixTimePointer(7, 0), float64Pointer(1),
			}),
		},
		{
			name:        "resample series: downsampling (mean / pad)",
			interval:    time.Second * 5,
			downsampler: "mean",
			upsampler:   "pad",
			timeRange: backend.TimeRange{
				From: time.Unix(0, 0),
				To:   time.Unix(16, 0),
			},
			seriesToResample: makeSeriesNullableTime("", nil, nullTimeTP{
				unixTimePointer(2, 0), float64Pointer(2),
			}, nullTimeTP{
				unixTimePointer(4, 0), float64Pointer(3),
			}, nullTimeTP{
				unixTimePointer(7, 0), float64Pointer(1),
			}, nullTimeTP{
				unixTimePointer(9, 0), float64Pointer(2),
			}),
			series: makeSeriesNullableTime("", nil, nullTimeTP{
				unixTimePointer(0, 0), nil,
			}, nullTimeTP{
				unixTimePointer(5, 0), float64Pointer(2.5),
			}, nullTimeTP{
				unixTimePointer(10, 0), float64Pointer(1.5),
			}, nullTimeTP{
				unixTimePointer(15, 0), float64Pointer(2),
			}),
		},
		{
			name:        "resample series: downsampling (max / fillna)",
			interval:    time.Second * 5,
			downsampler: "max",
			upsampler:   "fillna",
			timeRange: backend.TimeRange{
				From: time.Unix(0, 0),
				To:   time.Unix(16, 0),
			},
			seriesToResample: makeSeriesNullableTime("", nil, nullTimeTP{
				unixTimePointer(2, 0), float64Pointer(2),
			}, nullTimeTP{
				unixTimePointer(4, 0), float64Pointer(3),
			}, nullTimeTP{
				unixTimePointer(7, 0), float64Pointer(1),
			}, nullTimeTP{
				unixTimePointer(9, 0), float64Pointer(2),
			}),
			series: makeSeriesNullableTime("", nil, nullTimeTP{
				unixTimePointer(0, 0), nil,
			}, nullTimeTP{
				unixTimePointer(5, 0), float64Pointer(3),
			}, nullTimeTP{
				unixTimePointer(10, 0), float64Pointer(2),
			}, nullTimeTP{
				unixTimePointer(15, 0), nil,
			}),
		},
		{
			name:        "resample series: downsampling (min / fillna)",
			interval:    time.Second * 5,
			downsampler: "min",
			upsampler:   "fillna",
			timeRange: backend.TimeRange{
				From: time.Unix(0, 0),
				To:   time.Unix(16, 0),
			},
			seriesToResample: makeSeriesNullableTime("", nil, nullTimeTP{
				unixTimePointer(2, 0), float64Pointer(2),
			}, nullTimeTP{
				unixTimePointer(4, 0), float64Pointer(3),
			}, nullTimeTP{
				unixTimePointer(7, 0), float64Pointer(1),
			}, nullTimeTP{
				unixTimePointer(9, 0), float64Pointer(2),
			}),
			series: makeSeriesNullableTime("", nil, nullTimeTP{
				unixTimePointer(0, 0), nil,
			}, nullTimeTP{
				unixTimePointer(5, 0), float64Pointer(2),
			}, nullTimeTP{
				unixTimePointer(10, 0), float64Pointer(1),
			}, nullTimeTP{
				unixTimePointer(15, 0), nil,
			}),
		},
		{
			name:        "resample series: downsampling (sum / fillna)",
			interval:    time.Second * 5,
			downsampler: "sum",
			upsampler:   "fillna",
			timeRange: backend.TimeRange{
				From: time.Unix(0, 0),
				To:   time.Unix(16, 0),
			},
			seriesToResample: makeSeriesNullableTime("", nil, nullTimeTP{
				unixTimePointer(2, 0), float64Pointer(2),
			}, nullTimeTP{
				unixTimePointer(4, 0), float64Pointer(3),
			}, nullTimeTP{
				unixTimePointer(7, 0), float64Pointer(1),
			}, nullTimeTP{
				unixTimePointer(9, 0), float64Pointer(2),
			}),
			series: makeSeriesNullableTime("", nil, nullTimeTP{
				unixTimePointer(0, 0), nil,
			}, nullTimeTP{
				unixTimePointer(5, 0), float64Pointer(5),
			}, nullTimeTP{
				unixTimePointer(10, 0), float64Pointer(3),
			}, nullTimeTP{
				unixTimePointer(15, 0), nil,
			}),
		},
		{
			name:        "resample series: downsampling (mean / fillna)",
			interval:    time.Second * 5,
			downsampler: "mean",
			upsampler:   "fillna",
			timeRange: backend.TimeRange{
				From: time.Unix(0, 0),
				To:   time.Unix(16, 0),
			},
			seriesToResample: makeSeriesNullableTime("", nil, nullTimeTP{
				unixTimePointer(2, 0), float64Pointer(2),
			}, nullTimeTP{
				unixTimePointer(4, 0), float64Pointer(3),
			}, nullTimeTP{
				unixTimePointer(7, 0), float64Pointer(1),
			}, nullTimeTP{
				unixTimePointer(9, 0), float64Pointer(2),
			}),
			series: makeSeriesNullableTime("", nil, nullTimeTP{
				unixTimePointer(0, 0), nil,
			}, nullTimeTP{
				unixTimePointer(5, 0), float64Pointer(2.5),
			}, nullTimeTP{
				unixTimePointer(10, 0), float64Pointer(1.5),
			}, nullTimeTP{
				unixTimePointer(15, 0), nil,
			}),
		},
		{
			name:        "resample series: upsampling (mean / pad )",
			interval:    time.Second * 2,
			downsampler: "mean",
			upsampler:   "pad",
			timeRange: backend.TimeRange{
				From: time.Unix(0, 0),
				To:   time.Unix(11, 0),
			},
			seriesToResample: makeSeriesNullableTime("", nil, nullTimeTP{
				unixTimePointer(2, 0), float64Pointer(2),
			}, nullTimeTP{
				unixTimePointer(7, 0), float64Pointer(1),
			}),
			series: makeSeriesNullableTime("", nil, nullTimeTP{
				unixTimePointer(0, 0), nil,
			}, nullTimeTP{
				unixTimePointer(2, 0), float64Pointer(2),
			}, nullTimeTP{
				unixTimePointer(4, 0), float64Pointer(2),
			}, nullTimeTP{
				unixTimePointer(6, 0), float64Pointer(2),
			}, nullTimeTP{
				unixTimePointer(8, 0), float64Pointer(1),
			}, nullTimeTP{
				unixTimePointer(10, 0), float64Pointer(1),
			}),
		},
		{
			name:        "resample series: upsampling (mean / backfilling )",
			interval:    time.Second * 2,
			downsampler: "mean",
			upsampler:   "backfilling",
			timeRange: backend.TimeRange{
				From: time.Unix(0, 0),
				To:   time.Unix(11, 0),
			},
			seriesToResample: makeSeriesNullableTime("", nil, nullTimeTP{
				unixTimePointer(2, 0), float64Pointer(2),
			}, nullTimeTP{
				unixTimePointer(7, 0), float64Pointer(1),
			}),
			series: makeSeriesNullableTime("", nil, nullTimeTP{
				unixTimePointer(0, 0), float64Pointer(2),
			}, nullTimeTP{
				unixTimePointer(2, 0), float64Pointer(2),
			}, nullTimeTP{
				unixTimePointer(4, 0), float64Pointer(1),
			}, nullTimeTP{
				unixTimePointer(6, 0), float64Pointer(1),
			}, nullTimeTP{
				unixTimePointer(8, 0), float64Pointer(1),
			}, nullTimeTP{
				unixTimePointer(10, 0), nil,
			}),
		},
	}
	for _, tt := range tests {
		t.Run(tt.name, func(t *testing.T) {
			series, err := tt.seriesToResample.Resample(tt.interval, tt.downsampler, tt.upsampler, tt.timeRange)
			if tt.series.Frame == nil {
				require.Error(t, err)
			} else {
				require.NoError(t, err)
				assert.Equal(t, tt.series, series)
			}
		})
	}
}

View File

@ -0,0 +1,199 @@
package mathexp
import (
"fmt"
"sort"
"time"
"github.com/grafana/grafana-plugin-sdk-go/data"
"github.com/grafana/grafana/pkg/expr/mathexp/parse"
)
// Series wraps a *data.Frame that holds exactly one time field and one
// float64 value field, plus cached metadata about where those fields live
// in the frame and whether they are nullable.
type Series struct {
	Frame *data.Frame // underlying two-field (time, value) frame
	// TimeIsNullable is true when the time field holds *time.Time values.
	TimeIsNullable bool
	// TimeIdx is the index of the time field within Frame.Fields.
	TimeIdx int
	// ValueIsNullabe is true when the value field holds *float64 values.
	// NOTE(review): name is missing an "l" ("Nullabe"); renaming would touch
	// every caller, so it is only flagged here.
	ValueIsNullabe bool
	// ValueIdx is the index of the value field within Frame.Fields.
	ValueIdx int
	// TODO:
	// - Multiple Value Fields
	// - Value can be different number types
}
// SeriesFromFrame validates that the dataframe can be considered a Series
// type and populates meta information on the returned Series about the
// frame (field indices and nullability).
func SeriesFromFrame(frame *data.Frame) (s Series, err error) {
	if len(frame.Fields) != 2 {
		return s, fmt.Errorf("frame must have exactly two fields to be a series, has %v", len(frame.Fields))
	}
	timeFound := false
	valueFound := false
	for idx, field := range frame.Fields {
		fieldType := field.Type()
		switch fieldType {
		case data.FieldTypeTime, data.FieldTypeNullableTime:
			s.TimeIdx = idx
			s.TimeIsNullable = fieldType == data.FieldTypeNullableTime
			timeFound = true
		case data.FieldTypeFloat64, data.FieldTypeNullableFloat64:
			s.ValueIdx = idx
			s.ValueIsNullabe = fieldType == data.FieldTypeNullableFloat64
			valueFound = true
		}
	}
	if !timeFound {
		return s, fmt.Errorf("no time column found in frame %v", frame.Name)
	}
	if !valueFound {
		return s, fmt.Errorf("no float64 value column found in frame %v", frame.Name)
	}
	s.Frame = frame
	return s, nil
}
// NewSeries returns a dataframe of type Series with pre-allocated (but
// zero-valued) time and value vectors of the given size. timeIdx/valueIdx
// select which of the two frame fields holds which vector, and the
// nullable flags select pointer element types.
func NewSeries(name string, labels data.Labels, timeIdx int, nullableTime bool, valueIdx int, nullableValue bool, size int) Series {
	fields := make([]*data.Field, 2)
	if nullableTime {
		fields[timeIdx] = data.NewField("Time", nil, make([]*time.Time, size))
	} else {
		fields[timeIdx] = data.NewField("Time", nil, make([]time.Time, size))
	}
	if nullableValue {
		fields[valueIdx] = data.NewField(name, labels, make([]*float64, size))
	} else {
		fields[valueIdx] = data.NewField(name, labels, make([]float64, size))
	}
	return Series{
		Frame:          data.NewFrame("", fields...),
		TimeIsNullable: nullableTime,
		TimeIdx:        timeIdx,
		ValueIsNullabe: nullableValue,
		ValueIdx:       valueIdx,
	}
}
// Type returns the Value type and allows it to fulfill the Value interface.
func (s Series) Type() parse.ReturnType { return parse.TypeSeriesSet }
// Value returns the actual value allows it to fulfill the Value interface.
func (s Series) Value() interface{} { return &s }
// GetLabels returns the labels of the value field.
func (s Series) GetLabels() data.Labels { return s.Frame.Fields[s.ValueIdx].Labels }
// SetLabels replaces the labels of the value field.
func (s Series) SetLabels(ls data.Labels) { s.Frame.Fields[s.ValueIdx].Labels = ls }
// GetName returns the name of the underlying frame.
func (s Series) GetName() string { return s.Frame.Name }
// AsDataFrame returns the underlying *data.Frame.
func (s Series) AsDataFrame() *data.Frame { return s.Frame }
// GetPoint returns the time and value at the specified index.
func (s Series) GetPoint(pointIdx int) (*time.Time, *float64) {
	return s.GetTime(pointIdx), s.GetValue(pointIdx)
}
// SetPoint sets the time and value on the corresponding vectors at the
// specified index. Passing a nil time or value for a non-nullable field
// is an error.
func (s Series) SetPoint(pointIdx int, t *time.Time, f *float64) (err error) {
	timeField := s.Frame.Fields[s.TimeIdx]
	switch {
	case s.TimeIsNullable:
		timeField.Set(pointIdx, t)
	case t == nil:
		return fmt.Errorf("can not set null time value on non-nullable time field for series name %v", s.Frame.Name)
	default:
		timeField.Set(pointIdx, *t)
	}
	valueField := s.Frame.Fields[s.ValueIdx]
	switch {
	case s.ValueIsNullabe:
		valueField.Set(pointIdx, f)
	case f == nil:
		return fmt.Errorf("can not set null float value on non-nullable float field for series name %v", s.Frame.Name)
	default:
		valueField.Set(pointIdx, *f)
	}
	return nil
}
// AppendPoint appends a point (time/value) to the end of the series.
// Passing a nil time or value for a non-nullable field is an error.
// NOTE(review): pointIdx is accepted but never used — the point is always
// appended at the end; confirm whether the parameter can be dropped.
func (s Series) AppendPoint(pointIdx int, t *time.Time, f *float64) (err error) {
	if s.TimeIsNullable {
		s.Frame.Fields[s.TimeIdx].Append(t)
	} else {
		if t == nil {
			return fmt.Errorf("can not append null time value on non-nullable time field for series name %v", s.Frame.Name)
		}
		s.Frame.Fields[s.TimeIdx].Append(*t)
	}
	if s.ValueIsNullabe {
		s.Frame.Fields[s.ValueIdx].Append(f)
	} else {
		if f == nil {
			return fmt.Errorf("can not append null float value on non-nullable float field for series name %v", s.Frame.Name)
		}
		s.Frame.Fields[s.ValueIdx].Append(*f)
	}
	return
}
// Len returns the number of points in the series.
func (s Series) Len() int {
	return s.Frame.Fields[0].Len()
}
// GetTime returns the time at the specified index.
// For a non-nullable time field, a pointer to a copy is returned.
func (s Series) GetTime(pointIdx int) *time.Time {
	if s.TimeIsNullable {
		return s.Frame.Fields[s.TimeIdx].At(pointIdx).(*time.Time)
	}
	t := s.Frame.Fields[s.TimeIdx].At(pointIdx).(time.Time)
	return &t
}
// GetValue returns the float value at the specified index.
// For a non-nullable value field, a pointer to a copy is returned.
func (s Series) GetValue(pointIdx int) *float64 {
	if s.ValueIsNullabe {
		return s.Frame.Fields[s.ValueIdx].At(pointIdx).(*float64)
	}
	f := s.Frame.Fields[s.ValueIdx].At(pointIdx).(float64)
	return &f
}
// SortByTime sorts the series in place by time, oldest to newest.
// If desc is true, it sorts newest to oldest instead.
// If any time values are nil, it will panic.
func (s Series) SortByTime(desc bool) {
	var order sort.Interface = SortSeriesByTime(s)
	if desc {
		order = sort.Reverse(order)
	}
	sort.Sort(order)
}
// SortSeriesByTime allows a Series to be sorted by time.
// The sort interface will panic if any timestamps are null.
type SortSeriesByTime Series
// Len implements sort.Interface.
func (ss SortSeriesByTime) Len() int { return Series(ss).Len() }
// Swap implements sort.Interface by exchanging the (time, value) pairs at i and j.
func (ss SortSeriesByTime) Swap(i, j int) {
	iTimeVal, iFVal := Series(ss).GetPoint(i)
	jTimeVal, jFVal := Series(ss).GetPoint(j)
	// Errors ignored: values just read from the series are always settable
	// back into the same fields.
	_ = Series(ss).SetPoint(j, iTimeVal, iFVal)
	_ = Series(ss).SetPoint(i, jTimeVal, jFVal)
}
// Less implements sort.Interface; it dereferences both times and therefore
// panics when a timestamp is null.
func (ss SortSeriesByTime) Less(i, j int) bool {
	iTimeVal := Series(ss).GetTime(i)
	jTimeVal := Series(ss).GetTime(j)
	return iTimeVal.Before(*jTimeVal)
}

112
pkg/expr/mathexp/types.go Normal file
View File

@ -0,0 +1,112 @@
package mathexp
import (
"github.com/grafana/grafana-plugin-sdk-go/data"
"github.com/grafana/grafana/pkg/expr/mathexp/parse"
)
// Results is a container for Value interfaces returned by pipeline execution.
type Results struct {
	Values Values
}
// Values is a slice of Value interfaces.
type Values []Value
// AsDataFrames returns each value as a frame, stamping the given refID on
// every returned frame.
func (vals Values) AsDataFrames(refID string) []*data.Frame {
	frames := make([]*data.Frame, 0, len(vals))
	for _, v := range vals {
		frame := v.AsDataFrame()
		frame.RefID = refID
		frames = append(frames, frame)
	}
	return frames
}
// Value is the interface that holds different types such as a Scalar, Series, or Number.
// All Value implementations should be backed by a *data.Frame.
type Value interface {
	Type() parse.ReturnType   // the expression parser return type of the value
	Value() interface{}       // the concrete implementation value
	GetLabels() data.Labels
	SetLabels(data.Labels)
	GetName() string
	AsDataFrame() *data.Frame // the backing frame
}
// Scalar is the type that holds a single number constant.
// Before returning from an expression it will be wrapped in a
// data frame.
type Scalar struct{ Frame *data.Frame }
// Type returns the Value type and allows it to fulfill the Value interface.
func (s Scalar) Type() parse.ReturnType { return parse.TypeScalar }
// Value returns the actual value allows it to fulfill the Value interface.
func (s Scalar) Value() interface{} { return s }
// GetLabels returns nil; a scalar carries no labels.
func (s Scalar) GetLabels() data.Labels { return nil }
// SetLabels is a no-op; a scalar carries no labels.
func (s Scalar) SetLabels(ls data.Labels) {}
// GetName returns the name of the backing frame.
func (s Scalar) GetName() string { return s.Frame.Name }
// AsDataFrame returns the underlying *data.Frame.
func (s Scalar) AsDataFrame() *data.Frame { return s.Frame }
// NewScalar creates a Scalar holding value f.
func NewScalar(f *float64) Scalar {
	frame := data.NewFrame("",
		data.NewField("Scalar", nil, []*float64{f}),
	)
	return Scalar{frame}
}
// NewScalarResults creates a Results holding a single Scalar.
func NewScalarResults(f *float64) Results {
	return Results{
		Values: []Value{NewScalar(f)},
	}
}
// GetFloat64Value retrieves the single scalar value from the data.
// It assumes the frame has the NewScalar shape (one nullable float cell).
func (s Scalar) GetFloat64Value() *float64 {
	return s.Frame.At(0, 0).(*float64)
}
// Number holds a single labelled float64 value.
type Number struct{ Frame *data.Frame }
// Type returns the Value type and allows it to fulfill the Value interface.
func (n Number) Type() parse.ReturnType { return parse.TypeNumberSet }
// Value returns the actual value allows it to fulfill the Value interface.
func (n Number) Value() interface{} { return &n }
// GetLabels returns the labels of the single value field.
func (n Number) GetLabels() data.Labels { return n.Frame.Fields[0].Labels }
// SetLabels replaces the labels of the single value field.
func (n Number) SetLabels(ls data.Labels) { n.Frame.Fields[0].Labels = ls }
// GetName returns the name of the backing frame.
func (n Number) GetName() string { return n.Frame.Name }
// AsDataFrame returns the underlying *data.Frame.
func (n Number) AsDataFrame() *data.Frame { return n.Frame }
// SetValue sets the value of the Number to float64 pointer f.
func (n Number) SetValue(f *float64) {
	n.Frame.Set(0, 0, f)
}
// GetFloat64Value retrieves the single scalar value from the data.
func (n Number) GetFloat64Value() *float64 {
	return n.Frame.At(0, 0).(*float64)
}
// NewNumber returns a Number backed by a frame holding a single-element
// nullable float64 vector.
func NewNumber(name string, labels data.Labels) Number {
	return Number{
		data.NewFrame("",
			data.NewField(name, labels, make([]*float64, 1)),
		),
	}
}

View File

@ -0,0 +1,168 @@
package mathexp
import (
"testing"
"time"
"github.com/grafana/grafana-plugin-sdk-go/data"
"github.com/stretchr/testify/assert"
)
// TestSeriesSort checks Series.SortByTime in both ascending and descending
// order using a table of unordered nullable-time series.
func TestSeriesSort(t *testing.T) {
	var tests = []struct {
		name           string
		descending     bool
		series         Series
		sortedSeriesIs assert.ComparisonAssertionFunc
		sortedSeries   Series
		panics         assert.PanicTestFunc
	}{
		{
			name:       "unordered series should sort by time ascending",
			descending: false,
			series: makeSeriesNullableTime("", nil, nullTimeTP{
				unixTimePointer(3, 0), float64Pointer(3),
			}, nullTimeTP{
				unixTimePointer(1, 0), float64Pointer(1),
			}, nullTimeTP{
				unixTimePointer(2, 0), float64Pointer(2),
			}),
			sortedSeriesIs: assert.Equal,
			sortedSeries: makeSeriesNullableTime("", nil, nullTimeTP{
				unixTimePointer(1, 0), float64Pointer(1),
			}, nullTimeTP{
				unixTimePointer(2, 0), float64Pointer(2),
			}, nullTimeTP{
				unixTimePointer(3, 0), float64Pointer(3),
			}),
		},
		{
			name:       "unordered series should sort by time descending",
			descending: true,
			series: makeSeriesNullableTime("", nil, nullTimeTP{
				unixTimePointer(3, 0), float64Pointer(3),
			}, nullTimeTP{
				unixTimePointer(1, 0), float64Pointer(1),
			}, nullTimeTP{
				unixTimePointer(2, 0), float64Pointer(2),
			}),
			sortedSeriesIs: assert.Equal,
			sortedSeries: makeSeriesNullableTime("", nil, nullTimeTP{
				unixTimePointer(3, 0), float64Pointer(3),
			}, nullTimeTP{
				unixTimePointer(2, 0), float64Pointer(2),
			}, nullTimeTP{
				unixTimePointer(1, 0), float64Pointer(1),
			}),
		},
	}
	for _, tt := range tests {
		t.Run(tt.name, func(t *testing.T) {
			// SortByTime mutates the series in place.
			tt.series.SortByTime(tt.descending)
			tt.sortedSeriesIs(t, tt.series, tt.sortedSeries)
		})
	}
}
// TestSeriesFromFrame verifies which two-field frame layouts are accepted
// as a Series, and that the detected time/value indices and nullability
// flags are recorded correctly.
func TestSeriesFromFrame(t *testing.T) {
	var tests = []struct {
		name   string
		frame  *data.Frame
		errIs  assert.ErrorAssertionFunc
		Is     assert.ComparisonAssertionFunc
		Series Series
	}{
		{
			name: "[]time, []float frame should convert",
			frame: &data.Frame{
				Name: "test",
				Fields: []*data.Field{
					data.NewField("time", nil, []time.Time{}),
					data.NewField("value", nil, []float64{}),
				},
			},
			errIs: assert.NoError,
			Is:    assert.Equal,
			Series: Series{
				Frame: &data.Frame{
					Name: "test",
					Fields: []*data.Field{
						data.NewField("time", nil, []time.Time{}),
						data.NewField("value", nil, []float64{}),
					},
				},
				TimeIdx:        0,
				TimeIsNullable: false,
				ValueIdx:       1,
				ValueIsNullabe: false,
			},
		},
		{
			name: "[]*float, []*time frame should convert",
			frame: &data.Frame{
				Name: "test",
				Fields: []*data.Field{
					data.NewField("value", nil, []*float64{float64Pointer(5)}),
					data.NewField("time", nil, []*time.Time{unixTimePointer(5, 0)}),
				},
			},
			errIs: assert.NoError,
			Is:    assert.Equal,
			Series: Series{
				Frame: &data.Frame{
					Name: "test",
					Fields: []*data.Field{
						data.NewField("value", nil, []*float64{float64Pointer(5)}),
						data.NewField("time", nil, []*time.Time{unixTimePointer(5, 0)}),
					},
				},
				TimeIdx:        1,
				TimeIsNullable: true,
				ValueIdx:       0,
				ValueIsNullabe: true,
			},
		},
		{
			name: "[]*time, []*time frame should error",
			frame: &data.Frame{
				Name: "test",
				Fields: []*data.Field{
					data.NewField("time", nil, []*time.Time{}),
					data.NewField("time", nil, []*time.Time{}),
				},
			},
			errIs: assert.Error,
		},
		{
			name: "[]*float64, []float64 frame should error",
			frame: &data.Frame{
				Name: "test",
				Fields: []*data.Field{
					data.NewField("value", nil, []*float64{}),
					data.NewField("value", nil, []*float64{}),
				},
			},
			errIs: assert.Error,
		},
		{
			name: "[]*float64 frame should error",
			frame: &data.Frame{
				Name: "test",
				Fields: []*data.Field{
					data.NewField("value", nil, []*float64{}),
				},
			},
			errIs: assert.Error,
		},
	}
	for _, tt := range tests {
		t.Run(tt.name, func(t *testing.T) {
			s, err := SeriesFromFrame(tt.frame)
			tt.errIs(t, err)
			if err == nil {
				tt.Is(t, s, tt.Series)
			}
		})
	}
}

View File

@ -0,0 +1,244 @@
package mathexp
import (
"testing"
"github.com/grafana/grafana-plugin-sdk-go/data"
"github.com/stretchr/testify/assert"
)
// Test_union covers label-matching behavior when joining two Results:
// exact label matches, subset matches (the superset's labels win), and the
// special case where a single-valued side unions with everything.
func Test_union(t *testing.T) {
	var tests = []struct {
		name      string
		aResults  Results
		bResults  Results
		unionsAre assert.ComparisonAssertionFunc
		unions    []*Union
	}{
		{
			name: "equal tags single union",
			aResults: Results{
				Values: Values{
					makeSeriesNullableTime("a", data.Labels{"id": "1"}),
				},
			},
			bResults: Results{
				Values: Values{
					makeSeriesNullableTime("b", data.Labels{"id": "1"}),
				},
			},
			unionsAre: assert.EqualValues,
			unions: []*Union{
				{
					Labels: data.Labels{"id": "1"},
					A:      makeSeriesNullableTime("a", data.Labels{"id": "1"}),
					B:      makeSeriesNullableTime("b", data.Labels{"id": "1"}),
				},
			},
		},
		{
			name: "equal tags keys with no matching values will result in a union when len(A) == 1 && len(B) == 1",
			aResults: Results{
				Values: Values{
					makeSeriesNullableTime("a", data.Labels{"id": "1"}),
				},
			},
			bResults: Results{
				Values: Values{
					makeSeriesNullableTime("b", data.Labels{"id": "2"}),
				},
			},
			unionsAre: assert.EqualValues,
			unions: []*Union{
				{
					A: makeSeriesNullableTime("a", data.Labels{"id": "1"}),
					B: makeSeriesNullableTime("b", data.Labels{"id": "2"}),
				},
			},
		},
		{
			name: "equal tags keys with no matching values will result in no unions when len(A) != 1 && len(B) != 1",
			aResults: Results{
				Values: Values{
					makeSeriesNullableTime("a", data.Labels{"id": "1"}),
					makeSeriesNullableTime("q", data.Labels{"id": "3"}),
				},
			},
			bResults: Results{
				Values: Values{
					makeSeriesNullableTime("b", data.Labels{"id": "2"}),
				},
			},
			unionsAre: assert.EqualValues,
			unions:    []*Union{},
		},
		{
			name:      "empty results will result in no unions",
			aResults:  Results{},
			bResults:  Results{},
			unionsAre: assert.EqualValues,
			unions:    []*Union{},
		},
		{
			name: "incompatible tags of different length with will result in no unions when len(A) != 1 && len(B) != 1",
			aResults: Results{
				Values: Values{
					makeSeriesNullableTime("a", data.Labels{"ID": "1"}),
					makeSeriesNullableTime("q", data.Labels{"ID": "3"}),
				},
			},
			bResults: Results{
				Values: Values{
					makeSeriesNullableTime("b", data.Labels{"id": "1", "fish": "red snapper"}),
				},
			},
			unionsAre: assert.EqualValues,
			unions:    []*Union{},
		},
		{
			name: "A is subset of B results in single union with Labels of B",
			aResults: Results{
				Values: Values{
					makeSeriesNullableTime("a", data.Labels{"id": "1"}),
				},
			},
			bResults: Results{
				Values: Values{
					makeSeriesNullableTime("b", data.Labels{"id": "1", "fish": "herring"}),
				},
			},
			unionsAre: assert.EqualValues,
			unions: []*Union{
				{
					Labels: data.Labels{"id": "1", "fish": "herring"}, // Union gets the labels that is not the subset
					A:      makeSeriesNullableTime("a", data.Labels{"id": "1"}),
					B:      makeSeriesNullableTime("b", data.Labels{"id": "1", "fish": "herring"}),
				},
			},
		},
		{
			name: "B is subset of A results in single union with Labels of A",
			aResults: Results{
				Values: Values{
					makeSeriesNullableTime("a", data.Labels{"id": "1", "fish": "herring"}),
				},
			},
			bResults: Results{
				Values: Values{
					makeSeriesNullableTime("b", data.Labels{"id": "1"}),
				},
			},
			unionsAre: assert.EqualValues,
			unions: []*Union{
				{
					Labels: data.Labels{"id": "1", "fish": "herring"}, // Union gets the labels that is not the subset
					A:      makeSeriesNullableTime("a", data.Labels{"id": "1", "fish": "herring"}),
					B:      makeSeriesNullableTime("b", data.Labels{"id": "1"}),
				},
			},
		},
		{
			name: "single valued A is subset of many valued B, results in many union with Labels of B",
			aResults: Results{
				Values: Values{
					makeSeriesNullableTime("a", data.Labels{"id": "1"}),
				},
			},
			bResults: Results{
				Values: Values{
					makeSeriesNullableTime("b", data.Labels{"id": "1", "fish": "herring"}),
					makeSeriesNullableTime("b", data.Labels{"id": "1", "fish": "red snapper"}),
				},
			},
			unionsAre: assert.EqualValues,
			unions: []*Union{
				{
					Labels: data.Labels{"id": "1", "fish": "herring"},
					A:      makeSeriesNullableTime("a", data.Labels{"id": "1"}),
					B:      makeSeriesNullableTime("b", data.Labels{"id": "1", "fish": "herring"}),
				},
				{
					Labels: data.Labels{"id": "1", "fish": "red snapper"},
					A:      makeSeriesNullableTime("a", data.Labels{"id": "1"}),
					B:      makeSeriesNullableTime("b", data.Labels{"id": "1", "fish": "red snapper"}),
				},
			},
		},
		{
			name: "A with different tags keys lengths to B makes 3 unions (with two unions have matching tags)",
			// Is this the behavior we want? A result within the results will no longer
			// be uniquely identifiable.
			aResults: Results{
				Values: Values{
					makeSeriesNullableTime("a", data.Labels{"id": "1"}),
					makeSeriesNullableTime("aa", data.Labels{"id": "1", "fish": "herring"}),
				},
			},
			bResults: Results{
				Values: Values{
					makeSeriesNullableTime("b", data.Labels{"id": "1", "fish": "herring"}),
					makeSeriesNullableTime("bb", data.Labels{"id": "1", "fish": "red snapper"}),
				},
			},
			unionsAre: assert.EqualValues,
			unions: []*Union{
				{
					Labels: data.Labels{"id": "1", "fish": "herring"},
					A:      makeSeriesNullableTime("a", data.Labels{"id": "1"}),
					B:      makeSeriesNullableTime("b", data.Labels{"id": "1", "fish": "herring"}),
				},
				{
					Labels: data.Labels{"id": "1", "fish": "red snapper"},
					A:      makeSeriesNullableTime("a", data.Labels{"id": "1"}),
					B:      makeSeriesNullableTime("bb", data.Labels{"id": "1", "fish": "red snapper"}),
				},
				{
					Labels: data.Labels{"id": "1", "fish": "herring"},
					A:      makeSeriesNullableTime("aa", data.Labels{"id": "1", "fish": "herring"}),
					B:      makeSeriesNullableTime("b", data.Labels{"id": "1", "fish": "herring"}),
				},
			},
		},
		{
			name: "B with different tags keys lengths to A makes 3 unions (with two unions have matching tags)",
			// Is this the behavior we want? A result within the results will no longer
			// be uniquely identifiable.
			aResults: Results{
				Values: Values{
					makeSeriesNullableTime("b", data.Labels{"id": "1", "fish": "herring"}),
					makeSeriesNullableTime("bb", data.Labels{"id": "1", "fish": "red snapper"}),
				},
			},
			bResults: Results{
				Values: Values{
					makeSeriesNullableTime("a", data.Labels{"id": "1"}),
					makeSeriesNullableTime("aa", data.Labels{"id": "1", "fish": "herring"}),
				},
			},
			unionsAre: assert.EqualValues,
			unions: []*Union{
				{
					Labels: data.Labels{"id": "1", "fish": "herring"},
					A:      makeSeriesNullableTime("b", data.Labels{"id": "1", "fish": "herring"}),
					B:      makeSeriesNullableTime("a", data.Labels{"id": "1"}),
				},
				{
					Labels: data.Labels{"id": "1", "fish": "herring"},
					A:      makeSeriesNullableTime("b", data.Labels{"id": "1", "fish": "herring"}),
					B:      makeSeriesNullableTime("aa", data.Labels{"id": "1", "fish": "herring"}),
				},
				{
					Labels: data.Labels{"id": "1", "fish": "red snapper"},
					A:      makeSeriesNullableTime("bb", data.Labels{"id": "1", "fish": "red snapper"}),
					B:      makeSeriesNullableTime("a", data.Labels{"id": "1"}),
				},
			},
		},
	}
	for _, tt := range tests {
		t.Run(tt.name, func(t *testing.T) {
			unions := union(tt.aResults, tt.bResults)
			tt.unionsAre(t, tt.unions, unions)
		})
	}
}

358
pkg/expr/nodes.go Normal file
View File

@ -0,0 +1,358 @@
package expr
import (
"context"
"encoding/json"
"fmt"
"time"
"github.com/grafana/grafana-plugin-sdk-go/backend"
"github.com/grafana/grafana-plugin-sdk-go/data"
"github.com/grafana/grafana/pkg/expr/mathexp"
"gonum.org/v1/gonum/graph/simple"
)
// baseNode includes common properties used across DPNodes.
type baseNode struct {
	id    int64 // gonum graph node id
	refID string
}
// rawNode is the pre-parsed form of a pipeline node: the raw query body as
// a generic map plus request-level fields needed to build a typed node.
type rawNode struct {
	RefID     string `json:"refId"`
	Query     map[string]interface{}
	QueryType string
	TimeRange backend.TimeRange
}
// GetDatasourceName returns the "datasource" property of the raw query,
// which identifies whether the query targets a real datasource or the
// expression engine. It errors when the property is missing or not a string.
func (rn *rawNode) GetDatasourceName() (string, error) {
	rawDs, ok := rn.Query["datasource"]
	if !ok {
		return "", fmt.Errorf("no datasource in query for refId %v", rn.RefID)
	}
	dsName, ok := rawDs.(string)
	if !ok {
		// Fixed typo in the message ("expted" -> "expected").
		return "", fmt.Errorf("expected datasource identifier to be a string, got %T", rawDs)
	}
	return dsName, nil
}
// GetCommandType reads the "type" property of the raw query and parses it
// into a CommandType.
func (rn *rawNode) GetCommandType() (c CommandType, err error) {
	rawValue, ok := rn.Query["type"]
	if !ok {
		return c, fmt.Errorf("no expression command type in query for refId %v", rn.RefID)
	}
	typeName, ok := rawValue.(string)
	if !ok {
		return c, fmt.Errorf("expected expression command type to be a string, got type %T", rawValue)
	}
	return ParseCommandType(typeName)
}
// String returns a string representation of the node. In particular for
// %v formatting in error messages.
func (b *baseNode) String() string {
	return b.refID
}
// CMDNode is a DPNode that holds an expression command.
type CMDNode struct {
	baseNode
	CMDType CommandType
	Command Command
}
// ID returns the id of the node so it can fulfill the gonum's graph Node interface.
func (b *baseNode) ID() int64 {
	return b.id
}
// RefID returns the refId of the node.
func (b *baseNode) RefID() string {
	return b.refID
}
// NodeType returns the data pipeline node type.
func (gn *CMDNode) NodeType() NodeType {
	return TypeCMDNode
}
// Execute runs the node and adds the results to vars. If the node requires
// other nodes they must have already been executed and their results must
// already be in vars.
func (gn *CMDNode) Execute(ctx context.Context, vars mathexp.Vars) (mathexp.Results, error) {
	return gn.Command.Execute(ctx, vars)
}
// buildCMDNode constructs a CMDNode from a raw query node, unmarshalling
// the typed expression command from the raw query JSON.
func buildCMDNode(dp *simple.DirectedGraph, rn *rawNode) (*CMDNode, error) {
	commandType, err := rn.GetCommandType()
	if err != nil {
		// Wrap the underlying parse error so the cause is not lost.
		return nil, fmt.Errorf("invalid expression command type in '%v': %w", rn.RefID, err)
	}
	node := &CMDNode{
		baseNode: baseNode{
			id:    dp.NewNode().ID(),
			refID: rn.RefID,
		},
		// Fix: record the parsed command type; the CMDType field was
		// declared on CMDNode but never populated.
		CMDType: commandType,
	}
	switch commandType {
	case TypeMath:
		node.Command, err = UnmarshalMathCommand(rn)
	case TypeReduce:
		node.Command, err = UnmarshalReduceCommand(rn)
	case TypeResample:
		node.Command, err = UnmarshalResampleCommand(rn)
	default:
		return nil, fmt.Errorf("expression command type '%v' in '%v' not implemented", commandType, rn.RefID)
	}
	if err != nil {
		return nil, err
	}
	return node, nil
}
const (
	// defaultIntervalMS is the fallback query interval in milliseconds when
	// the request does not supply intervalMs.
	defaultIntervalMS = int64(64)
	// defaultMaxDP is the fallback maximum number of data points when the
	// request does not supply maxDataPoints.
	defaultMaxDP = int64(5000)
)
// DSNode is a DPNode that holds a datasource request.
type DSNode struct {
	baseNode
	query        json.RawMessage // raw query JSON forwarded to the datasource
	datasourceID int64
	orgID        int64
	queryType    string
	timeRange    backend.TimeRange
	intervalMS   int64 // query interval in milliseconds
	maxDP        int64 // maximum number of data points
}
// NodeType returns the data pipeline node type.
func (dn *DSNode) NodeType() NodeType {
	return TypeDatasourceNode
}
// buildDSNode constructs a DSNode from a raw query node, reading the
// required datasourceId and orgId properties and the optional intervalMs
// and maxDataPoints properties out of the raw query JSON.
func buildDSNode(dp *simple.DirectedGraph, rn *rawNode) (*DSNode, error) {
	encodedQuery, err := json.Marshal(rn.Query)
	if err != nil {
		return nil, err
	}
	dsNode := &DSNode{
		baseNode: baseNode{
			id:    dp.NewNode().ID(),
			refID: rn.RefID,
		},
		query:      json.RawMessage(encodedQuery),
		queryType:  rn.QueryType,
		intervalMS: defaultIntervalMS,
		maxDP:      defaultMaxDP,
		timeRange:  rn.TimeRange,
	}
	rawDsID, ok := rn.Query["datasourceId"]
	if !ok {
		return nil, fmt.Errorf("no datasourceId in expression data source request for refId %v", rn.RefID)
	}
	floatDsID, ok := rawDsID.(float64)
	if !ok {
		return nil, fmt.Errorf("expected datasourceId to be a float64, got type %T for refId %v", rawDsID, rn.RefID)
	}
	dsNode.datasourceID = int64(floatDsID)
	rawOrgID, ok := rn.Query["orgId"]
	if !ok {
		return nil, fmt.Errorf("no orgId in expression data source request command for refId %v", rn.RefID)
	}
	floatOrgID, ok := rawOrgID.(float64)
	if !ok {
		return nil, fmt.Errorf("expected orgId to be a float64, got type %T for refId %v", rawOrgID, rn.RefID)
	}
	dsNode.orgID = int64(floatOrgID)
	// intervalMs and maxDataPoints are optional; the defaults set above are
	// kept when the keys are absent.
	// BUG FIX: the original code wrote `if rawIntervalMS := rn.Query["intervalMs"]; ok`
	// which reused the stale `ok` from the orgId type assertion instead of a
	// fresh comma-ok on the map access. A query without intervalMs (or
	// maxDataPoints) then failed the type assertion on a nil value and
	// returned an error instead of using the default.
	if rawIntervalMS, ok := rn.Query["intervalMs"]; ok {
		floatIntervalMS, ok := rawIntervalMS.(float64)
		if !ok {
			return nil, fmt.Errorf("expected intervalMs to be a float64, got type %T for refId %v", rawIntervalMS, rn.RefID)
		}
		dsNode.intervalMS = int64(floatIntervalMS)
	}
	if rawMaxDP, ok := rn.Query["maxDataPoints"]; ok {
		floatMaxDP, ok := rawMaxDP.(float64)
		if !ok {
			return nil, fmt.Errorf("expected maxDataPoints to be a float64, got type %T for refId %v", rawMaxDP, rn.RefID)
		}
		dsNode.maxDP = int64(floatMaxDP)
	}
	return dsNode, nil
}
// Execute runs the node and adds the results to vars. If the node requires
// other nodes they must have already been executed and their results must
// already be in vars.
func (dn *DSNode) Execute(ctx context.Context, vars mathexp.Vars) (mathexp.Results, error) {
	// Build a minimal plugin context carrying only the org id and the
	// datasource id; QueryData resolves the rest.
	pc := backend.PluginContext{
		OrgID: dn.orgID,
		DataSourceInstanceSettings: &backend.DataSourceInstanceSettings{
			ID: dn.datasourceID,
		},
	}
	q := []backend.DataQuery{
		{
			RefID:         dn.refID,
			MaxDataPoints: dn.maxDP,
			Interval:      time.Duration(int64(time.Millisecond) * dn.intervalMS),
			JSON:          dn.query,
			TimeRange:     dn.timeRange,
			QueryType:     dn.queryType,
		},
	}
	resp, err := QueryData(ctx, &backend.QueryDataRequest{
		PluginContext: pc,
		Queries:       q,
	})
	if err != nil {
		return mathexp.Results{}, err
	}
	vals := make([]mathexp.Value, 0)
	for refID, qr := range resp.Responses {
		// A single non-timeseries frame with exactly one numeric column is
		// treated as a number set rather than a series set.
		if len(qr.Frames) == 1 {
			frame := qr.Frames[0]
			if frame.TimeSeriesSchema().Type == data.TimeSeriesTypeNot && isNumberTable(frame) {
				backend.Logger.Debug("expression datasource query (numberSet)", "query", refID)
				numberSet, err := extractNumberSet(frame)
				if err != nil {
					return mathexp.Results{}, err
				}
				for _, n := range numberSet {
					vals = append(vals, n)
				}
				// NOTE(review): this returns from inside the range loop, so
				// any remaining responses are dropped once a number table is
				// seen — confirm this is intended.
				return mathexp.Results{
					Values: vals,
				}, nil
			}
		}
		// Otherwise every frame in the response is converted to one or more
		// series values.
		for _, frame := range qr.Frames {
			backend.Logger.Debug("expression datasource query (seriesSet)", "query", refID)
			series, err := WideToMany(frame)
			if err != nil {
				return mathexp.Results{}, err
			}
			for _, s := range series {
				vals = append(vals, s)
			}
		}
	}
	return mathexp.Results{
		Values: vals,
	}, nil
}
// isNumberTable reports whether the frame looks like a "number table":
// exactly one numeric field, any number of (nullable) string fields, and
// nothing else.
func isNumberTable(frame *data.Frame) bool {
	if frame == nil || frame.Fields == nil {
		return false
	}
	var numericCount, otherCount int
	for _, field := range frame.Fields {
		switch fType := field.Type(); {
		case fType.Numeric():
			numericCount++
		case fType == data.FieldTypeString || fType == data.FieldTypeNullableString:
			// String fields become labels, so any number of them is allowed.
		default:
			otherCount++
		}
	}
	return numericCount == 1 && otherCount == 0
}
// extractNumberSet converts a number-table frame (see isNumberTable) into
// one mathexp.Number per row, using the string columns as labels.
func extractNumberSet(frame *data.Frame) ([]mathexp.Number, error) {
	valueFieldIdx := 0
	var labelFieldIdxs []int
	var labelFieldNames []string
	for i, field := range frame.Fields {
		fType := field.Type()
		switch {
		case fType.Numeric():
			valueFieldIdx = i
		case fType == data.FieldTypeString || fType == data.FieldTypeNullableString:
			labelFieldIdxs = append(labelFieldIdxs, i)
			labelFieldNames = append(labelFieldNames, field.Name)
		}
	}
	rowCount := frame.Rows()
	numbers := make([]mathexp.Number, rowCount)
	for rowIdx := 0; rowIdx < rowCount; rowIdx++ {
		val, _ := frame.FloatAt(valueFieldIdx, rowIdx)
		// Each row gets its own fresh labels map; rows without string
		// columns keep nil labels.
		var labels data.Labels
		if len(labelFieldIdxs) > 0 {
			labels = make(data.Labels, len(labelFieldIdxs))
		}
		for i, fieldIdx := range labelFieldIdxs {
			key := labelFieldNames[i] // TODO check for duplicate string column names
			labelVal, _ := frame.ConcreteAt(fieldIdx, rowIdx)
			labels[key] = labelVal.(string) // TODO check assertion / return error
		}
		n := mathexp.NewNumber("", labels)
		n.SetValue(&val)
		numbers[rowIdx] = n
	}
	return numbers, nil
}
// WideToMany converts a data package wide type Frame to one or multiple Series. A series
// is created for each value type column of wide frame.
//
// This might not be a good idea long term, but works now as an adapter/shim.
func WideToMany(frame *data.Frame) ([]mathexp.Series, error) {
	tsSchema := frame.TimeSeriesSchema()
	if tsSchema.Type != data.TimeSeriesTypeWide {
		// NOTE(review): "(input refid)" reads like a leftover placeholder —
		// consider interpolating the actual refId into the message.
		return nil, fmt.Errorf("input data must be a wide series but got type %s (input refid)", tsSchema.Type)
	}
	// Fast path: a frame with a single value column is already a valid
	// Series frame.
	if len(tsSchema.ValueIndices) == 1 {
		s, err := mathexp.SeriesFromFrame(frame)
		if err != nil {
			return nil, err
		}
		return []mathexp.Series{s}, nil
	}
	series := []mathexp.Series{}
	for _, valIdx := range tsSchema.ValueIndices {
		// Build a fresh two-field (time, value) frame for this value column,
		// copying the time column and value column row by row.
		l := frame.Rows()
		f := data.NewFrameOfFieldTypes(frame.Name, l, frame.Fields[tsSchema.TimeIndex].Type(), frame.Fields[valIdx].Type())
		f.Fields[0].Name = frame.Fields[tsSchema.TimeIndex].Name
		f.Fields[1].Name = frame.Fields[valIdx].Name
		if frame.Fields[valIdx].Labels != nil {
			f.Fields[1].Labels = frame.Fields[valIdx].Labels.Copy()
		}
		for i := 0; i < l; i++ {
			f.SetRow(i, frame.Fields[tsSchema.TimeIndex].CopyAt(i), frame.Fields[valIdx].CopyAt(i))
		}
		s, err := mathexp.SeriesFromFrame(f)
		if err != nil {
			return nil, err
		}
		series = append(series, s)
	}
	return series, nil
}

39
pkg/expr/service.go Normal file
View File

@ -0,0 +1,39 @@
package expr
import (
"context"
"github.com/grafana/grafana-plugin-sdk-go/backend"
)
// DatasourceName is the string constant used as the datasource name in requests
// to identify it as an expression command.
const DatasourceName = "__expr__"
// DatasourceID is the fake datasource id used in requests to identify it as an
// expression command.
const DatasourceID = -100
// Service is service representation for expression handling.
type Service struct {
}
// BuildPipeline builds a pipeline from a request.
func (s *Service) BuildPipeline(queries []backend.DataQuery) (DataPipeline, error) {
	return buildPipeline(queries)
}
// ExecutePipeline executes an expression pipeline and returns all the
// results keyed by refId.
func (s *Service) ExecutePipeline(ctx context.Context, pipeline DataPipeline) (*backend.QueryDataResponse, error) {
	vars, err := pipeline.execute(ctx)
	if err != nil {
		return nil, err
	}
	res := backend.NewQueryDataResponse()
	for refID, val := range vars {
		res.Responses[refID] = backend.DataResponse{
			Frames: val.Values.AsDataFrames(refID),
		}
	}
	return res, nil
}

111
pkg/expr/service_test.go Normal file
View File

@ -0,0 +1,111 @@
package expr
import (
"context"
"encoding/json"
"sort"
"testing"
"time"
"github.com/google/go-cmp/cmp"
"github.com/grafana/grafana-plugin-sdk-go/backend"
"github.com/grafana/grafana-plugin-sdk-go/data"
"github.com/grafana/grafana/pkg/bus"
"github.com/grafana/grafana/pkg/models"
"github.com/grafana/grafana/pkg/tsdb"
"github.com/stretchr/testify/require"
)
// TestService runs a two-query pipeline end to end against a mocked tsdb
// endpoint: query A hits the mock datasource and query B is a math
// expression ("$A * 2") evaluated by the expression engine.
func TestService(t *testing.T) {
	dsDF := data.NewFrame("test",
		data.NewField("time", nil, []*time.Time{utp(1)}),
		data.NewField("value", nil, []*float64{fp(2)}))
	registerEndPoint(dsDF)
	s := Service{}
	queries := []backend.DataQuery{
		{
			RefID: "A",
			JSON:  json.RawMessage(`{ "datasource": "test", "datasourceId": 1, "orgId": 1, "intervalMs": 1000, "maxDataPoints": 1000 }`),
		},
		{
			RefID: "B",
			JSON:  json.RawMessage(`{ "datasource": "__expr__", "datasourceId": -100, "type": "math", "expression": "$A * 2" }`),
		},
	}
	pl, err := s.BuildPipeline(queries)
	require.NoError(t, err)
	res, err := s.ExecutePipeline(context.Background(), pl)
	require.NoError(t, err)
	bDF := data.NewFrame("",
		data.NewField("Time", nil, []*time.Time{utp(1)}),
		data.NewField("", nil, []*float64{fp(4)}))
	bDF.RefID = "B"
	expect := &backend.QueryDataResponse{
		Responses: backend.Responses{
			"A": {
				Frames: []*data.Frame{dsDF},
			},
			"B": {
				Frames: []*data.Frame{bDF},
			},
		},
	}
	// Service currently doesn't care about the order of frames in the
	// return, so sort them before comparing.
	trans := cmp.Transformer("Sort", func(in []*data.Frame) []*data.Frame {
		out := append([]*data.Frame(nil), in...) // Copy input to avoid mutating it
		sort.SliceStable(out, func(i, j int) bool {
			return out[i].RefID > out[j].RefID
		})
		return out
	})
	options := append([]cmp.Option{trans}, data.FrameTestCompareOptions()...)
	if diff := cmp.Diff(expect, res, options...); diff != "" {
		t.Errorf("Result mismatch (-want +got):\n%s", diff)
	}
}
func utp(sec int64) *time.Time {
t := time.Unix(sec, 0)
return &t
}
// fp returns a pointer to a copy of the given float64 value.
func fp(f float64) *float64 {
	v := f
	return &v
}
// mockEndpoint is a stub tsdb query endpoint that always returns its
// configured frames under refID "A", ignoring the incoming query.
type mockEndpoint struct {
	Frames data.Frames
}
// Query implements tsdb.TsdbQueryEndpoint by returning the canned frames.
func (me *mockEndpoint) Query(ctx context.Context, ds *models.DataSource, query *tsdb.TsdbQuery) (*tsdb.Response, error) {
	return &tsdb.Response{
		Results: map[string]*tsdb.QueryResult{
			"A": {
				Dataframes: tsdb.NewDecodedDataFrames(me.Frames),
			},
		},
	}, nil
}
// registerEndPoint wires a mockEndpoint serving the given frames into the
// legacy tsdb registry under type "test", and registers a bus handler that
// resolves any datasource-by-id lookup to that type (id 1, org 1).
func registerEndPoint(df ...*data.Frame) {
	me := &mockEndpoint{
		Frames: df,
	}
	endpoint := func(dsInfo *models.DataSource) (tsdb.TsdbQueryEndpoint, error) {
		return me, nil
	}
	tsdb.RegisterTsdbQueryEndpoint("test", endpoint)
	bus.AddHandler("test", func(query *models.GetDataSourceByIdQuery) error {
		query.Result = &models.DataSource{Id: 1, OrgId: 1, Type: "test"}
		return nil
	})
}

211
pkg/expr/transform.go Normal file
View File

@ -0,0 +1,211 @@
package expr
import (
"encoding/json"
"fmt"
"strconv"
"time"
"github.com/grafana/grafana-plugin-sdk-go/backend"
"github.com/grafana/grafana/pkg/bus"
"github.com/grafana/grafana/pkg/components/simplejson"
"github.com/grafana/grafana/pkg/models"
"github.com/grafana/grafana/pkg/tsdb"
"golang.org/x/net/context"
"google.golang.org/grpc/codes"
"google.golang.org/grpc/status"
)
// WrapTransformData converts a tsdb query into a plugin-sdk QueryDataRequest,
// runs it through the expression pipeline via TransformData, and converts the
// result back into a tsdb.Response.
//
// The query's User must be set: its OrgId populates the plugin context used
// by downstream datasource requests. The original code dereferenced
// query.User without a check and would panic on a nil user.
func WrapTransformData(ctx context.Context, query *tsdb.TsdbQuery) (*tsdb.Response, error) {
	if query.User == nil {
		return nil, fmt.Errorf("missing user in transform query")
	}
	sdkReq := &backend.QueryDataRequest{
		PluginContext: backend.PluginContext{
			OrgID: query.User.OrgId,
		},
		Queries: []backend.DataQuery{},
	}

	for _, q := range query.Queries {
		modelJSON, err := q.Model.MarshalJSON()
		if err != nil {
			return nil, err
		}
		sdkReq.Queries = append(sdkReq.Queries, backend.DataQuery{
			JSON: modelJSON,
			// IntervalMs is milliseconds; convert to a time.Duration.
			Interval:      time.Duration(q.IntervalMs) * time.Millisecond,
			RefID:         q.RefId,
			MaxDataPoints: q.MaxDataPoints,
			QueryType:     q.QueryType,
			TimeRange: backend.TimeRange{
				From: query.TimeRange.GetFromAsTimeUTC(),
				To:   query.TimeRange.GetToAsTimeUTC(),
			},
		})
	}
	pbRes, err := TransformData(ctx, sdkReq)
	if err != nil {
		return nil, err
	}

	// Map the plugin-sdk responses back onto tsdb query results.
	tR := &tsdb.Response{
		Results: make(map[string]*tsdb.QueryResult, len(pbRes.Responses)),
	}
	for refID, res := range pbRes.Responses {
		tRes := &tsdb.QueryResult{
			RefId:      refID,
			Dataframes: tsdb.NewDecodedDataFrames(res.Frames),
		}
		if res.Error != nil {
			tRes.Error = res.Error
			tRes.ErrorString = res.Error.Error()
		}
		tR.Results[refID] = tRes
	}
	return tR, nil
}
// TransformData takes Queries which are either expressions nodes
// or are datasource requests.
func TransformData(ctx context.Context, req *backend.QueryDataRequest) (*backend.QueryDataResponse, error) {
svc := Service{}
// Build the pipeline from the request, checking for ordering issues (e.g. loops)
// and parsing graph nodes from the queries.
pipeline, err := svc.BuildPipeline(req.Queries)
if err != nil {
return nil, status.Error(codes.InvalidArgument, err.Error())
}
// Execute the pipeline
responses, err := svc.ExecutePipeline(ctx, pipeline)
if err != nil {
return nil, status.Error(codes.Unknown, err.Error())
}
// Get which queries have the Hide property so they those queries' results
// can be excluded from the response.
hidden, err := hiddenRefIDs(req.Queries)
if err != nil {
return nil, status.Error((codes.Internal), err.Error())
}
if len(hidden) != 0 {
filteredRes := backend.NewQueryDataResponse()
for refID, res := range responses.Responses {
if _, ok := hidden[refID]; !ok {
filteredRes.Responses[refID] = res
}
}
responses = filteredRes
}
return responses, nil
}
// hiddenRefIDs returns the set of ref IDs whose query JSON sets "hide": true.
// An unmarshal failure on any query aborts with that error.
func hiddenRefIDs(queries []backend.DataQuery) (map[string]struct{}, error) {
	hidden := map[string]struct{}{}
	for _, q := range queries {
		var props struct {
			Hide bool `json:"hide"`
		}
		if err := json.Unmarshal(q.JSON, &props); err != nil {
			return nil, err
		}
		if props.Hide {
			hidden[q.RefID] = struct{}{}
		}
	}
	return hidden, nil
}
// QueryData is called used to query datasources that are not expression commands, but are used
// alongside expressions and/or are the input of an expression command.
// QueryData executes datasource (non-expression) queries that feed into, or
// accompany, expression commands. It resolves the datasource from the plugin
// context via the bus, translates the plugin-sdk queries into tsdb queries,
// executes them, and maps the tsdb results back into plugin-sdk responses.
//
// Only errors, tsdb.Series, and decoded Dataframes responses are mapped.
func QueryData(ctx context.Context, req *backend.QueryDataRequest) (*backend.QueryDataResponse, error) {
	if len(req.Queries) == 0 {
		return nil, fmt.Errorf("zero queries found in datasource request")
	}

	datasourceID := int64(0)
	if req.PluginContext.DataSourceInstanceSettings != nil {
		datasourceID = req.PluginContext.DataSourceInstanceSettings.ID
	}

	getDsInfo := &models.GetDataSourceByIdQuery{
		OrgId: req.PluginContext.OrgID,
		Id:    datasourceID,
	}
	if err := bus.Dispatch(getDsInfo); err != nil {
		return nil, fmt.Errorf("could not find datasource: %w", err)
	}

	// Convert plugin-model (datasource) queries to tsdb queries.
	queries := make([]*tsdb.Query, len(req.Queries))
	for i, query := range req.Queries {
		sj, err := simplejson.NewJson(query.JSON)
		if err != nil {
			return nil, err
		}
		queries[i] = &tsdb.Query{
			RefId: query.RefID,
			// IntervalMs is expressed in milliseconds. The previous code
			// used Interval.Microseconds(), inflating the value 1000x.
			IntervalMs:    query.Interval.Milliseconds(),
			MaxDataPoints: query.MaxDataPoints,
			QueryType:     query.QueryType,
			DataSource:    getDsInfo.Result,
			Model:         sj,
		}
	}

	// For now, take the time range from the first query.
	timeRange := tsdb.NewTimeRange(
		strconv.FormatInt(req.Queries[0].TimeRange.From.Unix()*1000, 10),
		strconv.FormatInt(req.Queries[0].TimeRange.To.Unix()*1000, 10))
	tQ := &tsdb.TsdbQuery{
		TimeRange: timeRange,
		Queries:   queries,
	}

	// Execute the converted queries.
	tsdbRes, err := tsdb.HandleRequest(ctx, getDsInfo.Result, tQ)
	if err != nil {
		return nil, err
	}

	// Convert tsdb results (a map) to plugin-model/datasource results.
	responses := make(map[string]backend.DataResponse, len(tsdbRes.Results))
	for refID, res := range tsdbRes.Results {
		pRes := backend.DataResponse{}
		if res.Error != nil {
			pRes.Error = res.Error
		}

		if res.Dataframes != nil {
			decoded, err := res.Dataframes.Decoded()
			if err != nil {
				return nil, err
			}
			pRes.Frames = decoded
			responses[refID] = pRes
			continue
		}

		for _, series := range res.Series {
			frame, err := tsdb.SeriesToFrame(series)
			// Check the error before touching frame: on failure frame may
			// be nil and assigning RefID would panic.
			if err != nil {
				return nil, err
			}
			frame.RefID = refID
			pRes.Frames = append(pRes.Frames, frame)
		}
		responses[refID] = pRes
	}
	return &backend.QueryDataResponse{
		Responses: responses,
	}, nil
}

View File

@ -71,7 +71,6 @@ func getV2PluginSet() goplugin.PluginSet {
"diagnostics": &grpcplugin.DiagnosticsGRPCPlugin{},
"resource": &grpcplugin.ResourceGRPCPlugin{},
"data": &grpcplugin.DataGRPCPlugin{},
"transform": &grpcplugin.TransformGRPCPlugin{},
"renderer": &pluginextensionv2.RendererGRPCPlugin{},
}
}
@ -116,7 +115,6 @@ type LegacyClient struct {
// Client client for communicating with a plugin using the current (v2) plugin protocol.
type Client struct {
DataPlugin grpcplugin.DataClient
TransformPlugin grpcplugin.TransformClient
RendererPlugin pluginextensionv2.RendererPlugin
DataPlugin grpcplugin.DataClient
RendererPlugin pluginextensionv2.RendererPlugin
}

View File

@ -21,7 +21,6 @@ type clientV2 struct {
grpcplugin.DiagnosticsClient
grpcplugin.ResourceClient
grpcplugin.DataClient
grpcplugin.TransformClient
pluginextensionv2.RendererPlugin
}
@ -41,11 +40,6 @@ func newClientV2(descriptor PluginDescriptor, logger log.Logger, rpcClient plugi
return nil, err
}
rawTransform, err := rpcClient.Dispense("transform")
if err != nil {
return nil, err
}
rawRenderer, err := rpcClient.Dispense("renderer")
if err != nil {
return nil, err
@ -70,12 +64,6 @@ func newClientV2(descriptor PluginDescriptor, logger log.Logger, rpcClient plugi
}
}
if rawTransform != nil {
if plugin, ok := rawTransform.(grpcplugin.TransformClient); ok {
c.TransformClient = instrumentTransformPlugin(plugin)
}
}
if rawRenderer != nil {
if plugin, ok := rawRenderer.(pluginextensionv2.RendererPlugin); ok {
c.RendererPlugin = plugin
@ -84,9 +72,8 @@ func newClientV2(descriptor PluginDescriptor, logger log.Logger, rpcClient plugi
if descriptor.startFns.OnStart != nil {
client := &Client{
DataPlugin: c.DataClient,
TransformPlugin: c.TransformClient,
RendererPlugin: c.RendererPlugin,
DataPlugin: c.DataClient,
RendererPlugin: c.RendererPlugin,
}
if err := descriptor.startFns.OnStart(descriptor.pluginID, client, logger); err != nil {
return nil, err
@ -189,24 +176,3 @@ func instrumentDataClient(plugin grpcplugin.DataClient) grpcplugin.DataClient {
return resp, err
})
}
type transformPluginTransformDataFunc func(ctx context.Context, req *pluginv2.QueryDataRequest, callback grpcplugin.TransformDataCallBack) (*pluginv2.QueryDataResponse, error)
func (fn transformPluginTransformDataFunc) TransformData(ctx context.Context, req *pluginv2.QueryDataRequest, callback grpcplugin.TransformDataCallBack) (*pluginv2.QueryDataResponse, error) {
return fn(ctx, req, callback)
}
func instrumentTransformPlugin(plugin grpcplugin.TransformClient) grpcplugin.TransformClient {
if plugin == nil {
return nil
}
return transformPluginTransformDataFunc(func(ctx context.Context, req *pluginv2.QueryDataRequest, callback grpcplugin.TransformDataCallBack) (*pluginv2.QueryDataResponse, error) {
var resp *pluginv2.QueryDataResponse
err := backendplugin.InstrumentTransformDataRequest(req.PluginContext.PluginId, func() (innerErr error) {
resp, innerErr = plugin.TransformData(ctx, req, callback)
return
})
return resp, err
})
}

View File

@ -65,11 +65,6 @@ func InstrumentQueryDataRequest(pluginID string, fn func() error) error {
return instrumentPluginRequest(pluginID, "queryData", fn)
}
// InstrumentTransformDataRequest instruments success rate and latency of transform data request.
func InstrumentTransformDataRequest(pluginID string, fn func() error) error {
return instrumentPluginRequest(pluginID, "transformData", fn)
}
// InstrumentQueryDataHandler wraps a backend.QueryDataHandler with instrumentation of success rate and latency.
func InstrumentQueryDataHandler(handler backend.QueryDataHandler) backend.QueryDataHandler {
if handler == nil {

View File

@ -31,7 +31,6 @@ var (
Plugins map[string]*PluginBase
PluginTypes map[string]interface{}
Renderer *RendererPlugin
Transform *TransformPlugin
GrafanaLatestVersion string
GrafanaHasUpdate bool
@ -82,7 +81,6 @@ func (pm *PluginManager) Init() error {
"datasource": DataSourcePlugin{},
"app": AppPlugin{},
"renderer": RendererPlugin{},
"transform": TransformPlugin{},
}
pluginScanningErrors = map[string]*PluginError{}
@ -357,16 +355,6 @@ func (s *PluginScanner) loadPlugin(pluginJSONFilePath string) error {
return errors.New("did not find type or id properties in plugin.json")
}
// The expressions feature toggle corresponds to transform plug-ins.
if pluginCommon.Type == "transform" {
isEnabled := s.cfg.IsExpressionsEnabled()
if !isEnabled {
s.log.Debug("Transform plugin is disabled since the expressions feature toggle is not enabled",
"pluginID", pluginCommon.Id)
return nil
}
}
pluginCommon.PluginDir = filepath.Dir(pluginJSONFilePath)
pluginCommon.Signature = getPluginSignatureState(s.log, &pluginCommon)
@ -376,7 +364,7 @@ func (s *PluginScanner) loadPlugin(pluginJSONFilePath string) error {
}
func (*PluginScanner) IsBackendOnlyPlugin(pluginType string) bool {
return pluginType == "renderer" || pluginType == "transform"
return pluginType == "renderer"
}
// validateSignature validates a plugin's signature.

View File

@ -142,29 +142,6 @@ func TestPluginManager_Init(t *testing.T) {
assert.Empty(t, fm.registeredPlugins)
})
t.Run("Transform plugins should be loaded when expressions feature is on", func(t *testing.T) {
origPluginsPath := setting.PluginsPath
t.Cleanup(func() {
setting.PluginsPath = origPluginsPath
})
setting.PluginsPath = "testdata/behind-feature-flag"
fm := &fakeBackendPluginManager{}
pm := &PluginManager{
Cfg: &setting.Cfg{
FeatureToggles: map[string]bool{
"expressions": true,
},
},
BackendPluginManager: fm,
}
err := pm.Init()
require.NoError(t, err)
require.Empty(t, pm.scanningErrors)
assert.Equal(t, []string{"gel"}, fm.registeredPlugins)
})
t.Run("With nested plugin duplicating parent", func(t *testing.T) {
origPluginsPath := setting.PluginsPath
t.Cleanup(func() {

View File

@ -1,23 +0,0 @@
-----BEGIN PGP SIGNED MESSAGE-----
Hash: SHA512
{
"plugin": "gel",
"version": "1.0.0",
"files": {
"plugin.json": "b9b3bb0dab3c4655a929a1e48a957466e3e2717992bdd29da27e5eed2fae090c"
},
"time": 1589274667427,
"keyId": "7e4d0c6a708866e7"
}
-----BEGIN PGP SIGNATURE-----
Version: OpenPGP.js v4.10.1
Comment: https://openpgpjs.org
wqIEARMKAAYFAl66aCsACgkQfk0ManCIZufDMAIJAWoNVihI9ZSBpUpgXrzY
XXsI3OmHuVpzrv6M6bk5jYdzY4SyzZmdw4CB51TIDJW9SnUajlXxWLXGYY+w
B2rSYvuhAgkBlG9w5OV3jcyg/wfUrIcCO5XRHMydCg0hIOznClzuG0uWn3wm
d4RT/ap1ezislQ/91zvhsLgAIztZlm3EsNBv7sI=
=WPLw
-----END PGP SIGNATURE-----

View File

@ -1,14 +0,0 @@
{
"type": "transform",
"name": "GEL",
"id": "gel",
"backend": true,
"info": {
"description": "Test",
"version": "1.0.0",
"author": {
"name": "Grafana Labs",
"url": "https://grafana.com"
}
}
}

View File

@ -1,228 +0,0 @@
package plugins
import (
"context"
"encoding/json"
"fmt"
"path/filepath"
"strconv"
sdkgrpcplugin "github.com/grafana/grafana-plugin-sdk-go/backend/grpcplugin"
"github.com/grafana/grafana-plugin-sdk-go/genproto/pluginv2"
"github.com/grafana/grafana/pkg/bus"
"github.com/grafana/grafana/pkg/components/simplejson"
"github.com/grafana/grafana/pkg/infra/log"
"github.com/grafana/grafana/pkg/models"
"github.com/grafana/grafana/pkg/plugins/backendplugin"
"github.com/grafana/grafana/pkg/plugins/backendplugin/grpcplugin"
"github.com/grafana/grafana/pkg/plugins/datasource/wrapper"
"github.com/grafana/grafana/pkg/tsdb"
"github.com/grafana/grafana/pkg/util/errutil"
)
// TransformPlugin is a backend plugin of type "transform" (e.g. GEL). Once
// its backend process starts, the embedded TransformWrapper is populated so
// Grafana can send it data-transformation requests.
type TransformPlugin struct {
	PluginBase
	// Executable is the plugin binary name used to compose the start command.
	Executable string `json:"executable,omitempty"`
	// TransformWrapper is set by onPluginStart after the process launches.
	*TransformWrapper
}
// Load decodes the plugin's JSON manifest into p, registers the plugin with
// the plugin registry, and registers its backend binary with the backend
// plugin manager. On success the package-level Transform variable is set to
// p, making it the active transform plugin.
func (p *TransformPlugin) Load(decoder *json.Decoder, base *PluginBase, backendPluginManager backendplugin.Manager) error {
	if err := decoder.Decode(p); err != nil {
		return err
	}
	if err := p.registerPlugin(base); err != nil {
		return err
	}
	// Resolve the full path of the plugin binary inside the plugin dir.
	cmd := ComposePluginStartCommand(p.Executable)
	fullpath := filepath.Join(p.PluginDir, cmd)
	// onPluginStart wires up the transform wrapper once the process is up.
	factory := grpcplugin.NewBackendPlugin(p.Id, fullpath, grpcplugin.PluginStartFuncs{
		OnStart: p.onPluginStart,
	})
	if err := backendPluginManager.Register(p.Id, factory); err != nil {
		return errutil.Wrapf(err, "Failed to register backend plugin")
	}
	Transform = p
	return nil
}
// onPluginStart runs once the backend plugin process is up. It wires the
// transform client into the wrapper and, when the plugin also serves data
// queries, registers it as a tsdb query endpoint under its plugin ID.
func (p *TransformPlugin) onPluginStart(pluginID string, client *grpcplugin.Client, logger log.Logger) error {
	p.TransformWrapper = NewTransformWrapper(logger, client.TransformPlugin)
	if client.DataPlugin != nil {
		tsdb.RegisterTsdbQueryEndpoint(pluginID, func(dsInfo *models.DataSource) (tsdb.TsdbQueryEndpoint, error) {
			return wrapper.NewDatasourcePluginWrapperV2(logger, p.Id, p.Type, client.DataPlugin), nil
		})
	}
	return nil
}
// ...
// Wrapper Code
// ...
// NewTransformWrapper builds a TransformWrapper around the given transform
// client, attaching a callback handler that serves datasource queries the
// plugin issues back to Grafana.
func NewTransformWrapper(log log.Logger, plugin sdkgrpcplugin.TransformClient) *TransformWrapper {
	return &TransformWrapper{
		TransformClient: plugin,
		logger:          log,
		Callback:        &transformCallback{logger: log},
	}
}
// TransformWrapper adapts a plugin-sdk TransformClient to Grafana's tsdb
// request/response model.
type TransformWrapper struct {
	sdkgrpcplugin.TransformClient
	// logger records wrapper-level diagnostics.
	logger log.Logger
	// Callback serves datasource queries issued back by the plugin.
	Callback *transformCallback
}
// Transform converts the tsdb query into the plugin wire protocol, sends it
// to the transform plugin for evaluation, and converts the response back to
// tsdb form. Datasource queries the plugin issues during evaluation are
// served via tw.Callback.
func (tw *TransformWrapper) Transform(ctx context.Context, query *tsdb.TsdbQuery) (*tsdb.Response, error) {
	pbQuery := &pluginv2.QueryDataRequest{
		PluginContext: &pluginv2.PluginContext{
			// TODO: Things probably
		},
		Queries: []*pluginv2.DataQuery{},
	}

	for _, q := range query.Queries {
		modelJSON, err := q.Model.MarshalJSON()
		if err != nil {
			return nil, err
		}
		pbQuery.Queries = append(pbQuery.Queries, &pluginv2.DataQuery{
			Json:          modelJSON,
			IntervalMS:    q.IntervalMs,
			RefId:         q.RefId,
			MaxDataPoints: q.MaxDataPoints,
			QueryType:     q.QueryType,
			TimeRange: &pluginv2.TimeRange{
				ToEpochMS:   query.TimeRange.GetToAsMsEpoch(),
				FromEpochMS: query.TimeRange.GetFromAsMsEpoch(),
			},
		})
	}
	pbRes, err := tw.TransformClient.TransformData(ctx, pbQuery, tw.Callback)
	if err != nil {
		return nil, err
	}

	// Map the plugin responses back onto tsdb query results.
	tR := &tsdb.Response{
		Results: make(map[string]*tsdb.QueryResult, len(pbRes.Responses)),
	}
	for refID, res := range pbRes.Responses {
		tRes := &tsdb.QueryResult{
			RefId:      refID,
			Dataframes: tsdb.NewEncodedDataFrames(res.Frames),
		}
		if len(res.JsonMeta) != 0 {
			tRes.Meta = simplejson.NewFromAny(res.JsonMeta)
		}
		if res.Error != "" {
			// Use an explicit format verb: fmt.Errorf(res.Error) treats the
			// plugin's error text as a format string, so a literal "%" in it
			// would be misinterpreted (flagged by go vet's printf check).
			tRes.Error = fmt.Errorf("%s", res.Error)
			tRes.ErrorString = res.Error
		}
		tR.Results[refID] = tRes
	}
	return tR, nil
}
// transformCallback implements the datasource-query callback that a transform
// plugin invokes to fetch data from Grafana datasources mid-evaluation.
type transformCallback struct {
	// logger records callback-level errors (e.g. metadata marshal failures).
	logger log.Logger
}
// QueryData serves a datasource query issued by the transform plugin. It
// resolves the datasource via the bus, converts the wire-protocol queries to
// tsdb queries, executes them, and maps the results (errors, tsdb.Series,
// and encoded Dataframes) back onto wire-protocol responses.
func (s *transformCallback) QueryData(ctx context.Context, req *pluginv2.QueryDataRequest) (*pluginv2.QueryDataResponse, error) {
	if len(req.Queries) == 0 {
		return nil, fmt.Errorf("zero queries found in datasource request")
	}

	datasourceID := int64(0)
	if req.PluginContext.DataSourceInstanceSettings != nil {
		datasourceID = req.PluginContext.DataSourceInstanceSettings.Id
	}

	getDsInfo := &models.GetDataSourceByIdQuery{
		OrgId: req.PluginContext.OrgId,
		Id:    datasourceID,
	}
	if err := bus.Dispatch(getDsInfo); err != nil {
		return nil, fmt.Errorf("could not find datasource: %w", err)
	}

	// Convert plugin-model (datasource) queries to tsdb queries.
	queries := make([]*tsdb.Query, len(req.Queries))
	for i, query := range req.Queries {
		sj, err := simplejson.NewJson(query.Json)
		if err != nil {
			return nil, err
		}
		queries[i] = &tsdb.Query{
			RefId:         query.RefId,
			IntervalMs:    query.IntervalMS,
			MaxDataPoints: query.MaxDataPoints,
			QueryType:     query.QueryType,
			DataSource:    getDsInfo.Result,
			Model:         sj,
		}
	}

	// For now, take the time range from the first query.
	timeRange := tsdb.NewTimeRange(strconv.FormatInt(req.Queries[0].TimeRange.FromEpochMS, 10), strconv.FormatInt(req.Queries[0].TimeRange.ToEpochMS, 10))
	tQ := &tsdb.TsdbQuery{
		TimeRange: timeRange,
		Queries:   queries,
	}

	// Execute the converted queries.
	tsdbRes, err := tsdb.HandleRequest(ctx, getDsInfo.Result, tQ)
	if err != nil {
		return nil, err
	}

	// Convert tsdb results (map) to plugin-model/datasource (slice) results.
	// Only error, tsdb.Series, and encoded Dataframes responses are mapped.
	responses := make(map[string]*pluginv2.DataResponse, len(tsdbRes.Results))
	for refID, res := range tsdbRes.Results {
		pRes := &pluginv2.DataResponse{}
		if res.Error != nil {
			pRes.Error = res.Error.Error()
		}

		if res.Dataframes != nil {
			encoded, err := res.Dataframes.Encoded()
			if err != nil {
				return nil, err
			}
			pRes.Frames = encoded
			responses[refID] = pRes
			continue
		}

		for _, series := range res.Series {
			frame, err := tsdb.SeriesToFrame(series)
			// Check the error before touching frame: on failure frame may
			// be nil and assigning RefID would panic.
			if err != nil {
				return nil, err
			}
			frame.RefID = refID
			encFrame, err := frame.MarshalArrow()
			if err != nil {
				return nil, err
			}
			pRes.Frames = append(pRes.Frames, encFrame)
		}
		if res.Meta != nil {
			b, err := res.Meta.MarshalJSON()
			if err != nil {
				// Best effort: log and continue without metadata.
				s.logger.Error("failed to marshal json metadata", err)
			}
			pRes.JsonMeta = b
		}
		responses[refID] = pRes
	}
	return &pluginv2.QueryDataResponse{
		Responses: responses,
	}, nil
}

View File

@ -5,15 +5,13 @@ import (
"fmt"
"time"
"github.com/grafana/grafana-plugin-sdk-go/genproto/pluginv2"
"github.com/grafana/grafana-plugin-sdk-go/backend"
"github.com/grafana/grafana/pkg/expr"
)
const defaultMaxDataPoints float64 = 100
const defaultIntervalMS float64 = 1000
// DefaultExprDatasourceID is the datasource identifier for expressions.
const DefaultExprDatasourceID = -100
// Duration is a type used for marshalling durations.
type Duration time.Duration
@ -51,10 +49,10 @@ func (rtr *RelativeTimeRange) isValid() bool {
return rtr.From > rtr.To
}
func (rtr *RelativeTimeRange) toTimeRange(now time.Time) *pluginv2.TimeRange {
return &pluginv2.TimeRange{
FromEpochMS: now.Add(-time.Duration(rtr.From)).UnixNano() / 1e6,
ToEpochMS: now.Add(-time.Duration(rtr.To)).UnixNano() / 1e6,
func (rtr *RelativeTimeRange) toTimeRange(now time.Time) backend.TimeRange {
return backend.TimeRange{
From: now.Add(-time.Duration(rtr.From)),
To: now.Add(-time.Duration(rtr.To)),
}
}
@ -103,9 +101,9 @@ func (aq *AlertQuery) setDatasource() error {
return fmt.Errorf("failed to get datasource from query model")
}
if dsName == "__expr__" {
aq.DatasourceID = DefaultExprDatasourceID
aq.modelProps["datasourceId"] = DefaultExprDatasourceID
if dsName == expr.DatasourceName {
aq.DatasourceID = expr.DatasourceID
aq.modelProps["datasourceId"] = expr.DatasourceID
return nil
}
@ -127,7 +125,7 @@ func (aq *AlertQuery) IsExpression() (bool, error) {
if err != nil {
return false, err
}
return aq.DatasourceID == DefaultExprDatasourceID, nil
return aq.DatasourceID == expr.DatasourceID, nil
}
// setMaxDatapoints sets the model maxDataPoints if it's missing or invalid
@ -194,6 +192,19 @@ func (aq *AlertQuery) getIntervalMS() (int64, error) {
return int64(intervalMs), nil
}
func (aq *AlertQuery) getIntervalDuration() (time.Duration, error) {
err := aq.setIntervalMS()
if err != nil {
return 0, err
}
intervalMs, ok := aq.modelProps["intervalMs"].(float64)
if !ok {
return 0, fmt.Errorf("failed to cast intervalMs to float64: %v", aq.modelProps["intervalMs"])
}
return time.Duration(intervalMs) * time.Millisecond, nil
}
// GetDatasource returns the query datasource identifier.
func (aq *AlertQuery) GetDatasource() (int64, error) {
err := aq.setDatasource()

View File

@ -6,6 +6,7 @@ import (
"testing"
"time"
"github.com/grafana/grafana/pkg/expr"
"github.com/stretchr/testify/assert"
"github.com/stretchr/testify/require"
)
@ -32,8 +33,8 @@ func TestAlertQuery(t *testing.T) {
}`),
},
expectedIsExpression: true,
expectedDatasource: "__expr__",
expectedDatasourceID: int64(DefaultExprDatasourceID),
expectedDatasource: expr.DatasourceName,
expectedDatasourceID: int64(expr.DatasourceID),
expectedMaxPoints: int64(defaultMaxDataPoints),
expectedIntervalMS: int64(defaultIntervalMS),
},

View File

@ -7,11 +7,10 @@ import (
"fmt"
"time"
"github.com/grafana/grafana-plugin-sdk-go/backend"
"github.com/grafana/grafana-plugin-sdk-go/data"
"github.com/grafana/grafana-plugin-sdk-go/genproto/pluginv2"
"github.com/grafana/grafana/pkg/expr"
"github.com/grafana/grafana/pkg/models"
"github.com/grafana/grafana/pkg/plugins"
"github.com/grafana/grafana/pkg/tsdb"
)
// invalidEvalResultFormatError is an error for invalid format of the alert definition evaluation results.
@ -99,11 +98,11 @@ func (c *Condition) Execute(ctx AlertExecCtx, fromStr, toStr string) (*Execution
return nil, fmt.Errorf("invalid conditions")
}
pbQuery := &pluginv2.QueryDataRequest{
PluginContext: &pluginv2.PluginContext{
queryDataReq := &backend.QueryDataRequest{
PluginContext: backend.PluginContext{
// TODO: Things probably
},
Queries: []*pluginv2.DataQuery{},
Queries: []backend.DataQuery{},
}
for i := range c.QueriesAndExpressions {
@ -112,7 +111,7 @@ func (c *Condition) Execute(ctx AlertExecCtx, fromStr, toStr string) (*Execution
if err != nil {
return nil, fmt.Errorf("failed to get query model: %w", err)
}
intervalMS, err := q.getIntervalMS()
interval, err := q.getIntervalDuration()
if err != nil {
return nil, fmt.Errorf("failed to retrieve intervalMs from the model: %w", err)
}
@ -122,18 +121,17 @@ func (c *Condition) Execute(ctx AlertExecCtx, fromStr, toStr string) (*Execution
return nil, fmt.Errorf("failed to retrieve maxDatapoints from the model: %w", err)
}
pbQuery.Queries = append(pbQuery.Queries, &pluginv2.DataQuery{
Json: model,
IntervalMS: intervalMS,
RefId: q.RefID,
queryDataReq.Queries = append(queryDataReq.Queries, backend.DataQuery{
JSON: model,
Interval: interval,
RefID: q.RefID,
MaxDataPoints: maxDatapoints,
QueryType: q.QueryType,
TimeRange: q.RelativeTimeRange.toTimeRange(time.Now()),
})
}
tw := plugins.Transform
pbRes, err := tw.TransformClient.TransformData(ctx.Ctx, pbQuery, tw.Callback)
pbRes, err := expr.TransformData(ctx.Ctx, queryDataReq)
if err != nil {
return &result, err
}
@ -142,12 +140,7 @@ func (c *Condition) Execute(ctx AlertExecCtx, fromStr, toStr string) (*Execution
if refID != c.RefID {
continue
}
df := tsdb.NewEncodedDataFrames(res.Frames)
result.Results, err = df.Decoded()
if err != nil {
result.Error = err
return &result, err
}
result.Results = res.Frames
}
if len(result.Results) == 0 {

View File

@ -3,8 +3,8 @@ package ngalert
import (
"fmt"
"github.com/grafana/grafana/pkg/expr"
"github.com/grafana/grafana/pkg/models"
"github.com/grafana/grafana/pkg/services/ngalert/eval"
)
// validateAlertDefinition validates that the alert definition contains at least one alert query
@ -20,7 +20,7 @@ func (ng *AlertNG) validateAlertDefinition(alertDefinition *AlertDefinition, sig
return err
}
if datasourceID == eval.DefaultExprDatasourceID {
if datasourceID == expr.DatasourceID {
return nil
}

View File

@ -55,8 +55,8 @@ var engineCache = engineCacheType{
var sqlIntervalCalculator = tsdb.NewIntervalCalculator(nil)
//nolint:gocritic
// NewXormEngine is an xorm.Engine factory, that can be stubbed by tests.
//nolint:gocritic
var NewXormEngine = func(driverName string, connectionString string) (*xorm.Engine, error) {
return xorm.NewEngine(driverName, connectionString)
}

View File

@ -1,9 +1,3 @@
{
"plugins": [
{
"name": "gel",
"version": "0.6.0",
"checksum": "eeab73565b0f167d3d576dc6da86d53419bff5bbb2ea927fae1cc2fe4b52f55f"
}
]
"plugins": []
}

View File

@ -97,7 +97,7 @@ export class ExpressionQueryEditor extends PureComponent<Props, State> {
const { query, onChange } = this.props;
onChange({
...query,
rule: item.value!,
window: item.value!,
});
};
@ -109,11 +109,11 @@ export class ExpressionQueryEditor extends PureComponent<Props, State> {
});
};
onRuleChange = (evt: ChangeEvent<any>) => {
onWindowChange = (evt: ChangeEvent<any>) => {
const { query, onChange } = this.props;
onChange({
...query,
rule: evt.target.value,
window: evt.target.value,
});
};
@ -150,7 +150,7 @@ export class ExpressionQueryEditor extends PureComponent<Props, State> {
<Input onChange={this.onExpressionChange} value={query.expression} width={25} />
</InlineField>
<InlineField label="Window">
<Input onChange={this.onRuleChange} value={query.rule} width={25} />
<Input onChange={this.onWindowChange} value={query.window} width={25} />
</InlineField>
<InlineField label="Downsample">
<Select options={downsamplingTypes} value={downsampler} onChange={this.onSelectDownsampler} width={25} />

View File

@ -14,7 +14,7 @@ export interface ExpressionQuery extends DataQuery {
type: GELQueryType;
reducer?: string;
expression?: string;
rule?: string;
window?: string;
downsampler?: string;
upsampler?: string;
}