diff --git a/go.mod b/go.mod index b2d57d2c85d..3ea967c2a18 100644 --- a/go.mod +++ b/go.mod @@ -300,7 +300,7 @@ require ( github.com/hashicorp/memberlist v0.5.0 // indirect github.com/hashicorp/yamux v0.1.1 // indirect github.com/inconshreveable/mousetrap v1.1.0 // indirect - github.com/invopop/jsonschema v0.12.0 // indirect + github.com/invopop/jsonschema v0.13.0 // indirect github.com/invopop/yaml v0.3.1 // indirect github.com/jackc/pgpassfile v1.0.0 // indirect github.com/jackc/pgservicefile v0.0.0-20240606120523-5a60cdf6a761 // indirect diff --git a/go.sum b/go.sum index a5e9100ceb0..8ddd913c161 100644 --- a/go.sum +++ b/go.sum @@ -1692,8 +1692,8 @@ github.com/influxdata/influxql v1.4.0 h1:Lf62rbAF8KWQf+4Djqf4hVXgmQuGozUoSD6kNWj github.com/influxdata/influxql v1.4.0/go.mod h1:VqxAKyQz5p8GzgGsxWalCWYGxEqw6kvJo2IickMQiQk= github.com/influxdata/line-protocol v0.0.0-20210922203350-b1ad95c89adf h1:7JTmneyiNEwVBOHSjoMxiWAqB992atOeepeFYegn5RU= github.com/influxdata/line-protocol v0.0.0-20210922203350-b1ad95c89adf/go.mod h1:xaLFMmpvUxqXtVkUJfg9QmT88cDaCJ3ZKgdZ78oO8Qo= -github.com/invopop/jsonschema v0.12.0 h1:6ovsNSuvn9wEQVOyc72aycBMVQFKz7cPdMJn10CvzRI= -github.com/invopop/jsonschema v0.12.0/go.mod h1:ffZ5Km5SWWRAIN6wbDXItl95euhFz2uON45H2qjYt+0= +github.com/invopop/jsonschema v0.13.0 h1:KvpoAJWEjR3uD9Kbm2HWJmqsEaHt8lBUpd0qHcIi21E= +github.com/invopop/jsonschema v0.13.0/go.mod h1:ffZ5Km5SWWRAIN6wbDXItl95euhFz2uON45H2qjYt+0= github.com/invopop/yaml v0.3.1 h1:f0+ZpmhfBSS4MhG+4HYseMdJhoeeopbSKbq5Rpeelso= github.com/invopop/yaml v0.3.1/go.mod h1:PMOp3nn4/12yEZUFfmOuNHJsZToEEOwoWsT+D81KkeA= github.com/ionos-cloud/sdk-go/v6 v6.3.0 h1:/lTieTH9Mo/CWm3cTlFLnK10jgxjUGkAqRffGqvPteY= diff --git a/go.work.sum b/go.work.sum index e6633f8a092..57fda6cd812 100644 --- a/go.work.sum +++ b/go.work.sum @@ -1609,6 +1609,7 @@ github.com/franela/goreq v0.0.0-20171204163338-bcd34c9993f8 h1:a9ENSRDFBUPkJ5lCg github.com/franela/goreq v0.0.0-20171204163338-bcd34c9993f8/go.mod h1:ZhphrRTfi2rbfLwlschooIH4+wKKDR4Pdxhh+TRoA20= github.com/frankban/quicktest v1.14.0/go.mod h1:NeW+ay9A/U67EYXNFA1nPE8e/tnQv/09mUdL/ijj8og= github.com/frankban/quicktest v1.14.5/go.mod h1:4ptaffx2x8+WTWXmUCuVU6aPUX1/Mz7zb5vbUoiM6w0= +github.com/fsnotify/fsnotify v1.4.9/go.mod h1:znqG4EE+3YCdAaPaxE2ZRY/06pZUdp0tY4IgpuI1SZQ= github.com/fsnotify/fsnotify v1.7.0/go.mod h1:40Bi/Hjc2AVfZrqy+aj+yEI+/bRxZnMJyTJwOpGvigM= github.com/fsouza/fake-gcs-server v1.7.0 h1:Un0BXUXrRWYSmYyC1Rqm2e2WJfTPyDy/HGMz31emTi8= github.com/gabriel-vasile/mimetype v1.4.3 h1:in2uUcidCuFcDKtdcBxlR0rJ1+fsokWf+uqxgUFjbI0= @@ -1661,6 +1662,7 @@ github.com/go-playground/validator/v10 v10.19.0 h1:ol+5Fu+cSq9JD7SoSqe04GMI92cbn github.com/go-playground/validator/v10 v10.19.0/go.mod h1:dbuPbCMFw/DrkbEynArYaCwl3amGuJotoKCe95atGMM= github.com/go-resty/resty/v2 v2.12.0 h1:rsVL8P90LFvkUYq/V5BTVe203WfRIU4gvcf+yfzJzGA= github.com/go-resty/resty/v2 v2.12.0/go.mod h1:o0yGPrkS3lOe1+eFajk6kBW8ScXzwU3hD69/gt2yB/0= +github.com/go-task/slim-sprig v0.0.0-20210107165309-348f09dbbbc0/go.mod h1:fyg7847qk6SyHyPtNmDHnmrv/HOrqktSC+C9fM+CJOE= github.com/go-task/slim-sprig v0.0.0-20230315185526-52ccab3ef572/go.mod h1:9Pwr4B2jHnOSGXyyzV8ROjYa2ojvAY6HCGYYfMoC3Ls= github.com/go-viper/mapstructure/v2 v2.0.0-alpha.1 h1:TQcrn6Wq+sKGkpyPvppOz99zsMBaUOKXq6HSv655U1c= github.com/go-viper/mapstructure/v2 v2.0.0-alpha.1/go.mod h1:oJDH3BJKyqBA2TXFhDsKDGDTlndYOZ6rGS0BRZIxGhM= @@ -1707,6 +1709,7 @@ github.com/google/go-pkcs11 v0.2.1-0.20230907215043-c6f79328ddf9/go.mod h1:6eQoG github.com/google/go-pkcs11 v0.3.0 
h1:PVRnTgtArZ3QQqTGtbtjtnIkzl2iY2kt24yqbrf7td8= github.com/google/go-pkcs11 v0.3.0/go.mod h1:6eQoGcuNJpa7jnd5pMGdkSaQpNDYvPlXWMcjXXThLlY= github.com/google/gofuzz v1.1.0/go.mod h1:dBl0BpW6vV/+mYPU4Po3pmUjxk6FQPldtuIdl/M65Eg= +github.com/google/pprof v0.0.0-20210407192527-94a9f03dee38/go.mod h1:kpwsk12EmLew5upagYY7GY0pfYCcupk39gWOCRROcvE= github.com/google/pprof v0.0.0-20221118152302-e6195bd50e26/go.mod h1:dDKJzRmX4S37WGHujM7tX//fmj1uioxKzKxz3lo4HJo= github.com/google/pprof v0.0.0-20240416155748-26353dc0451f/go.mod h1:kf6iHlnVGwgKolg33glAes7Yg/8iWP8ukqeldJSO7jw= github.com/google/renameio v0.1.0 h1:GOZbcHa3HfsPKPlmyPyN2KEohoMXOhdMbHrvbpl2QaA= @@ -1983,7 +1986,11 @@ github.com/ncw/swift/v2 v2.0.2/go.mod h1:z0A9RVdYPjNjXVo2pDOPxZ4eu3oarO1P91fTItc github.com/niemeyer/pretty v0.0.0-20200227124842-a10e7caefd8e h1:fD57ERR4JtEqsWbfPhv4DMiApHyliiK5xCTNVSPiaAs= github.com/nsf/jsondiff v0.0.0-20230430225905-43f6cf3098c1 h1:dOYG7LS/WK00RWZc8XGgcUTlTxpp3mKhdR2Q9z9HbXM= github.com/nsf/jsondiff v0.0.0-20230430225905-43f6cf3098c1/go.mod h1:mpRZBD8SJ55OIICQ3iWH0Yz3cjzA61JdqMLoWXeB2+8= +github.com/nxadm/tail v1.4.4/go.mod h1:kenIhsEOeOJmVchQTgglprH7qJGnHDVpk1VPCcaMI8A= +github.com/onsi/ginkgo v1.12.1/go.mod h1:zj2OWP4+oCPe1qIXoGWkgMRwljMUYCdkwsT2108oapk= github.com/onsi/ginkgo v1.16.2/go.mod h1:CObGmKUOKaSC0RjmoAK7tKyn4Azo5P2IWuoMnvwxz1E= +github.com/onsi/ginkgo v1.16.4/go.mod h1:dX+/inL/fNMqNlz0e9LfyB9TswhZpCVdJM/Z6Vvnwo0= +github.com/onsi/ginkgo/v2 v2.1.3/go.mod h1:vw5CSIxN1JObi/U8gcbwft7ZxR2dgaR70JSE3/PpL4c= github.com/onsi/ginkgo/v2 v2.1.4/go.mod h1:um6tUpWM/cxCK3/FK8BXqEiUMUwRgSM4JXG47RKZmLU= github.com/onsi/ginkgo/v2 v2.1.6/go.mod h1:MEH45j8TBi6u9BMogfbp0stKC5cdGjumZj5Y7AG4VIk= github.com/onsi/ginkgo/v2 v2.3.0/go.mod h1:Eew0uilEqZmIEZr8JrvYlvOM7Rr6xzTmMV8AyFNU9d0= @@ -1998,7 +2005,11 @@ github.com/onsi/ginkgo/v2 v2.9.5/go.mod h1:tvAoo1QUJwNEU2ITftXTpR7R1RbCzoZUOs3Ro github.com/onsi/ginkgo/v2 v2.9.7/go.mod h1:cxrmXWykAwTwhQsJOPfdIDiJ+l2RYq7U8hFU+M/1uw0= github.com/onsi/ginkgo/v2 v2.11.0/go.mod h1:ZhrRA5XmEE3x3rhlzamx/JJvujdZoJ2uvgI7kR0iZvM= github.com/onsi/ginkgo/v2 v2.13.0/go.mod h1:TE309ZR8s5FsKKpuB1YAQYBzCaAfUgatB/xlT/ETL/o= +github.com/onsi/gomega v1.7.1/go.mod h1:XdKZgCCFLUoM/7CFJVPcG8C1xQ1AJ0vpAezJrB7JYyY= +github.com/onsi/gomega v1.10.1/go.mod h1:iN09h71vgCQne3DLsj+A5owkum+a2tYe+TOCB1ybHNo= github.com/onsi/gomega v1.13.0/go.mod h1:lRk9szgn8TxENtWd0Tp4c3wjlRfMTMH27I+3Je41yGY= +github.com/onsi/gomega v1.17.0/go.mod h1:HnhC7FXeEQY45zxNK3PPoIUhzk/80Xly9PcubAlGdZY= +github.com/onsi/gomega v1.19.0/go.mod h1:LY+I3pBVzYsTBU1AnDwOSxaYi9WoWiqgwooUqq9yPro= github.com/onsi/gomega v1.20.1/go.mod h1:DtrZpjmvpn2mPm4YWQa0/ALMDj9v4YxLgojwPeREyVo= github.com/onsi/gomega v1.21.1/go.mod h1:iYAIXgPSaDHak0LCMA+AWBpIKBr8WZicMxnE8luStNc= github.com/onsi/gomega v1.22.1/go.mod h1:x6n7VNe4hw0vkyYUM4mjIXx3JbLiPaBPNgB7PRQ1tuM= @@ -2445,7 +2456,9 @@ golang.org/x/mod v0.16.0/go.mod h1:hTbmBsO62+eylJbnUtE2MGJUyE7QWk4xUqPFrRgJ+7c= golang.org/x/mod v0.17.0/go.mod h1:hTbmBsO62+eylJbnUtE2MGJUyE7QWk4xUqPFrRgJ+7c= golang.org/x/mod v0.18.0/go.mod h1:hTbmBsO62+eylJbnUtE2MGJUyE7QWk4xUqPFrRgJ+7c= golang.org/x/mod v0.21.0/go.mod h1:6SkKJ3Xj0I0BrPOZoBy3bdMptDDU9oJrpohJ3eWZ1fY= +golang.org/x/net v0.0.0-20200520004742-59133d7f0dd7/go.mod h1:qpuaurCH72eLCgpAm/N6yyVIVM9cpaDIP3A8BGJEC5A= golang.org/x/net v0.0.0-20201010224723-4f7140c49acb/go.mod h1:sp8m0HH+o8qH0wwXwYZr8TS3Oi6o0r6Gce1SSxlDquU= +golang.org/x/net v0.0.0-20210428140749-89ef3d95e781/go.mod h1:OJAsFXCWl8Ukc7SiCT/9KSuxbyM7479/AVlXFRxuMCk= golang.org/x/net 
v0.0.0-20210614182718-04defd469f4e/go.mod h1:9nx3DQGgdP8bBQD5qxJ1jj9UTztislL4KSBs9R2vV5Y= golang.org/x/net v0.0.0-20210726213435-c6fcb2dbf985/go.mod h1:9nx3DQGgdP8bBQD5qxJ1jj9UTztislL4KSBs9R2vV5Y= golang.org/x/net v0.0.0-20210917221730-978cfadd31cf/go.mod h1:9nx3DQGgdP8bBQD5qxJ1jj9UTztislL4KSBs9R2vV5Y= @@ -2482,6 +2495,10 @@ golang.org/x/sync v0.4.0/go.mod h1:FU7BRWz2tNW+3quACPkgCx/L+uEAv1htQ0V83Z9Rj+Y= golang.org/x/sync v0.7.0/go.mod h1:Czt+wKu1gCyEFDUtn0jG5QVvpJ6rzVqr5aXyt9drQfk= golang.org/x/sync v0.8.0/go.mod h1:Czt+wKu1gCyEFDUtn0jG5QVvpJ6rzVqr5aXyt9drQfk= golang.org/x/sys v0.0.0-20190130150945-aca44879d564/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= +golang.org/x/sys v0.0.0-20190904154756-749cb33beabd/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20191005200804-aed5e4c7ecf9/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20191120155948-bd437916bb0e/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20210112080510-489259a85091/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20210616045830-e2b7044e8c71/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.0.0-20210906170528-6f6e22806c34/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.0.0-20210917161153-d61c044b1678/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= @@ -2528,6 +2545,7 @@ golang.org/x/time v0.6.0/go.mod h1:3BpzKBy/shNhVucY/MWOyx10tF3SFh9QdLuxbVysPQM= golang.org/x/time v0.7.0/go.mod h1:3BpzKBy/shNhVucY/MWOyx10tF3SFh9QdLuxbVysPQM= golang.org/x/tools v0.0.0-20190624222133-a101b041ded4/go.mod h1:/rFqwRUd4F7ZHNgwSSTFct+R/Kf4OFW1sUzUTQQTgfc= golang.org/x/tools v0.0.0-20200505023115-26f46d2f7ef8/go.mod h1:EkVYQZoAsY45+roYkvgYkIh4xh/qjgUK9TdY2XT94GE= +golang.org/x/tools v0.0.0-20201224043029-2b0845dc783e/go.mod h1:emZCQorbCU4vsT4fOWvOPXz4eW1wZW4PmDk9uLelYpA= golang.org/x/tools v0.1.6-0.20210726203631-07bc1bf47fb2/go.mod h1:o0xws9oXOQQZyjljx8fwUC0k7L1pTE6eaCbjGeHmOkk= golang.org/x/tools v0.1.8-0.20211029000441-d6a9af8af023/go.mod h1:nABZi5QlRsZVlzPpHl034qft6wpY4eDcsTt5AaioBiU= golang.org/x/tools v0.1.10/go.mod h1:Uh6Zz+xoGYZom868N8YTex3t7RhtHDBrE8Gzo9bV56E= diff --git a/kindsv2/go.mod b/kindsv2/go.mod index 37f632240d1..bd1b49f86ef 100644 --- a/kindsv2/go.mod +++ b/kindsv2/go.mod @@ -2,7 +2,7 @@ module github.com/grafana/grafana/kindsv2 go 1.23.1 -require github.com/grafana/cog v0.0.10 +require github.com/grafana/cog v0.0.12 require ( cuelabs.dev/go/oci/ociregistry v0.0.0-20240906074133-82eb438dd565 // indirect diff --git a/kindsv2/go.sum b/kindsv2/go.sum index 6a78ef7df75..593dbcbd428 100644 --- a/kindsv2/go.sum +++ b/kindsv2/go.sum @@ -26,8 +26,8 @@ github.com/google/uuid v1.6.0 h1:NIvaJDMOsjHA8n1jAhLSgzrAzy1Hgr+hNrb57e+94F0= github.com/google/uuid v1.6.0/go.mod h1:TIyPZe4MgqvfeYDBFedMoGGpEw/LqOeaOT+nhxU+yHo= github.com/grafana/codejen v0.0.4-0.20230321061741-77f656893a3d h1:hrXbGJ5jgp6yNITzs5o+zXq0V5yT3siNJ+uM8LGwWKk= github.com/grafana/codejen v0.0.4-0.20230321061741-77f656893a3d/go.mod h1:zmwwM/DRyQB7pfuBjTWII3CWtxcXh8LTwAYGfDfpR6s= -github.com/grafana/cog v0.0.10 h1:YqtqZ1a51njXEVRKspuQifRwgYgU5NcK5KwuiEDGUqo= -github.com/grafana/cog v0.0.10/go.mod h1:HwJbc60fZ+viayROClLGdDwO5w/JjBOpO9wjGnAfMLc= +github.com/grafana/cog v0.0.12 h1:MJfFUVzp0El3+zZCmUQ2Y8uzwvM3aa5zj7EOeeuG6VY= +github.com/grafana/cog v0.0.12/go.mod h1:HwJbc60fZ+viayROClLGdDwO5w/JjBOpO9wjGnAfMLc= github.com/hashicorp/errwrap v1.0.0/go.mod 
h1:YH+1FKiLXxHSkmPseP+kNlulaMuP3n2brvKWEqk/Jc4= github.com/hashicorp/errwrap v1.1.0 h1:OxrOeh75EUXMY8TBjag2fzXGZ40LB6IKw45YeGUDY2I= github.com/hashicorp/errwrap v1.1.0/go.mod h1:YH+1FKiLXxHSkmPseP+kNlulaMuP3n2brvKWEqk/Jc4= diff --git a/pkg/codegen/generators/decorators.go b/pkg/codegen/generators/decorators.go deleted file mode 100644 index 5d9500f6a54..00000000000 --- a/pkg/codegen/generators/decorators.go +++ /dev/null @@ -1,282 +0,0 @@ -package generators - -import ( - "fmt" - "regexp" - "strings" - "unicode" - - "github.com/dave/dst" - "github.com/dave/dst/dstutil" -) - -// depointerizer returns an AST manipulator that removes redundant -// pointer indirection from the defined types. -func depointerizer() dstutil.ApplyFunc { - return func(c *dstutil.Cursor) bool { - switch x := c.Node().(type) { - case *dst.Field: - if s, is := x.Type.(*dst.StarExpr); is { - switch deref := depoint(s).(type) { - case *dst.ArrayType, *dst.MapType: - x.Type = deref - } - } - } - return true - } -} - -func depoint(e dst.Expr) dst.Expr { - if star, is := e.(*dst.StarExpr); is { - return star.X - } - return e -} - -func setStar(e dst.Expr) string { - if _, is := e.(*dst.StarExpr); is { - return "*" - } - return "" -} - -func fixTODOComments() dstutil.ApplyFunc { - return func(cursor *dstutil.Cursor) bool { - switch f := cursor.Node().(type) { - case *dst.File: - for _, d := range f.Decls { - if isTypeSpec(d) { - removeGoFieldComment(d.Decorations().Start.All()) - } - fixTODOComment(d.Decorations().Start.All()) - } - case *dst.Field: - if len(f.Names) > 0 { - removeGoFieldComment(f.Decorations().Start.All()) - } - } - - return true - } -} - -func fixTODOComment(comments []string) { - todoRegex := regexp.MustCompile("(//) (.*) (TODO.*)") - if len(comments) > 0 { - comments[0] = todoRegex.ReplaceAllString(comments[0], "$1 $3") - } -} - -func removeGoFieldComment(comments []string) { - todoRegex := regexp.MustCompile("(//) ([A-Z].*?) ([A-Z]?.*?) (.*)") - if len(comments) > 0 { - matches := todoRegex.FindAllStringSubmatch(comments[0], -1) - if len(matches) > 0 { - if strings.EqualFold(matches[0][3], matches[0][2]) { - comments[0] = fmt.Sprintf("%s %s %s", matches[0][1], matches[0][3], matches[0][4]) - } else { - r := []rune(matches[0][3]) - if !unicode.IsLower(r[0]) { - comments[0] = fmt.Sprintf("%s %s %s", matches[0][1], matches[0][3], matches[0][4]) - } - } - } - } -} - -func isTypeSpec(d dst.Decl) bool { - gd, ok := d.(*dst.GenDecl) - if !ok { - return false - } - - _, is := gd.Specs[0].(*dst.TypeSpec) - return is -} - -// It fixes the "generic" fields. It happens when a value in cue could be different structs. -// For Go it generates a struct with a json.RawMessage field inside and multiple functions to map it between the different possibilities. 
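For context on the `fixRawData` pass being removed here, a minimal before/after sketch of the rewrite it performed; the type and field names are hypothetical, not taken from this repository's generated output:

```go
package example

import "encoding/json"

// Before: the wrapper oapi-codegen emits for a CUE disjunction, a struct whose
// only field is a json.RawMessage, plus mapping functions (omitted here).
type PanelOptions struct {
	Union json.RawMessage
}

type PanelBefore struct {
	Options PanelOptions `json:"options"`
}

// After fixRawData: the wrapper type and its mapping functions are deleted, and
// every field that referenced it is rewritten to plain `any`.
type PanelAfter struct {
	Options any `json:"options"`
}
```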
-func fixRawData() dstutil.ApplyFunc { - return func(c *dstutil.Cursor) bool { - f, is := c.Node().(*dst.File) - if !is { - return false - } - - rawFields := make(map[string]bool) - existingRawFields := make(map[string]bool) - for _, decl := range f.Decls { - switch x := decl.(type) { - // Find the structs that only contains one json.RawMessage inside - case *dst.GenDecl: - for _, t := range x.Specs { - if ts, ok := t.(*dst.TypeSpec); ok { - if tp, ok := ts.Type.(*dst.StructType); ok && len(tp.Fields.List) == 1 { - if fn, ok := tp.Fields.List[0].Type.(*dst.SelectorExpr); ok { - if fmt.Sprintf("%s.%s", fn.X, fn.Sel.Name) == "json.RawMessage" { - rawFields[ts.Name.Name] = true - } - } - } - } - } - // Find the functions of the previous structs to verify that are the ones that we are looking for. - case *dst.FuncDecl: - for _, recv := range x.Recv.List { - fnType := depoint(recv.Type).(*dst.Ident).Name - if rawFields[fnType] { - existingRawFields[fnType] = true - } - } - } - } - - dstutil.Apply(f, func(c *dstutil.Cursor) bool { - switch x := c.Node().(type) { - // Delete the functions - case *dst.FuncDecl: - c.Delete() - case *dst.GenDecl: - // Deletes all "generics" generated for these json.RawMessage structs - comments := x.Decorations().Start.All() - if len(comments) > 0 { - if strings.HasSuffix(comments[0], "defines model for .") { - c.Delete() - } - } - for _, spec := range x.Specs { - if tp, ok := spec.(*dst.TypeSpec); ok { - // Delete structs with only json.RawMessage - if existingRawFields[tp.Name.Name] && tp.Name.Name != "MetricAggregation2" { - c.Delete() - continue - } - // Set types that was using these structs as interface{} - if st, ok := tp.Type.(*dst.StructType); ok { - iterateStruct(st, withoutRawData(existingRawFields)) - } - if mt, ok := tp.Type.(*dst.MapType); ok { - iterateMap(mt, withoutRawData(existingRawFields)) - } - if at, ok := tp.Type.(*dst.ArrayType); ok { - iterateArray(at, withoutRawData(existingRawFields)) - } - } - } - } - return true - }, nil) - - return true - } -} - -// Fixes type name containing underscores in the generated Go files -func fixUnderscoreInTypeName() dstutil.ApplyFunc { - return func(c *dstutil.Cursor) bool { - switch x := c.Node().(type) { - case *dst.GenDecl: - if specs, isType := x.Specs[0].(*dst.TypeSpec); isType { - if strings.Contains(specs.Name.Name, "_") { - oldName := specs.Name.Name - specs.Name.Name = strings.ReplaceAll(specs.Name.Name, "_", "") - x.Decs.Start[0] = strings.ReplaceAll(x.Decs.Start[0], oldName, specs.Name.Name) - } - if st, ok := specs.Type.(*dst.StructType); ok { - iterateStruct(st, withoutUnderscore) - } - if mt, ok := specs.Type.(*dst.MapType); ok { - iterateMap(mt, withoutUnderscore) - } - if at, ok := specs.Type.(*dst.ArrayType); ok { - iterateArray(at, withoutUnderscore) - } - } - case *dst.Field: - findFieldsWithUnderscores(x) - } - return true - } -} - -func findFieldsWithUnderscores(x *dst.Field) { - switch t := x.Type.(type) { - case *dst.Ident: - withoutUnderscore(t) - case *dst.StarExpr: - if i, is := t.X.(*dst.Ident); is { - withoutUnderscore(i) - } - case *dst.ArrayType: - iterateArray(t, withoutUnderscore) - case *dst.MapType: - iterateMap(t, withoutUnderscore) - } -} - -func withoutUnderscore(i *dst.Ident) { - if strings.Contains(i.Name, "_") { - i.Name = strings.ReplaceAll(i.Name, "_", "") - } -} - -func withoutRawData(existingFields map[string]bool) func(ident *dst.Ident) { - return func(i *dst.Ident) { - if existingFields[i.Name] { - i.Name = setStar(i) + "any" - } - } -} - -func iterateStruct(s 
*dst.StructType, fn func(i *dst.Ident)) { - for i, f := range s.Fields.List { - switch mx := depoint(f.Type).(type) { - case *dst.Ident: - fn(mx) - case *dst.ArrayType: - iterateArray(mx, fn) - case *dst.MapType: - iterateMap(mx, fn) - case *dst.StructType: - iterateStruct(mx, fn) - case *dst.InterfaceType: - s.Fields.List[i].Type = interfaceToAny(f.Type) - } - } -} - -func iterateMap(s *dst.MapType, fn func(i *dst.Ident)) { - switch mx := s.Value.(type) { - case *dst.Ident: - fn(mx) - case *dst.ArrayType: - iterateArray(mx, fn) - case *dst.MapType: - iterateMap(mx, fn) - case *dst.InterfaceType: - s.Value = interfaceToAny(s.Value) - } -} - -func iterateArray(a *dst.ArrayType, fn func(i *dst.Ident)) { - switch mx := a.Elt.(type) { - case *dst.Ident: - fn(mx) - case *dst.ArrayType: - iterateArray(mx, fn) - case *dst.StructType: - iterateStruct(mx, fn) - case *dst.InterfaceType: - a.Elt = interfaceToAny(a.Elt) - } -} - -func interfaceToAny(i dst.Expr) dst.Expr { - star := "" - if _, is := i.(*dst.StarExpr); is { - star = "*" - } - - return &dst.Ident{Name: star + "any"} -} diff --git a/pkg/codegen/generators/go_generator.go b/pkg/codegen/generators/go_generator.go deleted file mode 100644 index ad8e914504d..00000000000 --- a/pkg/codegen/generators/go_generator.go +++ /dev/null @@ -1,193 +0,0 @@ -package generators - -import ( - "bytes" - "fmt" - "go/parser" - "go/token" - "path/filepath" - "strings" - - "cuelang.org/go/cue" - "cuelang.org/go/pkg/encoding/yaml" - "github.com/dave/dst/decorator" - "github.com/dave/dst/dstutil" - "github.com/getkin/kin-openapi/openapi3" - "github.com/oapi-codegen/oapi-codegen/v2/pkg/codegen" - "golang.org/x/tools/imports" -) - -type GoConfig struct { - Config *OpenApiConfig - PackageName string - ApplyFuncs []dstutil.ApplyFunc -} - -func GenerateTypesGo(v cue.Value, cfg *GoConfig) ([]byte, error) { - if cfg == nil { - return nil, fmt.Errorf("configuration cannot be nil") - } - - applyFuncs := []dstutil.ApplyFunc{depointerizer(), fixRawData(), fixUnderscoreInTypeName(), fixTODOComments()} - applyFuncs = append(applyFuncs, cfg.ApplyFuncs...) 
- - f, err := generateOpenAPI(v, cfg.Config) - if err != nil { - return nil, err - } - - str, err := yaml.Marshal(v.Context().BuildFile(f)) - if err != nil { - return nil, fmt.Errorf("cue-yaml marshaling failed: %w", err) - } - - loader := openapi3.NewLoader() - oT, err := loader.LoadFromData([]byte(str)) - if err != nil { - return nil, fmt.Errorf("loading generated openapi failed: %w", err) - } - - schemaName, err := getSchemaName(v) - if err != nil { - return nil, err - } - - if cfg.PackageName == "" { - cfg.PackageName = schemaName - } - - // Hack to fix https://github.com/grafana/thema/pull/127 issue without importing - // to avoid to add the whole vendor in Grafana code - if cfg.PackageName == "dataquery" { - fixDataQuery(oT) - } - - ccfg := codegen.Configuration{ - PackageName: cfg.PackageName, - Compatibility: codegen.CompatibilityOptions{ - AlwaysPrefixEnumValues: true, - }, - Generate: codegen.GenerateOptions{ - Models: true, - }, - OutputOptions: codegen.OutputOptions{ - SkipPrune: true, - UserTemplates: map[string]string{ - "imports.tmpl": importstmpl, - }, - }, - } - - gostr, err := codegen.Generate(oT, ccfg) - if err != nil { - return nil, fmt.Errorf("openapi generation failed: %w", err) - } - - return postprocessGoFile(genGoFile{ - path: fmt.Sprintf("%s_type_gen.go", schemaName), - appliers: applyFuncs, - in: []byte(gostr), - }) -} - -type genGoFile struct { - path string - appliers []dstutil.ApplyFunc - in []byte -} - -func postprocessGoFile(cfg genGoFile) ([]byte, error) { - fname := sanitizeLabelString(filepath.Base(cfg.path)) - buf := new(bytes.Buffer) - fset := token.NewFileSet() - gf, err := decorator.ParseFile(fset, fname, string(cfg.in), parser.ParseComments) - if err != nil { - return nil, fmt.Errorf("error parsing generated file: %w", err) - } - - for _, af := range cfg.appliers { - dstutil.Apply(gf, af, nil) - } - - err = decorator.Fprint(buf, gf) - if err != nil { - return nil, fmt.Errorf("error formatting generated file: %w", err) - } - - byt, err := imports.Process(fname, buf.Bytes(), nil) - if err != nil { - return nil, fmt.Errorf("goimports processing of generated file failed: %w", err) - } - - // Compare imports before and after; warn about performance if some were added - gfa, _ := parser.ParseFile(fset, fname, string(byt), parser.ParseComments) - imap := make(map[string]bool) - for _, im := range gf.Imports { - imap[im.Path.Value] = true - } - var added []string - for _, im := range gfa.Imports { - if !imap[im.Path.Value] { - added = append(added, im.Path.Value) - } - } - - if len(added) != 0 { - // TODO improve the guidance in this error if/when we better abstract over imports to generate - return nil, fmt.Errorf("goimports added the following import statements to %s: \n\t%s\nRelying on goimports to find imports significantly slows down code generation. Either add these imports with an AST manipulation in cfg.ApplyFuncs, or set cfg.IgnoreDiscoveredImports to true", cfg.path, strings.Join(added, "\n\t")) - } - return byt, nil -} - -// fixDataQuery extends the properties for the AllOf schemas when a DataQuery exists. -// deep/oapi-codegen library ignores the properties of the models and only ones have references. -// It doesn't apply this change https://github.com/grafana/thema/pull/154 since it modifies the -// vendor implementation, and we don't import it. 
-func fixDataQuery(spec *openapi3.T) *openapi3.T { - for _, sch := range spec.Components.Schemas { - if sch.Value != nil && len(sch.Value.AllOf) > 0 { - for _, allOf := range sch.Value.AllOf { - for n, p := range allOf.Value.Properties { - sch.Value.Properties[n] = p - } - } - sch.Value.AllOf = nil - } - } - return spec -} - -// Almost all of the below imports are eliminated by dst transformers and calls -// to goimports - but if they're not present in the template, then the internal -// call to goimports that oapi-codegen makes will trigger a search for them, -// which can slow down codegen by orders of magnitude. -var importstmpl = `package {{ .PackageName }} - -import ( - "bytes" - "compress/gzip" - "context" - "encoding/base64" - "encoding/json" - "encoding/xml" - "errors" - "fmt" - "gopkg.in/yaml.v2" - "io" - "io/ioutil" - "os" - "net/http" - "net/url" - "path" - "strings" - "time" - - "github.com/oapi-codegen/runtime" - openapi_types "github.com/oapi-codegen/runtime/types" - "github.com/getkin/kin-openapi/openapi3" - "github.com/go-chi/chi/v5" - "github.com/labstack/echo/v4" - "github.com/gin-gonic/gin" - "github.com/gorilla/mux" -) -` diff --git a/pkg/codegen/generators/openapi_generator.go b/pkg/codegen/generators/openapi_generator.go deleted file mode 100644 index 19e02aa5488..00000000000 --- a/pkg/codegen/generators/openapi_generator.go +++ /dev/null @@ -1,199 +0,0 @@ -package generators - -import ( - "fmt" - "strings" - - "cuelang.org/go/cue" - "cuelang.org/go/cue/ast" - "cuelang.org/go/encoding/openapi" -) - -type OpenApiConfig struct { - Config *openapi.Config - IsGroup bool - RootName string - SubPath cue.Path -} - -func generateOpenAPI(v cue.Value, cfg *OpenApiConfig) (*ast.File, error) { - if cfg == nil { - return nil, fmt.Errorf("missing openapi configuration") - } - - if cfg.Config == nil { - cfg.Config = &openapi.Config{} - } - - name, err := getSchemaName(v) - if err != nil { - return nil, err - } - - gen := &oapiGen{ - cfg: cfg, - name: name, - val: v.LookupPath(cue.ParsePath("lineage.schemas[0].schema")), - subpath: cfg.SubPath, - bpath: v.LookupPath(cue.ParsePath("lineage.schemas[0]")).Path(), - } - - declFunc := genSchema - if cfg.IsGroup { - declFunc = genGroup - } - - decls, err := declFunc(gen) - - if err != nil { - return nil, err - } - - // TODO recursively sort output to improve stability of output - return &ast.File{ - Decls: []ast.Decl{ - ast.NewStruct( - "openapi", ast.NewString("3.0.0"), - "paths", ast.NewStruct(), - "components", ast.NewStruct( - "schemas", &ast.StructLit{Elts: decls}, - ), - ), - }, - }, nil -} - -type oapiGen struct { - cfg *OpenApiConfig - val cue.Value - subpath cue.Path - - // overall name for the generated oapi doc - name string - - // original NameFunc - onf func(cue.Value, cue.Path) string - - // full prefix path that leads up to the #SchemaDef, e.g. 
lin._sortedSchemas[0] - bpath cue.Path -} - -func genGroup(gen *oapiGen) ([]ast.Decl, error) { - ctx := gen.val.Context() - iter, err := gen.val.Fields(cue.Definitions(true), cue.Optional(true)) - if err != nil { - panic(fmt.Errorf("unreachable - should always be able to get iter for struct kinds: %w", err)) - } - - var decls []ast.Decl - for iter.Next() { - val, sel := iter.Value(), iter.Selector() - name := strings.Trim(sel.String(), "?#") - - v := ctx.CompileString(fmt.Sprintf("#%s: _", name)) - defpath := cue.MakePath(cue.Def(name)) - defsch := v.FillPath(defpath, val) - - cfgi := *gen.cfg.Config - cfgi.NameFunc = func(val cue.Value, path cue.Path) string { - return gen.nfSingle(val, path, defpath, name) - } - - part, err := openapi.Generate(defsch, &cfgi) - if err != nil { - return nil, fmt.Errorf("failed generation for grouped field %s: %w", sel, err) - } - - decls = append(decls, getSchemas(part)...) - } - - return decls, nil -} - -func genSchema(gen *oapiGen) ([]ast.Decl, error) { - hasSubpath := len(gen.cfg.SubPath.Selectors()) > 0 - name := sanitizeLabelString(gen.name) - if gen.cfg.RootName != "" { - name = gen.cfg.RootName - } else if hasSubpath { - sel := gen.cfg.SubPath.Selectors() - name = sel[len(sel)-1].String() - } - - val := gen.val - if hasSubpath { - for i, sel := range gen.cfg.SubPath.Selectors() { - if !gen.val.Allows(sel) { - return nil, fmt.Errorf("subpath %q not present in schema", cue.MakePath(gen.cfg.SubPath.Selectors()[:i+1]...)) - } - } - val = val.LookupPath(gen.cfg.SubPath) - } - - v := gen.val.Context().CompileString(fmt.Sprintf("#%s: _", name)) - defpath := cue.MakePath(cue.Def(name)) - defsch := v.FillPath(defpath, val) - - gen.cfg.Config.NameFunc = func(val cue.Value, path cue.Path) string { - return gen.nfSingle(val, path, defpath, name) - } - - f, err := openapi.Generate(defsch.Eval(), gen.cfg.Config) - if err != nil { - return nil, err - } - - return getSchemas(f), nil -} - -// For generating a single, our NameFunc must: -// - Eliminate any path prefixes on the element, both internal lineage and wrapping -// - Replace the name "_#schema" with the desired name -// - Call the user-provided NameFunc, if any -// - Remove CUE markers like #, !, ? -func (gen *oapiGen) nfSingle(val cue.Value, path, defpath cue.Path, name string) string { - tpath := trimPathPrefix(trimThemaPathPrefix(path, gen.bpath), defpath) - - if path.String() == "" || tpath.String() == defpath.String() { - return name - } - - if val == gen.val { - return "" - } - - if gen.onf != nil { - return gen.onf(val, tpath) - } - return strings.Trim(tpath.String(), "?#") -} - -func getSchemas(f *ast.File) []ast.Decl { - compos := orp(getFieldByLabel(f, "components")) - schemas := orp(getFieldByLabel(compos.Value, "schemas")) - return schemas.Value.(*ast.StructLit).Elts -} - -func orp[T any](t T, err error) T { - if err != nil { - panic(err) - } - return t -} - -func trimThemaPathPrefix(p, base cue.Path) cue.Path { - if !pathHasPrefix(p, base) { - return p - } - - rest := p.Selectors()[len(base.Selectors()):] - if len(rest) == 0 { - return cue.Path{} - } - switch rest[0].String() { - case "schema", "_#schema", "_join", "joinSchema": - return cue.MakePath(rest[1:]...) - default: - return cue.MakePath(rest...) 
- } -} diff --git a/pkg/codegen/generators/utils.go b/pkg/codegen/generators/utils.go deleted file mode 100644 index 382e9ed2078..00000000000 --- a/pkg/codegen/generators/utils.go +++ /dev/null @@ -1,130 +0,0 @@ -package generators - -import ( - "fmt" - "strconv" - "strings" - - "cuelang.org/go/cue" - "cuelang.org/go/cue/ast" - "cuelang.org/go/cue/token" -) - -// sanitizeLabelString strips characters from a string that are not allowed for -// use in a CUE label. -func sanitizeLabelString(s string) string { - return strings.Map(func(r rune) rune { - switch { - case r >= 'a' && r <= 'z': - fallthrough - case r >= 'A' && r <= 'Z': - fallthrough - case r >= '0' && r <= '9': - fallthrough - case r == '_': - return r - default: - return -1 - } - }, s) -} - -// trimPathPrefix strips the provided prefix from the provided path, if the -// prefix exists. -// -// If path and prefix are equivalent, and there is at least one additional -// selector in the provided path. -func trimPathPrefix(path, prefix cue.Path) cue.Path { - sels, psels := path.Selectors(), prefix.Selectors() - if len(sels) == 1 { - return path - } - var i int - for ; i < len(psels) && i < len(sels); i++ { - if !selEq(psels[i], sels[i]) { - break - } - } - return cue.MakePath(sels[i:]...) -} - -// selEq indicates whether two selectors are equivalent. Selectors are equivalent if -// they are either exactly equal, or if they are equal ignoring path optionality. -func selEq(s1, s2 cue.Selector) bool { - return s1 == s2 || s1.Optional() == s2.Optional() -} - -// getFieldByLabel returns the ast.Field with a given label from a struct-ish input. -func getFieldByLabel(n ast.Node, label string) (*ast.Field, error) { - var d []ast.Decl - switch x := n.(type) { - case *ast.File: - d = x.Decls - case *ast.StructLit: - d = x.Elts - default: - return nil, fmt.Errorf("not an *ast.File or *ast.StructLit") - } - - for _, el := range d { - if isFieldWithLabel(el, label) { - return el.(*ast.Field), nil - } - } - - return nil, fmt.Errorf("no field with label %q", label) -} - -func isFieldWithLabel(n ast.Node, label string) bool { - if x, is := n.(*ast.Field); is { - if l, is := x.Label.(*ast.BasicLit); is { - return strEq(l, label) - } - if l, is := x.Label.(*ast.Ident); is { - return identStrEq(l, label) - } - } - return false -} - -func strEq(lit *ast.BasicLit, str string) bool { - if lit.Kind != token.STRING { - return false - } - ls, _ := strconv.Unquote(lit.Value) - return str == ls || str == lit.Value -} - -func identStrEq(id *ast.Ident, str string) bool { - if str == id.Name { - return true - } - ls, _ := strconv.Unquote(id.Name) - return str == ls -} - -// pathHasPrefix tests whether the [cue.Path] p begins with prefix. 
-func pathHasPrefix(p, prefix cue.Path) bool { - ps, pres := p.Selectors(), prefix.Selectors() - if len(pres) > len(ps) { - return false - } - return pathsAreEq(ps[:len(pres)], pres) -} - -func pathsAreEq(p1s, p2s []cue.Selector) bool { - if len(p1s) != len(p2s) { - return false - } - for i := 0; i < len(p2s); i++ { - if !selEq(p2s[i], p1s[i]) { - return false - } - } - return true -} - -func getSchemaName(v cue.Value) (string, error) { - nameValue := v.LookupPath(cue.ParsePath("name")) - return nameValue.String() -} diff --git a/pkg/codegen/go.mod b/pkg/codegen/go.mod index 33c82749df2..0199ca6b685 100644 --- a/pkg/codegen/go.mod +++ b/pkg/codegen/go.mod @@ -5,21 +5,19 @@ go 1.23.1 require ( cuelang.org/go v0.11.1 github.com/dave/dst v0.27.3 - github.com/getkin/kin-openapi v0.128.0 github.com/grafana/codejen v0.0.4-0.20230321061741-77f656893a3d + github.com/grafana/cog v0.0.12 github.com/grafana/cuetsy v0.1.11 github.com/matryer/is v1.4.1 - github.com/oapi-codegen/oapi-codegen/v2 v2.4.1 - golang.org/x/tools v0.29.0 ) require ( github.com/cockroachdb/apd/v2 v2.0.2 // indirect github.com/dave/jennifer v1.6.0 // indirect github.com/davecgh/go-spew v1.1.2-0.20180830191138-d8f796af33cc // indirect - github.com/dprotaso/go-yit v0.0.0-20220510233725-9ba8df137936 // indirect github.com/emicklei/proto v1.13.2 // indirect - github.com/fsnotify/fsnotify v1.8.0 // indirect + github.com/expr-lang/expr v1.16.9 // indirect + github.com/getkin/kin-openapi v0.128.0 // indirect github.com/go-openapi/jsonpointer v0.21.0 // indirect github.com/go-openapi/swag v0.23.0 // indirect github.com/golang/glog v1.2.2 // indirect @@ -27,29 +25,28 @@ require ( github.com/google/uuid v1.6.0 // indirect github.com/hashicorp/errwrap v1.1.0 // indirect github.com/hashicorp/go-multierror v1.1.1 // indirect + github.com/huandu/xstrings v1.5.0 // indirect github.com/invopop/yaml v0.3.1 // indirect github.com/josharian/intern v1.0.0 // indirect github.com/kr/text v0.2.0 // indirect github.com/lib/pq v1.10.9 // indirect github.com/mailru/easyjson v0.7.7 // indirect + github.com/mitchellh/go-wordwrap v1.0.1 // indirect github.com/mohae/deepcopy v0.0.0-20170929034955-c48cc78d4826 // indirect github.com/mpvl/unique v0.0.0-20150818121801-cbe035fff7de // indirect - github.com/onsi/ginkgo v1.16.5 // indirect - github.com/onsi/gomega v1.35.1 // indirect github.com/perimeterx/marshmallow v1.1.5 // indirect github.com/pkg/errors v0.9.1 // indirect - github.com/pmezard/go-difflib v1.0.1-0.20181226105442-5d4384ee4fb2 // indirect github.com/protocolbuffers/txtpbfmt v0.0.0-20241112170944-20d2c9ebc01d // indirect - github.com/rogpeppe/go-internal v1.13.1 // indirect + github.com/santhosh-tekuri/jsonschema/v5 v5.3.1 // indirect github.com/sergi/go-diff v1.3.2-0.20230802210424-5b0b94c5c0d3 // indirect - github.com/speakeasy-api/openapi-overlay v0.9.0 // indirect - github.com/stretchr/testify v1.10.0 // indirect - github.com/vmware-labs/yaml-jsonpath v0.3.2 // indirect + github.com/ugorji/go/codec v1.2.11 // indirect github.com/xlab/treeprint v1.2.0 // indirect + github.com/yalue/merged_fs v1.3.0 // indirect golang.org/x/mod v0.22.0 // indirect golang.org/x/net v0.34.0 // indirect golang.org/x/sync v0.10.0 // indirect golang.org/x/text v0.21.0 // indirect + golang.org/x/tools v0.29.0 // indirect gopkg.in/yaml.v3 v3.0.1 // indirect ) diff --git a/pkg/codegen/go.sum b/pkg/codegen/go.sum index 018e669150e..7c208c51720 100644 --- a/pkg/codegen/go.sum +++ b/pkg/codegen/go.sum @@ -1,6 +1,3 @@ -github.com/chzyer/logex v1.1.10/go.mod 
h1:+Ywpsq7O8HXn0nuIou7OrIPyXbp3wmkHB+jjWRnGsAI= -github.com/chzyer/readline v0.0.0-20180603132655-2972be24d48e/go.mod h1:nSuG5e5PlCu98SY8svDHJxuZscDgtXS6KTTbou5AhLI= -github.com/chzyer/test v0.0.0-20180213035817-a1ea475d72b1/go.mod h1:Q3SI9o4m/ZMnBNeIyt5eFwwo7qiLfzFZmjNmxjkiQlU= github.com/cockroachdb/apd/v2 v2.0.2 h1:weh8u7Cneje73dDh+2tEVLUvyBc89iwepWCD8b8034E= github.com/cockroachdb/apd/v2 v2.0.2/go.mod h1:DDxRlzC2lo3/vSlmSoS7JkqbbrARPuFOGr0B9pvN3Gw= github.com/creack/pty v1.1.9/go.mod h1:oKZEueFk5CKHvIhNR5MUki03XCEU+Q6VDXinZuGJ33E= @@ -12,48 +9,30 @@ github.com/davecgh/go-spew v1.1.0/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSs github.com/davecgh/go-spew v1.1.1/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= github.com/davecgh/go-spew v1.1.2-0.20180830191138-d8f796af33cc h1:U9qPSI2PIWSS1VwoXQT9A3Wy9MM3WgvqSxFWenqJduM= github.com/davecgh/go-spew v1.1.2-0.20180830191138-d8f796af33cc/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= -github.com/dprotaso/go-yit v0.0.0-20191028211022-135eb7262960/go.mod h1:9HQzr9D/0PGwMEbC3d5AB7oi67+h4TsQqItC1GVYG58= -github.com/dprotaso/go-yit v0.0.0-20220510233725-9ba8df137936 h1:PRxIJD8XjimM5aTknUK9w6DHLDox2r2M3DI4i2pnd3w= -github.com/dprotaso/go-yit v0.0.0-20220510233725-9ba8df137936/go.mod h1:ttYvX5qlB+mlV1okblJqcSMtR4c52UKxDiX9GRBS8+Q= github.com/emicklei/proto v1.13.2 h1:z/etSFO3uyXeuEsVPzfl56WNgzcvIr42aQazXaQmFZY= github.com/emicklei/proto v1.13.2/go.mod h1:rn1FgRS/FANiZdD2djyH7TMA9jdRDcYQ9IEN9yvjX0A= -github.com/fsnotify/fsnotify v1.4.7/go.mod h1:jwhsz4b93w/PPRr/qN1Yymfu8t87LnFCMoQvtojpjFo= -github.com/fsnotify/fsnotify v1.4.9/go.mod h1:znqG4EE+3YCdAaPaxE2ZRY/06pZUdp0tY4IgpuI1SZQ= -github.com/fsnotify/fsnotify v1.8.0 h1:dAwr6QBTBZIkG8roQaJjGof0pp0EeF+tNV7YBP3F/8M= -github.com/fsnotify/fsnotify v1.8.0/go.mod h1:8jBTzvmWwFyi3Pb8djgCCO5IBqzKJ/Jwo8TRcHyHii0= +github.com/expr-lang/expr v1.16.9 h1:WUAzmR0JNI9JCiF0/ewwHB1gmcGw5wW7nWt8gc6PpCI= +github.com/expr-lang/expr v1.16.9/go.mod h1:8/vRC7+7HBzESEqt5kKpYXxrxkr31SaO8r40VO/1IT4= github.com/getkin/kin-openapi v0.128.0 h1:jqq3D9vC9pPq1dGcOCv7yOp1DaEe7c/T1vzcLbITSp4= github.com/getkin/kin-openapi v0.128.0/go.mod h1:OZrfXzUfGrNbsKj+xmFBx6E5c6yH3At/tAKSc2UszXM= github.com/go-openapi/jsonpointer v0.21.0 h1:YgdVicSA9vH5RiHs9TZW5oyafXZFc6+2Vc1rr/O9oNQ= github.com/go-openapi/jsonpointer v0.21.0/go.mod h1:IUyH9l/+uyhIYQ/PXVA41Rexl+kOkAPDdXEYns6fzUY= github.com/go-openapi/swag v0.23.0 h1:vsEVJDUo2hPJ2tu0/Xc+4noaxyEffXNIs3cOULZ+GrE= github.com/go-openapi/swag v0.23.0/go.mod h1:esZ8ITTYEsH1V2trKHjAN8Ai7xHb8RV+YSZ577vPjgQ= -github.com/go-task/slim-sprig v0.0.0-20210107165309-348f09dbbbc0/go.mod h1:fyg7847qk6SyHyPtNmDHnmrv/HOrqktSC+C9fM+CJOE= github.com/go-test/deep v1.0.8 h1:TDsG77qcSprGbC6vTN8OuXp5g+J+b5Pcguhf7Zt61VM= github.com/go-test/deep v1.0.8/go.mod h1:5C2ZWiW0ErCdrYzpqxLbTX7MG14M9iiw8DgHncVwcsE= github.com/golang/glog v0.0.0-20160126235308-23def4e6c14b/go.mod h1:SBH7ygxi8pfUlaOkMMuAQtPIUF8ecWP5IEl/CR7VP2Q= github.com/golang/glog v1.2.2 h1:1+mZ9upx1Dh6FmUTFR1naJ77miKiXgALjWOZ3NVFPmY= github.com/golang/glog v1.2.2/go.mod h1:6AhwSGph0fcJtXVM/PEHPqZlFeoLxhs7/t5UDAwmO+w= -github.com/golang/protobuf v1.2.0/go.mod h1:6lQm79b+lXiMfvg/cZm0SGofjICqVBUtrP5yJMmIC1U= -github.com/golang/protobuf v1.4.0-rc.1/go.mod h1:ceaxUfeHdC40wWswd/P6IGgMaK3YpKi5j83Wpe3EHw8= -github.com/golang/protobuf v1.4.0-rc.1.0.20200221234624-67d41d38c208/go.mod h1:xKAWHe0F5eneWXFV3EuXVDTCmh+JuBKY0li0aMyXATA= -github.com/golang/protobuf v1.4.0-rc.2/go.mod h1:LlEzMj4AhA7rCAGe4KMBDvJI+AwstrUpVNzEA03Pprs= -github.com/golang/protobuf 
v1.4.0-rc.4.0.20200313231945-b860323f09d0/go.mod h1:WU3c8KckQ9AFe+yFwt9sWVRKCVIyN9cPHBJSNnbL67w= -github.com/golang/protobuf v1.4.0/go.mod h1:jodUvKwWbYaEsadDk5Fwe5c77LiNKVO9IDvqG2KuDX0= -github.com/golang/protobuf v1.4.2/go.mod h1:oDoupMAO8OvCJWAcko0GGGIgR6R6ocIYbsSw735rRwI= -github.com/golang/protobuf v1.5.0/go.mod h1:FsONVRAS9T7sI+LIUmWTfcYkHO4aIWwzhcaSAoJOfIk= -github.com/golang/protobuf v1.5.2/go.mod h1:XVQd3VNwM+JqD3oG2Ue2ip4fOMUkwXdXDdiuN0vRsmY= -github.com/google/go-cmp v0.3.0/go.mod h1:8QqcDgzrUqlUb/G2PQTWiueGozuR1884gddMywk6iLU= -github.com/google/go-cmp v0.3.1/go.mod h1:8QqcDgzrUqlUb/G2PQTWiueGozuR1884gddMywk6iLU= -github.com/google/go-cmp v0.4.0/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE= -github.com/google/go-cmp v0.5.5/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE= github.com/google/go-cmp v0.5.7/go.mod h1:n+brtR0CgQNWTVd5ZUFpTBC8YFBDLK/h/bpaJ8/DtOE= github.com/google/go-cmp v0.6.0 h1:ofyhxvXcZhMsU5ulbFiLKl/XBFqE1GSq7atu8tAmTRI= github.com/google/go-cmp v0.6.0/go.mod h1:17dUlkBOakJ0+DkrSSNjCkIjxS6bF9zb3elmeNGIjoY= -github.com/google/pprof v0.0.0-20210407192527-94a9f03dee38/go.mod h1:kpwsk12EmLew5upagYY7GY0pfYCcupk39gWOCRROcvE= github.com/google/uuid v1.6.0 h1:NIvaJDMOsjHA8n1jAhLSgzrAzy1Hgr+hNrb57e+94F0= github.com/google/uuid v1.6.0/go.mod h1:TIyPZe4MgqvfeYDBFedMoGGpEw/LqOeaOT+nhxU+yHo= github.com/grafana/codejen v0.0.4-0.20230321061741-77f656893a3d h1:hrXbGJ5jgp6yNITzs5o+zXq0V5yT3siNJ+uM8LGwWKk= github.com/grafana/codejen v0.0.4-0.20230321061741-77f656893a3d/go.mod h1:zmwwM/DRyQB7pfuBjTWII3CWtxcXh8LTwAYGfDfpR6s= +github.com/grafana/cog v0.0.12 h1:MJfFUVzp0El3+zZCmUQ2Y8uzwvM3aa5zj7EOeeuG6VY= +github.com/grafana/cog v0.0.12/go.mod h1:HwJbc60fZ+viayROClLGdDwO5w/JjBOpO9wjGnAfMLc= github.com/grafana/cue v0.0.0-20230926092038-971951014e3f h1:TmYAMnqg3d5KYEAaT6PtTguL2GjLfvr6wnAX8Azw6tQ= github.com/grafana/cue v0.0.0-20230926092038-971951014e3f/go.mod h1:okjJBHFQFer+a41sAe2SaGm1glWS8oEb6CmJvn5Zdws= github.com/grafana/cuetsy v0.1.11 h1:I3IwBhF+UaQxRM79HnImtrAn8REGdb5M3+C4QrYHoWk= @@ -63,8 +42,8 @@ github.com/hashicorp/errwrap v1.1.0 h1:OxrOeh75EUXMY8TBjag2fzXGZ40LB6IKw45YeGUDY github.com/hashicorp/errwrap v1.1.0/go.mod h1:YH+1FKiLXxHSkmPseP+kNlulaMuP3n2brvKWEqk/Jc4= github.com/hashicorp/go-multierror v1.1.1 h1:H5DkEtf6CXdFp0N0Em5UCwQpXMWke8IA0+lD48awMYo= github.com/hashicorp/go-multierror v1.1.1/go.mod h1:iw975J/qwKPdAO1clOe2L8331t/9/fmwbPZ6JB6eMoM= -github.com/hpcloud/tail v1.0.0/go.mod h1:ab1qPbhIpdTxEkNHXyeSf5vhxWSCs/tWer42PpOxQnU= -github.com/ianlancetaylor/demangle v0.0.0-20200824232613-28f6c0f3b639/go.mod h1:aSSvb/t6k1mPoxDqO4vJh6VOCGPwU4O0C2/Eqndh1Sc= +github.com/huandu/xstrings v1.5.0 h1:2ag3IFq9ZDANvthTwTiqSSZLjDc+BedvHPAp5tJy2TI= +github.com/huandu/xstrings v1.5.0/go.mod h1:y5/lhBue+AyNmUVz9RLU9xbLR0o4KIIExikq4ovT0aE= github.com/invopop/yaml v0.3.1 h1:f0+ZpmhfBSS4MhG+4HYseMdJhoeeopbSKbq5Rpeelso= github.com/invopop/yaml v0.3.1/go.mod h1:PMOp3nn4/12yEZUFfmOuNHJsZToEEOwoWsT+D81KkeA= github.com/josharian/intern v1.0.0 h1:vlS4z54oSdjm0bgjRigI+G1HpF+tI+9rE5LLzOg8HmY= @@ -90,25 +69,6 @@ github.com/mohae/deepcopy v0.0.0-20170929034955-c48cc78d4826 h1:RWengNIwukTxcDr9 github.com/mohae/deepcopy v0.0.0-20170929034955-c48cc78d4826/go.mod h1:TaXosZuwdSHYgviHp1DAtfrULt5eUgsSMsZf+YrPgl8= github.com/mpvl/unique v0.0.0-20150818121801-cbe035fff7de h1:D5x39vF5KCwKQaw+OC9ZPiLVHXz3UFw2+psEX+gYcto= github.com/mpvl/unique v0.0.0-20150818121801-cbe035fff7de/go.mod h1:kJun4WP5gFuHZgRjZUWWuH1DTxCtxbHDOIJsudS8jzY= -github.com/nxadm/tail v1.4.4/go.mod 
h1:kenIhsEOeOJmVchQTgglprH7qJGnHDVpk1VPCcaMI8A= -github.com/nxadm/tail v1.4.8 h1:nPr65rt6Y5JFSKQO7qToXr7pePgD6Gwiw05lkbyAQTE= -github.com/nxadm/tail v1.4.8/go.mod h1:+ncqLTQzXmGhMZNUePPaPqPvBxHAIsmXswZKocGu+AU= -github.com/oapi-codegen/oapi-codegen/v2 v2.4.1 h1:ykgG34472DWey7TSjd8vIfNykXgjOgYJZoQbKfEeY/Q= -github.com/oapi-codegen/oapi-codegen/v2 v2.4.1/go.mod h1:N5+lY1tiTDV3V1BeHtOxeWXHoPVeApvsvjJqegfoaz8= -github.com/onsi/ginkgo v1.6.0/go.mod h1:lLunBs/Ym6LB5Z9jYTR76FiuTmxDTDusOGeTQH+WWjE= -github.com/onsi/ginkgo v1.10.2/go.mod h1:lLunBs/Ym6LB5Z9jYTR76FiuTmxDTDusOGeTQH+WWjE= -github.com/onsi/ginkgo v1.12.1/go.mod h1:zj2OWP4+oCPe1qIXoGWkgMRwljMUYCdkwsT2108oapk= -github.com/onsi/ginkgo v1.16.4/go.mod h1:dX+/inL/fNMqNlz0e9LfyB9TswhZpCVdJM/Z6Vvnwo0= -github.com/onsi/ginkgo v1.16.5 h1:8xi0RTUf59SOSfEtZMvwTvXYMzG4gV23XVHOZiXNtnE= -github.com/onsi/ginkgo v1.16.5/go.mod h1:+E8gABHa3K6zRBolWtd+ROzc/U5bkGt0FwiG042wbpU= -github.com/onsi/ginkgo/v2 v2.1.3/go.mod h1:vw5CSIxN1JObi/U8gcbwft7ZxR2dgaR70JSE3/PpL4c= -github.com/onsi/gomega v1.7.0/go.mod h1:ex+gbHU/CVuBBDIJjb2X0qEXbFg53c61hWP/1CpauHY= -github.com/onsi/gomega v1.7.1/go.mod h1:XdKZgCCFLUoM/7CFJVPcG8C1xQ1AJ0vpAezJrB7JYyY= -github.com/onsi/gomega v1.10.1/go.mod h1:iN09h71vgCQne3DLsj+A5owkum+a2tYe+TOCB1ybHNo= -github.com/onsi/gomega v1.17.0/go.mod h1:HnhC7FXeEQY45zxNK3PPoIUhzk/80Xly9PcubAlGdZY= -github.com/onsi/gomega v1.19.0/go.mod h1:LY+I3pBVzYsTBU1AnDwOSxaYi9WoWiqgwooUqq9yPro= -github.com/onsi/gomega v1.35.1 h1:Cwbd75ZBPxFSuZ6T+rN/WCb/gOc6YgFBXLlZLhC7Ds4= -github.com/onsi/gomega v1.35.1/go.mod h1:PvZbdDc8J6XJEpDK4HCuRBm8a6Fzp9/DmhC9C7yFlog= github.com/perimeterx/marshmallow v1.1.5 h1:a2LALqQ1BlHM8PZblsDdidgv1mWi1DgC2UmX50IvK2s= github.com/perimeterx/marshmallow v1.1.5/go.mod h1:dsXbUu8CRzfYP5a87xpp0xq9S3u0Vchtcl8we9tYaXw= github.com/pkg/errors v0.8.0/go.mod h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINEl0= @@ -121,98 +81,38 @@ github.com/protocolbuffers/txtpbfmt v0.0.0-20220428173112-74888fd59c2b h1:zd/2RN github.com/protocolbuffers/txtpbfmt v0.0.0-20220428173112-74888fd59c2b/go.mod h1:KjY0wibdYKc4DYkerHSbguaf3JeIPGhNJBp2BNiFH78= github.com/rogpeppe/go-internal v1.13.1 h1:KvO1DLK/DRN07sQ1LQKScxyZJuNnedQ5/wKSR38lUII= github.com/rogpeppe/go-internal v1.13.1/go.mod h1:uMEvuHeurkdAXX61udpOXGD/AzZDWNMNyH2VO9fmH0o= -github.com/sergi/go-diff v1.1.0/go.mod h1:STckp+ISIX8hZLjrqAeVduY0gWCT9IjLuqbuNXdaHfM= +github.com/santhosh-tekuri/jsonschema/v5 v5.3.1 h1:lZUw3E0/J3roVtGQ+SCrUrg3ON6NgVqpn3+iol9aGu4= +github.com/santhosh-tekuri/jsonschema/v5 v5.3.1/go.mod h1:uToXkOrWAZ6/Oc07xWQrPOhJotwFIyu2bBVN41fcDUY= github.com/sergi/go-diff v1.3.2-0.20230802210424-5b0b94c5c0d3 h1:n661drycOFuPLCN3Uc8sB6B/s6Z4t2xvBgU1htSHuq8= github.com/sergi/go-diff v1.3.2-0.20230802210424-5b0b94c5c0d3/go.mod h1:A0bzQcvG0E7Rwjx0REVgAGH58e96+X0MeOfepqsbeW4= -github.com/speakeasy-api/openapi-overlay v0.9.0 h1:Wrz6NO02cNlLzx1fB093lBlYxSI54VRhy1aSutx0PQg= -github.com/speakeasy-api/openapi-overlay v0.9.0/go.mod h1:f5FloQrHA7MsxYg9djzMD5h6dxrHjVVByWKh7an8TRc= github.com/stretchr/objx v0.1.0/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME= github.com/stretchr/testify v1.4.0/go.mod h1:j7eGeouHqKxXV5pUuKE4zz7dFj8WfuZ+81PSLYec5m4= -github.com/stretchr/testify v1.5.1/go.mod h1:5W2xD1RspED5o8YsWQXVCued0rvSQ+mT+I5cxcmMvtA= github.com/stretchr/testify v1.7.0/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg= github.com/stretchr/testify v1.10.0 h1:Xv5erBjTwe/5IxqUQTdXv5kgmIvbHo3QQyRwhJsOfJA= github.com/stretchr/testify v1.10.0/go.mod h1:r2ic/lqez/lEtzL7wO/rwa5dbSLXVDPFyf8C91i36aY= 
github.com/ugorji/go/codec v1.2.11 h1:BMaWp1Bb6fHwEtbplGBGJ498wD+LKlNSl25MjdZY4dU= github.com/ugorji/go/codec v1.2.11/go.mod h1:UNopzCgEMSXjBc6AOMqYvWC1ktqTAfzJZUZgYf6w6lg= -github.com/vmware-labs/yaml-jsonpath v0.3.2 h1:/5QKeCBGdsInyDCyVNLbXyilb61MXGi9NP674f9Hobk= -github.com/vmware-labs/yaml-jsonpath v0.3.2/go.mod h1:U6whw1z03QyqgWdgXxvVnQ90zN1BWz5V+51Ewf8k+rQ= github.com/xlab/treeprint v1.2.0 h1:HzHnuAF1plUN2zGlAFHbSQP2qJ0ZAD3XF5XD7OesXRQ= github.com/xlab/treeprint v1.2.0/go.mod h1:gj5Gd3gPdKtR1ikdDK6fnFLdmIS0X30kTTuNd/WEJu0= -github.com/yuin/goldmark v1.2.1/go.mod h1:3hX8gzYuyVAZsxl0MRgGTJEmQBFcNTphYh9decYSb74= -golang.org/x/crypto v0.0.0-20190308221718-c2843e01d9a2/go.mod h1:djNgcEr1/C05ACkg1iLfiJU5Ep61QUkGW8qpdssI0+w= -golang.org/x/crypto v0.0.0-20191011191535-87dc89f01550/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI= -golang.org/x/crypto v0.0.0-20200622213623-75b288015ac9/go.mod h1:LzIPMQfyMNhhGPhUkYOs5KpL4U8rLKemX1yGLhDgUto= -golang.org/x/mod v0.3.0/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA= +github.com/yalue/merged_fs v1.3.0 h1:qCeh9tMPNy/i8cwDsQTJ5bLr6IRxbs6meakNE5O+wyY= +github.com/yalue/merged_fs v1.3.0/go.mod h1:WqqchfVYQyclV2tnR7wtRhBddzBvLVR83Cjw9BKQw0M= golang.org/x/mod v0.22.0 h1:D4nJWe9zXqHOmWqj4VMOJhvzj7bEZg4wEYa759z1pH4= golang.org/x/mod v0.22.0/go.mod h1:6SkKJ3Xj0I0BrPOZoBy3bdMptDDU9oJrpohJ3eWZ1fY= -golang.org/x/net v0.0.0-20180906233101-161cd47e91fd/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= -golang.org/x/net v0.0.0-20190404232315-eb5bcb51f2a3/go.mod h1:t9HGtf8HONx5eT2rtn7q6eTqICYqUVnKs3thJo3Qplg= -golang.org/x/net v0.0.0-20190620200207-3b0461eec859/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= -golang.org/x/net v0.0.0-20200520004742-59133d7f0dd7/go.mod h1:qpuaurCH72eLCgpAm/N6yyVIVM9cpaDIP3A8BGJEC5A= -golang.org/x/net v0.0.0-20201021035429-f5854403a974/go.mod h1:sp8m0HH+o8qH0wwXwYZr8TS3Oi6o0r6Gce1SSxlDquU= -golang.org/x/net v0.0.0-20210428140749-89ef3d95e781/go.mod h1:OJAsFXCWl8Ukc7SiCT/9KSuxbyM7479/AVlXFRxuMCk= -golang.org/x/net v0.0.0-20220225172249-27dd8689420f/go.mod h1:CfG3xpIq0wQ8r1q4Su4UZFWDARRcnwPjda9FqA0JpMk= golang.org/x/net v0.34.0 h1:Mb7Mrk043xzHgnRM88suvJFwzVrRfHEHJEl5/71CKw0= golang.org/x/net v0.34.0/go.mod h1:di0qlW3YNM5oh6GqDGQr92MyTozJPmybPK4Ev/Gm31k= -golang.org/x/sync v0.0.0-20180314180146-1d60e4601c6f/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= -golang.org/x/sync v0.0.0-20190423024810-112230192c58/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= -golang.org/x/sync v0.0.0-20201020160332-67f06af15bc9/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= golang.org/x/sync v0.10.0 h1:3NQrjDixjgGwUOCaF8w2+VYHv0Ve/vGYSbdkTa98gmQ= golang.org/x/sync v0.10.0/go.mod h1:Czt+wKu1gCyEFDUtn0jG5QVvpJ6rzVqr5aXyt9drQfk= -golang.org/x/sys v0.0.0-20180909124046-d0be0721c37e/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= -golang.org/x/sys v0.0.0-20190215142949-d0b11bdaac8a/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= -golang.org/x/sys v0.0.0-20190412213103-97732733099d/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/sys v0.0.0-20190904154756-749cb33beabd/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/sys v0.0.0-20191005200804-aed5e4c7ecf9/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/sys v0.0.0-20191120155948-bd437916bb0e/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/sys v0.0.0-20191204072324-ce4227a45e2e/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/sys 
v0.0.0-20200323222414-85ca7c5b95cd/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/sys v0.0.0-20200930185726-fdedc70b468f/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/sys v0.0.0-20201119102817-f84b799fce68/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/sys v0.0.0-20210112080510-489259a85091/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/sys v0.0.0-20210423082822-04245dca01da/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/sys v0.0.0-20210615035016-665e8c7367d1/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= -golang.org/x/sys v0.0.0-20211216021012-1d35b9e2eb4e/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= -golang.org/x/sys v0.29.0 h1:TPYlXGxvx1MGTn2GiZDhnjPA9wZzZeGKHHmKhHYvgaU= -golang.org/x/sys v0.29.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA= -golang.org/x/term v0.0.0-20201126162022-7de9c90e9dd1/go.mod h1:bj7SfCRtBDWHUb9snDiAeCFNEtKQo2Wmx5Cou7ajbmo= -golang.org/x/term v0.0.0-20210927222741-03fcf44c2211/go.mod h1:jbD1KX2456YbFQfuXm/mYQcufACuNUgVhRMnK/tPxf8= -golang.org/x/text v0.3.0/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ= -golang.org/x/text v0.3.3/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ= -golang.org/x/text v0.3.6/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ= -golang.org/x/text v0.3.7/go.mod h1:u+2+/6zg+i71rQMx5EYifcz6MCKuco9NR6JIITiCfzQ= golang.org/x/text v0.21.0 h1:zyQAAkrwaneQ066sspRyJaG9VNi/YJ1NfzcGB3hZ/qo= golang.org/x/text v0.21.0/go.mod h1:4IBbMaMmOPCJ8SecivzSH54+73PCFmPWxNTLm+vZkEQ= -golang.org/x/tools v0.0.0-20180917221912-90fa682c2a6e/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ= -golang.org/x/tools v0.0.0-20191119224855-298f0cb1881e/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= -golang.org/x/tools v0.0.0-20201224043029-2b0845dc783e/go.mod h1:emZCQorbCU4vsT4fOWvOPXz4eW1wZW4PmDk9uLelYpA= golang.org/x/tools v0.29.0 h1:Xx0h3TtM9rzQpQuR4dKLrdglAmCEN5Oi+P74JdhdzXE= golang.org/x/tools v0.29.0/go.mod h1:KMQVMRsVxU6nHCFXrBPhDB8XncLNLM0lIy/F14RP588= -golang.org/x/xerrors v0.0.0-20190717185122-a985d3407aa7/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= -golang.org/x/xerrors v0.0.0-20191011141410-1b5146add898/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= golang.org/x/xerrors v0.0.0-20191204190536-9bdfabe68543/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= -golang.org/x/xerrors v0.0.0-20200804184101-5ec99f83aff1/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= -google.golang.org/protobuf v0.0.0-20200109180630-ec00e32a8dfd/go.mod h1:DFci5gLYBciE7Vtevhsrf46CRTquxDuWsQurQQe4oz8= -google.golang.org/protobuf v0.0.0-20200221191635-4d8936d0db64/go.mod h1:kwYJMbMJ01Woi6D6+Kah6886xMZcty6N08ah7+eCXa0= -google.golang.org/protobuf v0.0.0-20200228230310-ab0ca4ff8a60/go.mod h1:cfTl7dwQJ+fmap5saPgwCLgHXTUD7jkjRqWcaiX5VyM= -google.golang.org/protobuf v1.20.1-0.20200309200217-e05f789c0967/go.mod h1:A+miEFZTKqfCUM6K7xSMQL9OKL/b6hQv+e19PK+JZNE= -google.golang.org/protobuf v1.21.0/go.mod h1:47Nbq4nVaFHyn7ilMalzfO3qCViNmqZ2kzikPIcrTAo= -google.golang.org/protobuf v1.23.0/go.mod h1:EGpADcykh3NcUnDUJcl1+ZksZNG86OlYog2l/sGQquU= -google.golang.org/protobuf v1.26.0-rc.1/go.mod h1:jlhhOSvTdKEhbULTjvd4ARK9grFBp09yW+WbY/TyQbw= -google.golang.org/protobuf v1.26.0/go.mod h1:9q0QmTI4eRPtz6boOQmLYwt+qCgq0jsYwAQnmE0givc= gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0= gopkg.in/check.v1 v1.0.0-20190902080502-41f04d3bba15/go.mod 
h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0= gopkg.in/check.v1 v1.0.0-20201130134442-10cb98267c6c h1:Hei/4ADfdWqJk1ZMxUNpqntNwaWcugrBjAiHlqqRiVk= gopkg.in/check.v1 v1.0.0-20201130134442-10cb98267c6c/go.mod h1:JHkPIbrfpd72SG/EVd6muEfDQjcINNoR0C8j2r3qZ4Q= -gopkg.in/fsnotify.v1 v1.4.7/go.mod h1:Tz8NjZHkW78fSQdbUxIjBTcgA1z1m8ZHf0WmKUhAMys= -gopkg.in/tomb.v1 v1.0.0-20141024135613-dd632973f1e7 h1:uRGJdciOHaEIrze2W8Q3AKkepLTh2hOroT7a+7czfdQ= -gopkg.in/tomb.v1 v1.0.0-20141024135613-dd632973f1e7/go.mod h1:dt/ZhP58zS4L8KSrWDmTeBkI65Dw0HsyUHuEVlX15mw= -gopkg.in/yaml.v2 v2.2.1/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI= gopkg.in/yaml.v2 v2.2.2/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI= -gopkg.in/yaml.v2 v2.2.4/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI= -gopkg.in/yaml.v2 v2.3.0/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI= gopkg.in/yaml.v2 v2.4.0/go.mod h1:RDklbk79AGWmwhnvt/jBztapEOGDOx6ZbXqjP6csGnQ= -gopkg.in/yaml.v3 v3.0.0-20191026110619-0b21df46bc1d/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM= gopkg.in/yaml.v3 v3.0.0-20200313102051-9f266ea9e77c/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM= gopkg.in/yaml.v3 v3.0.1 h1:fxVm/GzAzEWqLHuvctI91KS9hhNmmWOoWu0XTYJS7CA= gopkg.in/yaml.v3 v3.0.1/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM= diff --git a/pkg/codegen/jenny_go_spec.go b/pkg/codegen/jenny_go_spec.go index caefbdbbe4d..d753d9cae55 100644 --- a/pkg/codegen/jenny_go_spec.go +++ b/pkg/codegen/jenny_go_spec.go @@ -1,13 +1,14 @@ package codegen import ( + "context" "fmt" "strings" "cuelang.org/go/cue" "github.com/dave/dst/dstutil" "github.com/grafana/codejen" - "github.com/grafana/grafana/pkg/codegen/generators" + "github.com/grafana/cog" ) type GoSpecJenny struct { @@ -20,25 +21,20 @@ func (jenny *GoSpecJenny) JennyName() string { func (jenny *GoSpecJenny) Generate(sfg ...SchemaForGen) (codejen.Files, error) { files := make(codejen.Files, len(sfg)) + for i, v := range sfg { packageName := strings.ToLower(v.Name) - b, err := generators.GenerateTypesGo(v.CueFile, - &generators.GoConfig{ - Config: &generators.OpenApiConfig{ - IsGroup: false, - RootName: "Spec", - SubPath: cue.MakePath(cue.Str("spec")), - }, - PackageName: packageName, - ApplyFuncs: append(jenny.ApplyFuncs, PrefixDropper(v.Name)), - }, - ) + cueValue := v.CueFile.LookupPath(cue.ParsePath("lineage.schemas[0].schema.spec")) + b, err := cog.TypesFromSchema(). + CUEValue(packageName, cueValue, cog.ForceEnvelope("Spec")). + Golang(cog.GoConfig{}). + Run(context.Background()) if err != nil { return nil, err } - files[i] = *codejen.NewFile(fmt.Sprintf("pkg/kinds/%s/%s_spec_gen.go", packageName, packageName), b, jenny) + files[i] = *codejen.NewFile(fmt.Sprintf("pkg/kinds/%s/%s_spec_gen.go", packageName, packageName), b[0].Data, jenny) } return files, nil diff --git a/pkg/kinds/accesspolicy/accesspolicy_spec_gen.go b/pkg/kinds/accesspolicy/accesspolicy_spec_gen.go index 06776864630..bb5d2de3235 100644 --- a/pkg/kinds/accesspolicy/accesspolicy_spec_gen.go +++ b/pkg/kinds/accesspolicy/accesspolicy_spec_gen.go @@ -7,54 +7,73 @@ // // Run 'make gen-cue' from repository root to regenerate. +// Code generated - EDITING IS FUTILE. DO NOT EDIT. + package accesspolicy -// Defines values for RoleRefKind. -const ( - RoleRefKindBuiltinRole RoleRefKind = "BuiltinRole" - RoleRefKindRole RoleRefKind = "Role" - RoleRefKindTeam RoleRefKind = "Team" - RoleRefKindUser RoleRefKind = "User" -) - -// AccessRule defines model for AccessRule. 
-type AccessRule struct { - // The kind this rule applies to (dashboards, alert, etc) - Kind string `json:"kind"` - - // Specific sub-elements like "alert.rules" or "dashboard.permissions"???? - Target *string `json:"target,omitempty"` - - // READ, WRITE, CREATE, DELETE, ... - // should move to k8s style verbs like: "get", "list", "watch", "create", "update", "patch", "delete" - Verb string `json:"verb"` -} - -// ResourceRef defines model for ResourceRef. type ResourceRef struct { + // explicit resource or folder will cascade Kind string `json:"kind"` Name string `json:"name"` } -// RoleRef defines model for RoleRef. +// NewResourceRef creates a new ResourceRef object. +func NewResourceRef() *ResourceRef { + return &ResourceRef{} +} + type RoleRef struct { // Policies can apply to roles, teams, or users // Applying policies to individual users is supported, but discouraged - Kind RoleRefKind `json:"kind"` - Name string `json:"name"` - Xname string `json:"xname"` + Kind RoleRefKind `json:"kind"` + Name string `json:"name"` + // temporary + Xname string `json:"xname"` } -// Policies can apply to roles, teams, or users -// Applying policies to individual users is supported, but discouraged -type RoleRefKind string +// NewRoleRef creates a new RoleRef object. +func NewRoleRef() *RoleRef { + return &RoleRef{} +} + +type AccessRule struct { + // The kind this rule applies to (dashboards, alert, etc) + Kind string `json:"kind"` + // READ, WRITE, CREATE, DELETE, ... + // should move to k8s style verbs like: "get", "list", "watch", "create", "update", "patch", "delete" + Verb string `json:"verb"` + // Specific sub-elements like "alert.rules" or "dashboard.permissions"???? + Target *string `json:"target,omitempty"` +} + +// NewAccessRule creates a new AccessRule object. +func NewAccessRule() *AccessRule { + return &AccessRule{} +} -// Spec defines model for Spec. type Spec struct { + // The scope where these policies should apply + Scope ResourceRef `json:"scope"` + // The role that must apply this policy Role RoleRef `json:"role"` - // The set of rules to apply. Note that * is required to modify // access policy rules, and that "none" will reject all actions Rules []AccessRule `json:"rules"` - Scope ResourceRef `json:"scope"` } + +// NewSpec creates a new Spec object. +func NewSpec() *Spec { + return &Spec{ + Scope: *NewResourceRef(), + Role: *NewRoleRef(), + } +} + +type RoleRefKind string + +const ( + RoleRefKindRole RoleRefKind = "Role" + RoleRefKindBuiltinRole RoleRefKind = "BuiltinRole" + RoleRefKindTeam RoleRefKind = "Team" + RoleRefKindUser RoleRefKind = "User" +) diff --git a/pkg/kinds/dashboard/dashboard_spec_gen.go b/pkg/kinds/dashboard/dashboard_spec_gen.go index 2a26d5eb998..3d11277ab73 100644 --- a/pkg/kinds/dashboard/dashboard_spec_gen.go +++ b/pkg/kinds/dashboard/dashboard_spec_gen.go @@ -7,349 +7,326 @@ // // Run 'make gen-cue' from repository root to regenerate. +// Code generated - EDITING IS FUTILE. DO NOT EDIT. + package dashboard import ( - "time" + json "encoding/json" + errors "errors" + fmt "fmt" + time "time" ) -// Defines values for CursorSync. -const ( - CursorSyncN0 CursorSync = 0 - CursorSyncN1 CursorSync = 1 - CursorSyncN2 CursorSync = 2 -) - -// Defines values for LinkType. -const ( - LinkTypeDashboards LinkType = "dashboards" - LinkTypeLink LinkType = "link" -) - -// Defines values for DataTransformerConfigTopic. 
-const ( - DataTransformerConfigTopicAlertStates DataTransformerConfigTopic = "alertStates" - DataTransformerConfigTopicAnnotations DataTransformerConfigTopic = "annotations" - DataTransformerConfigTopicSeries DataTransformerConfigTopic = "series" -) - -// Defines values for FieldColorModeId. -const ( - FieldColorModeIdContinuousBlPu FieldColorModeId = "continuous-BlPu" - FieldColorModeIdContinuousBlYlRd FieldColorModeId = "continuous-BlYlRd" - FieldColorModeIdContinuousBlues FieldColorModeId = "continuous-blues" - FieldColorModeIdContinuousGrYlRd FieldColorModeId = "continuous-GrYlRd" - FieldColorModeIdContinuousGreens FieldColorModeId = "continuous-greens" - FieldColorModeIdContinuousPurples FieldColorModeId = "continuous-purples" - FieldColorModeIdContinuousRdYlGr FieldColorModeId = "continuous-RdYlGr" - FieldColorModeIdContinuousReds FieldColorModeId = "continuous-reds" - FieldColorModeIdContinuousYlBl FieldColorModeId = "continuous-YlBl" - FieldColorModeIdContinuousYlRd FieldColorModeId = "continuous-YlRd" - FieldColorModeIdFixed FieldColorModeId = "fixed" - FieldColorModeIdPaletteClassic FieldColorModeId = "palette-classic" - FieldColorModeIdPaletteClassicByName FieldColorModeId = "palette-classic-by-name" - FieldColorModeIdShades FieldColorModeId = "shades" - FieldColorModeIdThresholds FieldColorModeId = "thresholds" -) - -// Defines values for FieldColorSeriesByMode. -const ( - FieldColorSeriesByModeLast FieldColorSeriesByMode = "last" - FieldColorSeriesByModeMax FieldColorSeriesByMode = "max" - FieldColorSeriesByModeMin FieldColorSeriesByMode = "min" -) - -// Defines values for MappingType. -const ( - MappingTypeRange MappingType = "range" - MappingTypeRegex MappingType = "regex" - MappingTypeSpecial MappingType = "special" - MappingTypeValue MappingType = "value" -) - -// Defines values for PanelRepeatDirection. -const ( - PanelRepeatDirectionH PanelRepeatDirection = "h" - PanelRepeatDirectionV PanelRepeatDirection = "v" -) - -// Defines values for RangeMapType. -const ( - RangeMapTypeRange RangeMapType = "range" - RangeMapTypeRegex RangeMapType = "regex" - RangeMapTypeSpecial RangeMapType = "special" - RangeMapTypeValue RangeMapType = "value" -) - -// Defines values for RegexMapType. -const ( - RegexMapTypeRange RegexMapType = "range" - RegexMapTypeRegex RegexMapType = "regex" - RegexMapTypeSpecial RegexMapType = "special" - RegexMapTypeValue RegexMapType = "value" -) - -// Defines values for RowPanelType. -const ( - RowPanelTypeRow RowPanelType = "row" -) - -// Defines values for SpecPanels0RepeatDirection. -const ( - SpecPanels0RepeatDirectionH SpecPanels0RepeatDirection = "h" - SpecPanels0RepeatDirectionV SpecPanels0RepeatDirection = "v" -) - -// Defines values for SpecialValueMapType. -const ( - SpecialValueMapTypeRange SpecialValueMapType = "range" - SpecialValueMapTypeRegex SpecialValueMapType = "regex" - SpecialValueMapTypeSpecial SpecialValueMapType = "special" - SpecialValueMapTypeValue SpecialValueMapType = "value" -) - -// Defines values for SpecialValueMatch. -const ( - SpecialValueMatchEmpty SpecialValueMatch = "empty" - SpecialValueMatchFalse SpecialValueMatch = "false" - SpecialValueMatchNan SpecialValueMatch = "nan" - SpecialValueMatchNull SpecialValueMatch = "null" - SpecialValueMatchNullNan SpecialValueMatch = "null+nan" - SpecialValueMatchTrue SpecialValueMatch = "true" -) - -// Defines values for ThresholdsMode. 
-const ( - ThresholdsModeAbsolute ThresholdsMode = "absolute" - ThresholdsModePercentage ThresholdsMode = "percentage" -) - -// Defines values for ValueMapType. -const ( - ValueMapTypeRange ValueMapType = "range" - ValueMapTypeRegex ValueMapType = "regex" - ValueMapTypeSpecial ValueMapType = "special" - ValueMapTypeValue ValueMapType = "value" -) - -// Defines values for VariableHide. -const ( - VariableHideN0 VariableHide = 0 - VariableHideN1 VariableHide = 1 - VariableHideN2 VariableHide = 2 -) - -// Defines values for VariableRefresh. -const ( - VariableRefreshN0 VariableRefresh = 0 - VariableRefreshN1 VariableRefresh = 1 - VariableRefreshN2 VariableRefresh = 2 -) - -// Defines values for VariableSort. -const ( - VariableSortN0 VariableSort = 0 - VariableSortN1 VariableSort = 1 - VariableSortN2 VariableSort = 2 - VariableSortN3 VariableSort = 3 - VariableSortN4 VariableSort = 4 - VariableSortN5 VariableSort = 5 - VariableSortN6 VariableSort = 6 - VariableSortN7 VariableSort = 7 - VariableSortN8 VariableSort = 8 -) - -// Defines values for VariableType. -const ( - VariableTypeAdhoc VariableType = "adhoc" - VariableTypeConstant VariableType = "constant" - VariableTypeCustom VariableType = "custom" - VariableTypeDatasource VariableType = "datasource" - VariableTypeGroupby VariableType = "groupby" - VariableTypeInterval VariableType = "interval" - VariableTypeQuery VariableType = "query" - VariableTypeSnapshot VariableType = "snapshot" - VariableTypeSystem VariableType = "system" - VariableTypeTextbox VariableType = "textbox" -) - -// Contains the list of annotations that are associated with the dashboard. -// Annotations are used to overlay event markers and overlay event tags on graphs. -// Grafana comes with a native annotation store and the ability to add annotation events directly from the graph panel or via the HTTP API. -// See https://grafana.com/docs/grafana/latest/dashboards/build-dashboards/annotate-visualizations/ -type AnnotationContainer struct { - // List of annotations - List []AnnotationQuery `json:"list,omitempty"` -} - -// AnnotationPanelFilter defines model for AnnotationPanelFilter. -type AnnotationPanelFilter struct { - // Should the specified panels be included or excluded - Exclude *bool `json:"exclude,omitempty"` - - // Panel IDs that should be included or excluded - Ids []int `json:"ids"` -} - -// TODO docs -// FROM: AnnotationQuery in grafana-data/src/types/annotations.ts -type AnnotationQuery struct { - // Set to 1 for the standard annotation query all dashboards have by default. - BuiltIn *float32 `json:"builtIn,omitempty"` - - // Ref to a DataSource instance - Datasource DataSourceRef `json:"datasource"` - - // When enabled the annotation query is issued with every dashboard refresh - Enable bool `json:"enable"` - Filter *AnnotationPanelFilter `json:"filter,omitempty"` - - // Annotation queries can be toggled on or off at the top of the dashboard. - // When hide is true, the toggle is not shown in the dashboard. - Hide *bool `json:"hide,omitempty"` - - // Color to use for the annotation event markers - IconColor string `json:"iconColor"` - - // Name of annotation. 
- Name string `json:"name"` - - // TODO: this should be a regular DataQuery that depends on the selected dashboard - // these match the properties of the "grafana" datasouce that is default in most dashboards - Target *AnnotationTarget `json:"target,omitempty"` - - // TODO -- this should not exist here, it is based on the --grafana-- datasource - Type *string `json:"type,omitempty"` -} - -// TODO: this should be a regular DataQuery that depends on the selected dashboard -// these match the properties of the "grafana" datasouce that is default in most dashboards -type AnnotationTarget struct { - // Only required/valid for the grafana datasource... - // but code+tests is already depending on it so hard to change - Limit int64 `json:"limit"` - - // Only required/valid for the grafana datasource... - // but code+tests is already depending on it so hard to change - MatchAny bool `json:"matchAny"` - - // Only required/valid for the grafana datasource... - // but code+tests is already depending on it so hard to change - Tags []string `json:"tags"` - - // Only required/valid for the grafana datasource... - // but code+tests is already depending on it so hard to change - Type string `json:"type"` -} - // 0 for no shared crosshair or tooltip (default). // 1 for shared crosshair. // 2 for shared crosshair AND shared tooltip. -type CursorSync int +type DashboardCursorSync int64 -// Links with references to other dashboards or external resources -type Link struct { - // If true, all dashboards links will be displayed in a dropdown. If false, all dashboards links will be displayed side by side. Only valid if the type is dashboards - AsDropdown bool `json:"asDropdown"` +const ( + DashboardCursorSyncOff DashboardCursorSync = 0 + DashboardCursorSyncCrosshair DashboardCursorSync = 1 + DashboardCursorSyncTooltip DashboardCursorSync = 2 +) - // Icon name to be displayed with the link - Icon string `json:"icon"` - - // If true, includes current template variables values in the link as query params - IncludeVars bool `json:"includeVars"` - - // If true, includes current time range in the link as query params - KeepTime bool `json:"keepTime"` - - // List of tags to limit the linked dashboards. If empty, all dashboards will be displayed. Only valid if the type is dashboards - Tags []string `json:"tags"` - - // If true, the link will be opened in a new tab - TargetBlank bool `json:"targetBlank"` - - // Title to display with the link - Title string `json:"title"` - - // Tooltip to display when the user hovers their mouse over it - Tooltip string `json:"tooltip"` - - // Dashboard Link type. Accepted values are dashboards (to refer to another dashboard) and link (to refer to an external resource) - Type LinkType `json:"type"` - - // Link URL. Only required/valid if the type is link - Url *string `json:"url,omitempty"` +// Time picker configuration +// It defines the default config for the time picker and the refresh picker for the specific dashboard. +type TimePickerConfig struct { + // Whether timepicker is visible or not. + Hidden *bool `json:"hidden,omitempty"` + // Interval options available in the refresh picker dropdown. + RefreshIntervals []string `json:"refresh_intervals,omitempty"` + // Selectable options available in the time picker dropdown. Has no effect on provisioned dashboard. + TimeOptions []string `json:"time_options,omitempty"` + // Override the now time by entering a time delay. Use this option to accommodate known delays in data aggregation to avoid null values. 
+ NowDelay *string `json:"nowDelay,omitempty"` } -// Dashboard Link type. Accepted values are dashboards (to refer to another dashboard) and link (to refer to an external resource) -type LinkType string +// NewTimePickerConfig creates a new TimePickerConfig object. +func NewTimePickerConfig() *TimePickerConfig { + return &TimePickerConfig{ + Hidden: (func(input bool) *bool { return &input })(false), + RefreshIntervals: []string{"5s", "10s", "30s", "1m", "5m", "15m", "30m", "1h", "2h", "1d"}, + TimeOptions: []string{"5m", "15m", "1h", "6h", "12h", "24h", "2d", "7d", "30d"}, + } +} + +// Schema for panel targets is specified by datasource +// plugins. We use a placeholder definition, which the Go +// schema loader either left open/as-is with the Base +// variant of the Dashboard and Panel families, or filled +// with types derived from plugins in the Instance variant. +// When working directly from CUE, importers can extend this +// type directly to achieve the same effect. +type Target map[string]any // Ref to a DataSource instance type DataSourceRef struct { // The plugin type-id Type *string `json:"type,omitempty"` - // Specific datasource instance Uid *string `json:"uid,omitempty"` } +// NewDataSourceRef creates a new DataSourceRef object. +func NewDataSourceRef() *DataSourceRef { + return &DataSourceRef{} +} + +// Position and dimensions of a panel in the grid +type GridPos struct { + // Panel height. The height is the number of rows from the top edge of the panel. + H uint32 `json:"h"` + // Panel width. The width is the number of columns from the left edge of the panel. + W uint32 `json:"w"` + // Panel x. The x coordinate is the number of columns from the left edge of the grid + X uint32 `json:"x"` + // Panel y. The y coordinate is the number of rows from the top edge of the grid + Y uint32 `json:"y"` + // Whether the panel is fixed within the grid. If true, the panel will not be affected by other panels' interactions + Static *bool `json:"static,omitempty"` +} + +// NewGridPos creates a new GridPos object. +func NewGridPos() *GridPos { + return &GridPos{ + H: 9, + W: 12, + X: 0, + Y: 0, + } +} + +// Dashboard Link type. Accepted values are dashboards (to refer to another dashboard) and link (to refer to an external resource) +type DashboardLinkType string + +const ( + DashboardLinkTypeLink DashboardLinkType = "link" + DashboardLinkTypeDashboards DashboardLinkType = "dashboards" +) + +// Links with references to other dashboards or external resources +type DashboardLink struct { + // Title to display with the link + Title string `json:"title"` + // Link type. Accepted values are dashboards (to refer to another dashboard) and link (to refer to an external resource) + Type DashboardLinkType `json:"type"` + // Icon name to be displayed with the link + Icon string `json:"icon"` + // Tooltip to display when the user hovers their mouse over it + Tooltip string `json:"tooltip"` + // Link URL. Only required/valid if the type is link + Url *string `json:"url,omitempty"` + // List of tags to limit the linked dashboards. If empty, all dashboards will be displayed. Only valid if the type is dashboards + Tags []string `json:"tags"` + // If true, all dashboards links will be displayed in a dropdown. If false, all dashboards links will be displayed side by side. 
Only valid if the type is dashboards + AsDropdown bool `json:"asDropdown"` + // If true, the link will be opened in a new tab + TargetBlank bool `json:"targetBlank"` + // If true, includes current template variables values in the link as query params + IncludeVars bool `json:"includeVars"` + // If true, includes current time range in the link as query params + KeepTime bool `json:"keepTime"` +} + +// NewDashboardLink creates a new DashboardLink object. +func NewDashboardLink() *DashboardLink { + return &DashboardLink{ + AsDropdown: false, + TargetBlank: false, + IncludeVars: false, + KeepTime: false, + } +} + +// Matcher is a predicate configuration. Based on the config a set of field(s) or values is filtered in order to apply override / transformation. +// It comes with an id (to resolve the implementation from the registry) and a configuration that’s specific to a particular matcher type. +type MatcherConfig struct { + // The matcher id. This is used to find the matcher implementation from registry. + Id string `json:"id"` + // The matcher options. This is specific to the matcher implementation. + Options any `json:"options,omitempty"` +} + +// NewMatcherConfig creates a new MatcherConfig object. +func NewMatcherConfig() *MatcherConfig { + return &MatcherConfig{ + Id: "", + } +} + // Transformations allow to manipulate data returned by a query before the system applies a visualization. // Using transformations you can: rename fields, join time series data, perform mathematical operations across queries, // use the output of one transformation as the input to another transformation, etc. type DataTransformerConfig struct { - // Disabled transformations are skipped - Disabled *bool `json:"disabled,omitempty"` - - // Matcher is a predicate configuration. Based on the config a set of field(s) or values is filtered in order to apply override / transformation. - // It comes with in id ( to resolve implementation from registry) and a configuration that’s specific to a particular matcher type. - Filter *MatcherConfig `json:"filter,omitempty"` - // Unique identifier of transformer Id string `json:"id"` - + // Disabled transformations are skipped + Disabled *bool `json:"disabled,omitempty"` + // Optional frame matcher. When missing it will be applied to all results + Filter *MatcherConfig `json:"filter,omitempty"` + // Where to pull DataFrames from as input to transformation + // replaced with common.DataTopic + Topic *DataTransformerConfigTopic `json:"topic,omitempty"` // Options to be passed to the transformer // Valid options depend on the transformer id Options any `json:"options"` } -// Where to pull DataFrames from as input to transformation -type DataTransformerConfigTopic string - -// DynamicConfigValue defines model for DynamicConfigValue. -type DynamicConfigValue struct { - Id string `json:"id"` - Value *any `json:"value,omitempty"` +// NewDataTransformerConfig creates a new DataTransformerConfig object. +func NewDataTransformerConfig() *DataTransformerConfig { + return &DataTransformerConfig{} } -// Map a field to a color. -type FieldColor struct { - // The fixed color value for fixed or shades color modes. - FixedColor *string `json:"fixedColor,omitempty"` +// A library panel is a reusable panel that you can use in any dashboard. +// When you make a change to a library panel, that change propagates to all instances of where the panel is used.
+// Library panels streamline reuse of panels across multiple dashboards. +type LibraryPanelRef struct { + // Library panel name + Name string `json:"name"` + // Library panel uid + Uid string `json:"uid"` +} - // Color mode for a field. You can specify a single color, or select a continuous (gradient) color schemes, based on a value. - // Continuous color interpolates a color using the percentage of a value relative to min and max. - // Accepted values are: - // `thresholds`: From thresholds. Informs Grafana to take the color from the matching threshold - // `palette-classic`: Classic palette. Grafana will assign color by looking up a color in a palette by series index. Useful for Graphs and pie charts and other categorical data visualizations - // `palette-classic-by-name`: Classic palette (by name). Grafana will assign color by looking up a color in a palette by series name. Useful for Graphs and pie charts and other categorical data visualizations - // `continuous-GrYlRd`: ontinuous Green-Yellow-Red palette mode - // `continuous-RdYlGr`: Continuous Red-Yellow-Green palette mode - // `continuous-BlYlRd`: Continuous Blue-Yellow-Red palette mode - // `continuous-YlRd`: Continuous Yellow-Red palette mode - // `continuous-BlPu`: Continuous Blue-Purple palette mode - // `continuous-YlBl`: Continuous Yellow-Blue palette mode - // `continuous-blues`: Continuous Blue palette mode - // `continuous-reds`: Continuous Red palette mode - // `continuous-greens`: Continuous Green palette mode - // `continuous-purples`: Continuous Purple palette mode - // `shades`: Shades of a single color. Specify a single color, useful in an override rule. - // `fixed`: Fixed color mode. Specify a single color, useful in an override rule. - Mode FieldColorModeId `json:"mode"` +// NewLibraryPanelRef creates a new LibraryPanelRef object. +func NewLibraryPanelRef() *LibraryPanelRef { + return &LibraryPanelRef{} +} - // Defines how to assign a series color from "by value" color schemes. For example for an aggregated data points like a timeseries, the color can be assigned by the min, max or last value. - SeriesBy *FieldColorSeriesByMode `json:"seriesBy,omitempty"` +// Result used as replacement with text and color when the value matches +type ValueMappingResult struct { + // Text to display when the value matches + Text *string `json:"text,omitempty"` + // Text to use when the value matches + Color *string `json:"color,omitempty"` + // Icon to display when the value matches. Only specific visualizations. + Icon *string `json:"icon,omitempty"` + // Position in the mapping array. Only used internally. + Index *int32 `json:"index,omitempty"` +} + +// NewValueMappingResult creates a new ValueMappingResult object. +func NewValueMappingResult() *ValueMappingResult { + return &ValueMappingResult{} +} + +// Maps text values to a color or different display text and color. +// For example, you can configure a value mapping so that all instances of the value 10 appear as Perfection! rather than the number. +type ValueMap struct { + Type string `json:"type"` + // Map with : ValueMappingResult. For example: { "10": { text: "Perfection!", color: "green" } } + Options map[string]ValueMappingResult `json:"options"` +} + +// NewValueMap creates a new ValueMap object. +func NewValueMap() *ValueMap { + return &ValueMap{ + Type: "value", + } +} + +// Maps numerical ranges to a display text and color. +// For example, if a value is within a certain range, you can configure a range value mapping to display Low or High rather than the number. 
+type RangeMap struct { + Type string `json:"type"` + // Range to match against and the result to apply when the value is within the range + Options DashboardRangeMapOptions `json:"options"` +} + +// NewRangeMap creates a new RangeMap object. +func NewRangeMap() *RangeMap { + return &RangeMap{ + Type: "range", + Options: *NewDashboardRangeMapOptions(), + } +} + +// Maps regular expressions to replacement text and a color. +// For example, if a value is www.example.com, you can configure a regex value mapping so that Grafana displays www and truncates the domain. +type RegexMap struct { + Type string `json:"type"` + // Regular expression to match against and the result to apply when the value matches the regex + Options DashboardRegexMapOptions `json:"options"` +} + +// NewRegexMap creates a new RegexMap object. +func NewRegexMap() *RegexMap { + return &RegexMap{ + Type: "regex", + Options: *NewDashboardRegexMapOptions(), + } +} + +// Special value types supported by the `SpecialValueMap` +type SpecialValueMatch string + +const ( + SpecialValueMatchTrue SpecialValueMatch = "true" + SpecialValueMatchFalse SpecialValueMatch = "false" + SpecialValueMatchNull SpecialValueMatch = "null" + SpecialValueMatchNaN SpecialValueMatch = "nan" + SpecialValueMatchNullAndNan SpecialValueMatch = "null+nan" + SpecialValueMatchEmpty SpecialValueMatch = "empty" +) + +// Maps special values like Null, NaN (not a number), and boolean values like true and false to a display text and color. +// See SpecialValueMatch to see the list of special values. +// For example, you can configure a special value mapping so that null values appear as N/A. +type SpecialValueMap struct { + Type string `json:"type"` + Options DashboardSpecialValueMapOptions `json:"options"` +} + +// NewSpecialValueMap creates a new SpecialValueMap object. +func NewSpecialValueMap() *SpecialValueMap { + return &SpecialValueMap{ + Type: "special", + Options: *NewDashboardSpecialValueMapOptions(), + } +} + +// Allow to transform the visual representation of specific data values in a visualization, irrespective of their original units +type ValueMapping = ValueMapOrRangeMapOrRegexMapOrSpecialValueMap + +// NewValueMapping creates a new ValueMapping object. +func NewValueMapping() *ValueMapping { + return NewValueMapOrRangeMapOrRegexMapOrSpecialValueMap() +} + +// Thresholds can either be `absolute` (specific number) or `percentage` (relative to min or max, it will be values between 0 and 1). +type ThresholdsMode string + +const ( + ThresholdsModeAbsolute ThresholdsMode = "absolute" + ThresholdsModePercentage ThresholdsMode = "percentage" +) + +// User-defined value for a metric that triggers visual changes in a panel when this value is met or exceeded +// They are used to conditionally style and color visualizations based on query results , and can be applied to most visualizations. +type Threshold struct { + // Value represents a specified metric for the threshold, which triggers a visual change in the dashboard when this value is met or exceeded. + // Nulls currently appear here when serializing -Infinity to JSON. + Value *float64 `json:"value"` + // Color represents the color of the visual change that will occur in the dashboard when the threshold value is met or exceeded. + Color string `json:"color"` +} + +// NewThreshold creates a new Threshold object. +func NewThreshold() *Threshold { + return &Threshold{} +} + +// Thresholds configuration for the panel +type ThresholdsConfig struct { + // Thresholds mode. 
+ Mode ThresholdsMode `json:"mode"` + // Must be sorted by 'value', first value is always -Infinity + Steps []Threshold `json:"steps"` +} + +// NewThresholdsConfig creates a new ThresholdsConfig object. +func NewThresholdsConfig() *ThresholdsConfig { + return &ThresholdsConfig{} } // Color mode for a field. You can specify a single color, or select a continuous (gradient) color schemes, based on a value. @@ -372,64 +349,69 @@ type FieldColor struct { // `fixed`: Fixed color mode. Specify a single color, useful in an override rule. type FieldColorModeId string +const ( + FieldColorModeIdThresholds FieldColorModeId = "thresholds" + FieldColorModeIdPaletteClassic FieldColorModeId = "palette-classic" + FieldColorModeIdPaletteClassicByName FieldColorModeId = "palette-classic-by-name" + FieldColorModeIdContinuousGrYlRd FieldColorModeId = "continuous-GrYlRd" + FieldColorModeIdContinuousRdYlGr FieldColorModeId = "continuous-RdYlGr" + FieldColorModeIdContinuousBlYlRd FieldColorModeId = "continuous-BlYlRd" + FieldColorModeIdContinuousYlRd FieldColorModeId = "continuous-YlRd" + FieldColorModeIdContinuousBlPu FieldColorModeId = "continuous-BlPu" + FieldColorModeIdContinuousYlBl FieldColorModeId = "continuous-YlBl" + FieldColorModeIdContinuousBlues FieldColorModeId = "continuous-blues" + FieldColorModeIdContinuousReds FieldColorModeId = "continuous-reds" + FieldColorModeIdContinuousGreens FieldColorModeId = "continuous-greens" + FieldColorModeIdContinuousPurples FieldColorModeId = "continuous-purples" + FieldColorModeIdFixed FieldColorModeId = "fixed" + FieldColorModeIdShades FieldColorModeId = "shades" +) + // Defines how to assign a series color from "by value" color schemes. For example for an aggregated data points like a timeseries, the color can be assigned by the min, max or last value. type FieldColorSeriesByMode string +const ( + FieldColorSeriesByModeMin FieldColorSeriesByMode = "min" + FieldColorSeriesByModeMax FieldColorSeriesByMode = "max" + FieldColorSeriesByModeLast FieldColorSeriesByMode = "last" +) + +// Map a field to a color. +type FieldColor struct { + // The main color scheme mode. + Mode FieldColorModeId `json:"mode"` + // The fixed color value for fixed or shades color modes. + FixedColor *string `json:"fixedColor,omitempty"` + // Some visualizations need to know how to assign a series color from by value color schemes. + SeriesBy *FieldColorSeriesByMode `json:"seriesBy,omitempty"` +} + +// NewFieldColor creates a new FieldColor object. +func NewFieldColor() *FieldColor { + return &FieldColor{} +} + // The data model used in Grafana, namely the data frame, is a columnar-oriented table structure that unifies both time series and table query results. // Each column within this structure is called a field. A field can represent a single time series or table column. // Field options allow you to change how the data is displayed in your visualizations. type FieldConfig struct { - // Map a field to a color. - Color *FieldColor `json:"color,omitempty"` - - // custom is specified by the FieldConfig field - // in panel plugin schemas. - Custom map[string]any `json:"custom,omitempty"` - - // Specify the number of decimals Grafana includes in the rendered value. - // If you leave this field blank, Grafana automatically truncates the number of decimals based on the value. - // For example 1.1234 will display as 1.12 and 100.456 will display as 100. - // To display all decimals, set the unit to `String`. 
- Decimals *float32 `json:"decimals,omitempty"` - - // Human readable field metadata - Description *string `json:"description,omitempty"` - // The display value for this field. This supports template variables blank is auto DisplayName *string `json:"displayName,omitempty"` - // This can be used by data sources that return and explicit naming structure for values and labels // When this property is configured, this value is used rather than the default naming strategy. DisplayNameFromDS *string `json:"displayNameFromDS,omitempty"` - - // True if data source field supports ad-hoc filters - Filterable *bool `json:"filterable,omitempty"` - - // The behavior when clicking on a result - Links []any `json:"links,omitempty"` - - // Convert input values into a display string - Mappings []any `json:"mappings,omitempty"` - - // The maximum value used in percentage threshold calculations. Leave blank for auto calculation based on all series and fields. - Max *float32 `json:"max,omitempty"` - - // The minimum value used in percentage threshold calculations. Leave blank for auto calculation based on all series and fields. - Min *float32 `json:"min,omitempty"` - - // Alternative to empty string - NoValue *string `json:"noValue,omitempty"` - + // Human readable field metadata + Description *string `json:"description,omitempty"` // An explicit path to the field in the datasource. When the frame meta includes a path, // This will default to `${frame.meta.path}/${field.name} // // When defined, this value can be used as an identifier within the datasource scope, and // may be used to update the results Path *string `json:"path,omitempty"` - - // Thresholds configuration for the panel - Thresholds *ThresholdsConfig `json:"thresholds,omitempty"` - + // True if data source can write a value to the path. Auth/authz are supported separately + Writeable *bool `json:"writeable,omitempty"` + // True if data source field supports ad-hoc filters + Filterable *bool `json:"filterable,omitempty"` // Unit a field should use. The unit you select is applied to all fields except time. // You can use the units ID availables in Grafana or a custom unit. // Available units in Grafana: https://github.com/grafana/grafana/blob/main/packages/grafana-data/src/valueFormats/categories.ts @@ -441,140 +423,105 @@ type FieldConfig struct { // `count:` for a custom count unit. // `currency:` for custom a currency unit. Unit *string `json:"unit,omitempty"` + // Specify the number of decimals Grafana includes in the rendered value. + // If you leave this field blank, Grafana automatically truncates the number of decimals based on the value. + // For example 1.1234 will display as 1.12 and 100.456 will display as 100. + // To display all decimals, set the unit to `String`. + Decimals *float64 `json:"decimals,omitempty"` + // The minimum value used in percentage threshold calculations. Leave blank for auto calculation based on all series and fields. + Min *float64 `json:"min,omitempty"` + // The maximum value used in percentage threshold calculations. Leave blank for auto calculation based on all series and fields. 
+ Max *float64 `json:"max,omitempty"` + // Convert input values into a display string + Mappings []ValueMapping `json:"mappings,omitempty"` + // Map numeric values to states + Thresholds *ThresholdsConfig `json:"thresholds,omitempty"` + // Panel color configuration + Color *FieldColor `json:"color,omitempty"` + // The behavior when clicking on a result + Links []any `json:"links,omitempty"` + // Alternative to empty string + NoValue *string `json:"noValue,omitempty"` + // custom is specified by the FieldConfig field + // in panel plugin schemas. + Custom map[string]any `json:"custom,omitempty"` +} - // True if data source can write a value to the path. Auth/authz are supported separately - Writeable *bool `json:"writeable,omitempty"` +// NewFieldConfig creates a new FieldConfig object. +func NewFieldConfig() *FieldConfig { + return &FieldConfig{} +} + +type DynamicConfigValue struct { + Id string `json:"id"` + Value any `json:"value,omitempty"` +} + +// NewDynamicConfigValue creates a new DynamicConfigValue object. +func NewDynamicConfigValue() *DynamicConfigValue { + return &DynamicConfigValue{ + Id: "", + } } // The data model used in Grafana, namely the data frame, is a columnar-oriented table structure that unifies both time series and table query results. // Each column within this structure is called a field. A field can represent a single time series or table column. // Field options allow you to change how the data is displayed in your visualizations. type FieldConfigSource struct { - // The data model used in Grafana, namely the data frame, is a columnar-oriented table structure that unifies both time series and table query results. - // Each column within this structure is called a field. A field can represent a single time series or table column. - // Field options allow you to change how the data is displayed in your visualizations. + // Defaults are the options applied to all fields. Defaults FieldConfig `json:"defaults"` - // Overrides are the options applied to specific fields overriding the defaults. - Overrides []struct { - // Matcher is a predicate configuration. Based on the config a set of field(s) or values is filtered in order to apply override / transformation. - // It comes with in id ( to resolve implementation from registry) and a configuration that’s specific to a particular matcher type. - Matcher MatcherConfig `json:"matcher"` - Properties []DynamicConfigValue `json:"properties"` - } `json:"overrides"` + Overrides []DashboardFieldConfigSourceOverrides `json:"overrides"` } -// Position and dimensions of a panel in the grid -type GridPos struct { - // Panel height. The height is the number of rows from the top edge of the panel. - H int `json:"h"` - - // Whether the panel is fixed within the grid. If true, the panel will not be affected by other panels' interactions - Static *bool `json:"static,omitempty"` - - // Panel width. The width is the number of columns from the left edge of the panel. - W int `json:"w"` - - // Panel x. The x coordinate is the number of columns from the left edge of the grid - X int `json:"x"` - - // Panel y. The y coordinate is the number of rows from the top edge of the grid - Y int `json:"y"` -} - -// A library panel is a reusable panel that you can use in any dashboard. -// When you make a change to a library panel, that change propagates to all instances of where the panel is used. -// Library panels streamline reuse of panels across multiple dashboards. 
-type LibraryPanelRef struct { - // Library panel name - Name string `json:"name"` - - // Library panel uid - Uid string `json:"uid"` -} - -// Supported value mapping types -// `value`: Maps text values to a color or different display text and color. For example, you can configure a value mapping so that all instances of the value 10 appear as Perfection! rather than the number. -// `range`: Maps numerical ranges to a display text and color. For example, if a value is within a certain range, you can configure a range value mapping to display Low or High rather than the number. -// `regex`: Maps regular expressions to replacement text and a color. For example, if a value is www.example.com, you can configure a regex value mapping so that Grafana displays www and truncates the domain. -// `special`: Maps special values like Null, NaN (not a number), and boolean values like true and false to a display text and color. See SpecialValueMatch to see the list of special values. For example, you can configure a special value mapping so that null values appear as N/A. -type MappingType string - -// Matcher is a predicate configuration. Based on the config a set of field(s) or values is filtered in order to apply override / transformation. -// It comes with in id ( to resolve implementation from registry) and a configuration that’s specific to a particular matcher type. -type MatcherConfig struct { - // The matcher id. This is used to find the matcher implementation from registry. - Id string `json:"id"` - - // The matcher options. This is specific to the matcher implementation. - Options *any `json:"options,omitempty"` +// NewFieldConfigSource creates a new FieldConfigSource object. +func NewFieldConfigSource() *FieldConfigSource { + return &FieldConfigSource{ + Defaults: *NewFieldConfig(), + } } // Dashboard panels are the basic visualization building blocks. type Panel struct { - // Sets panel queries cache timeout. - CacheTimeout *string `json:"cacheTimeout,omitempty"` - - // Ref to a DataSource instance - Datasource *DataSourceRef `json:"datasource,omitempty"` - + // The panel plugin type id. This is used to find the plugin to display the panel. + Type string `json:"type"` + // Unique identifier of the panel. Generated by Grafana when creating a new panel. It must be unique within a dashboard, but not globally. + Id *uint32 `json:"id,omitempty"` + // The version of the plugin that is used for this panel. This is used to find the plugin to display the panel and to migrate old panel configs. + PluginVersion *string `json:"pluginVersion,omitempty"` + // Depends on the panel plugin. See the plugin documentation for details. + Targets []Target `json:"targets,omitempty"` + // Panel title. + Title *string `json:"title,omitempty"` // Panel description. Description *string `json:"description,omitempty"` - - // The data model used in Grafana, namely the data frame, is a columnar-oriented table structure that unifies both time series and table query results. - // Each column within this structure is called a field. A field can represent a single time series or table column. - // Field options allow you to change how the data is displayed in your visualizations. - FieldConfig *FieldConfigSource `json:"fieldConfig,omitempty"` - - // Position and dimensions of a panel in the grid + // Whether to display the panel without a background. + Transparent *bool `json:"transparent,omitempty"` + // The datasource used in all targets. + Datasource *DataSourceRef `json:"datasource,omitempty"` + // Grid position. 
GridPos *GridPos `json:"gridPos,omitempty"` - - // Controls if the timeFrom or timeShift overrides are shown in the panel header - HideTimeOverride *bool `json:"hideTimeOverride,omitempty"` - - // Unique identifier of the panel. Generated by Grafana when creating a new panel. It must be unique within a dashboard, but not globally. - Id *int `json:"id,omitempty"` - + // Panel links. + Links []DashboardLink `json:"links,omitempty"` + // Name of template variable to repeat for. + Repeat *string `json:"repeat,omitempty"` + // Direction to repeat in if 'repeat' is set. + // `h` for horizontal, `v` for vertical. + RepeatDirection *PanelRepeatDirection `json:"repeatDirection,omitempty"` + // Option for repeated panels that controls max items per row + // Only relevant for horizontally repeated panels + MaxPerRow *float64 `json:"maxPerRow,omitempty"` + // The maximum number of data points that the panel queries are retrieving. + MaxDataPoints *float64 `json:"maxDataPoints,omitempty"` + // List of transformations that are applied to the panel data before rendering. + // When there are multiple transformations, Grafana applies them in the order they are listed. + // Each transformation creates a result set that then passes on to the next transformation in the processing pipeline. + Transformations []DataTransformerConfig `json:"transformations,omitempty"` // The min time interval setting defines a lower limit for the $__interval and $__interval_ms variables. // This value must be formatted as a number followed by a valid time // identifier like: "40s", "3d", etc. // See: https://grafana.com/docs/grafana/latest/panels-visualizations/query-transform-data/#query-options Interval *string `json:"interval,omitempty"` - - // A library panel is a reusable panel that you can use in any dashboard. - // When you make a change to a library panel, that change propagates to all instances of where the panel is used. - // Library panels streamline reuse of panels across multiple dashboards. - LibraryPanel *LibraryPanelRef `json:"libraryPanel,omitempty"` - - // Panel links. - Links []Link `json:"links,omitempty"` - - // The maximum number of data points that the panel queries are retrieving. - MaxDataPoints *float32 `json:"maxDataPoints,omitempty"` - - // Option for repeated panels that controls max items per row - // Only relevant for horizontally repeated panels - MaxPerRow *float32 `json:"maxPerRow,omitempty"` - - // It depends on the panel plugin. They are specified by the Options field in panel plugin schemas. - Options map[string]any `json:"options,omitempty"` - - // The version of the plugin that is used for this panel. This is used to find the plugin to display the panel and to migrate old panel configs. - PluginVersion *string `json:"pluginVersion,omitempty"` - - // Overrides the data source configured time-to-live for a query cache item in milliseconds - QueryCachingTTL *float32 `json:"queryCachingTTL,omitempty"` - - // Name of template variable to repeat for. - Repeat *string `json:"repeat,omitempty"` - - // Direction to repeat in if 'repeat' is set. - // `h` for horizontal, `v` for vertical. - RepeatDirection *PanelRepeatDirection `json:"repeatDirection,omitempty"` - - // Depends on the panel plugin. See the plugin documentation for details. - Targets []Target `json:"targets,omitempty"` - // Overrides the relative time range for individual panels, // which causes them to be different than what is selected in // the dashboard time picker in the top-right corner of the dashboard. 
You can use this to show metrics from different @@ -584,435 +531,123 @@ type Panel struct { // Note: Panel time overrides have no effect when the dashboard’s time range is absolute. // See: https://grafana.com/docs/grafana/latest/panels-visualizations/query-transform-data/#query-options TimeFrom *string `json:"timeFrom,omitempty"` - // Overrides the time range for individual panels by shifting its start and end relative to the time picker. // For example, you can shift the time range for the panel to be two hours earlier than the dashboard time picker setting `2h`. // Note: Panel time overrides have no effect when the dashboard’s time range is absolute. // See: https://grafana.com/docs/grafana/latest/panels-visualizations/query-transform-data/#query-options TimeShift *string `json:"timeShift,omitempty"` - - // Panel title. - Title *string `json:"title,omitempty"` - - // List of transformations that are applied to the panel data before rendering. - // When there are multiple transformations, Grafana applies them in the order they are listed. - // Each transformation creates a result set that then passes on to the next transformation in the processing pipeline. - Transformations []DataTransformerConfig `json:"transformations,omitempty"` - - // Whether to display the panel without a background. - Transparent *bool `json:"transparent,omitempty"` - - // The panel plugin type id. This is used to find the plugin to display the panel. - Type string `json:"type"` + // Controls if the timeFrom or timeShift overrides are shown in the panel header + HideTimeOverride *bool `json:"hideTimeOverride,omitempty"` + // Dynamically load the panel + LibraryPanel *LibraryPanelRef `json:"libraryPanel,omitempty"` + // Sets panel queries cache timeout. + CacheTimeout *string `json:"cacheTimeout,omitempty"` + // Overrides the data source configured time-to-live for a query cache item in milliseconds + QueryCachingTTL *float64 `json:"queryCachingTTL,omitempty"` + // It depends on the panel plugin. They are specified by the Options field in panel plugin schemas. + Options map[string]any `json:"options,omitempty"` + // Field options allow you to change how the data is displayed in your visualizations. + FieldConfig *FieldConfigSource `json:"fieldConfig,omitempty"` } -// Direction to repeat in if 'repeat' is set. -// `h` for horizontal, `v` for vertical. -type PanelRepeatDirection string - -// Maps numerical ranges to a display text and color. -// For example, if a value is within a certain range, you can configure a range value mapping to display Low or High rather than the number. -type RangeMap struct { - // Range to match against and the result to apply when the value is within the range - Options struct { - // Min value of the range. It can be null which means -Infinity - From *float64 `json:"from"` - - // Result used as replacement with text and color when the value matches - Result ValueMappingResult `json:"result"` - - // Max value of the range. It can be null which means +Infinity - To *float64 `json:"to"` - } `json:"options"` - Type RangeMapType `json:"type"` +// NewPanel creates a new Panel object. +func NewPanel() *Panel { + return &Panel{ + Transparent: (func(input bool) *bool { return &input })(false), + } } -// RangeMapType defines model for RangeMap.Type. -type RangeMapType string - -// Maps regular expressions to replacement text and a color. -// For example, if a value is www.example.com, you can configure a regex value mapping so that Grafana displays www and truncates the domain. 
-type RegexMap struct { - // Regular expression to match against and the result to apply when the value matches the regex - Options struct { - // Regular expression to match against - Pattern string `json:"pattern"` - - // Result used as replacement with text and color when the value matches - Result ValueMappingResult `json:"result"` - } `json:"options"` - Type RegexMapType `json:"type"` -} - -// RegexMapType defines model for RegexMap.Type. -type RegexMapType string - // Row panel type RowPanel struct { + // The panel type + Type string `json:"type"` // Whether this row should be collapsed or not. Collapsed bool `json:"collapsed"` - - // Ref to a DataSource instance - Datasource *DataSourceRef `json:"datasource,omitempty"` - - // Position and dimensions of a panel in the grid - GridPos *GridPos `json:"gridPos,omitempty"` - - // Unique identifier of the panel. Generated by Grafana when creating a new panel. It must be unique within a dashboard, but not globally. - Id int `json:"id"` - - // List of panels in the row - Panels []Panel `json:"panels"` - - // Name of template variable to repeat for. - Repeat *string `json:"repeat,omitempty"` - // Row title Title *string `json:"title,omitempty"` - - // The panel type - Type RowPanelType `json:"type"` + // Name of default datasource for the row + Datasource *DataSourceRef `json:"datasource,omitempty"` + // Row grid position + GridPos *GridPos `json:"gridPos,omitempty"` + // Unique identifier of the panel. Generated by Grafana when creating a new panel. It must be unique within a dashboard, but not globally. + Id uint32 `json:"id"` + // List of panels in the row + Panels []Panel `json:"panels"` + // Name of template variable to repeat for. + Repeat *string `json:"repeat,omitempty"` } -// The panel type -type RowPanelType string - -// A dashboard snapshot shares an interactive dashboard publicly. -// It is a read-only version of a dashboard, and is not editable. -// It is possible to create a snapshot of a snapshot. -// Grafana strips away all sensitive information from the dashboard. -// Sensitive information stripped: queries (metric, template,annotation) and panel links. -type Snapshot struct { - // Time when the snapshot was created - Created time.Time `json:"created"` - - // Time when the snapshot expires, default is never to expire - Expires string `json:"expires"` - - // Is the snapshot saved in an external grafana instance - External bool `json:"external"` - - // ExternalUrl external url, if snapshot was shared in external grafana instance - ExternalUrl string `json:"externalUrl"` - - // Unique identifier of the snapshot - Id int `json:"id"` - - // Optional, defined the unique key of the snapshot, required if external is true - Key string `json:"key"` - - // Optional, name of the snapshot - Name string `json:"name"` - - // OrgId org id of the snapshot - OrgId int `json:"orgId"` - - // OriginalUrl original url, url of the dashboard that was snapshotted - OriginalUrl string `json:"originalUrl"` - - // Updated last time when the snapshot was updated - Updated time.Time `json:"updated"` - - // url of the snapshot, if snapshot was shared internally - Url *string `json:"url,omitempty"` - - // UserId user id of the snapshot creator - UserId int `json:"userId"` +// NewRowPanel creates a new RowPanel object. +func NewRowPanel() *RowPanel { + return &RowPanel{ + Type: "row", + Collapsed: false, + } } -// Spec defines model for Spec. -type Spec struct { - // Contains the list of annotations that are associated with the dashboard. 
- // Annotations are used to overlay event markers and overlay event tags on graphs. - // Grafana comes with a native annotation store and the ability to add annotation events directly from the graph panel or via the HTTP API. - // See https://grafana.com/docs/grafana/latest/dashboards/build-dashboards/annotate-visualizations/ - Annotations *AnnotationContainer `json:"annotations,omitempty"` +// Dashboard variable type +// `query`: Query-generated list of values such as metric names, server names, sensor IDs, data centers, and so on. +// `adhoc`: Key/value filters that are automatically added to all metric queries for a data source (Prometheus, Loki, InfluxDB, and Elasticsearch only). +// `constant`: Define a hidden constant. +// `datasource`: Quickly change the data source for an entire dashboard. +// `interval`: Interval variables represent time spans. +// `textbox`: Display a free text input field with an optional default value. +// `custom`: Define the variable options manually using a comma-separated list. +// `system`: Variables defined by Grafana. See: https://grafana.com/docs/grafana/latest/dashboards/variables/add-template-variables/#global-variables +type VariableType string - // Description of dashboard. - Description *string `json:"description,omitempty"` - - // Whether a dashboard is editable or not. - Editable *bool `json:"editable,omitempty"` - - // The month that the fiscal year starts on. 0 = January, 11 = December - FiscalYearStartMonth *int `json:"fiscalYearStartMonth,omitempty"` - - // ID of a dashboard imported from the https://grafana.com/grafana/dashboards/ portal - GnetId *string `json:"gnetId,omitempty"` - - // 0 for no shared crosshair or tooltip (default). - // 1 for shared crosshair. - // 2 for shared crosshair AND shared tooltip. - GraphTooltip *CursorSync `json:"graphTooltip,omitempty"` - - // Unique numeric identifier for the dashboard. - // `id` is internal to a specific Grafana instance. `uid` should be used to identify a dashboard across Grafana instances. - Id *int64 `json:"id"` - - // Links with references to other dashboards or external websites. - Links []Link `json:"links,omitempty"` - - // When set to true, the dashboard will redraw panels at an interval matching the pixel width. - // This will keep data "moving left" regardless of the query refresh rate. This setting helps - // avoid dashboards presenting stale live data - LiveNow *bool `json:"liveNow,omitempty"` - - // List of dashboard panels - Panels []any `json:"panels,omitempty"` - - // When set to true, the dashboard will load all panels in the dashboard when it's loaded. - Preload *bool `json:"preload,omitempty"` - - // Refresh rate of dashboard. Represented via interval string, e.g. "5s", "1m", "1h", "1d". - Refresh *string `json:"refresh,omitempty"` - - // This property should only be used in dashboards defined by plugins. It is a quick check - // to see if the version has changed since the last time. - Revision *int64 `json:"revision,omitempty"` - - // Version of the JSON schema, incremented each time a Grafana update brings - // changes to said schema. - SchemaVersion int `json:"schemaVersion"` - - // A dashboard snapshot shares an interactive dashboard publicly. - // It is a read-only version of a dashboard, and is not editable. - // It is possible to create a snapshot of a snapshot. - // Grafana strips away all sensitive information from the dashboard. - // Sensitive information stripped: queries (metric, template,annotation) and panel links. 
- Snapshot *Snapshot `json:"snapshot,omitempty"` - - // Tags associated with dashboard. - Tags []string `json:"tags,omitempty"` - - // Configured template variables - Templating *struct { - // List of configured template variables with their saved values along with some other metadata - List []VariableModel `json:"list,omitempty"` - } `json:"templating,omitempty"` - - // Time range for dashboard. - // Accepted values are relative time strings like {from: 'now-6h', to: 'now'} or absolute time strings like {from: '2020-07-10T08:00:00.000Z', to: '2020-07-10T14:00:00.000Z'}. - Time *struct { - From string `json:"from"` - To string `json:"to"` - } `json:"time,omitempty"` - - // Time picker configuration - // It defines the default config for the time picker and the refresh picker for the specific dashboard. - Timepicker *TimePickerConfig `json:"timepicker,omitempty"` - - // Timezone of dashboard. Accepted values are IANA TZDB zone ID or "browser" or "utc". - Timezone *string `json:"timezone,omitempty"` - - // Title of dashboard. - Title *string `json:"title,omitempty"` - - // Unique dashboard identifier that can be generated by anyone. string (8-40) - Uid *string `json:"uid,omitempty"` - - // Version of the dashboard, incremented each time the dashboard is updated. - Version *int `json:"version,omitempty"` - - // Day when the week starts. Expressed by the name of the day in lowercase, e.g. "monday". - WeekStart *string `json:"weekStart,omitempty"` -} - -// Direction to repeat in if 'repeat' is set. -// `h` for horizontal, `v` for vertical. -type SpecPanels0RepeatDirection string - -// Maps special values like Null, NaN (not a number), and boolean values like true and false to a display text and color. -// See SpecialValueMatch to see the list of special values. -// For example, you can configure a special value mapping so that null values appear as N/A. -type SpecialValueMap struct { - Options struct { - // Special value types supported by the `SpecialValueMap` - Match SpecialValueMatch `json:"match"` - - // Result used as replacement with text and color when the value matches - Result ValueMappingResult `json:"result"` - } `json:"options"` - Type SpecialValueMapType `json:"type"` -} - -// SpecialValueMapType defines model for SpecialValueMap.Type. -type SpecialValueMapType string - -// Special value types supported by the `SpecialValueMap` -type SpecialValueMatch string - -// Schema for panel targets is specified by datasource -// plugins. We use a placeholder definition, which the Go -// schema loader either left open/as-is with the Base -// variant of the Dashboard and Panel families, or filled -// with types derived from plugins in the Instance variant. -// When working directly from CUE, importers can extend this -// type directly to achieve the same effect. -type Target = map[string]any - -// User-defined value for a metric that triggers visual changes in a panel when this value is met or exceeded -// They are used to conditionally style and color visualizations based on query results , and can be applied to most visualizations. -type Threshold struct { - // Color represents the color of the visual change that will occur in the dashboard when the threshold value is met or exceeded. - Color string `json:"color"` - - // Value represents a specified metric for the threshold, which triggers a visual change in the dashboard when this value is met or exceeded. - // Nulls currently appear here when serializing -Infinity to JSON. 
- Value *float32 `json:"value"` -} - -// Thresholds configuration for the panel -type ThresholdsConfig struct { - // Thresholds can either be `absolute` (specific number) or `percentage` (relative to min or max, it will be values between 0 and 1). - Mode ThresholdsMode `json:"mode"` - - // Must be sorted by 'value', first value is always -Infinity - Steps []Threshold `json:"steps"` -} - -// Thresholds can either be `absolute` (specific number) or `percentage` (relative to min or max, it will be values between 0 and 1). -type ThresholdsMode string - -// Time picker configuration -// It defines the default config for the time picker and the refresh picker for the specific dashboard. -type TimePickerConfig struct { - // Whether timepicker is visible or not. - Hidden *bool `json:"hidden,omitempty"` - - // Override the now time by entering a time delay. Use this option to accommodate known delays in data aggregation to avoid null values. - NowDelay *string `json:"nowDelay,omitempty"` - - // Interval options available in the refresh picker dropdown. - RefreshIntervals []string `json:"refresh_intervals,omitempty"` - - // Selectable options available in the time picker dropdown. Has no effect on provisioned dashboard. - TimeOptions []string `json:"time_options,omitempty"` -} - -// Maps text values to a color or different display text and color. -// For example, you can configure a value mapping so that all instances of the value 10 appear as Perfection! rather than the number. -type ValueMap struct { - // Map with : ValueMappingResult. For example: { "10": { text: "Perfection!", color: "green" } } - Options map[string]ValueMappingResult `json:"options"` - Type ValueMapType `json:"type"` -} - -// ValueMapType defines model for ValueMap.Type. -type ValueMapType string - -// Result used as replacement with text and color when the value matches -type ValueMappingResult struct { - // Text to use when the value matches - Color *string `json:"color,omitempty"` - - // Icon to display when the value matches. Only specific visualizations. - Icon *string `json:"icon,omitempty"` - - // Position in the mapping array. Only used internally. - Index *int32 `json:"index,omitempty"` - - // Text to display when the value matches - Text *string `json:"text,omitempty"` -} +const ( + VariableTypeQuery VariableType = "query" + VariableTypeAdhoc VariableType = "adhoc" + VariableTypeGroupby VariableType = "groupby" + VariableTypeConstant VariableType = "constant" + VariableTypeDatasource VariableType = "datasource" + VariableTypeInterval VariableType = "interval" + VariableTypeTextbox VariableType = "textbox" + VariableTypeCustom VariableType = "custom" + VariableTypeSystem VariableType = "system" + VariableTypeSnapshot VariableType = "snapshot" +) // Determine if the variable shows on dashboard // Accepted values are 0 (show label and value), 1 (show value only), 2 (show nothing). -type VariableHide int +type VariableHide int64 -// A variable is a placeholder for a value. You can use variables in metric queries and in panel titles. -type VariableModel struct { - // Custom all value - AllValue *string `json:"allValue,omitempty"` - - // Allow custom values to be entered in the variable - AllowCustomValue *bool `json:"allowCustomValue,omitempty"` - - // Option to be selected in a variable. - Current *VariableOption `json:"current,omitempty"` - - // Ref to a DataSource instance - Datasource *DataSourceRef `json:"datasource,omitempty"` - - // Description of variable. It can be defined but `null`. 
- Description *string `json:"description,omitempty"` - - // Determine if the variable shows on dashboard - // Accepted values are 0 (show label and value), 1 (show value only), 2 (show nothing). - Hide *VariableHide `json:"hide,omitempty"` - - // Whether all value option is available or not - IncludeAll *bool `json:"includeAll,omitempty"` - - // Optional display name - Label *string `json:"label,omitempty"` - - // Whether multiple values can be selected or not from variable value list - Multi *bool `json:"multi,omitempty"` - - // Name of variable - Name string `json:"name"` - - // Options that can be selected for a variable. - Options []VariableOption `json:"options,omitempty"` - - // Query used to fetch values for a variable - Query *any `json:"query,omitempty"` - - // Options to config when to refresh a variable - // `0`: Never refresh the variable - // `1`: Queries the data source every time the dashboard loads. - // `2`: Queries the data source when the dashboard time range changes. - Refresh *VariableRefresh `json:"refresh,omitempty"` - - // Optional field, if you want to extract part of a series name or metric node segment. - // Named capture groups can be used to separate the display text and value. - Regex *string `json:"regex,omitempty"` - - // Whether the variable value should be managed by URL query params or not - SkipUrlSync *bool `json:"skipUrlSync,omitempty"` - - // Sort variable options - // Accepted values are: - // `0`: No sorting - // `1`: Alphabetical ASC - // `2`: Alphabetical DESC - // `3`: Numerical ASC - // `4`: Numerical DESC - // `5`: Alphabetical Case Insensitive ASC - // `6`: Alphabetical Case Insensitive DESC - // `7`: Natural ASC - // `8`: Natural DESC - Sort *VariableSort `json:"sort,omitempty"` - - // Dashboard variable type - // `query`: Query-generated list of values such as metric names, server names, sensor IDs, data centers, and so on. - // `adhoc`: Key/value filters that are automatically added to all metric queries for a data source (Prometheus, Loki, InfluxDB, and Elasticsearch only). - // `constant`: Define a hidden constant. - // `datasource`: Quickly change the data source for an entire dashboard. - // `interval`: Interval variables represent time spans. - // `textbox`: Display a free text input field with an optional default value. - // `custom`: Define the variable options manually using a comma-separated list. - // `system`: Variables defined by Grafana. See: https://grafana.com/docs/grafana/latest/dashboards/variables/add-template-variables/#global-variables - Type VariableType `json:"type"` -} +const ( + VariableHideDontHide VariableHide = 0 + VariableHideHideLabel VariableHide = 1 + VariableHideHideVariable VariableHide = 2 +) // Option to be selected in a variable. type VariableOption struct { // Whether the option is selected or not Selected *bool `json:"selected,omitempty"` - // Text to be displayed for the option - Text any `json:"text"` - + Text StringOrArrayOfString `json:"text"` // Value of the option - Value any `json:"value"` + Value StringOrArrayOfString `json:"value"` +} + +// NewVariableOption creates a new VariableOption object. +func NewVariableOption() *VariableOption { + return &VariableOption{ + Text: *NewStringOrArrayOfString(), + Value: *NewStringOrArrayOfString(), + } } // Options to config when to refresh a variable // `0`: Never refresh the variable // `1`: Queries the data source every time the dashboard loads. // `2`: Queries the data source when the dashboard time range changes. 
-type VariableRefresh int +type VariableRefresh int64 + +const ( + VariableRefreshNever VariableRefresh = 0 + VariableRefreshOnDashboardLoad VariableRefresh = 1 + VariableRefreshOnTimeRangeChanged VariableRefresh = 2 +) // Sort variable options // Accepted values are: @@ -1025,15 +660,546 @@ type VariableRefresh int // `6`: Alphabetical Case Insensitive DESC // `7`: Natural ASC // `8`: Natural DESC -type VariableSort int +type VariableSort int64 -// Dashboard variable type -// `query`: Query-generated list of values such as metric names, server names, sensor IDs, data centers, and so on. -// `adhoc`: Key/value filters that are automatically added to all metric queries for a data source (Prometheus, Loki, InfluxDB, and Elasticsearch only). -// `constant`: Define a hidden constant. -// `datasource`: Quickly change the data source for an entire dashboard. -// `interval`: Interval variables represent time spans. -// `textbox`: Display a free text input field with an optional default value. -// `custom`: Define the variable options manually using a comma-separated list. -// `system`: Variables defined by Grafana. See: https://grafana.com/docs/grafana/latest/dashboards/variables/add-template-variables/#global-variables -type VariableType string +const ( + VariableSortDisabled VariableSort = 0 + VariableSortAlphabeticalAsc VariableSort = 1 + VariableSortAlphabeticalDesc VariableSort = 2 + VariableSortNumericalAsc VariableSort = 3 + VariableSortNumericalDesc VariableSort = 4 + VariableSortAlphabeticalCaseInsensitiveAsc VariableSort = 5 + VariableSortAlphabeticalCaseInsensitiveDesc VariableSort = 6 + VariableSortNaturalAsc VariableSort = 7 + VariableSortNaturalDesc VariableSort = 8 +) + +// A variable is a placeholder for a value. You can use variables in metric queries and in panel titles. +type VariableModel struct { + // Type of variable + Type VariableType `json:"type"` + // Name of variable + Name string `json:"name"` + // Optional display name + Label *string `json:"label,omitempty"` + // Visibility configuration for the variable + Hide *VariableHide `json:"hide,omitempty"` + // Whether the variable value should be managed by URL query params or not + SkipUrlSync *bool `json:"skipUrlSync,omitempty"` + // Description of variable. It can be defined but `null`. + Description *string `json:"description,omitempty"` + // Query used to fetch values for a variable + Query *StringOrMap `json:"query,omitempty"` + // Data source used to fetch values for a variable. It can be defined but `null`. + Datasource *DataSourceRef `json:"datasource,omitempty"` + // Shows current selected variable text/value on the dashboard + Current *VariableOption `json:"current,omitempty"` + // Whether multiple values can be selected or not from variable value list + Multi *bool `json:"multi,omitempty"` + // Allow custom values to be entered in the variable + AllowCustomValue *bool `json:"allowCustomValue,omitempty"` + // Options that can be selected for a variable. + Options []VariableOption `json:"options,omitempty"` + // Options to config when to refresh a variable + Refresh *VariableRefresh `json:"refresh,omitempty"` + // Options sort order + Sort *VariableSort `json:"sort,omitempty"` + // Whether all value option is available or not + IncludeAll *bool `json:"includeAll,omitempty"` + // Custom all value + AllValue *string `json:"allValue,omitempty"` + // Optional field, if you want to extract part of a series name or metric node segment. + // Named capture groups can be used to separate the display text and value. 
+ Regex *string `json:"regex,omitempty"` +} + +// NewVariableModel creates a new VariableModel object. +func NewVariableModel() *VariableModel { + return &VariableModel{ + SkipUrlSync: (func(input bool) *bool { return &input })(false), + Multi: (func(input bool) *bool { return &input })(false), + AllowCustomValue: (func(input bool) *bool { return &input })(true), + IncludeAll: (func(input bool) *bool { return &input })(false), + } +} + +type AnnotationPanelFilter struct { + // Should the specified panels be included or excluded + Exclude *bool `json:"exclude,omitempty"` + // Panel IDs that should be included or excluded + Ids []uint8 `json:"ids"` +} + +// NewAnnotationPanelFilter creates a new AnnotationPanelFilter object. +func NewAnnotationPanelFilter() *AnnotationPanelFilter { + return &AnnotationPanelFilter{ + Exclude: (func(input bool) *bool { return &input })(false), + } +} + +// TODO: this should be a regular DataQuery that depends on the selected dashboard +// these match the properties of the "grafana" datasouce that is default in most dashboards +type AnnotationTarget struct { + // Only required/valid for the grafana datasource... + // but code+tests is already depending on it so hard to change + Limit int64 `json:"limit"` + // Only required/valid for the grafana datasource... + // but code+tests is already depending on it so hard to change + MatchAny bool `json:"matchAny"` + // Only required/valid for the grafana datasource... + // but code+tests is already depending on it so hard to change + Tags []string `json:"tags"` + // Only required/valid for the grafana datasource... + // but code+tests is already depending on it so hard to change + Type string `json:"type"` +} + +// NewAnnotationTarget creates a new AnnotationTarget object. +func NewAnnotationTarget() *AnnotationTarget { + return &AnnotationTarget{} +} + +// TODO docs +// FROM: AnnotationQuery in grafana-data/src/types/annotations.ts +type AnnotationQuery struct { + // Name of annotation. + Name string `json:"name"` + // Datasource where the annotations data is + Datasource DataSourceRef `json:"datasource"` + // When enabled the annotation query is issued with every dashboard refresh + Enable bool `json:"enable"` + // Annotation queries can be toggled on or off at the top of the dashboard. + // When hide is true, the toggle is not shown in the dashboard. + Hide *bool `json:"hide,omitempty"` + // Color to use for the annotation event markers + IconColor string `json:"iconColor"` + // Filters to apply when fetching annotations + Filter *AnnotationPanelFilter `json:"filter,omitempty"` + // TODO.. this should just be a normal query target + Target *AnnotationTarget `json:"target,omitempty"` + // TODO -- this should not exist here, it is based on the --grafana-- datasource + Type *string `json:"type,omitempty"` + // Set to 1 for the standard annotation query all dashboards have by default. + BuiltIn *float64 `json:"builtIn,omitempty"` +} + +// NewAnnotationQuery creates a new AnnotationQuery object. +func NewAnnotationQuery() *AnnotationQuery { + return &AnnotationQuery{ + Datasource: *NewDataSourceRef(), + Enable: true, + Hide: (func(input bool) *bool { return &input })(false), + BuiltIn: (func(input float64) *float64 { return &input })(0), + } +} + +// Contains the list of annotations that are associated with the dashboard. +// Annotations are used to overlay event markers and overlay event tags on graphs. 
+// Grafana comes with a native annotation store and the ability to add annotation events directly from the graph panel or via the HTTP API. +// See https://grafana.com/docs/grafana/latest/dashboards/build-dashboards/annotate-visualizations/ +type AnnotationContainer struct { + // List of annotations + List []AnnotationQuery `json:"list,omitempty"` +} + +// NewAnnotationContainer creates a new AnnotationContainer object. +func NewAnnotationContainer() *AnnotationContainer { + return &AnnotationContainer{} +} + +// A dashboard snapshot shares an interactive dashboard publicly. +// It is a read-only version of a dashboard, and is not editable. +// It is possible to create a snapshot of a snapshot. +// Grafana strips away all sensitive information from the dashboard. +// Sensitive information stripped: queries (metric, template,annotation) and panel links. +type Snapshot struct { + // Time when the snapshot was created + Created time.Time `json:"created"` + // Time when the snapshot expires, default is never to expire + Expires string `json:"expires"` + // Is the snapshot saved in an external grafana instance + External bool `json:"external"` + // external url, if snapshot was shared in external grafana instance + ExternalUrl string `json:"externalUrl"` + // original url, url of the dashboard that was snapshotted + OriginalUrl string `json:"originalUrl"` + // Unique identifier of the snapshot + Id uint32 `json:"id"` + // Optional, defined the unique key of the snapshot, required if external is true + Key string `json:"key"` + // Optional, name of the snapshot + Name string `json:"name"` + // org id of the snapshot + OrgId uint32 `json:"orgId"` + // last time when the snapshot was updated + Updated time.Time `json:"updated"` + // url of the snapshot, if snapshot was shared internally + Url *string `json:"url,omitempty"` + // user id of the snapshot creator + UserId uint32 `json:"userId"` +} + +// NewSnapshot creates a new Snapshot object. +func NewSnapshot() *Snapshot { + return &Snapshot{} +} + +type Spec struct { + // Unique numeric identifier for the dashboard. + // `id` is internal to a specific Grafana instance. `uid` should be used to identify a dashboard across Grafana instances. + // TODO eliminate this null option + Id *int64 `json:"id,omitempty"` + // Unique dashboard identifier that can be generated by anyone. string (8-40) + Uid *string `json:"uid,omitempty"` + // Title of dashboard. + Title *string `json:"title,omitempty"` + // Description of dashboard. + Description *string `json:"description,omitempty"` + // This property should only be used in dashboards defined by plugins. It is a quick check + // to see if the version has changed since the last time. + Revision *int64 `json:"revision,omitempty"` + // ID of a dashboard imported from the https://grafana.com/grafana/dashboards/ portal + GnetId *string `json:"gnetId,omitempty"` + // Tags associated with dashboard. + Tags []string `json:"tags,omitempty"` + // Timezone of dashboard. Accepted values are IANA TZDB zone ID or "browser" or "utc". + Timezone *string `json:"timezone,omitempty"` + // Whether a dashboard is editable or not. + Editable *bool `json:"editable,omitempty"` + // Configuration of dashboard cursor sync behavior. + // Accepted values are 0 (sync turned off), 1 (shared crosshair), 2 (shared crosshair and tooltip). + GraphTooltip *DashboardCursorSync `json:"graphTooltip,omitempty"` + // Time range for dashboard. 
+ // Accepted values are relative time strings like {from: 'now-6h', to: 'now'} or absolute time strings like {from: '2020-07-10T08:00:00.000Z', to: '2020-07-10T14:00:00.000Z'}. + Time *DashboardSpecTime `json:"time,omitempty"` + // Configuration of the time picker shown at the top of a dashboard. + Timepicker *TimePickerConfig `json:"timepicker,omitempty"` + // The month that the fiscal year starts on. 0 = January, 11 = December + FiscalYearStartMonth *uint8 `json:"fiscalYearStartMonth,omitempty"` + // When set to true, the dashboard will redraw panels at an interval matching the pixel width. + // This will keep data "moving left" regardless of the query refresh rate. This setting helps + // avoid dashboards presenting stale live data + LiveNow *bool `json:"liveNow,omitempty"` + // Day when the week starts. Expressed by the name of the day in lowercase, e.g. "monday". + WeekStart *string `json:"weekStart,omitempty"` + // Refresh rate of dashboard. Represented via interval string, e.g. "5s", "1m", "1h", "1d". + Refresh *string `json:"refresh,omitempty"` + // Version of the JSON schema, incremented each time a Grafana update brings + // changes to said schema. + SchemaVersion uint16 `json:"schemaVersion"` + // Version of the dashboard, incremented each time the dashboard is updated. + Version *uint32 `json:"version,omitempty"` + // List of dashboard panels + Panels []any `json:"panels,omitempty"` + // Configured template variables + Templating *DashboardSpecTemplating `json:"templating,omitempty"` + // Contains the list of annotations that are associated with the dashboard. + // Annotations are used to overlay event markers and overlay event tags on graphs. + // Grafana comes with a native annotation store and the ability to add annotation events directly from the graph panel or via the HTTP API. + // See https://grafana.com/docs/grafana/latest/dashboards/build-dashboards/annotate-visualizations/ + Annotations *AnnotationContainer `json:"annotations,omitempty"` + // Links with references to other dashboards or external websites. + Links []DashboardLink `json:"links,omitempty"` + // Snapshot options. They are present only if the dashboard is a snapshot. + Snapshot *Snapshot `json:"snapshot,omitempty"` + // When set to true, the dashboard will load all panels in the dashboard when it's loaded. + Preload *bool `json:"preload,omitempty"` +} + +// NewSpec creates a new Spec object. +func NewSpec() *Spec { + return &Spec{ + Timezone: (func(input string) *string { return &input })("browser"), + Editable: (func(input bool) *bool { return &input })(true), + GraphTooltip: (func(input DashboardCursorSync) *DashboardCursorSync { return &input })(DashboardCursorSyncOff), + FiscalYearStartMonth: (func(input uint8) *uint8 { return &input })(0), + SchemaVersion: 39, + } +} + +type DataTransformerConfigTopic string + +const ( + DataTransformerConfigTopicSeries DataTransformerConfigTopic = "series" + DataTransformerConfigTopicAnnotations DataTransformerConfigTopic = "annotations" + DataTransformerConfigTopicAlertStates DataTransformerConfigTopic = "alertStates" +) + +type PanelRepeatDirection string + +const ( + PanelRepeatDirectionH PanelRepeatDirection = "h" + PanelRepeatDirectionV PanelRepeatDirection = "v" +) + +type DashboardRangeMapOptions struct { + // Min value of the range. It can be null which means -Infinity + From *float64 `json:"from"` + // Max value of the range. 
It can be null which means +Infinity + To *float64 `json:"to"` + // Config to apply when the value is within the range + Result ValueMappingResult `json:"result"` +} + +// NewDashboardRangeMapOptions creates a new DashboardRangeMapOptions object. +func NewDashboardRangeMapOptions() *DashboardRangeMapOptions { + return &DashboardRangeMapOptions{ + Result: *NewValueMappingResult(), + } +} + +type DashboardRegexMapOptions struct { + // Regular expression to match against + Pattern string `json:"pattern"` + // Config to apply when the value matches the regex + Result ValueMappingResult `json:"result"` +} + +// NewDashboardRegexMapOptions creates a new DashboardRegexMapOptions object. +func NewDashboardRegexMapOptions() *DashboardRegexMapOptions { + return &DashboardRegexMapOptions{ + Result: *NewValueMappingResult(), + } +} + +type DashboardSpecialValueMapOptions struct { + // Special value to match against + Match SpecialValueMatch `json:"match"` + // Config to apply when the value matches the special value + Result ValueMappingResult `json:"result"` +} + +// NewDashboardSpecialValueMapOptions creates a new DashboardSpecialValueMapOptions object. +func NewDashboardSpecialValueMapOptions() *DashboardSpecialValueMapOptions { + return &DashboardSpecialValueMapOptions{ + Result: *NewValueMappingResult(), + } +} + +type DashboardFieldConfigSourceOverrides struct { + Matcher MatcherConfig `json:"matcher"` + Properties []DynamicConfigValue `json:"properties"` +} + +// NewDashboardFieldConfigSourceOverrides creates a new DashboardFieldConfigSourceOverrides object. +func NewDashboardFieldConfigSourceOverrides() *DashboardFieldConfigSourceOverrides { + return &DashboardFieldConfigSourceOverrides{ + Matcher: *NewMatcherConfig(), + } +} + +type DashboardSpecTime struct { + From string `json:"from"` + To string `json:"to"` +} + +// NewDashboardSpecTime creates a new DashboardSpecTime object. +func NewDashboardSpecTime() *DashboardSpecTime { + return &DashboardSpecTime{ + From: "now-6h", + To: "now", + } +} + +type DashboardSpecTemplating struct { + // List of configured template variables with their saved values along with some other metadata + List []VariableModel `json:"list,omitempty"` +} + +// NewDashboardSpecTemplating creates a new DashboardSpecTemplating object. +func NewDashboardSpecTemplating() *DashboardSpecTemplating { + return &DashboardSpecTemplating{} +} + +type ValueMapOrRangeMapOrRegexMapOrSpecialValueMap struct { + ValueMap *ValueMap `json:"ValueMap,omitempty"` + RangeMap *RangeMap `json:"RangeMap,omitempty"` + RegexMap *RegexMap `json:"RegexMap,omitempty"` + SpecialValueMap *SpecialValueMap `json:"SpecialValueMap,omitempty"` +} + +// NewValueMapOrRangeMapOrRegexMapOrSpecialValueMap creates a new ValueMapOrRangeMapOrRegexMapOrSpecialValueMap object. +func NewValueMapOrRangeMapOrRegexMapOrSpecialValueMap() *ValueMapOrRangeMapOrRegexMapOrSpecialValueMap { + return &ValueMapOrRangeMapOrRegexMapOrSpecialValueMap{} +} + +// MarshalJSON implements a custom JSON marshalling logic to encode `ValueMapOrRangeMapOrRegexMapOrSpecialValueMap` as JSON. 
+func (resource ValueMapOrRangeMapOrRegexMapOrSpecialValueMap) MarshalJSON() ([]byte, error) { + if resource.ValueMap != nil { + return json.Marshal(resource.ValueMap) + } + if resource.RangeMap != nil { + return json.Marshal(resource.RangeMap) + } + if resource.RegexMap != nil { + return json.Marshal(resource.RegexMap) + } + if resource.SpecialValueMap != nil { + return json.Marshal(resource.SpecialValueMap) + } + + return nil, fmt.Errorf("no value for disjunction of refs") +} + +// UnmarshalJSON implements a custom JSON unmarshalling logic to decode `ValueMapOrRangeMapOrRegexMapOrSpecialValueMap` from JSON. +func (resource *ValueMapOrRangeMapOrRegexMapOrSpecialValueMap) UnmarshalJSON(raw []byte) error { + if raw == nil { + return nil + } + + // FIXME: this is wasteful, we need to find a more efficient way to unmarshal this. + parsedAsMap := make(map[string]any) + if err := json.Unmarshal(raw, &parsedAsMap); err != nil { + return err + } + + discriminator, found := parsedAsMap["type"] + if !found { + return errors.New("discriminator field 'type' not found in payload") + } + + switch discriminator { + case "range": + var rangeMap RangeMap + if err := json.Unmarshal(raw, &rangeMap); err != nil { + return err + } + + resource.RangeMap = &rangeMap + return nil + case "regex": + var regexMap RegexMap + if err := json.Unmarshal(raw, ®exMap); err != nil { + return err + } + + resource.RegexMap = ®exMap + return nil + case "special": + var specialValueMap SpecialValueMap + if err := json.Unmarshal(raw, &specialValueMap); err != nil { + return err + } + + resource.SpecialValueMap = &specialValueMap + return nil + case "value": + var valueMap ValueMap + if err := json.Unmarshal(raw, &valueMap); err != nil { + return err + } + + resource.ValueMap = &valueMap + return nil + } + + return fmt.Errorf("could not unmarshal resource with `type = %v`", discriminator) +} + +type StringOrArrayOfString struct { + String *string `json:"String,omitempty"` + ArrayOfString []string `json:"ArrayOfString,omitempty"` +} + +// NewStringOrArrayOfString creates a new StringOrArrayOfString object. +func NewStringOrArrayOfString() *StringOrArrayOfString { + return &StringOrArrayOfString{} +} + +// MarshalJSON implements a custom JSON marshalling logic to encode `StringOrArrayOfString` as JSON. +func (resource StringOrArrayOfString) MarshalJSON() ([]byte, error) { + if resource.String != nil { + return json.Marshal(resource.String) + } + + if resource.ArrayOfString != nil { + return json.Marshal(resource.ArrayOfString) + } + + return nil, fmt.Errorf("no value for disjunction of scalars") +} + +// UnmarshalJSON implements a custom JSON unmarshalling logic to decode `StringOrArrayOfString` from JSON. +func (resource *StringOrArrayOfString) UnmarshalJSON(raw []byte) error { + if raw == nil { + return nil + } + + var errList []error + + // String + var String string + if err := json.Unmarshal(raw, &String); err != nil { + errList = append(errList, err) + resource.String = nil + } else { + resource.String = &String + return nil + } + + // ArrayOfString + var ArrayOfString []string + if err := json.Unmarshal(raw, &ArrayOfString); err != nil { + errList = append(errList, err) + resource.ArrayOfString = nil + } else { + resource.ArrayOfString = ArrayOfString + return nil + } + + return errors.Join(errList...) +} + +type StringOrMap struct { + String *string `json:"String,omitempty"` + Map map[string]any `json:"Map,omitempty"` +} + +// NewStringOrMap creates a new StringOrMap object. 
+func NewStringOrMap() *StringOrMap { + return &StringOrMap{} +} + +// MarshalJSON implements a custom JSON marshalling logic to encode `StringOrMap` as JSON. +func (resource StringOrMap) MarshalJSON() ([]byte, error) { + if resource.String != nil { + return json.Marshal(resource.String) + } + + if resource.Map != nil { + return json.Marshal(resource.Map) + } + + return nil, fmt.Errorf("no value for disjunction of scalars") +} + +// UnmarshalJSON implements a custom JSON unmarshalling logic to decode `StringOrMap` from JSON. +func (resource *StringOrMap) UnmarshalJSON(raw []byte) error { + if raw == nil { + return nil + } + + var errList []error + + // String + var String string + if err := json.Unmarshal(raw, &String); err != nil { + errList = append(errList, err) + resource.String = nil + } else { + resource.String = &String + return nil + } + + // Map + var Map map[string]any + if err := json.Unmarshal(raw, &Map); err != nil { + errList = append(errList, err) + resource.Map = nil + } else { + resource.Map = Map + return nil + } + + return errors.Join(errList...) +} diff --git a/pkg/kinds/librarypanel/librarypanel_spec_gen.go b/pkg/kinds/librarypanel/librarypanel_spec_gen.go index e96f9c40020..177d6c26c41 100644 --- a/pkg/kinds/librarypanel/librarypanel_spec_gen.go +++ b/pkg/kinds/librarypanel/librarypanel_spec_gen.go @@ -7,55 +7,66 @@ // // Run 'make gen-cue' from repository root to regenerate. +// Code generated - EDITING IS FUTILE. DO NOT EDIT. + package librarypanel import ( - "time" + time "time" ) -// LibraryElementDTOMeta defines model for LibraryElementDTOMeta. +type LibraryElementDTOMetaUser struct { + Id int64 `json:"id"` + Name string `json:"name"` + AvatarUrl string `json:"avatarUrl"` +} + +// NewLibraryElementDTOMetaUser creates a new LibraryElementDTOMetaUser object. +func NewLibraryElementDTOMetaUser() *LibraryElementDTOMetaUser { + return &LibraryElementDTOMetaUser{} +} + type LibraryElementDTOMeta struct { - ConnectedDashboards int64 `json:"connectedDashboards"` - Created time.Time `json:"created"` - CreatedBy LibraryElementDTOMetaUser `json:"createdBy"` FolderName string `json:"folderName"` FolderUid string `json:"folderUid"` + ConnectedDashboards int64 `json:"connectedDashboards"` + Created time.Time `json:"created"` Updated time.Time `json:"updated"` + CreatedBy LibraryElementDTOMetaUser `json:"createdBy"` UpdatedBy LibraryElementDTOMetaUser `json:"updatedBy"` } -// LibraryElementDTOMetaUser defines model for LibraryElementDTOMetaUser. -type LibraryElementDTOMetaUser struct { - AvatarUrl string `json:"avatarUrl"` - Id int64 `json:"id"` - Name string `json:"name"` +// NewLibraryElementDTOMeta creates a new LibraryElementDTOMeta object. +func NewLibraryElementDTOMeta() *LibraryElementDTOMeta { + return &LibraryElementDTOMeta{ + CreatedBy: *NewLibraryElementDTOMetaUser(), + UpdatedBy: *NewLibraryElementDTOMetaUser(), + } } -// Spec defines model for Spec. 
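The `StringOrArrayOfString` and `StringOrMap` wrappers above replace the old `any`-typed fields with explicit disjunctions: `MarshalJSON` emits whichever branch is populated as a bare scalar, array, or map (not as a wrapper object), and `UnmarshalJSON` tries each branch in order. A minimal round-trip sketch, assumed to sit in the same package as the generated dashboard types (the package name below is an assumption, not part of the diff):

package dashboard // assumed package name for the generated dashboard types

import (
	"encoding/json"
	"fmt"
)

// roundTripVariableOption shows the disjunction encoding in practice: a
// single-string text marshals as "All", a multi-value text marshals as a
// plain JSON array, and both decode back into the same wrapper type.
func roundTripVariableOption() error {
	all := "All"
	opt := VariableOption{
		Text:  StringOrArrayOfString{String: &all},
		Value: StringOrArrayOfString{ArrayOfString: []string{"server1", "server2"}},
	}

	raw, err := json.Marshal(opt)
	if err != nil {
		return err
	}
	fmt.Println(string(raw)) // {"text":"All","value":["server1","server2"]}

	var decoded VariableOption
	if err := json.Unmarshal(raw, &decoded); err != nil {
		return err
	}
	// The string branch was populated for Text, the array branch for Value.
	fmt.Println(*decoded.Text.String, decoded.Value.ArrayOfString)
	return nil
}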
type Spec struct { + // Folder UID + FolderUid *string `json:"folderUid,omitempty"` + // Library element UID + Uid string `json:"uid"` + // Panel name (also saved in the model) + Name string `json:"name"` // Panel description Description *string `json:"description,omitempty"` - - // Folder UID - FolderUid *string `json:"folderUid,omitempty"` - Meta *LibraryElementDTOMeta `json:"meta,omitempty"` - + // The panel type (from inside the model) + Type string `json:"type"` + // Dashboard version when this was saved (zero if unknown) + SchemaVersion *uint16 `json:"schemaVersion,omitempty"` + // panel version, incremented each time the dashboard is updated. + Version int64 `json:"version"` // TODO: should be the same panel schema defined in dashboard // Typescript: Omit; Model map[string]any `json:"model"` - - // Panel name (also saved in the model) - Name string `json:"name"` - - // Dashboard version when this was saved (zero if unknown) - SchemaVersion *int `json:"schemaVersion,omitempty"` - - // The panel type (from inside the model) - Type string `json:"type"` - - // Library element UID - Uid string `json:"uid"` - - // Version panel version, incremented each time the dashboard is updated. - Version int64 `json:"version"` + // Object storage metadata + Meta *LibraryElementDTOMeta `json:"meta,omitempty"` +} + +// NewSpec creates a new Spec object. +func NewSpec() *Spec { + return &Spec{} } diff --git a/pkg/kinds/preferences/preferences_spec_gen.go b/pkg/kinds/preferences/preferences_spec_gen.go index 7c30cea20c1..431d2f9e9a0 100644 --- a/pkg/kinds/preferences/preferences_spec_gen.go +++ b/pkg/kinds/preferences/preferences_spec_gen.go @@ -7,46 +7,63 @@ // // Run 'make gen-cue' from repository root to regenerate. +// Code generated - EDITING IS FUTILE. DO NOT EDIT. + package preferences -// CookiePreferences defines model for CookiePreferences. -type CookiePreferences struct { - Analytics map[string]any `json:"analytics,omitempty"` - Functional map[string]any `json:"functional,omitempty"` - Performance map[string]any `json:"performance,omitempty"` +type QueryHistoryPreference struct { + // one of: '' | 'query' | 'starred'; + HomeTab *string `json:"homeTab,omitempty"` +} + +// NewQueryHistoryPreference creates a new QueryHistoryPreference object. +func NewQueryHistoryPreference() *QueryHistoryPreference { + return &QueryHistoryPreference{} +} + +type CookiePreferences struct { + Analytics any `json:"analytics,omitempty"` + Performance any `json:"performance,omitempty"` + Functional any `json:"functional,omitempty"` +} + +// NewCookiePreferences creates a new CookiePreferences object. +func NewCookiePreferences() *CookiePreferences { + return &CookiePreferences{} } -// NavbarPreference defines model for NavbarPreference. type NavbarPreference struct { BookmarkUrls []string `json:"bookmarkUrls"` } -// QueryHistoryPreference defines model for QueryHistoryPreference. -type QueryHistoryPreference struct { - // HomeTab one of: '' | 'query' | 'starred'; - HomeTab *string `json:"homeTab,omitempty"` +// NewNavbarPreference creates a new NavbarPreference object. 
+func NewNavbarPreference() *NavbarPreference { + return &NavbarPreference{} } // Spec defines user, team or org Grafana preferences // swagger:model Preferences type Spec struct { - CookiePreferences *CookiePreferences `json:"cookiePreferences,omitempty"` - // UID for the home dashboard HomeDashboardUID *string `json:"homeDashboardUID,omitempty"` - - // Selected language (beta) - Language *string `json:"language,omitempty"` - Navbar *NavbarPreference `json:"navbar,omitempty"` - QueryHistory *QueryHistoryPreference `json:"queryHistory,omitempty"` - - // Theme light, dark, empty is default - Theme *string `json:"theme,omitempty"` - // The timezone selection // TODO: this should use the timezone defined in common Timezone *string `json:"timezone,omitempty"` - - // WeekStart day of the week (sunday, monday, etc) + // day of the week (sunday, monday, etc) WeekStart *string `json:"weekStart,omitempty"` + // light, dark, empty is default + Theme *string `json:"theme,omitempty"` + // Selected language (beta) + Language *string `json:"language,omitempty"` + // Explore query history preferences + QueryHistory *QueryHistoryPreference `json:"queryHistory,omitempty"` + // Cookie preferences + CookiePreferences *CookiePreferences `json:"cookiePreferences,omitempty"` + // Navigation preferences + Navbar *NavbarPreference `json:"navbar,omitempty"` +} + +// NewSpec creates a new Spec object. +func NewSpec() *Spec { + return &Spec{} } diff --git a/pkg/kinds/publicdashboard/publicdashboard_spec_gen.go b/pkg/kinds/publicdashboard/publicdashboard_spec_gen.go index 01321af228a..667cf38cff1 100644 --- a/pkg/kinds/publicdashboard/publicdashboard_spec_gen.go +++ b/pkg/kinds/publicdashboard/publicdashboard_spec_gen.go @@ -7,25 +7,26 @@ // // Run 'make gen-cue' from repository root to regenerate. +// Code generated - EDITING IS FUTILE. DO NOT EDIT. + package publicdashboard -// Spec defines model for Spec. type Spec struct { - // Unique public access token - AccessToken *string `json:"accessToken,omitempty"` - - // Flag that indicates if annotations are enabled - AnnotationsEnabled bool `json:"annotationsEnabled"` - - // Dashboard unique identifier referenced by this public dashboard - DashboardUid string `json:"dashboardUid"` - - // Flag that indicates if the public dashboard is enabled - IsEnabled bool `json:"isEnabled"` - - // Flag that indicates if the time range picker is enabled - TimeSelectionEnabled bool `json:"timeSelectionEnabled"` - // Unique public dashboard identifier Uid string `json:"uid"` + // Dashboard unique identifier referenced by this public dashboard + DashboardUid string `json:"dashboardUid"` + // Unique public access token + AccessToken *string `json:"accessToken,omitempty"` + // Flag that indicates if the public dashboard is enabled + IsEnabled bool `json:"isEnabled"` + // Flag that indicates if annotations are enabled + AnnotationsEnabled bool `json:"annotationsEnabled"` + // Flag that indicates if the time range picker is enabled + TimeSelectionEnabled bool `json:"timeSelectionEnabled"` +} + +// NewSpec creates a new Spec object. +func NewSpec() *Spec { + return &Spec{} } diff --git a/pkg/kinds/role/role_spec_gen.go b/pkg/kinds/role/role_spec_gen.go index ca4c2329268..c7a7123c983 100644 --- a/pkg/kinds/role/role_spec_gen.go +++ b/pkg/kinds/role/role_spec_gen.go @@ -7,22 +7,24 @@ // // Run 'make gen-cue' from repository root to regenerate. +// Code generated - EDITING IS FUTILE. DO NOT EDIT. + package role -// Spec defines model for Spec. 
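The generated constructors above (NewVariableModel, the dashboard NewSpec, NewAnnotationQuery) inline an anonymous closure per default, e.g. `(func(input bool) *bool { return &input })(false)`, because optional fields are pointers and Go has no address-of for scalar literals. Callers that want to override one of those defaults need the same trick; the generic helper below is a hypothetical convenience, not part of the generated code, and the struct is trimmed to a few fields for illustration:

package main

import "fmt"

// ptr is a hypothetical helper, equivalent to the inline
// (func(input T) *T { return &input })(value) closures emitted by the generator.
func ptr[T any](v T) *T { return &v }

func main() {
	// Overriding pointer-typed defaults on a struct shaped like the generated
	// dashboard Spec (fields trimmed for the sketch).
	type spec struct {
		Editable *bool
		Timezone *string
		Preload  *bool
	}
	s := spec{
		Editable: ptr(false), // generator default is true
		Timezone: ptr("utc"), // generator default is "browser"
		Preload:  ptr(true),
	}
	fmt.Println(*s.Editable, *s.Timezone, *s.Preload)
}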
type Spec struct { - // Role description - Description *string `json:"description,omitempty"` - - // Optional display - DisplayName *string `json:"displayName,omitempty"` - - // Name of the team. - GroupName *string `json:"groupName,omitempty"` - - // Do not show this role - Hidden bool `json:"hidden"` - // The role identifier `managed:builtins:editor:permissions` Name string `json:"name"` + // Optional display + DisplayName *string `json:"displayName,omitempty"` + // Name of the team. + GroupName *string `json:"groupName,omitempty"` + // Role description + Description *string `json:"description,omitempty"` + // Do not show this role + Hidden bool `json:"hidden"` +} + +// NewSpec creates a new Spec object. +func NewSpec() *Spec { + return &Spec{} } diff --git a/pkg/kinds/rolebinding/rolebinding_spec_gen.go b/pkg/kinds/rolebinding/rolebinding_spec_gen.go index 7434fd91dba..3d3149ef39d 100644 --- a/pkg/kinds/rolebinding/rolebinding_spec_gen.go +++ b/pkg/kinds/rolebinding/rolebinding_spec_gen.go @@ -7,66 +7,138 @@ // // Run 'make gen-cue' from repository root to regenerate. +// Code generated - EDITING IS FUTILE. DO NOT EDIT. + package rolebinding -// Defines values for BuiltinRoleRefKind. -const ( - BuiltinRoleRefKindBuiltinRole BuiltinRoleRefKind = "BuiltinRole" +import ( + json "encoding/json" + errors "errors" + fmt "fmt" ) -// Defines values for BuiltinRoleRefName. -const ( - BuiltinRoleRefNameAdmin BuiltinRoleRefName = "admin" - BuiltinRoleRefNameEditor BuiltinRoleRefName = "editor" - BuiltinRoleRefNameViewer BuiltinRoleRefName = "viewer" -) - -// Defines values for CustomRoleRefKind. -const ( - CustomRoleRefKindRole CustomRoleRefKind = "Role" -) - -// Defines values for SubjectKind. -const ( - SubjectKindTeam SubjectKind = "Team" - SubjectKindUser SubjectKind = "User" -) - -// BuiltinRoleRef defines model for BuiltinRoleRef. type BuiltinRoleRef struct { - Kind BuiltinRoleRefKind `json:"kind"` + Kind string `json:"kind"` Name BuiltinRoleRefName `json:"name"` } -// BuiltinRoleRefKind defines model for BuiltinRoleRef.Kind. -type BuiltinRoleRefKind string - -// BuiltinRoleRefName defines model for BuiltinRoleRef.Name. -type BuiltinRoleRefName string - -// CustomRoleRef defines model for CustomRoleRef. -type CustomRoleRef struct { - Kind CustomRoleRefKind `json:"kind"` - Name string `json:"name"` +// NewBuiltinRoleRef creates a new BuiltinRoleRef object. +func NewBuiltinRoleRef() *BuiltinRoleRef { + return &BuiltinRoleRef{ + Kind: "BuiltinRole", + } } -// CustomRoleRefKind defines model for CustomRoleRef.Kind. -type CustomRoleRefKind string +type CustomRoleRef struct { + Kind string `json:"kind"` + Name string `json:"name"` +} -// Subject defines model for Subject. -type Subject struct { - Kind SubjectKind `json:"kind"` +// NewCustomRoleRef creates a new CustomRoleRef object. +func NewCustomRoleRef() *CustomRoleRef { + return &CustomRoleRef{ + Kind: "Role", + } +} +type RoleBindingSubject struct { + Kind RoleBindingSubjectKind `json:"kind"` // The team/user identifier name Name string `json:"name"` } -// SubjectKind defines model for Subject.Kind. -type SubjectKind string +// NewRoleBindingSubject creates a new RoleBindingSubject object. +func NewRoleBindingSubject() *RoleBindingSubject { + return &RoleBindingSubject{} +} -// Spec defines model for Spec. 
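`NewBuiltinRoleRef` and `NewCustomRoleRef` above pre-fill the `kind` field that the union's `UnmarshalJSON` (shown just below) switches on, so callers only set the name. A sketch of assembling a binding in the same generated `rolebinding` package, using the Spec and union types defined in the following hunk:

package rolebinding

import "encoding/json"

// buildEditorBinding sketches how a caller assembles a Spec: the constructors
// pre-set the discriminator ("BuiltinRole" / "Role"), and the resulting JSON
// is exactly what the union's UnmarshalJSON expects back.
func buildEditorBinding() ([]byte, error) {
	spec := NewSpec()
	spec.Role.BuiltinRoleRef = NewBuiltinRoleRef()
	spec.Role.BuiltinRoleRef.Name = BuiltinRoleRefNameEditor
	spec.Subject = RoleBindingSubject{
		Kind: RoleBindingSubjectKindTeam,
		Name: "backend-team",
	}
	// Marshals roughly as:
	// {"role":{"kind":"BuiltinRole","name":"editor"},"subject":{"kind":"Team","name":"backend-team"}}
	return json.Marshal(spec)
}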
type Spec struct { // The role we are discussing - Role any `json:"role"` - Subject Subject `json:"subject"` + Role BuiltinRoleRefOrCustomRoleRef `json:"role"` + // The team or user that has the specified role + Subject RoleBindingSubject `json:"subject"` +} + +// NewSpec creates a new Spec object. +func NewSpec() *Spec { + return &Spec{ + Role: *NewBuiltinRoleRefOrCustomRoleRef(), + Subject: *NewRoleBindingSubject(), + } +} + +type BuiltinRoleRefName string + +const ( + BuiltinRoleRefNameViewer BuiltinRoleRefName = "viewer" + BuiltinRoleRefNameEditor BuiltinRoleRefName = "editor" + BuiltinRoleRefNameAdmin BuiltinRoleRefName = "admin" +) + +type RoleBindingSubjectKind string + +const ( + RoleBindingSubjectKindTeam RoleBindingSubjectKind = "Team" + RoleBindingSubjectKindUser RoleBindingSubjectKind = "User" +) + +type BuiltinRoleRefOrCustomRoleRef struct { + BuiltinRoleRef *BuiltinRoleRef `json:"BuiltinRoleRef,omitempty"` + CustomRoleRef *CustomRoleRef `json:"CustomRoleRef,omitempty"` +} + +// NewBuiltinRoleRefOrCustomRoleRef creates a new BuiltinRoleRefOrCustomRoleRef object. +func NewBuiltinRoleRefOrCustomRoleRef() *BuiltinRoleRefOrCustomRoleRef { + return &BuiltinRoleRefOrCustomRoleRef{} +} + +// MarshalJSON implements a custom JSON marshalling logic to encode `BuiltinRoleRefOrCustomRoleRef` as JSON. +func (resource BuiltinRoleRefOrCustomRoleRef) MarshalJSON() ([]byte, error) { + if resource.BuiltinRoleRef != nil { + return json.Marshal(resource.BuiltinRoleRef) + } + if resource.CustomRoleRef != nil { + return json.Marshal(resource.CustomRoleRef) + } + + return nil, fmt.Errorf("no value for disjunction of refs") +} + +// UnmarshalJSON implements a custom JSON unmarshalling logic to decode `BuiltinRoleRefOrCustomRoleRef` from JSON. +func (resource *BuiltinRoleRefOrCustomRoleRef) UnmarshalJSON(raw []byte) error { + if raw == nil { + return nil + } + + // FIXME: this is wasteful, we need to find a more efficient way to unmarshal this. 
+ parsedAsMap := make(map[string]any) + if err := json.Unmarshal(raw, &parsedAsMap); err != nil { + return err + } + + discriminator, found := parsedAsMap["kind"] + if !found { + return errors.New("discriminator field 'kind' not found in payload") + } + + switch discriminator { + case "BuiltinRole": + var builtinRoleRef BuiltinRoleRef + if err := json.Unmarshal(raw, &builtinRoleRef); err != nil { + return err + } + + resource.BuiltinRoleRef = &builtinRoleRef + return nil + case "Role": + var customRoleRef CustomRoleRef + if err := json.Unmarshal(raw, &customRoleRef); err != nil { + return err + } + + resource.CustomRoleRef = &customRoleRef + return nil + } + + return fmt.Errorf("could not unmarshal resource with `kind = %v`", discriminator) } diff --git a/pkg/plugins/codegen/go.mod b/pkg/plugins/codegen/go.mod index 2e7ff27a71b..3d2b4743a41 100644 --- a/pkg/plugins/codegen/go.mod +++ b/pkg/plugins/codegen/go.mod @@ -7,6 +7,7 @@ replace github.com/grafana/grafana/pkg/codegen => ../../codegen require ( cuelang.org/go v0.11.1 github.com/grafana/codejen v0.0.4-0.20230321061741-77f656893a3d + github.com/grafana/cog v0.0.12 github.com/grafana/cuetsy v0.1.11 github.com/grafana/grafana/pkg/codegen v0.0.0-00010101000000-000000000000 ) @@ -15,8 +16,8 @@ require ( cuelabs.dev/go/oci/ociregistry v0.0.0-20240906074133-82eb438dd565 // indirect github.com/cockroachdb/apd/v3 v3.2.1 // indirect github.com/dave/dst v0.27.3 // indirect - github.com/dprotaso/go-yit v0.0.0-20220510233725-9ba8df137936 // indirect github.com/emicklei/proto v1.13.2 // indirect + github.com/expr-lang/expr v1.16.9 // indirect github.com/getkin/kin-openapi v0.128.0 // indirect github.com/go-openapi/jsonpointer v0.21.0 // indirect github.com/go-openapi/swag v0.23.0 // indirect @@ -24,25 +25,25 @@ require ( github.com/google/uuid v1.6.0 // indirect github.com/hashicorp/errwrap v1.1.0 // indirect github.com/hashicorp/go-multierror v1.1.1 // indirect + github.com/huandu/xstrings v1.5.0 // indirect github.com/invopop/yaml v0.3.1 // indirect github.com/josharian/intern v1.0.0 // indirect github.com/kr/text v0.2.0 // indirect github.com/mailru/easyjson v0.7.7 // indirect github.com/mitchellh/go-wordwrap v1.0.1 // indirect github.com/mohae/deepcopy v0.0.0-20170929034955-c48cc78d4826 // indirect - github.com/oapi-codegen/oapi-codegen/v2 v2.4.1 // indirect github.com/opencontainers/go-digest v1.0.0 // indirect github.com/opencontainers/image-spec v1.1.0 // indirect github.com/pelletier/go-toml/v2 v2.2.3 // indirect github.com/perimeterx/marshmallow v1.1.5 // indirect github.com/protocolbuffers/txtpbfmt v0.0.0-20241112170944-20d2c9ebc01d // indirect github.com/rogpeppe/go-internal v1.13.1 // indirect - github.com/speakeasy-api/openapi-overlay v0.9.0 // indirect - github.com/vmware-labs/yaml-jsonpath v0.3.2 // indirect + github.com/santhosh-tekuri/jsonschema/v5 v5.3.1 // indirect github.com/xlab/treeprint v1.2.0 // indirect + github.com/yalue/merged_fs v1.3.0 // indirect golang.org/x/mod v0.22.0 // indirect golang.org/x/net v0.34.0 // indirect - golang.org/x/oauth2 v0.23.0 // indirect + golang.org/x/oauth2 v0.24.0 // indirect golang.org/x/sync v0.10.0 // indirect golang.org/x/text v0.21.0 // indirect golang.org/x/tools v0.29.0 // indirect diff --git a/pkg/plugins/codegen/go.sum b/pkg/plugins/codegen/go.sum index 0300df0c549..685edbe16cf 100644 --- a/pkg/plugins/codegen/go.sum +++ b/pkg/plugins/codegen/go.sum @@ -2,29 +2,18 @@ cuelabs.dev/go/oci/ociregistry v0.0.0-20240906074133-82eb438dd565 h1:R5wwEcbEZSB cuelabs.dev/go/oci/ociregistry 
v0.0.0-20240906074133-82eb438dd565/go.mod h1:5A4xfTzHTXfeVJBU6RAUf+QrlfTCW+017q/QiW+sMLg= cuelang.org/go v0.11.1 h1:pV+49MX1mmvDm8Qh3Za3M786cty8VKPWzQ1Ho4gZRP0= cuelang.org/go v0.11.1/go.mod h1:PBY6XvPUswPPJ2inpvUozP9mebDVTXaeehQikhZPBz0= -github.com/chzyer/logex v1.1.10/go.mod h1:+Ywpsq7O8HXn0nuIou7OrIPyXbp3wmkHB+jjWRnGsAI= -github.com/chzyer/readline v0.0.0-20180603132655-2972be24d48e/go.mod h1:nSuG5e5PlCu98SY8svDHJxuZscDgtXS6KTTbou5AhLI= -github.com/chzyer/test v0.0.0-20180213035817-a1ea475d72b1/go.mod h1:Q3SI9o4m/ZMnBNeIyt5eFwwo7qiLfzFZmjNmxjkiQlU= github.com/cockroachdb/apd/v3 v3.2.1 h1:U+8j7t0axsIgvQUqthuNm82HIrYXodOV2iWLWtEaIwg= github.com/cockroachdb/apd/v3 v3.2.1/go.mod h1:klXJcjp+FffLTHlhIG69tezTDvdP065naDsHzKhYSqc= github.com/creack/pty v1.1.9/go.mod h1:oKZEueFk5CKHvIhNR5MUki03XCEU+Q6VDXinZuGJ33E= github.com/dave/dst v0.27.3 h1:P1HPoMza3cMEquVf9kKy8yXsFirry4zEnWOdYPOoIzY= github.com/dave/dst v0.27.3/go.mod h1:jHh6EOibnHgcUW3WjKHisiooEkYwqpHLBSX1iOBhEyc= -github.com/dave/jennifer v1.6.0 h1:MQ/6emI2xM7wt0tJzJzyUik2Q3Tcn2eE0vtYgh4GPVI= -github.com/dave/jennifer v1.6.0/go.mod h1:AxTG893FiZKqxy3FP1kL80VMshSMuz2G+EgvszgGRnk= github.com/davecgh/go-spew v1.1.0/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= -github.com/davecgh/go-spew v1.1.1/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= github.com/davecgh/go-spew v1.1.2-0.20180830191138-d8f796af33cc h1:U9qPSI2PIWSS1VwoXQT9A3Wy9MM3WgvqSxFWenqJduM= github.com/davecgh/go-spew v1.1.2-0.20180830191138-d8f796af33cc/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= -github.com/dprotaso/go-yit v0.0.0-20191028211022-135eb7262960/go.mod h1:9HQzr9D/0PGwMEbC3d5AB7oi67+h4TsQqItC1GVYG58= -github.com/dprotaso/go-yit v0.0.0-20220510233725-9ba8df137936 h1:PRxIJD8XjimM5aTknUK9w6DHLDox2r2M3DI4i2pnd3w= -github.com/dprotaso/go-yit v0.0.0-20220510233725-9ba8df137936/go.mod h1:ttYvX5qlB+mlV1okblJqcSMtR4c52UKxDiX9GRBS8+Q= github.com/emicklei/proto v1.13.2 h1:z/etSFO3uyXeuEsVPzfl56WNgzcvIr42aQazXaQmFZY= github.com/emicklei/proto v1.13.2/go.mod h1:rn1FgRS/FANiZdD2djyH7TMA9jdRDcYQ9IEN9yvjX0A= -github.com/fsnotify/fsnotify v1.4.7/go.mod h1:jwhsz4b93w/PPRr/qN1Yymfu8t87LnFCMoQvtojpjFo= -github.com/fsnotify/fsnotify v1.4.9/go.mod h1:znqG4EE+3YCdAaPaxE2ZRY/06pZUdp0tY4IgpuI1SZQ= -github.com/fsnotify/fsnotify v1.8.0 h1:dAwr6QBTBZIkG8roQaJjGof0pp0EeF+tNV7YBP3F/8M= -github.com/fsnotify/fsnotify v1.8.0/go.mod h1:8jBTzvmWwFyi3Pb8djgCCO5IBqzKJ/Jwo8TRcHyHii0= +github.com/expr-lang/expr v1.16.9 h1:WUAzmR0JNI9JCiF0/ewwHB1gmcGw5wW7nWt8gc6PpCI= +github.com/expr-lang/expr v1.16.9/go.mod h1:8/vRC7+7HBzESEqt5kKpYXxrxkr31SaO8r40VO/1IT4= github.com/getkin/kin-openapi v0.128.0 h1:jqq3D9vC9pPq1dGcOCv7yOp1DaEe7c/T1vzcLbITSp4= github.com/getkin/kin-openapi v0.128.0/go.mod h1:OZrfXzUfGrNbsKj+xmFBx6E5c6yH3At/tAKSc2UszXM= github.com/go-openapi/jsonpointer v0.21.0 h1:YgdVicSA9vH5RiHs9TZW5oyafXZFc6+2Vc1rr/O9oNQ= @@ -33,29 +22,16 @@ github.com/go-openapi/swag v0.23.0 h1:vsEVJDUo2hPJ2tu0/Xc+4noaxyEffXNIs3cOULZ+Gr github.com/go-openapi/swag v0.23.0/go.mod h1:esZ8ITTYEsH1V2trKHjAN8Ai7xHb8RV+YSZ577vPjgQ= github.com/go-quicktest/qt v1.101.0 h1:O1K29Txy5P2OK0dGo59b7b0LR6wKfIhttaAhHUyn7eI= github.com/go-quicktest/qt v1.101.0/go.mod h1:14Bz/f7NwaXPtdYEgzsx46kqSxVwTbzVZsDC26tQJow= -github.com/go-task/slim-sprig v0.0.0-20210107165309-348f09dbbbc0/go.mod h1:fyg7847qk6SyHyPtNmDHnmrv/HOrqktSC+C9fM+CJOE= github.com/go-test/deep v1.0.8 h1:TDsG77qcSprGbC6vTN8OuXp5g+J+b5Pcguhf7Zt61VM= github.com/go-test/deep v1.0.8/go.mod h1:5C2ZWiW0ErCdrYzpqxLbTX7MG14M9iiw8DgHncVwcsE= 
-github.com/golang/protobuf v1.2.0/go.mod h1:6lQm79b+lXiMfvg/cZm0SGofjICqVBUtrP5yJMmIC1U= -github.com/golang/protobuf v1.4.0-rc.1/go.mod h1:ceaxUfeHdC40wWswd/P6IGgMaK3YpKi5j83Wpe3EHw8= -github.com/golang/protobuf v1.4.0-rc.1.0.20200221234624-67d41d38c208/go.mod h1:xKAWHe0F5eneWXFV3EuXVDTCmh+JuBKY0li0aMyXATA= -github.com/golang/protobuf v1.4.0-rc.2/go.mod h1:LlEzMj4AhA7rCAGe4KMBDvJI+AwstrUpVNzEA03Pprs= -github.com/golang/protobuf v1.4.0-rc.4.0.20200313231945-b860323f09d0/go.mod h1:WU3c8KckQ9AFe+yFwt9sWVRKCVIyN9cPHBJSNnbL67w= -github.com/golang/protobuf v1.4.0/go.mod h1:jodUvKwWbYaEsadDk5Fwe5c77LiNKVO9IDvqG2KuDX0= -github.com/golang/protobuf v1.4.2/go.mod h1:oDoupMAO8OvCJWAcko0GGGIgR6R6ocIYbsSw735rRwI= -github.com/golang/protobuf v1.5.0/go.mod h1:FsONVRAS9T7sI+LIUmWTfcYkHO4aIWwzhcaSAoJOfIk= -github.com/golang/protobuf v1.5.2/go.mod h1:XVQd3VNwM+JqD3oG2Ue2ip4fOMUkwXdXDdiuN0vRsmY= -github.com/google/go-cmp v0.3.0/go.mod h1:8QqcDgzrUqlUb/G2PQTWiueGozuR1884gddMywk6iLU= -github.com/google/go-cmp v0.3.1/go.mod h1:8QqcDgzrUqlUb/G2PQTWiueGozuR1884gddMywk6iLU= -github.com/google/go-cmp v0.4.0/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE= -github.com/google/go-cmp v0.5.5/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE= github.com/google/go-cmp v0.6.0 h1:ofyhxvXcZhMsU5ulbFiLKl/XBFqE1GSq7atu8tAmTRI= github.com/google/go-cmp v0.6.0/go.mod h1:17dUlkBOakJ0+DkrSSNjCkIjxS6bF9zb3elmeNGIjoY= -github.com/google/pprof v0.0.0-20210407192527-94a9f03dee38/go.mod h1:kpwsk12EmLew5upagYY7GY0pfYCcupk39gWOCRROcvE= github.com/google/uuid v1.6.0 h1:NIvaJDMOsjHA8n1jAhLSgzrAzy1Hgr+hNrb57e+94F0= github.com/google/uuid v1.6.0/go.mod h1:TIyPZe4MgqvfeYDBFedMoGGpEw/LqOeaOT+nhxU+yHo= github.com/grafana/codejen v0.0.4-0.20230321061741-77f656893a3d h1:hrXbGJ5jgp6yNITzs5o+zXq0V5yT3siNJ+uM8LGwWKk= github.com/grafana/codejen v0.0.4-0.20230321061741-77f656893a3d/go.mod h1:zmwwM/DRyQB7pfuBjTWII3CWtxcXh8LTwAYGfDfpR6s= +github.com/grafana/cog v0.0.12 h1:MJfFUVzp0El3+zZCmUQ2Y8uzwvM3aa5zj7EOeeuG6VY= +github.com/grafana/cog v0.0.12/go.mod h1:HwJbc60fZ+viayROClLGdDwO5w/JjBOpO9wjGnAfMLc= github.com/grafana/cuetsy v0.1.11 h1:I3IwBhF+UaQxRM79HnImtrAn8REGdb5M3+C4QrYHoWk= github.com/grafana/cuetsy v0.1.11/go.mod h1:Ix97+CPD8ws9oSSxR3/Lf4ahU1I4Np83kjJmDVnLZvc= github.com/hashicorp/errwrap v1.0.0/go.mod h1:YH+1FKiLXxHSkmPseP+kNlulaMuP3n2brvKWEqk/Jc4= @@ -63,17 +39,14 @@ github.com/hashicorp/errwrap v1.1.0 h1:OxrOeh75EUXMY8TBjag2fzXGZ40LB6IKw45YeGUDY github.com/hashicorp/errwrap v1.1.0/go.mod h1:YH+1FKiLXxHSkmPseP+kNlulaMuP3n2brvKWEqk/Jc4= github.com/hashicorp/go-multierror v1.1.1 h1:H5DkEtf6CXdFp0N0Em5UCwQpXMWke8IA0+lD48awMYo= github.com/hashicorp/go-multierror v1.1.1/go.mod h1:iw975J/qwKPdAO1clOe2L8331t/9/fmwbPZ6JB6eMoM= -github.com/hpcloud/tail v1.0.0/go.mod h1:ab1qPbhIpdTxEkNHXyeSf5vhxWSCs/tWer42PpOxQnU= -github.com/ianlancetaylor/demangle v0.0.0-20200824232613-28f6c0f3b639/go.mod h1:aSSvb/t6k1mPoxDqO4vJh6VOCGPwU4O0C2/Eqndh1Sc= +github.com/huandu/xstrings v1.5.0 h1:2ag3IFq9ZDANvthTwTiqSSZLjDc+BedvHPAp5tJy2TI= +github.com/huandu/xstrings v1.5.0/go.mod h1:y5/lhBue+AyNmUVz9RLU9xbLR0o4KIIExikq4ovT0aE= github.com/invopop/yaml v0.3.1 h1:f0+ZpmhfBSS4MhG+4HYseMdJhoeeopbSKbq5Rpeelso= github.com/invopop/yaml v0.3.1/go.mod h1:PMOp3nn4/12yEZUFfmOuNHJsZToEEOwoWsT+D81KkeA= github.com/josharian/intern v1.0.0 h1:vlS4z54oSdjm0bgjRigI+G1HpF+tI+9rE5LLzOg8HmY= github.com/josharian/intern v1.0.0/go.mod h1:5DoeVV0s6jJacbCEi61lwdGj/aVlrQvzHFFd8Hwg//Y= -github.com/kr/pretty v0.1.0/go.mod h1:dAy3ld7l9f0ibDNOQOHHMYYIIbhfbHSm3C4ZsoJORNo= github.com/kr/pretty 
v0.3.1 h1:flRD4NNwYAUpkphVc1HcthR4KEIFJ65n8Mw5qdRn3LE= github.com/kr/pretty v0.3.1/go.mod h1:hoEshYVHaxMs3cyo3Yncou5ZscifuDolrwPKZanG3xk= -github.com/kr/pty v1.1.1/go.mod h1:pFQYn66WHrOpPYNljwOMqo10TkYh1fy3cYio2l3bCsQ= -github.com/kr/text v0.1.0/go.mod h1:4Jbv+DJW3UT/LiOwJeYQe1efqtUx/iVham/4vfdArNI= github.com/kr/text v0.2.0 h1:5Nx0Ya0ZqY2ygV366QzturHI13Jq95ApcVaJBhpS+AY= github.com/kr/text v0.2.0/go.mod h1:eLer722TekiGuMkidMxC/pM04lWEeraHUUmBw8l2grE= github.com/kylelemons/godebug v1.1.0 h1:RPNrshWIDI6G2gRW9EHilWtl7Z6Sb1BR0xunSBf0SNc= @@ -88,25 +61,6 @@ github.com/mitchellh/go-wordwrap v1.0.1 h1:TLuKupo69TCn6TQSyGxwI1EblZZEsQ0vMlAFQ github.com/mitchellh/go-wordwrap v1.0.1/go.mod h1:R62XHJLzvMFRBbcrT7m7WgmE1eOyTSsCt+hzestvNj0= github.com/mohae/deepcopy v0.0.0-20170929034955-c48cc78d4826 h1:RWengNIwukTxcDr9M+97sNutRR1RKhG96O6jWumTTnw= github.com/mohae/deepcopy v0.0.0-20170929034955-c48cc78d4826/go.mod h1:TaXosZuwdSHYgviHp1DAtfrULt5eUgsSMsZf+YrPgl8= -github.com/nxadm/tail v1.4.4/go.mod h1:kenIhsEOeOJmVchQTgglprH7qJGnHDVpk1VPCcaMI8A= -github.com/nxadm/tail v1.4.8 h1:nPr65rt6Y5JFSKQO7qToXr7pePgD6Gwiw05lkbyAQTE= -github.com/nxadm/tail v1.4.8/go.mod h1:+ncqLTQzXmGhMZNUePPaPqPvBxHAIsmXswZKocGu+AU= -github.com/oapi-codegen/oapi-codegen/v2 v2.4.1 h1:ykgG34472DWey7TSjd8vIfNykXgjOgYJZoQbKfEeY/Q= -github.com/oapi-codegen/oapi-codegen/v2 v2.4.1/go.mod h1:N5+lY1tiTDV3V1BeHtOxeWXHoPVeApvsvjJqegfoaz8= -github.com/onsi/ginkgo v1.6.0/go.mod h1:lLunBs/Ym6LB5Z9jYTR76FiuTmxDTDusOGeTQH+WWjE= -github.com/onsi/ginkgo v1.10.2/go.mod h1:lLunBs/Ym6LB5Z9jYTR76FiuTmxDTDusOGeTQH+WWjE= -github.com/onsi/ginkgo v1.12.1/go.mod h1:zj2OWP4+oCPe1qIXoGWkgMRwljMUYCdkwsT2108oapk= -github.com/onsi/ginkgo v1.16.4/go.mod h1:dX+/inL/fNMqNlz0e9LfyB9TswhZpCVdJM/Z6Vvnwo0= -github.com/onsi/ginkgo v1.16.5 h1:8xi0RTUf59SOSfEtZMvwTvXYMzG4gV23XVHOZiXNtnE= -github.com/onsi/ginkgo v1.16.5/go.mod h1:+E8gABHa3K6zRBolWtd+ROzc/U5bkGt0FwiG042wbpU= -github.com/onsi/ginkgo/v2 v2.1.3/go.mod h1:vw5CSIxN1JObi/U8gcbwft7ZxR2dgaR70JSE3/PpL4c= -github.com/onsi/gomega v1.7.0/go.mod h1:ex+gbHU/CVuBBDIJjb2X0qEXbFg53c61hWP/1CpauHY= -github.com/onsi/gomega v1.7.1/go.mod h1:XdKZgCCFLUoM/7CFJVPcG8C1xQ1AJ0vpAezJrB7JYyY= -github.com/onsi/gomega v1.10.1/go.mod h1:iN09h71vgCQne3DLsj+A5owkum+a2tYe+TOCB1ybHNo= -github.com/onsi/gomega v1.17.0/go.mod h1:HnhC7FXeEQY45zxNK3PPoIUhzk/80Xly9PcubAlGdZY= -github.com/onsi/gomega v1.19.0/go.mod h1:LY+I3pBVzYsTBU1AnDwOSxaYi9WoWiqgwooUqq9yPro= -github.com/onsi/gomega v1.35.1 h1:Cwbd75ZBPxFSuZ6T+rN/WCb/gOc6YgFBXLlZLhC7Ds4= -github.com/onsi/gomega v1.35.1/go.mod h1:PvZbdDc8J6XJEpDK4HCuRBm8a6Fzp9/DmhC9C7yFlog= github.com/opencontainers/go-digest v1.0.0 h1:apOUWs51W5PlhuyGyz9FCeeBIOUDA/6nW8Oi/yOhh5U= github.com/opencontainers/go-digest v1.0.0/go.mod h1:0JzlMkj0TRzQZfJkVvzbP0HBR3IKzErnv2BNG4W4MAM= github.com/opencontainers/image-spec v1.1.0 h1:8SG7/vwALn54lVB/0yZ/MMwhFrPYtpEHQb2IpWsCzug= @@ -124,100 +78,35 @@ github.com/protocolbuffers/txtpbfmt v0.0.0-20241112170944-20d2c9ebc01d h1:HWfigq github.com/protocolbuffers/txtpbfmt v0.0.0-20241112170944-20d2c9ebc01d/go.mod h1:jgxiZysxFPM+iWKwQwPR+y+Jvo54ARd4EisXxKYpB5c= github.com/rogpeppe/go-internal v1.13.1 h1:KvO1DLK/DRN07sQ1LQKScxyZJuNnedQ5/wKSR38lUII= github.com/rogpeppe/go-internal v1.13.1/go.mod h1:uMEvuHeurkdAXX61udpOXGD/AzZDWNMNyH2VO9fmH0o= -github.com/sergi/go-diff v1.1.0/go.mod h1:STckp+ISIX8hZLjrqAeVduY0gWCT9IjLuqbuNXdaHfM= -github.com/sergi/go-diff v1.3.2-0.20230802210424-5b0b94c5c0d3 h1:n661drycOFuPLCN3Uc8sB6B/s6Z4t2xvBgU1htSHuq8= -github.com/sergi/go-diff 
v1.3.2-0.20230802210424-5b0b94c5c0d3/go.mod h1:A0bzQcvG0E7Rwjx0REVgAGH58e96+X0MeOfepqsbeW4= -github.com/speakeasy-api/openapi-overlay v0.9.0 h1:Wrz6NO02cNlLzx1fB093lBlYxSI54VRhy1aSutx0PQg= -github.com/speakeasy-api/openapi-overlay v0.9.0/go.mod h1:f5FloQrHA7MsxYg9djzMD5h6dxrHjVVByWKh7an8TRc= +github.com/santhosh-tekuri/jsonschema/v5 v5.3.1 h1:lZUw3E0/J3roVtGQ+SCrUrg3ON6NgVqpn3+iol9aGu4= +github.com/santhosh-tekuri/jsonschema/v5 v5.3.1/go.mod h1:uToXkOrWAZ6/Oc07xWQrPOhJotwFIyu2bBVN41fcDUY= github.com/stretchr/objx v0.1.0/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME= -github.com/stretchr/testify v1.4.0/go.mod h1:j7eGeouHqKxXV5pUuKE4zz7dFj8WfuZ+81PSLYec5m4= -github.com/stretchr/testify v1.5.1/go.mod h1:5W2xD1RspED5o8YsWQXVCued0rvSQ+mT+I5cxcmMvtA= github.com/stretchr/testify v1.7.0/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg= github.com/stretchr/testify v1.10.0 h1:Xv5erBjTwe/5IxqUQTdXv5kgmIvbHo3QQyRwhJsOfJA= github.com/stretchr/testify v1.10.0/go.mod h1:r2ic/lqez/lEtzL7wO/rwa5dbSLXVDPFyf8C91i36aY= github.com/ugorji/go/codec v1.2.11 h1:BMaWp1Bb6fHwEtbplGBGJ498wD+LKlNSl25MjdZY4dU= github.com/ugorji/go/codec v1.2.11/go.mod h1:UNopzCgEMSXjBc6AOMqYvWC1ktqTAfzJZUZgYf6w6lg= -github.com/vmware-labs/yaml-jsonpath v0.3.2 h1:/5QKeCBGdsInyDCyVNLbXyilb61MXGi9NP674f9Hobk= -github.com/vmware-labs/yaml-jsonpath v0.3.2/go.mod h1:U6whw1z03QyqgWdgXxvVnQ90zN1BWz5V+51Ewf8k+rQ= github.com/xlab/treeprint v1.2.0 h1:HzHnuAF1plUN2zGlAFHbSQP2qJ0ZAD3XF5XD7OesXRQ= github.com/xlab/treeprint v1.2.0/go.mod h1:gj5Gd3gPdKtR1ikdDK6fnFLdmIS0X30kTTuNd/WEJu0= -github.com/yuin/goldmark v1.2.1/go.mod h1:3hX8gzYuyVAZsxl0MRgGTJEmQBFcNTphYh9decYSb74= -golang.org/x/crypto v0.0.0-20190308221718-c2843e01d9a2/go.mod h1:djNgcEr1/C05ACkg1iLfiJU5Ep61QUkGW8qpdssI0+w= -golang.org/x/crypto v0.0.0-20191011191535-87dc89f01550/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI= -golang.org/x/crypto v0.0.0-20200622213623-75b288015ac9/go.mod h1:LzIPMQfyMNhhGPhUkYOs5KpL4U8rLKemX1yGLhDgUto= -golang.org/x/mod v0.3.0/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA= +github.com/yalue/merged_fs v1.3.0 h1:qCeh9tMPNy/i8cwDsQTJ5bLr6IRxbs6meakNE5O+wyY= +github.com/yalue/merged_fs v1.3.0/go.mod h1:WqqchfVYQyclV2tnR7wtRhBddzBvLVR83Cjw9BKQw0M= golang.org/x/mod v0.22.0 h1:D4nJWe9zXqHOmWqj4VMOJhvzj7bEZg4wEYa759z1pH4= golang.org/x/mod v0.22.0/go.mod h1:6SkKJ3Xj0I0BrPOZoBy3bdMptDDU9oJrpohJ3eWZ1fY= -golang.org/x/net v0.0.0-20180906233101-161cd47e91fd/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= -golang.org/x/net v0.0.0-20190404232315-eb5bcb51f2a3/go.mod h1:t9HGtf8HONx5eT2rtn7q6eTqICYqUVnKs3thJo3Qplg= -golang.org/x/net v0.0.0-20190620200207-3b0461eec859/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= -golang.org/x/net v0.0.0-20200520004742-59133d7f0dd7/go.mod h1:qpuaurCH72eLCgpAm/N6yyVIVM9cpaDIP3A8BGJEC5A= -golang.org/x/net v0.0.0-20201021035429-f5854403a974/go.mod h1:sp8m0HH+o8qH0wwXwYZr8TS3Oi6o0r6Gce1SSxlDquU= -golang.org/x/net v0.0.0-20210428140749-89ef3d95e781/go.mod h1:OJAsFXCWl8Ukc7SiCT/9KSuxbyM7479/AVlXFRxuMCk= -golang.org/x/net v0.0.0-20220225172249-27dd8689420f/go.mod h1:CfG3xpIq0wQ8r1q4Su4UZFWDARRcnwPjda9FqA0JpMk= golang.org/x/net v0.34.0 h1:Mb7Mrk043xzHgnRM88suvJFwzVrRfHEHJEl5/71CKw0= golang.org/x/net v0.34.0/go.mod h1:di0qlW3YNM5oh6GqDGQr92MyTozJPmybPK4Ev/Gm31k= -golang.org/x/oauth2 v0.23.0 h1:PbgcYx2W7i4LvjJWEbf0ngHV6qJYr86PkAV3bXdLEbs= -golang.org/x/oauth2 v0.23.0/go.mod h1:XYTD2NtWslqkgxebSiOHnXEap4TF09sJSc7H1sXbhtI= -golang.org/x/sync v0.0.0-20180314180146-1d60e4601c6f/go.mod 
h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= -golang.org/x/sync v0.0.0-20190423024810-112230192c58/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= -golang.org/x/sync v0.0.0-20201020160332-67f06af15bc9/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= +golang.org/x/oauth2 v0.24.0 h1:KTBBxWqUa0ykRPLtV69rRto9TLXcqYkeswu48x/gvNE= +golang.org/x/oauth2 v0.24.0/go.mod h1:XYTD2NtWslqkgxebSiOHnXEap4TF09sJSc7H1sXbhtI= golang.org/x/sync v0.10.0 h1:3NQrjDixjgGwUOCaF8w2+VYHv0Ve/vGYSbdkTa98gmQ= golang.org/x/sync v0.10.0/go.mod h1:Czt+wKu1gCyEFDUtn0jG5QVvpJ6rzVqr5aXyt9drQfk= -golang.org/x/sys v0.0.0-20180909124046-d0be0721c37e/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= -golang.org/x/sys v0.0.0-20190215142949-d0b11bdaac8a/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= -golang.org/x/sys v0.0.0-20190412213103-97732733099d/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/sys v0.0.0-20190904154756-749cb33beabd/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/sys v0.0.0-20191005200804-aed5e4c7ecf9/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/sys v0.0.0-20191120155948-bd437916bb0e/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/sys v0.0.0-20191204072324-ce4227a45e2e/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/sys v0.0.0-20200323222414-85ca7c5b95cd/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/sys v0.0.0-20200930185726-fdedc70b468f/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/sys v0.0.0-20201119102817-f84b799fce68/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/sys v0.0.0-20210112080510-489259a85091/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/sys v0.0.0-20210423082822-04245dca01da/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/sys v0.0.0-20210615035016-665e8c7367d1/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= -golang.org/x/sys v0.0.0-20211216021012-1d35b9e2eb4e/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.29.0 h1:TPYlXGxvx1MGTn2GiZDhnjPA9wZzZeGKHHmKhHYvgaU= golang.org/x/sys v0.29.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA= -golang.org/x/term v0.0.0-20201126162022-7de9c90e9dd1/go.mod h1:bj7SfCRtBDWHUb9snDiAeCFNEtKQo2Wmx5Cou7ajbmo= -golang.org/x/term v0.0.0-20210927222741-03fcf44c2211/go.mod h1:jbD1KX2456YbFQfuXm/mYQcufACuNUgVhRMnK/tPxf8= -golang.org/x/text v0.3.0/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ= -golang.org/x/text v0.3.3/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ= -golang.org/x/text v0.3.6/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ= -golang.org/x/text v0.3.7/go.mod h1:u+2+/6zg+i71rQMx5EYifcz6MCKuco9NR6JIITiCfzQ= golang.org/x/text v0.21.0 h1:zyQAAkrwaneQ066sspRyJaG9VNi/YJ1NfzcGB3hZ/qo= golang.org/x/text v0.21.0/go.mod h1:4IBbMaMmOPCJ8SecivzSH54+73PCFmPWxNTLm+vZkEQ= -golang.org/x/tools v0.0.0-20180917221912-90fa682c2a6e/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ= -golang.org/x/tools v0.0.0-20191119224855-298f0cb1881e/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= -golang.org/x/tools v0.0.0-20201224043029-2b0845dc783e/go.mod h1:emZCQorbCU4vsT4fOWvOPXz4eW1wZW4PmDk9uLelYpA= golang.org/x/tools v0.29.0 h1:Xx0h3TtM9rzQpQuR4dKLrdglAmCEN5Oi+P74JdhdzXE= golang.org/x/tools v0.29.0/go.mod h1:KMQVMRsVxU6nHCFXrBPhDB8XncLNLM0lIy/F14RP588= -golang.org/x/xerrors v0.0.0-20190717185122-a985d3407aa7/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= 
-golang.org/x/xerrors v0.0.0-20191011141410-1b5146add898/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= -golang.org/x/xerrors v0.0.0-20191204190536-9bdfabe68543/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= -golang.org/x/xerrors v0.0.0-20200804184101-5ec99f83aff1/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= -google.golang.org/protobuf v0.0.0-20200109180630-ec00e32a8dfd/go.mod h1:DFci5gLYBciE7Vtevhsrf46CRTquxDuWsQurQQe4oz8= -google.golang.org/protobuf v0.0.0-20200221191635-4d8936d0db64/go.mod h1:kwYJMbMJ01Woi6D6+Kah6886xMZcty6N08ah7+eCXa0= -google.golang.org/protobuf v0.0.0-20200228230310-ab0ca4ff8a60/go.mod h1:cfTl7dwQJ+fmap5saPgwCLgHXTUD7jkjRqWcaiX5VyM= -google.golang.org/protobuf v1.20.1-0.20200309200217-e05f789c0967/go.mod h1:A+miEFZTKqfCUM6K7xSMQL9OKL/b6hQv+e19PK+JZNE= -google.golang.org/protobuf v1.21.0/go.mod h1:47Nbq4nVaFHyn7ilMalzfO3qCViNmqZ2kzikPIcrTAo= -google.golang.org/protobuf v1.23.0/go.mod h1:EGpADcykh3NcUnDUJcl1+ZksZNG86OlYog2l/sGQquU= -google.golang.org/protobuf v1.26.0-rc.1/go.mod h1:jlhhOSvTdKEhbULTjvd4ARK9grFBp09yW+WbY/TyQbw= -google.golang.org/protobuf v1.26.0/go.mod h1:9q0QmTI4eRPtz6boOQmLYwt+qCgq0jsYwAQnmE0givc= gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0= -gopkg.in/check.v1 v1.0.0-20190902080502-41f04d3bba15/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0= gopkg.in/check.v1 v1.0.0-20201130134442-10cb98267c6c h1:Hei/4ADfdWqJk1ZMxUNpqntNwaWcugrBjAiHlqqRiVk= gopkg.in/check.v1 v1.0.0-20201130134442-10cb98267c6c/go.mod h1:JHkPIbrfpd72SG/EVd6muEfDQjcINNoR0C8j2r3qZ4Q= -gopkg.in/fsnotify.v1 v1.4.7/go.mod h1:Tz8NjZHkW78fSQdbUxIjBTcgA1z1m8ZHf0WmKUhAMys= -gopkg.in/tomb.v1 v1.0.0-20141024135613-dd632973f1e7 h1:uRGJdciOHaEIrze2W8Q3AKkepLTh2hOroT7a+7czfdQ= -gopkg.in/tomb.v1 v1.0.0-20141024135613-dd632973f1e7/go.mod h1:dt/ZhP58zS4L8KSrWDmTeBkI65Dw0HsyUHuEVlX15mw= -gopkg.in/yaml.v2 v2.2.1/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI= -gopkg.in/yaml.v2 v2.2.2/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI= -gopkg.in/yaml.v2 v2.2.4/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI= -gopkg.in/yaml.v2 v2.3.0/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI= -gopkg.in/yaml.v2 v2.4.0/go.mod h1:RDklbk79AGWmwhnvt/jBztapEOGDOx6ZbXqjP6csGnQ= -gopkg.in/yaml.v3 v3.0.0-20191026110619-0b21df46bc1d/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM= gopkg.in/yaml.v3 v3.0.0-20200313102051-9f266ea9e77c/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM= gopkg.in/yaml.v3 v3.0.1 h1:fxVm/GzAzEWqLHuvctI91KS9hhNmmWOoWu0XTYJS7CA= gopkg.in/yaml.v3 v3.0.1/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM= diff --git a/pkg/plugins/codegen/jenny_plugingotypes.go b/pkg/plugins/codegen/jenny_plugingotypes.go index d013840c7a0..e4227d4cb1e 100644 --- a/pkg/plugins/codegen/jenny_plugingotypes.go +++ b/pkg/plugins/codegen/jenny_plugingotypes.go @@ -1,13 +1,14 @@ package codegen import ( + "context" "fmt" "path/filepath" "strings" - copenapi "cuelang.org/go/encoding/openapi" + "cuelang.org/go/cue" "github.com/grafana/codejen" - "github.com/grafana/grafana/pkg/codegen/generators" + "github.com/grafana/cog" "github.com/grafana/grafana/pkg/plugins/codegen/pfs" ) @@ -30,20 +31,22 @@ func (j *pgoJenny) Generate(decl *pfs.PluginDecl) (*codejen.File, error) { hasBackend := decl.PluginMeta.Backend // We skip elasticsearch since we have problems with the generated file. // This is temporal until we migrate to the new system. 
- if hasBackend == nil || !*hasBackend || decl.PluginMeta.Id == "elasticsearch" { + if hasBackend == nil || !*hasBackend { return nil, nil } - slotname := strings.ToLower(decl.SchemaInterface.Name) - byt, err := generators.GenerateTypesGo(decl.CueFile, &generators.GoConfig{ - Config: &generators.OpenApiConfig{ - Config: &copenapi.Config{ - MaxCycleDepth: 10, - }, - IsGroup: decl.SchemaInterface.IsGroup, - }, - PackageName: slotname, - }) + slotName := strings.ToLower(decl.SchemaInterface.Name) + cueValue := decl.CueFile.LookupPath(cue.ParsePath("lineage.schemas[0].schema")) + name, err := decl.CueFile.LookupPath(cue.MakePath(cue.Str("name"))).String() + if err != nil { + return nil, err + } + + byt, err := cog.TypesFromSchema(). + CUEValue(slotName, cueValue, cog.ForceEnvelope(name)). + Golang(cog.GoConfig{}). + Run(context.Background()) + if err != nil { return nil, err } @@ -54,6 +57,6 @@ func (j *pgoJenny) Generate(decl *pfs.PluginDecl) (*codejen.File, error) { if pluginfolder == "testdata" { pluginfolder = "testdatasource" } - filename := fmt.Sprintf("types_%s_gen.go", slotname) - return codejen.NewFile(filepath.Join(j.root, pluginfolder, "kinds", slotname, filename), byt, j), nil + filename := fmt.Sprintf("types_%s_gen.go", slotName) + return codejen.NewFile(filepath.Join(j.root, pluginfolder, "kinds", slotName, filename), byt[0].Data, j), nil } diff --git a/pkg/storage/unified/apistore/go.sum b/pkg/storage/unified/apistore/go.sum index 8125f211acf..8ab64227d63 100644 --- a/pkg/storage/unified/apistore/go.sum +++ b/pkg/storage/unified/apistore/go.sum @@ -632,8 +632,8 @@ github.com/huandu/xstrings v1.5.0/go.mod h1:y5/lhBue+AyNmUVz9RLU9xbLR0o4KIIExikq github.com/ianlancetaylor/demangle v0.0.0-20181102032728-5e5cf60278f6/go.mod h1:aSSvb/t6k1mPoxDqO4vJh6VOCGPwU4O0C2/Eqndh1Sc= github.com/inconshreveable/mousetrap v1.1.0 h1:wN+x4NVGpMsO7ErUn/mUI3vEoE6Jt13X2s0bqwp9tc8= github.com/inconshreveable/mousetrap v1.1.0/go.mod h1:vpF70FUmC8bwa3OWnCshd2FqLfsEA9PFc4w1p2J65bw= -github.com/invopop/jsonschema v0.12.0 h1:6ovsNSuvn9wEQVOyc72aycBMVQFKz7cPdMJn10CvzRI= -github.com/invopop/jsonschema v0.12.0/go.mod h1:ffZ5Km5SWWRAIN6wbDXItl95euhFz2uON45H2qjYt+0= +github.com/invopop/jsonschema v0.13.0 h1:KvpoAJWEjR3uD9Kbm2HWJmqsEaHt8lBUpd0qHcIi21E= +github.com/invopop/jsonschema v0.13.0/go.mod h1:ffZ5Km5SWWRAIN6wbDXItl95euhFz2uON45H2qjYt+0= github.com/invopop/yaml v0.3.1 h1:f0+ZpmhfBSS4MhG+4HYseMdJhoeeopbSKbq5Rpeelso= github.com/invopop/yaml v0.3.1/go.mod h1:PMOp3nn4/12yEZUFfmOuNHJsZToEEOwoWsT+D81KkeA= github.com/jackc/fake v0.0.0-20150926172116-812a484cc733/go.mod h1:WrMFNQdiFJ80sQsxDoMokWK1W5TQtxBFNpzWTD84ibQ= diff --git a/pkg/tsdb/azuremonitor/kinds/dataquery/types_dataquery_gen.go b/pkg/tsdb/azuremonitor/kinds/dataquery/types_dataquery_gen.go index 5da56de1e77..a474d5fe419 100644 --- a/pkg/tsdb/azuremonitor/kinds/dataquery/types_dataquery_gen.go +++ b/pkg/tsdb/azuremonitor/kinds/dataquery/types_dataquery_gen.go @@ -7,436 +7,558 @@ // // Run 'make gen-cue' from repository root to regenerate. +// Code generated - EDITING IS FUTILE. DO NOT EDIT. + package dataquery -// Defines values for AppInsightsGroupByQueryKind. -const ( - AppInsightsGroupByQueryKindAppInsightsGroupByQuery AppInsightsGroupByQueryKind = "AppInsightsGroupByQuery" +import ( + json "encoding/json" + errors "errors" + fmt "fmt" ) -// Defines values for AppInsightsMetricNameQueryKind. 
-const ( - AppInsightsMetricNameQueryKindAppInsightsMetricNameQuery AppInsightsMetricNameQueryKind = "AppInsightsMetricNameQuery" -) - -// Defines values for AzureQueryType. -const ( - AzureQueryTypeAzureLogAnalytics AzureQueryType = "Azure Log Analytics" - AzureQueryTypeAzureMetricNames AzureQueryType = "Azure Metric Names" - AzureQueryTypeAzureMonitor AzureQueryType = "Azure Monitor" - AzureQueryTypeAzureNamespaces AzureQueryType = "Azure Namespaces" - AzureQueryTypeAzureRegions AzureQueryType = "Azure Regions" - AzureQueryTypeAzureResourceGraph AzureQueryType = "Azure Resource Graph" - AzureQueryTypeAzureResourceGroups AzureQueryType = "Azure Resource Groups" - AzureQueryTypeAzureResourceNames AzureQueryType = "Azure Resource Names" - AzureQueryTypeAzureSubscriptions AzureQueryType = "Azure Subscriptions" - AzureQueryTypeAzureTraces AzureQueryType = "Azure Traces" - AzureQueryTypeAzureWorkspaces AzureQueryType = "Azure Workspaces" - AzureQueryTypeGrafanaTemplateVariableFunction AzureQueryType = "Grafana Template Variable Function" - AzureQueryTypeTraceql AzureQueryType = "traceql" -) - -// Defines values for GrafanaTemplateVariableQueryType. -const ( - GrafanaTemplateVariableQueryTypeAppInsightsGroupByQuery GrafanaTemplateVariableQueryType = "AppInsightsGroupByQuery" - GrafanaTemplateVariableQueryTypeAppInsightsMetricNameQuery GrafanaTemplateVariableQueryType = "AppInsightsMetricNameQuery" - GrafanaTemplateVariableQueryTypeMetricNamesQuery GrafanaTemplateVariableQueryType = "MetricNamesQuery" - GrafanaTemplateVariableQueryTypeMetricNamespaceQuery GrafanaTemplateVariableQueryType = "MetricNamespaceQuery" - GrafanaTemplateVariableQueryTypeResourceGroupsQuery GrafanaTemplateVariableQueryType = "ResourceGroupsQuery" - GrafanaTemplateVariableQueryTypeResourceNamesQuery GrafanaTemplateVariableQueryType = "ResourceNamesQuery" - GrafanaTemplateVariableQueryTypeSubscriptionsQuery GrafanaTemplateVariableQueryType = "SubscriptionsQuery" - GrafanaTemplateVariableQueryTypeUnknownQuery GrafanaTemplateVariableQueryType = "UnknownQuery" - GrafanaTemplateVariableQueryTypeWorkspacesQuery GrafanaTemplateVariableQueryType = "WorkspacesQuery" -) - -// Defines values for MetricDefinitionsQueryKind. -const ( - MetricDefinitionsQueryKindMetricDefinitionsQuery MetricDefinitionsQueryKind = "MetricDefinitionsQuery" -) - -// Defines values for MetricNamesQueryKind. -const ( - MetricNamesQueryKindMetricNamesQuery MetricNamesQueryKind = "MetricNamesQuery" -) - -// Defines values for MetricNamespaceQueryKind. -const ( - MetricNamespaceQueryKindMetricNamespaceQuery MetricNamespaceQueryKind = "MetricNamespaceQuery" -) - -// Defines values for ResourceGroupsQueryKind. -const ( - ResourceGroupsQueryKindResourceGroupsQuery ResourceGroupsQueryKind = "ResourceGroupsQuery" -) - -// Defines values for ResourceNamesQueryKind. -const ( - ResourceNamesQueryKindResourceNamesQuery ResourceNamesQueryKind = "ResourceNamesQuery" -) - -// Defines values for ResultFormat. -const ( - ResultFormatLogs ResultFormat = "logs" - ResultFormatTable ResultFormat = "table" - ResultFormatTimeSeries ResultFormat = "time_series" - ResultFormatTrace ResultFormat = "trace" -) - -// Defines values for SubscriptionsQueryKind. -const ( - SubscriptionsQueryKindSubscriptionsQuery SubscriptionsQueryKind = "SubscriptionsQuery" -) - -// Defines values for UnknownQueryKind. -const ( - UnknownQueryKindUnknownQuery UnknownQueryKind = "UnknownQuery" -) - -// Defines values for WorkspacesQueryKind. 
-const ( - WorkspacesQueryKindWorkspacesQuery WorkspacesQueryKind = "WorkspacesQuery" -) - -// AppInsightsGroupByQuery defines model for AppInsightsGroupByQuery. -type AppInsightsGroupByQuery struct { - Kind *AppInsightsGroupByQueryKind `json:"kind,omitempty"` - MetricName *string `json:"metricName,omitempty"` - RawQuery *string `json:"rawQuery,omitempty"` -} - -// AppInsightsGroupByQueryKind defines model for AppInsightsGroupByQuery.Kind. -type AppInsightsGroupByQueryKind string - -// AppInsightsMetricNameQuery defines model for AppInsightsMetricNameQuery. -type AppInsightsMetricNameQuery struct { - Kind *AppInsightsMetricNameQueryKind `json:"kind,omitempty"` - RawQuery *string `json:"rawQuery,omitempty"` -} - -// AppInsightsMetricNameQueryKind defines model for AppInsightsMetricNameQuery.Kind. -type AppInsightsMetricNameQueryKind string - -// Azure Monitor Logs sub-query properties -type AzureLogsQuery struct { - // If set to true the query will be run as a basic logs query - BasicLogsQuery *bool `json:"basicLogsQuery,omitempty"` - - // If set to true the dashboard time range will be used as a filter for the query. Otherwise the query time ranges will be used. Defaults to false. - DashboardTime *bool `json:"dashboardTime,omitempty"` - - // @deprecated Use dashboardTime instead - IntersectTime *bool `json:"intersectTime,omitempty"` - - // KQL query to be executed. - Query *string `json:"query,omitempty"` - - // @deprecated Use resources instead - Resource *string `json:"resource,omitempty"` - - // Array of resource URIs to be queried. - Resources []string `json:"resources,omitempty"` - ResultFormat *ResultFormat `json:"resultFormat,omitempty"` - - // If dashboardTime is set to true this value dictates which column the time filter will be applied to. Defaults to the first tables timeSpan column, the first datetime column found, or TimeGenerated - TimeColumn *string `json:"timeColumn,omitempty"` - - // Workspace ID. This was removed in Grafana 8, but remains for backwards compat. - Workspace *string `json:"workspace,omitempty"` -} - -// AzureMetricDimension defines model for AzureMetricDimension. -type AzureMetricDimension struct { - // Name of Dimension to be filtered on. - Dimension *string `json:"dimension,omitempty"` - - // @deprecated filter is deprecated in favour of filters to support multiselect. - Filter *string `json:"filter,omitempty"` - - // Values to match with the filter. - Filters []string `json:"filters,omitempty"` - - // String denoting the filter operation. Supports 'eq' - equals,'ne' - not equals, 'sw' - starts with. Note that some dimensions may not support all operators. - Operator *string `json:"operator,omitempty"` -} - -// AzureMetricQuery defines model for AzureMetricQuery. -type AzureMetricQuery struct { - // The aggregation to be used within the query. Defaults to the primaryAggregationType defined by the metric. - Aggregation *string `json:"aggregation,omitempty"` - - // Aliases can be set to modify the legend labels. e.g. {{ resourceGroup }}. See docs for more detail. - Alias *string `json:"alias,omitempty"` - - // Time grains that are supported by the metric. - AllowedTimeGrainsMs []int64 `json:"allowedTimeGrainsMs,omitempty"` - - // Used as the value for the metricNamespace property when it's different from the resource namespace. 
- CustomNamespace *string `json:"customNamespace,omitempty"` - - // @deprecated This property was migrated to dimensionFilters and should only be accessed in the migration - Dimension *string `json:"dimension,omitempty"` - - // @deprecated This property was migrated to dimensionFilters and should only be accessed in the migration - DimensionFilter *string `json:"dimensionFilter,omitempty"` - - // Filters to reduce the set of data returned. Dimensions that can be filtered on are defined by the metric. - DimensionFilters []AzureMetricDimension `json:"dimensionFilters,omitempty"` - - // @deprecated Use metricNamespace instead - MetricDefinition *string `json:"metricDefinition,omitempty"` - - // The metric to query data for within the specified metricNamespace. e.g. UsedCapacity - MetricName *string `json:"metricName,omitempty"` - - // metricNamespace is used as the resource type (or resource namespace). - // It's usually equal to the target metric namespace. e.g. microsoft.storage/storageaccounts - // Kept the name of the variable as metricNamespace to avoid backward incompatibility issues. - MetricNamespace *string `json:"metricNamespace,omitempty"` - - // The Azure region containing the resource(s). - Region *string `json:"region,omitempty"` - - // @deprecated Use resources instead - ResourceGroup *string `json:"resourceGroup,omitempty"` - - // @deprecated Use resources instead - ResourceName *string `json:"resourceName,omitempty"` - - // @deprecated Use resourceGroup, resourceName and metricNamespace instead - ResourceUri *string `json:"resourceUri,omitempty"` - - // Array of resource URIs to be queried. - Resources []AzureMonitorResource `json:"resources,omitempty"` - - // The granularity of data points to be queried. Defaults to auto. - TimeGrain *string `json:"timeGrain,omitempty"` - - // TimeGrainUnit @deprecated - TimeGrainUnit *string `json:"timeGrainUnit,omitempty"` - - // Maximum number of records to return. Defaults to 10. - Top *string `json:"top,omitempty"` -} - -// AzureMonitorDataQuery defines model for AzureMonitorDataQuery. -type AzureMonitorDataQuery = map[string]any - -// AzureMonitorQuery defines model for AzureMonitorQuery. type AzureMonitorQuery struct { - // Azure Monitor Logs sub-query properties - AzureLogAnalytics *AzureLogsQuery `json:"azureLogAnalytics,omitempty"` - AzureMonitor *AzureMetricQuery `json:"azureMonitor,omitempty"` + // A unique identifier for the query within the list of targets. + // In server side expressions, the refId is used as a variable name to identify results. + // By default, the UI will assign A->Z; however setting meaningful names may be useful. + RefId string `json:"refId"` + // If hide is set to true, Grafana will filter out the response(s) associated with this query before returning it to the panel. + Hide *bool `json:"hide,omitempty"` + // Specify the query flavor + // TODO make this required and give it a default + QueryType *string `json:"queryType,omitempty"` + // Azure subscription containing the resource(s) to be queried. + Subscription *string `json:"subscription,omitempty"` + // Subscriptions to be queried via Azure Resource Graph. + Subscriptions []string `json:"subscriptions,omitempty"` + // Azure Monitor Metrics sub-query properties. + AzureMonitor *AzureMetricQuery `json:"azureMonitor,omitempty"` + // Azure Monitor Logs sub-query properties. + AzureLogAnalytics *AzureLogsQuery `json:"azureLogAnalytics,omitempty"` + // Azure Resource Graph sub-query properties. 
AzureResourceGraph *AzureResourceGraphQuery `json:"azureResourceGraph,omitempty"` - - // Application Insights Traces sub-query properties + // Application Insights Traces sub-query properties. AzureTraces *AzureTracesQuery `json:"azureTraces,omitempty"` - + // @deprecated Legacy template variable support. + GrafanaTemplateVariableFn *GrafanaTemplateVariableQuery `json:"grafanaTemplateVariableFn,omitempty"` + // Template variables params. These exist for backwards compatiblity with legacy template variables. + ResourceGroup *string `json:"resourceGroup,omitempty"` + Namespace *string `json:"namespace,omitempty"` + Resource *string `json:"resource,omitempty"` + Region *string `json:"region,omitempty"` // For mixed data sources the selected datasource is on the query level. // For non mixed scenarios this is undefined. // TODO find a better way to do this ^ that's friendly to schema // TODO this shouldn't be unknown but DataSourceRef | null - Datasource *any `json:"datasource,omitempty"` - GrafanaTemplateVariableFn *any `json:"grafanaTemplateVariableFn,omitempty"` - - // If hide is set to true, Grafana will filter out the response(s) associated with this query before returning it to the panel. - Hide *bool `json:"hide,omitempty"` - Namespace *string `json:"namespace,omitempty"` - + Datasource any `json:"datasource,omitempty"` // Used only for exemplar queries from Prometheus Query *string `json:"query,omitempty"` - - // Specify the query flavor - // TODO make this required and give it a default - QueryType *string `json:"queryType,omitempty"` - - // A unique identifier for the query within the list of targets. - // In server side expressions, the refId is used as a variable name to identify results. - // By default, the UI will assign A->Z; however setting meaningful names may be useful. - RefId *string `json:"refId,omitempty"` - Region *string `json:"region,omitempty"` - Resource *string `json:"resource,omitempty"` - - // Template variables params. These exist for backwards compatiblity with legacy template variables. - ResourceGroup *string `json:"resourceGroup,omitempty"` - - // Azure subscription containing the resource(s) to be queried. - Subscription *string `json:"subscription,omitempty"` - - // Subscriptions to be queried via Azure Resource Graph. - Subscriptions []string `json:"subscriptions,omitempty"` } -// AzureMonitorResource defines model for AzureMonitorResource. -type AzureMonitorResource struct { - MetricNamespace *string `json:"metricNamespace,omitempty"` - Region *string `json:"region,omitempty"` - ResourceGroup *string `json:"resourceGroup,omitempty"` - ResourceName *string `json:"resourceName,omitempty"` - Subscription *string `json:"subscription,omitempty"` +// NewAzureMonitorQuery creates a new AzureMonitorQuery object. +func NewAzureMonitorQuery() *AzureMonitorQuery { + return &AzureMonitorQuery{} } // Defines the supported queryTypes. GrafanaTemplateVariableFn is deprecated type AzureQueryType string -// AzureResourceGraphQuery defines model for AzureResourceGraphQuery. -type AzureResourceGraphQuery struct { - // Azure Resource Graph KQL query to be executed. 
- Query *string `json:"query,omitempty"` +const ( + AzureQueryTypeAzureMonitor AzureQueryType = "Azure Monitor" + AzureQueryTypeLogAnalytics AzureQueryType = "Azure Log Analytics" + AzureQueryTypeAzureResourceGraph AzureQueryType = "Azure Resource Graph" + AzureQueryTypeAzureTraces AzureQueryType = "Azure Traces" + AzureQueryTypeSubscriptionsQuery AzureQueryType = "Azure Subscriptions" + AzureQueryTypeResourceGroupsQuery AzureQueryType = "Azure Resource Groups" + AzureQueryTypeNamespacesQuery AzureQueryType = "Azure Namespaces" + AzureQueryTypeResourceNamesQuery AzureQueryType = "Azure Resource Names" + AzureQueryTypeMetricNamesQuery AzureQueryType = "Azure Metric Names" + AzureQueryTypeWorkspacesQuery AzureQueryType = "Azure Workspaces" + AzureQueryTypeLocationsQuery AzureQueryType = "Azure Regions" + AzureQueryTypeGrafanaTemplateVariableFn AzureQueryType = "Grafana Template Variable Function" + AzureQueryTypeTraceExemplar AzureQueryType = "traceql" +) - // Specifies the format results should be returned as. Defaults to table. - ResultFormat *string `json:"resultFormat,omitempty"` +type AzureMetricQuery struct { + // Array of resource URIs to be queried. + Resources []AzureMonitorResource `json:"resources,omitempty"` + // metricNamespace is used as the resource type (or resource namespace). + // It's usually equal to the target metric namespace. e.g. microsoft.storage/storageaccounts + // Kept the name of the variable as metricNamespace to avoid backward incompatibility issues. + MetricNamespace *string `json:"metricNamespace,omitempty"` + // Used as the value for the metricNamespace property when it's different from the resource namespace. + CustomNamespace *string `json:"customNamespace,omitempty"` + // The metric to query data for within the specified metricNamespace. e.g. UsedCapacity + MetricName *string `json:"metricName,omitempty"` + // The Azure region containing the resource(s). + Region *string `json:"region,omitempty"` + // The granularity of data points to be queried. Defaults to auto. + TimeGrain *string `json:"timeGrain,omitempty"` + // The aggregation to be used within the query. Defaults to the primaryAggregationType defined by the metric. + Aggregation *string `json:"aggregation,omitempty"` + // Filters to reduce the set of data returned. Dimensions that can be filtered on are defined by the metric. + DimensionFilters []AzureMetricDimension `json:"dimensionFilters,omitempty"` + // Maximum number of records to return. Defaults to 10. + Top *string `json:"top,omitempty"` + // Time grains that are supported by the metric. + AllowedTimeGrainsMs []int64 `json:"allowedTimeGrainsMs,omitempty"` + // Aliases can be set to modify the legend labels. e.g. {{ resourceGroup }}. See docs for more detail. 
+ Alias *string `json:"alias,omitempty"` + // @deprecated + TimeGrainUnit *string `json:"timeGrainUnit,omitempty"` + // @deprecated This property was migrated to dimensionFilters and should only be accessed in the migration + Dimension *string `json:"dimension,omitempty"` + // @deprecated This property was migrated to dimensionFilters and should only be accessed in the migration + DimensionFilter *string `json:"dimensionFilter,omitempty"` + // @deprecated Use metricNamespace instead + MetricDefinition *string `json:"metricDefinition,omitempty"` + // @deprecated Use resourceGroup, resourceName and metricNamespace instead + ResourceUri *string `json:"resourceUri,omitempty"` + // @deprecated Use resources instead + ResourceGroup *string `json:"resourceGroup,omitempty"` + // @deprecated Use resources instead + ResourceName *string `json:"resourceName,omitempty"` } -// AzureTracesFilter defines model for AzureTracesFilter. -type AzureTracesFilter struct { - // Values to filter by. - Filters []string `json:"filters"` +// NewAzureMetricQuery creates a new AzureMetricQuery object. +func NewAzureMetricQuery() *AzureMetricQuery { + return &AzureMetricQuery{} +} - // Comparison operator to use. Either equals or not equals. - Operation string `json:"operation"` +// Azure Monitor Logs sub-query properties +type AzureLogsQuery struct { + // KQL query to be executed. + Query *string `json:"query,omitempty"` + // Specifies the format results should be returned as. + ResultFormat *ResultFormat `json:"resultFormat,omitempty"` + // Array of resource URIs to be queried. + Resources []string `json:"resources,omitempty"` + // If set to true the dashboard time range will be used as a filter for the query. Otherwise the query time ranges will be used. Defaults to false. + DashboardTime *bool `json:"dashboardTime,omitempty"` + // If dashboardTime is set to true this value dictates which column the time filter will be applied to. Defaults to the first tables timeSpan column, the first datetime column found, or TimeGenerated + TimeColumn *string `json:"timeColumn,omitempty"` + // If set to true the query will be run as a basic logs query + BasicLogsQuery *bool `json:"basicLogsQuery,omitempty"` + // Workspace ID. This was removed in Grafana 8, but remains for backwards compat. + Workspace *string `json:"workspace,omitempty"` + // @deprecated Use resources instead + Resource *string `json:"resource,omitempty"` + // @deprecated Use dashboardTime instead + IntersectTime *bool `json:"intersectTime,omitempty"` +} - // Property name, auto-populated based on available traces. - Property string `json:"property"` +// NewAzureLogsQuery creates a new AzureLogsQuery object. +func NewAzureLogsQuery() *AzureLogsQuery { + return &AzureLogsQuery{} } // Application Insights Traces sub-query properties type AzureTracesQuery struct { - // Filters for property values. - Filters []AzureTracesFilter `json:"filters,omitempty"` - + // Specifies the format results should be returned as. + ResultFormat *ResultFormat `json:"resultFormat,omitempty"` + // Array of resource URIs to be queried. + Resources []string `json:"resources,omitempty"` // Operation ID. Used only for Traces queries. OperationId *string `json:"operationId,omitempty"` - - // KQL query to be executed. - Query *string `json:"query,omitempty"` - - // Array of resource URIs to be queried. - Resources []string `json:"resources,omitempty"` - ResultFormat *ResultFormat `json:"resultFormat,omitempty"` - // Types of events to filter by. 
TraceTypes []string `json:"traceTypes,omitempty"` + // Filters for property values. + Filters []AzureTracesFilter `json:"filters,omitempty"` + // KQL query to be executed. + Query *string `json:"query,omitempty"` } -// BaseGrafanaTemplateVariableQuery defines model for BaseGrafanaTemplateVariableQuery. +// NewAzureTracesQuery creates a new AzureTracesQuery object. +func NewAzureTracesQuery() *AzureTracesQuery { + return &AzureTracesQuery{} +} + +type AzureTracesFilter struct { + // Property name, auto-populated based on available traces. + Property string `json:"property"` + // Comparison operator to use. Either equals or not equals. + Operation string `json:"operation"` + // Values to filter by. + Filters []string `json:"filters"` +} + +// NewAzureTracesFilter creates a new AzureTracesFilter object. +func NewAzureTracesFilter() *AzureTracesFilter { + return &AzureTracesFilter{} +} + +type ResultFormat string + +const ( + ResultFormatTable ResultFormat = "table" + ResultFormatTimeSeries ResultFormat = "time_series" + ResultFormatTrace ResultFormat = "trace" + ResultFormatLogs ResultFormat = "logs" +) + +type AzureResourceGraphQuery struct { + // Azure Resource Graph KQL query to be executed. + Query *string `json:"query,omitempty"` + // Specifies the format results should be returned as. Defaults to table. + ResultFormat *string `json:"resultFormat,omitempty"` +} + +// NewAzureResourceGraphQuery creates a new AzureResourceGraphQuery object. +func NewAzureResourceGraphQuery() *AzureResourceGraphQuery { + return &AzureResourceGraphQuery{} +} + +type AzureMonitorResource struct { + Subscription *string `json:"subscription,omitempty"` + ResourceGroup *string `json:"resourceGroup,omitempty"` + ResourceName *string `json:"resourceName,omitempty"` + MetricNamespace *string `json:"metricNamespace,omitempty"` + Region *string `json:"region,omitempty"` +} + +// NewAzureMonitorResource creates a new AzureMonitorResource object. +func NewAzureMonitorResource() *AzureMonitorResource { + return &AzureMonitorResource{} +} + +type AzureMetricDimension struct { + // Name of Dimension to be filtered on. + Dimension *string `json:"dimension,omitempty"` + // String denoting the filter operation. Supports 'eq' - equals,'ne' - not equals, 'sw' - starts with. Note that some dimensions may not support all operators. + Operator *string `json:"operator,omitempty"` + // Values to match with the filter. + Filters []string `json:"filters,omitempty"` + // @deprecated filter is deprecated in favour of filters to support multiselect. + Filter *string `json:"filter,omitempty"` +} + +// NewAzureMetricDimension creates a new AzureMetricDimension object. 
+func NewAzureMetricDimension() *AzureMetricDimension { + return &AzureMetricDimension{} +} + +type GrafanaTemplateVariableQueryType string + +const ( + GrafanaTemplateVariableQueryTypeAppInsightsMetricNameQuery GrafanaTemplateVariableQueryType = "AppInsightsMetricNameQuery" + GrafanaTemplateVariableQueryTypeAppInsightsGroupByQuery GrafanaTemplateVariableQueryType = "AppInsightsGroupByQuery" + GrafanaTemplateVariableQueryTypeSubscriptionsQuery GrafanaTemplateVariableQueryType = "SubscriptionsQuery" + GrafanaTemplateVariableQueryTypeResourceGroupsQuery GrafanaTemplateVariableQueryType = "ResourceGroupsQuery" + GrafanaTemplateVariableQueryTypeResourceNamesQuery GrafanaTemplateVariableQueryType = "ResourceNamesQuery" + GrafanaTemplateVariableQueryTypeMetricNamespaceQuery GrafanaTemplateVariableQueryType = "MetricNamespaceQuery" + GrafanaTemplateVariableQueryTypeMetricNamesQuery GrafanaTemplateVariableQueryType = "MetricNamesQuery" + GrafanaTemplateVariableQueryTypeWorkspacesQuery GrafanaTemplateVariableQueryType = "WorkspacesQuery" + GrafanaTemplateVariableQueryTypeUnknownQuery GrafanaTemplateVariableQueryType = "UnknownQuery" +) + type BaseGrafanaTemplateVariableQuery struct { RawQuery *string `json:"rawQuery,omitempty"` } -// These are the common properties available to all queries in all datasources. -// Specific implementations will *extend* this interface, adding the required -// properties for the given context. -type DataQuery struct { - // For mixed data sources the selected datasource is on the query level. - // For non mixed scenarios this is undefined. - // TODO find a better way to do this ^ that's friendly to schema - // TODO this shouldn't be unknown but DataSourceRef | null - Datasource *any `json:"datasource,omitempty"` - - // If hide is set to true, Grafana will filter out the response(s) associated with this query before returning it to the panel. - Hide *bool `json:"hide,omitempty"` - - // Specify the query flavor - // TODO make this required and give it a default - QueryType *string `json:"queryType,omitempty"` - - // A unique identifier for the query within the list of targets. - // In server side expressions, the refId is used as a variable name to identify results. - // By default, the UI will assign A->Z; however setting meaningful names may be useful. - RefId string `json:"refId"` +// NewBaseGrafanaTemplateVariableQuery creates a new BaseGrafanaTemplateVariableQuery object. +func NewBaseGrafanaTemplateVariableQuery() *BaseGrafanaTemplateVariableQuery { + return &BaseGrafanaTemplateVariableQuery{} } -// GrafanaTemplateVariableQueryType defines model for GrafanaTemplateVariableQueryType. -type GrafanaTemplateVariableQueryType string +type UnknownQuery struct { + RawQuery *string `json:"rawQuery,omitempty"` + Kind string `json:"kind"` +} + +// NewUnknownQuery creates a new UnknownQuery object. +func NewUnknownQuery() *UnknownQuery { + return &UnknownQuery{ + Kind: "UnknownQuery", + } +} + +type AppInsightsMetricNameQuery struct { + RawQuery *string `json:"rawQuery,omitempty"` + Kind string `json:"kind"` +} + +// NewAppInsightsMetricNameQuery creates a new AppInsightsMetricNameQuery object. +func NewAppInsightsMetricNameQuery() *AppInsightsMetricNameQuery { + return &AppInsightsMetricNameQuery{ + Kind: "AppInsightsMetricNameQuery", + } +} + +type AppInsightsGroupByQuery struct { + RawQuery *string `json:"rawQuery,omitempty"` + Kind string `json:"kind"` + MetricName string `json:"metricName"` +} + +// NewAppInsightsGroupByQuery creates a new AppInsightsGroupByQuery object. 
+func NewAppInsightsGroupByQuery() *AppInsightsGroupByQuery { + return &AppInsightsGroupByQuery{ + Kind: "AppInsightsGroupByQuery", + } +} + +type SubscriptionsQuery struct { + RawQuery *string `json:"rawQuery,omitempty"` + Kind string `json:"kind"` +} + +// NewSubscriptionsQuery creates a new SubscriptionsQuery object. +func NewSubscriptionsQuery() *SubscriptionsQuery { + return &SubscriptionsQuery{ + Kind: "SubscriptionsQuery", + } +} + +type ResourceGroupsQuery struct { + RawQuery *string `json:"rawQuery,omitempty"` + Kind string `json:"kind"` + Subscription string `json:"subscription"` +} + +// NewResourceGroupsQuery creates a new ResourceGroupsQuery object. +func NewResourceGroupsQuery() *ResourceGroupsQuery { + return &ResourceGroupsQuery{ + Kind: "ResourceGroupsQuery", + } +} + +type ResourceNamesQuery struct { + RawQuery *string `json:"rawQuery,omitempty"` + Kind string `json:"kind"` + Subscription string `json:"subscription"` + ResourceGroup string `json:"resourceGroup"` + MetricNamespace string `json:"metricNamespace"` +} + +// NewResourceNamesQuery creates a new ResourceNamesQuery object. +func NewResourceNamesQuery() *ResourceNamesQuery { + return &ResourceNamesQuery{ + Kind: "ResourceNamesQuery", + } +} + +type MetricNamespaceQuery struct { + RawQuery *string `json:"rawQuery,omitempty"` + Kind string `json:"kind"` + Subscription string `json:"subscription"` + ResourceGroup string `json:"resourceGroup"` + MetricNamespace *string `json:"metricNamespace,omitempty"` + ResourceName *string `json:"resourceName,omitempty"` +} + +// NewMetricNamespaceQuery creates a new MetricNamespaceQuery object. +func NewMetricNamespaceQuery() *MetricNamespaceQuery { + return &MetricNamespaceQuery{ + Kind: "MetricNamespaceQuery", + } +} // @deprecated Use MetricNamespaceQuery instead type MetricDefinitionsQuery struct { - Kind *MetricDefinitionsQueryKind `json:"kind,omitempty"` - MetricNamespace *string `json:"metricNamespace,omitempty"` - RawQuery *string `json:"rawQuery,omitempty"` - ResourceGroup *string `json:"resourceGroup,omitempty"` - ResourceName *string `json:"resourceName,omitempty"` - Subscription *string `json:"subscription,omitempty"` + RawQuery *string `json:"rawQuery,omitempty"` + Kind string `json:"kind"` + Subscription string `json:"subscription"` + ResourceGroup string `json:"resourceGroup"` + MetricNamespace *string `json:"metricNamespace,omitempty"` + ResourceName *string `json:"resourceName,omitempty"` } -// MetricDefinitionsQueryKind defines model for MetricDefinitionsQuery.Kind. -type MetricDefinitionsQueryKind string +// NewMetricDefinitionsQuery creates a new MetricDefinitionsQuery object. +func NewMetricDefinitionsQuery() *MetricDefinitionsQuery { + return &MetricDefinitionsQuery{ + Kind: "MetricDefinitionsQuery", + } +} -// MetricNamesQuery defines model for MetricNamesQuery. type MetricNamesQuery struct { - Kind *MetricNamesQueryKind `json:"kind,omitempty"` - MetricNamespace *string `json:"metricNamespace,omitempty"` - RawQuery *string `json:"rawQuery,omitempty"` - ResourceGroup *string `json:"resourceGroup,omitempty"` - ResourceName *string `json:"resourceName,omitempty"` - Subscription *string `json:"subscription,omitempty"` + RawQuery *string `json:"rawQuery,omitempty"` + Kind string `json:"kind"` + Subscription string `json:"subscription"` + ResourceGroup string `json:"resourceGroup"` + ResourceName string `json:"resourceName"` + MetricNamespace string `json:"metricNamespace"` } -// MetricNamesQueryKind defines model for MetricNamesQuery.Kind. 
-type MetricNamesQueryKind string - -// MetricNamespaceQuery defines model for MetricNamespaceQuery. -type MetricNamespaceQuery struct { - Kind *MetricNamespaceQueryKind `json:"kind,omitempty"` - MetricNamespace *string `json:"metricNamespace,omitempty"` - RawQuery *string `json:"rawQuery,omitempty"` - ResourceGroup *string `json:"resourceGroup,omitempty"` - ResourceName *string `json:"resourceName,omitempty"` - Subscription *string `json:"subscription,omitempty"` +// NewMetricNamesQuery creates a new MetricNamesQuery object. +func NewMetricNamesQuery() *MetricNamesQuery { + return &MetricNamesQuery{ + Kind: "MetricNamesQuery", + } } -// MetricNamespaceQueryKind defines model for MetricNamespaceQuery.Kind. -type MetricNamespaceQueryKind string - -// ResourceGroupsQuery defines model for ResourceGroupsQuery. -type ResourceGroupsQuery struct { - Kind *ResourceGroupsQueryKind `json:"kind,omitempty"` - RawQuery *string `json:"rawQuery,omitempty"` - Subscription *string `json:"subscription,omitempty"` -} - -// ResourceGroupsQueryKind defines model for ResourceGroupsQuery.Kind. -type ResourceGroupsQueryKind string - -// ResourceNamesQuery defines model for ResourceNamesQuery. -type ResourceNamesQuery struct { - Kind *ResourceNamesQueryKind `json:"kind,omitempty"` - MetricNamespace *string `json:"metricNamespace,omitempty"` - RawQuery *string `json:"rawQuery,omitempty"` - ResourceGroup *string `json:"resourceGroup,omitempty"` - Subscription *string `json:"subscription,omitempty"` -} - -// ResourceNamesQueryKind defines model for ResourceNamesQuery.Kind. -type ResourceNamesQueryKind string - -// ResultFormat defines model for ResultFormat. -type ResultFormat string - -// SubscriptionsQuery defines model for SubscriptionsQuery. -type SubscriptionsQuery struct { - Kind *SubscriptionsQueryKind `json:"kind,omitempty"` - RawQuery *string `json:"rawQuery,omitempty"` -} - -// SubscriptionsQueryKind defines model for SubscriptionsQuery.Kind. -type SubscriptionsQueryKind string - -// UnknownQuery defines model for UnknownQuery. -type UnknownQuery struct { - Kind *UnknownQueryKind `json:"kind,omitempty"` - RawQuery *string `json:"rawQuery,omitempty"` -} - -// UnknownQueryKind defines model for UnknownQuery.Kind. -type UnknownQueryKind string - -// WorkspacesQuery defines model for WorkspacesQuery. type WorkspacesQuery struct { - Kind *WorkspacesQueryKind `json:"kind,omitempty"` - RawQuery *string `json:"rawQuery,omitempty"` - Subscription *string `json:"subscription,omitempty"` + RawQuery *string `json:"rawQuery,omitempty"` + Kind string `json:"kind"` + Subscription string `json:"subscription"` } -// WorkspacesQueryKind defines model for WorkspacesQuery.Kind. -type WorkspacesQueryKind string +// NewWorkspacesQuery creates a new WorkspacesQuery object. +func NewWorkspacesQuery() *WorkspacesQuery { + return &WorkspacesQuery{ + Kind: "WorkspacesQuery", + } +} + +type GrafanaTemplateVariableQuery = AppInsightsMetricNameQueryOrAppInsightsGroupByQueryOrSubscriptionsQueryOrResourceGroupsQueryOrResourceNamesQueryOrMetricNamespaceQueryOrMetricDefinitionsQueryOrMetricNamesQueryOrWorkspacesQueryOrUnknownQuery + +// NewGrafanaTemplateVariableQuery creates a new GrafanaTemplateVariableQuery object. 
+func NewGrafanaTemplateVariableQuery() *GrafanaTemplateVariableQuery { + return NewAppInsightsMetricNameQueryOrAppInsightsGroupByQueryOrSubscriptionsQueryOrResourceGroupsQueryOrResourceNamesQueryOrMetricNamespaceQueryOrMetricDefinitionsQueryOrMetricNamesQueryOrWorkspacesQueryOrUnknownQuery() +} + +type AppInsightsMetricNameQueryOrAppInsightsGroupByQueryOrSubscriptionsQueryOrResourceGroupsQueryOrResourceNamesQueryOrMetricNamespaceQueryOrMetricDefinitionsQueryOrMetricNamesQueryOrWorkspacesQueryOrUnknownQuery struct { + AppInsightsMetricNameQuery *AppInsightsMetricNameQuery `json:"AppInsightsMetricNameQuery,omitempty"` + AppInsightsGroupByQuery *AppInsightsGroupByQuery `json:"AppInsightsGroupByQuery,omitempty"` + SubscriptionsQuery *SubscriptionsQuery `json:"SubscriptionsQuery,omitempty"` + ResourceGroupsQuery *ResourceGroupsQuery `json:"ResourceGroupsQuery,omitempty"` + ResourceNamesQuery *ResourceNamesQuery `json:"ResourceNamesQuery,omitempty"` + MetricNamespaceQuery *MetricNamespaceQuery `json:"MetricNamespaceQuery,omitempty"` + MetricDefinitionsQuery *MetricDefinitionsQuery `json:"MetricDefinitionsQuery,omitempty"` + MetricNamesQuery *MetricNamesQuery `json:"MetricNamesQuery,omitempty"` + WorkspacesQuery *WorkspacesQuery `json:"WorkspacesQuery,omitempty"` + UnknownQuery *UnknownQuery `json:"UnknownQuery,omitempty"` +} + +// NewAppInsightsMetricNameQueryOrAppInsightsGroupByQueryOrSubscriptionsQueryOrResourceGroupsQueryOrResourceNamesQueryOrMetricNamespaceQueryOrMetricDefinitionsQueryOrMetricNamesQueryOrWorkspacesQueryOrUnknownQuery creates a new AppInsightsMetricNameQueryOrAppInsightsGroupByQueryOrSubscriptionsQueryOrResourceGroupsQueryOrResourceNamesQueryOrMetricNamespaceQueryOrMetricDefinitionsQueryOrMetricNamesQueryOrWorkspacesQueryOrUnknownQuery object. +func NewAppInsightsMetricNameQueryOrAppInsightsGroupByQueryOrSubscriptionsQueryOrResourceGroupsQueryOrResourceNamesQueryOrMetricNamespaceQueryOrMetricDefinitionsQueryOrMetricNamesQueryOrWorkspacesQueryOrUnknownQuery() *AppInsightsMetricNameQueryOrAppInsightsGroupByQueryOrSubscriptionsQueryOrResourceGroupsQueryOrResourceNamesQueryOrMetricNamespaceQueryOrMetricDefinitionsQueryOrMetricNamesQueryOrWorkspacesQueryOrUnknownQuery { + return &AppInsightsMetricNameQueryOrAppInsightsGroupByQueryOrSubscriptionsQueryOrResourceGroupsQueryOrResourceNamesQueryOrMetricNamespaceQueryOrMetricDefinitionsQueryOrMetricNamesQueryOrWorkspacesQueryOrUnknownQuery{} +} + +// MarshalJSON implements a custom JSON marshalling logic to encode `AppInsightsMetricNameQueryOrAppInsightsGroupByQueryOrSubscriptionsQueryOrResourceGroupsQueryOrResourceNamesQueryOrMetricNamespaceQueryOrMetricDefinitionsQueryOrMetricNamesQueryOrWorkspacesQueryOrUnknownQuery` as JSON. 
+func (resource AppInsightsMetricNameQueryOrAppInsightsGroupByQueryOrSubscriptionsQueryOrResourceGroupsQueryOrResourceNamesQueryOrMetricNamespaceQueryOrMetricDefinitionsQueryOrMetricNamesQueryOrWorkspacesQueryOrUnknownQuery) MarshalJSON() ([]byte, error) { + if resource.AppInsightsMetricNameQuery != nil { + return json.Marshal(resource.AppInsightsMetricNameQuery) + } + if resource.AppInsightsGroupByQuery != nil { + return json.Marshal(resource.AppInsightsGroupByQuery) + } + if resource.SubscriptionsQuery != nil { + return json.Marshal(resource.SubscriptionsQuery) + } + if resource.ResourceGroupsQuery != nil { + return json.Marshal(resource.ResourceGroupsQuery) + } + if resource.ResourceNamesQuery != nil { + return json.Marshal(resource.ResourceNamesQuery) + } + if resource.MetricNamespaceQuery != nil { + return json.Marshal(resource.MetricNamespaceQuery) + } + if resource.MetricDefinitionsQuery != nil { + return json.Marshal(resource.MetricDefinitionsQuery) + } + if resource.MetricNamesQuery != nil { + return json.Marshal(resource.MetricNamesQuery) + } + if resource.WorkspacesQuery != nil { + return json.Marshal(resource.WorkspacesQuery) + } + if resource.UnknownQuery != nil { + return json.Marshal(resource.UnknownQuery) + } + + return nil, fmt.Errorf("no value for disjunction of refs") +} + +// UnmarshalJSON implements a custom JSON unmarshalling logic to decode `AppInsightsMetricNameQueryOrAppInsightsGroupByQueryOrSubscriptionsQueryOrResourceGroupsQueryOrResourceNamesQueryOrMetricNamespaceQueryOrMetricDefinitionsQueryOrMetricNamesQueryOrWorkspacesQueryOrUnknownQuery` from JSON. +func (resource *AppInsightsMetricNameQueryOrAppInsightsGroupByQueryOrSubscriptionsQueryOrResourceGroupsQueryOrResourceNamesQueryOrMetricNamespaceQueryOrMetricDefinitionsQueryOrMetricNamesQueryOrWorkspacesQueryOrUnknownQuery) UnmarshalJSON(raw []byte) error { + if raw == nil { + return nil + } + + // FIXME: this is wasteful, we need to find a more efficient way to unmarshal this. 
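	// Decoding strategy used below: the payload is first parsed into a generic map so the
	// `kind` discriminator can be read, then the raw bytes are unmarshalled a second time
	// into the concrete variant selected by that discriminator.
	//
	// Minimal usage sketch (illustrative only, not part of this patch; assumes only the
	// generated types shown above):
	//
	//	var q GrafanaTemplateVariableQuery
	//	payload := []byte(`{"kind":"WorkspacesQuery","subscription":"sub-id"}`)
	//	if err := json.Unmarshal(payload, &q); err != nil {
	//		// handle error
	//	}
	//	if q.WorkspacesQuery != nil {
	//		// q.WorkspacesQuery.Subscription == "sub-id"
	//	}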
+ parsedAsMap := make(map[string]any) + if err := json.Unmarshal(raw, &parsedAsMap); err != nil { + return err + } + + discriminator, found := parsedAsMap["kind"] + if !found { + return errors.New("discriminator field 'kind' not found in payload") + } + + switch discriminator { + case "AppInsightsGroupByQuery": + var appInsightsGroupByQuery AppInsightsGroupByQuery + if err := json.Unmarshal(raw, &appInsightsGroupByQuery); err != nil { + return err + } + + resource.AppInsightsGroupByQuery = &appInsightsGroupByQuery + return nil + case "AppInsightsMetricNameQuery": + var appInsightsMetricNameQuery AppInsightsMetricNameQuery + if err := json.Unmarshal(raw, &appInsightsMetricNameQuery); err != nil { + return err + } + + resource.AppInsightsMetricNameQuery = &appInsightsMetricNameQuery + return nil + case "MetricDefinitionsQuery": + var metricDefinitionsQuery MetricDefinitionsQuery + if err := json.Unmarshal(raw, &metricDefinitionsQuery); err != nil { + return err + } + + resource.MetricDefinitionsQuery = &metricDefinitionsQuery + return nil + case "MetricNamesQuery": + var metricNamesQuery MetricNamesQuery + if err := json.Unmarshal(raw, &metricNamesQuery); err != nil { + return err + } + + resource.MetricNamesQuery = &metricNamesQuery + return nil + case "MetricNamespaceQuery": + var metricNamespaceQuery MetricNamespaceQuery + if err := json.Unmarshal(raw, &metricNamespaceQuery); err != nil { + return err + } + + resource.MetricNamespaceQuery = &metricNamespaceQuery + return nil + case "ResourceGroupsQuery": + var resourceGroupsQuery ResourceGroupsQuery + if err := json.Unmarshal(raw, &resourceGroupsQuery); err != nil { + return err + } + + resource.ResourceGroupsQuery = &resourceGroupsQuery + return nil + case "ResourceNamesQuery": + var resourceNamesQuery ResourceNamesQuery + if err := json.Unmarshal(raw, &resourceNamesQuery); err != nil { + return err + } + + resource.ResourceNamesQuery = &resourceNamesQuery + return nil + case "SubscriptionsQuery": + var subscriptionsQuery SubscriptionsQuery + if err := json.Unmarshal(raw, &subscriptionsQuery); err != nil { + return err + } + + resource.SubscriptionsQuery = &subscriptionsQuery + return nil + case "UnknownQuery": + var unknownQuery UnknownQuery + if err := json.Unmarshal(raw, &unknownQuery); err != nil { + return err + } + + resource.UnknownQuery = &unknownQuery + return nil + case "WorkspacesQuery": + var workspacesQuery WorkspacesQuery + if err := json.Unmarshal(raw, &workspacesQuery); err != nil { + return err + } + + resource.WorkspacesQuery = &workspacesQuery + return nil + } + + return fmt.Errorf("could not unmarshal resource with `kind = %v`", discriminator) +} diff --git a/pkg/tsdb/azuremonitor/loganalytics/azure-log-analytics-datasource.go b/pkg/tsdb/azuremonitor/loganalytics/azure-log-analytics-datasource.go index c8c9592804f..456fff77ac1 100644 --- a/pkg/tsdb/azuremonitor/loganalytics/azure-log-analytics-datasource.go +++ b/pkg/tsdb/azuremonitor/loganalytics/azure-log-analytics-datasource.go @@ -80,7 +80,7 @@ func (e *AzureLogAnalyticsDatasource) GetBasicLogsUsage(ctx context.Context, url }, TimeColumn: "TimeGenerated", Resources: []string{payload.Resource}, - QueryType: dataquery.AzureQueryTypeAzureLogAnalytics, + QueryType: dataquery.AzureQueryTypeLogAnalytics, URL: getApiURL(payload.Resource, false, false), } @@ -170,7 +170,7 @@ func buildLogAnalyticsQuery(query backend.DataQuery, dsInfo types.DatasourceInfo basicLogsQuery := false basicLogsEnabled := false - resultFormat := 
ParseResultFormat(azureLogAnalyticsTarget.ResultFormat, dataquery.AzureQueryTypeAzureLogAnalytics) + resultFormat := ParseResultFormat(azureLogAnalyticsTarget.ResultFormat, dataquery.AzureQueryTypeLogAnalytics) basicLogsQueryFlag := false if azureLogAnalyticsTarget.BasicLogsQuery != nil { @@ -238,7 +238,7 @@ func (e *AzureLogAnalyticsDatasource) buildQuery(ctx context.Context, query back return nil, fmt.Errorf("failed to compile Application Insights regex") } - if query.QueryType == string(dataquery.AzureQueryTypeAzureLogAnalytics) { + if query.QueryType == string(dataquery.AzureQueryTypeLogAnalytics) { azureLogAnalyticsQuery, err = buildLogAnalyticsQuery(query, dsInfo, appInsightsRegExp, fromAlert) if err != nil { errorMessage := fmt.Errorf("failed to build azure log analytics query: %w", err) @@ -246,8 +246,8 @@ func (e *AzureLogAnalyticsDatasource) buildQuery(ctx context.Context, query back } } - if query.QueryType == string(dataquery.AzureQueryTypeAzureTraces) || query.QueryType == string(dataquery.AzureQueryTypeTraceql) { - if query.QueryType == string(dataquery.AzureQueryTypeTraceql) { + if query.QueryType == string(dataquery.AzureQueryTypeAzureTraces) || query.QueryType == string(dataquery.AzureQueryTypeTraceExemplar) { + if query.QueryType == string(dataquery.AzureQueryTypeTraceExemplar) { cfg := backend.GrafanaConfigFromContext(ctx) hasPromExemplarsToggle := cfg.FeatureToggles().IsEnabled("azureMonitorPrometheusExemplars") if !hasPromExemplarsToggle { @@ -346,7 +346,7 @@ func (e *AzureLogAnalyticsDatasource) executeQuery(ctx context.Context, query *A // Set the preferred visualization switch query.ResultFormat { case dataquery.ResultFormatTrace: - if query.QueryType == dataquery.AzureQueryTypeAzureTraces || query.QueryType == dataquery.AzureQueryTypeTraceql { + if query.QueryType == dataquery.AzureQueryTypeAzureTraces || query.QueryType == dataquery.AzureQueryTypeTraceExemplar { frame.Meta.PreferredVisualization = data.VisTypeTrace } case dataquery.ResultFormatTable: @@ -427,7 +427,7 @@ func addTraceDataLinksToFields(query *AzureLogAnalyticsQuery, azurePortalBaseUrl queryJSONModel.AzureTraces.OperationId = &traceIdVariable } - logsQueryType := string(dataquery.AzureQueryTypeAzureLogAnalytics) + logsQueryType := string(dataquery.AzureQueryTypeLogAnalytics) logsJSONModel := dataquery.AzureMonitorQuery{ QueryType: &logsQueryType, AzureLogAnalytics: &dataquery.AzureLogsQuery{ @@ -501,7 +501,7 @@ func (e *AzureLogAnalyticsDatasource) createRequest(ctx context.Context, queryUR body["query_datetimescope_column"] = query.TimeColumn } - if len(query.Resources) > 1 && query.QueryType == dataquery.AzureQueryTypeAzureLogAnalytics && !query.AppInsightsQuery { + if len(query.Resources) > 1 && query.QueryType == dataquery.AzureQueryTypeLogAnalytics && !query.AppInsightsQuery { str := strings.ToLower(query.Resources[0]) if strings.Contains(str, "microsoft.operationalinsights/workspaces") { diff --git a/pkg/tsdb/azuremonitor/loganalytics/azure-log-analytics-datasource_test.go b/pkg/tsdb/azuremonitor/loganalytics/azure-log-analytics-datasource_test.go index 7c3c537c969..d4e66294100 100644 --- a/pkg/tsdb/azuremonitor/loganalytics/azure-log-analytics-datasource_test.go +++ b/pkg/tsdb/azuremonitor/loganalytics/azure-log-analytics-datasource_test.go @@ -111,7 +111,7 @@ func TestBuildLogAnalyticsQuery(t *testing.T) { }`, dataquery.ResultFormatTimeSeries)), RefID: "A", TimeRange: timeRange, - QueryType: string(dataquery.AzureQueryTypeAzureLogAnalytics), + QueryType: 
string(dataquery.AzureQueryTypeLogAnalytics), }, azureLogAnalyticsQuery: makeQueryPointer(AzureLogAnalyticsQuery{ RefID: "A", @@ -129,7 +129,7 @@ func TestBuildLogAnalyticsQuery(t *testing.T) { Query: "Perf | where ['TimeGenerated'] >= datetime('2018-03-15T13:00:00Z') and ['TimeGenerated'] <= datetime('2018-03-15T13:34:00Z') | where ['Computer'] in ('comp1','comp2') | summarize avg(CounterValue) by bin(TimeGenerated, 34000ms), Computer", Resources: []string{"/subscriptions/aaaaaaaa-bbbb-cccc-dddd-eeeeeeeeeeee/resourceGroups/cloud-datasources/providers/Microsoft.OperationalInsights/workspaces/AppInsightsTestDataWorkspace"}, TimeRange: timeRange, - QueryType: dataquery.AzureQueryTypeAzureLogAnalytics, + QueryType: dataquery.AzureQueryTypeLogAnalytics, AppInsightsQuery: false, DashboardTime: false, }), @@ -148,7 +148,7 @@ func TestBuildLogAnalyticsQuery(t *testing.T) { } }`, dataquery.ResultFormatTimeSeries)), RefID: "A", - QueryType: string(dataquery.AzureQueryTypeAzureLogAnalytics), + QueryType: string(dataquery.AzureQueryTypeLogAnalytics), }, azureLogAnalyticsQuery: makeQueryPointer(AzureLogAnalyticsQuery{ RefID: "A", @@ -164,7 +164,7 @@ func TestBuildLogAnalyticsQuery(t *testing.T) { }`, dataquery.ResultFormatTimeSeries)), Query: "Perf", Resources: []string{}, - QueryType: dataquery.AzureQueryTypeAzureLogAnalytics, + QueryType: dataquery.AzureQueryTypeLogAnalytics, AppInsightsQuery: false, DashboardTime: false, }), @@ -183,7 +183,7 @@ func TestBuildLogAnalyticsQuery(t *testing.T) { } }`, dataquery.ResultFormatTimeSeries)), RefID: "A", - QueryType: string(dataquery.AzureQueryTypeAzureLogAnalytics), + QueryType: string(dataquery.AzureQueryTypeLogAnalytics), }, azureLogAnalyticsQuery: makeQueryPointer(AzureLogAnalyticsQuery{ RefID: "A", @@ -199,7 +199,7 @@ func TestBuildLogAnalyticsQuery(t *testing.T) { }`, dataquery.ResultFormatTimeSeries)), Query: "Perf", Resources: []string{}, - QueryType: dataquery.AzureQueryTypeAzureLogAnalytics, + QueryType: dataquery.AzureQueryTypeLogAnalytics, AppInsightsQuery: false, DashboardTime: false, }), @@ -219,7 +219,7 @@ func TestBuildLogAnalyticsQuery(t *testing.T) { } }`, dataquery.ResultFormatTimeSeries)), RefID: "A", - QueryType: string(dataquery.AzureQueryTypeAzureLogAnalytics), + QueryType: string(dataquery.AzureQueryTypeLogAnalytics), }, azureLogAnalyticsQuery: makeQueryPointer(AzureLogAnalyticsQuery{ RefID: "A", @@ -236,7 +236,7 @@ func TestBuildLogAnalyticsQuery(t *testing.T) { }`, dataquery.ResultFormatTimeSeries)), Query: "Perf", Resources: []string{"/subscriptions/aaaaaaaa-bbbb-cccc-dddd-eeeeeeeeeeee/resourceGroups/cloud-datasources/providers/Microsoft.OperationalInsights/workspaces/AppInsightsTestDataWorkspace"}, - QueryType: dataquery.AzureQueryTypeAzureLogAnalytics, + QueryType: dataquery.AzureQueryTypeLogAnalytics, AppInsightsQuery: false, DashboardTime: false, }), @@ -257,7 +257,7 @@ func TestBuildLogAnalyticsQuery(t *testing.T) { }`, dataquery.ResultFormatTimeSeries)), RefID: "A", TimeRange: timeRange, - QueryType: string(dataquery.AzureQueryTypeAzureLogAnalytics), + QueryType: string(dataquery.AzureQueryTypeLogAnalytics), }, azureLogAnalyticsQuery: makeQueryPointer(AzureLogAnalyticsQuery{ RefID: "A", @@ -275,7 +275,7 @@ func TestBuildLogAnalyticsQuery(t *testing.T) { Query: "Perf", Resources: []string{"/subscriptions/aaaaaaaa-bbbb-cccc-dddd-eeeeeeeeeeee/resourceGroups/cloud-datasources/providers/Microsoft.OperationalInsights/workspaces/AppInsightsTestDataWorkspace", 
"/subscriptions/aaaaaaaa-bbbb-cccc-dddd-eeeeeeeeeeee/resourceGroups/cloud-datasources/providers/Microsoft.OperationalInsights/workspaces/AppInsightsTestDataWorkspace2"}, TimeRange: timeRange, - QueryType: dataquery.AzureQueryTypeAzureLogAnalytics, + QueryType: dataquery.AzureQueryTypeLogAnalytics, AppInsightsQuery: false, DashboardTime: false, }), @@ -297,7 +297,7 @@ func TestBuildLogAnalyticsQuery(t *testing.T) { }`, dataquery.ResultFormatTimeSeries)), RefID: "A", TimeRange: timeRange, - QueryType: string(dataquery.AzureQueryTypeAzureLogAnalytics), + QueryType: string(dataquery.AzureQueryTypeLogAnalytics), }, azureLogAnalyticsQuery: makeQueryPointer(AzureLogAnalyticsQuery{ RefID: "A", @@ -316,7 +316,7 @@ func TestBuildLogAnalyticsQuery(t *testing.T) { Query: "Perf", Resources: []string{"/subscriptions/aaaaaaaa-bbbb-cccc-dddd-eeeeeeeeeeee/resourceGroups/cloud-datasources/providers/Microsoft.OperationalInsights/workspaces/AppInsightsTestDataWorkspace"}, TimeRange: timeRange, - QueryType: dataquery.AzureQueryTypeAzureLogAnalytics, + QueryType: dataquery.AzureQueryTypeLogAnalytics, AppInsightsQuery: false, DashboardTime: true, TimeColumn: "TimeGenerated", @@ -341,7 +341,7 @@ func TestBuildLogAnalyticsQuery(t *testing.T) { }`, dataquery.ResultFormatTimeSeries)), RefID: "A", TimeRange: timeRange, - QueryType: string(dataquery.AzureQueryTypeAzureLogAnalytics), + QueryType: string(dataquery.AzureQueryTypeLogAnalytics), }, azureLogAnalyticsQuery: makeQueryPointer(AzureLogAnalyticsQuery{ RefID: "A", @@ -361,7 +361,7 @@ func TestBuildLogAnalyticsQuery(t *testing.T) { Query: "Perf", Resources: []string{"/subscriptions/aaaaaaaa-bbbb-cccc-dddd-eeeeeeeeeeee/resourceGroups/cloud-datasources/providers/Microsoft.OperationalInsights/workspaces/TestDataWorkspace"}, TimeRange: timeRange, - QueryType: dataquery.AzureQueryTypeAzureLogAnalytics, + QueryType: dataquery.AzureQueryTypeLogAnalytics, AppInsightsQuery: false, DashboardTime: true, BasicLogs: true, @@ -387,7 +387,7 @@ func TestBuildLogAnalyticsQuery(t *testing.T) { }`, dataquery.ResultFormatTimeSeries)), RefID: "A", TimeRange: timeRange, - QueryType: string(dataquery.AzureQueryTypeAzureLogAnalytics), + QueryType: string(dataquery.AzureQueryTypeLogAnalytics), }, azureLogAnalyticsQuery: nil, Err: require.Error, @@ -410,7 +410,7 @@ func TestBuildLogAnalyticsQuery(t *testing.T) { }`, dataquery.ResultFormatTimeSeries)), RefID: "A", TimeRange: timeRange, - QueryType: string(dataquery.AzureQueryTypeAzureLogAnalytics), + QueryType: string(dataquery.AzureQueryTypeLogAnalytics), }, azureLogAnalyticsQuery: nil, Err: require.Error, @@ -433,7 +433,7 @@ func TestBuildLogAnalyticsQuery(t *testing.T) { }`, dataquery.ResultFormatTimeSeries)), RefID: "A", TimeRange: timeRange, - QueryType: string(dataquery.AzureQueryTypeAzureLogAnalytics), + QueryType: string(dataquery.AzureQueryTypeLogAnalytics), }, azureLogAnalyticsQuery: nil, Err: require.Error, @@ -456,7 +456,7 @@ func TestBuildLogAnalyticsQuery(t *testing.T) { }`, dataquery.ResultFormatTimeSeries)), RefID: "A", TimeRange: timeRange, - QueryType: string(dataquery.AzureQueryTypeAzureLogAnalytics), + QueryType: string(dataquery.AzureQueryTypeLogAnalytics), }, azureLogAnalyticsQuery: nil, Err: require.Error, @@ -477,7 +477,7 @@ func TestBuildLogAnalyticsQuery(t *testing.T) { }`, dataquery.ResultFormatTimeSeries)), RefID: "A", TimeRange: timeRange, - QueryType: string(dataquery.AzureQueryTypeAzureLogAnalytics), + QueryType: string(dataquery.AzureQueryTypeLogAnalytics), }, azureLogAnalyticsQuery: 
makeQueryPointer(AzureLogAnalyticsQuery{ RefID: "A", @@ -495,7 +495,7 @@ func TestBuildLogAnalyticsQuery(t *testing.T) { Query: "Perf | where ['TimeGenerated'] >= datetime('2018-03-15T13:00:00Z') and ['TimeGenerated'] <= datetime('2018-03-15T13:34:00Z') | where ['Computer'] in ('comp1','comp2') | summarize avg(CounterValue) by bin(TimeGenerated, 34000ms), Computer", Resources: []string{"/subscriptions/aaaaaaaa-bbbb-cccc-dddd-eeeeeeeeeeee/resourceGroups/cloud-datasources/providers/Microsoft.Insights/components/AppInsightsTestDataWorkspace"}, TimeRange: timeRange, - QueryType: dataquery.AzureQueryTypeAzureLogAnalytics, + QueryType: dataquery.AzureQueryTypeLogAnalytics, AppInsightsQuery: true, DashboardTime: false, }), @@ -516,7 +516,7 @@ func TestBuildLogAnalyticsQuery(t *testing.T) { }`, dataquery.ResultFormatTimeSeries)), RefID: "A", TimeRange: timeRange, - QueryType: string(dataquery.AzureQueryTypeAzureLogAnalytics), + QueryType: string(dataquery.AzureQueryTypeLogAnalytics), }, azureLogAnalyticsQuery: makeQueryPointer(AzureLogAnalyticsQuery{ RefID: "A", @@ -534,7 +534,7 @@ func TestBuildLogAnalyticsQuery(t *testing.T) { Query: "Perf | where ['TimeGenerated'] >= datetime('2018-03-15T13:00:00Z') and ['TimeGenerated'] <= datetime('2018-03-15T13:34:00Z') | where ['Computer'] in ('comp1','comp2') | summarize avg(CounterValue) by bin(TimeGenerated, 34000ms), Computer", Resources: []string{"/subscriptions/aaaaaaaa-bbbb-cccc-dddd-eeeeeeeeeeee/resourceGroups/cloud-datasources/providers/microsoft.insights/components/AppInsightsTestDataWorkspace"}, TimeRange: timeRange, - QueryType: dataquery.AzureQueryTypeAzureLogAnalytics, + QueryType: dataquery.AzureQueryTypeLogAnalytics, AppInsightsQuery: true, DashboardTime: false, }), @@ -595,7 +595,7 @@ func TestLogAnalyticsCreateRequest(t *testing.T) { req, err := ds.createRequest(ctx, url, &AzureLogAnalyticsQuery{ Resources: []string{"/subscriptions/test-sub/resourceGroups/test-rg/providers/Microsoft.OperationalInsights/workspaces/r1", "/subscriptions/test-sub/resourceGroups/test-rg/providers/Microsoft.OperationalInsights/workspaces/r2"}, Query: "Perf", - QueryType: dataquery.AzureQueryTypeAzureLogAnalytics, + QueryType: dataquery.AzureQueryTypeLogAnalytics, AppInsightsQuery: false, DashboardTime: false, }) @@ -615,7 +615,7 @@ func TestLogAnalyticsCreateRequest(t *testing.T) { req, err := ds.createRequest(ctx, url, &AzureLogAnalyticsQuery{ Resources: []string{"/subscriptions/test-sub/resourceGroups/test-rg/providers/Microsoft.OperationalInsights/workspaces/r1", "/subscriptions/test-sub/resourceGroups/test-rg/providers/Microsoft.OperationalInsights/workspaces/r2"}, Query: "Perf", - QueryType: dataquery.AzureQueryTypeAzureLogAnalytics, + QueryType: dataquery.AzureQueryTypeLogAnalytics, TimeRange: backend.TimeRange{ From: from, To: to, @@ -662,7 +662,7 @@ func TestLogAnalyticsCreateRequest(t *testing.T) { req, err := ds.createRequest(ctx, url, &AzureLogAnalyticsQuery{ Resources: []string{"/subscriptions/test-sub/resourceGroups/test-rg/providers/microsoft.operationalInsights/workSpaces/ws1", "microsoft.operationalInsights/workspaces/ws2"}, // Note different casings and partial paths Query: "Perf", - QueryType: dataquery.AzureQueryTypeAzureLogAnalytics, + QueryType: dataquery.AzureQueryTypeLogAnalytics, AppInsightsQuery: false, DashboardTime: false, }) @@ -680,7 +680,7 @@ func TestLogAnalyticsCreateRequest(t *testing.T) { req, err := ds.createRequest(ctx, url, &AzureLogAnalyticsQuery{ Resources: 
[]string{"/subscriptions/test-sub/resourceGroups/test-rg/providers/SomeOtherService/serviceInstances/r1", "/subscriptions/test-sub/resourceGroups/test-rg/providers/SomeOtherService/serviceInstances/r2"}, Query: "Perf", - QueryType: dataquery.AzureQueryTypeAzureLogAnalytics, + QueryType: dataquery.AzureQueryTypeLogAnalytics, AppInsightsQuery: false, DashboardTime: false, }) @@ -767,7 +767,7 @@ func Test_exemplarsFeatureToggle(t *testing.T) { "query": "traceid" }`), RefID: "A", - QueryType: string(dataquery.AzureQueryTypeTraceql), + QueryType: string(dataquery.AzureQueryTypeTraceExemplar), } _, err := ds.buildQuery(ctx, query, dsInfo, false) @@ -787,7 +787,7 @@ func Test_exemplarsFeatureToggle(t *testing.T) { "query": "traceid" }`), RefID: "A", - QueryType: string(dataquery.AzureQueryTypeTraceql), + QueryType: string(dataquery.AzureQueryTypeTraceExemplar), } _, err := ds.buildQuery(ctx, query, dsInfo, false) diff --git a/pkg/tsdb/azuremonitor/loganalytics/azure-response-table-frame.go b/pkg/tsdb/azuremonitor/loganalytics/azure-response-table-frame.go index efe24ca10a9..264560ae184 100644 --- a/pkg/tsdb/azuremonitor/loganalytics/azure-response-table-frame.go +++ b/pkg/tsdb/azuremonitor/loganalytics/azure-response-table-frame.go @@ -78,7 +78,7 @@ func converterFrameForTable(t *types.AzureResponseTable, queryType dataquery.Azu if !ok { return nil, fmt.Errorf("unsupported analytics column type %v", col.Type) } - if (queryType == dataquery.AzureQueryTypeAzureTraces || queryType == dataquery.AzureQueryTypeTraceql) && resultFormat == dataquery.ResultFormatTrace && (col.Name == "serviceTags" || col.Name == "tags") { + if (queryType == dataquery.AzureQueryTypeAzureTraces || queryType == dataquery.AzureQueryTypeTraceExemplar) && resultFormat == dataquery.ResultFormatTrace && (col.Name == "serviceTags" || col.Name == "tags") { converter = tagsConverter } converters = append(converters, converter) diff --git a/pkg/tsdb/azuremonitor/loganalytics/azure-response-table-frame_test.go b/pkg/tsdb/azuremonitor/loganalytics/azure-response-table-frame_test.go index c73ca055cac..3b6ecb5a6bd 100644 --- a/pkg/tsdb/azuremonitor/loganalytics/azure-response-table-frame_test.go +++ b/pkg/tsdb/azuremonitor/loganalytics/azure-response-table-frame_test.go @@ -56,7 +56,7 @@ func TestLogTableToFrame(t *testing.T) { for _, tt := range tests { t.Run(tt.name, func(t *testing.T) { res := loadTestFileWithNumber(t, tt.testFile) - frame, err := ResponseTableToFrame(&res.Tables[0], "A", "query", dataquery.AzureQueryTypeAzureLogAnalytics, dataquery.ResultFormatTable, false) + frame, err := ResponseTableToFrame(&res.Tables[0], "A", "query", dataquery.AzureQueryTypeLogAnalytics, dataquery.ResultFormatTable, false) appendErrorNotice(frame, res.Error) require.NoError(t, err) @@ -107,7 +107,7 @@ func TestTraceTableToFrame(t *testing.T) { name: "single trace as trace format from exemplars query", testFile: "traces/2-traces-single-table.json", resultFormat: dataquery.ResultFormatTrace, - queryType: dataquery.AzureQueryTypeTraceql, + queryType: dataquery.AzureQueryTypeTraceExemplar, }, } @@ -142,7 +142,7 @@ func TestLargeLogsResponse(t *testing.T) { } res.Tables[0].Rows = rows resultFormat := dataquery.ResultFormatLogs - frame, err := ResponseTableToFrame(&res.Tables[0], "A", "query", dataquery.AzureQueryTypeAzureLogAnalytics, resultFormat, false) + frame, err := ResponseTableToFrame(&res.Tables[0], "A", "query", dataquery.AzureQueryTypeLogAnalytics, resultFormat, false) appendErrorNotice(frame, res.Error) require.NoError(t, err) 
require.Equal(t, frame.Rows(), 30000) @@ -171,7 +171,7 @@ func TestLargeLogsResponse(t *testing.T) { } res.Tables[0].Rows = rows resultFormat := dataquery.ResultFormatLogs - frame, err := ResponseTableToFrame(&res.Tables[0], "A", "query", dataquery.AzureQueryTypeAzureLogAnalytics, resultFormat, true) + frame, err := ResponseTableToFrame(&res.Tables[0], "A", "query", dataquery.AzureQueryTypeLogAnalytics, resultFormat, true) appendErrorNotice(frame, res.Error) require.NoError(t, err) require.Equal(t, frame.Rows(), 40000) diff --git a/pkg/tsdb/azuremonitor/loganalytics/traces.go b/pkg/tsdb/azuremonitor/loganalytics/traces.go index e9b7ddf5078..5bda96c7268 100644 --- a/pkg/tsdb/azuremonitor/loganalytics/traces.go +++ b/pkg/tsdb/azuremonitor/loganalytics/traces.go @@ -198,7 +198,7 @@ func buildAppInsightsQuery(ctx context.Context, query backend.DataQuery, dsInfo resultFormat := ParseResultFormat(azureTracesTarget.ResultFormat, dataquery.AzureQueryTypeAzureTraces) resources := azureTracesTarget.Resources - if query.QueryType == string(dataquery.AzureQueryTypeTraceql) { + if query.QueryType == string(dataquery.AzureQueryTypeTraceExemplar) { subscription, err := utils.GetFirstSubscriptionOrDefault(ctx, dsInfo, logger) if err != nil { errorMessage := fmt.Errorf("failed to retrieve subscription for trace exemplars query: %w", err) @@ -234,7 +234,7 @@ func buildAppInsightsQuery(ctx context.Context, query backend.DataQuery, dsInfo } sort.Strings(queryResources) - if query.QueryType == string(dataquery.AzureQueryTypeTraceql) { + if query.QueryType == string(dataquery.AzureQueryTypeTraceExemplar) { resources = queryResources resourceOrWorkspace = resources[0] } diff --git a/pkg/tsdb/azuremonitor/loganalytics/utils.go b/pkg/tsdb/azuremonitor/loganalytics/utils.go index 093d5dc41da..435f683fe40 100644 --- a/pkg/tsdb/azuremonitor/loganalytics/utils.go +++ b/pkg/tsdb/azuremonitor/loganalytics/utils.go @@ -69,7 +69,7 @@ func ParseResultFormat(queryResultFormat *dataquery.ResultFormat, queryType data if queryResultFormat != nil && *queryResultFormat != "" { return *queryResultFormat } - if queryType == dataquery.AzureQueryTypeAzureLogAnalytics { + if queryType == dataquery.AzureQueryTypeLogAnalytics { // Default to time series format for logs queries. 
It was time series before this change return dataquery.ResultFormatTimeSeries } diff --git a/pkg/tsdb/azuremonitor/loganalytics/utils_test.go b/pkg/tsdb/azuremonitor/loganalytics/utils_test.go index 7af29c467b0..67f4eca9cb5 100644 --- a/pkg/tsdb/azuremonitor/loganalytics/utils_test.go +++ b/pkg/tsdb/azuremonitor/loganalytics/utils_test.go @@ -19,7 +19,7 @@ func TestParseResultFormat(t *testing.T) { { name: "returns the time series format as default for logs queries if input format is nil", queryResultFormat: nil, - queryType: dataquery.AzureQueryTypeAzureLogAnalytics, + queryType: dataquery.AzureQueryTypeLogAnalytics, expectedResultFormat: dataquery.ResultFormatTimeSeries, }, { @@ -31,7 +31,7 @@ func TestParseResultFormat(t *testing.T) { { name: "returns the logs format as default for logs queries if input format is empty", queryResultFormat: &emptyResultFormat, - queryType: dataquery.AzureQueryTypeAzureLogAnalytics, + queryType: dataquery.AzureQueryTypeLogAnalytics, expectedResultFormat: dataquery.ResultFormatTimeSeries, }, { diff --git a/pkg/tsdb/cloud-monitoring/cloudmonitoring.go b/pkg/tsdb/cloud-monitoring/cloudmonitoring.go index 753bac515c0..319a0c66992 100644 --- a/pkg/tsdb/cloud-monitoring/cloudmonitoring.go +++ b/pkg/tsdb/cloud-monitoring/cloudmonitoring.go @@ -51,11 +51,11 @@ var ( const ( gceAuthentication = "gce" jwtAuthentication = "jwt" - annotationQueryType = dataquery.QueryTypeAnnotation - timeSeriesListQueryType = dataquery.QueryTypeTimeSeriesList - timeSeriesQueryQueryType = dataquery.QueryTypeTimeSeriesQuery - sloQueryType = dataquery.QueryTypeSlo - promQLQueryType = dataquery.QueryTypePromQL + annotationQueryType = dataquery.QueryTypeANNOTATION + timeSeriesListQueryType = dataquery.QueryTypeTIMESERIESLIST + timeSeriesQueryQueryType = dataquery.QueryTypeTIMESERIESQUERY + sloQueryType = dataquery.QueryTypeSLO + promQLQueryType = dataquery.QueryTypePROMQL crossSeriesReducerDefault = "REDUCE_NONE" perSeriesAlignerDefault = "ALIGN_MEAN" ) @@ -217,10 +217,6 @@ func migrateMetricTypeFilter(metricTypeFilter string, prevFilters any) []string return metricTypeFilterArray } -func strPtr(s string) *string { - return &s -} - func migrateRequest(req *backend.QueryDataRequest) error { for i, q := range req.Queries { var rawQuery map[string]any @@ -239,7 +235,7 @@ func migrateRequest(req *backend.QueryDataRequest) error { if err != nil { return err } - q.QueryType = string(dataquery.QueryTypeTimeSeriesList) + q.QueryType = string(dataquery.QueryTypeTIMESERIESLIST) gq := grafanaQuery{ TimeSeriesList: &mq, } @@ -260,7 +256,7 @@ func migrateRequest(req *backend.QueryDataRequest) error { // Migrate type to queryType, which is only used for annotations if rawQuery["type"] != nil && rawQuery["type"].(string) == "annotationQuery" { - q.QueryType = string(dataquery.QueryTypeAnnotation) + q.QueryType = string(dataquery.QueryTypeANNOTATION) } if rawQuery["queryType"] != nil { q.QueryType = rawQuery["queryType"].(string) @@ -274,9 +270,9 @@ func migrateRequest(req *backend.QueryDataRequest) error { rawQuery["timeSeriesQuery"] = &dataquery.TimeSeriesQuery{ ProjectName: toString(metricQuery["projectName"]), Query: toString(metricQuery["query"]), - GraphPeriod: strPtr(toString(metricQuery["graphPeriod"])), + GraphPeriod: toString(metricQuery["graphPeriod"]), } - q.QueryType = string(dataquery.QueryTypeTimeSeriesQuery) + q.QueryType = string(dataquery.QueryTypeTIMESERIESQUERY) } else { tslb, err := json.Marshal(metricQuery) if err != nil { @@ -292,7 +288,7 @@ func migrateRequest(req 
*backend.QueryDataRequest) error { tsl.Filters = migrateMetricTypeFilter(metricQuery["metricType"].(string), metricQuery["filters"]) } rawQuery["timeSeriesList"] = tsl - q.QueryType = string(dataquery.QueryTypeTimeSeriesList) + q.QueryType = string(dataquery.QueryTypeTIMESERIESLIST) } // AliasBy is now a top level property if metricQuery["aliasBy"] != nil { @@ -305,7 +301,7 @@ func migrateRequest(req *backend.QueryDataRequest) error { q.JSON = b } - if rawQuery["sloQuery"] != nil && q.QueryType == string(dataquery.QueryTypeSlo) { + if rawQuery["sloQuery"] != nil && q.QueryType == string(dataquery.QueryTypeSLO) { sloQuery := rawQuery["sloQuery"].(map[string]any) // AliasBy is now a top level property if sloQuery["aliasBy"] != nil { @@ -349,7 +345,7 @@ func (s *Service) QueryData(ctx context.Context, req *backend.QueryDataRequest) } switch req.Queries[0].QueryType { - case string(dataquery.QueryTypeAnnotation): + case string(dataquery.QueryTypeANNOTATION): return s.executeAnnotationQuery(ctx, req, *dsInfo, queries, logger) default: return s.executeTimeSeriesQuery(ctx, req, *dsInfo, queries, logger) @@ -403,7 +399,7 @@ func (s *Service) buildQueryExecutors(logger log.Logger, req *backend.QueryDataR var queryInterface cloudMonitoringQueryExecutor switch query.QueryType { - case string(dataquery.QueryTypeTimeSeriesList), string(dataquery.QueryTypeAnnotation): + case string(dataquery.QueryTypeTIMESERIESLIST), string(dataquery.QueryTypeANNOTATION): cmtsf := &cloudMonitoringTimeSeriesList{ refID: query.RefID, aliasBy: q.AliasBy, @@ -415,7 +411,7 @@ func (s *Service) buildQueryExecutors(logger log.Logger, req *backend.QueryDataR cmtsf.parameters = q.TimeSeriesList cmtsf.setParams(startTime, endTime, durationSeconds, query.Interval.Milliseconds()) queryInterface = cmtsf - case string(dataquery.QueryTypeTimeSeriesQuery): + case string(dataquery.QueryTypeTIMESERIESQUERY): queryInterface = &cloudMonitoringTimeSeriesQuery{ refID: query.RefID, aliasBy: q.AliasBy, @@ -424,7 +420,7 @@ func (s *Service) buildQueryExecutors(logger log.Logger, req *backend.QueryDataR timeRange: req.Queries[0].TimeRange, logger: logger, } - case string(dataquery.QueryTypeSlo): + case string(dataquery.QueryTypeSLO): cmslo := &cloudMonitoringSLO{ refID: query.RefID, aliasBy: q.AliasBy, @@ -432,7 +428,7 @@ func (s *Service) buildQueryExecutors(logger log.Logger, req *backend.QueryDataR } cmslo.setParams(startTime, endTime, durationSeconds, query.Interval.Milliseconds()) queryInterface = cmslo - case string(dataquery.QueryTypePromQL): + case string(dataquery.QueryTypePROMQL): cmp := &cloudMonitoringProm{ refID: query.RefID, aliasBy: q.AliasBy, @@ -629,7 +625,7 @@ func unmarshalResponse(res *http.Response, logger log.Logger) (cloudMonitoringRe return data, nil } -func addConfigData(frames data.Frames, dl string, unit string, period *string, logger log.Logger) data.Frames { +func addConfigData(frames data.Frames, dl string, unit string, period string, logger log.Logger) data.Frames { for i := range frames { if frames[i].Fields[1].Config == nil { frames[i].Fields[1].Config = &data.FieldConfig{} @@ -650,8 +646,8 @@ func addConfigData(frames data.Frames, dl string, unit string, period *string, l if frames[i].Fields[0].Config == nil { frames[i].Fields[0].Config = &data.FieldConfig{} } - if period != nil && *period != "" { - err := addInterval(*period, frames[i].Fields[0]) + if period != "" { + err := addInterval(period, frames[i].Fields[0]) if err != nil { logger.Error("Failed to add interval: %s", err, "statusSource", 
backend.ErrorSourceDownstream) } diff --git a/pkg/tsdb/cloud-monitoring/cloudmonitoring_test.go b/pkg/tsdb/cloud-monitoring/cloudmonitoring_test.go index 31972c68f2a..1dfbff4a37b 100644 --- a/pkg/tsdb/cloud-monitoring/cloudmonitoring_test.go +++ b/pkg/tsdb/cloud-monitoring/cloudmonitoring_test.go @@ -1104,7 +1104,7 @@ func baseTimeSeriesList() *backend.QueryDataRequest { From: fromStart, To: fromStart.Add(34 * time.Minute), }, - QueryType: string(dataquery.QueryTypeTimeSeriesList), + QueryType: string(dataquery.QueryTypeTIMESERIESLIST), JSON: json.RawMessage(`{ "timeSeriesList": { "filters": ["metric.type=\"a/metric/type\""], @@ -1128,7 +1128,7 @@ func baseTimeSeriesQuery() *backend.QueryDataRequest { From: fromStart, To: fromStart.Add(34 * time.Minute), }, - QueryType: string(dataquery.QueryTypeTimeSeriesQuery), + QueryType: string(dataquery.QueryTypeTIMESERIESQUERY), JSON: json.RawMessage(`{ "queryType": "metrics", "timeSeriesQuery": { diff --git a/pkg/tsdb/cloud-monitoring/kinds/dataquery/types_dataquery_gen.go b/pkg/tsdb/cloud-monitoring/kinds/dataquery/types_dataquery_gen.go index f89711a9cf2..272b4e3a1d9 100644 --- a/pkg/tsdb/cloud-monitoring/kinds/dataquery/types_dataquery_gen.go +++ b/pkg/tsdb/cloud-monitoring/kinds/dataquery/types_dataquery_gen.go @@ -7,338 +7,294 @@ // // Run 'make gen-cue' from repository root to regenerate. +// Code generated - EDITING IS FUTILE. DO NOT EDIT. + package dataquery -// Defines values for AlignmentTypes. -const ( - AlignmentTypesALIGNCOUNT AlignmentTypes = "ALIGN_COUNT" - AlignmentTypesALIGNCOUNTFALSE AlignmentTypes = "ALIGN_COUNT_FALSE" - AlignmentTypesALIGNCOUNTTRUE AlignmentTypes = "ALIGN_COUNT_TRUE" - AlignmentTypesALIGNDELTA AlignmentTypes = "ALIGN_DELTA" - AlignmentTypesALIGNFRACTIONTRUE AlignmentTypes = "ALIGN_FRACTION_TRUE" - AlignmentTypesALIGNINTERPOLATE AlignmentTypes = "ALIGN_INTERPOLATE" - AlignmentTypesALIGNMAX AlignmentTypes = "ALIGN_MAX" - AlignmentTypesALIGNMEAN AlignmentTypes = "ALIGN_MEAN" - AlignmentTypesALIGNMIN AlignmentTypes = "ALIGN_MIN" - AlignmentTypesALIGNNEXTOLDER AlignmentTypes = "ALIGN_NEXT_OLDER" - AlignmentTypesALIGNNONE AlignmentTypes = "ALIGN_NONE" - AlignmentTypesALIGNPERCENTCHANGE AlignmentTypes = "ALIGN_PERCENT_CHANGE" - AlignmentTypesALIGNPERCENTILE05 AlignmentTypes = "ALIGN_PERCENTILE_05" - AlignmentTypesALIGNPERCENTILE50 AlignmentTypes = "ALIGN_PERCENTILE_50" - AlignmentTypesALIGNPERCENTILE95 AlignmentTypes = "ALIGN_PERCENTILE_95" - AlignmentTypesALIGNPERCENTILE99 AlignmentTypes = "ALIGN_PERCENTILE_99" - AlignmentTypesALIGNRATE AlignmentTypes = "ALIGN_RATE" - AlignmentTypesALIGNSTDDEV AlignmentTypes = "ALIGN_STDDEV" - AlignmentTypesALIGNSUM AlignmentTypes = "ALIGN_SUM" -) - -// Defines values for MetricFindQueryTypes. 
-const ( - MetricFindQueryTypesAggregations MetricFindQueryTypes = "aggregations" - MetricFindQueryTypesAligners MetricFindQueryTypes = "aligners" - MetricFindQueryTypesAlignmentPeriods MetricFindQueryTypes = "alignmentPeriods" - MetricFindQueryTypesDefaultProject MetricFindQueryTypes = "defaultProject" - MetricFindQueryTypesLabelKeys MetricFindQueryTypes = "labelKeys" - MetricFindQueryTypesLabelValues MetricFindQueryTypes = "labelValues" - MetricFindQueryTypesMetricTypes MetricFindQueryTypes = "metricTypes" - MetricFindQueryTypesProjects MetricFindQueryTypes = "projects" - MetricFindQueryTypesResourceTypes MetricFindQueryTypes = "resourceTypes" - MetricFindQueryTypesSelectors MetricFindQueryTypes = "selectors" - MetricFindQueryTypesServices MetricFindQueryTypes = "services" - MetricFindQueryTypesSlo MetricFindQueryTypes = "slo" - MetricFindQueryTypesSloServices MetricFindQueryTypes = "sloServices" -) - -// Defines values for MetricKind. -const ( - MetricKindCUMULATIVE MetricKind = "CUMULATIVE" - MetricKindDELTA MetricKind = "DELTA" - MetricKindGAUGE MetricKind = "GAUGE" - MetricKindMETRICKINDUNSPECIFIED MetricKind = "METRIC_KIND_UNSPECIFIED" -) - -// Defines values for PreprocessorType. -const ( - PreprocessorTypeDelta PreprocessorType = "delta" - PreprocessorTypeNone PreprocessorType = "none" - PreprocessorTypeRate PreprocessorType = "rate" -) - -// Defines values for QueryType. -const ( - QueryTypeAnnotation QueryType = "annotation" - QueryTypePromQL QueryType = "promQL" - QueryTypeSlo QueryType = "slo" - QueryTypeTimeSeriesList QueryType = "timeSeriesList" - QueryTypeTimeSeriesQuery QueryType = "timeSeriesQuery" -) - -// Defines values for ValueTypes. -const ( - ValueTypesBOOL ValueTypes = "BOOL" - ValueTypesDISTRIBUTION ValueTypes = "DISTRIBUTION" - ValueTypesDOUBLE ValueTypes = "DOUBLE" - ValueTypesINT64 ValueTypes = "INT64" - ValueTypesMONEY ValueTypes = "MONEY" - ValueTypesSTRING ValueTypes = "STRING" - ValueTypesVALUETYPEUNSPECIFIED ValueTypes = "VALUE_TYPE_UNSPECIFIED" -) - -// AlignmentTypes defines model for AlignmentTypes. -type AlignmentTypes string - -// CloudMonitoringQuery defines model for CloudMonitoringQuery. type CloudMonitoringQuery struct { - // Aliases can be set to modify the legend labels. e.g. {{metric.label.xxx}}. See docs for more detail. - AliasBy *string `json:"aliasBy,omitempty"` - - // For mixed data sources the selected datasource is on the query level. - // For non mixed scenarios this is undefined. - // TODO find a better way to do this ^ that's friendly to schema - // TODO this shouldn't be unknown but DataSourceRef | null - Datasource *any `json:"datasource,omitempty"` - - // If hide is set to true, Grafana will filter out the response(s) associated with this query before returning it to the panel. - Hide *bool `json:"hide,omitempty"` - - // Time interval in milliseconds. - IntervalMs *float32 `json:"intervalMs,omitempty"` - - // PromQL sub-query properties. - PromQLQuery *PromQLQuery `json:"promQLQuery,omitempty"` - - // Specify the query flavor - // TODO make this required and give it a default - QueryType *string `json:"queryType,omitempty"` - // A unique identifier for the query within the list of targets. // In server side expressions, the refId is used as a variable name to identify results. // By default, the UI will assign A->Z; however setting meaningful names may be useful. - RefId *string `json:"refId,omitempty"` - - // SLO sub-query properties. 
- SloQuery *SLOQuery `json:"sloQuery,omitempty"` - + RefId string `json:"refId"` + // If hide is set to true, Grafana will filter out the response(s) associated with this query before returning it to the panel. + Hide *bool `json:"hide,omitempty"` + // Specify the query flavor + // TODO make this required and give it a default + QueryType *string `json:"queryType,omitempty"` + // Aliases can be set to modify the legend labels. e.g. {{metric.label.xxx}}. See docs for more detail. + AliasBy *string `json:"aliasBy,omitempty"` + // GCM query type. + // queryType: #QueryType // Time Series List sub-query properties. TimeSeriesList *TimeSeriesList `json:"timeSeriesList,omitempty"` - // Time Series sub-query properties. TimeSeriesQuery *TimeSeriesQuery `json:"timeSeriesQuery,omitempty"` -} - -// These are the common properties available to all queries in all datasources. -// Specific implementations will *extend* this interface, adding the required -// properties for the given context. -type DataQuery struct { + // SLO sub-query properties. + SloQuery *SLOQuery `json:"sloQuery,omitempty"` + // PromQL sub-query properties. + PromQLQuery *PromQLQuery `json:"promQLQuery,omitempty"` // For mixed data sources the selected datasource is on the query level. // For non mixed scenarios this is undefined. // TODO find a better way to do this ^ that's friendly to schema // TODO this shouldn't be unknown but DataSourceRef | null - Datasource *any `json:"datasource,omitempty"` - - // If hide is set to true, Grafana will filter out the response(s) associated with this query before returning it to the panel. - Hide *bool `json:"hide,omitempty"` - - // Specify the query flavor - // TODO make this required and give it a default - QueryType *string `json:"queryType,omitempty"` - - // A unique identifier for the query within the list of targets. - // In server side expressions, the refId is used as a variable name to identify results. - // By default, the UI will assign A->Z; however setting meaningful names may be useful. - RefId string `json:"refId"` + Datasource any `json:"datasource,omitempty"` + // Time interval in milliseconds. + IntervalMs *float64 `json:"intervalMs,omitempty"` } -// Query filter representation. -type Filter struct { - // Filter condition. - Condition *string `json:"condition,omitempty"` - - // Filter key. - Key string `json:"key"` - - // Filter operator. - Operator string `json:"operator"` - - // Filter value. - Value string `json:"value"` -} - -// GoogleCloudMonitoringDataQuery defines model for GoogleCloudMonitoringDataQuery. -type GoogleCloudMonitoringDataQuery = map[string]any - -// @deprecated Use TimeSeriesList instead. Legacy annotation query properties for migration purposes. -type LegacyCloudMonitoringAnnotationQuery struct { - // Array of filters to query data by. Labels that can be filtered on are defined by the metric. - Filters []string `json:"filters"` - MetricKind MetricKind `json:"metricKind"` - MetricType string `json:"metricType"` - - // GCP project to execute the query against. - ProjectName string `json:"projectName"` - - // Query refId. - RefId string `json:"refId"` - - // Annotation text. - Text string `json:"text"` - - // Annotation title. - Title string `json:"title"` - ValueType string `json:"valueType"` -} - -// MetricFindQueryTypes defines model for MetricFindQueryTypes. -type MetricFindQueryTypes string - -// MetricKind defines model for MetricKind. -type MetricKind string - -// @deprecated This type is for migration purposes only. 
Replaced by TimeSeriesList Metric sub-query properties. -type MetricQuery struct { - // Aliases can be set to modify the legend labels. e.g. {{metric.label.xxx}}. See docs for more detail. - AliasBy *string `json:"aliasBy,omitempty"` - - // Alignment period to use when regularizing data. Defaults to cloud-monitoring-auto. - AlignmentPeriod *string `json:"alignmentPeriod,omitempty"` - - // Reducer applied across a set of time-series values. Defaults to REDUCE_NONE. - CrossSeriesReducer string `json:"crossSeriesReducer"` - EditorMode string `json:"editorMode"` - - // Array of filters to query data by. Labels that can be filtered on are defined by the metric. - Filters []string `json:"filters,omitempty"` - - // To disable the graphPeriod, it should explictly be set to 'disabled'. - GraphPeriod *string `json:"graphPeriod,omitempty"` - - // Array of labels to group data by. - GroupBys []string `json:"groupBys,omitempty"` - MetricKind *MetricKind `json:"metricKind,omitempty"` - MetricType string `json:"metricType"` - - // Alignment function to be used. Defaults to ALIGN_MEAN. - PerSeriesAligner *string `json:"perSeriesAligner,omitempty"` - - // Types of pre-processor available. Defined by the metric. - Preprocessor *PreprocessorType `json:"preprocessor,omitempty"` - - // GCP project to execute the query against. - ProjectName string `json:"projectName"` - - // MQL query to be executed. - Query string `json:"query"` - ValueType *string `json:"valueType,omitempty"` - View *string `json:"view,omitempty"` -} - -// Types of pre-processor available. Defined by the metric. -type PreprocessorType string - -// PromQL sub-query properties. -type PromQLQuery struct { - // PromQL expression/query to be executed. - Expr string `json:"expr"` - - // GCP project to execute the query against. - ProjectName string `json:"projectName"` - - // PromQL min step - Step string `json:"step"` +// NewCloudMonitoringQuery creates a new CloudMonitoringQuery object. +func NewCloudMonitoringQuery() *CloudMonitoringQuery { + return &CloudMonitoringQuery{} } // Defines the supported queryTypes. type QueryType string -// SLO sub-query properties. -type SLOQuery struct { - // Alignment period to use when regularizing data. Defaults to cloud-monitoring-auto. - AlignmentPeriod *string `json:"alignmentPeriod,omitempty"` - - // SLO goal value. - Goal *float32 `json:"goal,omitempty"` - - // Specific lookback period for the SLO. - LookbackPeriod *string `json:"lookbackPeriod,omitempty"` - - // Alignment function to be used. Defaults to ALIGN_MEAN. - PerSeriesAligner *string `json:"perSeriesAligner,omitempty"` - - // GCP project to execute the query against. - ProjectName string `json:"projectName"` - - // SLO selector. - SelectorName string `json:"selectorName"` - - // ID for the service the SLO is in. - ServiceId string `json:"serviceId"` - - // Name for the service the SLO is in. - ServiceName string `json:"serviceName"` - - // ID for the SLO. - SloId string `json:"sloId"` - - // Name of the SLO. - SloName string `json:"sloName"` -} +const ( + QueryTypeTIMESERIESLIST QueryType = "timeSeriesList" + QueryTypeTIMESERIESQUERY QueryType = "timeSeriesQuery" + QueryTypeSLO QueryType = "slo" + QueryTypeANNOTATION QueryType = "annotation" + QueryTypePROMQL QueryType = "promQL" +) // Time Series List sub-query properties. type TimeSeriesList struct { - // Alignment period to use when regularizing data. Defaults to cloud-monitoring-auto. 
- AlignmentPeriod *string `json:"alignmentPeriod,omitempty"` - - // Reducer applied across a set of time-series values. Defaults to REDUCE_NONE. - CrossSeriesReducer string `json:"crossSeriesReducer"` - - // Array of filters to query data by. Labels that can be filtered on are defined by the metric. - Filters []string `json:"filters,omitempty"` - - // Array of labels to group data by. - GroupBys []string `json:"groupBys,omitempty"` - - // Alignment function to be used. Defaults to ALIGN_MEAN. - PerSeriesAligner *string `json:"perSeriesAligner,omitempty"` - - // Types of pre-processor available. Defined by the metric. - Preprocessor *PreprocessorType `json:"preprocessor,omitempty"` - // GCP project to execute the query against. ProjectName string `json:"projectName"` - - // Only present if a preprocessor is selected. Alignment period to use when regularizing data. Defaults to cloud-monitoring-auto. - SecondaryAlignmentPeriod *string `json:"secondaryAlignmentPeriod,omitempty"` - - // Only present if a preprocessor is selected. Reducer applied across a set of time-series values. Defaults to REDUCE_NONE. - SecondaryCrossSeriesReducer *string `json:"secondaryCrossSeriesReducer,omitempty"` - - // Only present if a preprocessor is selected. Array of labels to group data by. - SecondaryGroupBys []string `json:"secondaryGroupBys,omitempty"` - - // Only present if a preprocessor is selected. Alignment function to be used. Defaults to ALIGN_MEAN. - SecondaryPerSeriesAligner *string `json:"secondaryPerSeriesAligner,omitempty"` - - // Annotation text. - Text *string `json:"text,omitempty"` - - // Annotation title. - Title *string `json:"title,omitempty"` - + // Reducer applied across a set of time-series values. Defaults to REDUCE_NONE. + CrossSeriesReducer string `json:"crossSeriesReducer"` + // Alignment period to use when regularizing data. Defaults to cloud-monitoring-auto. + AlignmentPeriod *string `json:"alignmentPeriod,omitempty"` + // Alignment function to be used. Defaults to ALIGN_MEAN. + PerSeriesAligner *string `json:"perSeriesAligner,omitempty"` + // Array of labels to group data by. + GroupBys []string `json:"groupBys,omitempty"` + // Array of filters to query data by. Labels that can be filtered on are defined by the metric. + Filters []string `json:"filters,omitempty"` // Data view, defaults to FULL. View *string `json:"view,omitempty"` + // Annotation title. + Title *string `json:"title,omitempty"` + // Annotation text. + Text *string `json:"text,omitempty"` + // Only present if a preprocessor is selected. Reducer applied across a set of time-series values. Defaults to REDUCE_NONE. + SecondaryCrossSeriesReducer *string `json:"secondaryCrossSeriesReducer,omitempty"` + // Only present if a preprocessor is selected. Alignment period to use when regularizing data. Defaults to cloud-monitoring-auto. + SecondaryAlignmentPeriod *string `json:"secondaryAlignmentPeriod,omitempty"` + // Only present if a preprocessor is selected. Alignment function to be used. Defaults to ALIGN_MEAN. + SecondaryPerSeriesAligner *string `json:"secondaryPerSeriesAligner,omitempty"` + // Only present if a preprocessor is selected. Array of labels to group data by. + SecondaryGroupBys []string `json:"secondaryGroupBys,omitempty"` + // Preprocessor is not part of the API, but is used to store the preprocessor and not affect the UI for the rest of parameters + Preprocessor *PreprocessorType `json:"preprocessor,omitempty"` } +// NewTimeSeriesList creates a new TimeSeriesList object. 
+func NewTimeSeriesList() *TimeSeriesList { + return &TimeSeriesList{} +} + +// Types of pre-processor available. Defined by the metric. +type PreprocessorType string + +const ( + PreprocessorTypeNone PreprocessorType = "none" + PreprocessorTypeRate PreprocessorType = "rate" + PreprocessorTypeDelta PreprocessorType = "delta" +) + // Time Series sub-query properties. type TimeSeriesQuery struct { - // To disable the graphPeriod, it should explictly be set to 'disabled'. - GraphPeriod *string `json:"graphPeriod,omitempty"` - // GCP project to execute the query against. ProjectName string `json:"projectName"` - // MQL query to be executed. Query string `json:"query"` + // To disable the graphPeriod, it should explictly be set to 'disabled'. + GraphPeriod string `json:"graphPeriod,omitempty"` } -// ValueTypes defines model for ValueTypes. +// NewTimeSeriesQuery creates a new TimeSeriesQuery object. +func NewTimeSeriesQuery() *TimeSeriesQuery { + return &TimeSeriesQuery{} +} + +// SLO sub-query properties. +type SLOQuery struct { + // GCP project to execute the query against. + ProjectName string `json:"projectName"` + // Alignment function to be used. Defaults to ALIGN_MEAN. + PerSeriesAligner *string `json:"perSeriesAligner,omitempty"` + // Alignment period to use when regularizing data. Defaults to cloud-monitoring-auto. + AlignmentPeriod *string `json:"alignmentPeriod,omitempty"` + // SLO selector. + SelectorName string `json:"selectorName"` + // ID for the service the SLO is in. + ServiceId string `json:"serviceId"` + // Name for the service the SLO is in. + ServiceName string `json:"serviceName"` + // ID for the SLO. + SloId string `json:"sloId"` + // Name of the SLO. + SloName string `json:"sloName"` + // SLO goal value. + Goal *float64 `json:"goal,omitempty"` + // Specific lookback period for the SLO. + LookbackPeriod *string `json:"lookbackPeriod,omitempty"` +} + +// NewSLOQuery creates a new SLOQuery object. +func NewSLOQuery() *SLOQuery { + return &SLOQuery{} +} + +// PromQL sub-query properties. +type PromQLQuery struct { + // GCP project to execute the query against. + ProjectName string `json:"projectName"` + // PromQL expression/query to be executed. + Expr string `json:"expr"` + // PromQL min step + Step string `json:"step"` +} + +// NewPromQLQuery creates a new PromQLQuery object. +func NewPromQLQuery() *PromQLQuery { + return &PromQLQuery{} +} + +// @deprecated This type is for migration purposes only. Replaced by TimeSeriesList Metric sub-query properties. +type MetricQuery struct { + // GCP project to execute the query against. + ProjectName string `json:"projectName"` + // Alignment function to be used. Defaults to ALIGN_MEAN. + PerSeriesAligner *string `json:"perSeriesAligner,omitempty"` + // Alignment period to use when regularizing data. Defaults to cloud-monitoring-auto. + AlignmentPeriod *string `json:"alignmentPeriod,omitempty"` + // Aliases can be set to modify the legend labels. e.g. {{metric.label.xxx}}. See docs for more detail. + AliasBy *string `json:"aliasBy,omitempty"` + EditorMode string `json:"editorMode"` + MetricType string `json:"metricType"` + // Reducer applied across a set of time-series values. Defaults to REDUCE_NONE. + CrossSeriesReducer string `json:"crossSeriesReducer"` + // Array of labels to group data by. + GroupBys []string `json:"groupBys,omitempty"` + // Array of filters to query data by. Labels that can be filtered on are defined by the metric. 
+ Filters []string `json:"filters,omitempty"` + MetricKind *MetricKind `json:"metricKind,omitempty"` + ValueType *string `json:"valueType,omitempty"` + View *string `json:"view,omitempty"` + // MQL query to be executed. + Query string `json:"query"` + // Preprocessor is not part of the API, but is used to store the preprocessor and not affect the UI for the rest of parameters + Preprocessor *PreprocessorType `json:"preprocessor,omitempty"` + // To disable the graphPeriod, it should explictly be set to 'disabled'. + GraphPeriod string `json:"graphPeriod,omitempty"` +} + +// NewMetricQuery creates a new MetricQuery object. +func NewMetricQuery() *MetricQuery { + return &MetricQuery{} +} + +type MetricKind string + +const ( + MetricKindMETRICKINDUNSPECIFIED MetricKind = "METRIC_KIND_UNSPECIFIED" + MetricKindGAUGE MetricKind = "GAUGE" + MetricKindDELTA MetricKind = "DELTA" + MetricKindCUMULATIVE MetricKind = "CUMULATIVE" +) + type ValueTypes string + +const ( + ValueTypesVALUETYPEUNSPECIFIED ValueTypes = "VALUE_TYPE_UNSPECIFIED" + ValueTypesBOOL ValueTypes = "BOOL" + ValueTypesINT64 ValueTypes = "INT64" + ValueTypesDOUBLE ValueTypes = "DOUBLE" + ValueTypesSTRING ValueTypes = "STRING" + ValueTypesDISTRIBUTION ValueTypes = "DISTRIBUTION" + ValueTypesMONEY ValueTypes = "MONEY" +) + +type AlignmentTypes string + +const ( + AlignmentTypesALIGNDELTA AlignmentTypes = "ALIGN_DELTA" + AlignmentTypesALIGNRATE AlignmentTypes = "ALIGN_RATE" + AlignmentTypesALIGNINTERPOLATE AlignmentTypes = "ALIGN_INTERPOLATE" + AlignmentTypesALIGNNEXTOLDER AlignmentTypes = "ALIGN_NEXT_OLDER" + AlignmentTypesALIGNMIN AlignmentTypes = "ALIGN_MIN" + AlignmentTypesALIGNMAX AlignmentTypes = "ALIGN_MAX" + AlignmentTypesALIGNMEAN AlignmentTypes = "ALIGN_MEAN" + AlignmentTypesALIGNCOUNT AlignmentTypes = "ALIGN_COUNT" + AlignmentTypesALIGNSUM AlignmentTypes = "ALIGN_SUM" + AlignmentTypesALIGNSTDDEV AlignmentTypes = "ALIGN_STDDEV" + AlignmentTypesALIGNCOUNTTRUE AlignmentTypes = "ALIGN_COUNT_TRUE" + AlignmentTypesALIGNCOUNTFALSE AlignmentTypes = "ALIGN_COUNT_FALSE" + AlignmentTypesALIGNFRACTIONTRUE AlignmentTypes = "ALIGN_FRACTION_TRUE" + AlignmentTypesALIGNPERCENTILE99 AlignmentTypes = "ALIGN_PERCENTILE_99" + AlignmentTypesALIGNPERCENTILE95 AlignmentTypes = "ALIGN_PERCENTILE_95" + AlignmentTypesALIGNPERCENTILE50 AlignmentTypes = "ALIGN_PERCENTILE_50" + AlignmentTypesALIGNPERCENTILE05 AlignmentTypes = "ALIGN_PERCENTILE_05" + AlignmentTypesALIGNPERCENTCHANGE AlignmentTypes = "ALIGN_PERCENT_CHANGE" + AlignmentTypesALIGNNONE AlignmentTypes = "ALIGN_NONE" +) + +// @deprecated Use TimeSeriesList instead. Legacy annotation query properties for migration purposes. +type LegacyCloudMonitoringAnnotationQuery struct { + // GCP project to execute the query against. + ProjectName string `json:"projectName"` + MetricType string `json:"metricType"` + // Query refId. + RefId string `json:"refId"` + // Array of filters to query data by. Labels that can be filtered on are defined by the metric. + Filters []string `json:"filters"` + MetricKind MetricKind `json:"metricKind"` + ValueType string `json:"valueType"` + // Annotation title. + Title string `json:"title"` + // Annotation text. + Text string `json:"text"` +} + +// NewLegacyCloudMonitoringAnnotationQuery creates a new LegacyCloudMonitoringAnnotationQuery object. +func NewLegacyCloudMonitoringAnnotationQuery() *LegacyCloudMonitoringAnnotationQuery { + return &LegacyCloudMonitoringAnnotationQuery{} +} + +// Query filter representation. +type Filter struct { + // Filter key. 
+ Key string `json:"key"` + // Filter operator. + Operator string `json:"operator"` + // Filter value. + Value string `json:"value"` + // Filter condition. + Condition *string `json:"condition,omitempty"` +} + +// NewFilter creates a new Filter object. +func NewFilter() *Filter { + return &Filter{} +} + +type MetricFindQueryTypes string + +const ( + MetricFindQueryTypesProjects MetricFindQueryTypes = "projects" + MetricFindQueryTypesServices MetricFindQueryTypes = "services" + MetricFindQueryTypesDefaultProject MetricFindQueryTypes = "defaultProject" + MetricFindQueryTypesMetricTypes MetricFindQueryTypes = "metricTypes" + MetricFindQueryTypesLabelKeys MetricFindQueryTypes = "labelKeys" + MetricFindQueryTypesLabelValues MetricFindQueryTypes = "labelValues" + MetricFindQueryTypesResourceTypes MetricFindQueryTypes = "resourceTypes" + MetricFindQueryTypesAggregations MetricFindQueryTypes = "aggregations" + MetricFindQueryTypesAligners MetricFindQueryTypes = "aligners" + MetricFindQueryTypesAlignmentPeriods MetricFindQueryTypes = "alignmentPeriods" + MetricFindQueryTypesSelectors MetricFindQueryTypes = "selectors" + MetricFindQueryTypesSLOServices MetricFindQueryTypes = "sloServices" + MetricFindQueryTypesSLO MetricFindQueryTypes = "slo" +) diff --git a/pkg/tsdb/cloud-monitoring/time_series_filter.go b/pkg/tsdb/cloud-monitoring/time_series_filter.go index baade93989a..7df6fd16f4e 100644 --- a/pkg/tsdb/cloud-monitoring/time_series_filter.go +++ b/pkg/tsdb/cloud-monitoring/time_series_filter.go @@ -50,7 +50,7 @@ func parseTimeSeriesResponse(queryRes *backend.DataResponse, if len(response.TimeSeries) > 0 { dl := query.buildDeepLink() aggregationAlignmentString := params.Get("aggregation.alignmentPeriod") - frames = addConfigData(frames, dl, response.Unit, &aggregationAlignmentString, logger) + frames = addConfigData(frames, dl, response.Unit, aggregationAlignmentString, logger) } queryRes.Frames = frames diff --git a/pkg/tsdb/cloud-monitoring/time_series_filter_test.go b/pkg/tsdb/cloud-monitoring/time_series_filter_test.go index c44949e6c51..b5e4d624d45 100644 --- a/pkg/tsdb/cloud-monitoring/time_series_filter_test.go +++ b/pkg/tsdb/cloud-monitoring/time_series_filter_test.go @@ -410,7 +410,7 @@ func TestTimeSeriesFilter(t *testing.T) { parameters: &dataquery.TimeSeriesQuery{ Query: "fetch gce_instance::compute.googleapis.com/instance/cpu/utilization | sum", ProjectName: "test", - GraphPeriod: strPtr("60s"), + GraphPeriod: "60s", }, } err = query.parseResponse(res, data, "", service.logger) diff --git a/pkg/tsdb/cloud-monitoring/time_series_query.go b/pkg/tsdb/cloud-monitoring/time_series_query.go index 169c151bc24..84f66a193a4 100644 --- a/pkg/tsdb/cloud-monitoring/time_series_query.go +++ b/pkg/tsdb/cloud-monitoring/time_series_query.go @@ -15,13 +15,13 @@ import ( func (timeSeriesQuery *cloudMonitoringTimeSeriesQuery) appendGraphPeriod(req *backend.QueryDataRequest) string { // GraphPeriod needs to be explicitly disabled. 
// If not set, the default behavior is to set an automatic value - if timeSeriesQuery.parameters.GraphPeriod == nil || *timeSeriesQuery.parameters.GraphPeriod != "disabled" { - if timeSeriesQuery.parameters.GraphPeriod == nil || *timeSeriesQuery.parameters.GraphPeriod == "auto" || *timeSeriesQuery.parameters.GraphPeriod == "" { + if timeSeriesQuery.parameters.GraphPeriod != "disabled" { + if timeSeriesQuery.parameters.GraphPeriod == "auto" || timeSeriesQuery.parameters.GraphPeriod == "" { intervalCalculator := gcmTime.NewCalculator(gcmTime.CalculatorOptions{}) interval := intervalCalculator.Calculate(req.Queries[0].TimeRange, time.Duration(timeSeriesQuery.IntervalMS/1000)*time.Second, req.Queries[0].MaxDataPoints) - timeSeriesQuery.parameters.GraphPeriod = &interval.Text + timeSeriesQuery.parameters.GraphPeriod = interval.Text } - return fmt.Sprintf(" | graph_period %s", *timeSeriesQuery.parameters.GraphPeriod) + return fmt.Sprintf(" | graph_period %s", timeSeriesQuery.parameters.GraphPeriod) } return "" } diff --git a/pkg/tsdb/cloud-monitoring/time_series_query_test.go b/pkg/tsdb/cloud-monitoring/time_series_query_test.go index 4097630f43d..4b354ccbb97 100644 --- a/pkg/tsdb/cloud-monitoring/time_series_query_test.go +++ b/pkg/tsdb/cloud-monitoring/time_series_query_test.go @@ -125,7 +125,7 @@ func TestTimeSeriesQuery(t *testing.T) { parameters: &dataquery.TimeSeriesQuery{ ProjectName: "test-proj", Query: "test-query", - GraphPeriod: strPtr("60s"), + GraphPeriod: "60s", }, timeRange: backend.TimeRange{ From: fromStart, @@ -145,7 +145,7 @@ func TestTimeSeriesQuery(t *testing.T) { }) t.Run("skips graph_period if disabled", func(t *testing.T) { - query := &cloudMonitoringTimeSeriesQuery{parameters: &dataquery.TimeSeriesQuery{GraphPeriod: strPtr("disabled")}} + query := &cloudMonitoringTimeSeriesQuery{parameters: &dataquery.TimeSeriesQuery{GraphPeriod: "disabled"}} assert.Equal(t, query.appendGraphPeriod(&backend.QueryDataRequest{Queries: []backend.DataQuery{{}}}), "") }) diff --git a/pkg/tsdb/cloudwatch/annotation_query.go b/pkg/tsdb/cloudwatch/annotation_query.go index fe17f837105..2eb61676354 100644 --- a/pkg/tsdb/cloudwatch/annotation_query.go +++ b/pkg/tsdb/cloudwatch/annotation_query.go @@ -12,7 +12,6 @@ import ( "github.com/grafana/grafana-plugin-sdk-go/backend" "github.com/grafana/grafana-plugin-sdk-go/data" "github.com/grafana/grafana/pkg/tsdb/cloudwatch/kinds/dataquery" - "github.com/grafana/grafana/pkg/tsdb/cloudwatch/utils" ) type annotationEvent struct { @@ -51,12 +50,7 @@ func (e *cloudWatchExecutor) executeAnnotationQuery(ctx context.Context, pluginC actionPrefix := model.ActionPrefix alarmNamePrefix := model.AlarmNamePrefix - region := "" - if model.Region != nil { - region = *model.Region - } - - cli, err := e.getCWClient(ctx, pluginCtx, region) + cli, err := e.getCWClient(ctx, pluginCtx, model.Region) if err != nil { result.Responses[query.RefID] = backend.ErrorResponseWithErrorSource(fmt.Errorf("%v: %w", "failed to get client", err)) return result, nil @@ -84,27 +78,23 @@ func (e *cloudWatchExecutor) executeAnnotationQuery(ctx context.Context, pluginC result.Responses[query.RefID] = backend.ErrorResponseWithErrorSource(backend.DownstreamError(fmt.Errorf("%v: %w", "failed to call cloudwatch:DescribeAlarms", err))) return result, nil } - alarmNames = filterAlarms(resp, utils.Depointerizer(model.Namespace), metricName, dimensions, statistic, period) + alarmNames = filterAlarms(resp, model.Namespace, metricName, dimensions, statistic, period) } else { - if model.Region == nil || 
model.Namespace == nil || metricName == "" || statistic == "" { + if model.Region == "" || model.Namespace == "" || metricName == "" || statistic == "" { return result, backend.DownstreamError(errors.New("invalid annotations query")) } var qd []*cloudwatch.Dimension for k, v := range dimensions { - if vv, ok := v.([]any); ok { - for _, vvv := range vv { - if vvvv, ok := vvv.(string); ok { - qd = append(qd, &cloudwatch.Dimension{ - Name: aws.String(k), - Value: aws.String(vvvv), - }) - } - } + for _, vvv := range v.ArrayOfString { + qd = append(qd, &cloudwatch.Dimension{ + Name: aws.String(k), + Value: aws.String(vvv), + }) } } params := &cloudwatch.DescribeAlarmsForMetricInput{ - Namespace: aws.String(utils.Depointerizer(model.Namespace)), + Namespace: aws.String(model.Namespace), MetricName: aws.String(metricName), Dimensions: qd, Statistic: aws.String(statistic), @@ -172,7 +162,7 @@ func transformAnnotationToTable(annotations []*annotationEvent, query backend.Da } func filterAlarms(alarms *cloudwatch.DescribeAlarmsOutput, namespace string, metricName string, - dimensions map[string]any, statistic string, period int64) []*string { + dimensions dataquery.Dimensions, statistic string, period int64) []*string { alarmNames := make([]*string, 0) for _, alarm := range alarms.MetricAlarms { diff --git a/pkg/tsdb/cloudwatch/cloudwatch.go b/pkg/tsdb/cloudwatch/cloudwatch.go index d8d4d4fa5c4..74ae89e7748 100644 --- a/pkg/tsdb/cloudwatch/cloudwatch.go +++ b/pkg/tsdb/cloudwatch/cloudwatch.go @@ -211,8 +211,8 @@ func (e *cloudWatchExecutor) QueryData(ctx context.Context, req *backend.QueryDa // Since `model.Type` is set during execution on the frontend by the query runner and isn't saved with the query, we are checking here is // missing the `model.Type` property and if it is a log query in order to determine if it is a public dashboard query. queryMode := "" - if model.QueryMode != nil { - queryMode = string(*model.QueryMode) + if model.QueryMode != "" { + queryMode = string(model.QueryMode) } fromPublicDashboard := model.Type == "" && queryMode == logsQueryMode isSyncLogQuery := ((fromAlert || fromExpression) && queryMode == logsQueryMode) || fromPublicDashboard diff --git a/pkg/tsdb/cloudwatch/kinds/dataquery/types_dataquery_gen.go b/pkg/tsdb/cloudwatch/kinds/dataquery/types_dataquery_gen.go index 2b9acf11dec..a2f35aa9eb4 100644 --- a/pkg/tsdb/cloudwatch/kinds/dataquery/types_dataquery_gen.go +++ b/pkg/tsdb/cloudwatch/kinds/dataquery/types_dataquery_gen.go @@ -7,458 +7,727 @@ // // Run 'make gen-cue' from repository root to regenerate. +// Code generated - EDITING IS FUTILE. DO NOT EDIT. + package dataquery -// Defines values for CloudWatchQueryMode. +import ( + json "encoding/json" + errors "errors" + fmt "fmt" +) + +type MetricStat struct { + // AWS region to query for the metric + Region string `json:"region"` + // A namespace is a container for CloudWatch metrics. Metrics in different namespaces are isolated from each other, so that metrics from different applications are not mistakenly aggregated into the same statistics. For example, Amazon EC2 uses the AWS/EC2 namespace. + Namespace string `json:"namespace"` + // Name of the metric + MetricName *string `json:"metricName,omitempty"` + // The dimensions of the metric + Dimensions *Dimensions `json:"dimensions,omitempty"` + // Only show metrics that exactly match all defined dimension names. + MatchExact *bool `json:"matchExact,omitempty"` + // The length of time associated with a specific Amazon CloudWatch statistic. 
Can be specified by a number of seconds, 'auto', or as a duration string e.g. '15m' being 15 minutes + Period *string `json:"period,omitempty"` + // The ID of the AWS account to query for the metric, specifying `all` will query all accounts that the monitoring account is permitted to query. + AccountId *string `json:"accountId,omitempty"` + // Metric data aggregations over specified periods of time. For detailed definitions of the statistics supported by CloudWatch, see https://docs.aws.amazon.com/AmazonCloudWatch/latest/monitoring/Statistics-definitions.html. + Statistic *string `json:"statistic,omitempty"` + // @deprecated use statistic + Statistics []string `json:"statistics,omitempty"` +} + +// NewMetricStat creates a new MetricStat object. +func NewMetricStat() *MetricStat { + return &MetricStat{} +} + +// A name/value pair that is part of the identity of a metric. For example, you can get statistics for a specific EC2 instance by specifying the InstanceId dimension when you search for metrics. +type Dimensions map[string]StringOrArrayOfString + +// Shape of a CloudWatch Metrics query +type CloudWatchMetricsQuery struct { + // Whether a query is a Metrics, Logs, or Annotations query + QueryMode *CloudWatchQueryMode `json:"queryMode,omitempty"` + // Whether to use a metric search or metric insights query + MetricQueryType *MetricQueryType `json:"metricQueryType,omitempty"` + // Whether to use the query builder or code editor to create the query + MetricEditorMode *MetricEditorMode `json:"metricEditorMode,omitempty"` + // ID can be used to reference other queries in math expressions. The ID can include numbers, letters, and underscore, and must start with a lowercase letter. + Id string `json:"id"` + // Deprecated: use label + // @deprecated use label + Alias *string `json:"alias,omitempty"` + // Change the time series legend names using dynamic labels. See https://docs.aws.amazon.com/AmazonCloudWatch/latest/monitoring/graph-dynamic-labels.html for more details. + Label *string `json:"label,omitempty"` + // Math expression query + Expression *string `json:"expression,omitempty"` + // When the metric query type is set to `Insights`, this field is used to specify the query string. + SqlExpression *string `json:"sqlExpression,omitempty"` + // A unique identifier for the query within the list of targets. + // In server side expressions, the refId is used as a variable name to identify results. + // By default, the UI will assign A->Z; however setting meaningful names may be useful. + RefId string `json:"refId"` + // If hide is set to true, Grafana will filter out the response(s) associated with this query before returning it to the panel. + Hide *bool `json:"hide,omitempty"` + // Specify the query flavor + // TODO make this required and give it a default + QueryType *string `json:"queryType,omitempty"` + // AWS region to query for the metric + Region string `json:"region"` + // A namespace is a container for CloudWatch metrics. Metrics in different namespaces are isolated from each other, so that metrics from different applications are not mistakenly aggregated into the same statistics. For example, Amazon EC2 uses the AWS/EC2 namespace. + Namespace string `json:"namespace"` + // Name of the metric + MetricName *string `json:"metricName,omitempty"` + // The dimensions of the metric + Dimensions *Dimensions `json:"dimensions,omitempty"` + // Only show metrics that exactly match all defined dimension names. 
+ MatchExact *bool `json:"matchExact,omitempty"` + // The length of time associated with a specific Amazon CloudWatch statistic. Can be specified by a number of seconds, 'auto', or as a duration string e.g. '15m' being 15 minutes + Period *string `json:"period,omitempty"` + // The ID of the AWS account to query for the metric, specifying `all` will query all accounts that the monitoring account is permitted to query. + AccountId *string `json:"accountId,omitempty"` + // Metric data aggregations over specified periods of time. For detailed definitions of the statistics supported by CloudWatch, see https://docs.aws.amazon.com/AmazonCloudWatch/latest/monitoring/Statistics-definitions.html. + Statistic *string `json:"statistic,omitempty"` + // When the metric query type is set to `Insights` and the `metricEditorMode` is set to `Builder`, this field is used to build up an object representation of a SQL query. + Sql *SQLExpression `json:"sql,omitempty"` + // For mixed data sources the selected datasource is on the query level. + // For non mixed scenarios this is undefined. + // TODO find a better way to do this ^ that's friendly to schema + // TODO this shouldn't be unknown but DataSourceRef | null + Datasource any `json:"datasource,omitempty"` + // @deprecated use statistic + Statistics []string `json:"statistics,omitempty"` +} + +// NewCloudWatchMetricsQuery creates a new CloudWatchMetricsQuery object. +func NewCloudWatchMetricsQuery() *CloudWatchMetricsQuery { + return &CloudWatchMetricsQuery{} +} + +type CloudWatchQueryMode string + const ( - CloudWatchQueryModeAnnotations CloudWatchQueryMode = "Annotations" - CloudWatchQueryModeLogs CloudWatchQueryMode = "Logs" CloudWatchQueryModeMetrics CloudWatchQueryMode = "Metrics" + CloudWatchQueryModeLogs CloudWatchQueryMode = "Logs" + CloudWatchQueryModeAnnotations CloudWatchQueryMode = "Annotations" ) -// Defines values for LogsQueryLanguage. +type MetricQueryType int64 + const ( - LogsQueryLanguageCWLI LogsQueryLanguage = "CWLI" - LogsQueryLanguagePPL LogsQueryLanguage = "PPL" - LogsQueryLanguageSQL LogsQueryLanguage = "SQL" + MetricQueryTypeSearch MetricQueryType = 0 + MetricQueryTypeInsights MetricQueryType = 1 ) -// Defines values for MetricEditorMode. +type MetricEditorMode int64 + const ( - MetricEditorModeN0 MetricEditorMode = 0 - MetricEditorModeN1 MetricEditorMode = 1 + MetricEditorModeBuilder MetricEditorMode = 0 + MetricEditorModeCode MetricEditorMode = 1 ) -// Defines values for MetricQueryType. -const ( - MetricQueryTypeN0 MetricQueryType = 0 - MetricQueryTypeN1 MetricQueryType = 1 -) +type SQLExpression struct { + // SELECT part of the SQL expression + Select *QueryEditorFunctionExpression `json:"select,omitempty"` + // FROM part of the SQL expression + From *QueryEditorPropertyExpressionOrQueryEditorFunctionExpression `json:"from,omitempty"` + // WHERE part of the SQL expression + Where *QueryEditorArrayExpression `json:"where,omitempty"` + // GROUP BY part of the SQL expression + GroupBy *QueryEditorArrayExpression `json:"groupBy,omitempty"` + // ORDER BY part of the SQL expression + OrderBy *QueryEditorFunctionExpression `json:"orderBy,omitempty"` + // The sort order of the SQL expression, `ASC` or `DESC` + OrderByDirection *string `json:"orderByDirection,omitempty"` + // LIMIT part of the SQL expression + Limit *int64 `json:"limit,omitempty"` +} -// Defines values for QueryEditorArrayExpressionType. 
-const ( - QueryEditorArrayExpressionTypeAnd QueryEditorArrayExpressionType = "and" - QueryEditorArrayExpressionTypeOr QueryEditorArrayExpressionType = "or" -) +// NewSQLExpression creates a new SQLExpression object. +func NewSQLExpression() *SQLExpression { + return &SQLExpression{} +} + +type QueryEditorFunctionExpression struct { + Type string `json:"type"` + Name *string `json:"name,omitempty"` + Parameters []QueryEditorFunctionParameterExpression `json:"parameters,omitempty"` +} + +// NewQueryEditorFunctionExpression creates a new QueryEditorFunctionExpression object. +func NewQueryEditorFunctionExpression() *QueryEditorFunctionExpression { + return &QueryEditorFunctionExpression{ + Type: "function", + } +} + +type QueryEditorExpressionType string -// Defines values for QueryEditorExpressionType. const ( - QueryEditorExpressionTypeAnd QueryEditorExpressionType = "and" - QueryEditorExpressionTypeFunction QueryEditorExpressionType = "function" - QueryEditorExpressionTypeFunctionParameter QueryEditorExpressionType = "functionParameter" - QueryEditorExpressionTypeGroupBy QueryEditorExpressionType = "groupBy" + QueryEditorExpressionTypeProperty QueryEditorExpressionType = "property" QueryEditorExpressionTypeOperator QueryEditorExpressionType = "operator" QueryEditorExpressionTypeOr QueryEditorExpressionType = "or" - QueryEditorExpressionTypeProperty QueryEditorExpressionType = "property" + QueryEditorExpressionTypeAnd QueryEditorExpressionType = "and" + QueryEditorExpressionTypeGroupBy QueryEditorExpressionType = "groupBy" + QueryEditorExpressionTypeFunction QueryEditorExpressionType = "function" + QueryEditorExpressionTypeFunctionParameter QueryEditorExpressionType = "functionParameter" ) -// Defines values for QueryEditorFunctionExpressionType. -const ( - QueryEditorFunctionExpressionTypeAnd QueryEditorFunctionExpressionType = "and" - QueryEditorFunctionExpressionTypeFunction QueryEditorFunctionExpressionType = "function" - QueryEditorFunctionExpressionTypeFunctionParameter QueryEditorFunctionExpressionType = "functionParameter" - QueryEditorFunctionExpressionTypeGroupBy QueryEditorFunctionExpressionType = "groupBy" - QueryEditorFunctionExpressionTypeOperator QueryEditorFunctionExpressionType = "operator" - QueryEditorFunctionExpressionTypeOr QueryEditorFunctionExpressionType = "or" - QueryEditorFunctionExpressionTypeProperty QueryEditorFunctionExpressionType = "property" -) +type QueryEditorFunctionParameterExpression struct { + Type string `json:"type"` + Name *string `json:"name,omitempty"` +} -// Defines values for QueryEditorFunctionParameterExpressionType. -const ( - QueryEditorFunctionParameterExpressionTypeAnd QueryEditorFunctionParameterExpressionType = "and" - QueryEditorFunctionParameterExpressionTypeFunction QueryEditorFunctionParameterExpressionType = "function" - QueryEditorFunctionParameterExpressionTypeFunctionParameter QueryEditorFunctionParameterExpressionType = "functionParameter" - QueryEditorFunctionParameterExpressionTypeGroupBy QueryEditorFunctionParameterExpressionType = "groupBy" - QueryEditorFunctionParameterExpressionTypeOperator QueryEditorFunctionParameterExpressionType = "operator" - QueryEditorFunctionParameterExpressionTypeOr QueryEditorFunctionParameterExpressionType = "or" - QueryEditorFunctionParameterExpressionTypeProperty QueryEditorFunctionParameterExpressionType = "property" -) +// NewQueryEditorFunctionParameterExpression creates a new QueryEditorFunctionParameterExpression object. 
+func NewQueryEditorFunctionParameterExpression() *QueryEditorFunctionParameterExpression { + return &QueryEditorFunctionParameterExpression{ + Type: "functionParameter", + } +} -// Defines values for QueryEditorGroupByExpressionType. -const ( - QueryEditorGroupByExpressionTypeAnd QueryEditorGroupByExpressionType = "and" - QueryEditorGroupByExpressionTypeFunction QueryEditorGroupByExpressionType = "function" - QueryEditorGroupByExpressionTypeFunctionParameter QueryEditorGroupByExpressionType = "functionParameter" - QueryEditorGroupByExpressionTypeGroupBy QueryEditorGroupByExpressionType = "groupBy" - QueryEditorGroupByExpressionTypeOperator QueryEditorGroupByExpressionType = "operator" - QueryEditorGroupByExpressionTypeOr QueryEditorGroupByExpressionType = "or" - QueryEditorGroupByExpressionTypeProperty QueryEditorGroupByExpressionType = "property" -) +type QueryEditorPropertyExpression struct { + Type string `json:"type"` + Property QueryEditorProperty `json:"property"` +} -// Defines values for QueryEditorOperatorExpressionType. -const ( - QueryEditorOperatorExpressionTypeAnd QueryEditorOperatorExpressionType = "and" - QueryEditorOperatorExpressionTypeFunction QueryEditorOperatorExpressionType = "function" - QueryEditorOperatorExpressionTypeFunctionParameter QueryEditorOperatorExpressionType = "functionParameter" - QueryEditorOperatorExpressionTypeGroupBy QueryEditorOperatorExpressionType = "groupBy" - QueryEditorOperatorExpressionTypeOperator QueryEditorOperatorExpressionType = "operator" - QueryEditorOperatorExpressionTypeOr QueryEditorOperatorExpressionType = "or" - QueryEditorOperatorExpressionTypeProperty QueryEditorOperatorExpressionType = "property" -) +// NewQueryEditorPropertyExpression creates a new QueryEditorPropertyExpression object. +func NewQueryEditorPropertyExpression() *QueryEditorPropertyExpression { + return &QueryEditorPropertyExpression{ + Type: "property", + Property: *NewQueryEditorProperty(), + } +} -// Defines values for QueryEditorPropertyExpressionType. -const ( - QueryEditorPropertyExpressionTypeAnd QueryEditorPropertyExpressionType = "and" - QueryEditorPropertyExpressionTypeFunction QueryEditorPropertyExpressionType = "function" - QueryEditorPropertyExpressionTypeFunctionParameter QueryEditorPropertyExpressionType = "functionParameter" - QueryEditorPropertyExpressionTypeGroupBy QueryEditorPropertyExpressionType = "groupBy" - QueryEditorPropertyExpressionTypeOperator QueryEditorPropertyExpressionType = "operator" - QueryEditorPropertyExpressionTypeOr QueryEditorPropertyExpressionType = "or" - QueryEditorPropertyExpressionTypeProperty QueryEditorPropertyExpressionType = "property" -) +type QueryEditorGroupByExpression struct { + Type string `json:"type"` + Property QueryEditorProperty `json:"property"` +} + +// NewQueryEditorGroupByExpression creates a new QueryEditorGroupByExpression object. +func NewQueryEditorGroupByExpression() *QueryEditorGroupByExpression { + return &QueryEditorGroupByExpression{ + Type: "groupBy", + Property: *NewQueryEditorProperty(), + } +} + +type QueryEditorOperatorExpression struct { + Type string `json:"type"` + Property QueryEditorProperty `json:"property"` + // TS type is operator: QueryEditorOperator, extended in veneer + Operator QueryEditorOperator `json:"operator"` +} + +// NewQueryEditorOperatorExpression creates a new QueryEditorOperatorExpression object. 
+func NewQueryEditorOperatorExpression() *QueryEditorOperatorExpression { + return &QueryEditorOperatorExpression{ + Type: "operator", + Property: *NewQueryEditorProperty(), + Operator: *NewQueryEditorOperator(), + } +} + +// TS type is QueryEditorOperator, extended in veneer +type QueryEditorOperator struct { + Name *string `json:"name,omitempty"` + Value *StringOrBoolOrInt64OrArrayOfQueryEditorOperatorType `json:"value,omitempty"` +} + +// NewQueryEditorOperator creates a new QueryEditorOperator object. +func NewQueryEditorOperator() *QueryEditorOperator { + return &QueryEditorOperator{} +} + +type QueryEditorOperatorValueType = StringOrBoolOrInt64OrArrayOfQueryEditorOperatorType + +// NewQueryEditorOperatorValueType creates a new QueryEditorOperatorValueType object. +func NewQueryEditorOperatorValueType() *QueryEditorOperatorValueType { + return NewStringOrBoolOrInt64OrArrayOfQueryEditorOperatorType() +} + +type QueryEditorOperatorType = StringOrBoolOrInt64 + +// NewQueryEditorOperatorType creates a new QueryEditorOperatorType object. +func NewQueryEditorOperatorType() *QueryEditorOperatorType { + return NewStringOrBoolOrInt64() +} + +type QueryEditorProperty struct { + Type QueryEditorPropertyType `json:"type"` + Name *string `json:"name,omitempty"` +} + +// NewQueryEditorProperty creates a new QueryEditorProperty object. +func NewQueryEditorProperty() *QueryEditorProperty { + return &QueryEditorProperty{} +} + +type QueryEditorPropertyType string -// Defines values for QueryEditorPropertyType. const ( QueryEditorPropertyTypeString QueryEditorPropertyType = "string" ) +type QueryEditorArrayExpression struct { + Type QueryEditorArrayExpressionType `json:"type"` + Expressions ArrayOfQueryEditorExpressionOrArrayOfQueryEditorArrayExpression `json:"expressions"` +} + +// NewQueryEditorArrayExpression creates a new QueryEditorArrayExpression object. +func NewQueryEditorArrayExpression() *QueryEditorArrayExpression { + return &QueryEditorArrayExpression{ + Expressions: *NewArrayOfQueryEditorExpressionOrArrayOfQueryEditorArrayExpression(), + } +} + +type QueryEditorExpression any + +type LogsQueryLanguage string + +const ( + LogsQueryLanguageCWLI LogsQueryLanguage = "CWLI" + LogsQueryLanguageSQL LogsQueryLanguage = "SQL" + LogsQueryLanguagePPL LogsQueryLanguage = "PPL" +) + +// Shape of a CloudWatch Logs query +type CloudWatchLogsQuery struct { + // Whether a query is a Metrics, Logs, or Annotations query + QueryMode CloudWatchQueryMode `json:"queryMode"` + Id string `json:"id"` + // AWS region to query for the logs + Region string `json:"region"` + // The CloudWatch Logs Insights query to execute + Expression *string `json:"expression,omitempty"` + // Fields to group the results by, this field is automatically populated whenever the query is updated + StatsGroups []string `json:"statsGroups,omitempty"` + // Log groups to query + LogGroups []LogGroup `json:"logGroups,omitempty"` + // @deprecated use logGroups + LogGroupNames []string `json:"logGroupNames,omitempty"` + // A unique identifier for the query within the list of targets. + // In server side expressions, the refId is used as a variable name to identify results. + // By default, the UI will assign A->Z; however setting meaningful names may be useful. + RefId string `json:"refId"` + // If hide is set to true, Grafana will filter out the response(s) associated with this query before returning it to the panel. 
+ Hide *bool `json:"hide,omitempty"` + // Specify the query flavor + // TODO make this required and give it a default + QueryType *string `json:"queryType,omitempty"` + // Language used for querying logs, can be CWLI, SQL, or PPL. If empty, the default language is CWLI. + QueryLanguage *LogsQueryLanguage `json:"queryLanguage,omitempty"` + // For mixed data sources the selected datasource is on the query level. + // For non mixed scenarios this is undefined. + // TODO find a better way to do this ^ that's friendly to schema + // TODO this shouldn't be unknown but DataSourceRef | null + Datasource any `json:"datasource,omitempty"` +} + +// NewCloudWatchLogsQuery creates a new CloudWatchLogsQuery object. +func NewCloudWatchLogsQuery() *CloudWatchLogsQuery { + return &CloudWatchLogsQuery{} +} + +type LogGroup struct { + // ARN of the log group + Arn string `json:"arn"` + // Name of the log group + Name string `json:"name"` + // AccountId of the log group + AccountId *string `json:"accountId,omitempty"` + // Label of the log group + AccountLabel *string `json:"accountLabel,omitempty"` +} + +// NewLogGroup creates a new LogGroup object. +func NewLogGroup() *LogGroup { + return &LogGroup{} +} + // Shape of a CloudWatch Annotation query -// // TS type is CloudWatchDefaultQuery = Omit & CloudWatchMetricsQuery, declared in veneer // #CloudWatchDefaultQuery: #CloudWatchLogsQuery & #CloudWatchMetricsQuery @cuetsy(kind="type") type CloudWatchAnnotationQuery struct { - // The ID of the AWS account to query for the metric, specifying `all` will query all accounts that the monitoring account is permitted to query. - AccountId *string `json:"accountId,omitempty"` - + // Whether a query is a Metrics, Logs, or Annotations query + QueryMode CloudWatchQueryMode `json:"queryMode"` + // Enable matching on the prefix of the action name or alarm name, specify the prefixes with actionPrefix and/or alarmNamePrefix + PrefixMatching *bool `json:"prefixMatching,omitempty"` // Use this parameter to filter the results of the operation to only those alarms // that use a certain alarm action. For example, you could specify the ARN of // an SNS topic to find all alarms that send notifications to that topic. // e.g. `arn:aws:sns:us-east-1:123456789012:my-app-` would match `arn:aws:sns:us-east-1:123456789012:my-app-action` // but not match `arn:aws:sns:us-east-1:123456789012:your-app-action` ActionPrefix *string `json:"actionPrefix,omitempty"` - - // An alarm name prefix. If you specify this parameter, you receive information - // about all alarms that have names that start with this prefix. - // e.g. `my-team-service-` would match `my-team-service-high-cpu` but not match `your-team-service-high-cpu` - AlarmNamePrefix *string `json:"alarmNamePrefix,omitempty"` - - // For mixed data sources the selected datasource is on the query level. - // For non mixed scenarios this is undefined. - // TODO find a better way to do this ^ that's friendly to schema - // TODO this shouldn't be unknown but DataSourceRef | null - Datasource *any `json:"datasource,omitempty"` - - // A name/value pair that is part of the identity of a metric. For example, you can get statistics for a specific EC2 instance by specifying the InstanceId dimension when you search for metrics. - Dimensions *Dimensions `json:"dimensions,omitempty"` - - // If hide is set to true, Grafana will filter out the response(s) associated with this query before returning it to the panel. 
- Hide *bool `json:"hide,omitempty"` - - // Only show metrics that exactly match all defined dimension names. - MatchExact *bool `json:"matchExact,omitempty"` - - // Name of the metric - MetricName *string `json:"metricName,omitempty"` - - // A namespace is a container for CloudWatch metrics. Metrics in different namespaces are isolated from each other, so that metrics from different applications are not mistakenly aggregated into the same statistics. For example, Amazon EC2 uses the AWS/EC2 namespace. - Namespace *string `json:"namespace,omitempty"` - - // The length of time associated with a specific Amazon CloudWatch statistic. Can be specified by a number of seconds, 'auto', or as a duration string e.g. '15m' being 15 minutes - Period *string `json:"period,omitempty"` - - // Enable matching on the prefix of the action name or alarm name, specify the prefixes with actionPrefix and/or alarmNamePrefix - PrefixMatching *bool `json:"prefixMatching,omitempty"` - QueryMode *CloudWatchQueryMode `json:"queryMode,omitempty"` - - // Specify the query flavor - // TODO make this required and give it a default - QueryType *string `json:"queryType,omitempty"` - - // A unique identifier for the query within the list of targets. - // In server side expressions, the refId is used as a variable name to identify results. - // By default, the UI will assign A->Z; however setting meaningful names may be useful. - RefId *string `json:"refId,omitempty"` - - // AWS region to query for the metric - Region *string `json:"region,omitempty"` - - // Metric data aggregations over specified periods of time. For detailed definitions of the statistics supported by CloudWatch, see https://docs.aws.amazon.com/AmazonCloudWatch/latest/monitoring/Statistics-definitions.html. - Statistic *string `json:"statistic,omitempty"` - - // @deprecated use statistic - Statistics []string `json:"statistics,omitempty"` -} - -// CloudWatchDataQuery defines model for CloudWatchDataQuery. -type CloudWatchDataQuery = map[string]any - -// Shape of a CloudWatch Logs query -type CloudWatchLogsQuery struct { - // For mixed data sources the selected datasource is on the query level. - // For non mixed scenarios this is undefined. - // TODO find a better way to do this ^ that's friendly to schema - // TODO this shouldn't be unknown but DataSourceRef | null - Datasource *any `json:"datasource,omitempty"` - - // The CloudWatch Logs Insights query to execute - Expression *string `json:"expression,omitempty"` - - // If hide is set to true, Grafana will filter out the response(s) associated with this query before returning it to the panel. - Hide *bool `json:"hide,omitempty"` - Id *string `json:"id,omitempty"` - - // @deprecated use logGroups - LogGroupNames []string `json:"logGroupNames,omitempty"` - - // Log groups to query - LogGroups []LogGroup `json:"logGroups,omitempty"` - QueryLanguage *LogsQueryLanguage `json:"queryLanguage,omitempty"` - QueryMode *CloudWatchQueryMode `json:"queryMode,omitempty"` - - // Specify the query flavor - // TODO make this required and give it a default - QueryType *string `json:"queryType,omitempty"` - - // A unique identifier for the query within the list of targets. - // In server side expressions, the refId is used as a variable name to identify results. - // By default, the UI will assign A->Z; however setting meaningful names may be useful. 
- RefId *string `json:"refId,omitempty"` - - // AWS region to query for the logs - Region *string `json:"region,omitempty"` - - // Fields to group the results by, this field is automatically populated whenever the query is updated - StatsGroups []string `json:"statsGroups,omitempty"` -} - -// Shape of a CloudWatch Metrics query -type CloudWatchMetricsQuery struct { - // The ID of the AWS account to query for the metric, specifying `all` will query all accounts that the monitoring account is permitted to query. - AccountId *string `json:"accountId,omitempty"` - - // Deprecated: use label - // @deprecated use label - Alias *string `json:"alias,omitempty"` - - // For mixed data sources the selected datasource is on the query level. - // For non mixed scenarios this is undefined. - // TODO find a better way to do this ^ that's friendly to schema - // TODO this shouldn't be unknown but DataSourceRef | null - Datasource *any `json:"datasource,omitempty"` - - // A name/value pair that is part of the identity of a metric. For example, you can get statistics for a specific EC2 instance by specifying the InstanceId dimension when you search for metrics. - Dimensions *Dimensions `json:"dimensions,omitempty"` - - // Math expression query - Expression *string `json:"expression,omitempty"` - - // If hide is set to true, Grafana will filter out the response(s) associated with this query before returning it to the panel. - Hide *bool `json:"hide,omitempty"` - - // ID can be used to reference other queries in math expressions. The ID can include numbers, letters, and underscore, and must start with a lowercase letter. - Id *string `json:"id,omitempty"` - - // Change the time series legend names using dynamic labels. See https://docs.aws.amazon.com/AmazonCloudWatch/latest/monitoring/graph-dynamic-labels.html for more details. - Label *string `json:"label,omitempty"` - - // Only show metrics that exactly match all defined dimension names. - MatchExact *bool `json:"matchExact,omitempty"` - MetricEditorMode *MetricEditorMode `json:"metricEditorMode,omitempty"` - - // Name of the metric - MetricName *string `json:"metricName,omitempty"` - MetricQueryType *MetricQueryType `json:"metricQueryType,omitempty"` - - // A namespace is a container for CloudWatch metrics. Metrics in different namespaces are isolated from each other, so that metrics from different applications are not mistakenly aggregated into the same statistics. For example, Amazon EC2 uses the AWS/EC2 namespace. - Namespace *string `json:"namespace,omitempty"` - - // The length of time associated with a specific Amazon CloudWatch statistic. Can be specified by a number of seconds, 'auto', or as a duration string e.g. '15m' being 15 minutes - Period *string `json:"period,omitempty"` - QueryMode *CloudWatchQueryMode `json:"queryMode,omitempty"` - - // Specify the query flavor - // TODO make this required and give it a default - QueryType *string `json:"queryType,omitempty"` - - // A unique identifier for the query within the list of targets. - // In server side expressions, the refId is used as a variable name to identify results. - // By default, the UI will assign A->Z; however setting meaningful names may be useful. - RefId *string `json:"refId,omitempty"` - - // AWS region to query for the metric - Region *string `json:"region,omitempty"` - Sql *SQLExpression `json:"sql,omitempty"` - - // When the metric query type is set to `Insights`, this field is used to specify the query string. 
- SqlExpression *string `json:"sqlExpression,omitempty"` - - // Metric data aggregations over specified periods of time. For detailed definitions of the statistics supported by CloudWatch, see https://docs.aws.amazon.com/AmazonCloudWatch/latest/monitoring/Statistics-definitions.html. - Statistic *string `json:"statistic,omitempty"` - - // @deprecated use statistic - Statistics []string `json:"statistics,omitempty"` -} - -// CloudWatchQueryMode defines model for CloudWatchQueryMode. -type CloudWatchQueryMode string - -// These are the common properties available to all queries in all datasources. -// Specific implementations will *extend* this interface, adding the required -// properties for the given context. -type DataQuery struct { - // For mixed data sources the selected datasource is on the query level. - // For non mixed scenarios this is undefined. - // TODO find a better way to do this ^ that's friendly to schema - // TODO this shouldn't be unknown but DataSourceRef | null - Datasource *any `json:"datasource,omitempty"` - - // If hide is set to true, Grafana will filter out the response(s) associated with this query before returning it to the panel. - Hide *bool `json:"hide,omitempty"` - - // Specify the query flavor - // TODO make this required and give it a default - QueryType *string `json:"queryType,omitempty"` - // A unique identifier for the query within the list of targets. // In server side expressions, the refId is used as a variable name to identify results. // By default, the UI will assign A->Z; however setting meaningful names may be useful. RefId string `json:"refId"` -} - -// A name/value pair that is part of the identity of a metric. For example, you can get statistics for a specific EC2 instance by specifying the InstanceId dimension when you search for metrics. -type Dimensions map[string]any - -// LogGroup defines model for LogGroup. -type LogGroup struct { - // AccountId of the log group - AccountId *string `json:"accountId,omitempty"` - - // Label of the log group - AccountLabel *string `json:"accountLabel,omitempty"` - - // ARN of the log group - Arn string `json:"arn"` - - // Name of the log group - Name string `json:"name"` -} - -// LogsQueryLanguage defines model for LogsQueryLanguage. -type LogsQueryLanguage string - -// MetricEditorMode defines model for MetricEditorMode. -type MetricEditorMode int - -// MetricQueryType defines model for MetricQueryType. -type MetricQueryType int - -// MetricStat defines model for MetricStat. -type MetricStat struct { - // The ID of the AWS account to query for the metric, specifying `all` will query all accounts that the monitoring account is permitted to query. - AccountId *string `json:"accountId,omitempty"` - - // A name/value pair that is part of the identity of a metric. For example, you can get statistics for a specific EC2 instance by specifying the InstanceId dimension when you search for metrics. - Dimensions *Dimensions `json:"dimensions,omitempty"` - - // Only show metrics that exactly match all defined dimension names. - MatchExact *bool `json:"matchExact,omitempty"` - - // Name of the metric - MetricName *string `json:"metricName,omitempty"` - - // A namespace is a container for CloudWatch metrics. Metrics in different namespaces are isolated from each other, so that metrics from different applications are not mistakenly aggregated into the same statistics. For example, Amazon EC2 uses the AWS/EC2 namespace. 
- Namespace string `json:"namespace"` - - // The length of time associated with a specific Amazon CloudWatch statistic. Can be specified by a number of seconds, 'auto', or as a duration string e.g. '15m' being 15 minutes - Period *string `json:"period,omitempty"` - + // If hide is set to true, Grafana will filter out the response(s) associated with this query before returning it to the panel. + Hide *bool `json:"hide,omitempty"` + // Specify the query flavor + // TODO make this required and give it a default + QueryType *string `json:"queryType,omitempty"` // AWS region to query for the metric Region string `json:"region"` - + // A namespace is a container for CloudWatch metrics. Metrics in different namespaces are isolated from each other, so that metrics from different applications are not mistakenly aggregated into the same statistics. For example, Amazon EC2 uses the AWS/EC2 namespace. + Namespace string `json:"namespace"` + // Name of the metric + MetricName *string `json:"metricName,omitempty"` + // The dimensions of the metric + Dimensions *Dimensions `json:"dimensions,omitempty"` + // Only show metrics that exactly match all defined dimension names. + MatchExact *bool `json:"matchExact,omitempty"` + // The length of time associated with a specific Amazon CloudWatch statistic. Can be specified by a number of seconds, 'auto', or as a duration string e.g. '15m' being 15 minutes + Period *string `json:"period,omitempty"` + // The ID of the AWS account to query for the metric, specifying `all` will query all accounts that the monitoring account is permitted to query. + AccountId *string `json:"accountId,omitempty"` // Metric data aggregations over specified periods of time. For detailed definitions of the statistics supported by CloudWatch, see https://docs.aws.amazon.com/AmazonCloudWatch/latest/monitoring/Statistics-definitions.html. Statistic *string `json:"statistic,omitempty"` - + // An alarm name prefix. If you specify this parameter, you receive information + // about all alarms that have names that start with this prefix. + // e.g. `my-team-service-` would match `my-team-service-high-cpu` but not match `your-team-service-high-cpu` + AlarmNamePrefix *string `json:"alarmNamePrefix,omitempty"` + // For mixed data sources the selected datasource is on the query level. + // For non mixed scenarios this is undefined. + // TODO find a better way to do this ^ that's friendly to schema + // TODO this shouldn't be unknown but DataSourceRef | null + Datasource any `json:"datasource,omitempty"` // @deprecated use statistic Statistics []string `json:"statistics,omitempty"` } -// QueryEditorArrayExpression defines model for QueryEditorArrayExpression. -type QueryEditorArrayExpression struct { - Expressions []any `json:"expressions"` - Type QueryEditorArrayExpressionType `json:"type"` +// NewCloudWatchAnnotationQuery creates a new CloudWatchAnnotationQuery object. +func NewCloudWatchAnnotationQuery() *CloudWatchAnnotationQuery { + return &CloudWatchAnnotationQuery{} } -// QueryEditorArrayExpressionType defines model for QueryEditorArrayExpression.Type. type QueryEditorArrayExpressionType string -// QueryEditorExpressionType defines model for QueryEditorExpressionType. -type QueryEditorExpressionType string +const ( + QueryEditorArrayExpressionTypeAnd QueryEditorArrayExpressionType = "and" + QueryEditorArrayExpressionTypeOr QueryEditorArrayExpressionType = "or" +) -// QueryEditorFunctionExpression defines model for QueryEditorFunctionExpression. 
-type QueryEditorFunctionExpression struct { - Name *string `json:"name,omitempty"` - Parameters []QueryEditorFunctionParameterExpression `json:"parameters,omitempty"` - Type QueryEditorFunctionExpressionType `json:"type"` +type StringOrArrayOfString struct { + String *string `json:"String,omitempty"` + ArrayOfString []string `json:"ArrayOfString,omitempty"` } -// QueryEditorFunctionExpressionType defines model for QueryEditorFunctionExpression.Type. -type QueryEditorFunctionExpressionType string - -// QueryEditorFunctionParameterExpression defines model for QueryEditorFunctionParameterExpression. -type QueryEditorFunctionParameterExpression struct { - Name *string `json:"name,omitempty"` - Type QueryEditorFunctionParameterExpressionType `json:"type"` +// NewStringOrArrayOfString creates a new StringOrArrayOfString object. +func NewStringOrArrayOfString() *StringOrArrayOfString { + return &StringOrArrayOfString{} } -// QueryEditorFunctionParameterExpressionType defines model for QueryEditorFunctionParameterExpression.Type. -type QueryEditorFunctionParameterExpressionType string +// MarshalJSON implements a custom JSON marshalling logic to encode `StringOrArrayOfString` as JSON. +func (resource StringOrArrayOfString) MarshalJSON() ([]byte, error) { + if resource.String != nil { + return json.Marshal(resource.String) + } -// QueryEditorGroupByExpression defines model for QueryEditorGroupByExpression. -type QueryEditorGroupByExpression struct { - Property QueryEditorProperty `json:"property"` - Type QueryEditorGroupByExpressionType `json:"type"` + if resource.ArrayOfString != nil { + return json.Marshal(resource.ArrayOfString) + } + + return nil, fmt.Errorf("no value for disjunction of scalars") } -// QueryEditorGroupByExpressionType defines model for QueryEditorGroupByExpression.Type. -type QueryEditorGroupByExpressionType string +// UnmarshalJSON implements a custom JSON unmarshalling logic to decode `StringOrArrayOfString` from JSON. +func (resource *StringOrArrayOfString) UnmarshalJSON(raw []byte) error { + if raw == nil { + return nil + } -// TS type is QueryEditorOperator, extended in veneer -type QueryEditorOperator struct { - Name *string `json:"name,omitempty"` - Value *any `json:"value,omitempty"` + var errList []error + + // String + var String string + if err := json.Unmarshal(raw, &String); err != nil { + errList = append(errList, err) + resource.String = nil + } else { + resource.String = &String + return nil + } + + // ArrayOfString + var ArrayOfString []string + if err := json.Unmarshal(raw, &ArrayOfString); err != nil { + errList = append(errList, err) + resource.ArrayOfString = nil + } else { + resource.ArrayOfString = ArrayOfString + return nil + } + + return errors.Join(errList...) } -// QueryEditorOperatorExpression defines model for QueryEditorOperatorExpression. -type QueryEditorOperatorExpression struct { - // TS type is QueryEditorOperator, extended in veneer - Operator QueryEditorOperator `json:"operator"` - Property QueryEditorProperty `json:"property"` - Type QueryEditorOperatorExpressionType `json:"type"` +type QueryEditorPropertyExpressionOrQueryEditorFunctionExpression struct { + QueryEditorPropertyExpression *QueryEditorPropertyExpression `json:"QueryEditorPropertyExpression,omitempty"` + QueryEditorFunctionExpression *QueryEditorFunctionExpression `json:"QueryEditorFunctionExpression,omitempty"` } -// QueryEditorOperatorExpressionType defines model for QueryEditorOperatorExpression.Type. 
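(Note, not part of the diff: a minimal sketch of how the new StringOrArrayOfString disjunction is expected to behave when decoding dimension values, assuming the generated package is importable under the path already used by log_sync_query.go in this change. Pre-6.5 payloads stored a dimension value as a plain string, newer ones as an array; both should land in the same Go type, which is what the parseDimensions rewrite later in this diff relies on.)

package main

import (
	"encoding/json"
	"fmt"

	"github.com/grafana/grafana/pkg/tsdb/cloudwatch/kinds/dataquery"
)

func main() {
	// Older payloads: the dimension value is a bare JSON string.
	var single dataquery.StringOrArrayOfString
	_ = json.Unmarshal([]byte(`"i-1234567890abcdef0"`), &single)
	fmt.Println(*single.String) // i-1234567890abcdef0

	// Newer payloads: the same dimension carries an array of values.
	var multi dataquery.StringOrArrayOfString
	_ = json.Unmarshal([]byte(`["i-1","i-2"]`), &multi)
	fmt.Println(multi.ArrayOfString) // [i-1 i-2]

	// Marshalling emits the bare JSON value again, not a wrapper object.
	out, _ := json.Marshal(multi)
	fmt.Println(string(out)) // ["i-1","i-2"]
}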
-type QueryEditorOperatorExpressionType string - -// QueryEditorProperty defines model for QueryEditorProperty. -type QueryEditorProperty struct { - Name *string `json:"name,omitempty"` - Type QueryEditorPropertyType `json:"type"` +// NewQueryEditorPropertyExpressionOrQueryEditorFunctionExpression creates a new QueryEditorPropertyExpressionOrQueryEditorFunctionExpression object. +func NewQueryEditorPropertyExpressionOrQueryEditorFunctionExpression() *QueryEditorPropertyExpressionOrQueryEditorFunctionExpression { + return &QueryEditorPropertyExpressionOrQueryEditorFunctionExpression{} } -// QueryEditorPropertyExpression defines model for QueryEditorPropertyExpression. -type QueryEditorPropertyExpression struct { - Property QueryEditorProperty `json:"property"` - Type QueryEditorPropertyExpressionType `json:"type"` +// MarshalJSON implements a custom JSON marshalling logic to encode `QueryEditorPropertyExpressionOrQueryEditorFunctionExpression` as JSON. +func (resource QueryEditorPropertyExpressionOrQueryEditorFunctionExpression) MarshalJSON() ([]byte, error) { + if resource.QueryEditorPropertyExpression != nil { + return json.Marshal(resource.QueryEditorPropertyExpression) + } + if resource.QueryEditorFunctionExpression != nil { + return json.Marshal(resource.QueryEditorFunctionExpression) + } + + return nil, fmt.Errorf("no value for disjunction of refs") } -// QueryEditorPropertyExpressionType defines model for QueryEditorPropertyExpression.Type. -type QueryEditorPropertyExpressionType string +// UnmarshalJSON implements a custom JSON unmarshalling logic to decode `QueryEditorPropertyExpressionOrQueryEditorFunctionExpression` from JSON. +func (resource *QueryEditorPropertyExpressionOrQueryEditorFunctionExpression) UnmarshalJSON(raw []byte) error { + if raw == nil { + return nil + } -// QueryEditorPropertyType defines model for QueryEditorPropertyType. -type QueryEditorPropertyType string + // FIXME: this is wasteful, we need to find a more efficient way to unmarshal this. + parsedAsMap := make(map[string]any) + if err := json.Unmarshal(raw, &parsedAsMap); err != nil { + return err + } -// SQLExpression defines model for SQLExpression. 
-type SQLExpression struct { - // FROM part of the SQL expression - From *any `json:"from,omitempty"` - GroupBy *QueryEditorArrayExpression `json:"groupBy,omitempty"` + discriminator, found := parsedAsMap["type"] + if !found { + return errors.New("discriminator field 'type' not found in payload") + } - // LIMIT part of the SQL expression - Limit *int64 `json:"limit,omitempty"` - OrderBy *QueryEditorFunctionExpression `json:"orderBy,omitempty"` + switch discriminator { + case "function": + var queryEditorFunctionExpression QueryEditorFunctionExpression + if err := json.Unmarshal(raw, &queryEditorFunctionExpression); err != nil { + return err + } - // The sort order of the SQL expression, `ASC` or `DESC` - OrderByDirection *string `json:"orderByDirection,omitempty"` - Select *QueryEditorFunctionExpression `json:"select,omitempty"` - Where *QueryEditorArrayExpression `json:"where,omitempty"` + resource.QueryEditorFunctionExpression = &queryEditorFunctionExpression + return nil + case "property": + var queryEditorPropertyExpression QueryEditorPropertyExpression + if err := json.Unmarshal(raw, &queryEditorPropertyExpression); err != nil { + return err + } + + resource.QueryEditorPropertyExpression = &queryEditorPropertyExpression + return nil + } + + return fmt.Errorf("could not unmarshal resource with `type = %v`", discriminator) +} + +type StringOrBoolOrInt64OrArrayOfQueryEditorOperatorType struct { + String *string `json:"String,omitempty"` + Bool *bool `json:"Bool,omitempty"` + Int64 *int64 `json:"Int64,omitempty"` + ArrayOfQueryEditorOperatorType []QueryEditorOperatorType `json:"ArrayOfQueryEditorOperatorType,omitempty"` +} + +// NewStringOrBoolOrInt64OrArrayOfQueryEditorOperatorType creates a new StringOrBoolOrInt64OrArrayOfQueryEditorOperatorType object. +func NewStringOrBoolOrInt64OrArrayOfQueryEditorOperatorType() *StringOrBoolOrInt64OrArrayOfQueryEditorOperatorType { + return &StringOrBoolOrInt64OrArrayOfQueryEditorOperatorType{} +} + +// MarshalJSON implements a custom JSON marshalling logic to encode `StringOrBoolOrInt64OrArrayOfQueryEditorOperatorType` as JSON. +func (resource StringOrBoolOrInt64OrArrayOfQueryEditorOperatorType) MarshalJSON() ([]byte, error) { + if resource.String != nil { + return json.Marshal(resource.String) + } + + if resource.Bool != nil { + return json.Marshal(resource.Bool) + } + + if resource.Int64 != nil { + return json.Marshal(resource.Int64) + } + + if resource.ArrayOfQueryEditorOperatorType != nil { + return json.Marshal(resource.ArrayOfQueryEditorOperatorType) + } + + return nil, fmt.Errorf("no value for disjunction of scalars") +} + +// UnmarshalJSON implements a custom JSON unmarshalling logic to decode `StringOrBoolOrInt64OrArrayOfQueryEditorOperatorType` from JSON. 
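(Note, not part of the diff: a hedged sketch of how the discriminated union above resolves the SQL FROM clause, assuming the same generated package import path. The `type` field selects between a property reference and a function expression; the payloads shown are illustrative only.)

package main

import (
	"encoding/json"
	"fmt"

	"github.com/grafana/grafana/pkg/tsdb/cloudwatch/kinds/dataquery"
)

func main() {
	// `type: "property"` decodes into the QueryEditorPropertyExpression branch.
	var from dataquery.QueryEditorPropertyExpressionOrQueryEditorFunctionExpression
	_ = json.Unmarshal([]byte(`{"type":"property","property":{"type":"string","name":"AWS/EC2"}}`), &from)
	fmt.Println(from.QueryEditorPropertyExpression != nil) // true

	// `type: "function"` decodes into the QueryEditorFunctionExpression branch instead.
	var schema dataquery.QueryEditorPropertyExpressionOrQueryEditorFunctionExpression
	_ = json.Unmarshal([]byte(`{"type":"function","name":"SCHEMA"}`), &schema)
	fmt.Println(*schema.QueryEditorFunctionExpression.Name) // SCHEMA

	// Marshalling writes back only the populated branch, without a wrapper object.
	out, _ := json.Marshal(schema)
	fmt.Println(string(out)) // {"type":"function","name":"SCHEMA"}
}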
+func (resource *StringOrBoolOrInt64OrArrayOfQueryEditorOperatorType) UnmarshalJSON(raw []byte) error { + if raw == nil { + return nil + } + + var errList []error + + // String + var String string + if err := json.Unmarshal(raw, &String); err != nil { + errList = append(errList, err) + resource.String = nil + } else { + resource.String = &String + return nil + } + + // Bool + var Bool bool + if err := json.Unmarshal(raw, &Bool); err != nil { + errList = append(errList, err) + resource.Bool = nil + } else { + resource.Bool = &Bool + return nil + } + + // Int64 + var Int64 int64 + if err := json.Unmarshal(raw, &Int64); err != nil { + errList = append(errList, err) + resource.Int64 = nil + } else { + resource.Int64 = &Int64 + return nil + } + + // ArrayOfQueryEditorOperatorType + var ArrayOfQueryEditorOperatorType []QueryEditorOperatorType + if err := json.Unmarshal(raw, &ArrayOfQueryEditorOperatorType); err != nil { + errList = append(errList, err) + resource.ArrayOfQueryEditorOperatorType = nil + } else { + resource.ArrayOfQueryEditorOperatorType = ArrayOfQueryEditorOperatorType + return nil + } + + return errors.Join(errList...) +} + +type StringOrBoolOrInt64 struct { + String *string `json:"String,omitempty"` + Bool *bool `json:"Bool,omitempty"` + Int64 *int64 `json:"Int64,omitempty"` +} + +// NewStringOrBoolOrInt64 creates a new StringOrBoolOrInt64 object. +func NewStringOrBoolOrInt64() *StringOrBoolOrInt64 { + return &StringOrBoolOrInt64{} +} + +// MarshalJSON implements a custom JSON marshalling logic to encode `StringOrBoolOrInt64` as JSON. +func (resource StringOrBoolOrInt64) MarshalJSON() ([]byte, error) { + if resource.String != nil { + return json.Marshal(resource.String) + } + + if resource.Bool != nil { + return json.Marshal(resource.Bool) + } + + if resource.Int64 != nil { + return json.Marshal(resource.Int64) + } + + return nil, fmt.Errorf("no value for disjunction of scalars") +} + +// UnmarshalJSON implements a custom JSON unmarshalling logic to decode `StringOrBoolOrInt64` from JSON. +func (resource *StringOrBoolOrInt64) UnmarshalJSON(raw []byte) error { + if raw == nil { + return nil + } + + var errList []error + + // String + var String string + if err := json.Unmarshal(raw, &String); err != nil { + errList = append(errList, err) + resource.String = nil + } else { + resource.String = &String + return nil + } + + // Bool + var Bool bool + if err := json.Unmarshal(raw, &Bool); err != nil { + errList = append(errList, err) + resource.Bool = nil + } else { + resource.Bool = &Bool + return nil + } + + // Int64 + var Int64 int64 + if err := json.Unmarshal(raw, &Int64); err != nil { + errList = append(errList, err) + resource.Int64 = nil + } else { + resource.Int64 = &Int64 + return nil + } + + return errors.Join(errList...) +} + +type ArrayOfQueryEditorExpressionOrArrayOfQueryEditorArrayExpression struct { + ArrayOfQueryEditorExpression []QueryEditorExpression `json:"ArrayOfQueryEditorExpression,omitempty"` + ArrayOfQueryEditorArrayExpression []QueryEditorArrayExpression `json:"ArrayOfQueryEditorArrayExpression,omitempty"` +} + +// NewArrayOfQueryEditorExpressionOrArrayOfQueryEditorArrayExpression creates a new ArrayOfQueryEditorExpressionOrArrayOfQueryEditorArrayExpression object. 
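(Note, not part of the diff: an illustrative sketch of building a WHERE operator expression with the typed operator-value disjunction defined above, and the JSON it is expected to produce. The `strPtr` helper is local to the example, not part of the generated package.)

package main

import (
	"encoding/json"
	"fmt"

	"github.com/grafana/grafana/pkg/tsdb/cloudwatch/kinds/dataquery"
)

func strPtr(s string) *string { return &s }

func main() {
	expr := dataquery.NewQueryEditorOperatorExpression()
	expr.Property.Type = dataquery.QueryEditorPropertyTypeString
	expr.Property.Name = strPtr("InstanceId")
	expr.Operator.Name = strPtr("=")
	expr.Operator.Value = &dataquery.StringOrBoolOrInt64OrArrayOfQueryEditorOperatorType{
		String: strPtr("i-1234567890abcdef0"),
	}

	// The operator value marshals as a bare scalar thanks to its custom MarshalJSON.
	out, _ := json.Marshal(expr)
	fmt.Println(string(out))
	// {"type":"operator","property":{"type":"string","name":"InstanceId"},"operator":{"name":"=","value":"i-1234567890abcdef0"}}
}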
+func NewArrayOfQueryEditorExpressionOrArrayOfQueryEditorArrayExpression() *ArrayOfQueryEditorExpressionOrArrayOfQueryEditorArrayExpression { + return &ArrayOfQueryEditorExpressionOrArrayOfQueryEditorArrayExpression{} +} + +// MarshalJSON implements a custom JSON marshalling logic to encode `ArrayOfQueryEditorExpressionOrArrayOfQueryEditorArrayExpression` as JSON. +func (resource ArrayOfQueryEditorExpressionOrArrayOfQueryEditorArrayExpression) MarshalJSON() ([]byte, error) { + if resource.ArrayOfQueryEditorExpression != nil { + return json.Marshal(resource.ArrayOfQueryEditorExpression) + } + + if resource.ArrayOfQueryEditorArrayExpression != nil { + return json.Marshal(resource.ArrayOfQueryEditorArrayExpression) + } + + return nil, fmt.Errorf("no value for disjunction of scalars") +} + +// UnmarshalJSON implements a custom JSON unmarshalling logic to decode `ArrayOfQueryEditorExpressionOrArrayOfQueryEditorArrayExpression` from JSON. +func (resource *ArrayOfQueryEditorExpressionOrArrayOfQueryEditorArrayExpression) UnmarshalJSON(raw []byte) error { + if raw == nil { + return nil + } + + var errList []error + + // ArrayOfQueryEditorExpression + var ArrayOfQueryEditorExpression []QueryEditorExpression + if err := json.Unmarshal(raw, &ArrayOfQueryEditorExpression); err != nil { + errList = append(errList, err) + resource.ArrayOfQueryEditorExpression = nil + } else { + resource.ArrayOfQueryEditorExpression = ArrayOfQueryEditorExpression + return nil + } + + // ArrayOfQueryEditorArrayExpression + var ArrayOfQueryEditorArrayExpression []QueryEditorArrayExpression + if err := json.Unmarshal(raw, &ArrayOfQueryEditorArrayExpression); err != nil { + errList = append(errList, err) + resource.ArrayOfQueryEditorArrayExpression = nil + } else { + resource.ArrayOfQueryEditorArrayExpression = ArrayOfQueryEditorArrayExpression + return nil + } + + return errors.Join(errList...) 
} diff --git a/pkg/tsdb/cloudwatch/log_actions.go b/pkg/tsdb/cloudwatch/log_actions.go index cdf80b977f4..d8c95db35ca 100644 --- a/pkg/tsdb/cloudwatch/log_actions.go +++ b/pkg/tsdb/cloudwatch/log_actions.go @@ -141,8 +141,8 @@ func (e *cloudWatchExecutor) executeLogAction(ctx context.Context, logsQuery mod } region := instance.Settings.Region - if logsQuery.Region != nil { - region = *logsQuery.Region + if logsQuery.Region != "" { + region = logsQuery.Region } logsClient, err := e.getCWLogsClient(ctx, pluginCtx, region) @@ -305,8 +305,8 @@ func (e *cloudWatchExecutor) handleStartQuery(ctx context.Context, logsClient cl dataFrame.RefID = refID region := "default" - if logsQuery.Region != nil { - region = *logsQuery.Region + if logsQuery.Region != "" { + region = logsQuery.Region } dataFrame.Meta = &data.FrameMeta{ diff --git a/pkg/tsdb/cloudwatch/log_sync_query.go b/pkg/tsdb/cloudwatch/log_sync_query.go index c029704effe..28c5b21b0f1 100644 --- a/pkg/tsdb/cloudwatch/log_sync_query.go +++ b/pkg/tsdb/cloudwatch/log_sync_query.go @@ -13,7 +13,6 @@ import ( "github.com/grafana/grafana-plugin-sdk-go/data" "github.com/grafana/grafana/pkg/tsdb/cloudwatch/kinds/dataquery" "github.com/grafana/grafana/pkg/tsdb/cloudwatch/models" - "github.com/grafana/grafana/pkg/tsdb/cloudwatch/utils" ) const initialAlertPollPeriod = time.Second @@ -39,9 +38,9 @@ var executeSyncLogQuery = func(ctx context.Context, e *cloudWatchExecutor, req * logsQuery.QueryString = *logsQuery.Expression } - region := utils.Depointerizer(logsQuery.Region) + region := logsQuery.Region if region == "" || region == defaultRegion { - logsQuery.Region = utils.Pointer(instance.Settings.Region) + logsQuery.Region = instance.Settings.Region } logsClient, err := e.getCWLogsClient(ctx, req.PluginContext, region) diff --git a/pkg/tsdb/cloudwatch/models/cloudwatch_query.go b/pkg/tsdb/cloudwatch/models/cloudwatch_query.go index a44b00c17af..6907257c55e 100644 --- a/pkg/tsdb/cloudwatch/models/cloudwatch_query.go +++ b/pkg/tsdb/cloudwatch/models/cloudwatch_query.go @@ -17,7 +17,6 @@ import ( "github.com/grafana/grafana-plugin-sdk-go/backend" "github.com/grafana/grafana-plugin-sdk-go/backend/log" "github.com/grafana/grafana/pkg/tsdb/cloudwatch/kinds/dataquery" - "github.com/grafana/grafana/pkg/tsdb/cloudwatch/utils" ) type ( @@ -27,13 +26,13 @@ type ( ) const ( - MetricEditorModeBuilder = dataquery.MetricEditorModeN0 - MetricEditorModeRaw = dataquery.MetricEditorModeN1 + MetricEditorModeBuilder = dataquery.MetricEditorModeBuilder + MetricEditorModeRaw = dataquery.MetricEditorModeCode ) const ( - MetricQueryTypeSearch = dataquery.MetricQueryTypeN0 - MetricQueryTypeQuery = dataquery.MetricQueryTypeN1 + MetricQueryTypeSearch = dataquery.MetricQueryTypeSearch + MetricQueryTypeQuery = dataquery.MetricQueryTypeInsights ) const ( @@ -256,9 +255,9 @@ func ParseMetricDataQueries(dataQueries []backend.DataQuery, startTime time.Time StartTime: startTime, EndTime: endTime, RefId: refId, - Id: utils.Depointerizer(mdq.Id), - Region: utils.Depointerizer(mdq.Region), - Namespace: utils.Depointerizer(mdq.Namespace), + Id: mdq.Id, + Region: mdq.Region, + Namespace: mdq.Namespace, TimezoneUTCOffset: mdq.TimezoneUTCOffset, } @@ -335,7 +334,7 @@ func (q *CloudWatchQuery) validateAndSetDefaults(refId string, metricsDataQuery q.AccountId = metricsDataQuery.AccountId } - if utils.Depointerizer(metricsDataQuery.Id) == "" { + if metricsDataQuery.Id == "" { // Why not just use refId if id is not specified in the frontend? 
When specifying an id in the editor, // and alphabetical must be used. The id must be unique, so if an id like for example a, b or c would be used, // it would likely collide with some ref id. That's why the `query` prefix is used. @@ -488,16 +487,14 @@ func getRetainedPeriods(timeSince time.Duration) []int { } } -func parseDimensions(dimensions map[string]any) (map[string][]string, error) { +func parseDimensions(dimensions dataquery.Dimensions) (map[string][]string, error) { parsedDimensions := make(map[string][]string) for k, v := range dimensions { // This is for backwards compatibility. Before 6.5 dimensions values were stored as strings and not arrays - if value, ok := v.(string); ok { - parsedDimensions[k] = []string{value} - } else if values, ok := v.([]any); ok { - for _, value := range values { - parsedDimensions[k] = append(parsedDimensions[k], value.(string)) - } + if v.String != nil { + parsedDimensions[k] = []string{*v.String} + } else if len(v.ArrayOfString) > 0 { + parsedDimensions[k] = append(parsedDimensions[k], v.ArrayOfString...) } else { return nil, errors.New("unknown type as dimension value") } diff --git a/pkg/tsdb/cloudwatch/models/cloudwatch_query_test.go b/pkg/tsdb/cloudwatch/models/cloudwatch_query_test.go index 1459c923555..fe438fb2afd 100644 --- a/pkg/tsdb/cloudwatch/models/cloudwatch_query_test.go +++ b/pkg/tsdb/cloudwatch/models/cloudwatch_query_test.go @@ -435,7 +435,8 @@ func TestRequestParser(t *testing.T) { _, err := ParseMetricDataQueries(query, time.Now().Add(-2*time.Hour), time.Now().Add(-time.Hour), "us-east-2", logger, false) require.Error(t, err) - assert.Equal(t, `error parsing query "", failed to parse dimensions: unknown type as dimension value`, err.Error()) + assert.Equal(t, `error parsing query "", json: cannot unmarshal number into Go value of type string +json: cannot unmarshal number into Go value of type []string`, err.Error()) }) } @@ -935,12 +936,12 @@ func Test_migrateAliasToDynamicLabel_single_query_preserves_old_alias_and_create queryToMigrate := metricsDataQuery{ CloudWatchMetricsQuery: dataquery.CloudWatchMetricsQuery{ - Region: utils.Pointer("us-east-1"), - Namespace: utils.Pointer("ec2"), + Region: "us-east-1", + Namespace: "ec2", MetricName: utils.Pointer("CPUUtilization"), Alias: utils.Pointer(tc.inputAlias), Dimensions: &dataquery.Dimensions{ - "InstanceId": []any{"test"}, + "InstanceId": dataquery.StringOrArrayOfString{ArrayOfString: []string{"test"}}, }, Statistic: &average, Period: utils.Pointer("600"), diff --git a/pkg/tsdb/cloudwatch/utils/utils.go b/pkg/tsdb/cloudwatch/utils/utils.go index bf5e8f9d0a7..55163896228 100644 --- a/pkg/tsdb/cloudwatch/utils/utils.go +++ b/pkg/tsdb/cloudwatch/utils/utils.go @@ -4,15 +4,6 @@ import "github.com/go-stack/stack" func Pointer[T any](arg T) *T { return &arg } -func Depointerizer[T any](v *T) T { - var emptyValue T - if v != nil { - emptyValue = *v - } - - return emptyValue -} - // Stack is copied from grafana/pkg/infra/log // TODO: maybe this should live in grafana-plugin-sdk-go? func Stack(skip int) string { diff --git a/pkg/tsdb/elasticsearch/kinds/dataquery/types_dataquery_gen.go b/pkg/tsdb/elasticsearch/kinds/dataquery/types_dataquery_gen.go index 5b767aba2e9..430a37af4e5 100644 --- a/pkg/tsdb/elasticsearch/kinds/dataquery/types_dataquery_gen.go +++ b/pkg/tsdb/elasticsearch/kinds/dataquery/types_dataquery_gen.go @@ -7,591 +7,1804 @@ // // Run 'make gen-cue' from repository root to regenerate. +// Code generated - EDITING IS FUTILE. DO NOT EDIT. 
+ package dataquery -// Defines values for BucketAggregationType. -const ( - BucketAggregationTypeDateHistogram BucketAggregationType = "date_histogram" - BucketAggregationTypeFilters BucketAggregationType = "filters" - BucketAggregationTypeGeohashGrid BucketAggregationType = "geohash_grid" - BucketAggregationTypeHistogram BucketAggregationType = "histogram" - BucketAggregationTypeNested BucketAggregationType = "nested" - BucketAggregationTypeTerms BucketAggregationType = "terms" +import ( + json "encoding/json" + errors "errors" + fmt "fmt" ) -// Defines values for ExtendedStatMetaType. -const ( - ExtendedStatMetaTypeAvg ExtendedStatMetaType = "avg" - ExtendedStatMetaTypeCount ExtendedStatMetaType = "count" - ExtendedStatMetaTypeMax ExtendedStatMetaType = "max" - ExtendedStatMetaTypeMin ExtendedStatMetaType = "min" - ExtendedStatMetaTypeStdDeviation ExtendedStatMetaType = "std_deviation" - ExtendedStatMetaTypeStdDeviationBoundsLower ExtendedStatMetaType = "std_deviation_bounds_lower" - ExtendedStatMetaTypeStdDeviationBoundsUpper ExtendedStatMetaType = "std_deviation_bounds_upper" - ExtendedStatMetaTypeSum ExtendedStatMetaType = "sum" -) +type BucketAggregation = DateHistogramOrHistogramOrTermsOrFiltersOrGeoHashGridOrNested -// Defines values for MetricAggregationType. -const ( - MetricAggregationTypeAvg MetricAggregationType = "avg" - MetricAggregationTypeBucketScript MetricAggregationType = "bucket_script" - MetricAggregationTypeCardinality MetricAggregationType = "cardinality" - MetricAggregationTypeCount MetricAggregationType = "count" - MetricAggregationTypeCumulativeSum MetricAggregationType = "cumulative_sum" - MetricAggregationTypeDerivative MetricAggregationType = "derivative" - MetricAggregationTypeExtendedStats MetricAggregationType = "extended_stats" - MetricAggregationTypeLogs MetricAggregationType = "logs" - MetricAggregationTypeMax MetricAggregationType = "max" - MetricAggregationTypeMin MetricAggregationType = "min" - MetricAggregationTypeMovingAvg MetricAggregationType = "moving_avg" - MetricAggregationTypeMovingFn MetricAggregationType = "moving_fn" - MetricAggregationTypePercentiles MetricAggregationType = "percentiles" - MetricAggregationTypeRate MetricAggregationType = "rate" - MetricAggregationTypeRawData MetricAggregationType = "raw_data" - MetricAggregationTypeRawDocument MetricAggregationType = "raw_document" - MetricAggregationTypeSerialDiff MetricAggregationType = "serial_diff" - MetricAggregationTypeSum MetricAggregationType = "sum" - MetricAggregationTypeTopMetrics MetricAggregationType = "top_metrics" -) - -// Defines values for MovingAverageModel. -const ( - MovingAverageModelEwma MovingAverageModel = "ewma" - MovingAverageModelHolt MovingAverageModel = "holt" - MovingAverageModelHoltWinters MovingAverageModel = "holt_winters" - MovingAverageModelLinear MovingAverageModel = "linear" - MovingAverageModelSimple MovingAverageModel = "simple" -) - -// Defines values for PipelineMetricAggregationType. 
-const ( - PipelineMetricAggregationTypeBucketScript PipelineMetricAggregationType = "bucket_script" - PipelineMetricAggregationTypeCumulativeSum PipelineMetricAggregationType = "cumulative_sum" - PipelineMetricAggregationTypeDerivative PipelineMetricAggregationType = "derivative" - PipelineMetricAggregationTypeMovingAvg PipelineMetricAggregationType = "moving_avg" - PipelineMetricAggregationTypeMovingFn PipelineMetricAggregationType = "moving_fn" - PipelineMetricAggregationTypeSerialDiff PipelineMetricAggregationType = "serial_diff" -) - -// Defines values for TermsOrder. -const ( - TermsOrderAsc TermsOrder = "asc" - TermsOrderDesc TermsOrder = "desc" -) - -// Average defines model for Average. -type Average struct { - MetricAggregationWithField - MetricAggregationWithInlineScript - MetricAggregationWithMissingSupport - Hide *bool `json:"hide,omitempty"` - Id string `json:"id"` - Settings *struct { - Missing *string `json:"missing,omitempty"` - Script *any `json:"script,omitempty"` - } `json:"settings,omitempty"` - Type MetricAggregationType `json:"type"` +// NewBucketAggregation creates a new BucketAggregation object. +func NewBucketAggregation() *BucketAggregation { + return NewDateHistogramOrHistogramOrTermsOrFiltersOrGeoHashGridOrNested() } -// BaseBucketAggregation defines model for BaseBucketAggregation. -type BaseBucketAggregation struct { - Id string `json:"id"` - Settings *any `json:"settings,omitempty"` - Type BucketAggregationType `json:"type"` +type MetricAggregation = CountOrMovingAverageOrDerivativeOrCumulativeSumOrBucketScriptOrSerialDiffOrRawDataOrRawDocumentOrUniqueCountOrPercentilesOrExtendedStatsOrMinOrMaxOrSumOrAverageOrMovingFunctionOrLogsOrRateOrTopMetrics + +// NewMetricAggregation creates a new MetricAggregation object. +func NewMetricAggregation() *MetricAggregation { + return NewCountOrMovingAverageOrDerivativeOrCumulativeSumOrBucketScriptOrSerialDiffOrRawDataOrRawDocumentOrUniqueCountOrPercentilesOrExtendedStatsOrMinOrMaxOrSumOrAverageOrMovingFunctionOrLogsOrRateOrTopMetrics() } -// BaseMetricAggregation defines model for BaseMetricAggregation. -type BaseMetricAggregation struct { - Hide *bool `json:"hide,omitempty"` - Id string `json:"id"` - Type MetricAggregationType `json:"type"` -} - -// BaseMovingAverageModelSettings defines model for BaseMovingAverageModelSettings. -type BaseMovingAverageModelSettings struct { - Model MovingAverageModel `json:"model"` - Predict string `json:"predict"` - Window string `json:"window"` -} - -// BasePipelineMetricAggregation defines model for BasePipelineMetricAggregation. -type BasePipelineMetricAggregation struct { - MetricAggregationWithField - Hide *bool `json:"hide,omitempty"` - Id string `json:"id"` - PipelineAgg *string `json:"pipelineAgg,omitempty"` - Type MetricAggregationType `json:"type"` -} - -// BucketAggregationType defines model for BucketAggregationType. type BucketAggregationType string -// BucketAggregationWithField defines model for BucketAggregationWithField. 
-type BucketAggregationWithField struct { - BaseBucketAggregation - Field *string `json:"field,omitempty"` -} +const ( + BucketAggregationTypeTerms BucketAggregationType = "terms" + BucketAggregationTypeFilters BucketAggregationType = "filters" + BucketAggregationTypeGeohashGrid BucketAggregationType = "geohash_grid" + BucketAggregationTypeDateHistogram BucketAggregationType = "date_histogram" + BucketAggregationTypeHistogram BucketAggregationType = "histogram" + BucketAggregationTypeNested BucketAggregationType = "nested" +) -// BucketScript defines model for BucketScript. -type BucketScript struct { - PipelineMetricAggregationWithMultipleBucketPaths - Hide *bool `json:"hide,omitempty"` - Id string `json:"id"` - Settings *struct { - Script *any `json:"script,omitempty"` - } `json:"settings,omitempty"` - Type MetricAggregationType `json:"type"` -} - -// Count defines model for Count. -type Count struct { - BaseMetricAggregation - Hide *bool `json:"hide,omitempty"` - Id string `json:"id"` - Type MetricAggregationType `json:"type"` -} - -// CumulativeSum defines model for CumulativeSum. -type CumulativeSum struct { - BasePipelineMetricAggregation - Hide *bool `json:"hide,omitempty"` - Id string `json:"id"` - Settings *struct { - Format *string `json:"format,omitempty"` - } `json:"settings,omitempty"` - Type MetricAggregationType `json:"type"` -} - -// These are the common properties available to all queries in all datasources. -// Specific implementations will *extend* this interface, adding the required -// properties for the given context. -type DataQuery struct { - // For mixed data sources the selected datasource is on the query level. - // For non mixed scenarios this is undefined. - // TODO find a better way to do this ^ that's friendly to schema - // TODO this shouldn't be unknown but DataSourceRef | null - Datasource *any `json:"datasource,omitempty"` - - // If hide is set to true, Grafana will filter out the response(s) associated with this query before returning it to the panel. - Hide *bool `json:"hide,omitempty"` - - // Specify the query flavor - // TODO make this required and give it a default - QueryType *string `json:"queryType,omitempty"` - - // A unique identifier for the query within the list of targets. - // In server side expressions, the refId is used as a variable name to identify results. - // By default, the UI will assign A->Z; however setting meaningful names may be useful. - RefId string `json:"refId"` -} - -// DateHistogram defines model for DateHistogram. -type DateHistogram struct { - BucketAggregationWithField +type BaseBucketAggregation struct { Id string `json:"id"` - Settings *any `json:"settings,omitempty"` Type BucketAggregationType `json:"type"` + Settings any `json:"settings,omitempty"` +} + +// NewBaseBucketAggregation creates a new BaseBucketAggregation object. +func NewBaseBucketAggregation() *BaseBucketAggregation { + return &BaseBucketAggregation{} +} + +type BucketAggregationWithField struct { + Field *string `json:"field,omitempty"` + Id string `json:"id"` + Type BucketAggregationType `json:"type"` + Settings any `json:"settings,omitempty"` +} + +// NewBucketAggregationWithField creates a new BucketAggregationWithField object. 
+func NewBucketAggregationWithField() *BucketAggregationWithField { + return &BucketAggregationWithField{} +} + +type DateHistogram struct { + Field *string `json:"field,omitempty"` + Id string `json:"id"` + Type string `json:"type"` + Settings *DataqueryDateHistogramSettings `json:"settings,omitempty"` +} + +// NewDateHistogram creates a new DateHistogram object. +func NewDateHistogram() *DateHistogram { + return &DateHistogram{ + Type: "date_histogram", + } } -// DateHistogramSettings defines model for DateHistogramSettings. type DateHistogramSettings struct { Interval *string `json:"interval,omitempty"` MinDocCount *string `json:"min_doc_count,omitempty"` + TrimEdges *string `json:"trimEdges,omitempty"` Offset *string `json:"offset,omitempty"` TimeZone *string `json:"timeZone,omitempty"` - TrimEdges *string `json:"trimEdges,omitempty"` } -// Derivative defines model for Derivative. -type Derivative struct { - BasePipelineMetricAggregation - Hide *bool `json:"hide,omitempty"` - Id string `json:"id"` - Settings *struct { - Unit *string `json:"unit,omitempty"` - } `json:"settings,omitempty"` - Type MetricAggregationType `json:"type"` +// NewDateHistogramSettings creates a new DateHistogramSettings object. +func NewDateHistogramSettings() *DateHistogramSettings { + return &DateHistogramSettings{} } -// ElasticsearchDataQuery defines model for ElasticsearchDataQuery. -type ElasticsearchDataQuery struct { - // DataQuery These are the common properties available to all queries in all datasources. - // Specific implementations will *extend* this interface, adding the required - // properties for the given context. - DataQuery - - // Alias pattern - Alias *string `json:"alias,omitempty"` - - // List of bucket aggregations - BucketAggs []any `json:"bucketAggs,omitempty"` - - // List of metric aggregations - Metrics []any `json:"metrics,omitempty"` - - // Lucene query - Query *string `json:"query,omitempty"` - - // Name of time field - TimeField *string `json:"timeField,omitempty"` -} - -// ExtendedStat defines model for ExtendedStat. -type ExtendedStat struct { - Label string `json:"label"` - Value ExtendedStatMetaType `json:"value"` -} - -// ExtendedStatMetaType defines model for ExtendedStatMetaType. -type ExtendedStatMetaType string - -// ExtendedStats defines model for ExtendedStats. -type ExtendedStats struct { - MetricAggregationWithField - MetricAggregationWithInlineScript - Hide *bool `json:"hide,omitempty"` - Id string `json:"id"` - Meta map[string]any `json:"meta,omitempty"` - Settings *struct { - Missing *string `json:"missing,omitempty"` - Script *any `json:"script,omitempty"` - Sigma *string `json:"sigma,omitempty"` - } `json:"settings,omitempty"` - Type MetricAggregationType `json:"type"` -} - -// Filter defines model for Filter. -type Filter struct { - Label string `json:"label"` - Query string `json:"query"` -} - -// Filters defines model for Filters. -type Filters struct { - BaseBucketAggregation - Id string `json:"id"` - Settings *any `json:"settings,omitempty"` - Type BucketAggregationType `json:"type"` -} - -// FiltersSettings defines model for FiltersSettings. -type FiltersSettings struct { - Filters []Filter `json:"filters,omitempty"` -} - -// GeoHashGrid defines model for GeoHashGrid. -type GeoHashGrid struct { - BucketAggregationWithField - Id string `json:"id"` - Settings *any `json:"settings,omitempty"` - Type BucketAggregationType `json:"type"` -} - -// GeoHashGridSettings defines model for GeoHashGridSettings. 
-type GeoHashGridSettings struct { - Precision *string `json:"precision,omitempty"` -} - -// Histogram defines model for Histogram. type Histogram struct { - BucketAggregationWithField - Id string `json:"id"` - Settings *any `json:"settings,omitempty"` - Type BucketAggregationType `json:"type"` + Field *string `json:"field,omitempty"` + Id string `json:"id"` + Type string `json:"type"` + Settings *DataqueryHistogramSettings `json:"settings,omitempty"` +} + +// NewHistogram creates a new Histogram object. +func NewHistogram() *Histogram { + return &Histogram{ + Type: "histogram", + } } -// HistogramSettings defines model for HistogramSettings. type HistogramSettings struct { Interval *string `json:"interval,omitempty"` MinDocCount *string `json:"min_doc_count,omitempty"` } -// Logs defines model for Logs. -type Logs struct { - BaseMetricAggregation - Hide *bool `json:"hide,omitempty"` - Id string `json:"id"` - Settings *struct { - Limit *string `json:"limit,omitempty"` - } `json:"settings,omitempty"` - Type MetricAggregationType `json:"type"` +// NewHistogramSettings creates a new HistogramSettings object. +func NewHistogramSettings() *HistogramSettings { + return &HistogramSettings{} } -// Max defines model for Max. -type Max struct { - MetricAggregationWithField - MetricAggregationWithInlineScript - Hide *bool `json:"hide,omitempty"` - Id string `json:"id"` - Settings *struct { - Missing *string `json:"missing,omitempty"` - Script *any `json:"script,omitempty"` - } `json:"settings,omitempty"` - Type MetricAggregationType `json:"type"` -} +type TermsOrder string -// MetricAggregationType defines model for MetricAggregationType. -type MetricAggregationType string +const ( + TermsOrderDesc TermsOrder = "desc" + TermsOrderAsc TermsOrder = "asc" +) -// MetricAggregationWithField defines model for MetricAggregationWithField. -type MetricAggregationWithField struct { - BaseMetricAggregation - Field *string `json:"field,omitempty"` -} - -// MetricAggregationWithInlineScript defines model for MetricAggregationWithInlineScript. -type MetricAggregationWithInlineScript struct { - BaseMetricAggregation - Settings *struct { - Script *any `json:"script,omitempty"` - } `json:"settings,omitempty"` -} - -// MetricAggregationWithMissingSupport defines model for MetricAggregationWithMissingSupport. -type MetricAggregationWithMissingSupport struct { - BaseMetricAggregation - Settings *struct { - Missing *string `json:"missing,omitempty"` - } `json:"settings,omitempty"` -} - -// Min defines model for Min. -type Min struct { - MetricAggregationWithField - MetricAggregationWithInlineScript - Hide *bool `json:"hide,omitempty"` - Id string `json:"id"` - Settings *struct { - Missing *string `json:"missing,omitempty"` - Script *any `json:"script,omitempty"` - } `json:"settings,omitempty"` - Type MetricAggregationType `json:"type"` -} - -// MovingAverage defines model for MovingAverage. -type MovingAverage struct { - BasePipelineMetricAggregation - Hide *bool `json:"hide,omitempty"` - Id string `json:"id"` - Settings map[string]any `json:"settings,omitempty"` - Type MetricAggregationType `json:"type"` -} - -// MovingAverageEWMAModelSettings defines model for MovingAverageEWMAModelSettings. 
-type MovingAverageEWMAModelSettings struct { - BaseMovingAverageModelSettings - Minimize bool `json:"minimize"` - Model MovingAverageModel `json:"model"` - Predict string `json:"predict"` - Settings *struct { - Alpha *string `json:"alpha,omitempty"` - } `json:"settings,omitempty"` - Window string `json:"window"` -} - -// MovingAverageHoltModelSettings defines model for MovingAverageHoltModelSettings. -type MovingAverageHoltModelSettings struct { - BaseMovingAverageModelSettings - Minimize bool `json:"minimize"` - Model MovingAverageModel `json:"model"` - Predict string `json:"predict"` - Settings struct { - Alpha *string `json:"alpha,omitempty"` - Beta *string `json:"beta,omitempty"` - } `json:"settings"` - Window string `json:"window"` -} - -// MovingAverageHoltWintersModelSettings defines model for MovingAverageHoltWintersModelSettings. -type MovingAverageHoltWintersModelSettings struct { - BaseMovingAverageModelSettings - Minimize bool `json:"minimize"` - Model MovingAverageModel `json:"model"` - Predict string `json:"predict"` - Settings struct { - Alpha *string `json:"alpha,omitempty"` - Beta *string `json:"beta,omitempty"` - Gamma *string `json:"gamma,omitempty"` - Pad *bool `json:"pad,omitempty"` - Period *string `json:"period,omitempty"` - } `json:"settings"` - Window string `json:"window"` -} - -// MovingAverageLinearModelSettings defines model for MovingAverageLinearModelSettings. -type MovingAverageLinearModelSettings struct { - BaseMovingAverageModelSettings - Model MovingAverageModel `json:"model"` - Predict string `json:"predict"` - Window string `json:"window"` -} - -// MovingAverageModel defines model for MovingAverageModel. -type MovingAverageModel string - -// MovingAverageModelOption defines model for MovingAverageModelOption. -type MovingAverageModelOption struct { - Label string `json:"label"` - Value MovingAverageModel `json:"value"` -} - -// MovingAverageSimpleModelSettings defines model for MovingAverageSimpleModelSettings. -type MovingAverageSimpleModelSettings struct { - BaseMovingAverageModelSettings - Model MovingAverageModel `json:"model"` - Predict string `json:"predict"` - Window string `json:"window"` -} - -// MovingFunction defines model for MovingFunction. -type MovingFunction struct { - BasePipelineMetricAggregation - Hide *bool `json:"hide,omitempty"` - Id string `json:"id"` - Settings *struct { - Script *any `json:"script,omitempty"` - Shift *string `json:"shift,omitempty"` - Window *string `json:"window,omitempty"` - } `json:"settings,omitempty"` - Type MetricAggregationType `json:"type"` -} - -// Nested defines model for Nested. type Nested struct { - BucketAggregationWithField - Id string `json:"id"` - Settings *any `json:"settings,omitempty"` - Type BucketAggregationType `json:"type"` + Field *string `json:"field,omitempty"` + Id string `json:"id"` + Type string `json:"type"` + Settings any `json:"settings,omitempty"` } -// Percentiles defines model for Percentiles. -type Percentiles struct { - MetricAggregationWithField - MetricAggregationWithInlineScript - Hide *bool `json:"hide,omitempty"` - Id string `json:"id"` - Settings *struct { - Missing *string `json:"missing,omitempty"` - Percents []string `json:"percents,omitempty"` - Script *any `json:"script,omitempty"` - } `json:"settings,omitempty"` - Type MetricAggregationType `json:"type"` +// NewNested creates a new Nested object. 
+func NewNested() *Nested { + return &Nested{ + Type: "nested", + } +} + +type Terms struct { + Field *string `json:"field,omitempty"` + Id string `json:"id"` + Type string `json:"type"` + Settings *DataqueryTermsSettings `json:"settings,omitempty"` +} + +// NewTerms creates a new Terms object. +func NewTerms() *Terms { + return &Terms{ + Type: "terms", + } +} + +type TermsSettings struct { + Order *TermsOrder `json:"order,omitempty"` + Size *string `json:"size,omitempty"` + MinDocCount *string `json:"min_doc_count,omitempty"` + OrderBy *string `json:"orderBy,omitempty"` + Missing *string `json:"missing,omitempty"` +} + +// NewTermsSettings creates a new TermsSettings object. +func NewTermsSettings() *TermsSettings { + return &TermsSettings{} +} + +type Filters struct { + Id string `json:"id"` + Type string `json:"type"` + Settings *DataqueryFiltersSettings `json:"settings,omitempty"` +} + +// NewFilters creates a new Filters object. +func NewFilters() *Filters { + return &Filters{ + Type: "filters", + } +} + +type Filter struct { + Query string `json:"query"` + Label string `json:"label"` +} + +// NewFilter creates a new Filter object. +func NewFilter() *Filter { + return &Filter{} +} + +type FiltersSettings struct { + Filters []Filter `json:"filters,omitempty"` +} + +// NewFiltersSettings creates a new FiltersSettings object. +func NewFiltersSettings() *FiltersSettings { + return &FiltersSettings{} +} + +type GeoHashGrid struct { + Field *string `json:"field,omitempty"` + Id string `json:"id"` + Type string `json:"type"` + Settings *DataqueryGeoHashGridSettings `json:"settings,omitempty"` +} + +// NewGeoHashGrid creates a new GeoHashGrid object. +func NewGeoHashGrid() *GeoHashGrid { + return &GeoHashGrid{ + Type: "geohash_grid", + } +} + +type GeoHashGridSettings struct { + Precision *string `json:"precision,omitempty"` +} + +// NewGeoHashGridSettings creates a new GeoHashGridSettings object. +func NewGeoHashGridSettings() *GeoHashGridSettings { + return &GeoHashGridSettings{} } -// PipelineMetricAggregationType defines model for PipelineMetricAggregationType. type PipelineMetricAggregationType string -// PipelineMetricAggregationWithMultipleBucketPaths defines model for PipelineMetricAggregationWithMultipleBucketPaths. -type PipelineMetricAggregationWithMultipleBucketPaths struct { - BaseMetricAggregation - PipelineVariables []PipelineVariable `json:"pipelineVariables,omitempty"` +const ( + PipelineMetricAggregationTypeMovingAvg PipelineMetricAggregationType = "moving_avg" + PipelineMetricAggregationTypeMovingFn PipelineMetricAggregationType = "moving_fn" + PipelineMetricAggregationTypeDerivative PipelineMetricAggregationType = "derivative" + PipelineMetricAggregationTypeSerialDiff PipelineMetricAggregationType = "serial_diff" + PipelineMetricAggregationTypeCumulativeSum PipelineMetricAggregationType = "cumulative_sum" + PipelineMetricAggregationTypeBucketScript PipelineMetricAggregationType = "bucket_script" +) + +type MetricAggregationType = StringOrPipelineMetricAggregationType + +// NewMetricAggregationType creates a new MetricAggregationType object. +func NewMetricAggregationType() *MetricAggregationType { + return NewStringOrPipelineMetricAggregationType() +} + +type BaseMetricAggregation struct { + Type MetricAggregationType `json:"type"` + Id string `json:"id"` + Hide *bool `json:"hide,omitempty"` +} + +// NewBaseMetricAggregation creates a new BaseMetricAggregation object. 
+func NewBaseMetricAggregation() *BaseMetricAggregation { + return &BaseMetricAggregation{ + Type: *NewMetricAggregationType(), + } } -// PipelineVariable defines model for PipelineVariable. type PipelineVariable struct { Name string `json:"name"` PipelineAgg string `json:"pipelineAgg"` } -// Rate defines model for Rate. -type Rate struct { - MetricAggregationWithField - Hide *bool `json:"hide,omitempty"` - Id string `json:"id"` - Settings *struct { - Mode *string `json:"mode,omitempty"` - Unit *string `json:"unit,omitempty"` - } `json:"settings,omitempty"` - Type MetricAggregationType `json:"type"` +// NewPipelineVariable creates a new PipelineVariable object. +func NewPipelineVariable() *PipelineVariable { + return &PipelineVariable{} } -// RawData defines model for RawData. -type RawData struct { - BaseMetricAggregation - Hide *bool `json:"hide,omitempty"` - Id string `json:"id"` - Settings *struct { - Size *string `json:"size,omitempty"` - } `json:"settings,omitempty"` - Type MetricAggregationType `json:"type"` +type MetricAggregationWithField struct { + Field *string `json:"field,omitempty"` + Type MetricAggregationType `json:"type"` + Id string `json:"id"` + Hide *bool `json:"hide,omitempty"` } -// RawDocument defines model for RawDocument. -type RawDocument struct { - BaseMetricAggregation - Hide *bool `json:"hide,omitempty"` - Id string `json:"id"` - Settings *struct { - Size *string `json:"size,omitempty"` - } `json:"settings,omitempty"` - Type MetricAggregationType `json:"type"` +// NewMetricAggregationWithField creates a new MetricAggregationWithField object. +func NewMetricAggregationWithField() *MetricAggregationWithField { + return &MetricAggregationWithField{ + Type: *NewMetricAggregationType(), + } } -// SerialDiff defines model for SerialDiff. -type SerialDiff struct { - BasePipelineMetricAggregation - Hide *bool `json:"hide,omitempty"` - Id string `json:"id"` - Settings *struct { - Lag *string `json:"lag,omitempty"` - } `json:"settings,omitempty"` - Type MetricAggregationType `json:"type"` +type MetricAggregationWithMissingSupport struct { + Settings *DataqueryMetricAggregationWithMissingSupportSettings `json:"settings,omitempty"` + Type MetricAggregationType `json:"type"` + Id string `json:"id"` + Hide *bool `json:"hide,omitempty"` +} + +// NewMetricAggregationWithMissingSupport creates a new MetricAggregationWithMissingSupport object. +func NewMetricAggregationWithMissingSupport() *MetricAggregationWithMissingSupport { + return &MetricAggregationWithMissingSupport{ + Type: *NewMetricAggregationType(), + } +} + +type InlineScript = StringOrDataqueryInlineScript + +// NewInlineScript creates a new InlineScript object. +func NewInlineScript() *InlineScript { + return NewStringOrDataqueryInlineScript() +} + +type MetricAggregationWithInlineScript struct { + Settings *DataqueryMetricAggregationWithInlineScriptSettings `json:"settings,omitempty"` + Type MetricAggregationType `json:"type"` + Id string `json:"id"` + Hide *bool `json:"hide,omitempty"` +} + +// NewMetricAggregationWithInlineScript creates a new MetricAggregationWithInlineScript object. +func NewMetricAggregationWithInlineScript() *MetricAggregationWithInlineScript { + return &MetricAggregationWithInlineScript{ + Type: *NewMetricAggregationType(), + } +} + +type Count struct { + Type string `json:"type"` + Id string `json:"id"` + Hide *bool `json:"hide,omitempty"` +} + +// NewCount creates a new Count object. 
+func NewCount() *Count { + return &Count{ + Type: "count", + } +} + +type Average struct { + Type string `json:"type"` + Field *string `json:"field,omitempty"` + Id string `json:"id"` + Settings *DataqueryAverageSettings `json:"settings,omitempty"` + Hide *bool `json:"hide,omitempty"` +} + +// NewAverage creates a new Average object. +func NewAverage() *Average { + return &Average{ + Type: "avg", + } } -// Sum defines model for Sum. type Sum struct { - MetricAggregationWithField - MetricAggregationWithInlineScript - Hide *bool `json:"hide,omitempty"` - Id string `json:"id"` - Settings *struct { - Missing *string `json:"missing,omitempty"` - Script *any `json:"script,omitempty"` - } `json:"settings,omitempty"` - Type MetricAggregationType `json:"type"` -} - -// Terms defines model for Terms. -type Terms struct { - BucketAggregationWithField + Type string `json:"type"` + Field *string `json:"field,omitempty"` Id string `json:"id"` - Settings *any `json:"settings,omitempty"` - Type BucketAggregationType `json:"type"` + Settings *DataquerySumSettings `json:"settings,omitempty"` + Hide *bool `json:"hide,omitempty"` } -// TermsOrder defines model for TermsOrder. -type TermsOrder string - -// TermsSettings defines model for TermsSettings. -type TermsSettings struct { - MinDocCount *string `json:"min_doc_count,omitempty"` - Missing *string `json:"missing,omitempty"` - Order *TermsOrder `json:"order,omitempty"` - OrderBy *string `json:"orderBy,omitempty"` - Size *string `json:"size,omitempty"` +// NewSum creates a new Sum object. +func NewSum() *Sum { + return &Sum{ + Type: "sum", + } } -// TopMetrics defines model for TopMetrics. -type TopMetrics struct { - BaseMetricAggregation - Hide *bool `json:"hide,omitempty"` - Id string `json:"id"` - Settings *struct { - Metrics []string `json:"metrics,omitempty"` - Order *string `json:"order,omitempty"` - OrderBy *string `json:"orderBy,omitempty"` - } `json:"settings,omitempty"` - Type MetricAggregationType `json:"type"` +type Max struct { + Type string `json:"type"` + Field *string `json:"field,omitempty"` + Id string `json:"id"` + Settings *DataqueryMaxSettings `json:"settings,omitempty"` + Hide *bool `json:"hide,omitempty"` +} + +// NewMax creates a new Max object. +func NewMax() *Max { + return &Max{ + Type: "max", + } +} + +type Min struct { + Type string `json:"type"` + Field *string `json:"field,omitempty"` + Id string `json:"id"` + Settings *DataqueryMinSettings `json:"settings,omitempty"` + Hide *bool `json:"hide,omitempty"` +} + +// NewMin creates a new Min object. +func NewMin() *Min { + return &Min{ + Type: "min", + } +} + +type ExtendedStatMetaType string + +const ( + ExtendedStatMetaTypeAvg ExtendedStatMetaType = "avg" + ExtendedStatMetaTypeMin ExtendedStatMetaType = "min" + ExtendedStatMetaTypeMax ExtendedStatMetaType = "max" + ExtendedStatMetaTypeSum ExtendedStatMetaType = "sum" + ExtendedStatMetaTypeCount ExtendedStatMetaType = "count" + ExtendedStatMetaTypeStdDeviation ExtendedStatMetaType = "std_deviation" + ExtendedStatMetaTypeStdDeviationBoundsUpper ExtendedStatMetaType = "std_deviation_bounds_upper" + ExtendedStatMetaTypeStdDeviationBoundsLower ExtendedStatMetaType = "std_deviation_bounds_lower" +) + +type ExtendedStat struct { + Label string `json:"label"` + Value ExtendedStatMetaType `json:"value"` +} + +// NewExtendedStat creates a new ExtendedStat object. 
+func NewExtendedStat() *ExtendedStat { + return &ExtendedStat{} +} + +type ExtendedStats struct { + Type string `json:"type"` + Settings *DataqueryExtendedStatsSettings `json:"settings,omitempty"` + Field *string `json:"field,omitempty"` + Id string `json:"id"` + Meta any `json:"meta,omitempty"` + Hide *bool `json:"hide,omitempty"` +} + +// NewExtendedStats creates a new ExtendedStats object. +func NewExtendedStats() *ExtendedStats { + return &ExtendedStats{ + Type: "extended_stats", + } +} + +type Percentiles struct { + Type string `json:"type"` + Field *string `json:"field,omitempty"` + Id string `json:"id"` + Settings *DataqueryPercentilesSettings `json:"settings,omitempty"` + Hide *bool `json:"hide,omitempty"` +} + +// NewPercentiles creates a new Percentiles object. +func NewPercentiles() *Percentiles { + return &Percentiles{ + Type: "percentiles", + } } -// UniqueCount defines model for UniqueCount. type UniqueCount struct { - MetricAggregationWithField - Hide *bool `json:"hide,omitempty"` - Id string `json:"id"` - Settings *struct { - Missing *string `json:"missing,omitempty"` - PrecisionThreshold *string `json:"precision_threshold,omitempty"` - } `json:"settings,omitempty"` - Type MetricAggregationType `json:"type"` + Type string `json:"type"` + Field *string `json:"field,omitempty"` + Id string `json:"id"` + Settings *DataqueryUniqueCountSettings `json:"settings,omitempty"` + Hide *bool `json:"hide,omitempty"` +} + +// NewUniqueCount creates a new UniqueCount object. +func NewUniqueCount() *UniqueCount { + return &UniqueCount{ + Type: "cardinality", + } +} + +type RawDocument struct { + Type string `json:"type"` + Id string `json:"id"` + Settings *DataqueryRawDocumentSettings `json:"settings,omitempty"` + Hide *bool `json:"hide,omitempty"` +} + +// NewRawDocument creates a new RawDocument object. +func NewRawDocument() *RawDocument { + return &RawDocument{ + Type: "raw_document", + } +} + +type RawData struct { + Type string `json:"type"` + Id string `json:"id"` + Settings *DataqueryRawDataSettings `json:"settings,omitempty"` + Hide *bool `json:"hide,omitempty"` +} + +// NewRawData creates a new RawData object. +func NewRawData() *RawData { + return &RawData{ + Type: "raw_data", + } +} + +type Logs struct { + Type string `json:"type"` + Id string `json:"id"` + Settings *DataqueryLogsSettings `json:"settings,omitempty"` + Hide *bool `json:"hide,omitempty"` +} + +// NewLogs creates a new Logs object. +func NewLogs() *Logs { + return &Logs{ + Type: "logs", + } +} + +type Rate struct { + Type string `json:"type"` + Field *string `json:"field,omitempty"` + Id string `json:"id"` + Settings *DataqueryRateSettings `json:"settings,omitempty"` + Hide *bool `json:"hide,omitempty"` +} + +// NewRate creates a new Rate object. +func NewRate() *Rate { + return &Rate{ + Type: "rate", + } +} + +type BasePipelineMetricAggregation struct { + PipelineAgg *string `json:"pipelineAgg,omitempty"` + Field *string `json:"field,omitempty"` + Type string `json:"type"` + Id string `json:"id"` + Hide *bool `json:"hide,omitempty"` +} + +// NewBasePipelineMetricAggregation creates a new BasePipelineMetricAggregation object. 
+func NewBasePipelineMetricAggregation() *BasePipelineMetricAggregation { + return &BasePipelineMetricAggregation{} +} + +type PipelineMetricAggregationWithMultipleBucketPaths struct { + PipelineVariables []PipelineVariable `json:"pipelineVariables,omitempty"` + Type MetricAggregationType `json:"type"` + Id string `json:"id"` + Hide *bool `json:"hide,omitempty"` +} + +// NewPipelineMetricAggregationWithMultipleBucketPaths creates a new PipelineMetricAggregationWithMultipleBucketPaths object. +func NewPipelineMetricAggregationWithMultipleBucketPaths() *PipelineMetricAggregationWithMultipleBucketPaths { + return &PipelineMetricAggregationWithMultipleBucketPaths{ + Type: *NewMetricAggregationType(), + } +} + +type MovingAverageModel string + +const ( + MovingAverageModelSimple MovingAverageModel = "simple" + MovingAverageModelLinear MovingAverageModel = "linear" + MovingAverageModelEwma MovingAverageModel = "ewma" + MovingAverageModelHolt MovingAverageModel = "holt" + MovingAverageModelHoltWinters MovingAverageModel = "holt_winters" +) + +type MovingAverageModelOption struct { + Label string `json:"label"` + Value MovingAverageModel `json:"value"` +} + +// NewMovingAverageModelOption creates a new MovingAverageModelOption object. +func NewMovingAverageModelOption() *MovingAverageModelOption { + return &MovingAverageModelOption{} +} + +type BaseMovingAverageModelSettings struct { + Model MovingAverageModel `json:"model"` + Window string `json:"window"` + Predict string `json:"predict"` +} + +// NewBaseMovingAverageModelSettings creates a new BaseMovingAverageModelSettings object. +func NewBaseMovingAverageModelSettings() *BaseMovingAverageModelSettings { + return &BaseMovingAverageModelSettings{} +} + +type MovingAverageSimpleModelSettings struct { + Model string `json:"model"` + Window string `json:"window"` + Predict string `json:"predict"` +} + +// NewMovingAverageSimpleModelSettings creates a new MovingAverageSimpleModelSettings object. +func NewMovingAverageSimpleModelSettings() *MovingAverageSimpleModelSettings { + return &MovingAverageSimpleModelSettings{ + Model: "simple", + } +} + +type MovingAverageLinearModelSettings struct { + Model string `json:"model"` + Window string `json:"window"` + Predict string `json:"predict"` +} + +// NewMovingAverageLinearModelSettings creates a new MovingAverageLinearModelSettings object. +func NewMovingAverageLinearModelSettings() *MovingAverageLinearModelSettings { + return &MovingAverageLinearModelSettings{ + Model: "linear", + } +} + +type MovingAverageEWMAModelSettings struct { + Model string `json:"model"` + Settings *DataqueryMovingAverageEWMAModelSettingsSettings `json:"settings,omitempty"` + Window string `json:"window"` + Minimize bool `json:"minimize"` + Predict string `json:"predict"` +} + +// NewMovingAverageEWMAModelSettings creates a new MovingAverageEWMAModelSettings object. +func NewMovingAverageEWMAModelSettings() *MovingAverageEWMAModelSettings { + return &MovingAverageEWMAModelSettings{ + Model: "ewma", + } +} + +type MovingAverageHoltModelSettings struct { + Model string `json:"model"` + Settings DataqueryMovingAverageHoltModelSettingsSettings `json:"settings"` + Window string `json:"window"` + Minimize bool `json:"minimize"` + Predict string `json:"predict"` +} + +// NewMovingAverageHoltModelSettings creates a new MovingAverageHoltModelSettings object. 
+func NewMovingAverageHoltModelSettings() *MovingAverageHoltModelSettings { + return &MovingAverageHoltModelSettings{ + Model: "holt", + Settings: *NewDataqueryMovingAverageHoltModelSettingsSettings(), + } +} + +type MovingAverageHoltWintersModelSettings struct { + Model string `json:"model"` + Settings DataqueryMovingAverageHoltWintersModelSettingsSettings `json:"settings"` + Window string `json:"window"` + Minimize bool `json:"minimize"` + Predict string `json:"predict"` +} + +// NewMovingAverageHoltWintersModelSettings creates a new MovingAverageHoltWintersModelSettings object. +func NewMovingAverageHoltWintersModelSettings() *MovingAverageHoltWintersModelSettings { + return &MovingAverageHoltWintersModelSettings{ + Model: "holt_winters", + Settings: *NewDataqueryMovingAverageHoltWintersModelSettingsSettings(), + } +} + +// #MovingAverage's settings are overridden in types.ts +type MovingAverage struct { + PipelineAgg *string `json:"pipelineAgg,omitempty"` + Field *string `json:"field,omitempty"` + Type string `json:"type"` + Id string `json:"id"` + Settings map[string]any `json:"settings,omitempty"` + Hide *bool `json:"hide,omitempty"` +} + +// NewMovingAverage creates a new MovingAverage object. +func NewMovingAverage() *MovingAverage { + return &MovingAverage{ + Type: "moving_avg", + } +} + +type MovingFunction struct { + PipelineAgg *string `json:"pipelineAgg,omitempty"` + Field *string `json:"field,omitempty"` + Type string `json:"type"` + Id string `json:"id"` + Settings *DataqueryMovingFunctionSettings `json:"settings,omitempty"` + Hide *bool `json:"hide,omitempty"` +} + +// NewMovingFunction creates a new MovingFunction object. +func NewMovingFunction() *MovingFunction { + return &MovingFunction{ + Type: "moving_fn", + } +} + +type Derivative struct { + PipelineAgg *string `json:"pipelineAgg,omitempty"` + Field *string `json:"field,omitempty"` + Type string `json:"type"` + Id string `json:"id"` + Settings *DataqueryDerivativeSettings `json:"settings,omitempty"` + Hide *bool `json:"hide,omitempty"` +} + +// NewDerivative creates a new Derivative object. +func NewDerivative() *Derivative { + return &Derivative{ + Type: "derivative", + } +} + +type SerialDiff struct { + PipelineAgg *string `json:"pipelineAgg,omitempty"` + Field *string `json:"field,omitempty"` + Type string `json:"type"` + Id string `json:"id"` + Settings *DataquerySerialDiffSettings `json:"settings,omitempty"` + Hide *bool `json:"hide,omitempty"` +} + +// NewSerialDiff creates a new SerialDiff object. +func NewSerialDiff() *SerialDiff { + return &SerialDiff{ + Type: "serial_diff", + } +} + +type CumulativeSum struct { + PipelineAgg *string `json:"pipelineAgg,omitempty"` + Field *string `json:"field,omitempty"` + Type string `json:"type"` + Id string `json:"id"` + Settings *DataqueryCumulativeSumSettings `json:"settings,omitempty"` + Hide *bool `json:"hide,omitempty"` +} + +// NewCumulativeSum creates a new CumulativeSum object. +func NewCumulativeSum() *CumulativeSum { + return &CumulativeSum{ + Type: "cumulative_sum", + } +} + +type BucketScript struct { + Type string `json:"type"` + PipelineVariables []PipelineVariable `json:"pipelineVariables,omitempty"` + Id string `json:"id"` + Settings *DataqueryBucketScriptSettings `json:"settings,omitempty"` + Hide *bool `json:"hide,omitempty"` +} + +// NewBucketScript creates a new BucketScript object. 
+func NewBucketScript() *BucketScript { + return &BucketScript{ + Type: "bucket_script", + } +} + +type TopMetrics struct { + Type string `json:"type"` + Id string `json:"id"` + Settings *DataqueryTopMetricsSettings `json:"settings,omitempty"` + Hide *bool `json:"hide,omitempty"` +} + +// NewTopMetrics creates a new TopMetrics object. +func NewTopMetrics() *TopMetrics { + return &TopMetrics{ + Type: "top_metrics", + } +} + +type PipelineMetricAggregation = MovingAverageOrDerivativeOrCumulativeSumOrBucketScript + +// NewPipelineMetricAggregation creates a new PipelineMetricAggregation object. +func NewPipelineMetricAggregation() *PipelineMetricAggregation { + return NewMovingAverageOrDerivativeOrCumulativeSumOrBucketScript() +} + +type MetricAggregationWithSettings = BucketScriptOrCumulativeSumOrDerivativeOrSerialDiffOrRawDataOrRawDocumentOrUniqueCountOrPercentilesOrExtendedStatsOrMinOrMaxOrSumOrAverageOrMovingAverageOrMovingFunctionOrLogsOrRateOrTopMetrics + +// NewMetricAggregationWithSettings creates a new MetricAggregationWithSettings object. +func NewMetricAggregationWithSettings() *MetricAggregationWithSettings { + return NewBucketScriptOrCumulativeSumOrDerivativeOrSerialDiffOrRawDataOrRawDocumentOrUniqueCountOrPercentilesOrExtendedStatsOrMinOrMaxOrSumOrAverageOrMovingAverageOrMovingFunctionOrLogsOrRateOrTopMetrics() +} + +type ElasticsearchDataQuery struct { + // Alias pattern + Alias *string `json:"alias,omitempty"` + // Lucene query + Query *string `json:"query,omitempty"` + // Name of time field + TimeField *string `json:"timeField,omitempty"` + // List of bucket aggregations + BucketAggs []BucketAggregation `json:"bucketAggs,omitempty"` + // List of metric aggregations + Metrics []MetricAggregation `json:"metrics,omitempty"` + // A unique identifier for the query within the list of targets. + // In server side expressions, the refId is used as a variable name to identify results. + // By default, the UI will assign A->Z; however setting meaningful names may be useful. + RefId string `json:"refId"` + // If hide is set to true, Grafana will filter out the response(s) associated with this query before returning it to the panel. + Hide *bool `json:"hide,omitempty"` + // Specify the query flavor + // TODO make this required and give it a default + QueryType *string `json:"queryType,omitempty"` + // For mixed data sources the selected datasource is on the query level. + // For non mixed scenarios this is undefined. + // TODO find a better way to do this ^ that's friendly to schema + // TODO this shouldn't be unknown but DataSourceRef | null + Datasource any `json:"datasource,omitempty"` +} + +// NewElasticsearchDataQuery creates a new ElasticsearchDataQuery object. +func NewElasticsearchDataQuery() *ElasticsearchDataQuery { + return &ElasticsearchDataQuery{} +} + +type DataqueryDateHistogramSettings struct { + Interval *string `json:"interval,omitempty"` + MinDocCount *string `json:"min_doc_count,omitempty"` + TrimEdges *string `json:"trimEdges,omitempty"` + Offset *string `json:"offset,omitempty"` + TimeZone *string `json:"timeZone,omitempty"` +} + +// NewDataqueryDateHistogramSettings creates a new DataqueryDateHistogramSettings object. 
+func NewDataqueryDateHistogramSettings() *DataqueryDateHistogramSettings { + return &DataqueryDateHistogramSettings{} +} + +type DataqueryHistogramSettings struct { + Interval *string `json:"interval,omitempty"` + MinDocCount *string `json:"min_doc_count,omitempty"` +} + +// NewDataqueryHistogramSettings creates a new DataqueryHistogramSettings object. +func NewDataqueryHistogramSettings() *DataqueryHistogramSettings { + return &DataqueryHistogramSettings{} +} + +type DataqueryTermsSettings struct { + Order *TermsOrder `json:"order,omitempty"` + Size *string `json:"size,omitempty"` + MinDocCount *string `json:"min_doc_count,omitempty"` + OrderBy *string `json:"orderBy,omitempty"` + Missing *string `json:"missing,omitempty"` +} + +// NewDataqueryTermsSettings creates a new DataqueryTermsSettings object. +func NewDataqueryTermsSettings() *DataqueryTermsSettings { + return &DataqueryTermsSettings{} +} + +type DataqueryFiltersSettings struct { + Filters []Filter `json:"filters,omitempty"` +} + +// NewDataqueryFiltersSettings creates a new DataqueryFiltersSettings object. +func NewDataqueryFiltersSettings() *DataqueryFiltersSettings { + return &DataqueryFiltersSettings{} +} + +type DataqueryGeoHashGridSettings struct { + Precision *string `json:"precision,omitempty"` +} + +// NewDataqueryGeoHashGridSettings creates a new DataqueryGeoHashGridSettings object. +func NewDataqueryGeoHashGridSettings() *DataqueryGeoHashGridSettings { + return &DataqueryGeoHashGridSettings{} +} + +type DataqueryMetricAggregationWithMissingSupportSettings struct { + Missing *string `json:"missing,omitempty"` +} + +// NewDataqueryMetricAggregationWithMissingSupportSettings creates a new DataqueryMetricAggregationWithMissingSupportSettings object. +func NewDataqueryMetricAggregationWithMissingSupportSettings() *DataqueryMetricAggregationWithMissingSupportSettings { + return &DataqueryMetricAggregationWithMissingSupportSettings{} +} + +type DataqueryInlineScript struct { + Inline *string `json:"inline,omitempty"` +} + +// NewDataqueryInlineScript creates a new DataqueryInlineScript object. +func NewDataqueryInlineScript() *DataqueryInlineScript { + return &DataqueryInlineScript{} +} + +type DataqueryMetricAggregationWithInlineScriptSettings struct { + Script *InlineScript `json:"script,omitempty"` +} + +// NewDataqueryMetricAggregationWithInlineScriptSettings creates a new DataqueryMetricAggregationWithInlineScriptSettings object. +func NewDataqueryMetricAggregationWithInlineScriptSettings() *DataqueryMetricAggregationWithInlineScriptSettings { + return &DataqueryMetricAggregationWithInlineScriptSettings{} +} + +type DataqueryAverageSettings struct { + Script *InlineScript `json:"script,omitempty"` + Missing *string `json:"missing,omitempty"` +} + +// NewDataqueryAverageSettings creates a new DataqueryAverageSettings object. +func NewDataqueryAverageSettings() *DataqueryAverageSettings { + return &DataqueryAverageSettings{} +} + +type DataquerySumSettings struct { + Script *InlineScript `json:"script,omitempty"` + Missing *string `json:"missing,omitempty"` +} + +// NewDataquerySumSettings creates a new DataquerySumSettings object. +func NewDataquerySumSettings() *DataquerySumSettings { + return &DataquerySumSettings{} +} + +type DataqueryMaxSettings struct { + Script *InlineScript `json:"script,omitempty"` + Missing *string `json:"missing,omitempty"` +} + +// NewDataqueryMaxSettings creates a new DataqueryMaxSettings object. 
+func NewDataqueryMaxSettings() *DataqueryMaxSettings { + return &DataqueryMaxSettings{} +} + +type DataqueryMinSettings struct { + Script *InlineScript `json:"script,omitempty"` + Missing *string `json:"missing,omitempty"` +} + +// NewDataqueryMinSettings creates a new DataqueryMinSettings object. +func NewDataqueryMinSettings() *DataqueryMinSettings { + return &DataqueryMinSettings{} +} + +type DataqueryExtendedStatsSettings struct { + Script *InlineScript `json:"script,omitempty"` + Missing *string `json:"missing,omitempty"` + Sigma *string `json:"sigma,omitempty"` +} + +// NewDataqueryExtendedStatsSettings creates a new DataqueryExtendedStatsSettings object. +func NewDataqueryExtendedStatsSettings() *DataqueryExtendedStatsSettings { + return &DataqueryExtendedStatsSettings{} +} + +type DataqueryPercentilesSettings struct { + Script *InlineScript `json:"script,omitempty"` + Missing *string `json:"missing,omitempty"` + Percents []string `json:"percents,omitempty"` +} + +// NewDataqueryPercentilesSettings creates a new DataqueryPercentilesSettings object. +func NewDataqueryPercentilesSettings() *DataqueryPercentilesSettings { + return &DataqueryPercentilesSettings{} +} + +type DataqueryUniqueCountSettings struct { + PrecisionThreshold *string `json:"precision_threshold,omitempty"` + Missing *string `json:"missing,omitempty"` +} + +// NewDataqueryUniqueCountSettings creates a new DataqueryUniqueCountSettings object. +func NewDataqueryUniqueCountSettings() *DataqueryUniqueCountSettings { + return &DataqueryUniqueCountSettings{} +} + +type DataqueryRawDocumentSettings struct { + Size *string `json:"size,omitempty"` +} + +// NewDataqueryRawDocumentSettings creates a new DataqueryRawDocumentSettings object. +func NewDataqueryRawDocumentSettings() *DataqueryRawDocumentSettings { + return &DataqueryRawDocumentSettings{} +} + +type DataqueryRawDataSettings struct { + Size *string `json:"size,omitempty"` +} + +// NewDataqueryRawDataSettings creates a new DataqueryRawDataSettings object. +func NewDataqueryRawDataSettings() *DataqueryRawDataSettings { + return &DataqueryRawDataSettings{} +} + +type DataqueryLogsSettings struct { + Limit *string `json:"limit,omitempty"` +} + +// NewDataqueryLogsSettings creates a new DataqueryLogsSettings object. +func NewDataqueryLogsSettings() *DataqueryLogsSettings { + return &DataqueryLogsSettings{} +} + +type DataqueryRateSettings struct { + Unit *string `json:"unit,omitempty"` + Mode *string `json:"mode,omitempty"` +} + +// NewDataqueryRateSettings creates a new DataqueryRateSettings object. +func NewDataqueryRateSettings() *DataqueryRateSettings { + return &DataqueryRateSettings{} +} + +type DataqueryMovingAverageEWMAModelSettingsSettings struct { + Alpha *string `json:"alpha,omitempty"` +} + +// NewDataqueryMovingAverageEWMAModelSettingsSettings creates a new DataqueryMovingAverageEWMAModelSettingsSettings object. +func NewDataqueryMovingAverageEWMAModelSettingsSettings() *DataqueryMovingAverageEWMAModelSettingsSettings { + return &DataqueryMovingAverageEWMAModelSettingsSettings{} +} + +type DataqueryMovingAverageHoltModelSettingsSettings struct { + Alpha *string `json:"alpha,omitempty"` + Beta *string `json:"beta,omitempty"` +} + +// NewDataqueryMovingAverageHoltModelSettingsSettings creates a new DataqueryMovingAverageHoltModelSettingsSettings object. 
+func NewDataqueryMovingAverageHoltModelSettingsSettings() *DataqueryMovingAverageHoltModelSettingsSettings { + return &DataqueryMovingAverageHoltModelSettingsSettings{} +} + +type DataqueryMovingAverageHoltWintersModelSettingsSettings struct { + Alpha *string `json:"alpha,omitempty"` + Beta *string `json:"beta,omitempty"` + Gamma *string `json:"gamma,omitempty"` + Period *string `json:"period,omitempty"` + Pad *bool `json:"pad,omitempty"` +} + +// NewDataqueryMovingAverageHoltWintersModelSettingsSettings creates a new DataqueryMovingAverageHoltWintersModelSettingsSettings object. +func NewDataqueryMovingAverageHoltWintersModelSettingsSettings() *DataqueryMovingAverageHoltWintersModelSettingsSettings { + return &DataqueryMovingAverageHoltWintersModelSettingsSettings{} +} + +type DataqueryMovingFunctionSettings struct { + Window *string `json:"window,omitempty"` + Script *InlineScript `json:"script,omitempty"` + Shift *string `json:"shift,omitempty"` +} + +// NewDataqueryMovingFunctionSettings creates a new DataqueryMovingFunctionSettings object. +func NewDataqueryMovingFunctionSettings() *DataqueryMovingFunctionSettings { + return &DataqueryMovingFunctionSettings{} +} + +type DataqueryDerivativeSettings struct { + Unit *string `json:"unit,omitempty"` +} + +// NewDataqueryDerivativeSettings creates a new DataqueryDerivativeSettings object. +func NewDataqueryDerivativeSettings() *DataqueryDerivativeSettings { + return &DataqueryDerivativeSettings{} +} + +type DataquerySerialDiffSettings struct { + Lag *string `json:"lag,omitempty"` +} + +// NewDataquerySerialDiffSettings creates a new DataquerySerialDiffSettings object. +func NewDataquerySerialDiffSettings() *DataquerySerialDiffSettings { + return &DataquerySerialDiffSettings{} +} + +type DataqueryCumulativeSumSettings struct { + Format *string `json:"format,omitempty"` +} + +// NewDataqueryCumulativeSumSettings creates a new DataqueryCumulativeSumSettings object. +func NewDataqueryCumulativeSumSettings() *DataqueryCumulativeSumSettings { + return &DataqueryCumulativeSumSettings{} +} + +type DataqueryBucketScriptSettings struct { + Script *InlineScript `json:"script,omitempty"` +} + +// NewDataqueryBucketScriptSettings creates a new DataqueryBucketScriptSettings object. +func NewDataqueryBucketScriptSettings() *DataqueryBucketScriptSettings { + return &DataqueryBucketScriptSettings{} +} + +type DataqueryTopMetricsSettings struct { + Order *string `json:"order,omitempty"` + OrderBy *string `json:"orderBy,omitempty"` + Metrics []string `json:"metrics,omitempty"` +} + +// NewDataqueryTopMetricsSettings creates a new DataqueryTopMetricsSettings object. +func NewDataqueryTopMetricsSettings() *DataqueryTopMetricsSettings { + return &DataqueryTopMetricsSettings{} +} + +type DateHistogramOrHistogramOrTermsOrFiltersOrGeoHashGridOrNested struct { + DateHistogram *DateHistogram `json:"DateHistogram,omitempty"` + Histogram *Histogram `json:"Histogram,omitempty"` + Terms *Terms `json:"Terms,omitempty"` + Filters *Filters `json:"Filters,omitempty"` + GeoHashGrid *GeoHashGrid `json:"GeoHashGrid,omitempty"` + Nested *Nested `json:"Nested,omitempty"` +} + +// NewDateHistogramOrHistogramOrTermsOrFiltersOrGeoHashGridOrNested creates a new DateHistogramOrHistogramOrTermsOrFiltersOrGeoHashGridOrNested object. 
+func NewDateHistogramOrHistogramOrTermsOrFiltersOrGeoHashGridOrNested() *DateHistogramOrHistogramOrTermsOrFiltersOrGeoHashGridOrNested { + return &DateHistogramOrHistogramOrTermsOrFiltersOrGeoHashGridOrNested{} +} + +// MarshalJSON implements a custom JSON marshalling logic to encode `DateHistogramOrHistogramOrTermsOrFiltersOrGeoHashGridOrNested` as JSON. +func (resource DateHistogramOrHistogramOrTermsOrFiltersOrGeoHashGridOrNested) MarshalJSON() ([]byte, error) { + if resource.DateHistogram != nil { + return json.Marshal(resource.DateHistogram) + } + if resource.Histogram != nil { + return json.Marshal(resource.Histogram) + } + if resource.Terms != nil { + return json.Marshal(resource.Terms) + } + if resource.Filters != nil { + return json.Marshal(resource.Filters) + } + if resource.GeoHashGrid != nil { + return json.Marshal(resource.GeoHashGrid) + } + if resource.Nested != nil { + return json.Marshal(resource.Nested) + } + + return nil, fmt.Errorf("no value for disjunction of refs") +} + +// UnmarshalJSON implements a custom JSON unmarshalling logic to decode `DateHistogramOrHistogramOrTermsOrFiltersOrGeoHashGridOrNested` from JSON. +func (resource *DateHistogramOrHistogramOrTermsOrFiltersOrGeoHashGridOrNested) UnmarshalJSON(raw []byte) error { + if raw == nil { + return nil + } + + // FIXME: this is wasteful, we need to find a more efficient way to unmarshal this. + parsedAsMap := make(map[string]any) + if err := json.Unmarshal(raw, &parsedAsMap); err != nil { + return err + } + + discriminator, found := parsedAsMap["type"] + if !found { + return errors.New("discriminator field 'type' not found in payload") + } + + switch discriminator { + case "date_histogram": + var dateHistogram DateHistogram + if err := json.Unmarshal(raw, &dateHistogram); err != nil { + return err + } + + resource.DateHistogram = &dateHistogram + return nil + case "filters": + var filters Filters + if err := json.Unmarshal(raw, &filters); err != nil { + return err + } + + resource.Filters = &filters + return nil + case "geohash_grid": + var geoHashGrid GeoHashGrid + if err := json.Unmarshal(raw, &geoHashGrid); err != nil { + return err + } + + resource.GeoHashGrid = &geoHashGrid + return nil + case "histogram": + var histogram Histogram + if err := json.Unmarshal(raw, &histogram); err != nil { + return err + } + + resource.Histogram = &histogram + return nil + case "nested": + var nested Nested + if err := json.Unmarshal(raw, &nested); err != nil { + return err + } + + resource.Nested = &nested + return nil + case "terms": + var terms Terms + if err := json.Unmarshal(raw, &terms); err != nil { + return err + } + + resource.Terms = &terms + return nil + } + + return fmt.Errorf("could not unmarshal resource with `type = %v`", discriminator) +} + +type CountOrMovingAverageOrDerivativeOrCumulativeSumOrBucketScriptOrSerialDiffOrRawDataOrRawDocumentOrUniqueCountOrPercentilesOrExtendedStatsOrMinOrMaxOrSumOrAverageOrMovingFunctionOrLogsOrRateOrTopMetrics struct { + Count *Count `json:"Count,omitempty"` + MovingAverage *MovingAverage `json:"MovingAverage,omitempty"` + Derivative *Derivative `json:"Derivative,omitempty"` + CumulativeSum *CumulativeSum `json:"CumulativeSum,omitempty"` + BucketScript *BucketScript `json:"BucketScript,omitempty"` + SerialDiff *SerialDiff `json:"SerialDiff,omitempty"` + RawData *RawData `json:"RawData,omitempty"` + RawDocument *RawDocument `json:"RawDocument,omitempty"` + UniqueCount *UniqueCount `json:"UniqueCount,omitempty"` + Percentiles *Percentiles `json:"Percentiles,omitempty"` + 
ExtendedStats *ExtendedStats `json:"ExtendedStats,omitempty"` + Min *Min `json:"Min,omitempty"` + Max *Max `json:"Max,omitempty"` + Sum *Sum `json:"Sum,omitempty"` + Average *Average `json:"Average,omitempty"` + MovingFunction *MovingFunction `json:"MovingFunction,omitempty"` + Logs *Logs `json:"Logs,omitempty"` + Rate *Rate `json:"Rate,omitempty"` + TopMetrics *TopMetrics `json:"TopMetrics,omitempty"` +} + +// NewCountOrMovingAverageOrDerivativeOrCumulativeSumOrBucketScriptOrSerialDiffOrRawDataOrRawDocumentOrUniqueCountOrPercentilesOrExtendedStatsOrMinOrMaxOrSumOrAverageOrMovingFunctionOrLogsOrRateOrTopMetrics creates a new CountOrMovingAverageOrDerivativeOrCumulativeSumOrBucketScriptOrSerialDiffOrRawDataOrRawDocumentOrUniqueCountOrPercentilesOrExtendedStatsOrMinOrMaxOrSumOrAverageOrMovingFunctionOrLogsOrRateOrTopMetrics object. +func NewCountOrMovingAverageOrDerivativeOrCumulativeSumOrBucketScriptOrSerialDiffOrRawDataOrRawDocumentOrUniqueCountOrPercentilesOrExtendedStatsOrMinOrMaxOrSumOrAverageOrMovingFunctionOrLogsOrRateOrTopMetrics() *CountOrMovingAverageOrDerivativeOrCumulativeSumOrBucketScriptOrSerialDiffOrRawDataOrRawDocumentOrUniqueCountOrPercentilesOrExtendedStatsOrMinOrMaxOrSumOrAverageOrMovingFunctionOrLogsOrRateOrTopMetrics { + return &CountOrMovingAverageOrDerivativeOrCumulativeSumOrBucketScriptOrSerialDiffOrRawDataOrRawDocumentOrUniqueCountOrPercentilesOrExtendedStatsOrMinOrMaxOrSumOrAverageOrMovingFunctionOrLogsOrRateOrTopMetrics{} +} + +// MarshalJSON implements a custom JSON marshalling logic to encode `CountOrMovingAverageOrDerivativeOrCumulativeSumOrBucketScriptOrSerialDiffOrRawDataOrRawDocumentOrUniqueCountOrPercentilesOrExtendedStatsOrMinOrMaxOrSumOrAverageOrMovingFunctionOrLogsOrRateOrTopMetrics` as JSON. +func (resource CountOrMovingAverageOrDerivativeOrCumulativeSumOrBucketScriptOrSerialDiffOrRawDataOrRawDocumentOrUniqueCountOrPercentilesOrExtendedStatsOrMinOrMaxOrSumOrAverageOrMovingFunctionOrLogsOrRateOrTopMetrics) MarshalJSON() ([]byte, error) { + if resource.Count != nil { + return json.Marshal(resource.Count) + } + if resource.MovingAverage != nil { + return json.Marshal(resource.MovingAverage) + } + if resource.Derivative != nil { + return json.Marshal(resource.Derivative) + } + if resource.CumulativeSum != nil { + return json.Marshal(resource.CumulativeSum) + } + if resource.BucketScript != nil { + return json.Marshal(resource.BucketScript) + } + if resource.SerialDiff != nil { + return json.Marshal(resource.SerialDiff) + } + if resource.RawData != nil { + return json.Marshal(resource.RawData) + } + if resource.RawDocument != nil { + return json.Marshal(resource.RawDocument) + } + if resource.UniqueCount != nil { + return json.Marshal(resource.UniqueCount) + } + if resource.Percentiles != nil { + return json.Marshal(resource.Percentiles) + } + if resource.ExtendedStats != nil { + return json.Marshal(resource.ExtendedStats) + } + if resource.Min != nil { + return json.Marshal(resource.Min) + } + if resource.Max != nil { + return json.Marshal(resource.Max) + } + if resource.Sum != nil { + return json.Marshal(resource.Sum) + } + if resource.Average != nil { + return json.Marshal(resource.Average) + } + if resource.MovingFunction != nil { + return json.Marshal(resource.MovingFunction) + } + if resource.Logs != nil { + return json.Marshal(resource.Logs) + } + if resource.Rate != nil { + return json.Marshal(resource.Rate) + } + if resource.TopMetrics != nil { + return json.Marshal(resource.TopMetrics) + } + + return nil, fmt.Errorf("no value for disjunction of 
refs") +} + +// UnmarshalJSON implements a custom JSON unmarshalling logic to decode `CountOrMovingAverageOrDerivativeOrCumulativeSumOrBucketScriptOrSerialDiffOrRawDataOrRawDocumentOrUniqueCountOrPercentilesOrExtendedStatsOrMinOrMaxOrSumOrAverageOrMovingFunctionOrLogsOrRateOrTopMetrics` from JSON. +func (resource *CountOrMovingAverageOrDerivativeOrCumulativeSumOrBucketScriptOrSerialDiffOrRawDataOrRawDocumentOrUniqueCountOrPercentilesOrExtendedStatsOrMinOrMaxOrSumOrAverageOrMovingFunctionOrLogsOrRateOrTopMetrics) UnmarshalJSON(raw []byte) error { + if raw == nil { + return nil + } + + // FIXME: this is wasteful, we need to find a more efficient way to unmarshal this. + parsedAsMap := make(map[string]any) + if err := json.Unmarshal(raw, &parsedAsMap); err != nil { + return err + } + + discriminator, found := parsedAsMap["type"] + if !found { + return errors.New("discriminator field 'type' not found in payload") + } + + switch discriminator { + case "avg": + var average Average + if err := json.Unmarshal(raw, &average); err != nil { + return err + } + + resource.Average = &average + return nil + case "bucket_script": + var bucketScript BucketScript + if err := json.Unmarshal(raw, &bucketScript); err != nil { + return err + } + + resource.BucketScript = &bucketScript + return nil + case "cardinality": + var uniqueCount UniqueCount + if err := json.Unmarshal(raw, &uniqueCount); err != nil { + return err + } + + resource.UniqueCount = &uniqueCount + return nil + case "count": + var count Count + if err := json.Unmarshal(raw, &count); err != nil { + return err + } + + resource.Count = &count + return nil + case "cumulative_sum": + var cumulativeSum CumulativeSum + if err := json.Unmarshal(raw, &cumulativeSum); err != nil { + return err + } + + resource.CumulativeSum = &cumulativeSum + return nil + case "derivative": + var derivative Derivative + if err := json.Unmarshal(raw, &derivative); err != nil { + return err + } + + resource.Derivative = &derivative + return nil + case "extended_stats": + var extendedStats ExtendedStats + if err := json.Unmarshal(raw, &extendedStats); err != nil { + return err + } + + resource.ExtendedStats = &extendedStats + return nil + case "logs": + var logs Logs + if err := json.Unmarshal(raw, &logs); err != nil { + return err + } + + resource.Logs = &logs + return nil + case "max": + var max Max + if err := json.Unmarshal(raw, &max); err != nil { + return err + } + + resource.Max = &max + return nil + case "min": + var min Min + if err := json.Unmarshal(raw, &min); err != nil { + return err + } + + resource.Min = &min + return nil + case "moving_avg": + var movingAverage MovingAverage + if err := json.Unmarshal(raw, &movingAverage); err != nil { + return err + } + + resource.MovingAverage = &movingAverage + return nil + case "moving_fn": + var movingFunction MovingFunction + if err := json.Unmarshal(raw, &movingFunction); err != nil { + return err + } + + resource.MovingFunction = &movingFunction + return nil + case "percentiles": + var percentiles Percentiles + if err := json.Unmarshal(raw, &percentiles); err != nil { + return err + } + + resource.Percentiles = &percentiles + return nil + case "rate": + var rate Rate + if err := json.Unmarshal(raw, &rate); err != nil { + return err + } + + resource.Rate = &rate + return nil + case "raw_data": + var rawData RawData + if err := json.Unmarshal(raw, &rawData); err != nil { + return err + } + + resource.RawData = &rawData + return nil + case "raw_document": + var rawDocument RawDocument + if err := json.Unmarshal(raw, 
&rawDocument); err != nil { + return err + } + + resource.RawDocument = &rawDocument + return nil + case "serial_diff": + var serialDiff SerialDiff + if err := json.Unmarshal(raw, &serialDiff); err != nil { + return err + } + + resource.SerialDiff = &serialDiff + return nil + case "sum": + var sum Sum + if err := json.Unmarshal(raw, &sum); err != nil { + return err + } + + resource.Sum = &sum + return nil + case "top_metrics": + var topMetrics TopMetrics + if err := json.Unmarshal(raw, &topMetrics); err != nil { + return err + } + + resource.TopMetrics = &topMetrics + return nil + } + + return fmt.Errorf("could not unmarshal resource with `type = %v`", discriminator) +} + +type StringOrPipelineMetricAggregationType struct { + String *string `json:"String,omitempty"` + PipelineMetricAggregationType *PipelineMetricAggregationType `json:"PipelineMetricAggregationType,omitempty"` +} + +// NewStringOrPipelineMetricAggregationType creates a new StringOrPipelineMetricAggregationType object. +func NewStringOrPipelineMetricAggregationType() *StringOrPipelineMetricAggregationType { + return &StringOrPipelineMetricAggregationType{ + String: (func(input string) *string { return &input })("count"), + } +} + +type StringOrDataqueryInlineScript struct { + String *string `json:"String,omitempty"` + DataqueryInlineScript *DataqueryInlineScript `json:"DataqueryInlineScript,omitempty"` +} + +// NewStringOrDataqueryInlineScript creates a new StringOrDataqueryInlineScript object. +func NewStringOrDataqueryInlineScript() *StringOrDataqueryInlineScript { + return &StringOrDataqueryInlineScript{} +} + +type MovingAverageOrDerivativeOrCumulativeSumOrBucketScript struct { + MovingAverage *MovingAverage `json:"MovingAverage,omitempty"` + Derivative *Derivative `json:"Derivative,omitempty"` + CumulativeSum *CumulativeSum `json:"CumulativeSum,omitempty"` + BucketScript *BucketScript `json:"BucketScript,omitempty"` +} + +// NewMovingAverageOrDerivativeOrCumulativeSumOrBucketScript creates a new MovingAverageOrDerivativeOrCumulativeSumOrBucketScript object. +func NewMovingAverageOrDerivativeOrCumulativeSumOrBucketScript() *MovingAverageOrDerivativeOrCumulativeSumOrBucketScript { + return &MovingAverageOrDerivativeOrCumulativeSumOrBucketScript{} +} + +// MarshalJSON implements a custom JSON marshalling logic to encode `MovingAverageOrDerivativeOrCumulativeSumOrBucketScript` as JSON. +func (resource MovingAverageOrDerivativeOrCumulativeSumOrBucketScript) MarshalJSON() ([]byte, error) { + if resource.MovingAverage != nil { + return json.Marshal(resource.MovingAverage) + } + if resource.Derivative != nil { + return json.Marshal(resource.Derivative) + } + if resource.CumulativeSum != nil { + return json.Marshal(resource.CumulativeSum) + } + if resource.BucketScript != nil { + return json.Marshal(resource.BucketScript) + } + + return nil, fmt.Errorf("no value for disjunction of refs") +} + +// UnmarshalJSON implements a custom JSON unmarshalling logic to decode `MovingAverageOrDerivativeOrCumulativeSumOrBucketScript` from JSON. +func (resource *MovingAverageOrDerivativeOrCumulativeSumOrBucketScript) UnmarshalJSON(raw []byte) error { + if raw == nil { + return nil + } + + // FIXME: this is wasteful, we need to find a more efficient way to unmarshal this. 
+ parsedAsMap := make(map[string]any) + if err := json.Unmarshal(raw, &parsedAsMap); err != nil { + return err + } + + discriminator, found := parsedAsMap["type"] + if !found { + return errors.New("discriminator field 'type' not found in payload") + } + + switch discriminator { + case "bucket_script": + var bucketScript BucketScript + if err := json.Unmarshal(raw, &bucketScript); err != nil { + return err + } + + resource.BucketScript = &bucketScript + return nil + case "cumulative_sum": + var cumulativeSum CumulativeSum + if err := json.Unmarshal(raw, &cumulativeSum); err != nil { + return err + } + + resource.CumulativeSum = &cumulativeSum + return nil + case "derivative": + var derivative Derivative + if err := json.Unmarshal(raw, &derivative); err != nil { + return err + } + + resource.Derivative = &derivative + return nil + case "moving_avg": + var movingAverage MovingAverage + if err := json.Unmarshal(raw, &movingAverage); err != nil { + return err + } + + resource.MovingAverage = &movingAverage + return nil + } + + return fmt.Errorf("could not unmarshal resource with `type = %v`", discriminator) +} + +type BucketScriptOrCumulativeSumOrDerivativeOrSerialDiffOrRawDataOrRawDocumentOrUniqueCountOrPercentilesOrExtendedStatsOrMinOrMaxOrSumOrAverageOrMovingAverageOrMovingFunctionOrLogsOrRateOrTopMetrics struct { + BucketScript *BucketScript `json:"BucketScript,omitempty"` + CumulativeSum *CumulativeSum `json:"CumulativeSum,omitempty"` + Derivative *Derivative `json:"Derivative,omitempty"` + SerialDiff *SerialDiff `json:"SerialDiff,omitempty"` + RawData *RawData `json:"RawData,omitempty"` + RawDocument *RawDocument `json:"RawDocument,omitempty"` + UniqueCount *UniqueCount `json:"UniqueCount,omitempty"` + Percentiles *Percentiles `json:"Percentiles,omitempty"` + ExtendedStats *ExtendedStats `json:"ExtendedStats,omitempty"` + Min *Min `json:"Min,omitempty"` + Max *Max `json:"Max,omitempty"` + Sum *Sum `json:"Sum,omitempty"` + Average *Average `json:"Average,omitempty"` + MovingAverage *MovingAverage `json:"MovingAverage,omitempty"` + MovingFunction *MovingFunction `json:"MovingFunction,omitempty"` + Logs *Logs `json:"Logs,omitempty"` + Rate *Rate `json:"Rate,omitempty"` + TopMetrics *TopMetrics `json:"TopMetrics,omitempty"` +} + +// NewBucketScriptOrCumulativeSumOrDerivativeOrSerialDiffOrRawDataOrRawDocumentOrUniqueCountOrPercentilesOrExtendedStatsOrMinOrMaxOrSumOrAverageOrMovingAverageOrMovingFunctionOrLogsOrRateOrTopMetrics creates a new BucketScriptOrCumulativeSumOrDerivativeOrSerialDiffOrRawDataOrRawDocumentOrUniqueCountOrPercentilesOrExtendedStatsOrMinOrMaxOrSumOrAverageOrMovingAverageOrMovingFunctionOrLogsOrRateOrTopMetrics object. 
+func NewBucketScriptOrCumulativeSumOrDerivativeOrSerialDiffOrRawDataOrRawDocumentOrUniqueCountOrPercentilesOrExtendedStatsOrMinOrMaxOrSumOrAverageOrMovingAverageOrMovingFunctionOrLogsOrRateOrTopMetrics() *BucketScriptOrCumulativeSumOrDerivativeOrSerialDiffOrRawDataOrRawDocumentOrUniqueCountOrPercentilesOrExtendedStatsOrMinOrMaxOrSumOrAverageOrMovingAverageOrMovingFunctionOrLogsOrRateOrTopMetrics { + return &BucketScriptOrCumulativeSumOrDerivativeOrSerialDiffOrRawDataOrRawDocumentOrUniqueCountOrPercentilesOrExtendedStatsOrMinOrMaxOrSumOrAverageOrMovingAverageOrMovingFunctionOrLogsOrRateOrTopMetrics{} +} + +// MarshalJSON implements a custom JSON marshalling logic to encode `BucketScriptOrCumulativeSumOrDerivativeOrSerialDiffOrRawDataOrRawDocumentOrUniqueCountOrPercentilesOrExtendedStatsOrMinOrMaxOrSumOrAverageOrMovingAverageOrMovingFunctionOrLogsOrRateOrTopMetrics` as JSON. +func (resource BucketScriptOrCumulativeSumOrDerivativeOrSerialDiffOrRawDataOrRawDocumentOrUniqueCountOrPercentilesOrExtendedStatsOrMinOrMaxOrSumOrAverageOrMovingAverageOrMovingFunctionOrLogsOrRateOrTopMetrics) MarshalJSON() ([]byte, error) { + if resource.BucketScript != nil { + return json.Marshal(resource.BucketScript) + } + if resource.CumulativeSum != nil { + return json.Marshal(resource.CumulativeSum) + } + if resource.Derivative != nil { + return json.Marshal(resource.Derivative) + } + if resource.SerialDiff != nil { + return json.Marshal(resource.SerialDiff) + } + if resource.RawData != nil { + return json.Marshal(resource.RawData) + } + if resource.RawDocument != nil { + return json.Marshal(resource.RawDocument) + } + if resource.UniqueCount != nil { + return json.Marshal(resource.UniqueCount) + } + if resource.Percentiles != nil { + return json.Marshal(resource.Percentiles) + } + if resource.ExtendedStats != nil { + return json.Marshal(resource.ExtendedStats) + } + if resource.Min != nil { + return json.Marshal(resource.Min) + } + if resource.Max != nil { + return json.Marshal(resource.Max) + } + if resource.Sum != nil { + return json.Marshal(resource.Sum) + } + if resource.Average != nil { + return json.Marshal(resource.Average) + } + if resource.MovingAverage != nil { + return json.Marshal(resource.MovingAverage) + } + if resource.MovingFunction != nil { + return json.Marshal(resource.MovingFunction) + } + if resource.Logs != nil { + return json.Marshal(resource.Logs) + } + if resource.Rate != nil { + return json.Marshal(resource.Rate) + } + if resource.TopMetrics != nil { + return json.Marshal(resource.TopMetrics) + } + + return nil, fmt.Errorf("no value for disjunction of refs") +} + +// UnmarshalJSON implements a custom JSON unmarshalling logic to decode `BucketScriptOrCumulativeSumOrDerivativeOrSerialDiffOrRawDataOrRawDocumentOrUniqueCountOrPercentilesOrExtendedStatsOrMinOrMaxOrSumOrAverageOrMovingAverageOrMovingFunctionOrLogsOrRateOrTopMetrics` from JSON. +func (resource *BucketScriptOrCumulativeSumOrDerivativeOrSerialDiffOrRawDataOrRawDocumentOrUniqueCountOrPercentilesOrExtendedStatsOrMinOrMaxOrSumOrAverageOrMovingAverageOrMovingFunctionOrLogsOrRateOrTopMetrics) UnmarshalJSON(raw []byte) error { + if raw == nil { + return nil + } + + // FIXME: this is wasteful, we need to find a more efficient way to unmarshal this. 
+ parsedAsMap := make(map[string]any) + if err := json.Unmarshal(raw, &parsedAsMap); err != nil { + return err + } + + discriminator, found := parsedAsMap["type"] + if !found { + return errors.New("discriminator field 'type' not found in payload") + } + + switch discriminator { + case "avg": + var average Average + if err := json.Unmarshal(raw, &average); err != nil { + return err + } + + resource.Average = &average + return nil + case "bucket_script": + var bucketScript BucketScript + if err := json.Unmarshal(raw, &bucketScript); err != nil { + return err + } + + resource.BucketScript = &bucketScript + return nil + case "cardinality": + var uniqueCount UniqueCount + if err := json.Unmarshal(raw, &uniqueCount); err != nil { + return err + } + + resource.UniqueCount = &uniqueCount + return nil + case "cumulative_sum": + var cumulativeSum CumulativeSum + if err := json.Unmarshal(raw, &cumulativeSum); err != nil { + return err + } + + resource.CumulativeSum = &cumulativeSum + return nil + case "derivative": + var derivative Derivative + if err := json.Unmarshal(raw, &derivative); err != nil { + return err + } + + resource.Derivative = &derivative + return nil + case "extended_stats": + var extendedStats ExtendedStats + if err := json.Unmarshal(raw, &extendedStats); err != nil { + return err + } + + resource.ExtendedStats = &extendedStats + return nil + case "logs": + var logs Logs + if err := json.Unmarshal(raw, &logs); err != nil { + return err + } + + resource.Logs = &logs + return nil + case "max": + var max Max + if err := json.Unmarshal(raw, &max); err != nil { + return err + } + + resource.Max = &max + return nil + case "min": + var min Min + if err := json.Unmarshal(raw, &min); err != nil { + return err + } + + resource.Min = &min + return nil + case "moving_avg": + var movingAverage MovingAverage + if err := json.Unmarshal(raw, &movingAverage); err != nil { + return err + } + + resource.MovingAverage = &movingAverage + return nil + case "moving_fn": + var movingFunction MovingFunction + if err := json.Unmarshal(raw, &movingFunction); err != nil { + return err + } + + resource.MovingFunction = &movingFunction + return nil + case "percentiles": + var percentiles Percentiles + if err := json.Unmarshal(raw, &percentiles); err != nil { + return err + } + + resource.Percentiles = &percentiles + return nil + case "rate": + var rate Rate + if err := json.Unmarshal(raw, &rate); err != nil { + return err + } + + resource.Rate = &rate + return nil + case "raw_data": + var rawData RawData + if err := json.Unmarshal(raw, &rawData); err != nil { + return err + } + + resource.RawData = &rawData + return nil + case "raw_document": + var rawDocument RawDocument + if err := json.Unmarshal(raw, &rawDocument); err != nil { + return err + } + + resource.RawDocument = &rawDocument + return nil + case "serial_diff": + var serialDiff SerialDiff + if err := json.Unmarshal(raw, &serialDiff); err != nil { + return err + } + + resource.SerialDiff = &serialDiff + return nil + case "sum": + var sum Sum + if err := json.Unmarshal(raw, &sum); err != nil { + return err + } + + resource.Sum = &sum + return nil + case "top_metrics": + var topMetrics TopMetrics + if err := json.Unmarshal(raw, &topMetrics); err != nil { + return err + } + + resource.TopMetrics = &topMetrics + return nil + } + + return fmt.Errorf("could not unmarshal resource with `type = %v`", discriminator) } diff --git a/pkg/tsdb/grafana-pyroscope-datasource/kinds/dataquery/types_dataquery_gen.go 
b/pkg/tsdb/grafana-pyroscope-datasource/kinds/dataquery/types_dataquery_gen.go index 4101b39df00..e13aa99e5dc 100644 --- a/pkg/tsdb/grafana-pyroscope-datasource/kinds/dataquery/types_dataquery_gen.go +++ b/pkg/tsdb/grafana-pyroscope-datasource/kinds/dataquery/types_dataquery_gen.go @@ -7,76 +7,50 @@ // // Run 'make gen-cue' from repository root to regenerate. +// Code generated - EDITING IS FUTILE. DO NOT EDIT. + package dataquery -// Defines values for PyroscopeQueryType. +type PyroscopeQueryType string + const ( - PyroscopeQueryTypeBoth PyroscopeQueryType = "both" PyroscopeQueryTypeMetrics PyroscopeQueryType = "metrics" PyroscopeQueryTypeProfile PyroscopeQueryType = "profile" + PyroscopeQueryTypeBoth PyroscopeQueryType = "both" ) -// These are the common properties available to all queries in all datasources. -// Specific implementations will *extend* this interface, adding the required -// properties for the given context. -type DataQuery struct { - // For mixed data sources the selected datasource is on the query level. - // For non mixed scenarios this is undefined. - // TODO find a better way to do this ^ that's friendly to schema - // TODO this shouldn't be unknown but DataSourceRef | null - Datasource *any `json:"datasource,omitempty"` - - // If hide is set to true, Grafana will filter out the response(s) associated with this query before returning it to the panel. - Hide *bool `json:"hide,omitempty"` - - // Specify the query flavor - // TODO make this required and give it a default - QueryType *string `json:"queryType,omitempty"` - +type GrafanaPyroscopeDataQuery struct { + // Specifies the query label selectors. + LabelSelector string `json:"labelSelector"` + // Specifies the query span selectors. + SpanSelector []string `json:"spanSelector,omitempty"` + // Specifies the type of profile to query. + ProfileTypeId string `json:"profileTypeId"` + // Allows to group the results. + GroupBy []string `json:"groupBy"` + // Sets the maximum number of time series. + Limit *int64 `json:"limit,omitempty"` + // Sets the maximum number of nodes in the flamegraph. + MaxNodes *int64 `json:"maxNodes,omitempty"` // A unique identifier for the query within the list of targets. // In server side expressions, the refId is used as a variable name to identify results. // By default, the UI will assign A->Z; however setting meaningful names may be useful. RefId string `json:"refId"` -} - -// GrafanaPyroscopeDataQuery defines model for GrafanaPyroscopeDataQuery. -type GrafanaPyroscopeDataQuery struct { + // If hide is set to true, Grafana will filter out the response(s) associated with this query before returning it to the panel. + Hide *bool `json:"hide,omitempty"` + // Specify the query flavor + // TODO make this required and give it a default + QueryType *string `json:"queryType,omitempty"` // For mixed data sources the selected datasource is on the query level. // For non mixed scenarios this is undefined. // TODO find a better way to do this ^ that's friendly to schema // TODO this shouldn't be unknown but DataSourceRef | null - Datasource *any `json:"datasource,omitempty"` - - // Allows to group the results. - GroupBy []string `json:"groupBy,omitempty"` - - // If hide is set to true, Grafana will filter out the response(s) associated with this query before returning it to the panel. - Hide *bool `json:"hide,omitempty"` - - // Specifies the query label selectors. - LabelSelector *string `json:"labelSelector,omitempty"` - - // Sets the maximum number of time series. 
- Limit *int64 `json:"limit,omitempty"` - - // Sets the maximum number of nodes in the flamegraph. - MaxNodes *int64 `json:"maxNodes,omitempty"` - - // Specifies the type of profile to query. - ProfileTypeId *string `json:"profileTypeId,omitempty"` - - // Specify the query flavor - // TODO make this required and give it a default - QueryType *string `json:"queryType,omitempty"` - - // A unique identifier for the query within the list of targets. - // In server side expressions, the refId is used as a variable name to identify results. - // By default, the UI will assign A->Z; however setting meaningful names may be useful. - RefId *string `json:"refId,omitempty"` - - // Specifies the query span selectors. - SpanSelector []string `json:"spanSelector,omitempty"` + Datasource any `json:"datasource,omitempty"` } -// PyroscopeQueryType defines model for PyroscopeQueryType. -type PyroscopeQueryType string +// NewGrafanaPyroscopeDataQuery creates a new GrafanaPyroscopeDataQuery object. +func NewGrafanaPyroscopeDataQuery() *GrafanaPyroscopeDataQuery { + return &GrafanaPyroscopeDataQuery{ + LabelSelector: "{}", + } +} diff --git a/pkg/tsdb/grafana-pyroscope-datasource/query.go b/pkg/tsdb/grafana-pyroscope-datasource/query.go index 31b9c7fd7bb..83f4174421e 100644 --- a/pkg/tsdb/grafana-pyroscope-datasource/query.go +++ b/pkg/tsdb/grafana-pyroscope-datasource/query.go @@ -52,8 +52,8 @@ func (d *PyroscopeDatasource) query(ctx context.Context, pCtx backend.PluginCont return response } - profileTypeId := depointerizer(qm.ProfileTypeId) - labelSelector := depointerizer(qm.LabelSelector) + profileTypeId := qm.ProfileTypeId + labelSelector := qm.LabelSelector responseMutex := sync.Mutex{} g, gCtx := errgroup.WithContext(ctx) @@ -454,12 +454,3 @@ func seriesToDataFrames(resp *SeriesResponse) []*data.Frame { } return frames } - -func depointerizer[T any](v *T) T { - var emptyValue T - if v != nil { - emptyValue = *v - } - - return emptyValue -} diff --git a/pkg/tsdb/loki/healthcheck.go b/pkg/tsdb/loki/healthcheck.go index ad6dd904683..44670a160e4 100644 --- a/pkg/tsdb/loki/healthcheck.go +++ b/pkg/tsdb/loki/healthcheck.go @@ -11,7 +11,6 @@ import ( "github.com/grafana/grafana-plugin-sdk-go/backend/log" "github.com/grafana/grafana/pkg/tsdb/loki/kinds/dataquery" - "github.com/grafana/grafana/pkg/util" ) const ( @@ -40,7 +39,7 @@ func healthcheck(ctx context.Context, req *backend.CheckHealthRequest, s *Servic step := "1s" qt := "instant" qm := dataquery.LokiDataQuery{ - Expr: util.Pointer("vector(1)+vector(1)"), + Expr: "vector(1)+vector(1)", Step: &step, QueryType: &qt, } diff --git a/pkg/tsdb/loki/kinds/dataquery/types_dataquery_gen.go b/pkg/tsdb/loki/kinds/dataquery/types_dataquery_gen.go index c386bd5c3b4..c9764b174fd 100644 --- a/pkg/tsdb/loki/kinds/dataquery/types_dataquery_gen.go +++ b/pkg/tsdb/loki/kinds/dataquery/types_dataquery_gen.go @@ -7,110 +7,75 @@ // // Run 'make gen-cue' from repository root to regenerate. +// Code generated - EDITING IS FUTILE. DO NOT EDIT. + package dataquery -// Defines values for LokiQueryDirection. +type QueryEditorMode string + const ( - LokiQueryDirectionBackward LokiQueryDirection = "backward" - LokiQueryDirectionForward LokiQueryDirection = "forward" - LokiQueryDirectionScan LokiQueryDirection = "scan" + QueryEditorModeCode QueryEditorMode = "code" + QueryEditorModeBuilder QueryEditorMode = "builder" ) -// Defines values for LokiQueryType. 
+type LokiQueryType string + const ( - LokiQueryTypeInstant LokiQueryType = "instant" LokiQueryTypeRange LokiQueryType = "range" + LokiQueryTypeInstant LokiQueryType = "instant" LokiQueryTypeStream LokiQueryType = "stream" ) -// Defines values for QueryEditorMode. -const ( - QueryEditorModeBuilder QueryEditorMode = "builder" - QueryEditorModeCode QueryEditorMode = "code" -) +type SupportingQueryType string -// Defines values for SupportingQueryType. const ( + SupportingQueryTypeLogsVolume SupportingQueryType = "logsVolume" + SupportingQueryTypeLogsSample SupportingQueryType = "logsSample" SupportingQueryTypeDataSample SupportingQueryType = "dataSample" SupportingQueryTypeInfiniteScroll SupportingQueryType = "infiniteScroll" - SupportingQueryTypeLogsSample SupportingQueryType = "logsSample" - SupportingQueryTypeLogsVolume SupportingQueryType = "logsVolume" ) -// These are the common properties available to all queries in all datasources. -// Specific implementations will *extend* this interface, adding the required -// properties for the given context. -type DataQuery struct { - // For mixed data sources the selected datasource is on the query level. - // For non mixed scenarios this is undefined. - // TODO find a better way to do this ^ that's friendly to schema - // TODO this shouldn't be unknown but DataSourceRef | null - Datasource *any `json:"datasource,omitempty"` +type LokiQueryDirection string - // If hide is set to true, Grafana will filter out the response(s) associated with this query before returning it to the panel. - Hide *bool `json:"hide,omitempty"` - - // Specify the query flavor - // TODO make this required and give it a default - QueryType *string `json:"queryType,omitempty"` +const ( + LokiQueryDirectionForward LokiQueryDirection = "forward" + LokiQueryDirectionBackward LokiQueryDirection = "backward" + LokiQueryDirectionScan LokiQueryDirection = "scan" +) +type LokiDataQuery struct { + // The LogQL query. + Expr string `json:"expr"` + // Used to override the name of the series. + LegendFormat *string `json:"legendFormat,omitempty"` + // Used to limit the number of log rows returned. + MaxLines *int64 `json:"maxLines,omitempty"` + // @deprecated, now use step. + Resolution *int64 `json:"resolution,omitempty"` + EditorMode *QueryEditorMode `json:"editorMode,omitempty"` + // @deprecated, now use queryType. + Range *bool `json:"range,omitempty"` + // @deprecated, now use queryType. + Instant *bool `json:"instant,omitempty"` + // Used to set step value for range queries. + Step *string `json:"step,omitempty"` // A unique identifier for the query within the list of targets. // In server side expressions, the refId is used as a variable name to identify results. // By default, the UI will assign A->Z; however setting meaningful names may be useful. RefId string `json:"refId"` -} - -// LokiDataQuery defines model for LokiDataQuery. -type LokiDataQuery struct { + // If hide is set to true, Grafana will filter out the response(s) associated with this query before returning it to the panel. + Hide *bool `json:"hide,omitempty"` + // Specify the query flavor + // TODO make this required and give it a default + QueryType *string `json:"queryType,omitempty"` // For mixed data sources the selected datasource is on the query level. // For non mixed scenarios this is undefined. 
// TODO find a better way to do this ^ that's friendly to schema // TODO this shouldn't be unknown but DataSourceRef | null - Datasource *any `json:"datasource,omitempty"` - EditorMode *QueryEditorMode `json:"editorMode,omitempty"` - - // The LogQL query. - Expr *string `json:"expr,omitempty"` - - // If hide is set to true, Grafana will filter out the response(s) associated with this query before returning it to the panel. - Hide *bool `json:"hide,omitempty"` - - // @deprecated, now use queryType. - Instant *bool `json:"instant,omitempty"` - - // Used to override the name of the series. - LegendFormat *string `json:"legendFormat,omitempty"` - - // Used to limit the number of log rows returned. - MaxLines *int64 `json:"maxLines,omitempty"` - - // Specify the query flavor - // TODO make this required and give it a default - QueryType *string `json:"queryType,omitempty"` - - // @deprecated, now use queryType. - Range *bool `json:"range,omitempty"` - - // A unique identifier for the query within the list of targets. - // In server side expressions, the refId is used as a variable name to identify results. - // By default, the UI will assign A->Z; however setting meaningful names may be useful. - RefId *string `json:"refId,omitempty"` - - // @deprecated, now use step. - Resolution *int64 `json:"resolution,omitempty"` - - // Used to set step value for range queries. - Step *string `json:"step,omitempty"` + Datasource any `json:"datasource,omitempty"` } -// LokiQueryDirection defines model for LokiQueryDirection. -type LokiQueryDirection string - -// LokiQueryType defines model for LokiQueryType. -type LokiQueryType string - -// QueryEditorMode defines model for QueryEditorMode. -type QueryEditorMode string - -// SupportingQueryType defines model for SupportingQueryType. -type SupportingQueryType string +// NewLokiDataQuery creates a new LokiDataQuery object. 
+func NewLokiDataQuery() *LokiDataQuery { + return &LokiDataQuery{} +} diff --git a/pkg/tsdb/loki/parse_query.go b/pkg/tsdb/loki/parse_query.go index 7efed0cab83..ce45715eb29 100644 --- a/pkg/tsdb/loki/parse_query.go +++ b/pkg/tsdb/loki/parse_query.go @@ -154,7 +154,7 @@ func parseQuery(queryContext *backend.QueryDataRequest, logqlScopesEnabled bool) return nil, err } - expr := interpolateVariables(depointerizer(model.Expr), interval, timeRange, queryType, step) + expr := interpolateVariables(model.Expr, interval, timeRange, queryType, step) direction, err := parseDirection(model.Direction) if err != nil { @@ -197,12 +197,3 @@ func parseQuery(queryContext *backend.QueryDataRequest, logqlScopesEnabled bool) return qs, nil } - -func depointerizer[T any](v *T) T { - var emptyValue T - if v != nil { - emptyValue = *v - } - - return emptyValue -} diff --git a/pkg/tsdb/loki/streaming.go b/pkg/tsdb/loki/streaming.go index 46c4d13eefd..01dba69ed7c 100644 --- a/pkg/tsdb/loki/streaming.go +++ b/pkg/tsdb/loki/streaming.go @@ -34,7 +34,7 @@ func (s *Service) SubscribeStream(ctx context.Context, req *backend.SubscribeStr if err != nil { return nil, err } - if query.Expr == nil || *query.Expr == "" { + if query.Expr == "" { return &backend.SubscribeStreamResponse{ Status: backend.SubscribeStreamStatusNotFound, }, fmt.Errorf("missing expr in channel (subscribe)") @@ -69,7 +69,7 @@ func (s *Service) RunStream(ctx context.Context, req *backend.RunStreamRequest, if err != nil { return err } - if query.Expr == nil || *query.Expr == "" { + if query.Expr == "" { return fmt.Errorf("missing expr in cuannel") } @@ -80,7 +80,7 @@ func (s *Service) RunStream(ctx context.Context, req *backend.RunStreamRequest, signal.Notify(interrupt, os.Interrupt) params := url.Values{} - params.Add("query", *query.Expr) + params.Add("query", query.Expr) wsurl, _ := url.Parse(dsInfo.URL) diff --git a/pkg/tsdb/parca/kinds/dataquery/types_dataquery_gen.go b/pkg/tsdb/parca/kinds/dataquery/types_dataquery_gen.go index 2acdac9bc9c..ec31c510b59 100644 --- a/pkg/tsdb/parca/kinds/dataquery/types_dataquery_gen.go +++ b/pkg/tsdb/parca/kinds/dataquery/types_dataquery_gen.go @@ -7,64 +7,42 @@ // // Run 'make gen-cue' from repository root to regenerate. +// Code generated - EDITING IS FUTILE. DO NOT EDIT. + package dataquery -// Defines values for ParcaQueryType. +type ParcaQueryType string + const ( - ParcaQueryTypeBoth ParcaQueryType = "both" ParcaQueryTypeMetrics ParcaQueryType = "metrics" ParcaQueryTypeProfile ParcaQueryType = "profile" + ParcaQueryTypeBoth ParcaQueryType = "both" ) -// These are the common properties available to all queries in all datasources. -// Specific implementations will *extend* this interface, adding the required -// properties for the given context. -type DataQuery struct { - // For mixed data sources the selected datasource is on the query level. - // For non mixed scenarios this is undefined. - // TODO find a better way to do this ^ that's friendly to schema - // TODO this shouldn't be unknown but DataSourceRef | null - Datasource *any `json:"datasource,omitempty"` - - // If hide is set to true, Grafana will filter out the response(s) associated with this query before returning it to the panel. - Hide *bool `json:"hide,omitempty"` - - // Specify the query flavor - // TODO make this required and give it a default - QueryType *string `json:"queryType,omitempty"` - +type ParcaDataQuery struct { + // Specifies the query label selectors. 
+ LabelSelector string `json:"labelSelector"` + // Specifies the type of profile to query. + ProfileTypeId string `json:"profileTypeId"` // A unique identifier for the query within the list of targets. // In server side expressions, the refId is used as a variable name to identify results. // By default, the UI will assign A->Z; however setting meaningful names may be useful. RefId string `json:"refId"` -} - -// ParcaDataQuery defines model for ParcaDataQuery. -type ParcaDataQuery struct { + // If hide is set to true, Grafana will filter out the response(s) associated with this query before returning it to the panel. + Hide *bool `json:"hide,omitempty"` + // Specify the query flavor + // TODO make this required and give it a default + QueryType *string `json:"queryType,omitempty"` // For mixed data sources the selected datasource is on the query level. // For non mixed scenarios this is undefined. // TODO find a better way to do this ^ that's friendly to schema // TODO this shouldn't be unknown but DataSourceRef | null - Datasource *any `json:"datasource,omitempty"` - - // If hide is set to true, Grafana will filter out the response(s) associated with this query before returning it to the panel. - Hide *bool `json:"hide,omitempty"` - - // Specifies the query label selectors. - LabelSelector *string `json:"labelSelector,omitempty"` - - // Specifies the type of profile to query. - ProfileTypeId *string `json:"profileTypeId,omitempty"` - - // Specify the query flavor - // TODO make this required and give it a default - QueryType *string `json:"queryType,omitempty"` - - // A unique identifier for the query within the list of targets. - // In server side expressions, the refId is used as a variable name to identify results. - // By default, the UI will assign A->Z; however setting meaningful names may be useful. - RefId *string `json:"refId,omitempty"` + Datasource any `json:"datasource,omitempty"` } -// ParcaQueryType defines model for ParcaQueryType. -type ParcaQueryType string +// NewParcaDataQuery creates a new ParcaDataQuery object. +func NewParcaDataQuery() *ParcaDataQuery { + return &ParcaDataQuery{ + LabelSelector: "{}", + } +} diff --git a/pkg/tsdb/parca/query.go b/pkg/tsdb/parca/query.go index d151cdb3072..7bcbc293d25 100644 --- a/pkg/tsdb/parca/query.go +++ b/pkg/tsdb/parca/query.go @@ -22,7 +22,6 @@ import ( "go.opentelemetry.io/otel/trace" "google.golang.org/protobuf/types/known/timestamppb" - "github.com/grafana/grafana/pkg/tsdb/cloudwatch/utils" "github.com/grafana/grafana/pkg/tsdb/parca/kinds/dataquery" ) @@ -64,7 +63,7 @@ func (d *ParcaDatasource) query(ctx context.Context, pCtx backend.PluginContext, return response } - response.Frames = append(response.Frames, seriesToDataFrame(seriesResp, utils.Depointerizer(qm.ProfileTypeId))...) + response.Frames = append(response.Frames, seriesToDataFrame(seriesResp, qm.ProfileTypeId)...) 
} if query.QueryType == queryTypeProfile || query.QueryType == queryTypeBoth { @@ -102,7 +101,7 @@ func makeProfileRequest(qm queryModel, query backend.DataQuery) *connect.Request Mode: v1alpha1.QueryRequest_MODE_MERGE, Options: &v1alpha1.QueryRequest_Merge{ Merge: &v1alpha1.MergeProfile{ - Query: fmt.Sprintf("%s%s", utils.Depointerizer(qm.ProfileTypeId), utils.Depointerizer(qm.LabelSelector)), + Query: fmt.Sprintf("%s%s", qm.ProfileTypeId, qm.LabelSelector), Start: ×tamppb.Timestamp{ Seconds: query.TimeRange.From.Unix(), }, @@ -120,7 +119,7 @@ func makeProfileRequest(qm queryModel, query backend.DataQuery) *connect.Request func makeMetricRequest(qm queryModel, query backend.DataQuery) *connect.Request[v1alpha1.QueryRangeRequest] { return &connect.Request[v1alpha1.QueryRangeRequest]{ Msg: &v1alpha1.QueryRangeRequest{ - Query: fmt.Sprintf("%s%s", utils.Depointerizer(qm.ProfileTypeId), utils.Depointerizer(qm.LabelSelector)), + Query: fmt.Sprintf("%s%s", qm.ProfileTypeId, qm.LabelSelector), Start: ×tamppb.Timestamp{ Seconds: query.TimeRange.From.Unix(), }, diff --git a/pkg/tsdb/tempo/kinds/dataquery/types_dataquery_gen.go b/pkg/tsdb/tempo/kinds/dataquery/types_dataquery_gen.go index 9e893b15cb9..a709b74c0e6 100644 --- a/pkg/tsdb/tempo/kinds/dataquery/types_dataquery_gen.go +++ b/pkg/tsdb/tempo/kinds/dataquery/types_dataquery_gen.go @@ -7,164 +7,181 @@ // // Run 'make gen-cue' from repository root to regenerate. +// Code generated - EDITING IS FUTILE. DO NOT EDIT. + package dataquery -// Defines values for SearchStreamingState. -const ( - SearchStreamingStateDone SearchStreamingState = "done" - SearchStreamingStateError SearchStreamingState = "error" - SearchStreamingStatePending SearchStreamingState = "pending" - SearchStreamingStateStreaming SearchStreamingState = "streaming" +import ( + json "encoding/json" + errors "errors" + fmt "fmt" ) -// Defines values for SearchTableType. -const ( - SearchTableTypeRaw SearchTableType = "raw" - SearchTableTypeSpans SearchTableType = "spans" - SearchTableTypeTraces SearchTableType = "traces" -) - -// Defines values for TempoQueryType. -const ( - TempoQueryTypeClear TempoQueryType = "clear" - TempoQueryTypeNativeSearch TempoQueryType = "nativeSearch" - TempoQueryTypeServiceMap TempoQueryType = "serviceMap" - TempoQueryTypeTraceId TempoQueryType = "traceId" - TempoQueryTypeTraceql TempoQueryType = "traceql" - TempoQueryTypeTraceqlSearch TempoQueryType = "traceqlSearch" - TempoQueryTypeUpload TempoQueryType = "upload" -) - -// Defines values for TraceqlSearchScope. -const ( - TraceqlSearchScopeEvent TraceqlSearchScope = "event" - TraceqlSearchScopeInstrumentation TraceqlSearchScope = "instrumentation" - TraceqlSearchScopeIntrinsic TraceqlSearchScope = "intrinsic" - TraceqlSearchScopeLink TraceqlSearchScope = "link" - TraceqlSearchScopeResource TraceqlSearchScope = "resource" - TraceqlSearchScopeSpan TraceqlSearchScope = "span" - TraceqlSearchScopeUnscoped TraceqlSearchScope = "unscoped" -) - -// These are the common properties available to all queries in all datasources. -// Specific implementations will *extend* this interface, adding the required -// properties for the given context. -type DataQuery struct { - // For mixed data sources the selected datasource is on the query level. - // For non mixed scenarios this is undefined. 
- // TODO find a better way to do this ^ that's friendly to schema - // TODO this shouldn't be unknown but DataSourceRef | null - Datasource *any `json:"datasource,omitempty"` - - // If hide is set to true, Grafana will filter out the response(s) associated with this query before returning it to the panel. - Hide *bool `json:"hide,omitempty"` - - // Specify the query flavor - // TODO make this required and give it a default - QueryType *string `json:"queryType,omitempty"` - +type TempoQuery struct { // A unique identifier for the query within the list of targets. // In server side expressions, the refId is used as a variable name to identify results. // By default, the UI will assign A->Z; however setting meaningful names may be useful. RefId string `json:"refId"` -} - -// The state of the TraceQL streaming search query -type SearchStreamingState string - -// The type of the table that is used to display the search results -type SearchTableType string - -// TempoDataQuery defines model for TempoDataQuery. -type TempoDataQuery = map[string]any - -// TempoQuery defines model for TempoQuery. -type TempoQuery struct { + // If hide is set to true, Grafana will filter out the response(s) associated with this query before returning it to the panel. + Hide *bool `json:"hide,omitempty"` + // Specify the query flavor + // TODO make this required and give it a default + QueryType *string `json:"queryType,omitempty"` + // TraceQL query or trace ID + Query *string `json:"query,omitempty"` + // @deprecated Logfmt query to filter traces by their tags. Example: http.status_code=200 error=true + Search *string `json:"search,omitempty"` + // @deprecated Query traces by service name + ServiceName *string `json:"serviceName,omitempty"` + // @deprecated Query traces by span name + SpanName *string `json:"spanName,omitempty"` + // @deprecated Define the minimum duration to select traces. Use duration format, for example: 1.2s, 100ms + MinDuration *string `json:"minDuration,omitempty"` + // @deprecated Define the maximum duration to select traces. Use duration format, for example: 1.2s, 100ms + MaxDuration *string `json:"maxDuration,omitempty"` + // Filters to be included in a PromQL query to select data for the service graph. Example: {client="app",service="app"}. Providing multiple values will produce union of results for each filter, using PromQL OR operator internally. + ServiceMapQuery *StringOrArrayOfString `json:"serviceMapQuery,omitempty"` + // Use service.namespace in addition to service.name to uniquely identify a service. + ServiceMapIncludeNamespace *bool `json:"serviceMapIncludeNamespace,omitempty"` + // Defines the maximum number of traces that are returned from Tempo + Limit *int64 `json:"limit,omitempty"` + // Defines the maximum number of spans per spanset that are returned from Tempo + Spss *int64 `json:"spss,omitempty"` + Filters []TraceqlFilter `json:"filters"` + // Filters that are used to query the metrics summary + GroupBy []TraceqlFilter `json:"groupBy,omitempty"` + // The type of the table that is used to display the search results + TableType *SearchTableType `json:"tableType,omitempty"` + // For metric queries, the step size to use + Step *string `json:"step,omitempty"` // For mixed data sources the selected datasource is on the query level. // For non mixed scenarios this is undefined. 
// TODO find a better way to do this ^ that's friendly to schema // TODO this shouldn't be unknown but DataSourceRef | null - Datasource *any `json:"datasource,omitempty"` - + Datasource any `json:"datasource,omitempty"` // For metric queries, how many exemplars to request, 0 means no exemplars - Exemplars *int64 `json:"exemplars,omitempty"` - Filters []TraceqlFilter `json:"filters,omitempty"` - - // Filters that are used to query the metrics summary - GroupBy []TraceqlFilter `json:"groupBy,omitempty"` - - // If hide is set to true, Grafana will filter out the response(s) associated with this query before returning it to the panel. - Hide *bool `json:"hide,omitempty"` - - // Defines the maximum number of traces that are returned from Tempo - Limit *int64 `json:"limit,omitempty"` - - // @deprecated Define the maximum duration to select traces. Use duration format, for example: 1.2s, 100ms - MaxDuration *string `json:"maxDuration,omitempty"` - - // @deprecated Define the minimum duration to select traces. Use duration format, for example: 1.2s, 100ms - MinDuration *string `json:"minDuration,omitempty"` - - // TraceQL query or trace ID - Query *string `json:"query,omitempty"` - - // Specify the query flavor - // TODO make this required and give it a default - QueryType *string `json:"queryType,omitempty"` - - // A unique identifier for the query within the list of targets. - // In server side expressions, the refId is used as a variable name to identify results. - // By default, the UI will assign A->Z; however setting meaningful names may be useful. - RefId *string `json:"refId,omitempty"` - - // @deprecated Logfmt query to filter traces by their tags. Example: http.status_code=200 error=true - Search *string `json:"search,omitempty"` - - // Use service.namespace in addition to service.name to uniquely identify a service. - ServiceMapIncludeNamespace *bool `json:"serviceMapIncludeNamespace,omitempty"` - - // Filters to be included in a PromQL query to select data for the service graph. Example: {client="app",service="app"}. Providing multiple values will produce union of results for each filter, using PromQL OR operator internally. - ServiceMapQuery *any `json:"serviceMapQuery,omitempty"` - - // @deprecated Query traces by service name - ServiceName *string `json:"serviceName,omitempty"` - - // @deprecated Query traces by span name - SpanName *string `json:"spanName,omitempty"` - - // Defines the maximum number of spans per spanset that are returned from Tempo - Spss *int64 `json:"spss,omitempty"` - - // For metric queries, the step size to use - Step *string `json:"step,omitempty"` - - // The type of the table that is used to display the search results - TableType *SearchTableType `json:"tableType,omitempty"` + Exemplars *int64 `json:"exemplars,omitempty"` +} + +// NewTempoQuery creates a new TempoQuery object. +func NewTempoQuery() *TempoQuery { + return &TempoQuery{} } -// TempoQueryType defines model for TempoQueryType. type TempoQueryType string -// TraceqlFilter defines model for TraceqlFilter. 
+const ( + TempoQueryTypeTraceql TempoQueryType = "traceql" + TempoQueryTypeTraceqlSearch TempoQueryType = "traceqlSearch" + TempoQueryTypeServiceMap TempoQueryType = "serviceMap" + TempoQueryTypeUpload TempoQueryType = "upload" + TempoQueryTypeNativeSearch TempoQueryType = "nativeSearch" + TempoQueryTypeTraceId TempoQueryType = "traceId" + TempoQueryTypeClear TempoQueryType = "clear" +) + +// The state of the TraceQL streaming search query +type SearchStreamingState string + +const ( + SearchStreamingStatePending SearchStreamingState = "pending" + SearchStreamingStateStreaming SearchStreamingState = "streaming" + SearchStreamingStateDone SearchStreamingState = "done" + SearchStreamingStateError SearchStreamingState = "error" +) + +// The type of the table that is used to display the search results +type SearchTableType string + +const ( + SearchTableTypeTraces SearchTableType = "traces" + SearchTableTypeSpans SearchTableType = "spans" + SearchTableTypeRaw SearchTableType = "raw" +) + +// static fields are pre-set in the UI, dynamic fields are added by the user +type TraceqlSearchScope string + +const ( + TraceqlSearchScopeIntrinsic TraceqlSearchScope = "intrinsic" + TraceqlSearchScopeUnscoped TraceqlSearchScope = "unscoped" + TraceqlSearchScopeEvent TraceqlSearchScope = "event" + TraceqlSearchScopeInstrumentation TraceqlSearchScope = "instrumentation" + TraceqlSearchScopeLink TraceqlSearchScope = "link" + TraceqlSearchScopeResource TraceqlSearchScope = "resource" + TraceqlSearchScopeSpan TraceqlSearchScope = "span" +) + type TraceqlFilter struct { // Uniquely identify the filter, will not be used in the query generation Id string `json:"id"` - - // The operator that connects the tag to the value, for example: =, >, !=, =~ - Operator *string `json:"operator,omitempty"` - - // Scope static fields are pre-set in the UI, dynamic fields are added by the user - Scope *TraceqlSearchScope `json:"scope,omitempty"` - // The tag for the search filter, for example: .http.status_code, .service.name, status Tag *string `json:"tag,omitempty"` - + // The operator that connects the tag to the value, for example: =, >, !=, =~ + Operator *string `json:"operator,omitempty"` // The value for the search filter - Value *any `json:"value,omitempty"` - + Value *StringOrArrayOfString `json:"value,omitempty"` // The type of the value, used for example to check whether we need to wrap the value in quotes when generating the query ValueType *string `json:"valueType,omitempty"` + // The scope of the filter, can either be unscoped/all scopes, resource or span + Scope *TraceqlSearchScope `json:"scope,omitempty"` } -// TraceqlSearchScope static fields are pre-set in the UI, dynamic fields are added by the user -type TraceqlSearchScope string +// NewTraceqlFilter creates a new TraceqlFilter object. +func NewTraceqlFilter() *TraceqlFilter { + return &TraceqlFilter{} +} + +type StringOrArrayOfString struct { + String *string `json:"String,omitempty"` + ArrayOfString []string `json:"ArrayOfString,omitempty"` +} + +// NewStringOrArrayOfString creates a new StringOrArrayOfString object. +func NewStringOrArrayOfString() *StringOrArrayOfString { + return &StringOrArrayOfString{} +} + +// MarshalJSON implements a custom JSON marshalling logic to encode `StringOrArrayOfString` as JSON. 
+func (resource StringOrArrayOfString) MarshalJSON() ([]byte, error) { + if resource.String != nil { + return json.Marshal(resource.String) + } + + if resource.ArrayOfString != nil { + return json.Marshal(resource.ArrayOfString) + } + + return nil, fmt.Errorf("no value for disjunction of scalars") +} + +// UnmarshalJSON implements a custom JSON unmarshalling logic to decode `StringOrArrayOfString` from JSON. +func (resource *StringOrArrayOfString) UnmarshalJSON(raw []byte) error { + if raw == nil { + return nil + } + + var errList []error + + // String + var String string + if err := json.Unmarshal(raw, &String); err != nil { + errList = append(errList, err) + resource.String = nil + } else { + resource.String = &String + return nil + } + + // ArrayOfString + var ArrayOfString []string + if err := json.Unmarshal(raw, &ArrayOfString); err != nil { + errList = append(errList, err) + resource.ArrayOfString = nil + } else { + resource.ArrayOfString = ArrayOfString + return nil + } + + return errors.Join(errList...) +} diff --git a/public/api-merged.json b/public/api-merged.json index 79758d8f542..f2d77b80ed8 100644 --- a/public/api-merged.json +++ b/public/api-merged.json @@ -13083,7 +13083,6 @@ }, "AnnotationPanelFilter": { "type": "object", - "title": "AnnotationPanelFilter defines model for AnnotationPanelFilter.", "properties": { "exclude": { "description": "Should the specified panels be included or excluded", @@ -13094,7 +13093,7 @@ "type": "array", "items": { "type": "integer", - "format": "int64" + "format": "uint8" } } } @@ -13117,7 +13116,7 @@ "builtIn": { "description": "Set to 1 for the standard annotation query all dashboards have by default.", "type": "number", - "format": "float" + "format": "double" }, "datasource": { "$ref": "#/definitions/DataSourceRef" @@ -13877,20 +13876,10 @@ }, "CookiePreferences": { "type": "object", - "title": "CookiePreferences defines model for CookiePreferences.", "properties": { - "analytics": { - "type": "object", - "additionalProperties": {} - }, - "functional": { - "type": "object", - "additionalProperties": {} - }, - "performance": { - "type": "object", - "additionalProperties": {} - } + "analytics": {}, + "functional": {}, + "performance": {} } }, "CookieType": { @@ -16899,7 +16888,6 @@ }, "LibraryElementDTOMetaUser": { "type": "object", - "title": "LibraryElementDTOMetaUser defines model for LibraryElementDTOMetaUser.", "properties": { "avatarUrl": { "type": "string" @@ -17289,7 +17277,6 @@ }, "NavbarPreference": { "type": "object", - "title": "NavbarPreference defines model for NavbarPreference.", "properties": { "bookmarkUrls": { "type": "array", @@ -18494,7 +18481,7 @@ "$ref": "#/definitions/QueryHistoryPreference" }, "theme": { - "description": "Theme light, dark, empty is default", + "description": "light, dark, empty is default", "type": "string" }, "timezone": { @@ -18502,7 +18489,7 @@ "type": "string" }, "weekStart": { - "description": "WeekStart day of the week (sunday, monday, etc)", + "description": "day of the week (sunday, monday, etc)", "type": "string" } } @@ -18965,10 +18952,9 @@ }, "QueryHistoryPreference": { "type": "object", - "title": "QueryHistoryPreference defines model for QueryHistoryPreference.", "properties": { "homeTab": { - "description": "HomeTab one of: '' | 'query' | 'starred';", + "description": "one of: '' | 'query' | 'starred';", "type": "string" } } diff --git a/public/openapi3.json b/public/openapi3.json index fe2da223144..b8c9a75b21d 100644 --- a/public/openapi3.json +++ b/public/openapi3.json @@ -3165,13 
+3165,12 @@ "ids": { "description": "Panel IDs that should be included or excluded", "items": { - "format": "int64", + "format": "uint8", "type": "integer" }, "type": "array" } }, - "title": "AnnotationPanelFilter defines model for AnnotationPanelFilter.", "type": "object" }, "AnnotationPermission": { @@ -3190,7 +3189,7 @@ "properties": { "builtIn": { "description": "Set to 1 for the standard annotation query all dashboards have by default.", - "format": "float", + "format": "double", "type": "number" }, "datasource": { @@ -3952,20 +3951,10 @@ }, "CookiePreferences": { "properties": { - "analytics": { - "additionalProperties": {}, - "type": "object" - }, - "functional": { - "additionalProperties": {}, - "type": "object" - }, - "performance": { - "additionalProperties": {}, - "type": "object" - } + "analytics": {}, + "functional": {}, + "performance": {} }, - "title": "CookiePreferences defines model for CookiePreferences.", "type": "object" }, "CookieType": { @@ -6985,7 +6974,6 @@ "type": "string" } }, - "title": "LibraryElementDTOMetaUser defines model for LibraryElementDTOMetaUser.", "type": "object" }, "LibraryElementResponse": { @@ -7371,7 +7359,6 @@ "type": "array" } }, - "title": "NavbarPreference defines model for NavbarPreference.", "type": "object" }, "NewApiKeyResult": { @@ -8568,7 +8555,7 @@ "$ref": "#/components/schemas/QueryHistoryPreference" }, "theme": { - "description": "Theme light, dark, empty is default", + "description": "light, dark, empty is default", "type": "string" }, "timezone": { @@ -8576,7 +8563,7 @@ "type": "string" }, "weekStart": { - "description": "WeekStart day of the week (sunday, monday, etc)", + "description": "day of the week (sunday, monday, etc)", "type": "string" } }, @@ -9041,11 +9028,10 @@ "QueryHistoryPreference": { "properties": { "homeTab": { - "description": "HomeTab one of: '' | 'query' | 'starred';", + "description": "one of: '' | 'query' | 'starred';", "type": "string" } }, - "title": "QueryHistoryPreference defines model for QueryHistoryPreference.", "type": "object" }, "QueryHistoryResponse": {