schema: Generate Go and Typescript from Thema coremodels (#49193)
* Add go code generator for coremodels
* Just generate the entire coremodel for now
  Maybe we'll need more flexibility as more coremodels are added, but for now this is fine.
* Add note on type comment about stability, grodkit
* Remove local replace directive for thema
* Generate typescript from coremodel
* Update pkg/coremodel/dashboard/addenda.go
  Co-authored-by: Ryan McKinley <ryantxu@gmail.com>
* Update cuetsy to new release
* Update thema to latest
* Fix enum generation for FieldColorModeId
* Put main generated object at the end of the file
* Tweaks to generated Go output
* Retweak back to var
* Add generated coremodel test
* Remove local replace statement again
* Add Make target and call into cuetsy cmd from gen
* Rename and comment linsrc for readability
* Move key codegen bits into reusable package
* Move body of cuetsifier into codegen pkg
  Also genericize the diffing output into reusable WriteDiffer.
* Refactor coremodel generator to use WriteDiffer
* Add gen-cue step to CI
* Whip all the codegen automation into shape
* Add simplistic coremodel canonicality controls
* Remove erroneously committed test
* Bump thema version
* Remove dead code
* Improve wording of non-canonicality comment

Co-authored-by: Ryan McKinley <ryantxu@gmail.com>
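
Taken together, the files added below form a small pipeline: extract a Thema lineage from a lineage.cue file, generate Go and TypeScript from its latest schema, and write or verify the results through a WriteDiffer. The following is a minimal sketch of how a generator command might wire these pieces together; the command itself, the output paths, and the error handling are assumptions for illustration, not code from this commit.

package main

import (
	"fmt"
	"os"
	"path/filepath"

	"github.com/grafana/grafana/pkg/codegen"
	"github.com/grafana/grafana/pkg/cuectx"
)

func main() {
	// Assumption: run from the repository root; the dashboard coremodel is
	// the only coremodel wired up in this commit.
	groot, err := os.Getwd()
	if err != nil {
		die(err)
	}

	el, err := codegen.ExtractLineage(
		filepath.Join(groot, "pkg", "coremodel", "dashboard", "lineage.cue"),
		cuectx.ProvideThemaLibrary(),
	)
	if err != nil {
		die(err)
	}

	// Both generators require the final path element to match the lineage name.
	gowd, err := el.GenerateGoCoremodel(filepath.Join(groot, "pkg", "coremodel", "dashboard"))
	if err != nil {
		die(err)
	}
	// Hypothetical TypeScript output directory; the real location is chosen by
	// the calling command, not by this package.
	tswd, err := el.GenerateTypescriptCoremodel(
		filepath.Join(groot, "packages", "grafana-schema", "src", "schema", "dashboard"))
	if err != nil {
		die(err)
	}

	if err := gowd.Merge(tswd); err != nil {
		die(err)
	}
	// In CI, gowd.Verify() would be called instead, so stale generated files
	// fail the build rather than being silently rewritten.
	if err := gowd.Write(); err != nil {
		die(err)
	}
}

func die(err error) {
	fmt.Fprintln(os.Stderr, err)
	os.Exit(1)
}
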
pkg/codegen/coremodel.go (new file, 399 lines)
@@ -0,0 +1,399 @@
package codegen

import (
	"bytes"
	"errors"
	"fmt"
	"go/ast"
	"go/format"
	"go/parser"
	"go/token"
	"io/ioutil"
	"os"
	"path/filepath"
	"regexp"
	"strings"
	"testing/fstest"
	"text/template"

	"cuelang.org/go/pkg/encoding/yaml"
	"github.com/deepmap/oapi-codegen/pkg/codegen"
	"github.com/getkin/kin-openapi/openapi3"
	"github.com/grafana/cuetsy"
	"github.com/grafana/grafana/pkg/cuectx"
	"github.com/grafana/thema"
	"github.com/grafana/thema/encoding/openapi"
	"golang.org/x/tools/imports"
)

// ExtractedLineage contains the results of statically analyzing a Grafana
// directory for a Thema lineage.
type ExtractedLineage struct {
	Lineage thema.Lineage
	// Absolute path to the coremodel's lineage.cue file.
	LineagePath string
	// Path to the coremodel's lineage.cue file relative to repo root.
	RelativePath string
	// Indicates whether the coremodel is considered canonical or not. Generated
	// code from not-yet-canonical coremodels should include appropriate caveats in
	// documentation and possibly be hidden from external public API surface areas.
	IsCanonical bool
}

// ExtractLineage loads a Grafana Thema lineage from the filesystem.
//
// The provided path must be the absolute path to the file containing the
// lineage to be loaded.
//
// This loading approach is intended primarily for use with code generators, or
// other use cases external to the grafana-server backend. For code within
// grafana-server, prefer the lineage loaders provided in e.g. pkg/coremodel/*.
func ExtractLineage(path string, lib thema.Library) (*ExtractedLineage, error) {
	if !filepath.IsAbs(path) {
		return nil, fmt.Errorf("must provide an absolute path, got %q", path)
	}

	ec := &ExtractedLineage{
		LineagePath: path,
	}

	var find func(path string) (string, error)
	find = func(path string) (string, error) {
		parent := filepath.Dir(path)
		if parent == path {
			return "", errors.New("grafana root directory could not be found")
		}
		fp := filepath.Join(path, "go.mod")
		if _, err := os.Stat(fp); err == nil {
			return path, nil
		}
		return find(parent)
	}
	groot, err := find(path)
	if err != nil {
		return ec, err
	}

	f, err := os.Open(ec.LineagePath)
	if err != nil {
		return nil, fmt.Errorf("could not open lineage file at %s: %w", path, err)
	}

	byt, err := ioutil.ReadAll(f)
	if err != nil {
		return nil, err
	}

	fs := fstest.MapFS{
		"lineage.cue": &fstest.MapFile{
			Data: byt,
		},
	}

	ec.RelativePath, err = filepath.Rel(groot, filepath.Dir(path))
	if err != nil {
		// should be unreachable, since we rootclimbed to find groot above
		panic(err)
	}
	ec.Lineage, err = cuectx.LoadGrafanaInstancesWithThema(ec.RelativePath, fs, lib)
	if err != nil {
		return ec, err
	}
	ec.IsCanonical = isCanonical(ec.Lineage.Name())
	return ec, nil
}

func isCanonical(name string) bool {
	return canonicalCoremodels[name]
}

// FIXME specifying coremodel canonicality DOES NOT belong here - it should be part of the coremodel declaration.
var canonicalCoremodels = map[string]bool{
	"dashboard": false,
}

// GenerateGoCoremodel generates a standard Go coremodel from a Thema lineage.
//
// The provided path must be a directory. Generated code files will be written
// to that path. The final element of the path must match the Lineage.Name().
func (ls *ExtractedLineage) GenerateGoCoremodel(path string) (WriteDiffer, error) {
	lin, lib := ls.Lineage, ls.Lineage.Library()
	_, name := filepath.Split(path)
	if name != lin.Name() {
		return nil, fmt.Errorf("lineage name %q must match final element of path, got %q", lin.Name(), path)
	}

	sch := thema.SchemaP(lin, thema.LatestVersion(lin))
	f, err := openapi.GenerateSchema(sch, nil)
	if err != nil {
		return nil, fmt.Errorf("thema openapi generation failed: %w", err)
	}

	str, err := yaml.Marshal(lib.Context().BuildFile(f))
	if err != nil {
		return nil, fmt.Errorf("cue-yaml marshaling failed: %w", err)
	}

	loader := openapi3.NewLoader()
	oT, err := loader.LoadFromData([]byte(str))
	if err != nil {
		return nil, fmt.Errorf("loading generated openapi failed; %w", err)
	}

	gostr, err := codegen.Generate(oT, lin.Name(), codegen.Options{
		GenerateTypes: true,
		SkipPrune:     true,
		SkipFmt:       true,
		UserTemplates: map[string]string{
			"imports.tmpl": fmt.Sprintf(tmplImports, ls.RelativePath),
			"typedef.tmpl": tmplTypedef,
		},
	})
	if err != nil {
		return nil, fmt.Errorf("openapi generation failed: %w", err)
	}

	vars := goPkg{
		Name:        lin.Name(),
		LineagePath: ls.RelativePath,
		LatestSeqv:  sch.Version()[0],
		LatestSchv:  sch.Version()[1],
	}
	var buuf bytes.Buffer
	err = tmplAddenda.Execute(&buuf, vars)
	if err != nil {
		panic(err)
	}

	fset := token.NewFileSet()
	gf, err := parser.ParseFile(fset, "coremodel_gen.go", gostr+buuf.String(), parser.ParseComments)
	if err != nil {
		return nil, fmt.Errorf("generated go file parsing failed: %w", err)
	}
	m := makeReplacer(lin.Name())
	ast.Walk(m, gf)

	var buf bytes.Buffer
	err = format.Node(&buf, fset, gf)
	if err != nil {
		return nil, fmt.Errorf("ast printing failed: %w", err)
	}

	byt, err := imports.Process("coremodel_gen.go", buf.Bytes(), nil)
	if err != nil {
		return nil, fmt.Errorf("goimports processing failed: %w", err)
	}

	// Generate the assignability test. TODO do this in a framework test instead
	var buf3 bytes.Buffer
	err = tmplAssignableTest.Execute(&buf3, vars)
	if err != nil {
		return nil, fmt.Errorf("failed generating assignability test file: %w", err)
	}

	wd := NewWriteDiffer()
	wd[filepath.Join(path, "coremodel_gen.go")] = byt
	wd[filepath.Join(path, "coremodel_gen_test.go")] = buf3.Bytes()

	return wd, nil
}

type goPkg struct {
	Name                   string
	LineagePath            string
	LatestSeqv, LatestSchv uint
	IsComposed             bool
}

func (ls *ExtractedLineage) GenerateTypescriptCoremodel(path string) (WriteDiffer, error) {
	_, name := filepath.Split(path)
	if name != ls.Lineage.Name() {
		return nil, fmt.Errorf("lineage name %q must match final element of path, got %q", ls.Lineage.Name(), path)
	}

	schv := thema.SchemaP(ls.Lineage, thema.LatestVersion(ls.Lineage)).UnwrapCUE()

	parts, err := cuetsy.GenerateAST(schv, cuetsy.Config{})
	if err != nil {
		return nil, fmt.Errorf("cuetsy parts gen failed: %w", err)
	}

	top, err := cuetsy.GenerateSingleAST(string(makeReplacer(ls.Lineage.Name())), schv, cuetsy.TypeInterface)
	if err != nil {
		return nil, fmt.Errorf("cuetsy top gen failed: %w", err)
	}

	// TODO until cuetsy can toposort its outputs, put the top/parent type at the bottom of the file.
	parts.Nodes = append(parts.Nodes, top.T, top.D)
	// parts.Nodes = append([]ts.Decl{top.T, top.D}, parts.Nodes...)

	var strb strings.Builder
	var str string
	fpath := ls.Lineage.Name() + ".gen.ts"
	strb.WriteString(fmt.Sprintf(genHeader, ls.RelativePath))

	if !ls.IsCanonical {
		fpath = fmt.Sprintf("%s_experimental.gen.ts", ls.Lineage.Name())
		strb.WriteString(`
// This model is a WIP and not yet canonical. Consequently, its members are
// not exported to exclude it from grafana-schema's public API surface.

`)
		strb.WriteString(fmt.Sprint(parts))
		// TODO replace this regexp with cuetsy config for whether members are exported
		re := regexp.MustCompile(`(?m)^export `)
		str = re.ReplaceAllLiteralString(strb.String(), "")
	} else {
		strb.WriteString(fmt.Sprint(parts))
		str = strb.String()
	}

	wd := NewWriteDiffer()
	wd[filepath.Join(path, fpath)] = []byte(str)
	return wd, nil
}

type modelReplacer string

func makeReplacer(name string) modelReplacer {
	return modelReplacer(fmt.Sprintf("%s%s", string(strings.ToUpper(name)[0]), name[1:]))
}

func (m modelReplacer) Visit(n ast.Node) ast.Visitor {
	switch x := n.(type) {
	case *ast.Ident:
		x.Name = m.replacePrefix(x.Name)
	}
	return m
}

func (m modelReplacer) replacePrefix(str string) string {
	if len(str) >= len(m) && str[:len(m)] == string(m) {
		return strings.Replace(str, string(m), "Model", 1)
	}
	return str
}
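
// Illustrative note (not part of this file): for the dashboard coremodel,
// makeReplacer("dashboard") yields the prefix "Dashboard", so the Visit pass
// above rewrites generated identifiers such as "Dashboard" and "DashboardStyle"
// to "Model" and "ModelStyle" in the emitted Go code.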

var genHeader = `// This file is autogenerated. DO NOT EDIT.
//
// To regenerate, run "make gen-cue" from repository root.
//
// Derived from the Thema lineage at %s

`

var tmplImports = genHeader + `package {{ .PackageName }}

import (
	"embed"
	"bytes"
	"compress/gzip"
	"context"
	"encoding/base64"
	"encoding/json"
	"encoding/xml"
	"errors"
	"fmt"
	"io"
	"io/ioutil"
	"net/http"
	"net/url"
	"path"
	"path/filepath"
	"strings"
	"time"

	"github.com/deepmap/oapi-codegen/pkg/runtime"
	openapi_types "github.com/deepmap/oapi-codegen/pkg/types"
	"github.com/getkin/kin-openapi/openapi3"
	"github.com/grafana/thema"
	"github.com/grafana/grafana/pkg/cuectx"
)
`

var tmplAddenda = template.Must(template.New("addenda").Parse(`
//go:embed lineage.cue
var cueFS embed.FS

// codegen ensures that this is always the latest Thema schema version
var currentVersion = thema.SV({{ .LatestSeqv }}, {{ .LatestSchv }})

// Lineage returns the Thema lineage representing a Grafana {{ .Name }}.
//
// The lineage is the canonical specification of the current {{ .Name }} schema,
// all prior schema versions, and the mappings that allow migration between
// schema versions.
{{- if .IsComposed }}//
// This is the base variant of the schema. It does not include any composed
// plugin schemas.{{ end }}
func Lineage(lib thema.Library, opts ...thema.BindOption) (thema.Lineage, error) {
	return cuectx.LoadGrafanaInstancesWithThema(filepath.Join("pkg", "coremodel", "dashboard"), cueFS, lib, opts...)
}

var _ thema.LineageFactory = Lineage

// Coremodel contains the foundational schema declaration for {{ .Name }}s.
type Coremodel struct {
	lin thema.Lineage
}

// Lineage returns the canonical dashboard Lineage.
func (c *Coremodel) Lineage() thema.Lineage {
	return c.lin
}

// CurrentSchema returns the current (latest) {{ .Name }} Thema schema.
func (c *Coremodel) CurrentSchema() thema.Schema {
	return thema.SchemaP(c.lin, currentVersion)
}

// GoType returns a pointer to an empty Go struct that corresponds to
// the current Thema schema.
func (c *Coremodel) GoType() interface{} {
	return &Model{}
}

func ProvideCoremodel(lib thema.Library) (*Coremodel, error) {
	lin, err := Lineage(lib)
	if err != nil {
		return nil, err
	}

	return &Coremodel{
		lin: lin,
	}, nil
}
`))

var tmplAssignableTest = template.Must(template.New("addenda").Parse(fmt.Sprintf(genHeader, "{{ .LineagePath }}") + `package {{ .Name }}

import (
	"testing"

	"github.com/grafana/grafana/pkg/cuectx"
	"github.com/grafana/thema"
)

func TestSchemaAssignability(t *testing.T) {
	lin, err := Lineage(cuectx.ProvideThemaLibrary())
	if err != nil {
		t.Fatal(err)
	}

	sch := thema.SchemaP(lin, currentVersion)

	err = thema.AssignableTo(sch, &Model{})
	if err != nil {
		t.Fatal(err)
	}
}
`))

var tmplTypedef = `{{range .Types}}
{{ with .Schema.Description }}{{ . }}{{ else }}// {{.TypeName}} defines model for {{.JsonName}}.{{ end }}
//
// THIS TYPE IS INTENDED FOR INTERNAL USE BY THE GRAFANA BACKEND, AND IS SUBJECT TO BREAKING CHANGES.
// Equivalent Go types at stable import paths are provided in https://github.com/grafana/grok.
type {{.TypeName}} {{if and (opts.AliasTypes) (.CanAlias)}}={{end}} {{.Schema.TypeDecl}}
{{end}}
`
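
Because the dashboard coremodel is marked non-canonical above, GenerateTypescriptCoremodel names its output dashboard_experimental.gen.ts and strips the export keyword from every generated declaration. Below is a small standalone sketch of just that regexp step; the sample TypeScript input is invented for illustration.

package main

import (
	"fmt"
	"regexp"
)

func main() {
	// Same pattern GenerateTypescriptCoremodel uses to un-export members of
	// not-yet-canonical coremodels.
	re := regexp.MustCompile(`(?m)^export `)
	src := "export interface Dashboard {\n  title?: string;\n}\nexport const defaultDashboard: Dashboard = {};\n"
	fmt.Print(re.ReplaceAllLiteralString(src, ""))
	// Output:
	// interface Dashboard {
	//   title?: string;
	// }
	// const defaultDashboard: Dashboard = {};
}
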
pkg/codegen/diffwrite.go (new file, 134 lines)
@@ -0,0 +1,134 @@
package codegen

import (
	"context"
	"errors"
	"fmt"
	"io"
	"os"
	"path/filepath"
	"sort"

	"github.com/google/go-cmp/cmp"
	"github.com/hashicorp/go-multierror"
	"golang.org/x/sync/errgroup"
)

// WriteDiffer is a pseudo-filesystem that supports batch-writing its contents
// to the real filesystem, or batch-comparing its contents to the real
// filesystem. Its intended use is for idiomatic `go generate`-style code
// generators, where it is expected that the results of codegen are committed to
// version control.
//
// In such cases, the normal behavior of a generator is to write files to disk,
// but in CI, that behavior should change to verify that what is already on disk
// is identical to the results of code generation. This allows CI to ensure that
// the results of code generation are always up to date. WriteDiffer supports
// these related behaviors through its Write() and Verify() methods, respectively.
//
// Note that the statelessness of WriteDiffer means that, if a particular input
// to the code generator goes away, it will not notice generated files left
// behind if their inputs are removed.
// TODO introduce a search/match system
type WriteDiffer map[string][]byte

func NewWriteDiffer() WriteDiffer {
	return WriteDiffer(make(map[string][]byte))
}

type writeSlice []struct {
	path     string
	contents []byte
}

// Verify checks the contents of each file against the filesystem. It emits an error
// if any of its contained files differ.
func (wd WriteDiffer) Verify() error {
	var result error

	for _, item := range wd.toSlice() {
		if _, err := os.Stat(item.path); err != nil {
			if errors.Is(err, os.ErrNotExist) {
				result = multierror.Append(result, fmt.Errorf("%s: generated file should exist, but does not", item.path))
			} else {
				result = multierror.Append(result, fmt.Errorf("%s: could not stat generated file: %w", item.path, err))
			}
			continue
		}

		f, err := os.Open(filepath.Clean(item.path))
		if err != nil {
			result = multierror.Append(result, fmt.Errorf("%s: %w", item.path, err))
			continue
		}

		ob, err := io.ReadAll(f)
		if err != nil {
			result = multierror.Append(result, fmt.Errorf("%s: %w", item.path, err))
			continue
		}
		dstr := cmp.Diff(string(ob), string(item.contents))
		if dstr != "" {
			result = multierror.Append(result, fmt.Errorf("%s would have changed:\n\n%s", item.path, dstr))
		}
	}

	return result
}

// Write writes all of the files to their indicated paths.
func (wd WriteDiffer) Write() error {
	g, _ := errgroup.WithContext(context.TODO())
	g.SetLimit(12)

	for _, item := range wd.toSlice() {
		it := item
		g.Go(func() error {
			err := os.MkdirAll(filepath.Dir(it.path), os.ModePerm)
			if err != nil {
				return fmt.Errorf("%s: failed to ensure parent directory exists: %w", it.path, err)
			}

			if err := os.WriteFile(it.path, it.contents, 0644); err != nil {
				return fmt.Errorf("%s: error while writing file: %w", it.path, err)
			}
			return nil
		})
	}

	return g.Wait()
}

func (wd WriteDiffer) toSlice() writeSlice {
	sl := make(writeSlice, 0, len(wd))
	type ws struct {
		path     string
		contents []byte
	}

	for k, v := range wd {
		sl = append(sl, ws{
			path:     k,
			contents: v,
		})
	}

	sort.Slice(sl, func(i, j int) bool {
		return sl[i].path < sl[j].path
	})

	return sl
}

// Merge combines all the entries from the provided WriteDiffer into the callee
// WriteDiffer. Duplicate paths result in an error.
func (wd WriteDiffer) Merge(wd2 WriteDiffer) error {
	for k, v := range wd2 {
		if _, has := wd[k]; has {
			return fmt.Errorf("path %s already exists in write differ", k)
		}
		wd[k] = v
	}

	return nil
}
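
The Write/Verify split documented on WriteDiffer above is what lets one generator serve both local development and CI. Below is a minimal sketch of that switch; the flag name and the sample entry are assumptions, not part of this commit.

package main

import (
	"flag"
	"fmt"
	"os"

	"github.com/grafana/grafana/pkg/codegen"
)

func main() {
	// --verify is a hypothetical flag; CI would pass it to check that committed
	// files match the codegen output instead of rewriting them.
	verify := flag.Bool("verify", false, "verify generated files instead of writing them")
	flag.Parse()

	wd := codegen.NewWriteDiffer()
	// Hypothetical entry standing in for real generator output.
	wd["/tmp/example_gen.txt"] = []byte("generated contents\n")

	var err error
	if *verify {
		err = wd.Verify()
	} else {
		err = wd.Write()
	}
	if err != nil {
		fmt.Fprintln(os.Stderr, err)
		os.Exit(1)
	}
}
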
pkg/codegen/pluggen.go (new file, 343 lines)
@@ -0,0 +1,343 @@
package codegen

import (
	"bytes"
	gerrors "errors"
	"fmt"
	"io"
	"io/fs"
	"os"
	"path/filepath"
	"strings"
	"testing/fstest"
	"text/template"

	"cuelang.org/go/cue"
	"cuelang.org/go/cue/ast"
	"cuelang.org/go/cue/errors"
	cload "cuelang.org/go/cue/load"
	"cuelang.org/go/cue/parser"
	"github.com/grafana/cuetsy"
	"github.com/grafana/grafana/pkg/schema/load"
)

// The only import statement we currently allow in any models.cue file
const allowedImport = "github.com/grafana/grafana/packages/grafana-schema/src/schema"

var importMap = map[string]string{
	allowedImport: "@grafana/schema",
}

// Hard-coded list of paths to skip. Remove a particular file as we're ready
// to rely on the TypeScript auto-generated by cuetsy for that particular file.
var skipPaths = []string{
	"public/app/plugins/panel/barchart/models.cue",
	"public/app/plugins/panel/canvas/models.cue",
	"public/app/plugins/panel/histogram/models.cue",
	"public/app/plugins/panel/heatmap-new/models.cue",
	"public/app/plugins/panel/candlestick/models.cue",
	"public/app/plugins/panel/state-timeline/models.cue",
	"public/app/plugins/panel/status-history/models.cue",
	"public/app/plugins/panel/table/models.cue",
	"public/app/plugins/panel/timeseries/models.cue",
}

const prefix = "/"

var paths = load.GetDefaultLoadPaths()

// CuetsifyPlugins runs cuetsy against plugins' models.cue files.
func CuetsifyPlugins(ctx *cue.Context, root string) (WriteDiffer, error) {
	// TODO this whole func has a lot of old, crufty behavior from the scuemata era; needs TLC
	var fspaths load.BaseLoadPaths
	var err error

	fspaths.BaseCueFS, err = populateMapFSFromRoot(paths.BaseCueFS, root, "")
	if err != nil {
		return nil, err
	}
	fspaths.DistPluginCueFS, err = populateMapFSFromRoot(paths.DistPluginCueFS, root, "")
	if err != nil {
		return nil, err
	}
	overlay, err := defaultOverlay(fspaths)
	if err != nil {
		return nil, err
	}

	// Prep the cue load config
	clcfg := &cload.Config{
		Overlay: overlay,
		// FIXME these module paths won't work for things not under our cue.mod - AKA third-party plugins
		ModuleRoot: prefix,
		Module:     "github.com/grafana/grafana",
	}

	// FIXME hardcoding paths to exclude is not the way to handle this
	excl := map[string]bool{
		"cue.mod":      true,
		"cue/scuemata": true,
		"packages/grafana-schema/src/scuemata/dashboard":      true,
		"packages/grafana-schema/src/scuemata/dashboard/dist": true,
	}

	exclude := func(path string) bool {
		dir := filepath.Dir(path)
		if excl[dir] {
			return true
		}
		for _, p := range skipPaths {
			if path == p {
				return true
			}
		}

		return false
	}

	outfiles := NewWriteDiffer()

	cuetsify := func(in fs.FS) error {
		seen := make(map[string]bool)
		return fs.WalkDir(in, ".", func(path string, d fs.DirEntry, err error) error {
			if err != nil {
				return err
			}
			dir := filepath.Dir(path)

			if d.IsDir() || filepath.Ext(d.Name()) != ".cue" || seen[dir] || exclude(path) {
				return nil
			}
			seen[dir] = true
			clcfg.Dir = filepath.Join(root, dir)
			// FIXME Horrible hack to figure out the identifier used for
			// imported packages - intercept the parser called by the loader to
			// look at the ast.Files on their way in to building.
			// Much better if we could work backwards from the cue.Value,
			// maybe even directly in cuetsy itself, and figure out when a
			// referenced object is "out of bounds".
			// var imports sync.Map
			var imports []*ast.ImportSpec
			clcfg.ParseFile = func(name string, src interface{}) (*ast.File, error) {
				f, err := parser.ParseFile(name, src, parser.ParseComments)
				if err != nil {
					return nil, err
				}
				imports = append(imports, f.Imports...)
				return f, nil
			}
			if strings.Contains(path, "public/app/plugins") {
				clcfg.Package = "grafanaschema"
			} else {
				clcfg.Package = ""
			}

			// FIXME loading in this way causes all files in a dir to be loaded
			// as a single cue.Instance or cue.Value, which makes it quite
			// difficult to map them _back_ onto the original file and generate
			// discrete .gen.ts files for each .cue input. However, going one
			// .cue file at a time and passing it as the first arg to
			// load.Instances() means that the other files are ignored
			// completely, causing references between these files to be
			// unresolved, and thus encounter a different kind of error.
			insts := cload.Instances(nil, clcfg)
			if len(insts) > 1 {
				panic("extra instances")
			}
			bi := insts[0]

			v := ctx.BuildInstance(bi)
			if v.Err() != nil {
				return v.Err()
			}

			var b []byte
			f := &tsFile{}
			seen := make(map[string]bool)
			// FIXME explicitly mapping path patterns to conversion patterns
			// is exactly what we want to avoid
			switch {
			// panel plugin models.cue files
			case strings.Contains(path, "public/app/plugins"):
				for _, im := range imports {
					ip := strings.Trim(im.Path.Value, "\"")
					if ip != allowedImport {
						// TODO make a specific error type for this
						return errors.Newf(im.Pos(), "import %q not allowed, panel plugins may only import from %q", ip, allowedImport)
					}
					// TODO this approach will silently swallow the unfixable
					// error case where multiple files in the same dir import
					// the same package to a different ident
					if !seen[ip] {
						seen[ip] = true
						f.Imports = append(f.Imports, convertImport(im))
					}
				}

				// Extract the latest schema and its version number. (All of this goes away with Thema, whew)
				f.V = &tsModver{}
				lins := v.LookupPath(cue.ParsePath("Panel.lineages"))
				f.V.Lin, _ = lins.Len().Int64()
				f.V.Lin = f.V.Lin - 1
				schs := lins.LookupPath(cue.MakePath(cue.Index(int(f.V.Lin))))
				f.V.Sch, _ = schs.Len().Int64()
				f.V.Sch = f.V.Sch - 1
				latest := schs.LookupPath(cue.MakePath(cue.Index(int(f.V.Sch))))

				b, err = cuetsy.Generate(latest, cuetsy.Config{})
			default:
				b, err = cuetsy.Generate(v, cuetsy.Config{})
			}

			if err != nil {
				return err
			}
			f.Body = string(b)

			var buf bytes.Buffer
			err = tsTemplate.Execute(&buf, f)
			outfiles[filepath.Join(root, strings.Replace(path, ".cue", ".gen.ts", -1))] = buf.Bytes()
			return err
		})
	}

	err = cuetsify(fspaths.BaseCueFS)
	if err != nil {
		return nil, gerrors.New(errors.Details(err, nil))
	}
	err = cuetsify(fspaths.DistPluginCueFS)
	if err != nil {
		return nil, gerrors.New(errors.Details(err, nil))
	}

	return outfiles, nil
}

func convertImport(im *ast.ImportSpec) *tsImport {
	tsim := &tsImport{
		Pkg: importMap[allowedImport],
	}
	if im.Name != nil && im.Name.String() != "" {
		tsim.Ident = im.Name.String()
	} else {
		sl := strings.Split(im.Path.Value, "/")
		final := sl[len(sl)-1]
		if idx := strings.Index(final, ":"); idx != -1 {
			tsim.Pkg = final[idx:]
		} else {
			tsim.Pkg = final
		}
	}
	return tsim
}

func defaultOverlay(p load.BaseLoadPaths) (map[string]cload.Source, error) {
	overlay := make(map[string]cload.Source)

	if err := toOverlay(prefix, p.BaseCueFS, overlay); err != nil {
		return nil, err
	}

	if err := toOverlay(prefix, p.DistPluginCueFS, overlay); err != nil {
		return nil, err
	}

	return overlay, nil
}

func toOverlay(prefix string, vfs fs.FS, overlay map[string]cload.Source) error {
	if !filepath.IsAbs(prefix) {
		return fmt.Errorf("must provide absolute path prefix when generating cue overlay, got %q", prefix)
	}
	err := fs.WalkDir(vfs, ".", func(path string, d fs.DirEntry, err error) error {
		if err != nil {
			return err
		}

		if d.IsDir() {
			return nil
		}

		f, err := vfs.Open(path)
		if err != nil {
			return err
		}
		defer func(f fs.File) {
			err := f.Close()
			if err != nil {
				return
			}
		}(f)

		b, err := io.ReadAll(f)
		if err != nil {
			return err
		}

		overlay[filepath.Join(prefix, path)] = cload.FromBytes(b)
		return nil
	})

	if err != nil {
		return err
	}

	return nil
}

// Helper function that populates an fs.FS by walking over a virtual filesystem,
// and reading files from disk corresponding to each file encountered.
func populateMapFSFromRoot(in fs.FS, root, join string) (fs.FS, error) {
	out := make(fstest.MapFS)
	err := fs.WalkDir(in, ".", func(path string, d fs.DirEntry, err error) error {
		if err != nil {
			return err
		}

		if d.IsDir() {
			return nil
		}
		// Ignore gosec warning G304. The input set here is necessarily
		// constrained to files specified in embed.go
		// nolint:gosec
		b, err := os.Open(filepath.Join(root, join, path))
		if err != nil {
			return err
		}
		byt, err := io.ReadAll(b)
		if err != nil {
			return err
		}

		out[path] = &fstest.MapFile{Data: byt}
		return nil
	})
	return out, err
}

type tsFile struct {
	V       *tsModver
	Imports []*tsImport
	Body    string
}

type tsModver struct {
	Lin, Sch int64
}

type tsImport struct {
	Ident string
	Pkg   string
}

var tsTemplate = template.Must(template.New("cuetsygen").Parse(`//~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
// This file is autogenerated. DO NOT EDIT.
//
// To regenerate, run "make gen-cue" from the repository root.
//~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
{{range .Imports}}
import * as {{.Ident}} from '{{.Pkg}}';{{end}}
{{if .V}}
export const modelVersion = Object.freeze([{{ .V.Lin }}, {{ .V.Sch }}]);
{{end}}
{{.Body}}`))
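
CuetsifyPlugins returns a WriteDiffer keyed by the .gen.ts path placed next to each models.cue input, leaving the caller to decide whether to write or verify. Below is a hedged sketch of invoking it; the cue context construction and the working-directory root are assumptions for illustration, not code from this commit.

package main

import (
	"fmt"
	"os"

	"cuelang.org/go/cue/cuecontext"

	"github.com/grafana/grafana/pkg/codegen"
)

func main() {
	// Assumption: the command is run from the repository root.
	root, err := os.Getwd()
	if err != nil {
		fmt.Fprintln(os.Stderr, err)
		os.Exit(1)
	}

	wd, err := codegen.CuetsifyPlugins(cuecontext.New(), root)
	if err != nil {
		fmt.Fprintln(os.Stderr, err)
		os.Exit(1)
	}

	// Write the generated .gen.ts files next to their models.cue inputs;
	// a CI run would call wd.Verify() instead.
	if err := wd.Write(); err != nil {
		fmt.Fprintln(os.Stderr, err)
		os.Exit(1)
	}
}
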