testing framework: validate the configuration before terraform test (#33559)

* testing framework: call validate on the configuration before running terraform test

* address comments

* make tests pass after merge

* fix tests
This commit is contained in:
Liam Cervante 2023-07-26 10:56:44 +02:00 committed by GitHub
parent 55792309eb
commit e1019b3641
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23
13 changed files with 632 additions and 36 deletions

View File

@ -13,6 +13,15 @@ type Validate struct {
// unspecified, validate will use the current directory.
Path string
// TestDirectory is the directory containing any test files that should be
// validated alongside the main configuration. Should be relative to the
// Path.
TestDirectory string
// NoTests indicates that Terraform should not validate any test files
// included with the module.
NoTests bool
// ViewType specifies which output format to use: human, JSON, or "raw".
ViewType ViewType
}
@ -29,6 +38,8 @@ func ParseValidate(args []string) (*Validate, tfdiags.Diagnostics) {
var jsonOutput bool
cmdFlags := defaultFlagSet("validate")
cmdFlags.BoolVar(&jsonOutput, "json", false, "json")
cmdFlags.StringVar(&validate.TestDirectory, "test-directory", "tests", "test-directory")
cmdFlags.BoolVar(&validate.NoTests, "no-tests", false, "no-tests")
if err := cmdFlags.Parse(args); err != nil {
diags = diags.Append(tfdiags.Sourceless(

View File

@ -8,6 +8,7 @@ import (
"testing"
"github.com/davecgh/go-spew/spew"
"github.com/hashicorp/terraform/internal/tfdiags"
)
@ -19,22 +20,42 @@ func TestParseValidate_valid(t *testing.T) {
"defaults": {
nil,
&Validate{
Path: ".",
ViewType: ViewHuman,
Path: ".",
TestDirectory: "tests",
ViewType: ViewHuman,
},
},
"json": {
[]string{"-json"},
&Validate{
Path: ".",
ViewType: ViewJSON,
Path: ".",
TestDirectory: "tests",
ViewType: ViewJSON,
},
},
"path": {
[]string{"-json", "foo"},
&Validate{
Path: "foo",
ViewType: ViewJSON,
Path: "foo",
TestDirectory: "tests",
ViewType: ViewJSON,
},
},
"test-directory": {
[]string{"-test-directory", "other"},
&Validate{
Path: ".",
TestDirectory: "other",
ViewType: ViewHuman,
},
},
"no-tests": {
[]string{"-no-tests"},
&Validate{
Path: ".",
TestDirectory: "tests",
ViewType: ViewHuman,
NoTests: true,
},
},
}
@ -61,8 +82,9 @@ func TestParseValidate_invalid(t *testing.T) {
"unknown flag": {
[]string{"-boop"},
&Validate{
Path: ".",
ViewType: ViewHuman,
Path: ".",
TestDirectory: "tests",
ViewType: ViewHuman,
},
tfdiags.Diagnostics{
tfdiags.Sourceless(
@ -75,8 +97,9 @@ func TestParseValidate_invalid(t *testing.T) {
"too many arguments": {
[]string{"-json", "bar", "baz"},
&Validate{
Path: "bar",
ViewType: ViewJSON,
Path: "bar",
TestDirectory: "tests",
ViewType: ViewJSON,
},
tfdiags.Diagnostics{
tfdiags.Sourceless(

View File

@ -51,6 +51,8 @@ Options:
-json If specified, machine readable output will be printed in
JSON format
-no-color If specified, output won't contain any color.
-test-directory=path Set the Terraform test directory, defaults to "tests".
-var 'foo=bar' Set a value for one of the input variables in the root
@ -231,6 +233,23 @@ func (c *TestCommand) Run(rawArgs []string) int {
defer stop()
defer cancel()
// Validate the main config first.
validateDiags := runner.Validate()
// Print out any warnings or errors from the validation.
view.Diagnostics(nil, nil, validateDiags)
if validateDiags.HasErrors() {
// Don't try and run the tests if the validation actually failed.
// We'll also leave the test status as pending as we actually made
// no effort to run the tests.
return
}
if runner.Stopped || runner.Cancelled {
suite.Status = moduletest.Error
return
}
runner.Start(variables)
}()
@ -309,6 +328,64 @@ type TestRunner struct {
Verbose bool
}
// Validate performs an upfront terraform validate over the main configuration
// under test and over every locally-sourced module executed by a run block,
// returning the combined diagnostics. Remote modules are deliberately not
// validated: they are assumed to have been vetted by their authors before
// being published.
func (runner *TestRunner) Validate() tfdiags.Diagnostics {
	log.Printf("[TRACE] TestRunner: Validating configuration.")

	var diags tfdiags.Diagnostics

	// The main configuration under test is always validated first.
	diags = diags.Append(runner.validateConfig(runner.Config))
	if runner.Cancelled || runner.Stopped {
		return diags
	}

	// Several run blocks can execute the same local module, so remember which
	// sources we have already validated and only validate each one once.
	seen := make(map[string]bool)
	for _, file := range runner.Suite.Files {
		for _, run := range file.Runs {
			if runner.Cancelled || runner.Stopped {
				return diags
			}

			// Cursory validation of the run block configuration itself.
			diags = diags.Append(run.Config.Validate())

			cfg := run.Config.ConfigUnderTest
			if cfg == nil {
				// This run executes the main configuration, which we have
				// already validated above.
				continue
			}

			// Only locally-sourced modules are validated here; testing a
			// registry module is not this framework's job.
			if _, local := run.Config.Module.Source.(addrs.ModuleSourceLocal); !local {
				continue
			}

			source := run.Config.Module.Source.String()
			if seen[source] {
				continue
			}
			seen[source] = true

			diags = diags.Append(runner.validateConfig(cfg))
		}
	}

	return diags
}
func (runner *TestRunner) Start(globals map[string]backend.UnparsedVariableValue) {
var files []string
for name := range runner.Suite.Files {
@ -509,6 +586,42 @@ func (runner *TestRunner) ExecuteTestRun(mgr *TestStateManager, run *moduletest.
return state
}
// validateConfig runs a blocking terraform validate operation against a
// single configuration, returning any diagnostics produced. The validate
// operation itself runs in a goroutine so that runner.wait can observe
// interrupts while it executes.
func (runner *TestRunner) validateConfig(config *configs.Config) tfdiags.Diagnostics {
log.Printf("[TRACE] TestRunner: validating specific config %s", config.Path)
var diags tfdiags.Diagnostics
// Build the context options from the CLI command; a failure here (e.g. bad
// provider setup) aborts the validation immediately.
tfCtxOpts, err := runner.command.contextOpts()
diags = diags.Append(err)
if err != nil {
return diags
}
tfCtx, ctxDiags := terraform.NewContext(tfCtxOpts)
diags = diags.Append(ctxDiags)
if ctxDiags.HasErrors() {
return diags
}
// Run the validation asynchronously; done() signals runner.wait that the
// operation has finished (normally or via panic).
runningCtx, done := context.WithCancel(context.Background())
var validateDiags tfdiags.Diagnostics
go func() {
defer logging.PanicHandler()
defer done()
validateDiags = tfCtx.Validate(config)
}()
// We don't need to pass in any metadata here, as we're only validating
// so if something is cancelled it doesn't matter. We only pass in the
// metadata so we can print context around the cancellation which we don't
// need to do in this case.
waitDiags, _ := runner.wait(tfCtx, runningCtx, nil, nil, nil, nil)
diags = diags.Append(validateDiags)
diags = diags.Append(waitDiags)
return diags
}
// execute executes Terraform plan and apply operations for the given arguments.
//
// The command argument decides whether it executes only a plan or also applies
@ -654,9 +767,14 @@ func (runner *TestRunner) execute(mgr *TestStateManager, run *moduletest.Run, fi
}
func (runner *TestRunner) wait(ctx *terraform.Context, runningCtx context.Context, mgr *TestStateManager, run *moduletest.Run, file *moduletest.File, created []*plans.ResourceInstanceChangeSrc) (diags tfdiags.Diagnostics, cancelled bool) {
identifier := file.Name
if run != nil {
identifier = fmt.Sprintf("%s/%s", identifier, run.Name)
var identifier string
if file == nil {
identifier = "validate"
} else {
identifier = file.Name
if run != nil {
identifier = fmt.Sprintf("%s/%s", identifier, run.Name)
}
}
log.Printf("[TRACE] TestRunner: waiting for execution during %s", identifier)
@ -667,12 +785,20 @@ func (runner *TestRunner) wait(ctx *terraform.Context, runningCtx context.Contex
handleCancelled := func() {
log.Printf("[DEBUG] TestRunner: test execution cancelled during %s", identifier)
states := make(map[*moduletest.Run]*states.State)
states[nil] = mgr.State
for _, module := range mgr.States {
states[module.Run] = module.State
if mgr != nil {
// The state manager might be nil if we are waiting for a validate
// call to finish. This is fine, it just means there's no state
// that might be need to be cleaned up.
states := make(map[*moduletest.Run]*states.State)
states[nil] = mgr.State
for _, module := range mgr.States {
states[module.Run] = module.State
}
runner.View.FatalInterruptSummary(run, file, states, created)
}
runner.View.FatalInterruptSummary(run, file, states, created)
cancelled = true
go ctx.Stop()

View File

@ -487,3 +487,131 @@ Success! 2 passed, 0 failed.
t.Errorf("should have deleted all resources on completion but left %v", provider.ResourceString())
}
}
// TestTest_ValidatesBeforeExecution verifies that `terraform test` validates
// the configuration (including the test files themselves) before executing
// any tests, and that a validation failure reports the error and runs zero
// tests.
func TestTest_ValidatesBeforeExecution(t *testing.T) {
td := t.TempDir()
// The "test/invalid" fixture contains an expect_failures reference to a
// local value, which is not a checkable object.
testCopyDir(t, testFixturePath(path.Join("test", "invalid")), td)
defer testChdir(t, td)()
provider := testing_command.NewProvider(nil)
view, done := testView(t)
c := &TestCommand{
Meta: Meta{
testingOverrides: metaOverridesForProvider(provider.Provider),
View: view,
},
}
// Expect a non-zero exit because validation fails before any test runs.
code := c.Run([]string{"-verbose", "-no-color"})
output := done(t)
if code != 1 {
t.Errorf("expected status code 1 but got %d", code)
}
expectedOut := `
Executed 0 tests.
`
expectedErr := `
Error: Invalid ` + "`expect_failures`" + ` reference
on main.tftest.hcl line 5, in run "invalid":
5: local.my_value,
You cannot expect failures from local.my_value. You can only expect failures
from checkable objects such as input variables, output values, check blocks,
managed resources and data sources.
`
actualOut := output.Stdout()
actualErr := output.Stderr()
if diff := cmp.Diff(actualOut, expectedOut); len(diff) > 0 {
t.Errorf("output didn't match expected:\nexpected:\n%s\nactual:\n%s\ndiff:\n%s", expectedOut, actualOut, diff)
}
if diff := cmp.Diff(actualErr, expectedErr); len(diff) > 0 {
t.Errorf("error didn't match expected:\nexpected:\n%s\nactual:\n%s\ndiff:\n%s", expectedErr, actualErr, diff)
}
// Nothing should have been created, so nothing should be left behind.
if provider.ResourceCount() > 0 {
t.Errorf("should have deleted all resources on completion but left %v", provider.ResourceString())
}
}
// TestTest_ValidatesLocalModulesBeforeExecution verifies that `terraform test`
// validates locally-sourced modules referenced by run blocks before executing
// any tests, and that a validation failure in such a module reports the error
// and runs zero tests.
func TestTest_ValidatesLocalModulesBeforeExecution(t *testing.T) {
	td := t.TempDir()
	// The "test/invalid-module" fixture has a run block sourcing ./setup,
	// whose configuration references an undeclared input variable.
	testCopyDir(t, testFixturePath(path.Join("test", "invalid-module")), td)
	defer testChdir(t, td)()

	provider := testing_command.NewProvider(nil)

	// The fixture needs the "test" provider, so supply a mock provider
	// source for init to resolve it from.
	providerSource, close := newMockProviderSource(t, map[string][]string{
		"test": {"1.0.0"},
	})
	defer close()

	streams, done := terminal.StreamsForTesting(t)
	view := views.NewView(streams)
	ui := new(cli.MockUi)

	meta := Meta{
		testingOverrides: metaOverridesForProvider(provider.Provider),
		Ui:               ui,
		View:             view,
		Streams:          streams,
		ProviderSource:   providerSource,
	}

	// Init must succeed so that the local module is installed before testing.
	init := &InitCommand{
		Meta: meta,
	}
	if code := init.Run(nil); code != 0 {
		t.Fatalf("expected status code 0 but got %d: %s", code, ui.ErrorWriter)
	}

	command := &TestCommand{
		Meta: meta,
	}

	// Expect a non-zero exit because the setup module fails validation.
	code := command.Run([]string{"-no-color"})
	output := done(t)
	if code != 1 {
		t.Errorf("expected status code 1 but got %d", code)
	}

	expectedOut := `
Executed 0 tests.
`
	expectedErr := `
Error: Reference to undeclared input variable
on setup/main.tf line 3, in resource "test_resource" "setup":
3: value = var.not_real // Oh no!
An input variable with the name "not_real" has not been declared. This
variable can be declared with a variable "not_real" {} block.
`
	actualOut := output.Stdout()
	actualErr := output.Stderr()
	if diff := cmp.Diff(actualOut, expectedOut); len(diff) > 0 {
		t.Errorf("output didn't match expected:\nexpected:\n%s\nactual:\n%s\ndiff:\n%s", expectedOut, actualOut, diff)
	}
	if diff := cmp.Diff(actualErr, expectedErr); len(diff) > 0 {
		t.Errorf("error didn't match expected:\nexpected:\n%s\nactual:\n%s\ndiff:\n%s", expectedErr, actualErr, diff)
	}

	// Fix: the original duplicated this assertion block verbatim; check the
	// provider's resource count exactly once.
	if provider.ResourceCount() > 0 {
		t.Errorf("should have deleted all resources on completion but left %v", provider.ResourceString())
	}
}

View File

@ -0,0 +1,8 @@
locals {
my_value = "Hello, world!"
}
resource "test_resource" "example" {
value = local.my_value
}
# Test fixture: a valid root module for the `terraform test` command tests;
# presumably paired with the main.tftest.hcl in the same directory — verify
# against the fixture layout. Keep existing lines unshifted: related tests
# assert exact source positions in sibling fixture files.

View File

@ -0,0 +1,8 @@
run "invalid" {
module {
source = "./setup"
}
}
run "test" {}
# Test fixture: the "invalid" run sources the local ./setup module, which
# fails validation (it references an undeclared variable), while "test" runs
# against the root module. The test framework should surface the setup
# module's validation error before executing either run.

View File

@ -0,0 +1,4 @@
resource "test_resource" "setup" {
value = var.not_real // Oh no!
}
# Deliberately invalid fixture: var.not_real is never declared, so validating
# this module yields "Reference to undeclared input variable".
# NOTE(review): tests assert this error at "setup/main.tf line 3" — do not
# shift the existing lines above.

View File

@ -0,0 +1,8 @@
locals {
my_value = "Hello, world!"
}
resource "test_resource" "example" {
value = local.my_value
}

View File

@ -0,0 +1,8 @@
run "invalid" {
expect_failures = [
local.my_value,
]
}
# Deliberately invalid fixture: expect_failures may only reference checkable
# objects (variables, outputs, checks, resources, data sources), so the
# local.my_value entry triggers an "Invalid `expect_failures` reference"
# diagnostic.
# NOTE(review): tests assert this error at "main.tftest.hcl line 5" — do not
# shift the existing lines above.

View File

@ -8,8 +8,10 @@ import (
"path/filepath"
"strings"
"github.com/hashicorp/terraform/internal/addrs"
"github.com/hashicorp/terraform/internal/command/arguments"
"github.com/hashicorp/terraform/internal/command/views"
"github.com/hashicorp/terraform/internal/configs"
"github.com/hashicorp/terraform/internal/terraform"
"github.com/hashicorp/terraform/internal/tfdiags"
)
@ -52,7 +54,7 @@ func (c *ValidateCommand) Run(rawArgs []string) int {
return view.Results(diags)
}
validateDiags := c.validate(dir)
validateDiags := c.validate(dir, args.TestDirectory, args.NoTests)
diags = diags.Append(validateDiags)
// Validating with dev overrides in effect means that the result might
@ -64,30 +66,76 @@ func (c *ValidateCommand) Run(rawArgs []string) int {
return view.Results(diags)
}
func (c *ValidateCommand) validate(dir string) tfdiags.Diagnostics {
func (c *ValidateCommand) validate(dir, testDir string, noTests bool) tfdiags.Diagnostics {
var diags tfdiags.Diagnostics
var cfg *configs.Config
cfg, cfgDiags := c.loadConfig(dir)
diags = diags.Append(cfgDiags)
if noTests {
cfg, diags = c.loadConfig(dir)
} else {
cfg, diags = c.loadConfigWithTests(dir, testDir)
}
if diags.HasErrors() {
return diags
}
opts, err := c.contextOpts()
if err != nil {
diags = diags.Append(err)
validate := func(cfg *configs.Config) tfdiags.Diagnostics {
var diags tfdiags.Diagnostics
opts, err := c.contextOpts()
if err != nil {
diags = diags.Append(err)
return diags
}
tfCtx, ctxDiags := terraform.NewContext(opts)
diags = diags.Append(ctxDiags)
if ctxDiags.HasErrors() {
return diags
}
return diags.Append(tfCtx.Validate(cfg))
}
diags = diags.Append(validate(cfg))
if noTests {
return diags
}
tfCtx, ctxDiags := terraform.NewContext(opts)
diags = diags.Append(ctxDiags)
if ctxDiags.HasErrors() {
return diags
validatedModules := make(map[string]bool)
// We'll also do a quick validation of the Terraform test files. These live
// outside the Terraform graph so we have to do this separately.
for _, file := range cfg.Module.Tests {
for _, run := range file.Runs {
if run.Module != nil {
// Then we can also validate the referenced modules, but we are
// only going to do this is if they are local modules.
//
// Basically, local testing modules are something the user can
// reasonably go and fix. If it's a module being downloaded from
// the registry, the expectation is that the author of the
// module should have ran `terraform validate` themselves.
if _, ok := run.Module.Source.(addrs.ModuleSourceLocal); ok {
if validated := validatedModules[run.Module.Source.String()]; !validated {
// Since we can reference the same module twice, let's
// not validate the same thing multiple times.
validatedModules[run.Module.Source.String()] = true
diags = diags.Append(validate(run.ConfigUnderTest))
}
}
}
diags = diags.Append(run.Validate())
}
}
validateDiags := tfCtx.Validate(cfg)
diags = diags.Append(validateDiags)
return diags
}
@ -123,11 +171,15 @@ Usage: terraform [global options] validate [options]
Options:
-json Produce output in a machine-readable JSON format, suitable for
use in text editor integrations and other automated systems.
Always disables color.
-json Produce output in a machine-readable JSON format,
suitable for use in text editor integrations and other
automated systems. Always disables color.
-no-color If specified, output won't contain any color.
-no-color If specified, output won't contain any color.
-no-tests If specified, Terraform will not validate test files.
-test-directory=path Set the Terraform test directory, defaults to "tests".
`
return strings.TrimSpace(helpText)
}

View File

@ -12,8 +12,11 @@ import (
"testing"
"github.com/google/go-cmp/cmp"
"github.com/mitchellh/cli"
"github.com/zclconf/go-cty/cty"
testing_command "github.com/hashicorp/terraform/internal/command/testing"
"github.com/hashicorp/terraform/internal/command/views"
"github.com/hashicorp/terraform/internal/configs/configschema"
"github.com/hashicorp/terraform/internal/providers"
"github.com/hashicorp/terraform/internal/terminal"
@ -217,6 +220,95 @@ func TestMissingDefinedVar(t *testing.T) {
}
}
// TestValidateWithInvalidTestFile verifies that `terraform validate` also
// validates test files alongside the main configuration, failing when a test
// file contains an invalid expect_failures reference.
func TestValidateWithInvalidTestFile(t *testing.T) {
// We're reusing some testing configs that were written for testing the
// test command here, so we have to initialise things slightly differently
// to the other tests.
view, done := testView(t)
provider := testing_command.NewProvider(nil)
c := &ValidateCommand{
Meta: Meta{
testingOverrides: metaOverridesForProvider(provider.Provider),
View: view,
},
}
// Point validate directly at the fixture directory rather than chdir-ing.
var args []string
args = append(args, "-no-color")
args = append(args, testFixturePath("test/invalid"))
code := c.Run(args)
output := done(t)
// Validation of the test file should fail with a non-zero exit code.
if code != 1 {
t.Fatalf("Should have failed: %d\n\n%s", code, output.Stderr())
}
wantError := "Error: Invalid `expect_failures` reference"
if !strings.Contains(output.Stderr(), wantError) {
t.Fatalf("Missing error string %q\n\n'%s'", wantError, output.Stderr())
}
}
// TestValidateWithInvalidTestModule verifies that `terraform validate` also
// validates locally-sourced modules referenced by run blocks in test files,
// failing when such a module references an undeclared input variable.
func TestValidateWithInvalidTestModule(t *testing.T) {
// We're reusing some testing configs that were written for testing the
// test command here, so we have to initialise things slightly differently
// to the other tests.
td := t.TempDir()
testCopyDir(t, testFixturePath(path.Join("test", "invalid-module")), td)
defer testChdir(t, td)()
streams, done := terminal.StreamsForTesting(t)
view := views.NewView(streams)
ui := new(cli.MockUi)
provider := testing_command.NewProvider(nil)
// The fixture needs the "test" provider available during init.
providerSource, close := newMockProviderSource(t, map[string][]string{
"test": {"1.0.0"},
})
defer close()
meta := Meta{
testingOverrides: metaOverridesForProvider(provider.Provider),
Ui: ui,
View: view,
Streams: streams,
ProviderSource: providerSource,
}
// Init installs the local setup module so validate can load it.
init := &InitCommand{
Meta: meta,
}
if code := init.Run(nil); code != 0 {
t.Fatalf("expected status code 0 but got %d: %s", code, ui.ErrorWriter)
}
c := &ValidateCommand{
Meta: meta,
}
var args []string
args = append(args, "-no-color")
code := c.Run(args)
output := done(t)
// The setup module's undeclared variable should make validation fail.
if code != 1 {
t.Fatalf("Should have failed: %d\n\n%s", code, output.Stderr())
}
wantError := "Error: Reference to undeclared input variable"
if !strings.Contains(output.Stderr(), wantError) {
t.Fatalf("Missing error string %q\n\n'%s'", wantError, output.Stderr())
}
}
func TestValidate_json(t *testing.T) {
tests := []struct {
path string

View File

@ -8,6 +8,7 @@ import (
"github.com/hashicorp/terraform/internal/addrs"
"github.com/hashicorp/terraform/internal/getmodules"
"github.com/hashicorp/terraform/internal/tfdiags"
)
// TestCommand represents the Terraform a given run block will execute, plan
@ -123,6 +124,39 @@ type TestRun struct {
DeclRange hcl.Range
}
// Validate does a very simple and cursory check across the run block to look
// for simple issues we can highlight early on.
// Validate performs a lightweight, cursory sanity check of the run block,
// catching simple configuration mistakes early. It currently verifies only
// that every expect_failures entry references a checkable object.
func (run *TestRun) Validate() tfdiags.Diagnostics {
	var diags tfdiags.Diagnostics

	for _, expr := range run.ExpectFailures {
		ref, moreDiags := addrs.ParseRefFromTestingScope(expr)
		diags = diags.Append(moreDiags)
		if moreDiags.HasErrors() {
			// The reference didn't even parse; the parse diagnostics cover it.
			continue
		}

		switch ref.Subject.(type) {
		case addrs.OutputValue, addrs.InputVariable, addrs.Check, addrs.ResourceInstance, addrs.Resource:
			// These are the checkable object kinds; nothing to report.
		default:
			// Anything else (locals, modules, count, each, ...) cannot
			// produce check failures, so reject it with a pointer back to
			// the offending expression.
			diags = diags.Append(&hcl.Diagnostic{
				Severity: hcl.DiagError,
				Summary:  "Invalid `expect_failures` reference",
				Detail:   fmt.Sprintf("You cannot expect failures from %s. You can only expect failures from checkable objects such as input variables, output values, check blocks, managed resources and data sources.", ref.Subject.String()),
				Subject:  ref.SourceRange.ToHCL().Ptr(),
			})
		}
	}

	return diags
}
// TestRunModuleCall specifies which module should be executed by a given run
// block.
type TestRunModuleCall struct {

View File

@ -0,0 +1,94 @@
package configs
import (
"testing"
"github.com/google/go-cmp/cmp"
"github.com/hashicorp/hcl/v2"
"github.com/hashicorp/hcl/v2/hclsyntax"
)
// TestTestRun_Validate verifies that TestRun.Validate accepts expect_failures
// references to checkable objects (input variables, output values, checks,
// managed resources and data sources) and rejects everything else with a
// descriptive diagnostic.
func TestTestRun_Validate(t *testing.T) {
	tcs := map[string]struct {
		expectedFailures []string
		diagnostic       string // empty means no diagnostic is expected
	}{
		"empty": {},
		"supports_expected": {
			expectedFailures: []string{
				"check.expected_check",
				"var.expected_var",
				"output.expected_output",
				"test_resource.resource",
				"resource.test_resource.resource",
				"data.test_resource.resource",
			},
		},
		"count": {
			expectedFailures: []string{
				"count.index",
			},
			diagnostic: "You cannot expect failures from count.index. You can only expect failures from checkable objects such as input variables, output values, check blocks, managed resources and data sources.",
		},
		"foreach": {
			expectedFailures: []string{
				"each.key",
			},
			diagnostic: "You cannot expect failures from each.key. You can only expect failures from checkable objects such as input variables, output values, check blocks, managed resources and data sources.",
		},
		"local": {
			expectedFailures: []string{
				"local.value",
			},
			diagnostic: "You cannot expect failures from local.value. You can only expect failures from checkable objects such as input variables, output values, check blocks, managed resources and data sources.",
		},
		"module": {
			expectedFailures: []string{
				"module.my_module",
			},
			diagnostic: "You cannot expect failures from module.my_module. You can only expect failures from checkable objects such as input variables, output values, check blocks, managed resources and data sources.",
		},
		"path": {
			expectedFailures: []string{
				"path.walk",
			},
			diagnostic: "You cannot expect failures from path.walk. You can only expect failures from checkable objects such as input variables, output values, check blocks, managed resources and data sources.",
		},
	}
	for name, tc := range tcs {
		t.Run(name, func(t *testing.T) {
			run := &TestRun{}
			for _, addr := range tc.expectedFailures {
				run.ExpectFailures = append(run.ExpectFailures, parseTraversal(t, addr))
			}

			diags := run.Validate()
			if len(diags) > 1 {
				t.Fatalf("too many diags: %d", len(diags))
			}

			if len(tc.diagnostic) == 0 {
				if len(diags) != 0 {
					t.Fatalf("expected no diags but got: %s", diags[0].Description().Detail)
				}
				return
			}

			// Fix: guard against an empty diags slice before indexing. The
			// original indexed diags[0] unconditionally here, so a missing
			// diagnostic panicked with index-out-of-range instead of failing
			// the test cleanly.
			if len(diags) == 0 {
				t.Fatalf("expected diagnostic %q but got none", tc.diagnostic)
			}

			if diff := cmp.Diff(tc.diagnostic, diags[0].Description().Detail); len(diff) > 0 {
				t.Fatalf("unexpected diff:\n%s", diff)
			}
		})
	}
}
// parseTraversal converts a string address into an absolute HCL traversal,
// failing the calling test immediately if the address does not parse.
func parseTraversal(t *testing.T, addr string) hcl.Traversal {
	t.Helper()

	trav, parseDiags := hclsyntax.ParseTraversalAbs([]byte(addr), "", hcl.InitialPos)
	if parseDiags.HasErrors() {
		t.Fatalf("invalid address: %s", parseDiags.Error())
	}
	return trav
}