diff --git a/override.go b/override.go
new file mode 100644
index 0000000..095d301
--- /dev/null
+++ b/override.go
@@ -0,0 +1,405 @@
+package preview
+
+import (
+	"errors"
+	"fmt"
+	"io/fs"
+	"path"
+	"strings"
+
+	"github.com/hashicorp/hcl/v2"
+	"github.com/hashicorp/hcl/v2/hclwrite"
+)
+
+// primaryState tracks a parsed primary .tf file during override
+// merging.
+type primaryState struct {
+	path     string
+	file     *hclwrite.File
+	modified bool
+}
+
+// mergeOverrides scans the filesystem for .tf Terraform override files
+// and returns a new FS where override content has been merged into primary
+// files using Terraform's override semantics.
+// If no override files are found, the original FS is returned unchanged.
+// If an error is encountered, diagnostics are returned in addition to a
+// non-nil error.
+// Warning diagnostics may also be returned on success (e.g. for skipped
+// .tf.json files).
+//
+// Override files are identified by Terraform's naming convention:
+// "override.tf", "*_override.tf", and their .tf.json variants. We only support
+// .tf files; .tf.json files get a diagnostic warning and are excluded from
+// override merging.
+//
+// Ref: https://developer.hashicorp.com/terraform/language/files/override
+//
+// NOTE: Terraform validates primary blocks before merging overrides, so it
+// rejects a primary block that is missing a required attribute even if the
+// override would supply it. We merge first, so this edge case passes
+// validation. This is immaterial in practice: Coder runs terraform plan
+// during template import, which would catch it before the template is
+// published.
+func mergeOverrides(origFS fs.FS) (fs.FS, hcl.Diagnostics, error) {
+	// Group files by directory, separating primary from override files.
+	// Walk the entire tree, not just the root directory, because Trivy's
+	// EvaluateAll processes all modules, so we need to pre-merge overrides at
+	// every level before Trivy sees the FS.
+	type dirFiles struct {
+		primaries []string
+		overrides []string
+		// Used to generate warnings at merge stage.
+		jsonPrimaries []string
+	}
+	dirs := make(map[string]*dirFiles)
+
+	var warnings hcl.Diagnostics
+
+	err := fs.WalkDir(origFS, ".", func(p string, d fs.DirEntry, err error) error {
+		if err != nil {
+			return err
+		}
+		// Skip dirs; we deal with them by acting on their files.
+		if d.IsDir() {
+			return nil
+		}
+
+		ext := tfFileExt(d.Name())
+		if ext == "" {
+			return nil
+		}
+
+		dir := path.Dir(p)
+		if dirs[dir] == nil {
+			dirs[dir] = &dirFiles{}
+		}
+
+		// We don't support parsing .tf.json files. They remain in the
+		// FS for Trivy to parse directly but never participate in
+		// override merging.
+		if ext == ".tf.json" {
+			if isOverrideFile(d.Name()) {
+				warnings = warnings.Append(&hcl.Diagnostic{
+					Severity: hcl.DiagWarning,
+					Summary:  "Override file uses unsupported .tf.json format",
+					Detail:   fmt.Sprintf("%s skipped for override merging", p),
+				})
+			} else {
+				// Save the name of the .tf.json primary so we issue a
+				// warning only if we do merging for the dir (less noise).
+				dirs[dir].jsonPrimaries = append(dirs[dir].jsonPrimaries, p)
+			}
+			return nil
+		}
+
+		if isOverrideFile(d.Name()) {
+			dirs[dir].overrides = append(dirs[dir].overrides, p)
+		} else {
+			dirs[dir].primaries = append(dirs[dir].primaries, p)
+		}
+		return nil
+	})
+	if err != nil {
+		return nil, warnings, fmt.Errorf("error reading template files: %w", err)
+	}
+
+	hasOverrides := false
+	for _, dir := range dirs {
+		if len(dir.overrides) > 0 {
+			hasOverrides = true
+			break
+		}
+	}
+	if !hasOverrides {
+		// We are a no-op if there are no supported override files at
+		// all. Include warnings so callers know about ignored
+		// .tf.json files.
+		return origFS, warnings, nil
+	}
+
+	replaced := make(map[string][]byte)
+	hidden := make(map[string]bool)
+
+	for _, dir := range dirs {
+		if len(dir.overrides) == 0 {
+			continue
+		}
+
+		for _, jp := range dir.jsonPrimaries {
+			warnings = warnings.Append(&hcl.Diagnostic{
+				Severity: hcl.DiagWarning,
+				Summary:  "Primary file uses .tf.json format",
+				Detail:   fmt.Sprintf("%s skipped for override merging", jp),
+			})
+		}
+
+		// Parse all primary files upfront so override files can be applied
+		// sequentially, each merging into the already-merged result.
+		// NB: loop variables are deliberately NOT named "path" to avoid
+		// shadowing the imported path package used above.
+		primaries := make([]*primaryState, 0, len(dir.primaries))
+		for _, primaryPath := range dir.primaries {
+			content, err := fs.ReadFile(origFS, primaryPath)
+			if err != nil {
+				return nil, warnings, fmt.Errorf("error reading file %s: %w", primaryPath, err)
+			}
+			f, diags := hclwrite.ParseConfig(content, primaryPath, hcl.Pos{Line: 1, Column: 1})
+			if diags.HasErrors() {
+				return nil, warnings.Extend(diags), fmt.Errorf("error parsing file %s", primaryPath)
+			}
+			primaries = append(primaries, &primaryState{path: primaryPath, file: f})
+		}
+
+		// Check for duplicate primary blocks before merging - Terraform
+		// rejects them.
+		diags := checkDuplicatePrimaryBlocks(primaries)
+		if diags.HasErrors() {
+			return nil, warnings.Extend(diags), errors.New("error checking duplicate primary blocks")
+		}
+
+		// Process each override file sequentially. If multiple override files
+		// define the same block, each merges into the already-merged primary,
+		// matching Terraform's behavior.
+		for _, overridePath := range dir.overrides {
+			content, err := fs.ReadFile(origFS, overridePath)
+			if err != nil {
+				return nil, warnings, fmt.Errorf("error reading file %s: %w", overridePath, err)
+			}
+
+			f, diags := hclwrite.ParseConfig(content, overridePath, hcl.Pos{Line: 1, Column: 1})
+			if diags.HasErrors() {
+				return nil, warnings.Extend(diags), fmt.Errorf("error parsing file %s", overridePath)
+			}
+
+			for _, oblock := range f.Body().Blocks() {
+				// "locals" blocks are label-less and Terraform merges
+				// them at the individual attribute level, not at the
+				// block level.
+				if oblock.Type() == "locals" {
+					diags := mergeLocalsBlock(primaries, oblock, overridePath)
+					if diags.HasErrors() {
+						return nil, warnings.Extend(diags), errors.New("error merging locals block")
+					}
+					continue
+				}
+
+				key := blockKey(oblock.Type(), oblock.Labels())
+				matched := false
+				for _, primary := range primaries {
+					for _, pblock := range primary.file.Body().Blocks() {
+						if blockKey(pblock.Type(), pblock.Labels()) == key {
+							mergeBlock(pblock, oblock)
+							primary.modified = true
+							matched = true
+							break
+						}
+					}
+					if matched {
+						break
+					}
+				}
+				if !matched {
+					// Terraform requires every override block to have a corresponding
+					// primary block — override files can only modify, not create.
+					return nil, warnings, fmt.Errorf("override block %s in %s has no matching block in a primary file", key, overridePath)
+				}
+			}
+
+			hidden[overridePath] = true
+		}
+
+		// Collect modified primary files.
+		for _, p := range primaries {
+			if p.modified {
+				replaced[p.path] = p.file.Bytes()
+			}
+		}
+	}
+
+	return &overrideFS{
+		base:     origFS,
+		replaced: replaced,
+		hidden:   hidden,
+	}, warnings, nil
+}
+
+// mergeBlock applies override attributes and child blocks to a primary block
+// using Terraform's prepareContent semantics.
+//
+// - Attributes: each override attribute replaces the corresponding primary
+//   attribute, or is inserted if it does not exist in the primary block.
+//
+// - Child blocks: if override has any block of type X (including dynamic "X"),
+//   all blocks of type X and dynamic "X" are removed from primary. Then all
+//   override child blocks are appended — both replacing suppressed types and
+//   introducing entirely new block types not present in the primary.
+//
+// Ref: https://github.com/hashicorp/terraform/blob/7960f60d2147d43f5cf675a898438f6a6693da1b/internal/configs/module_merge_body.go#L76-L121
+//
+// Note: Terraform re-validates type/default compatibility after variable block
+// merge. We rely on Trivy's evaluator to do that during evaluation of the
+// merged HCL.
+func mergeBlock(primary, override *hclwrite.Block) {
+	// hclwrite preserves the formatting of the original block. If the
+	// primary body is empty and inline (e.g. `variable "x" {}`),
+	// inserting attributes places them on the same line as the
+	// opening brace, which HCL rejects. A newline defensively forces
+	// multi-line formatting.
+	if len(primary.Body().Attributes()) == 0 && len(primary.Body().Blocks()) == 0 {
+		primary.Body().AppendNewline()
+	}
+
+	// Merge attributes: override clobbers base.
+	for name, attr := range override.Body().Attributes() {
+		primary.Body().SetAttributeRaw(name, attr.Expr().BuildTokens(nil))
+	}
+
+	// Merge blocks: determine which child (nested) block types are
+	// overridden.
+	overriddenBlockTypes := make(map[string]bool)
+	for _, child := range override.Body().Blocks() {
+		// E.g. `dynamic "option" {...}` counts as type "option".
+		if child.Type() == "dynamic" && len(child.Labels()) > 0 {
+			overriddenBlockTypes[child.Labels()[0]] = true
+		} else {
+			overriddenBlockTypes[child.Type()] = true
+		}
+	}
+
+	if len(overriddenBlockTypes) == 0 {
+		return
+	}
+
+	// Remove overridden block types from primary.
+	// Gather the doomed blocks first so iteration never mutates the list.
+	var toRemove []*hclwrite.Block
+	for _, child := range primary.Body().Blocks() {
+		shouldRemove := false
+		if child.Type() == "dynamic" && len(child.Labels()) > 0 {
+			shouldRemove = overriddenBlockTypes[child.Labels()[0]]
+		} else {
+			shouldRemove = overriddenBlockTypes[child.Type()]
+		}
+		if shouldRemove {
+			toRemove = append(toRemove, child)
+		}
+	}
+	for _, block := range toRemove {
+		primary.Body().RemoveBlock(block)
+	}
+
+	// Append every override child block, including brand-new types.
+	for _, child := range override.Body().Blocks() {
+		primary.Body().AppendBlock(child)
+	}
+}
+
+// mergeLocalsBlock merges an override locals block into the primaries
+// at the individual attribute level. Each override attribute replaces
+// the matching attribute in whichever primary locals block defines
+// it. Attributes not found in any primary block produce an error,
+// matching Terraform's "Missing base local value definition to
+// override" behavior.
+// Ref: https://github.com/hashicorp/terraform/blob/7960f60d2147d43f5cf675a898438f6a6693da1b/internal/configs/module.go#L772-L784
+func mergeLocalsBlock(primaries []*primaryState, override *hclwrite.Block, overridePath string) hcl.Diagnostics {
+	var diags hcl.Diagnostics
+	for name, attr := range override.Body().Attributes() {
+		found := false
+		for _, primary := range primaries {
+			for _, pblock := range primary.file.Body().Blocks() {
+				if pblock.Type() != "locals" {
+					continue
+				}
+				// NOTE: We don't insert new attrs into an empty body.
+				// If that ever changes, empty inline blocks (e.g.
+				// `locals {}`) would need the same AppendNewline fix
+				// as mergeBlock, to avoid the same-line layout that
+				// HCL rejects.
+				if _, exists := pblock.Body().Attributes()[name]; exists {
+					pblock.Body().SetAttributeRaw(name, attr.Expr().BuildTokens(nil))
+					primary.modified = true
+					found = true
+					break
+				}
+			}
+			if found {
+				break
+			}
+		}
+		if !found {
+			diags = diags.Append(&hcl.Diagnostic{
+				Severity: hcl.DiagError,
+				Summary:  "Missing base local value definition to override",
+				Detail:   fmt.Sprintf("Local %q in %s has no base definition to override", name, overridePath),
+			})
+		}
+	}
+	return diags
+}
+
+// isOverrideFile returns true if the filename matches Terraform's override
+// file naming convention: "override.tf", "*_override.tf", and .tf.json variants.
+//
+// Ref: https://github.com/hashicorp/terraform/blob/7960f60d2147d43f5cf675a898438f6a6693da1b/internal/configs/parser_file_matcher.go#L161-L170
+func isOverrideFile(filename string) bool {
+	name := path.Base(filename)
+	ext := tfFileExt(name)
+	if ext == "" {
+		return false
+	}
+	baseName := name[:len(name)-len(ext)]
+	return baseName == "override" || strings.HasSuffix(baseName, "_override")
+}
+
+// tfFileExt returns the Terraform file extension (".tf" or ".tf.json") if
+// present, or "" otherwise.
+func tfFileExt(name string) string {
+	if strings.HasSuffix(name, ".tf.json") {
+		return ".tf.json"
+	}
+	if strings.HasSuffix(name, ".tf") {
+		return ".tf"
+	}
+	return ""
+}
+
+// blockKey returns a string that uniquely identifies a block for override
+// matching purposes. Two blocks with the same key represent the same logical
+// entity (one primary, one override).
+func blockKey(blockType string, labels []string) string {
+	if len(labels) == 0 {
+		return blockType
+	}
+	return blockType + "." + strings.Join(labels, ".")
+}
+
+// checkDuplicatePrimaryBlocks detects primary blocks that share the
+// same block key. Terraform validates uniqueness before override
+// merging, so duplicates are always invalid.
+// +// Ref: https://github.com/hashicorp/terraform/blob/7960f60d2147d43f5cf675a898438f6a6693da1b/internal/configs/module.go#L309-L437 +func checkDuplicatePrimaryBlocks(primaries []*primaryState) hcl.Diagnostics { + var diags hcl.Diagnostics + seen := make(map[string]string) // blockKey -> first file path + for _, primary := range primaries { + for _, block := range primary.file.Body().Blocks() { + // Multiple locals and terraform blocks are valid in + // Terraform — locals are containers for individual + // attributes, and terraform blocks accumulate their + // sub-components (required_providers, backend, etc.). + if block.Type() == "locals" || block.Type() == "terraform" { + continue + } + key := blockKey(block.Type(), block.Labels()) + if firstFile, exists := seen[key]; exists { + diags = diags.Append(&hcl.Diagnostic{ + Severity: hcl.DiagError, + Summary: fmt.Sprintf("Duplicate block %q", key), + Detail: fmt.Sprintf("Block %q defined in both %s and %s", key, firstFile, primary.path), + }) + } else { + seen[key] = primary.path + } + } + } + return diags +} diff --git a/override_test.go b/override_test.go new file mode 100644 index 0000000..7c00cf7 --- /dev/null +++ b/override_test.go @@ -0,0 +1,822 @@ +package preview + +import ( + "io" + "io/fs" + "strings" + "testing" + "testing/fstest" + + "github.com/hashicorp/hcl/v2" + "github.com/hashicorp/hcl/v2/hclwrite" + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" +) + +func TestIsOverrideFile(t *testing.T) { + t.Parallel() + + for _, tc := range []struct { + name string + filename string + expected bool + }{ + {"override.tf", "override.tf", true}, + {"foo_override.tf", "foo_override.tf", true}, + {"override.tf.json", "override.tf.json", true}, + {"foo_override.tf.json", "foo_override.tf.json", true}, + {"main.tf", "main.tf", false}, + {"overrides.tf", "overrides.tf", false}, + {"my_override_file.tf", "my_override_file.tf", false}, + {"no extension", "override", false}, + {"go file", 
"override.go", false}, + } { + t.Run(tc.name, func(t *testing.T) { + t.Parallel() + assert.Equal(t, tc.expected, isOverrideFile(tc.filename)) + }) + } +} + +func TestBlockKey(t *testing.T) { + t.Parallel() + + for _, tc := range []struct { + name string + blockType string + labels []string + expected string + }{ + {"no labels", "terraform", nil, "terraform"}, + {"one label", "variable", []string{"env"}, "variable.env"}, + {"two labels", "data", []string{"coder_parameter", "region"}, "data.coder_parameter.region"}, + } { + t.Run(tc.name, func(t *testing.T) { + t.Parallel() + assert.Equal(t, tc.expected, blockKey(tc.blockType, tc.labels)) + }) + } +} + +func TestMergeBlock(t *testing.T) { + t.Parallel() + + // attrValue returns the text representation of an attribute's value. + attrValue := func(t *testing.T, attrs map[string]*hclwrite.Attribute, name string) string { + t.Helper() + a, ok := attrs[name] + require.True(t, ok, "attribute %q not found", name) + // trim because BuildTokens preserves the leading whitespace. + return strings.TrimSpace(string(a.Expr().BuildTokens(nil).Bytes())) + } + + // parseBlock parses HCL source and returns the first block. 
+ parseBlock := func(t *testing.T, src string) *hclwrite.Block { + t.Helper() + f, diags := hclwrite.ParseConfig([]byte(src), "test.tf", hcl.Pos{Line: 1, Column: 1}) + require.False(t, diags.HasErrors(), diags.Error()) + return f.Body().Blocks()[0] + } + + t.Run("AttributeClobber", func(t *testing.T) { + t.Parallel() + primary := parseBlock(t, `resource "a" "b" { + x = 1 + y = 2 +}`) + override := parseBlock(t, `resource "a" "b" { + y = 3 +}`) + mergeBlock(primary, override) + + attrs := primary.Body().Attributes() + assert.Equal(t, "1", attrValue(t, attrs, "x")) + assert.Equal(t, "3", attrValue(t, attrs, "y")) + }) + + t.Run("AttributeInsertion", func(t *testing.T) { + t.Parallel() + primary := parseBlock(t, `resource "a" "b" { + x = 1 +}`) + override := parseBlock(t, `resource "a" "b" { + z = "new" +}`) + mergeBlock(primary, override) + + attrs := primary.Body().Attributes() + require.Contains(t, attrs, "x") + require.Contains(t, attrs, "z") + }) + + t.Run("NestedBlockSuppression", func(t *testing.T) { + t.Parallel() + primary := parseBlock(t, `data "coder_parameter" "disk" { + name = "disk" + option { + name = "10GB" + value = 10 + } + option { + name = "20GB" + value = 20 + } +}`) + override := parseBlock(t, `data "coder_parameter" "disk" { + option { + name = "30GB" + value = 30 + } +}`) + mergeBlock(primary, override) + + // Primary's two options should be replaced by override's single option. 
+ blocks := primary.Body().Blocks() + require.Len(t, blocks, 1) + assert.Equal(t, "option", blocks[0].Type()) + assert.Equal(t, "30", attrValue(t, blocks[0].Body().Attributes(), "value")) + }) + + t.Run("DynamicStaticSuppression", func(t *testing.T) { + t.Parallel() + primary := parseBlock(t, `resource "a" "b" { + option { + name = "static" + } +}`) + override := parseBlock(t, `resource "a" "b" { + dynamic "option" { + for_each = var.options + content { + name = option.value + } + } +}`) + mergeBlock(primary, override) + + // Static "option" should be removed and replaced by dynamic "option". + blocks := primary.Body().Blocks() + require.Len(t, blocks, 1) + assert.Equal(t, "dynamic", blocks[0].Type()) + require.Len(t, blocks[0].Labels(), 1) + assert.Equal(t, "option", blocks[0].Labels()[0]) + }) + + t.Run("StaticDynamicSuppression", func(t *testing.T) { + t.Parallel() + primary := parseBlock(t, `resource "a" "b" { + dynamic "option" { + for_each = var.options + content { + name = option.value + } + } +}`) + override := parseBlock(t, `resource "a" "b" { + option { + name = "static" + } +}`) + mergeBlock(primary, override) + + // Dynamic "option" should be removed and replaced by static "option". 
+ blocks := primary.Body().Blocks() + require.Len(t, blocks, 1) + assert.Equal(t, "option", blocks[0].Type()) + assert.Empty(t, blocks[0].Labels()) + }) + + t.Run("MixedStaticDynamicSuppression", func(t *testing.T) { + t.Parallel() + primary := parseBlock(t, `resource "a" "b" { + option { + name = "static" + } + dynamic "option" { + for_each = var.list + content { + name = option.value + } + } +}`) + override := parseBlock(t, `resource "a" "b" { + option { + name = "replaced" + } +}`) + mergeBlock(primary, override) + + blocks := primary.Body().Blocks() + require.Len(t, blocks, 1) + assert.Equal(t, "option", blocks[0].Type()) + assert.Empty(t, blocks[0].Labels()) + }) + + t.Run("MixedStaticDynamicSuppressionByDynamic", func(t *testing.T) { + t.Parallel() + primary := parseBlock(t, `resource "a" "b" { + option { + name = "static" + } + dynamic "option" { + for_each = var.list + content { + name = option.value + } + } +}`) + override := parseBlock(t, `resource "a" "b" { + dynamic "option" { + for_each = var.other + content { + name = option.value + } + } +}`) + mergeBlock(primary, override) + + blocks := primary.Body().Blocks() + require.Len(t, blocks, 1) + assert.Equal(t, "dynamic", blocks[0].Type()) + require.Len(t, blocks[0].Labels(), 1) + assert.Equal(t, "option", blocks[0].Labels()[0]) + }) + + t.Run("StaticSuppressionByMixedOverride", func(t *testing.T) { + t.Parallel() + primary := parseBlock(t, `resource "a" "b" { + option { + name = "old" + } +}`) + override := parseBlock(t, `resource "a" "b" { + option { + name = "static" + } + dynamic "option" { + for_each = var.list + content { + name = option.value + } + } +}`) + mergeBlock(primary, override) + + blocks := primary.Body().Blocks() + require.Len(t, blocks, 2) + assert.Equal(t, "option", blocks[0].Type()) + assert.Equal(t, "dynamic", blocks[1].Type()) + assert.Equal(t, "option", blocks[1].Labels()[0]) + }) + + t.Run("NoNestedBlocksInOverride", func(t *testing.T) { + t.Parallel() + primary := parseBlock(t, 
`resource "a" "b" { + x = 1 + nested { + y = 2 + } +}`) + override := parseBlock(t, `resource "a" "b" { + x = 99 +}`) + mergeBlock(primary, override) + + // Primary's nested blocks should be preserved when override has none. + blocks := primary.Body().Blocks() + require.Len(t, blocks, 1) + assert.Equal(t, "nested", blocks[0].Type()) + // Attribute should still be overridden. + assert.Equal(t, "99", attrValue(t, primary.Body().Attributes(), "x")) + }) + + t.Run("EmptyOverride", func(t *testing.T) { + t.Parallel() + primary := parseBlock(t, `resource "a" "b" { + x = 1 + nested { + y = 2 + } +}`) + override := parseBlock(t, `resource "a" "b" {}`) + mergeBlock(primary, override) + + // Nothing should change — attributes and blocks preserved. + assert.Equal(t, "1", attrValue(t, primary.Body().Attributes(), "x")) + blocks := primary.Body().Blocks() + require.Len(t, blocks, 1) + assert.Equal(t, "nested", blocks[0].Type()) + }) + + t.Run("EmptyInlineBlock", func(t *testing.T) { + t.Parallel() + primary := parseBlock(t, `variable "sizes" {}`) + override := parseBlock(t, `variable "sizes" { + type = set(string) + default = ["a", "b"] +}`) + mergeBlock(primary, override) + + attrs := primary.Body().Attributes() + require.Contains(t, attrs, "type") + require.Contains(t, attrs, "default") + // Verify the output is valid HCL by re-parsing it. 
+		_, diags := hclwrite.ParseConfig(primary.BuildTokens(nil).Bytes(), "test.tf", hcl.Pos{Line: 1, Column: 1})
+		require.False(t, diags.HasErrors(), diags.Error())
+	})
+
+	t.Run("NewNestedBlockType", func(t *testing.T) {
+		t.Parallel()
+		primary := parseBlock(t, `data "coder_parameter" "foo" {
+  name = "foo"
+  type = "number"
+}`)
+		override := parseBlock(t, `data "coder_parameter" "foo" {
+  validation {
+    monotonic = "increasing"
+  }
+}`)
+		mergeBlock(primary, override)
+
+		attrs := primary.Body().Attributes()
+		assert.Equal(t, `"foo"`, attrValue(t, attrs, "name"))
+		blocks := primary.Body().Blocks()
+		require.Len(t, blocks, 1)
+		assert.Equal(t, "validation", blocks[0].Type())
+	})
+}
+
+// readFile reads a file from an fs.FS via Open+ReadAll (since overrideFS
+// doesn't implement fs.ReadFileFS).
+func readFile(t *testing.T, fsys fs.FS, name string) []byte {
+	t.Helper()
+	f, err := fsys.Open(name)
+	require.NoError(t, err)
+	defer f.Close()
+	// io.ReadAll loops until EOF; a single f.Read call is not
+	// guaranteed to fill the whole buffer for every fs.FS
+	// implementation, so reading Stat().Size() bytes in one Read
+	// could silently return a short buffer.
+	buf, err := io.ReadAll(f)
+	require.NoError(t, err)
+	return buf
+}
+
+// testMergeOverrides wraps mergeOverrides and asserts the contract:
+// error-severity diagnostics are always accompanied by a non-nil error.
+func testMergeOverrides(t *testing.T, fsys fs.FS) (fs.FS, hcl.Diagnostics, error) { + t.Helper() + result, diags, err := mergeOverrides(fsys) + if err == nil { + for _, d := range diags { + if d.Severity == hcl.DiagError { + t.Fatal("mergeOverrides returned error diagnostic without non-nil error") + } + } + } + return result, diags, err +} + +func TestMergeOverrideFiles(t *testing.T) { + t.Parallel() + + t.Run("EmptyOverrideFile", func(t *testing.T) { + t.Parallel() + fsys := fstest.MapFS{ + "main.tf": &fstest.MapFile{Data: []byte(`resource "a" "b" { x = 1 }`)}, + "override.tf": &fstest.MapFile{Data: []byte(``)}, + } + result, diags, err := testMergeOverrides(t, fsys) + require.NoError(t, err) + assert.Empty(t, diags) + + content := string(readFile(t, result, "main.tf")) + assert.Contains(t, content, "x = 1") + }) + + t.Run("NoOverrideFiles", func(t *testing.T) { + t.Parallel() + original := fstest.MapFS{ + "main.tf": &fstest.MapFile{Data: []byte(`resource "a" "b" { x = 1 }`)}, + } + result, diags, err := testMergeOverrides(t, original) + require.NoError(t, err) + assert.Empty(t, diags) + // Should return the exact same FS when there are no overrides. + assert.Equal(t, original, result) + }) + + t.Run("UnmatchedOverrideBlock", func(t *testing.T) { + t.Parallel() + fsys := fstest.MapFS{ + "main.tf": &fstest.MapFile{Data: []byte(`resource "a" "b" { x = 1 }`)}, + "override.tf": &fstest.MapFile{Data: []byte(`resource "c" "d" { + y = 2 +}`)}, + } + _, _, err := testMergeOverrides(t, fsys) + require.Error(t, err) + assert.Contains(t, err.Error(), "no matching block") + }) + + t.Run("BasicAttributeMerge", func(t *testing.T) { + t.Parallel() + fsys := fstest.MapFS{ + "main.tf": &fstest.MapFile{Data: []byte(`resource "a" "b" { + x = 1 + y = 2 +}`)}, + "override.tf": &fstest.MapFile{Data: []byte(`resource "a" "b" { + y = 99 +}`)}, + } + result, _, err := testMergeOverrides(t, fsys) + require.NoError(t, err) + + // Read the merged primary file. 
+ content := readFile(t, result, "main.tf") + assert.Contains(t, string(content), "99") + }) + + t.Run("OverrideFileHidden", func(t *testing.T) { + t.Parallel() + fsys := fstest.MapFS{ + "main.tf": &fstest.MapFile{Data: []byte(`resource "a" "b" { x = 1 }`)}, + "override.tf": &fstest.MapFile{Data: []byte(`resource "a" "b" { x = 2 }`)}, + } + result, _, err := testMergeOverrides(t, fsys) + require.NoError(t, err) + + _, err = result.Open("override.tf") + require.Error(t, err) + assert.ErrorIs(t, err, fs.ErrNotExist) + }) + + t.Run("DirectoryListingFiltersHidden", func(t *testing.T) { + t.Parallel() + fsys := fstest.MapFS{ + "main.tf": &fstest.MapFile{Data: []byte(`resource "a" "b" { x = 1 }`)}, + "foo_override.tf": &fstest.MapFile{Data: []byte(`resource "a" "b" { x = 2 }`)}, + "other.txt": &fstest.MapFile{Data: []byte("hello")}, + } + result, _, err := testMergeOverrides(t, fsys) + require.NoError(t, err) + + f, err := result.Open(".") + require.NoError(t, err) + defer f.Close() + + rdf, ok := f.(fs.ReadDirFile) + require.True(t, ok) + + entries, err := rdf.ReadDir(-1) + require.NoError(t, err) + + names := make([]string, 0, len(entries)) + for _, e := range entries { + names = append(names, e.Name()) + } + assert.Contains(t, names, "main.tf") + assert.Contains(t, names, "other.txt") + assert.NotContains(t, names, "foo_override.tf") + }) + + t.Run("SequentialOverrideMerge", func(t *testing.T) { + t.Parallel() + // Two override files modify the same block. Because WalkDir processes + // them in lexical order (a_ before b_), both attributes should be + // present in the merged result. 
+ fsys := fstest.MapFS{ + "main.tf": &fstest.MapFile{Data: []byte(`resource "a" "b" { + original = "yes" +}`)}, + "a_override.tf": &fstest.MapFile{Data: []byte(`resource "a" "b" { + from_a = "aaa" +}`)}, + "b_override.tf": &fstest.MapFile{Data: []byte(`resource "a" "b" { + from_b = "bbb" +}`)}, + } + result, _, err := testMergeOverrides(t, fsys) + require.NoError(t, err) + + content := readFile(t, result, "main.tf") + merged := string(content) + assert.Contains(t, merged, "original") + assert.Contains(t, merged, "from_a") + assert.Contains(t, merged, "from_b") + }) + + t.Run("SubdirectoryOverrides", func(t *testing.T) { + t.Parallel() + fsys := fstest.MapFS{ + "main.tf": &fstest.MapFile{Data: []byte(`resource "root" "r" { x = 1 }`)}, + "modules/child/main.tf": &fstest.MapFile{Data: []byte(`resource "child" "c" { + y = 1 +}`)}, + "modules/child/override.tf": &fstest.MapFile{Data: []byte(`resource "child" "c" { + y = 42 +}`)}, + } + result, _, err := testMergeOverrides(t, fsys) + require.NoError(t, err) + + // Root file should be unchanged (no overrides in root). + rootContent := readFile(t, result, "main.tf") + assert.Contains(t, string(rootContent), "x = 1") + + // Child module should have merged content. + childContent := readFile(t, result, "modules/child/main.tf") + assert.Contains(t, string(childContent), "y = 42") + + // Override file in subdirectory should be hidden. + _, err = result.Open("modules/child/override.tf") + require.Error(t, err) + assert.ErrorIs(t, err, fs.ErrNotExist) + }) + + t.Run("TfJsonOverrideWarning", func(t *testing.T) { + t.Parallel() + fsys := fstest.MapFS{ + "main.tf": &fstest.MapFile{Data: []byte(`resource "a" "b" { x = 1 }`)}, + "override.tf.json": &fstest.MapFile{Data: []byte(`{}`)}, + } + result, diags, err := testMergeOverrides(t, fsys) + require.NoError(t, err) + + // Should warn about the .tf.json override file. 
+ require.Len(t, diags, 1) + assert.Equal(t, hcl.DiagWarning, diags[0].Severity) + assert.Contains(t, diags[0].Detail, "override.tf.json") + + // Original FS returned since no .tf overrides exist. + assert.Equal(t, fsys, result) + }) + + t.Run("TfJsonPrimaryNoWarning", func(t *testing.T) { + t.Parallel() + fsys := fstest.MapFS{ + "main.tf": &fstest.MapFile{Data: []byte(`resource "a" "b" { x = 1 }`)}, + "other.tf.json": &fstest.MapFile{Data: []byte(`{}`)}, + } + result, diags, err := testMergeOverrides(t, fsys) + require.NoError(t, err) + + // No warnings for primary .tf.json files because there are no overrides. + assert.Empty(t, diags) + + // Original FS returned since no overrides exist. + assert.Equal(t, fsys, result) + }) + + t.Run("TfJsonFilesPassThrough", func(t *testing.T) { + t.Parallel() + jsonContent := []byte(`{"resource": {"a": {"b": {"x": 1}}}}`) + fsys := fstest.MapFS{ + "main.tf": &fstest.MapFile{Data: []byte(`resource "a" "b" { x = 1 }`)}, + "extra.tf.json": &fstest.MapFile{Data: jsonContent}, + "override.tf": &fstest.MapFile{Data: []byte(`resource "a" "b" { x = 2 }`)}, + } + result, diags, err := testMergeOverrides(t, fsys) + require.NoError(t, err) + + require.Len(t, diags, 1) + assert.Contains(t, diags[0].Summary, "Primary file uses .tf.json format") + + // .tf.json file should still be accessible in the result FS. + content := readFile(t, result, "extra.tf.json") + assert.Equal(t, jsonContent, content) + }) + + t.Run("MixedTfAndTfJsonOverrides", func(t *testing.T) { + t.Parallel() + fsys := fstest.MapFS{ + "main.tf": &fstest.MapFile{Data: []byte(`resource "a" "b" { + x = 1 +}`)}, + "a_override.tf": &fstest.MapFile{Data: []byte(`resource "a" "b" { x = 99 }`)}, + "b_override.tf.json": &fstest.MapFile{Data: []byte(`{}`)}, + } + result, diags, err := testMergeOverrides(t, fsys) + require.NoError(t, err) + + // Warning for the .tf.json override only. 
+ require.Len(t, diags, 1) + assert.Equal(t, hcl.DiagWarning, diags[0].Severity) + assert.Contains(t, diags[0].Detail, "b_override.tf.json") + + // .tf override should still be merged. + content := readFile(t, result, "main.tf") + assert.Contains(t, string(content), "99") + }) + + t.Run("LocalsAttributeLevelMerge", func(t *testing.T) { + t.Parallel() + fsys := fstest.MapFS{ + "main.tf": &fstest.MapFile{Data: []byte(`locals { + foo = "original" + bar = "keep" +}`)}, + "override.tf": &fstest.MapFile{Data: []byte(`locals { + foo = "overridden" +}`)}, + } + result, diags, err := testMergeOverrides(t, fsys) + require.NoError(t, err) + assert.Empty(t, diags) + + content := string(readFile(t, result, "main.tf")) + assert.Contains(t, content, `"overridden"`) + assert.Contains(t, content, `"keep"`) + assert.NotContains(t, content, `"original"`) + }) + + t.Run("LocalsAcrossMultipleFiles", func(t *testing.T) { + t.Parallel() + fsys := fstest.MapFS{ + "a.tf": &fstest.MapFile{Data: []byte(`locals { + from_a = "aaa" +}`)}, + "b.tf": &fstest.MapFile{Data: []byte(`locals { + from_b = "bbb" +}`)}, + "override.tf": &fstest.MapFile{Data: []byte(`locals { + from_a = "overridden_a" + from_b = "overridden_b" +}`)}, + } + result, diags, err := testMergeOverrides(t, fsys) + require.NoError(t, err) + assert.Empty(t, diags) + + contentA := string(readFile(t, result, "a.tf")) + assert.Contains(t, contentA, `"overridden_a"`) + + contentB := string(readFile(t, result, "b.tf")) + assert.Contains(t, contentB, `"overridden_b"`) + }) + + t.Run("LocalsMultiplePrimaryAndOverrideBlocks", func(t *testing.T) { + t.Parallel() + fsys := fstest.MapFS{ + "main.tf": &fstest.MapFile{Data: []byte(` +locals { + x = "x_orig" +} + +locals { + y = "y_orig" +}`)}, + "other.tf": &fstest.MapFile{Data: []byte(`locals { + z = "z_orig" +}`)}, + "a_override.tf": &fstest.MapFile{Data: []byte(`locals { + x = "x_from_a" + z = "z_from_a" +}`)}, + "b_override.tf": &fstest.MapFile{Data: []byte(`locals { + y = "y_from_b" 
+}`)}, + } + result, diags, err := testMergeOverrides(t, fsys) + require.NoError(t, err) + assert.Empty(t, diags) + + mainContent := string(readFile(t, result, "main.tf")) + assert.Contains(t, mainContent, `"x_from_a"`) + assert.Contains(t, mainContent, `"y_from_b"`) + + otherContent := string(readFile(t, result, "other.tf")) + assert.Contains(t, otherContent, `"z_from_a"`) + }) + + t.Run("LocalsNewAttributeErrors", func(t *testing.T) { + t.Parallel() + fsys := fstest.MapFS{ + "main.tf": &fstest.MapFile{Data: []byte(`locals { + existing = "yes" +}`)}, + "override.tf": &fstest.MapFile{Data: []byte(`locals { + nonexistent = "nope" +}`)}, + } + result, diags, err := testMergeOverrides(t, fsys) + require.Error(t, err) + assert.Nil(t, result) + require.Len(t, diags, 1) + assert.Equal(t, hcl.DiagError, diags[0].Severity) + assert.Contains(t, diags[0].Summary, "Missing base local") + }) + + t.Run("DuplicatePrimaryBlockErrors", func(t *testing.T) { + t.Parallel() + fsys := fstest.MapFS{ + "main.tf": &fstest.MapFile{Data: []byte(`data "coder_parameter" "foo" { + name = "foo" +} + +data "coder_parameter" "foo" { + name = "foo" +}`)}, + "override.tf": &fstest.MapFile{Data: []byte(`data "coder_parameter" "foo" { + name = "bar" +}`)}, + } + result, diags, err := testMergeOverrides(t, fsys) + require.Error(t, err) + assert.Nil(t, result, "FS should be nil so caller keeps original") + require.Len(t, diags, 1) + assert.Equal(t, hcl.DiagError, diags[0].Severity) + assert.Contains(t, diags[0].Summary, "Duplicate block") + }) + + t.Run("DuplicatePrimaryLocalsNoError", func(t *testing.T) { + t.Parallel() + fsys := fstest.MapFS{ + "main.tf": &fstest.MapFile{Data: []byte(`locals { + a = 1 +} + +locals { + b = 2 +}`)}, + "override.tf": &fstest.MapFile{Data: []byte(`locals { + a = 99 +}`)}, + } + result, diags, err := testMergeOverrides(t, fsys) + require.NoError(t, err) + assert.Empty(t, diags) + + content := string(readFile(t, result, "main.tf")) + assert.Contains(t, content, "a = 99") 
+ }) + + t.Run("DuplicatePrimaryTerraformNoError", func(t *testing.T) { + t.Parallel() + fsys := fstest.MapFS{ + "main.tf": &fstest.MapFile{Data: []byte(`terraform { + required_version = ">= 1.0" +} + +terraform { + required_providers { + aws = { + source = "hashicorp/aws" + } + } +} + +resource "a" "b" { x = 1 }`)}, + "override.tf": &fstest.MapFile{Data: []byte(`resource "a" "b" { x = 2 }`)}, + } + result, diags, err := testMergeOverrides(t, fsys) + require.NoError(t, err) + assert.Empty(t, diags) + + content := string(readFile(t, result, "main.tf")) + assert.Contains(t, content, "x = 2") + }) +} + +// TestFilteredReadDir verifies that filteredDir.ReadDir(n) with n > 0 +// never returns an empty slice with nil error, which would violate +// the fs.ReadDirFile contract. +func TestFilteredReadDir(t *testing.T) { + t.Parallel() + // Create an FS where every other file is hidden (override files). + fsys := fstest.MapFS{ + "main.tf": &fstest.MapFile{Data: []byte(`resource "a" "b" { x = 1 }`)}, + "a_override.tf": &fstest.MapFile{Data: []byte(`resource "a" "b" { x = 2 }`)}, + "other.tf": &fstest.MapFile{Data: []byte(`resource "c" "d" { y = 1 }`)}, + "b_override.tf": &fstest.MapFile{Data: []byte(`resource "c" "d" { y = 2 }`)}, + } + result, _, err := testMergeOverrides(t, fsys) + require.NoError(t, err) + + f, err := result.Open(".") + require.NoError(t, err) + defer f.Close() + + rdf, ok := f.(fs.ReadDirFile) + require.True(t, ok) + + // Read one entry at a time - should never get empty slice with + // nil error. 
+ var names []string + for { + entries, err := rdf.ReadDir(1) + for _, e := range entries { + names = append(names, e.Name()) + } + if err == io.EOF { + break + } + require.NoError(t, err) + assert.NotEmpty(t, entries, "ReadDir(1) returned empty slice with nil error") + } + + assert.Contains(t, names, "main.tf") + assert.Contains(t, names, "other.tf") + assert.NotContains(t, names, "a_override.tf") + assert.NotContains(t, names, "b_override.tf") +} diff --git a/overridefs.go b/overridefs.go new file mode 100644 index 0000000..079cbb0 --- /dev/null +++ b/overridefs.go @@ -0,0 +1,115 @@ +package preview + +import ( + "bytes" + "fmt" + "io/fs" + "path" + "time" +) + +var ( + _ fs.FS = (*overrideFS)(nil) + _ fs.File = (*memFile)(nil) + _ fs.FileInfo = (*memFileInfo)(nil) + _ fs.ReadDirFile = (*filteredDir)(nil) +) + +// overrideFS wraps a base fs.FS, serving modified content for merged files +// and hiding consumed override files. +type overrideFS struct { + base fs.FS + replaced map[string][]byte // path -> merged content + hidden map[string]bool // paths to exclude +} + +func (o *overrideFS) Open(name string) (fs.File, error) { + if o.hidden[name] { + return nil, &fs.PathError{Op: "open", Path: name, Err: fs.ErrNotExist} + } + + if content, ok := o.replaced[name]; ok { + return &memFile{ + name: path.Base(name), // Stat().Name() returns base name + content: bytes.NewReader(content), + size: int64(len(content)), + }, nil + } + + f, err := o.base.Open(name) + if err != nil { + return nil, err + } + + // If this is a directory, wrap it to filter hidden entries. + info, err := f.Stat() + if err != nil { + f.Close() + return nil, &fs.PathError{Op: "open", Path: name, Err: fmt.Errorf("stat: %w", err)} + } + if info.IsDir() { + if rdf, ok := f.(fs.ReadDirFile); ok { + return &filteredDir{ReadDirFile: rdf, hidden: o.hidden, dirPath: name}, nil + } + } + + return f, nil +} + +// memFile is an in-memory file that implements fs.File. 
+type memFile struct { + name string + content *bytes.Reader + size int64 +} + +func (f *memFile) Stat() (fs.FileInfo, error) { + return &memFileInfo{name: f.name, size: f.size}, nil +} +func (f *memFile) Read(b []byte) (int, error) { return f.content.Read(b) } +func (f *memFile) Close() error { return nil } + +// memFileInfo implements fs.FileInfo for in-memory files. +type memFileInfo struct { + name string + size int64 +} + +func (fi *memFileInfo) Name() string { return fi.name } +func (fi *memFileInfo) Size() int64 { return fi.size } +func (fi *memFileInfo) Mode() fs.FileMode { return 0o444 } +func (fi *memFileInfo) ModTime() time.Time { return time.Time{} } +func (fi *memFileInfo) IsDir() bool { return false } +func (fi *memFileInfo) Sys() any { return nil } + +// filteredDir wraps a ReadDirFile to filter out hidden entries. +type filteredDir struct { + fs.ReadDirFile + hidden map[string]bool + dirPath string +} + +func (d *filteredDir) ReadDir(n int) ([]fs.DirEntry, error) { + entries, err := d.ReadDirFile.ReadDir(n) + filtered := d.filter(entries) + + // If n > 0, an empty slice must have a non-nil error per the + // fs.ReadDirFile contract. Keep reading until we have results + // or the underlying reader signals EOF/error. 
+ for n > 0 && len(filtered) == 0 && err == nil { + entries, err = d.ReadDirFile.ReadDir(n) + filtered = d.filter(entries) + } + + return filtered, err +} + +func (d *filteredDir) filter(entries []fs.DirEntry) []fs.DirEntry { + filtered := make([]fs.DirEntry, 0, len(entries)) + for _, entry := range entries { + if !d.hidden[path.Join(d.dirPath, entry.Name())] { + filtered = append(filtered, entry) + } + } + return filtered +} diff --git a/preview.go b/preview.go index f157509..32ac43f 100644 --- a/preview.go +++ b/preview.go @@ -156,6 +156,29 @@ func Preview(ctx context.Context, input Input, dir fs.FS) (output *Output, diagn } }() + // Merge override files into primary files before parsing, so + // Trivy sees post-merge content with no duplicate blocks. This + // replicates Terraform's override file semantics. + // + // TODO: It'd be nice if Trivy did this for us. + mergedDir, overrideDiags, err := mergeOverrides(dir) + // Override merging is best-effort; downgrade all override error + // diagnostics to warnings so they never abort the preview. 
+ for _, d := range overrideDiags { + if d.Severity == hcl.DiagError { + d.Severity = hcl.DiagWarning + } + } + if err != nil { + overrideDiags = overrideDiags.Append(&hcl.Diagnostic{ + Severity: hcl.DiagWarning, + Summary: "Override file merging disabled due to an error", + Detail: err.Error(), + }) + } else { + dir = mergedDir + } + varFiles, err := tfvars.TFVarFiles("", dir) if err != nil { return nil, hcl.Diagnostics{ @@ -267,7 +290,7 @@ func Preview(ctx context.Context, input Input, dir fs.FS) (output *Output, diagn Presets: preValidPresets, Files: p.Files(), Variables: vars, - }, diags.Extend(rpDiags).Extend(tagDiags) + }, diags.Extend(overrideDiags).Extend(rpDiags).Extend(tagDiags) } func (i Input) RichParameterValue(key string) (string, bool) { diff --git a/preview_test.go b/preview_test.go index 1b073ca..49a7fb6 100644 --- a/preview_test.go +++ b/preview_test.go @@ -657,6 +657,27 @@ func Test_Extract(t *testing.T) { prebuildCount(1), }, }, + { + name: "override", + dir: "override", + params: map[string]assertParam{ + "region": ap().value("ap").def("ap").optVals("ap"), + "size": ap().value("50").def("50").optVals("10", "50", "100"), + "static_to_dynamic": ap().value("a").def("a").optVals("a", "b", "c"), + "dynamic_to_static": ap().value("x").def("x").optVals("x", "y"), + }, + presets: map[string]assertPreset{ + "dev-override": aPre().value("region", "ap"), + }, + expTags: map[string]string{ + "env": "production", + "team": "mango", + }, + variables: map[string]assertVariable{ + "string_to_number": av().def(cty.NumberIntVal(40)).typeEq(cty.Number), + "zones": av().def(cty.SetVal([]cty.Value{cty.StringVal("a"), cty.StringVal("b"), cty.StringVal("c")})).typeEq(cty.Set(cty.String)), + }, + }, } { t.Run(tc.name, func(t *testing.T) { t.Parallel() diff --git a/testdata/override/a_override.tf b/testdata/override/a_override.tf new file mode 100644 index 0000000..bbb6071 --- /dev/null +++ b/testdata/override/a_override.tf @@ -0,0 +1,80 @@ +terraform { + # An 
override. + required_version = ">= 1.1" +} + +# Override region's default (will be overridden again by override.tf). +data "coder_parameter" "region" { + default = "eu" +} + +# Override size's options. +data "coder_parameter" "size" { + option { + name = "10GB" + value = 10 + } + option { + name = "40GB" + value = 40 + } +} + +locals { + # Override the local value in main.tf. + default_size = 50 +} + +# Override size again in the same file — adds 50GB option that makes the +# overridden default valid. +data "coder_parameter" "size" { + option { + name = "10GB" + value = 10 + } + option { + name = "50GB" + value = 50 + } + option { + name = "100GB" + value = 100 + } +} + +# Override tags. +data "coder_workspace_tags" "tags" { + tags = { + "env" = "production" + "team" = "mango" + } +} + +# Override static options with dynamic. +data "coder_parameter" "static_to_dynamic" { + dynamic "option" { + for_each = var.zones + content { + name = option.value + value = option.value + } + } +} + +# Override dynamic options with static. +data "coder_parameter" "dynamic_to_static" { + option { + name = "X" + value = "x" + } + option { + name = "Y" + value = "y" + } +} + +# Override variable. +variable "string_to_number" { + type = number + default = 40 +} diff --git a/testdata/override/main.tf b/testdata/override/main.tf new file mode 100644 index 0000000..4cc12c7 --- /dev/null +++ b/testdata/override/main.tf @@ -0,0 +1,108 @@ +terraform { + required_providers { + coder = { + source = "coder/coder" + version = "2.4.0-pre0" + } + } +} + +terraform { + required_version = ">= 1.0" +} + +data "coder_parameter" "region" { + name = "region" + type = "string" + default = "us" + + option { + name = "US" + value = "us" + } + option { + name = "EU" + value = "eu" + } +} + +locals { + # Just so that we have >1 locals blocks. 
+ foo = "bar" +} + +locals { + # Will be overridden in a_override.tf + default_size = 70 +} + +data "coder_parameter" "size" { + name = "size" + type = "number" + # Invalid value should become valid once the options and locals are overridden. + default = local.default_size + + option { + name = "10GB" + value = 10 + } + option { + name = "20GB" + value = 20 + } +} + +data "coder_workspace_preset" "dev" { + name = "dev" + parameters = { + region = "us" + } +} + +data "coder_workspace_tags" "tags" { + tags = { + "env" = "staging" + } +} + +variable "zones" { + type = set(string) + default = ["a", "b", "c"] +} + +# Static options, will be overridden by dynamic "option" in a_override. +data "coder_parameter" "static_to_dynamic" { + name = "static_to_dynamic" + type = "string" + default = "a" + + option { + name = "A" + value = "a" + } + option { + name = "B" + value = "b" + } +} + +# Dynamic options, will be overridden by static option blocks in a_override. +data "coder_parameter" "dynamic_to_static" { + name = "dynamic_to_static" + type = "string" + # Invalid value should become valid once the options are overridden. + default = "x" + + dynamic "option" { + for_each = var.zones + content { + name = option.value + value = option.value + } + } +} + +variable "string_to_number" { + type = string + default = "foo" +} diff --git a/testdata/override/override.tf b/testdata/override/override.tf new file mode 100644 index 0000000..1885d71 --- /dev/null +++ b/testdata/override/override.tf @@ -0,0 +1,17 @@ +# Override region's default again (sequential: us -> eu -> ap). +data "coder_parameter" "region" { + default = "ap" + + option { + name = "AP" + value = "ap" + } +} + +# Override preset. +data "coder_workspace_preset" "dev" { + name = "dev-override" + parameters = { + region = "ap" + } +}