From 543b91ba262c13a2b936ea1905fc6005d6bc1af4 Mon Sep 17 00:00:00 2001 From: Ville Vesilehto Date: Sat, 20 Sep 2025 05:34:17 +0300 Subject: [PATCH] fix(caddyfile): bump expansion limits (#9) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit A single maxSnippetExpansions of 1000 could be too low for some real world scenarios. Snippet-based configuration can be used broadly across large CoreDNS Corefiles. We cannot implement a pure “import depth” integer with the current splice-then-continue parser because imports are expanded by lexing and splicing tokens into the stream, not by recursive function calls. Instead, we introduce high default per-directive caps for snippet and file imports, keeping globs counted as one. Prevent trivial snippet self-import. Add tests that lower caps to validate failure on cycles and success with large glob imports. This should present a good enough middleground, instead of rewriting the parser. Signed-off-by: Ville Vesilehto --- caddyfile/parse.go | 9 +++++++-- caddyfile/parse_test.go | 31 +++++++++++++++++++++++++++++++ 2 files changed, 38 insertions(+), 2 deletions(-) diff --git a/caddyfile/parse.go b/caddyfile/parse.go index bd32934646d..ead7afccd00 100644 --- a/caddyfile/parse.go +++ b/caddyfile/parse.go @@ -60,7 +60,12 @@ type parser struct { } // maxSnippetExpansions is a hard cap to prevent excessively deep or cyclic snippet imports. -const maxSnippetExpansions = 1000 +// set as a variable to allow modifications for testing +var maxSnippetExpansions = 10000 + +// maxFileExpansions is a hard cap to prevent excessively deep or cyclic file imports. 
+// set as a variable to allow modifications for testing +var maxFileExpansions = 100000 func (p *parser) parseAll() ([]ServerBlock, error) { var blocks []ServerBlock @@ -268,7 +273,7 @@ func (p *parser) doImport() error { p.snippetExpansions++ importedTokens = p.definedSnippets[importPattern] } else { - if p.fileExpansions >= maxSnippetExpansions { - return p.Errf("maximum file import depth (%d) exceeded", maxSnippetExpansions) + if p.fileExpansions >= maxFileExpansions { + return p.Errf("maximum file import depth (%d) exceeded", maxFileExpansions) } p.fileExpansions++ diff --git a/caddyfile/parse_test.go b/caddyfile/parse_test.go index 9a12e5965a5..e5e6694174a 100644 --- a/caddyfile/parse_test.go +++ b/caddyfile/parse_test.go @@ -15,6 +15,7 @@ package caddyfile import ( + "fmt" "io/ioutil" "os" "path/filepath" @@ -22,6 +23,12 @@ import ( "testing" ) +func init() { + // set a lower limit for testing only + maxSnippetExpansions = 10 + maxFileExpansions = 10 +} + func TestAllTokens(t *testing.T) { tests := []struct { name string @@ -801,3 +808,27 @@ func TestFileImportCycleError(t *testing.T) { t.Fatalf("expected error for file import cycle, got nil") } } + +func TestFileImportDir(t *testing.T) { + dir, err := ioutil.TempDir("", t.Name()) + if err != nil { + t.Fatal(err) + } + defer os.RemoveAll(dir) + + // create 10x the maxFileExpansions files + // a single import with a glob should not error + for i := 0; i < maxFileExpansions*10; i++ { + fp := filepath.Join(dir, filepath.Base(dir)+"_"+fmt.Sprintf("%d", i)) + if err := ioutil.WriteFile(fp, []byte(""), 0644); err != nil { + t.Fatal(err) + } + } + + input := "import " + filepath.Join(dir, "*") + p := testParser(input) + _, err = p.parseAll() + if err != nil { + t.Fatalf("unexpected error importing temp dir via glob: %v", err) + } +}