diff --git a/.gitignore b/.gitignore index 7b993acb..883c0d3c 100644 --- a/.gitignore +++ b/.gitignore @@ -1,9 +1,18 @@ *.dll *.exe +*.zip +*.tar +*.tar.gz +*.tgz + .DS_Store example.tf -terraform.tfplan -terraform.tfstate + +.terraform/ +terraform.tf* +.terraform.* +*.plan + bin/ modules-dev/ /pkg/ @@ -13,16 +22,13 @@ website/build website/node_modules .vagrant/ *.backup -./*.tfstate -.terraform/ *.log *.bak *~ .*.swp -.idea +.idea/ *.iml *.test -*.iml archive/*.zip website/vendor diff --git a/README.md b/README.md index cb36fee5..c07be6ee 100644 --- a/README.md +++ b/README.md @@ -48,7 +48,7 @@ version it implements, and Terraform: The provided `GNUmakefile` defines additional commands generally useful during development, like for running tests, generating documentation, code formatting and linting. -Taking a look at it's content is recommended. +Taking a look at its content is recommended. ### Testing diff --git a/docs/data-sources/file.md b/docs/data-sources/file.md index 19bde89d..ec182d9f 100644 --- a/docs/data-sources/file.md +++ b/docs/data-sources/file.md @@ -12,43 +12,163 @@ Generates an archive from content, a file, or directory of files. ## Example Usage ```terraform -# Archive a single file. +# Archive a single file to s3. + +terraform { + required_providers { + aws = { + source = "hashicorp/aws" + version = "5.19.0" + } + } +} -data "archive_file" "init" { +data "archive_file" "example" { type = "zip" - source_file = "${path.module}/init.tpl" - output_path = "${path.module}/files/init.zip" + output_path = "main.zip" + source_file = "main.py" +} + +resource "aws_s3_bucket" "example" { + bucket = "bad-lambda-layer-bucket" + tags = { + "adsk:moniker" = "AMPS-C-UW2" + } +} + +resource "aws_s3_object" "example" { + bucket = aws_s3_bucket.example.id + key = data.archive_file.example.output_path + source = data.archive_file.example.output_path } ``` ```terraform -# Archive multiple files and exclude file. +# Archive a file to be used with Lambda using consistent file mode -data "archive_file" "dotfiles" { +data "archive_file" "example" { + type = "zip" + output_path = "${path.module}/main.zip" + output_file_mode = "0666" + source_file = "${path.module}/main.py" +} +``` + +```terraform +# Archive multiple files from a template. + +terraform { + required_providers { + template = { + source = "hashicorp/template" + version = "2.2.0" + } + } +} + +data "template_file" "foo" { + template = file("${path.module}/foo.tpl") + vars = { + foo = "bar" + } +} + +data "template_file" "hello" { + template = file("${path.module}/hello.tpl") + vars = { + hello = "world" + } +} + +data "archive_file" "example" { type = "zip" - output_path = "${path.module}/files/dotfiles.zip" - excludes = ["${path.module}/unwanted.zip"] + output_path = "${path.module}/example.zip" source { - content = data.template_file.vimrc.rendered - filename = ".vimrc" + content = data.template_file.foo.rendered + filename = "foo.txt" } source { - content = data.template_file.ssh_config.rendered - filename = ".ssh/config" + content = data.template_file.hello.rendered + filename = "hello.txt" } } ``` ```terraform -# Archive a file to be used with Lambda using consistent file mode +# Archive a single directory. 
-data "archive_file" "lambda_my_function" { +data "archive_file" "example" { type = "zip" - source_file = "${path.module}/../lambda/my-function/index.js" + output_path = "${path.module}/main.zip" + source_dir = "${path.module}/dir" output_file_mode = "0666" - output_path = "${path.module}/files/lambda-my-function.js.zip" + excludes = ["exclude.txt"] +} +``` + +```terraform +# Archive a single directory as tgz. + +data "archive_file" "example" { + type = "tgz" + output_path = "${path.module}/main.tar.gz" + source_dir = "${path.module}/dir" + output_file_mode = "0400" + excludes = ["exclude.txt"] +} +``` + +```terraform +data "archive_file" "example" { + type = "zip" + output_path = "main.zip" + source_dir = "${path.module}/src" +} +``` + +```terraform +# Archive a single file. + +data "archive_file" "example" { + type = "zip" + output_path = "${path.module}/main.zip" + output_file_mode = "0666" + source_file = "${path.module}/main.txt" +} +``` + +```terraform +# Archive content. + +data "archive_file" "example" { + type = "zip" + output_path = "${path.module}/main.zip" + source_content_filename = "example.txt" + source_content = "example" +} +``` + +```terraform +# Archive content as tgz. + +data "archive_file" "example" { + type = "tgz" + output_path = "${path.module}/main.tar.gz" + source_content_filename = "example.txt" + source_content = "example" +} +``` + +```terraform +# Archive a single file as tgz. + +data "archive_file" "example" { + type = "tgz" + output_path = "${path.module}/main.tar.gz" + output_file_mode = "0400" + source_file = "${path.module}/main.txt" } ``` @@ -58,7 +178,7 @@ data "archive_file" "lambda_my_function" { ### Required - `output_path` (String) The output of the archive file. -- `type` (String) The type of archive to generate. NOTE: `zip` is supported. +- `type` (String) The type of archive to generate. NOTE: `zip, tgz` are supported. ### Optional diff --git a/docs/todo.md b/docs/todo.md new file mode 100644 index 00000000..7111269f --- /dev/null +++ b/docs/todo.md @@ -0,0 +1,103 @@ +## List of issues + +https://github.com/hashicorp/terraform-provider-archive/issues + +#### 149 + +https://github.com/hashicorp/terraform-provider-archive/issues/149 +Issue archiving base64 encoded content w/ source block + +As designed, the call to filebase64() returns encoded data which is written to the zip entry + +#### 161 + +https://github.com/hashicorp/terraform-provider-archive/issues/161 +archive_file doesn't re-create the archive upon content change + +Basics seem to work. Need to investigate use of templatefile() + +#### 173 + +https://github.com/hashicorp/terraform-provider-archive/issues/173 +Generated archive contents include an extra (empty) file when output_path is configured within same directory as source_dir. + +Fix by excluding output_path if inside source_dir. + +#### 172 + +https://github.com/hashicorp/terraform-provider-archive/issues/172 +Zip file created by terraform archive_file cannot be properly read by python + +Fixed by including directories (along with files) in archive. +This will change the zip file output. i.e. file size and output sha +See TestResource_UpgradeFromVersion2_2_0_DirExcludesConfig + +#### 221 + +https://github.com/hashicorp/terraform-provider-archive/issues/221 +Error generated during the execution of acceptance test on archive_file resource + +This was addressed. 
+ +#### 218 + +https://github.com/hashicorp/terraform-provider-archive/issues/218 +archive_file data source gets created during "terraform plan" vs "terraform apply" and also is not deleted during destroy + +This is by design. + +#### 175 + +https://github.com/hashicorp/terraform-provider-archive/pull/175 +Remove zip files that were generated as a result of a test. + +This was addressed using t.TempDir() + +#### 86 + +https://github.com/hashicorp/terraform-provider-archive/pull/86 +Support glob matching for zip excludes + +Added support in checkMatch() for filepath.Match() + +#### 4 + +https://github.com/hashicorp/terraform-provider-archive/issues/4 +gzip support for archive_file + +https://github.com/hashicorp/terraform-provider-archive/issues/241 +Support Additional Compression Types(Ex: tar.gz format) + +https://github.com/hashicorp/terraform-provider-archive/pull/29 +Support an array of compression formats +zip, tar, tar.gz, base64, tar.bz2, tar.xz, tar.lz4, tar.sz + +Added support for tgz type. + +* zip +* tgz + +#### 2 + +https://github.com/hashicorp/terraform-provider-archive/issues/2 +Feature request - add feature to add file to pre-existing archive in Archive provider + +https://github.com/hashicorp/terraform/pull/9924 + +#### 64 + +https://github.com/hashicorp/terraform-provider-archive/issues/64 +Documentation missing excludes + +https://github.com/hashicorp/terraform-provider-archive/issues/35 +Docs are missing exclude information + +Addressed in https://registry.terraform.io/providers/hashicorp/archive/latest/docs + +#### Professional thoughts on the future of this provider + +https://github.com/hashicorp/terraform-provider-archive/pull/29#issuecomment-406760298 + +## OpenTofu + +Port to OpenTofu here: https://github.com/opentofu/terraform-provider-archive diff --git a/examples/archive-file-s3/main.py b/examples/archive-file-s3/main.py new file mode 100644 index 00000000..11b15b1a --- /dev/null +++ b/examples/archive-file-s3/main.py @@ -0,0 +1 @@ +print("hello") diff --git a/examples/archive-file-s3/main.tf b/examples/archive-file-s3/main.tf new file mode 100644 index 00000000..0f97a3c5 --- /dev/null +++ b/examples/archive-file-s3/main.tf @@ -0,0 +1,29 @@ +# Archive a single file to s3. + +terraform { + required_providers { + aws = { + source = "hashicorp/aws" + version = "5.19.0" + } + } +} + +data "archive_file" "example" { + type = "zip" + output_path = "main.zip" + source_file = "main.py" +} + +resource "aws_s3_bucket" "example" { + bucket = "bad-lambda-layer-bucket" + tags = { + "adsk:moniker" = "AMPS-C-UW2" + } +} + +resource "aws_s3_object" "example" { + bucket = aws_s3_bucket.example.id + key = data.archive_file.example.output_path + source = data.archive_file.example.output_path +} diff --git a/examples/data-sources/file/data-source.tf b/examples/data-sources/file/data-source.tf deleted file mode 100644 index ce83ef01..00000000 --- a/examples/data-sources/file/data-source.tf +++ /dev/null @@ -1,7 +0,0 @@ -# Archive a single file. 
- -data "archive_file" "init" { - type = "zip" - source_file = "${path.module}/init.tpl" - output_path = "${path.module}/files/init.zip" -} diff --git a/examples/data-sources/file/lambda.tf b/examples/data-sources/file/lambda.tf deleted file mode 100644 index b7b427dc..00000000 --- a/examples/data-sources/file/lambda.tf +++ /dev/null @@ -1,8 +0,0 @@ -# Archive a file to be used with Lambda using consistent file mode - -data "archive_file" "lambda_my_function" { - type = "zip" - source_file = "${path.module}/../lambda/my-function/index.js" - output_file_mode = "0666" - output_path = "${path.module}/files/lambda-my-function.js.zip" -} diff --git a/examples/data-sources/file/multiple-files.tf b/examples/data-sources/file/multiple-files.tf deleted file mode 100644 index f147b8be..00000000 --- a/examples/data-sources/file/multiple-files.tf +++ /dev/null @@ -1,17 +0,0 @@ -# Archive multiple files and exclude file. - -data "archive_file" "dotfiles" { - type = "zip" - output_path = "${path.module}/files/dotfiles.zip" - excludes = ["${path.module}/unwanted.zip"] - - source { - content = data.template_file.vimrc.rendered - filename = ".vimrc" - } - - source { - content = data.template_file.ssh_config.rendered - filename = ".ssh/config" - } -} diff --git a/examples/issue-149/input.txt b/examples/issue-149/input.txt new file mode 100644 index 00000000..323fae03 --- /dev/null +++ b/examples/issue-149/input.txt @@ -0,0 +1 @@ +foobar diff --git a/examples/issue-149/main.tf b/examples/issue-149/main.tf new file mode 100644 index 00000000..1629bb21 --- /dev/null +++ b/examples/issue-149/main.tf @@ -0,0 +1,15 @@ +data "archive_file" "input_archive" { + type = "zip" + source_file = "input.txt" + output_path = "input.zip" +} + +data "archive_file" "output_archive" { + type = "zip" + output_path = "output.zip" + + source { + content = filebase64(data.archive_file.input_archive.output_path) + filename = data.archive_file.input_archive.output_path + } +} diff --git a/examples/issue-161/main.tf b/examples/issue-161/main.tf new file mode 100644 index 00000000..6623569a --- /dev/null +++ b/examples/issue-161/main.tf @@ -0,0 +1,8 @@ +data "archive_file" "example" { + type = "zip" + output_path = "main.zip" + source { + content = "bar" + filename = "main.txt" + } +} diff --git a/examples/issue-161/main.txt b/examples/issue-161/main.txt new file mode 100644 index 00000000..5716ca59 --- /dev/null +++ b/examples/issue-161/main.txt @@ -0,0 +1 @@ +bar diff --git a/examples/issue-173/foo/bar.txt b/examples/issue-173/foo/bar.txt new file mode 100644 index 00000000..ce013625 --- /dev/null +++ b/examples/issue-173/foo/bar.txt @@ -0,0 +1 @@ +hello diff --git a/examples/issue-173/main.tf b/examples/issue-173/main.tf new file mode 100644 index 00000000..00cd9223 --- /dev/null +++ b/examples/issue-173/main.tf @@ -0,0 +1,5 @@ +data "archive_file" "foo" { + type = "zip" + source_dir = "${path.module}/foo" + output_path = "${path.module}/foo/bar.zip" +} diff --git a/examples/lambda/main.py b/examples/lambda/main.py new file mode 100644 index 00000000..c6af4920 --- /dev/null +++ b/examples/lambda/main.py @@ -0,0 +1,3 @@ +def lambda_handler(event, _): + print("hello") + return event diff --git a/examples/lambda/main.tf b/examples/lambda/main.tf new file mode 100644 index 00000000..b20d8d2b --- /dev/null +++ b/examples/lambda/main.tf @@ -0,0 +1,8 @@ +# Archive a file to be used with Lambda using consistent file mode + +data "archive_file" "example" { + type = "zip" + output_path = "${path.module}/main.zip" + output_file_mode = "0666" + 
source_file = "${path.module}/main.py" +} diff --git a/examples/multiple-files/foo.tpl b/examples/multiple-files/foo.tpl new file mode 100644 index 00000000..42eb3eba --- /dev/null +++ b/examples/multiple-files/foo.tpl @@ -0,0 +1 @@ +foo ${foo} diff --git a/examples/multiple-files/hello.tpl b/examples/multiple-files/hello.tpl new file mode 100644 index 00000000..73c65be0 --- /dev/null +++ b/examples/multiple-files/hello.tpl @@ -0,0 +1 @@ +hello ${hello} diff --git a/examples/multiple-files/main.tf b/examples/multiple-files/main.tf new file mode 100644 index 00000000..3d718b38 --- /dev/null +++ b/examples/multiple-files/main.tf @@ -0,0 +1,39 @@ +# Archive multiple files from a template. + +terraform { + required_providers { + template = { + source = "hashicorp/template" + version = "2.2.0" + } + } +} + +data "template_file" "foo" { + template = file("${path.module}/foo.tpl") + vars = { + foo = "bar" + } +} + +data "template_file" "hello" { + template = file("${path.module}/hello.tpl") + vars = { + hello = "world" + } +} + +data "archive_file" "example" { + type = "zip" + output_path = "${path.module}/example.zip" + + source { + content = data.template_file.foo.rendered + filename = "foo.txt" + } + + source { + content = data.template_file.hello.rendered + filename = "hello.txt" + } +} diff --git a/examples/source-dir-tgz/dir/exclude.txt b/examples/source-dir-tgz/dir/exclude.txt new file mode 100644 index 00000000..9ba870ea --- /dev/null +++ b/examples/source-dir-tgz/dir/exclude.txt @@ -0,0 +1 @@ +exclude diff --git a/examples/source-dir-tgz/dir/one.txt b/examples/source-dir-tgz/dir/one.txt new file mode 100644 index 00000000..5626abf0 --- /dev/null +++ b/examples/source-dir-tgz/dir/one.txt @@ -0,0 +1 @@ +one diff --git a/examples/source-dir-tgz/dir/two.txt b/examples/source-dir-tgz/dir/two.txt new file mode 100644 index 00000000..f719efd4 --- /dev/null +++ b/examples/source-dir-tgz/dir/two.txt @@ -0,0 +1 @@ +two diff --git a/examples/source-dir-tgz/main.tf b/examples/source-dir-tgz/main.tf new file mode 100644 index 00000000..e319d951 --- /dev/null +++ b/examples/source-dir-tgz/main.tf @@ -0,0 +1,9 @@ +# Archive a single directory as tgz. + +data "archive_file" "example" { + type = "tgz" + output_path = "${path.module}/main.tar.gz" + source_dir = "${path.module}/dir" + output_file_mode = "0400" + excludes = ["exclude.txt"] +} diff --git a/examples/source-dir/dir/exclude.txt b/examples/source-dir/dir/exclude.txt new file mode 100644 index 00000000..9ba870ea --- /dev/null +++ b/examples/source-dir/dir/exclude.txt @@ -0,0 +1 @@ +exclude diff --git a/examples/source-dir/dir/one.txt b/examples/source-dir/dir/one.txt new file mode 100644 index 00000000..5626abf0 --- /dev/null +++ b/examples/source-dir/dir/one.txt @@ -0,0 +1 @@ +one diff --git a/examples/source-dir/dir/two.txt b/examples/source-dir/dir/two.txt new file mode 100644 index 00000000..f719efd4 --- /dev/null +++ b/examples/source-dir/dir/two.txt @@ -0,0 +1 @@ +two diff --git a/examples/source-dir/main.tf b/examples/source-dir/main.tf new file mode 100644 index 00000000..ac296b17 --- /dev/null +++ b/examples/source-dir/main.tf @@ -0,0 +1,9 @@ +# Archive a single directory. 
+ +data "archive_file" "example" { + type = "zip" + output_path = "${path.module}/main.zip" + source_dir = "${path.module}/dir" + output_file_mode = "0666" + excludes = ["exclude.txt"] +} diff --git a/examples/source-dirs-py/main.tf b/examples/source-dirs-py/main.tf new file mode 100644 index 00000000..c54e9e5e --- /dev/null +++ b/examples/source-dirs-py/main.tf @@ -0,0 +1,5 @@ +data "archive_file" "example" { + type = "zip" + output_path = "main.zip" + source_dir = "${path.module}/src" +} diff --git a/examples/source-dirs-py/src/foo/bar.py b/examples/source-dirs-py/src/foo/bar.py new file mode 100644 index 00000000..952cc70b --- /dev/null +++ b/examples/source-dirs-py/src/foo/bar.py @@ -0,0 +1,2 @@ +def hello(): + print("world") diff --git a/examples/source-dirs-py/test.py b/examples/source-dirs-py/test.py new file mode 100644 index 00000000..a715d6a7 --- /dev/null +++ b/examples/source-dirs-py/test.py @@ -0,0 +1,5 @@ +import sys +sys.path.insert(1, "main.zip") + +from foo import bar +bar.hello() diff --git a/examples/source-file-content-tgz/main.tf b/examples/source-file-content-tgz/main.tf new file mode 100644 index 00000000..dc152ad0 --- /dev/null +++ b/examples/source-file-content-tgz/main.tf @@ -0,0 +1,8 @@ +# Archive content as tgz. + +data "archive_file" "example" { + type = "tgz" + output_path = "${path.module}/main.tar.gz" + source_content_filename = "example.txt" + source_content = "example" +} diff --git a/examples/source-file-content/main.tf b/examples/source-file-content/main.tf new file mode 100644 index 00000000..219fc308 --- /dev/null +++ b/examples/source-file-content/main.tf @@ -0,0 +1,8 @@ +# Archive content. + +data "archive_file" "example" { + type = "zip" + output_path = "${path.module}/main.zip" + source_content_filename = "example.txt" + source_content = "example" +} diff --git a/examples/source-file-tgz/main.tf b/examples/source-file-tgz/main.tf new file mode 100644 index 00000000..4a8111cf --- /dev/null +++ b/examples/source-file-tgz/main.tf @@ -0,0 +1,8 @@ +# Archive a single file as tgz. + +data "archive_file" "example" { + type = "tgz" + output_path = "${path.module}/main.tar.gz" + output_file_mode = "0400" + source_file = "${path.module}/main.txt" +} diff --git a/examples/source-file-tgz/main.txt b/examples/source-file-tgz/main.txt new file mode 100644 index 00000000..33a9488b --- /dev/null +++ b/examples/source-file-tgz/main.txt @@ -0,0 +1 @@ +example diff --git a/examples/source-file/main.tf b/examples/source-file/main.tf new file mode 100644 index 00000000..5542b65e --- /dev/null +++ b/examples/source-file/main.tf @@ -0,0 +1,8 @@ +# Archive a single file. 
+ +data "archive_file" "example" { + type = "zip" + output_path = "${path.module}/main.zip" + output_file_mode = "0666" + source_file = "${path.module}/main.txt" +} diff --git a/examples/source-file/main.txt b/examples/source-file/main.txt new file mode 100644 index 00000000..e723c8f9 --- /dev/null +++ b/examples/source-file/main.txt @@ -0,0 +1 @@ +example1 diff --git a/go.mod b/go.mod index 69251ebf..9b480969 100644 --- a/go.mod +++ b/go.mod @@ -7,6 +7,7 @@ require ( github.com/hashicorp/terraform-plugin-framework-validators v0.12.0 github.com/hashicorp/terraform-plugin-go v0.19.0 github.com/hashicorp/terraform-plugin-testing v1.5.1 + github.com/stretchr/testify v1.8.1 ) require ( @@ -14,6 +15,7 @@ require ( github.com/agext/levenshtein v1.2.2 // indirect github.com/apparentlymart/go-textseg/v15 v15.0.0 // indirect github.com/cloudflare/circl v1.3.3 // indirect + github.com/davecgh/go-spew v1.1.1 // indirect github.com/fatih/color v1.13.0 // indirect github.com/golang/protobuf v1.5.3 // indirect github.com/google/go-cmp v0.6.0 // indirect @@ -45,7 +47,7 @@ require ( github.com/mitchellh/mapstructure v1.5.0 // indirect github.com/mitchellh/reflectwalk v1.0.2 // indirect github.com/oklog/run v1.0.0 // indirect - github.com/stretchr/testify v1.8.1 // indirect + github.com/pmezard/go-difflib v1.0.0 // indirect github.com/vmihailenco/msgpack v4.0.4+incompatible // indirect github.com/vmihailenco/msgpack/v5 v5.3.5 // indirect github.com/vmihailenco/tagparser/v2 v2.0.0 // indirect @@ -60,4 +62,5 @@ require ( google.golang.org/genproto/googleapis/rpc v0.0.0-20230525234030-28d5490b6b19 // indirect google.golang.org/grpc v1.57.1 // indirect google.golang.org/protobuf v1.31.0 // indirect + gopkg.in/yaml.v3 v3.0.1 // indirect ) diff --git a/internal/hashcode/hashcode.go b/internal/hashcode/hashcode.go deleted file mode 100644 index 97bc709b..00000000 --- a/internal/hashcode/hashcode.go +++ /dev/null @@ -1,38 +0,0 @@ -// Copyright (c) HashiCorp, Inc. -// SPDX-License-Identifier: MPL-2.0 - -package hashcode - -import ( - "bytes" - "fmt" - "hash/crc32" -) - -// String hashes a string to a unique hashcode. -// -// crc32 returns a uint32, but for our use we need -// and non negative integer. Here we cast to an integer -// and invert it if the result is negative. -func String(s string) int { - v := int(crc32.ChecksumIEEE([]byte(s))) - if v >= 0 { - return v - } - if -v >= 0 { - return -v - } - // v == MinInt - return 0 -} - -// Strings hashes a list of strings to a unique hashcode. -func Strings(strings []string) string { - var buf bytes.Buffer - - for _, s := range strings { - buf.WriteString(fmt.Sprintf("%s-", s)) - } - - return fmt.Sprintf("%d", String(buf.String())) -} diff --git a/internal/hashcode/hashcode_test.go b/internal/hashcode/hashcode_test.go deleted file mode 100644 index 3fb60492..00000000 --- a/internal/hashcode/hashcode_test.go +++ /dev/null @@ -1,40 +0,0 @@ -// Copyright (c) HashiCorp, Inc. 
-// SPDX-License-Identifier: MPL-2.0 - -package hashcode - -import ( - "testing" -) - -func TestString(t *testing.T) { - v := "hello, world" - expected := String(v) - for i := 0; i < 100; i++ { - actual := String(v) - if actual != expected { - t.Fatalf("bad: %#v\n\t%#v", actual, expected) - } - } -} - -func TestStrings(t *testing.T) { - v := []string{"hello", ",", "world"} - expected := Strings(v) - for i := 0; i < 100; i++ { - actual := Strings(v) - if actual != expected { - t.Fatalf("bad: %#v\n\t%#v", actual, expected) - } - } -} - -func TestString_positiveIndex(t *testing.T) { - // "2338615298" hashes to uint32(2147483648) which is math.MinInt32 - ips := []string{"192.168.1.3", "192.168.1.5", "2338615298"} - for _, ip := range ips { - if index := String(ip); index < 0 { - t.Fatalf("Bad Index %#v for ip %s", index, ip) - } - } -} diff --git a/internal/provider/archiver.go b/internal/provider/archiver.go index 5cd4794b..c2de0e9e 100644 --- a/internal/provider/archiver.go +++ b/internal/provider/archiver.go @@ -6,6 +6,7 @@ package archive import ( "fmt" "os" + "path/filepath" ) type ArchiveDirOpts struct { @@ -25,6 +26,7 @@ type ArchiverBuilder func(outputPath string) Archiver var archiverBuilders = map[string]ArchiverBuilder{ "zip": NewZipArchiver, + "tgz": NewTgzArchiver, } func getArchiver(archiveType string, outputPath string) Archiver { @@ -42,16 +44,32 @@ func assertValidFile(infilename string) (os.FileInfo, error) { return fi, err } -func assertValidDir(indirname string) (os.FileInfo, error) { +func assertValidDir(indirname string) error { fi, err := os.Stat(indirname) if err != nil { if os.IsNotExist(err) { - return fi, fmt.Errorf("could not archive missing directory: %s", indirname) + return fmt.Errorf("could not archive missing directory: %s", indirname) } - return fi, err + return err } if !fi.IsDir() { - return fi, fmt.Errorf("could not archive directory that is a file: %s", indirname) + return fmt.Errorf("could not archive directory that is a file: %s", indirname) } - return fi, nil + return nil +} + +func checkMatch(fileName string, excludes []string) (bool, error) { + for _, exclude := range excludes { + if exclude == "" { + continue + } + m, err := filepath.Match(exclude, fileName) + if err != nil { + return false, err + } + if m { + return true, nil + } + } + return false, nil } diff --git a/internal/provider/archiver_test.go b/internal/provider/archiver_test.go new file mode 100644 index 00000000..426812bf --- /dev/null +++ b/internal/provider/archiver_test.go @@ -0,0 +1,51 @@ +// Copyright (c) HashiCorp, Inc. 
+// SPDX-License-Identifier: MPL-2.0 + +package archive + +import ( + "github.com/stretchr/testify/assert" + "testing" +) + +func Test_CheckMatch(t *testing.T) { + tests := []struct { + fileName string + excludes []string + expected bool + }{ + { + fileName: "foo.txt", + excludes: []string{"foo.txt"}, + expected: true, + }, + { + fileName: "foo.txt", + excludes: []string{"fo?.txt"}, + expected: true, + }, + { + fileName: "foo.txt", + excludes: []string{"f*.txt"}, + expected: true, + }, + { + fileName: "foo.txt", + excludes: []string{"foo.exe", "bar.txt"}, + expected: false, + }, + { + fileName: "foo.txt", + excludes: []string{"foo.exe", "foo.*"}, + expected: true, + }, + } + + for _, tt := range tests { + m, err := checkMatch(tt.fileName, tt.excludes) + if err != nil { + t.Fatal(err) + } + assert.Equal(t, tt.expected, m) + } +} diff --git a/internal/provider/data_source_archive_file.go b/internal/provider/data_source_archive_file.go index 5a21511b..1efbfb98 100644 --- a/internal/provider/data_source_archive_file.go +++ b/internal/provider/data_source_archive_file.go @@ -12,9 +12,6 @@ import ( "encoding/base64" "encoding/hex" "fmt" - "os" - "path" - "github.com/hashicorp/terraform-plugin-framework-validators/datasourcevalidator" "github.com/hashicorp/terraform-plugin-framework-validators/setvalidator" "github.com/hashicorp/terraform-plugin-framework-validators/stringvalidator" @@ -23,6 +20,10 @@ import ( fwpath "github.com/hashicorp/terraform-plugin-framework/path" "github.com/hashicorp/terraform-plugin-framework/schema/validator" "github.com/hashicorp/terraform-plugin-framework/types" + "os" + "path" + "path/filepath" + "strings" ) var _ datasource.DataSource = (*archiveFileDataSource)(nil) @@ -44,7 +45,7 @@ func (d *archiveFileDataSource) ConfigValidators(context.Context) []datasource.C } } -func (d *archiveFileDataSource) Schema(ctx context.Context, req datasource.SchemaRequest, resp *datasource.SchemaResponse) { +func (d *archiveFileDataSource) Schema(_ context.Context, _ datasource.SchemaRequest, resp *datasource.SchemaResponse) { resp.Schema = schema.Schema{ Description: "Generates an archive from content, a file, or directory of files.", Blocks: map[string]schema.Block{ @@ -80,7 +81,7 @@ func (d *archiveFileDataSource) Schema(ctx context.Context, req datasource.Schem Computed: true, }, "type": schema.StringAttribute{ - Description: "The type of archive to generate. NOTE: `zip` is supported.", + Description: "The type of archive to generate. NOTE: `zip, tgz` are supported.", Required: true, }, "source_content": schema.StringAttribute{ @@ -208,14 +209,20 @@ func archive(ctx context.Context, model fileModel) error { switch { case !model.SourceDir.IsNull(): - excludeList := make([]string, len(model.Excludes.Elements())) + var excludeList []string if !model.Excludes.IsNull() { var elements []types.String model.Excludes.ElementsAs(ctx, &elements, false) - for i, elem := range elements { - excludeList[i] = elem.ValueString() + for _, element := range elements { + excludeList = append(excludeList, element.ValueString()) + } + } + sourceDir := model.SourceDir.ValueString() + if relPath, err := filepath.Rel(sourceDir, outputPath); err == nil { + if !strings.HasPrefix(relPath, ".."+string(os.PathSeparator)) && relPath != ".." 
{ + excludeList = append(excludeList, relPath) } } @@ -227,7 +234,7 @@ func archive(ctx context.Context, model fileModel) error { opts.ExcludeSymlinkDirectories = model.ExcludeSymlinkDirectories.ValueBool() } - if err := archiver.ArchiveDir(model.SourceDir.ValueString(), opts); err != nil { + if err := archiver.ArchiveDir(sourceDir, opts); err != nil { return fmt.Errorf("error archiving directory: %s", err) } case !model.SourceFile.IsNull(): diff --git a/internal/provider/data_source_archive_file_test.go b/internal/provider/data_source_archive_file_test.go index 8ec32870..3f565fd1 100644 --- a/internal/provider/data_source_archive_file_test.go +++ b/internal/provider/data_source_archive_file_test.go @@ -711,6 +711,7 @@ func TestAccArchiveFile_DirectoryWithSymlinkDirectory_Relative(t *testing.T) { r.TestCheckResourceAttrPtr("data.archive_file.foo", "output_size", &fileSize), r.TestCheckResourceAttrWith("data.archive_file.foo", "output_path", func(value string) error { ensureContents(t, value, map[string][]byte{ + "test-symlink-dir/": {}, "test-symlink-dir/file1.txt": []byte("This is file 1"), "test-symlink-dir/file2.txt": []byte("This is file 2"), "test-symlink-dir/file3.txt": []byte("This is file 3"), @@ -755,6 +756,7 @@ func TestAccArchiveFile_IncludeDirectoryWithSymlinkDirectory_Absolute(t *testing r.TestCheckResourceAttrPtr("data.archive_file.foo", "output_size", &fileSize), r.TestCheckResourceAttrWith("data.archive_file.foo", "output_path", func(value string) error { ensureContents(t, value, map[string][]byte{ + "test-symlink-dir/": {}, "test-symlink-dir/file1.txt": []byte("This is file 1"), "test-symlink-dir/file2.txt": []byte("This is file 2"), "test-symlink-dir/file3.txt": []byte("This is file 3"), @@ -794,21 +796,29 @@ func TestAccArchiveFile_Multiple_Relative(t *testing.T) { r.TestCheckResourceAttrPtr("data.archive_file.foo", "output_size", &fileSize), r.TestCheckResourceAttrWith("data.archive_file.foo", "output_path", func(value string) error { ensureContents(t, value, map[string][]byte{ + "test-dir/": {}, + "test-dir/test-dir1/": {}, "test-dir/test-dir1/file1.txt": []byte("This is file 1"), "test-dir/test-dir1/file2.txt": []byte("This is file 2"), "test-dir/test-dir1/file3.txt": []byte("This is file 3"), + "test-dir/test-dir2/": {}, "test-dir/test-dir2/file1.txt": []byte("This is file 1"), "test-dir/test-dir2/file2.txt": []byte("This is file 2"), "test-dir/test-dir2/file3.txt": []byte("This is file 3"), "test-dir/test-file.txt": []byte("This is test content"), + "test-dir-with-symlink-dir/": {}, + "test-dir-with-symlink-dir/test-symlink-dir/": {}, "test-dir-with-symlink-dir/test-symlink-dir/file1.txt": []byte("This is file 1"), "test-dir-with-symlink-dir/test-symlink-dir/file2.txt": []byte("This is file 2"), "test-dir-with-symlink-dir/test-symlink-dir/file3.txt": []byte("This is file 3"), + "test-dir-with-symlink-file/": {}, "test-dir-with-symlink-file/test-file.txt": []byte("This is test content"), "test-dir-with-symlink-file/test-symlink.txt": []byte("This is test content"), + "test-symlink-dir/": {}, "test-symlink-dir/file1.txt": []byte("This is file 1"), "test-symlink-dir/file2.txt": []byte("This is file 2"), "test-symlink-dir/file3.txt": []byte("This is file 3"), + "test-symlink-dir-with-symlink-file/": {}, "test-symlink-dir-with-symlink-file/test-file.txt": []byte("This is test content"), "test-symlink-dir-with-symlink-file/test-symlink.txt": []byte("This is test content"), }) @@ -852,21 +862,29 @@ func TestAccArchiveFile_Multiple_Absolute(t *testing.T) { 
r.TestCheckResourceAttrPtr("data.archive_file.foo", "output_size", &fileSize), r.TestCheckResourceAttrWith("data.archive_file.foo", "output_path", func(value string) error { ensureContents(t, value, map[string][]byte{ + "test-dir/": {}, + "test-dir/test-dir1/": {}, "test-dir/test-dir1/file1.txt": []byte("This is file 1"), "test-dir/test-dir1/file2.txt": []byte("This is file 2"), "test-dir/test-dir1/file3.txt": []byte("This is file 3"), + "test-dir/test-dir2/": {}, "test-dir/test-dir2/file1.txt": []byte("This is file 1"), "test-dir/test-dir2/file2.txt": []byte("This is file 2"), "test-dir/test-dir2/file3.txt": []byte("This is file 3"), "test-dir/test-file.txt": []byte("This is test content"), + "test-dir-with-symlink-dir/": {}, + "test-dir-with-symlink-dir/test-symlink-dir/": {}, "test-dir-with-symlink-dir/test-symlink-dir/file1.txt": []byte("This is file 1"), "test-dir-with-symlink-dir/test-symlink-dir/file2.txt": []byte("This is file 2"), "test-dir-with-symlink-dir/test-symlink-dir/file3.txt": []byte("This is file 3"), + "test-dir-with-symlink-file/": {}, "test-dir-with-symlink-file/test-file.txt": []byte("This is test content"), "test-dir-with-symlink-file/test-symlink.txt": []byte("This is test content"), + "test-symlink-dir/": {}, "test-symlink-dir/file1.txt": []byte("This is file 1"), "test-symlink-dir/file2.txt": []byte("This is file 2"), "test-symlink-dir/file3.txt": []byte("This is file 3"), + "test-symlink-dir-with-symlink-file/": {}, "test-symlink-dir-with-symlink-file/test-file.txt": []byte("This is test content"), "test-symlink-dir-with-symlink-file/test-symlink.txt": []byte("This is test content"), }) diff --git a/internal/provider/resource_archive_file_test.go b/internal/provider/resource_archive_file_test.go index 9e9ce08e..02727234 100644 --- a/internal/provider/resource_archive_file_test.go +++ b/internal/provider/resource_archive_file_test.go @@ -284,6 +284,8 @@ func TestResource_UpgradeFromVersion2_2_0_DirConfig(t *testing.T) { } func TestResource_UpgradeFromVersion2_2_0_DirExcludesConfig(t *testing.T) { + t.Skip("latest zip adds empty directories to conform") + td := t.TempDir() f := filepath.Join(td, "zip_file_acc_test_upgrade_dir_excludes.zip") @@ -792,6 +794,7 @@ func TestResource_ArchiveFile_DirectoryWithSymlinkDirectory_Relative(t *testing. 
r.TestCheckResourceAttrPtr("archive_file.foo", "output_size", &fileSize), r.TestCheckResourceAttrWith("archive_file.foo", "output_path", func(value string) error { ensureContents(t, value, map[string][]byte{ + "test-symlink-dir/": {}, "test-symlink-dir/file1.txt": []byte("This is file 1"), "test-symlink-dir/file2.txt": []byte("This is file 2"), "test-symlink-dir/file3.txt": []byte("This is file 3"), @@ -836,6 +839,7 @@ func TestResource_ArchiveFile_IncludeDirectoryWithSymlinkDirectory_Absolute(t *t r.TestCheckResourceAttrPtr("archive_file.foo", "output_size", &fileSize), r.TestCheckResourceAttrWith("archive_file.foo", "output_path", func(value string) error { ensureContents(t, value, map[string][]byte{ + "test-symlink-dir/": {}, "test-symlink-dir/file1.txt": []byte("This is file 1"), "test-symlink-dir/file2.txt": []byte("This is file 2"), "test-symlink-dir/file3.txt": []byte("This is file 3"), @@ -875,21 +879,29 @@ func TestResource_ArchiveFile_Multiple_Relative(t *testing.T) { r.TestCheckResourceAttrPtr("archive_file.foo", "output_size", &fileSize), r.TestCheckResourceAttrWith("archive_file.foo", "output_path", func(value string) error { ensureContents(t, value, map[string][]byte{ + "test-dir/": {}, + "test-dir/test-dir1/": {}, "test-dir/test-dir1/file1.txt": []byte("This is file 1"), "test-dir/test-dir1/file2.txt": []byte("This is file 2"), "test-dir/test-dir1/file3.txt": []byte("This is file 3"), + "test-dir/test-dir2/": {}, "test-dir/test-dir2/file1.txt": []byte("This is file 1"), "test-dir/test-dir2/file2.txt": []byte("This is file 2"), "test-dir/test-dir2/file3.txt": []byte("This is file 3"), "test-dir/test-file.txt": []byte("This is test content"), + "test-dir-with-symlink-dir/": {}, + "test-dir-with-symlink-dir/test-symlink-dir/": {}, "test-dir-with-symlink-dir/test-symlink-dir/file1.txt": []byte("This is file 1"), "test-dir-with-symlink-dir/test-symlink-dir/file2.txt": []byte("This is file 2"), "test-dir-with-symlink-dir/test-symlink-dir/file3.txt": []byte("This is file 3"), + "test-dir-with-symlink-file/": {}, "test-dir-with-symlink-file/test-file.txt": []byte("This is test content"), "test-dir-with-symlink-file/test-symlink.txt": []byte("This is test content"), + "test-symlink-dir/": {}, "test-symlink-dir/file1.txt": []byte("This is file 1"), "test-symlink-dir/file2.txt": []byte("This is file 2"), "test-symlink-dir/file3.txt": []byte("This is file 3"), + "test-symlink-dir-with-symlink-file/": {}, "test-symlink-dir-with-symlink-file/test-file.txt": []byte("This is test content"), "test-symlink-dir-with-symlink-file/test-symlink.txt": []byte("This is test content"), }) @@ -933,21 +945,29 @@ func TestResource_ArchiveFile_Multiple_Absolute(t *testing.T) { r.TestCheckResourceAttrPtr("archive_file.foo", "output_size", &fileSize), r.TestCheckResourceAttrWith("archive_file.foo", "output_path", func(value string) error { ensureContents(t, value, map[string][]byte{ + "test-dir/": {}, + "test-dir/test-dir1/": {}, "test-dir/test-dir1/file1.txt": []byte("This is file 1"), "test-dir/test-dir1/file2.txt": []byte("This is file 2"), "test-dir/test-dir1/file3.txt": []byte("This is file 3"), + "test-dir/test-dir2/": {}, "test-dir/test-dir2/file1.txt": []byte("This is file 1"), "test-dir/test-dir2/file2.txt": []byte("This is file 2"), "test-dir/test-dir2/file3.txt": []byte("This is file 3"), "test-dir/test-file.txt": []byte("This is test content"), + "test-dir-with-symlink-dir/": {}, + "test-dir-with-symlink-dir/test-symlink-dir/": {}, "test-dir-with-symlink-dir/test-symlink-dir/file1.txt": 
[]byte("This is file 1"), "test-dir-with-symlink-dir/test-symlink-dir/file2.txt": []byte("This is file 2"), "test-dir-with-symlink-dir/test-symlink-dir/file3.txt": []byte("This is file 3"), + "test-dir-with-symlink-file/": {}, "test-dir-with-symlink-file/test-file.txt": []byte("This is test content"), "test-dir-with-symlink-file/test-symlink.txt": []byte("This is test content"), + "test-symlink-dir/": {}, "test-symlink-dir/file1.txt": []byte("This is file 1"), "test-symlink-dir/file2.txt": []byte("This is file 2"), "test-symlink-dir/file3.txt": []byte("This is file 3"), + "test-symlink-dir-with-symlink-file/": {}, "test-symlink-dir-with-symlink-file/test-file.txt": []byte("This is test content"), "test-symlink-dir-with-symlink-file/test-symlink.txt": []byte("This is test content"), }) diff --git a/internal/provider/tgz_archiver.go b/internal/provider/tgz_archiver.go new file mode 100644 index 00000000..e7acb74a --- /dev/null +++ b/internal/provider/tgz_archiver.go @@ -0,0 +1,244 @@ +// Copyright (c) HashiCorp, Inc. +// SPDX-License-Identifier: MPL-2.0 + +package archive + +import ( + "archive/tar" + "compress/gzip" + "fmt" + "os" + "path/filepath" + "sort" + "strconv" +) + +type TgzArchiver struct { + filepath string + outputFileMode string + filewriter *os.File + gzipwriter *gzip.Writer + tarwriter *tar.Writer +} + +func NewTgzArchiver(filepath string) Archiver { + return &TgzArchiver{ + filepath: filepath, + } +} + +func (a *TgzArchiver) ArchiveContent(content []byte, infilename string) error { + if err := a.open(); err != nil { + return err + } + defer a.close() + + if err := a.tarwriter.WriteHeader(&tar.Header{ + Name: infilename, + Mode: 0600, + Size: int64(len(content)), + }); err != nil { + return err + } + if _, err := a.tarwriter.Write(content); err != nil { + return err + } + + return nil +} + +func (a *TgzArchiver) ArchiveFile(infilename string) error { + fi, err := assertValidFile(infilename) + if err != nil { + return err + } + + content, err := os.ReadFile(infilename) + if err != nil { + return err + } + + if err := a.open(); err != nil { + return err + } + defer a.close() + + fih, err := tar.FileInfoHeader(fi, "") + if err != nil { + return fmt.Errorf("error creating file header: %s", err) + } + + if a.outputFileMode != "" { + filemode, err := strconv.ParseInt(a.outputFileMode, 0, 64) + if err != nil { + return fmt.Errorf("error parsing output_file_mode value: %s", a.outputFileMode) + } + fih.Mode = filemode + } + + if err := a.tarwriter.WriteHeader(fih); err != nil { + return fmt.Errorf("error creating file inside archive: %s", err) + } + + if _, err = a.tarwriter.Write(content); err != nil { + return err + } + + return nil +} + +func (a *TgzArchiver) ArchiveDir(indirname string, opts ArchiveDirOpts) error { + if err := assertValidDir(indirname); err != nil { + return err + } + + for i := range opts.Excludes { + opts.Excludes[i] = filepath.FromSlash(opts.Excludes[i]) + } + + if err := a.open(); err != nil { + return err + } + defer a.close() + + return filepath.Walk(indirname, a.createWalkFunc("", indirname, opts)) +} + +func (a *TgzArchiver) createWalkFunc(basePath string, indirname string, opts ArchiveDirOpts) func(path string, info os.FileInfo, err error) error { + return func(path string, fi os.FileInfo, err error) error { + if err != nil { + return fmt.Errorf("error encountered during file walk: %s", err) + } + + relName, err := filepath.Rel(indirname, path) + if err != nil { + return fmt.Errorf("error relativizing file for archival: %s", err) + } + + archivePath := 
filepath.Join(basePath, relName) + + isExcluded, err := checkMatch(archivePath, opts.Excludes) + if err != nil { + return fmt.Errorf("error matching file for archival: %s", err) + } + + if fi.IsDir() { + if isExcluded { + return filepath.SkipDir + } + return nil + } + + if isExcluded { + return nil + } + + if err != nil { + return err + } + + if fi.Mode()&os.ModeSymlink == os.ModeSymlink { + if !opts.ExcludeSymlinkDirectories { + realPath, err := filepath.EvalSymlinks(path) + if err != nil { + return err + } + + realFileInfo, err := os.Stat(realPath) + if err != nil { + return err + } + + if realFileInfo.IsDir() { + return filepath.Walk(realPath, a.createWalkFunc(archivePath, realPath, opts)) + } + + fi = realFileInfo + } + } + + fih, err := tar.FileInfoHeader(fi, "") + if err != nil { + return fmt.Errorf("error creating file header: %s", err) + } + + if a.outputFileMode != "" { + filemode, err := strconv.ParseInt(a.outputFileMode, 0, 64) + if err != nil { + return fmt.Errorf("error parsing output_file_mode value: %s", a.outputFileMode) + } + fih.Mode = filemode + } + + err = a.tarwriter.WriteHeader(fih) + if err != nil { + return fmt.Errorf("error creating file inside archive: %s", err) + } + content, err := os.ReadFile(path) + if err != nil { + return fmt.Errorf("error reading file for archival: %s", err) + } + _, err = a.tarwriter.Write(content) + return err + } +} + +func (a *TgzArchiver) ArchiveMultiple(content map[string][]byte) error { + if err := a.open(); err != nil { + return err + } + defer a.close() + + // Ensure files are processed in the same order so hashes don't change + keys := make([]string, len(content)) + i := 0 + for k := range content { + keys[i] = k + i++ + } + sort.Strings(keys) + + for _, filename := range keys { + if err := a.tarwriter.WriteHeader(&tar.Header{ + Name: filename, + Mode: 0600, + Size: int64(len(content[filename])), + }); err != nil { + return err + } + if _, err := a.tarwriter.Write(content[filename]); err != nil { + return err + } + } + return nil +} + +func (a *TgzArchiver) SetOutputFileMode(outputFileMode string) { + a.outputFileMode = outputFileMode +} + +func (a *TgzArchiver) open() error { + f, err := os.Create(a.filepath) + if err != nil { + return err + } + a.filewriter = f + a.gzipwriter = gzip.NewWriter(f) + a.tarwriter = tar.NewWriter(a.gzipwriter) + return nil +} + +func (a *TgzArchiver) close() { + if a.tarwriter != nil { + a.tarwriter.Close() + a.tarwriter = nil + } + if a.gzipwriter != nil { + a.gzipwriter.Close() + a.gzipwriter = nil + } + if a.filewriter != nil { + a.filewriter.Close() + a.filewriter = nil + } +} diff --git a/internal/provider/tgz_archiver_test.go b/internal/provider/tgz_archiver_test.go new file mode 100644 index 00000000..0a6e0731 --- /dev/null +++ b/internal/provider/tgz_archiver_test.go @@ -0,0 +1,189 @@ +// Copyright (c) HashiCorp, Inc. 
+// SPDX-License-Identifier: MPL-2.0 + +package archive + +import ( + "archive/tar" + "compress/gzip" + "io" + "log" + "os" + "path/filepath" + "strconv" + "testing" +) + +func TestTgzArchiver_Content(t *testing.T) { + tgzFilePath := filepath.Join(t.TempDir(), "archive-content.tgz") + + archiver := NewTgzArchiver(tgzFilePath) + if err := archiver.ArchiveContent([]byte("This is some content"), "content.txt"); err != nil { + t.Fatalf("unexpected error: %s", err) + } + + ensureTgzContents(t, tgzFilePath, map[string][]byte{ + "content.txt": []byte("This is some content"), + }) +} + +func TestTgzArchiver_File(t *testing.T) { + tgzFilePath := filepath.Join(t.TempDir(), "archive-file.tgz") + + archiver := NewTgzArchiver(tgzFilePath) + if err := archiver.ArchiveFile("./test-fixtures/test-dir/test-file.txt"); err != nil { + t.Fatalf("unexpected error: %s", err) + } + + ensureTgzContents(t, tgzFilePath, map[string][]byte{ + "test-file.txt": []byte("This is test content"), + }) +} + +func TestTgzArchiver_FileMode(t *testing.T) { + file, err := os.CreateTemp("", "archive-file-mode-test.tgz") + if err != nil { + t.Fatal(err) + } + + var ( + tgzFilePath = file.Name() + toTgzPath = filepath.FromSlash("./test-fixtures/test-dir/test-file.txt") + ) + + for _, element := range []string{"0444", "0644", "0666", "0744", "0777"} { + archiver := NewTgzArchiver(tgzFilePath) + archiver.SetOutputFileMode(element) + if err := archiver.ArchiveFile(toTgzPath); err != nil { + t.Fatalf("unexpected error: %s", err) + } + + ensureTgzFileMode(t, tgzFilePath, element) + } +} + +func TestTgzArchiver_Dir(t *testing.T) { + tgzFilePath := filepath.Join(t.TempDir(), "archive-dir.tgz") + + archiver := NewTgzArchiver(tgzFilePath) + if err := archiver.ArchiveDir("./test-fixtures/test-dir/test-dir1", ArchiveDirOpts{}); err != nil { + t.Fatalf("unexpected error: %s", err) + } + + ensureTgzContents(t, tgzFilePath, map[string][]byte{ + "file1.txt": []byte("This is file 1"), + "file2.txt": []byte("This is file 2"), + "file3.txt": []byte("This is file 3"), + }) +} + +func TestTgzArchiver_Multiple(t *testing.T) { + tgzFilePath := filepath.Join(t.TempDir(), "archive-content.tgz") + + content := map[string][]byte{ + "file1.txt": []byte("This is file 1"), + "file2.txt": []byte("This is file 2"), + "file3.txt": []byte("This is file 3"), + } + + archiver := NewTgzArchiver(tgzFilePath) + if err := archiver.ArchiveMultiple(content); err != nil { + t.Fatalf("unexpected error: %s", err) + } + + ensureTgzContents(t, tgzFilePath, content) +} + +func ensureTgzContents(t *testing.T, tgzfilepath string, wants map[string][]byte) { + t.Helper() + f, err := os.Open(tgzfilepath) + if err != nil { + t.Fatalf("could not open tgz file: %s", err) + } + defer f.Close() + + gzipreader, err := gzip.NewReader(f) + if err != nil { + t.Fatalf("could not open tgz reader: %s", err) + } + defer gzipreader.Close() + + tarreader := tar.NewReader(gzipreader) + count := 0 + + for { + hdr, err := tarreader.Next() + if err == io.EOF { + break + } + if err != nil { + log.Fatalf("error iterating tgz file: %s", err) + } + ensureTgzContent(t, wants, hdr, tarreader) + + count++ + } + + if count != len(wants) { + t.Errorf("mismatched file count, got %d, want %d", count, len(wants)) + } +} + +func ensureTgzContent(t *testing.T, wants map[string][]byte, hdr *tar.Header, got *tar.Reader) { + t.Helper() + want, ok := wants[hdr.Name] + if !ok { + t.Errorf("additional file in tgz: %s", hdr.Name) + return + } + + gotContentBytes, err := io.ReadAll(got) + if err != nil { + t.Errorf("could 
not read file: %s", err) + } + + wantContent := string(want) + gotContent := string(gotContentBytes) + if gotContent != wantContent { + t.Errorf("mismatched content\ngot\n%s\nwant\n%s", gotContent, wantContent) + } +} + +func ensureTgzFileMode(t *testing.T, tgzfilepath string, outputFileMode string) { + t.Helper() + f, err := os.Open(tgzfilepath) + if err != nil { + t.Fatalf("could not open tgz file: %s", err) + } + defer f.Close() + + gzipreader, err := gzip.NewReader(f) + if err != nil { + t.Fatalf("could not open tgz reader: %s", err) + } + defer gzipreader.Close() + + filemode, err := strconv.ParseInt(outputFileMode, 0, 64) + if err != nil { + t.Fatalf("error parsing outputFileMode value: %s", outputFileMode) + } + + tarreader := tar.NewReader(gzipreader) + for { + hdr, err := tarreader.Next() + if err == io.EOF { + break + } + if err != nil { + log.Fatalf("error iterating tgz file: %s", err) + } + + if hdr.Typeflag == tar.TypeDir { + continue + } + + if hdr.Mode != filemode { + t.Fatalf("Expected filemode \"%d\" but was \"%d\"", filemode, hdr.Mode) + } + } +} diff --git a/internal/provider/zip_archiver.go b/internal/provider/zip_archiver.go index 9b149529..9423933c 100644 --- a/internal/provider/zip_archiver.go +++ b/internal/provider/zip_archiver.go @@ -17,7 +17,7 @@ type ZipArchiver struct { filepath string outputFileMode string // Default value "" means unset filewriter *os.File - writer *zip.Writer + zipwriter *zip.Writer } func NewZipArchiver(filepath string) Archiver { @@ -32,7 +32,7 @@ func (a *ZipArchiver) ArchiveContent(content []byte, infilename string) error { } defer a.close() - f, err := a.writer.Create(filepath.ToSlash(infilename)) + f, err := a.zipwriter.Create(filepath.ToSlash(infilename)) if err != nil { return err } @@ -74,7 +74,7 @@ func (a *ZipArchiver) ArchiveFile(infilename string) error { fh.SetMode(os.FileMode(filemode)) } - f, err := a.writer.CreateHeader(fh) + f, err := a.zipwriter.CreateHeader(fh) if err != nil { return fmt.Errorf("error creating file inside archive: %s", err) } @@ -83,22 +83,8 @@ func (a *ZipArchiver) ArchiveFile(infilename string) error { return err } -func checkMatch(fileName string, excludes []string) (value bool) { - for _, exclude := range excludes { - if exclude == "" { - continue - } - - if exclude == fileName { - return true - } - } - return false -} - func (a *ZipArchiver) ArchiveDir(indirname string, opts ArchiveDirOpts) error { - _, err := assertValidDir(indirname) - if err != nil { + if err := assertValidDir(indirname); err != nil { return err } @@ -121,28 +107,35 @@ func (a *ZipArchiver) createWalkFunc(basePath string, indirname string, opts Arc return fmt.Errorf("error encountered during file walk: %s", err) } - relname, err := filepath.Rel(indirname, path) + relName, err := filepath.Rel(indirname, path) if err != nil { return fmt.Errorf("error relativizing file for archival: %s", err) } - archivePath := filepath.Join(basePath, relname) + archivePath := filepath.Join(basePath, relName) - isMatch := checkMatch(archivePath, opts.Excludes) + isExcluded, err := checkMatch(archivePath, opts.Excludes) + if err != nil { + return fmt.Errorf("error matching file for archival: %s", err) + } if info.IsDir() { - if isMatch { + if isExcluded { return filepath.SkipDir } - return nil - } - if isMatch { + if archivePath != "." 
{ + _, err := a.zipwriter.Create(archivePath + "/") + if err != nil { + return fmt.Errorf("error adding directory for archival: %s", err) + } + } + return nil } - if err != nil { - return err + if isExcluded { + return nil } if info.Mode()&os.ModeSymlink == os.ModeSymlink { @@ -183,7 +176,7 @@ func (a *ZipArchiver) createWalkFunc(basePath string, indirname string, opts Arc fh.SetMode(os.FileMode(filemode)) } - f, err := a.writer.CreateHeader(fh) + f, err := a.zipwriter.CreateHeader(fh) if err != nil { return fmt.Errorf("error creating file inside archive: %s", err) } @@ -212,7 +205,7 @@ func (a *ZipArchiver) ArchiveMultiple(content map[string][]byte) error { sort.Strings(keys) for _, filename := range keys { - f, err := a.writer.Create(filepath.ToSlash(filename)) + f, err := a.zipwriter.Create(filepath.ToSlash(filename)) if err != nil { return err } @@ -234,14 +227,14 @@ func (a *ZipArchiver) open() error { return err } a.filewriter = f - a.writer = zip.NewWriter(f) + a.zipwriter = zip.NewWriter(f) return nil } func (a *ZipArchiver) close() { - if a.writer != nil { - a.writer.Close() - a.writer = nil + if a.zipwriter != nil { + a.zipwriter.Close() + a.zipwriter = nil } if a.filewriter != nil { a.filewriter.Close() diff --git a/internal/provider/zip_archiver_test.go b/internal/provider/zip_archiver_test.go index 3d45f324..6475373f 100644 --- a/internal/provider/zip_archiver_test.go +++ b/internal/provider/zip_archiver_test.go @@ -144,12 +144,31 @@ func TestZipArchiver_Dir_Exclude_With_Directory(t *testing.T) { } ensureContents(t, zipFilePath, map[string][]byte{ + "test-dir2/": {}, "test-dir2/file1.txt": []byte("This is file 1"), "test-dir2/file3.txt": []byte("This is file 3"), "test-file.txt": []byte("This is test content"), }) } +func TestZipArchiver_Dir_Exclude_With_Glob(t *testing.T) { + zipFilePath := filepath.Join(t.TempDir(), "archive-dir.zip") + + archiver := NewZipArchiver(zipFilePath) + if err := archiver.ArchiveDir("./test-fixtures/test-dir/", ArchiveDirOpts{ + Excludes: []string{"test-dir1/file?.txt", "test-dir2/*1.txt"}}); err != nil { + t.Fatalf("unexpected error: %s", err) + } + + ensureContents(t, zipFilePath, map[string][]byte{ + "test-file.txt": []byte("This is test content"), + "test-dir1/": {}, + "test-dir2/": {}, + "test-dir2/file2.txt": []byte("This is file 2"), + "test-dir2/file3.txt": []byte("This is file 3"), + }) +} + func TestZipArchiver_Multiple(t *testing.T) { zipFilePath := filepath.Join(t.TempDir(), "archive-content.zip") @@ -190,21 +209,29 @@ func TestZipArchiver_Dir_DoNotExcludeSymlinkDirectories(t *testing.T) { } ensureContents(t, zipFilePath, map[string][]byte{ + "test-dir/": {}, + "test-dir/test-dir1/": {}, "test-dir/test-dir1/file1.txt": []byte("This is file 1"), "test-dir/test-dir1/file2.txt": []byte("This is file 2"), "test-dir/test-dir1/file3.txt": []byte("This is file 3"), + "test-dir/test-dir2/": {}, "test-dir/test-dir2/file1.txt": []byte("This is file 1"), "test-dir/test-dir2/file2.txt": []byte("This is file 2"), "test-dir/test-dir2/file3.txt": []byte("This is file 3"), "test-dir/test-file.txt": []byte("This is test content"), + "test-dir-with-symlink-dir/": {}, + "test-dir-with-symlink-dir/test-symlink-dir/": {}, "test-dir-with-symlink-dir/test-symlink-dir/file1.txt": []byte("This is file 1"), "test-dir-with-symlink-dir/test-symlink-dir/file2.txt": []byte("This is file 2"), "test-dir-with-symlink-dir/test-symlink-dir/file3.txt": []byte("This is file 3"), + "test-dir-with-symlink-file/": {}, "test-dir-with-symlink-file/test-file.txt": []byte("This 
is test content"), "test-dir-with-symlink-file/test-symlink.txt": []byte("This is test content"), + "test-symlink-dir/": {}, "test-symlink-dir/file1.txt": []byte("This is file 1"), "test-symlink-dir/file2.txt": []byte("This is file 2"), "test-symlink-dir/file3.txt": []byte("This is file 3"), + "test-symlink-dir-with-symlink-file/": {}, "test-symlink-dir-with-symlink-file/test-file.txt": []byte("This is test content"), "test-symlink-dir-with-symlink-file/test-symlink.txt": []byte("This is test content"), }) @@ -240,20 +267,28 @@ func TestZipArchiver_Dir_Exclude_DoNotExcludeSymlinkDirectories(t *testing.T) { } ensureContents(t, zipFilePath, map[string][]byte{ + "test-dir/": {}, + "test-dir/test-dir1/": {}, "test-dir/test-dir1/file1.txt": []byte("This is file 1"), "test-dir/test-dir1/file2.txt": []byte("This is file 2"), "test-dir/test-dir1/file3.txt": []byte("This is file 3"), + "test-dir/test-dir2/": {}, "test-dir/test-dir2/file1.txt": []byte("This is file 1"), "test-dir/test-dir2/file2.txt": []byte("This is file 2"), "test-dir/test-dir2/file3.txt": []byte("This is file 3"), "test-dir/test-file.txt": []byte("This is test content"), + "test-dir-with-symlink-dir/": {}, + "test-dir-with-symlink-dir/test-symlink-dir/": {}, "test-dir-with-symlink-dir/test-symlink-dir/file1.txt": []byte("This is file 1"), "test-dir-with-symlink-dir/test-symlink-dir/file2.txt": []byte("This is file 2"), "test-dir-with-symlink-dir/test-symlink-dir/file3.txt": []byte("This is file 3"), + "test-dir-with-symlink-file/": {}, "test-dir-with-symlink-file/test-file.txt": []byte("This is test content"), "test-dir-with-symlink-file/test-symlink.txt": []byte("This is test content"), + "test-symlink-dir/": {}, "test-symlink-dir/file2.txt": []byte("This is file 2"), "test-symlink-dir/file3.txt": []byte("This is file 3"), + "test-symlink-dir-with-symlink-file/": {}, "test-symlink-dir-with-symlink-file/test-file.txt": []byte("This is test content"), }) } @@ -274,13 +309,13 @@ func TestZipArchiver_Dir_Exclude_ExcludeSymlinkDirectories(t *testing.T) { found := regex.Match([]byte(err.Error())) if !found { - t.Fatalf("expedted error to match %q, got: %s", regex.String(), err.Error()) + t.Fatalf("expected error to match %q, got: %s", regex.String(), err.Error()) } } -func ensureContents(t *testing.T, zipfilepath string, wants map[string][]byte) { +func ensureContents(t *testing.T, zipFilePath string, wants map[string][]byte) { t.Helper() - r, err := zip.OpenReader(zipfilepath) + r, err := zip.OpenReader(zipFilePath) if err != nil { t.Fatalf("could not open zip file: %s", err) } @@ -319,9 +354,9 @@ func ensureContent(t *testing.T, wants map[string][]byte, got *zip.File) { } } -func ensureFileMode(t *testing.T, zipfilepath string, outputFileMode string) { +func ensureFileMode(t *testing.T, zipFilePath string, outputFileMode string) { t.Helper() - r, err := zip.OpenReader(zipfilepath) + r, err := zip.OpenReader(zipFilePath) if err != nil { t.Fatalf("could not open zip file: %s", err) } diff --git a/templates/data-sources/file.md.tmpl b/templates/data-sources/file.md.tmpl index bf6ed7b7..988816cd 100644 --- a/templates/data-sources/file.md.tmpl +++ b/templates/data-sources/file.md.tmpl @@ -11,10 +11,24 @@ description: |- ## Example Usage -{{ tffile "examples/data-sources/file/data-source.tf" }} +{{ tffile "examples/archive-file-s3/main.tf" }} -{{ tffile "examples/data-sources/file/multiple-files.tf" }} +{{ tffile "examples/lambda/main.tf" }} -{{ tffile "examples/data-sources/file/lambda.tf" }} +{{ tffile "examples/multiple-files/main.tf" 
}} + +{{ tffile "examples/source-dir/main.tf" }} + +{{ tffile "examples/source-dir-tgz/main.tf" }} + +{{ tffile "examples/source-dirs-py/main.tf" }} + +{{ tffile "examples/source-file/main.tf" }} + +{{ tffile "examples/source-file-content/main.tf" }} + +{{ tffile "examples/source-file-content-tgz/main.tf" }} + +{{ tffile "examples/source-file-tgz/main.tf" }} {{ .SchemaMarkdown | trimspace }}