From d41a0d64bc3bf3ec456dc5fb009700c608e07a89 Mon Sep 17 00:00:00 2001 From: Yujun Zhang Date: Sat, 29 Feb 2020 16:47:26 +0800 Subject: [PATCH] Use a tailored go-getter to get rid of transitive dependencies --- api/go.mod | 8 +- api/go.sum | 71 +- api/internal/getter/LICENSE | 354 ++++++++++ api/internal/getter/README.md | 5 + api/internal/getter/checksum.go | 314 +++++++++ api/internal/getter/client.go | 298 ++++++++ api/internal/getter/client_mode.go | 24 + api/internal/getter/client_option.go | 46 ++ api/internal/getter/client_option_progress.go | 38 ++ .../getter/client_option_progress_test.go | 65 ++ api/internal/getter/common.go | 14 + api/internal/getter/copy_dir.go | 78 +++ api/internal/getter/decompress.go | 58 ++ api/internal/getter/decompress_bzip2.go | 45 ++ api/internal/getter/decompress_bzip2_test.go | 34 + api/internal/getter/decompress_gzip.go | 49 ++ api/internal/getter/decompress_gzip_test.go | 34 + api/internal/getter/decompress_tar.go | 160 +++++ api/internal/getter/decompress_tar_test.go | 43 ++ api/internal/getter/decompress_tbz2.go | 33 + api/internal/getter/decompress_tbz2_test.go | 73 ++ api/internal/getter/decompress_testing.go | 171 +++++ api/internal/getter/decompress_tgz.go | 39 ++ api/internal/getter/decompress_tgz_test.go | 95 +++ api/internal/getter/decompress_txz.go | 39 ++ api/internal/getter/decompress_txz_test.go | 84 +++ api/internal/getter/decompress_xz.go | 49 ++ api/internal/getter/decompress_xz_test.go | 34 + api/internal/getter/decompress_zip.go | 101 +++ api/internal/getter/decompress_zip_test.go | 98 +++ api/internal/getter/detect.go | 103 +++ api/internal/getter/detect_bitbucket.go | 66 ++ api/internal/getter/detect_bitbucket_test.go | 67 ++ api/internal/getter/detect_file.go | 67 ++ api/internal/getter/detect_file_test.go | 115 ++++ api/internal/getter/detect_file_unix_test.go | 70 ++ api/internal/getter/detect_git.go | 26 + api/internal/getter/detect_git_test.go | 69 ++ api/internal/getter/detect_github.go | 47 ++ api/internal/getter/detect_github_test.go | 44 ++ api/internal/getter/detect_ssh.go | 49 ++ api/internal/getter/detect_test.go | 92 +++ api/internal/getter/folder_storage.go | 65 ++ api/internal/getter/folder_storage_test.go | 48 ++ api/internal/getter/get.go | 150 ++++ api/internal/getter/get_base.go | 20 + api/internal/getter/get_file.go | 36 + api/internal/getter/get_file_copy.go | 29 + api/internal/getter/get_file_copy_test.go | 82 +++ api/internal/getter/get_file_test.go | 204 ++++++ api/internal/getter/get_file_unix.go | 103 +++ api/internal/getter/get_file_windows.go | 136 ++++ api/internal/getter/get_git.go | 313 +++++++++ api/internal/getter/get_git_test.go | 643 ++++++++++++++++++ api/internal/getter/get_hg.go | 135 ++++ api/internal/getter/get_hg_test.go | 99 +++ api/internal/getter/get_http.go | 328 +++++++++ api/internal/getter/get_http_test.go | 516 ++++++++++++++ api/internal/getter/get_mock.go | 54 ++ api/internal/getter/get_test.go | 524 ++++++++++++++ api/internal/getter/helper/url/url.go | 14 + api/internal/getter/helper/url/url_test.go | 88 +++ api/internal/getter/helper/url/url_unix.go | 11 + api/internal/getter/helper/url/url_windows.go | 39 ++ api/internal/getter/module_test.go | 89 +++ api/internal/getter/netrc.go | 67 ++ api/internal/getter/netrc_test.go | 63 ++ api/internal/getter/source.go | 75 ++ api/internal/getter/source_test.go | 106 +++ api/internal/getter/storage.go | 13 + .../archive-rooted-multi/archive.tar.gz | Bin 0 -> 210 bytes .../testdata/archive-rooted/archive.tar.gz | Bin 0 -> 173 bytes 
api/internal/getter/testdata/archive.tar.gz | Bin 0 -> 141 bytes .../getter/testdata/basic%2Ftest/foo/main.tf | 1 + .../getter/testdata/basic%2Ftest/main.tf | 5 + .../testdata/basic%2Ftest/subdir/sub.tf | 0 .../getter/testdata/basic-dot/main.tf | 5 + .../basic-file-archive/archive.tar.gz | Bin 0 -> 141 bytes .../getter/testdata/basic-file/foo.txt | 1 + .../testdata/basic-hg/.hg/00changelog.i | Bin 0 -> 57 bytes .../getter/testdata/basic-hg/.hg/branch | 1 + .../basic-hg/.hg/cache/branch2-served | 3 + .../testdata/basic-hg/.hg/cache/rbc-names-v1 | 1 + .../testdata/basic-hg/.hg/cache/rbc-revs-v1 | Bin 0 -> 8 bytes .../getter/testdata/basic-hg/.hg/cache/tags | 2 + .../getter/testdata/basic-hg/.hg/dirstate | Bin 0 -> 88 bytes .../testdata/basic-hg/.hg/last-message.txt | 2 + .../getter/testdata/basic-hg/.hg/requires | 4 + .../testdata/basic-hg/.hg/store/00changelog.i | Bin 0 -> 528 bytes .../testdata/basic-hg/.hg/store/00manifest.i | Bin 0 -> 376 bytes .../basic-hg/.hg/store/data/foo.txt.i | Bin 0 -> 71 bytes .../basic-hg/.hg/store/data/main.tf.i | Bin 0 -> 112 bytes .../basic-hg/.hg/store/data/main__branch.tf.i | Bin 0 -> 64 bytes .../testdata/basic-hg/.hg/store/fncache | 3 + .../testdata/basic-hg/.hg/store/phaseroots | 1 + .../getter/testdata/basic-hg/.hg/store/undo | Bin 0 -> 52 bytes .../basic-hg/.hg/store/undo.backup.fncache | 2 + .../basic-hg/.hg/store/undo.backupfiles | Bin 0 -> 33 bytes .../basic-hg/.hg/store/undo.phaseroots | 1 + .../testdata/basic-hg/.hg/undo.bookmarks | 0 .../getter/testdata/basic-hg/.hg/undo.branch | 1 + .../getter/testdata/basic-hg/.hg/undo.desc | 2 + .../testdata/basic-hg/.hg/undo.dirstate | Bin 0 -> 88 bytes api/internal/getter/testdata/basic-hg/foo.txt | 1 + api/internal/getter/testdata/basic-hg/main.tf | 5 + .../getter/testdata/basic-parent/a/a.tf | 3 + .../getter/testdata/basic-parent/c/c.tf | 1 + .../getter/testdata/basic-parent/main.tf | 3 + .../testdata/basic-subdir/foo/sub/baz/main.tf | 0 .../testdata/basic-subdir/foo/sub/main.tf | 3 + .../getter/testdata/basic-subdir/main.tf | 3 + .../getter/testdata/basic-tgz/main.tf | 1 + .../getter/testdata/basic/foo/main.tf | 1 + api/internal/getter/testdata/basic/main.tf | 5 + .../getter/testdata/basic/subdir/sub.tf | 0 .../getter/testdata/checksum-file/content.txt | 1 + .../testdata/checksum-file/md5-bsd-bad.sum | 1 + .../testdata/checksum-file/md5-bsd-wrong.sum | 1 + .../getter/testdata/checksum-file/md5-bsd.sum | 1 + .../getter/testdata/checksum-file/md5-p.sum | 1 + .../getter/testdata/checksum-file/sha1-p.sum | 1 + .../getter/testdata/checksum-file/sha1.sum | 1 + .../testdata/checksum-file/sha256-p.sum | 1 + .../testdata/checksum-file/sha512-p.sum | 1 + .../getter/testdata/child/foo/bar/main.tf | 2 + .../getter/testdata/child/foo/main.tf | 5 + api/internal/getter/testdata/child/main.tf | 5 + .../getter/testdata/decompress-bz2/single.bz2 | Bin 0 -> 40 bytes .../getter/testdata/decompress-gz/single.gz | Bin 0 -> 29 bytes .../decompress-tar/extended_header.tar | Bin 0 -> 10240 bytes .../testdata/decompress-tar/implied_dir.tar | Bin 0 -> 2048 bytes .../testdata/decompress-tar/unix_time_0.tar | Bin 0 -> 10240 bytes .../testdata/decompress-tbz2/empty.tar.bz2 | Bin 0 -> 46 bytes .../testdata/decompress-tbz2/multiple.tar.bz2 | Bin 0 -> 166 bytes .../testdata/decompress-tbz2/ordering.tar.bz2 | Bin 0 -> 248 bytes .../testdata/decompress-tbz2/single.tar.bz2 | Bin 0 -> 135 bytes .../testdata/decompress-tgz/empty.tar.gz | Bin 0 -> 45 bytes .../testdata/decompress-tgz/multiple.tar.gz | Bin 0 -> 157 bytes 
.../decompress-tgz/multiple_dir.tar.gz | Bin 0 -> 194 bytes .../testdata/decompress-tgz/ordering.tar.gz | Bin 0 -> 187 bytes .../decompress-tgz/outside_parent.tar.gz | Bin 0 -> 192 bytes .../testdata/decompress-tgz/single.tar.gz | Bin 0 -> 137 bytes .../testdata/decompress-txz/empty.tar.xz | Bin 0 -> 108 bytes .../testdata/decompress-txz/multiple.tar.xz | Bin 0 -> 204 bytes .../decompress-txz/multiple_dir.tar.xz | Bin 0 -> 232 bytes .../testdata/decompress-txz/ordering.tar.xz | Bin 0 -> 220 bytes .../testdata/decompress-txz/single.tar.xz | Bin 0 -> 192 bytes .../getter/testdata/decompress-xz/single.xz | Bin 0 -> 60 bytes .../getter/testdata/decompress-zip/empty.zip | Bin 0 -> 22 bytes .../testdata/decompress-zip/multiple.zip | Bin 0 -> 306 bytes .../decompress-zip/outside_parent.zip | Bin 0 -> 237 bytes .../getter/testdata/decompress-zip/single.zip | Bin 0 -> 162 bytes .../getter/testdata/decompress-zip/subdir.zip | Bin 0 -> 466 bytes .../testdata/decompress-zip/subdir_empty.zip | Bin 0 -> 308 bytes .../decompress-zip/subdir_missing_dir.zip | Bin 0 -> 324 bytes .../detect-file-symlink-pwd/real/hello.txt | 0 .../testdata/detect-file-symlink-pwd/syml/pwd | 1 + api/internal/getter/testdata/dup/foo/main.tf | 0 api/internal/getter/testdata/dup/main.tf | 7 + api/internal/getter/testdata/netrc/basic | 3 + .../child/main.tf | 1 + .../validate-bad-output-to-module/main.tf | 8 + .../validate-bad-output/child/main.tf | 0 .../testdata/validate-bad-output/main.tf | 7 + .../testdata/validate-bad-var/child/main.tf | 0 .../getter/testdata/validate-bad-var/main.tf | 5 + .../testdata/validate-child-bad/child/main.tf | 3 + .../testdata/validate-child-bad/main.tf | 3 + .../validate-child-good/child/main.tf | 3 + .../testdata/validate-child-good/main.tf | 8 + .../validate-required-var/child/main.tf | 1 + .../testdata/validate-required-var/main.tf | 3 + .../getter/testdata/validate-root-bad/main.tf | 3 + api/internal/getter/util_test.go | 49 ++ api/loader/loader.go | 3 +- 175 files changed, 7541 insertions(+), 68 deletions(-) create mode 100644 api/internal/getter/LICENSE create mode 100644 api/internal/getter/README.md create mode 100644 api/internal/getter/checksum.go create mode 100644 api/internal/getter/client.go create mode 100644 api/internal/getter/client_mode.go create mode 100644 api/internal/getter/client_option.go create mode 100644 api/internal/getter/client_option_progress.go create mode 100644 api/internal/getter/client_option_progress_test.go create mode 100644 api/internal/getter/common.go create mode 100644 api/internal/getter/copy_dir.go create mode 100644 api/internal/getter/decompress.go create mode 100644 api/internal/getter/decompress_bzip2.go create mode 100644 api/internal/getter/decompress_bzip2_test.go create mode 100644 api/internal/getter/decompress_gzip.go create mode 100644 api/internal/getter/decompress_gzip_test.go create mode 100644 api/internal/getter/decompress_tar.go create mode 100644 api/internal/getter/decompress_tar_test.go create mode 100644 api/internal/getter/decompress_tbz2.go create mode 100644 api/internal/getter/decompress_tbz2_test.go create mode 100644 api/internal/getter/decompress_testing.go create mode 100644 api/internal/getter/decompress_tgz.go create mode 100644 api/internal/getter/decompress_tgz_test.go create mode 100644 api/internal/getter/decompress_txz.go create mode 100644 api/internal/getter/decompress_txz_test.go create mode 100644 api/internal/getter/decompress_xz.go create mode 100644 api/internal/getter/decompress_xz_test.go create mode 100644 
api/internal/getter/decompress_zip.go create mode 100644 api/internal/getter/decompress_zip_test.go create mode 100644 api/internal/getter/detect.go create mode 100644 api/internal/getter/detect_bitbucket.go create mode 100644 api/internal/getter/detect_bitbucket_test.go create mode 100644 api/internal/getter/detect_file.go create mode 100644 api/internal/getter/detect_file_test.go create mode 100644 api/internal/getter/detect_file_unix_test.go create mode 100644 api/internal/getter/detect_git.go create mode 100644 api/internal/getter/detect_git_test.go create mode 100644 api/internal/getter/detect_github.go create mode 100644 api/internal/getter/detect_github_test.go create mode 100644 api/internal/getter/detect_ssh.go create mode 100644 api/internal/getter/detect_test.go create mode 100644 api/internal/getter/folder_storage.go create mode 100644 api/internal/getter/folder_storage_test.go create mode 100644 api/internal/getter/get.go create mode 100644 api/internal/getter/get_base.go create mode 100644 api/internal/getter/get_file.go create mode 100644 api/internal/getter/get_file_copy.go create mode 100644 api/internal/getter/get_file_copy_test.go create mode 100644 api/internal/getter/get_file_test.go create mode 100644 api/internal/getter/get_file_unix.go create mode 100644 api/internal/getter/get_file_windows.go create mode 100644 api/internal/getter/get_git.go create mode 100644 api/internal/getter/get_git_test.go create mode 100644 api/internal/getter/get_hg.go create mode 100644 api/internal/getter/get_hg_test.go create mode 100644 api/internal/getter/get_http.go create mode 100644 api/internal/getter/get_http_test.go create mode 100644 api/internal/getter/get_mock.go create mode 100644 api/internal/getter/get_test.go create mode 100644 api/internal/getter/helper/url/url.go create mode 100644 api/internal/getter/helper/url/url_test.go create mode 100644 api/internal/getter/helper/url/url_unix.go create mode 100644 api/internal/getter/helper/url/url_windows.go create mode 100644 api/internal/getter/module_test.go create mode 100644 api/internal/getter/netrc.go create mode 100644 api/internal/getter/netrc_test.go create mode 100644 api/internal/getter/source.go create mode 100644 api/internal/getter/source_test.go create mode 100644 api/internal/getter/storage.go create mode 100644 api/internal/getter/testdata/archive-rooted-multi/archive.tar.gz create mode 100644 api/internal/getter/testdata/archive-rooted/archive.tar.gz create mode 100644 api/internal/getter/testdata/archive.tar.gz create mode 100644 api/internal/getter/testdata/basic%2Ftest/foo/main.tf create mode 100644 api/internal/getter/testdata/basic%2Ftest/main.tf create mode 100644 api/internal/getter/testdata/basic%2Ftest/subdir/sub.tf create mode 100644 api/internal/getter/testdata/basic-dot/main.tf create mode 100644 api/internal/getter/testdata/basic-file-archive/archive.tar.gz create mode 100644 api/internal/getter/testdata/basic-file/foo.txt create mode 100644 api/internal/getter/testdata/basic-hg/.hg/00changelog.i create mode 100644 api/internal/getter/testdata/basic-hg/.hg/branch create mode 100644 api/internal/getter/testdata/basic-hg/.hg/cache/branch2-served create mode 100644 api/internal/getter/testdata/basic-hg/.hg/cache/rbc-names-v1 create mode 100644 api/internal/getter/testdata/basic-hg/.hg/cache/rbc-revs-v1 create mode 100644 api/internal/getter/testdata/basic-hg/.hg/cache/tags create mode 100644 api/internal/getter/testdata/basic-hg/.hg/dirstate create mode 100644 
api/internal/getter/testdata/basic-hg/.hg/last-message.txt create mode 100644 api/internal/getter/testdata/basic-hg/.hg/requires create mode 100644 api/internal/getter/testdata/basic-hg/.hg/store/00changelog.i create mode 100644 api/internal/getter/testdata/basic-hg/.hg/store/00manifest.i create mode 100644 api/internal/getter/testdata/basic-hg/.hg/store/data/foo.txt.i create mode 100644 api/internal/getter/testdata/basic-hg/.hg/store/data/main.tf.i create mode 100644 api/internal/getter/testdata/basic-hg/.hg/store/data/main__branch.tf.i create mode 100644 api/internal/getter/testdata/basic-hg/.hg/store/fncache create mode 100644 api/internal/getter/testdata/basic-hg/.hg/store/phaseroots create mode 100644 api/internal/getter/testdata/basic-hg/.hg/store/undo create mode 100644 api/internal/getter/testdata/basic-hg/.hg/store/undo.backup.fncache create mode 100644 api/internal/getter/testdata/basic-hg/.hg/store/undo.backupfiles create mode 100644 api/internal/getter/testdata/basic-hg/.hg/store/undo.phaseroots create mode 100644 api/internal/getter/testdata/basic-hg/.hg/undo.bookmarks create mode 100644 api/internal/getter/testdata/basic-hg/.hg/undo.branch create mode 100644 api/internal/getter/testdata/basic-hg/.hg/undo.desc create mode 100644 api/internal/getter/testdata/basic-hg/.hg/undo.dirstate create mode 100644 api/internal/getter/testdata/basic-hg/foo.txt create mode 100644 api/internal/getter/testdata/basic-hg/main.tf create mode 100644 api/internal/getter/testdata/basic-parent/a/a.tf create mode 100644 api/internal/getter/testdata/basic-parent/c/c.tf create mode 100644 api/internal/getter/testdata/basic-parent/main.tf create mode 100644 api/internal/getter/testdata/basic-subdir/foo/sub/baz/main.tf create mode 100644 api/internal/getter/testdata/basic-subdir/foo/sub/main.tf create mode 100644 api/internal/getter/testdata/basic-subdir/main.tf create mode 100644 api/internal/getter/testdata/basic-tgz/main.tf create mode 100644 api/internal/getter/testdata/basic/foo/main.tf create mode 100644 api/internal/getter/testdata/basic/main.tf create mode 100644 api/internal/getter/testdata/basic/subdir/sub.tf create mode 100644 api/internal/getter/testdata/checksum-file/content.txt create mode 100644 api/internal/getter/testdata/checksum-file/md5-bsd-bad.sum create mode 100644 api/internal/getter/testdata/checksum-file/md5-bsd-wrong.sum create mode 100644 api/internal/getter/testdata/checksum-file/md5-bsd.sum create mode 100644 api/internal/getter/testdata/checksum-file/md5-p.sum create mode 100644 api/internal/getter/testdata/checksum-file/sha1-p.sum create mode 100644 api/internal/getter/testdata/checksum-file/sha1.sum create mode 100644 api/internal/getter/testdata/checksum-file/sha256-p.sum create mode 100644 api/internal/getter/testdata/checksum-file/sha512-p.sum create mode 100644 api/internal/getter/testdata/child/foo/bar/main.tf create mode 100644 api/internal/getter/testdata/child/foo/main.tf create mode 100644 api/internal/getter/testdata/child/main.tf create mode 100644 api/internal/getter/testdata/decompress-bz2/single.bz2 create mode 100644 api/internal/getter/testdata/decompress-gz/single.gz create mode 100644 api/internal/getter/testdata/decompress-tar/extended_header.tar create mode 100644 api/internal/getter/testdata/decompress-tar/implied_dir.tar create mode 100644 api/internal/getter/testdata/decompress-tar/unix_time_0.tar create mode 100644 api/internal/getter/testdata/decompress-tbz2/empty.tar.bz2 create mode 100644 
api/internal/getter/testdata/decompress-tbz2/multiple.tar.bz2 create mode 100644 api/internal/getter/testdata/decompress-tbz2/ordering.tar.bz2 create mode 100644 api/internal/getter/testdata/decompress-tbz2/single.tar.bz2 create mode 100644 api/internal/getter/testdata/decompress-tgz/empty.tar.gz create mode 100644 api/internal/getter/testdata/decompress-tgz/multiple.tar.gz create mode 100644 api/internal/getter/testdata/decompress-tgz/multiple_dir.tar.gz create mode 100644 api/internal/getter/testdata/decompress-tgz/ordering.tar.gz create mode 100644 api/internal/getter/testdata/decompress-tgz/outside_parent.tar.gz create mode 100644 api/internal/getter/testdata/decompress-tgz/single.tar.gz create mode 100644 api/internal/getter/testdata/decompress-txz/empty.tar.xz create mode 100644 api/internal/getter/testdata/decompress-txz/multiple.tar.xz create mode 100644 api/internal/getter/testdata/decompress-txz/multiple_dir.tar.xz create mode 100644 api/internal/getter/testdata/decompress-txz/ordering.tar.xz create mode 100644 api/internal/getter/testdata/decompress-txz/single.tar.xz create mode 100644 api/internal/getter/testdata/decompress-xz/single.xz create mode 100644 api/internal/getter/testdata/decompress-zip/empty.zip create mode 100644 api/internal/getter/testdata/decompress-zip/multiple.zip create mode 100644 api/internal/getter/testdata/decompress-zip/outside_parent.zip create mode 100644 api/internal/getter/testdata/decompress-zip/single.zip create mode 100644 api/internal/getter/testdata/decompress-zip/subdir.zip create mode 100644 api/internal/getter/testdata/decompress-zip/subdir_empty.zip create mode 100644 api/internal/getter/testdata/decompress-zip/subdir_missing_dir.zip create mode 100644 api/internal/getter/testdata/detect-file-symlink-pwd/real/hello.txt create mode 120000 api/internal/getter/testdata/detect-file-symlink-pwd/syml/pwd create mode 100644 api/internal/getter/testdata/dup/foo/main.tf create mode 100644 api/internal/getter/testdata/dup/main.tf create mode 100644 api/internal/getter/testdata/netrc/basic create mode 100644 api/internal/getter/testdata/validate-bad-output-to-module/child/main.tf create mode 100644 api/internal/getter/testdata/validate-bad-output-to-module/main.tf create mode 100644 api/internal/getter/testdata/validate-bad-output/child/main.tf create mode 100644 api/internal/getter/testdata/validate-bad-output/main.tf create mode 100644 api/internal/getter/testdata/validate-bad-var/child/main.tf create mode 100644 api/internal/getter/testdata/validate-bad-var/main.tf create mode 100644 api/internal/getter/testdata/validate-child-bad/child/main.tf create mode 100644 api/internal/getter/testdata/validate-child-bad/main.tf create mode 100644 api/internal/getter/testdata/validate-child-good/child/main.tf create mode 100644 api/internal/getter/testdata/validate-child-good/main.tf create mode 100644 api/internal/getter/testdata/validate-required-var/child/main.tf create mode 100644 api/internal/getter/testdata/validate-required-var/main.tf create mode 100644 api/internal/getter/testdata/validate-root-bad/main.tf create mode 100644 api/internal/getter/util_test.go diff --git a/api/go.mod b/api/go.mod index 82c41f64357..55b61a2ab0a 100644 --- a/api/go.mod +++ b/api/go.mod @@ -3,12 +3,18 @@ module sigs.k8s.io/kustomize/api go 1.13 require ( + github.com/bgentry/go-netrc v0.0.0-20140422174119-9fd32a8b3d3d github.com/evanphx/json-patch v4.5.0+incompatible github.com/go-openapi/spec v0.19.4 github.com/golangci/golangci-lint v1.21.0 
github.com/google/shlex v0.0.0-20191202100458-e7afc7fbc510 - github.com/hashicorp/go-getter v1.4.1 + github.com/hashicorp/go-cleanhttp v0.5.1 + github.com/hashicorp/go-safetemp v1.0.0 + github.com/hashicorp/go-version v1.2.0 + github.com/mitchellh/go-homedir v1.1.0 + github.com/mitchellh/go-testing-interface v1.0.0 github.com/pkg/errors v0.8.1 + github.com/ulikunitz/xz v0.5.7 golang.org/x/tools v0.0.0-20191010075000-0337d82405ff gopkg.in/yaml.v2 v2.2.4 k8s.io/api v0.17.0 diff --git a/api/go.sum b/api/go.sum index fa9a4c73364..583b93dae26 100644 --- a/api/go.sum +++ b/api/go.sum @@ -1,12 +1,6 @@ cloud.google.com/go v0.26.0/go.mod h1:aQUYkXzVsufM+DwF1aE+0xfcU+56JwCaLick0ClmMTw= cloud.google.com/go v0.34.0/go.mod h1:aQUYkXzVsufM+DwF1aE+0xfcU+56JwCaLick0ClmMTw= cloud.google.com/go v0.38.0/go.mod h1:990N+gfupTy94rShfmMCWGDn0LpTmnzTp2qbd1dvSRU= -cloud.google.com/go v0.44.1/go.mod h1:iSa0KzasP4Uvy3f1mN/7PiObzGgflwredwwASm/v6AU= -cloud.google.com/go v0.44.2/go.mod h1:60680Gw3Yr4ikxnPRS/oxxkBccT6SA1yMk63TGekxKY= -cloud.google.com/go v0.45.1 h1:lRi0CHyU+ytlvylOlFKKq0af6JncuyoRh1J+QJBqQx0= -cloud.google.com/go v0.45.1/go.mod h1:RpBamKRgapWJb87xiFSdk4g1CME7QZg3uwTez+TSTjc= -cloud.google.com/go/bigquery v1.0.1/go.mod h1:i/xbL2UlR5RvWAURpBYZTtm/cXjCha9lbfbpx4poX+o= -cloud.google.com/go/datastore v1.0.0/go.mod h1:LXYbyblFSglQ5pkeyhO+Qmw7ukd3C+pD7TKLgZqpHYE= github.com/Azure/go-autorest/autorest v0.9.0/go.mod h1:xyHB1BMZT0cuDHU7I0+g046+BFDTQ8rEZB0s4Yfa6bI= github.com/Azure/go-autorest/autorest/adal v0.5.0/go.mod h1:8Z9fGy2MpX0PvDjB1pEgQTmVqjGhiHBW7RJJEciWzS0= github.com/Azure/go-autorest/autorest/date v0.1.0/go.mod h1:plvfp3oPSKwf2DNjlBjWF/7vwR+cUD/ELuzDCXwHUVA= @@ -16,7 +10,6 @@ github.com/Azure/go-autorest/logger v0.1.0/go.mod h1:oExouG+K6PryycPJfVSxi/koC6L github.com/Azure/go-autorest/tracing v0.5.0/go.mod h1:r/s2XiOKccPW3HrqB+W0TQzfbtp2fGCgRFtBroKn4Dk= github.com/BurntSushi/toml v0.3.1 h1:WXkYYl6Yr3qBf1K79EBnL4mak0OimBfB0XUf9Vl28OQ= github.com/BurntSushi/toml v0.3.1/go.mod h1:xHWCNGjB5oqiDr8zfno3MHue2Ht5sIBksp03qcyfWMU= -github.com/BurntSushi/xgb v0.0.0-20160522181843-27f122750802/go.mod h1:IVnqGOEym/WlBOVXweHU+Q+/VP0lqqI8lqeDx9IjBqo= github.com/NYTimes/gziphandler v0.0.0-20170623195520-56545f4a5d46/go.mod h1:3wb06e3pkSAbeQ52E9H9iFoQsEEwGN64994WTCIhntQ= github.com/OneOfOne/xxhash v1.2.2/go.mod h1:HSdplMjZKSmBqAxg5vPj2TmRDmfkzw+cTzAElWljhcU= github.com/OpenPeeDeeP/depguard v1.0.1 h1:VlW4R6jmBIv3/u1JNlawEvJMM4J+dPORPaZasQee8Us= @@ -31,8 +24,6 @@ github.com/StackExchange/wmi v0.0.0-20180116203802-5d049714c4a6/go.mod h1:3eOhrU github.com/alecthomas/template v0.0.0-20160405071501-a0175ee3bccc/go.mod h1:LOuyumcjzFXgccqObfd/Ljyb9UuFJ6TxHnclSeseNhc= github.com/alecthomas/units v0.0.0-20151022065526-2efee857e7cf/go.mod h1:ybxpYRFXyAe+OPACYpWeL0wqObRcbAqCMya13uyzqw0= github.com/armon/consul-api v0.0.0-20180202201655-eb2c6b5be1b6/go.mod h1:grANhF5doyWs3UAsr3K4I6qtAmlQcZDesFNEHPZAzj8= -github.com/aws/aws-sdk-go v1.15.78 h1:LaXy6lWR0YK7LKyuU0QWy2ws/LWTPfYV/UgfiBu4tvY= -github.com/aws/aws-sdk-go v1.15.78/go.mod h1:E3/ieXAlvM0XWO57iftYVDLLvQ824smPP3ATZkfNZeM= github.com/beorn7/perks v0.0.0-20180321164747-3a771d992973/go.mod h1:Dwedo/Wpr24TaqPxmxbtue+5NUziq4I4S80YR8gNf3Q= github.com/beorn7/perks v1.0.0/go.mod h1:KWe93zE9D1o94FZ5RNwFwVgaQK1VOXiVxmqh+CedLV8= github.com/bgentry/go-netrc v0.0.0-20140422174119-9fd32a8b3d3d h1:xDfNPAt8lFiC1UJrqV3uuy861HCTo708pDMbjHHdCas= @@ -40,7 +31,6 @@ 
github.com/bgentry/go-netrc v0.0.0-20140422174119-9fd32a8b3d3d/go.mod h1:6QX/PXZ github.com/bombsimon/wsl v1.2.5 h1:9gTOkIwVtoDZywvX802SDHokeX4kW1cKnV8ZTVAPkRs= github.com/bombsimon/wsl v1.2.5/go.mod h1:43lEF/i0kpXbLCeDXL9LMT8c92HyBywXb0AsgMHYngM= github.com/cespare/xxhash v1.1.0/go.mod h1:XrSqR1VqqWfGrhpAt58auRo0WTKS1nRRg3ghfAqPWnc= -github.com/cheggaaa/pb v1.0.27/go.mod h1:pQciLPpbU0oxA0h+VJYYLxO+XeDQb5pZijXscXHm81s= github.com/client9/misspell v0.3.4/go.mod h1:qj6jICC3Q7zFZvVWo7KLAzC3yx5G7kyvSDkc90ppPyw= github.com/coreos/bbolt v1.3.2/go.mod h1:iRUV2dpdMOn7Bo10OQBFzIJO9kkE559Wcmn+qkEiiKk= github.com/coreos/etcd v3.3.10+incompatible/go.mod h1:uF7uidLiAD3TWHmW31ZFd/JWoc32PjwdhPthX9715RE= @@ -128,7 +118,6 @@ github.com/golang/groupcache v0.0.0-20160516000752-02826c3e7903/go.mod h1:cIg4er github.com/golang/groupcache v0.0.0-20190129154638-5b532d6fd5ef/go.mod h1:cIg4eruTrX1D+g88fzRXU5OdNfaM+9IcxsU14FzY7Hc= github.com/golang/mock v1.1.1/go.mod h1:oTYuIxOrZwtPieC+H1uAHpcLFnEyAGVDL/k47Jfbm0A= github.com/golang/mock v1.2.0/go.mod h1:oTYuIxOrZwtPieC+H1uAHpcLFnEyAGVDL/k47Jfbm0A= -github.com/golang/mock v1.3.1/go.mod h1:sBzyDLLjw3U8JLTeZvSv8jJB+tU5PVekmnlKIyFUx0Y= github.com/golang/protobuf v0.0.0-20161109072736-4bd1920723d7/go.mod h1:6lQm79b+lXiMfvg/cZm0SGofjICqVBUtrP5yJMmIC1U= github.com/golang/protobuf v1.2.0/go.mod h1:6lQm79b+lXiMfvg/cZm0SGofjICqVBUtrP5yJMmIC1U= github.com/golang/protobuf v1.3.1/go.mod h1:6lQm79b+lXiMfvg/cZm0SGofjICqVBUtrP5yJMmIC1U= @@ -174,14 +163,11 @@ github.com/google/gofuzz v1.0.0 h1:A8PeW59pxE9IoFRqBp37U+mSNaQoZ46F1f0f863XSXw= github.com/google/gofuzz v1.0.0/go.mod h1:dBl0BpW6vV/+mYPU4Po3pmUjxk6FQPldtuIdl/M65Eg= github.com/google/martian v2.1.0+incompatible/go.mod h1:9I4somxYTbIHy5NJKHRl3wXiIaQGbYVAs8BPL6v8lEs= github.com/google/pprof v0.0.0-20181206194817-3ea8567a2e57/go.mod h1:zfwlbNMJ+OItoe0UupaVj+oy1omPYYDuagoSzA8v9mc= -github.com/google/pprof v0.0.0-20190515194954-54271f7e092f/go.mod h1:zfwlbNMJ+OItoe0UupaVj+oy1omPYYDuagoSzA8v9mc= github.com/google/renameio v0.1.0/go.mod h1:KWCgfxg9yswjAJkECMjeO8J8rahYeXnNhOm40UhjYkI= github.com/google/shlex v0.0.0-20191202100458-e7afc7fbc510 h1:El6M4kTTCOh6aBiKaUGG7oYTSPP8MxqL4YI3kZKwcP4= github.com/google/shlex v0.0.0-20191202100458-e7afc7fbc510/go.mod h1:pupxD2MaaD3pAXIBCelhxNneeOaAeabZDe5s4K6zSpQ= github.com/google/uuid v1.1.1/go.mod h1:TIyPZe4MgqvfeYDBFedMoGGpEw/LqOeaOT+nhxU+yHo= github.com/googleapis/gax-go/v2 v2.0.4/go.mod h1:0Wqv26UfaUD9n4G6kQubkQ+KchISgw+vpHVxEJEs9eg= -github.com/googleapis/gax-go/v2 v2.0.5 h1:sjZBwGj9Jlw33ImPtvFviGYvseOtDM7hkSKB7+Tv3SM= -github.com/googleapis/gax-go/v2 v2.0.5/go.mod h1:DWXyrwAJ9X0FpwwEdw+IPEYBICEFu5mhpdKc/us6bOk= github.com/googleapis/gnostic v0.0.0-20170729233727-0c5108395e2d h1:7XGaL1e6bYS1yIonGp9761ExpPPV1ui0SAC59Yube9k= github.com/googleapis/gnostic v0.0.0-20170729233727-0c5108395e2d/go.mod h1:sJBsCZ4ayReDTBIg8b9dl28c5xFWyhBTVRp3pOg5EKY= github.com/gophercloud/gophercloud v0.1.0/go.mod h1:vxM41WHh5uqHVBMZHzuwNOHh8XEoIEcSTewFxm1c5g8= @@ -192,14 +178,12 @@ github.com/gregjones/httpcache v0.0.0-20180305231024-9cad4c3443a7/go.mod h1:Fecb github.com/grpc-ecosystem/go-grpc-middleware v1.0.0/go.mod h1:FiyG127CGDf3tlThmgyCl78X/SZQqEOJBCDaAfeWzPs= github.com/grpc-ecosystem/go-grpc-prometheus v1.2.0/go.mod h1:8NvIoxWQoOIhqOTXgfV/d3M/q6VIi02HzZEHgUlZvzk= github.com/grpc-ecosystem/grpc-gateway v1.9.0/go.mod h1:vNeuVxBJEsws4ogUvrchl83t/GYV9WGTSLVdBhOQFDY= 
-github.com/hashicorp/go-cleanhttp v0.5.0 h1:wvCrVc9TjDls6+YGAF2hAifE1E5U1+b4tH6KdvN3Gig= -github.com/hashicorp/go-cleanhttp v0.5.0/go.mod h1:JpRdi6/HCYpAwUzNwuwqhbovhLtngrth3wmdIIUrZ80= -github.com/hashicorp/go-getter v1.4.1 h1:3A2Mh8smGFcf5M+gmcv898mZdrxpseik45IpcyISLsA= -github.com/hashicorp/go-getter v1.4.1/go.mod h1:7qxyCd8rBfcShwsvxgIguu4KbS3l8bUCwg2Umn7RjeY= +github.com/hashicorp/go-cleanhttp v0.5.1 h1:dH3aiDG9Jvb5r5+bYHsikaOUIpcM0xvgMXVoDkXMzJM= +github.com/hashicorp/go-cleanhttp v0.5.1/go.mod h1:JpRdi6/HCYpAwUzNwuwqhbovhLtngrth3wmdIIUrZ80= github.com/hashicorp/go-safetemp v1.0.0 h1:2HR189eFNrjHQyENnQMMpCiBAsRxzbTMIgBhEyExpmo= github.com/hashicorp/go-safetemp v1.0.0/go.mod h1:oaerMy3BhqiTbVye6QuFhFtIceqFoDHxNAB65b+Rj1I= -github.com/hashicorp/go-version v1.1.0 h1:bPIoEKD27tNdebFGGxxYwcL4nepeY4j1QP23PFRGzg0= -github.com/hashicorp/go-version v1.1.0/go.mod h1:fltr4n8CU8Ke44wwGCBoEymUuxUHl09ZGVZPK5anwXA= +github.com/hashicorp/go-version v1.2.0 h1:3vNe/fWF5CBgRIguda1meWhsZHy3m8gCJ5wx+dIzX/E= +github.com/hashicorp/go-version v1.2.0/go.mod h1:fltr4n8CU8Ke44wwGCBoEymUuxUHl09ZGVZPK5anwXA= github.com/hashicorp/golang-lru v0.5.0/go.mod h1:/m3WP610KZHVQ1SGc6re/UDhFvYD7pJ4Ao+sR/qLZy8= github.com/hashicorp/golang-lru v0.5.1 h1:0hERBMJE1eitiLkihrMvRVBYAkpHzc/J3QdDN+dAcgU= github.com/hashicorp/golang-lru v0.5.1/go.mod h1:/m3WP610KZHVQ1SGc6re/UDhFvYD7pJ4Ao+sR/qLZy8= @@ -210,8 +194,6 @@ github.com/hpcloud/tail v1.0.0/go.mod h1:ab1qPbhIpdTxEkNHXyeSf5vhxWSCs/tWer42PpO github.com/imdario/mergo v0.3.5/go.mod h1:2EnlNZ0deacrJVfApfmtdGgDfMuh/nq6Ok1EcJh5FfA= github.com/inconshreveable/mousetrap v1.0.0 h1:Z8tu5sraLXCXIcARxBp/8cbvlwVa7Z1NHg9XEKhtSvM= github.com/inconshreveable/mousetrap v1.0.0/go.mod h1:PxqpIevigyE2G7u3NXJIT2ANytuPF1OarO4DADm73n8= -github.com/jmespath/go-jmespath v0.0.0-20160202185014-0b12d6b521d8 h1:12VvqtR6Aowv3l/EQUlocDHW2Cp4G9WJVH7uyH8QFJE= -github.com/jmespath/go-jmespath v0.0.0-20160202185014-0b12d6b521d8/go.mod h1:Nht3zPeWKUH0NzdCt2Blrr5ys8VGpn0CEB0cQHVjt7k= github.com/jonboulle/clockwork v0.1.0/go.mod h1:Ii8DK3G1RaLaWxj9trq07+26W01tbo22gdxWY5EU2bo= github.com/json-iterator/go v0.0.0-20180612202835-f2b4162afba3/go.mod h1:+SdeFBvtyEkXs7REEP0seUULqWtbJapLOCVDaaPEHmU= github.com/json-iterator/go v1.1.8 h1:QiWkFLKq0T7mpzwOTu6BzNDbfTE8OLrYhVKYMLF46Ok= @@ -246,16 +228,12 @@ github.com/mailru/easyjson v0.0.0-20190626092158-b2ccc519800e h1:hB2xlXdHp/pmPZq github.com/mailru/easyjson v0.0.0-20190626092158-b2ccc519800e/go.mod h1:C1wdFJiN94OJF2b5HbByQZoLdCWB1Yqtg26g4irojpc= github.com/matoous/godox v0.0.0-20190911065817-5d6d842e92eb h1:RHba4YImhrUVQDHUCe2BNSOz4tVy2yGyXhvYDvxGgeE= github.com/matoous/godox v0.0.0-20190911065817-5d6d842e92eb/go.mod h1:1BELzlh859Sh1c6+90blK8lbYy0kwQf1bYlBhBysy1s= -github.com/mattn/go-colorable v0.0.9/go.mod h1:9vuHe8Xs5qXnSaW/c/ABM9alt+Vo+STaOChaDxuIBZU= github.com/mattn/go-colorable v0.1.4 h1:snbPLB8fVfU9iwbbo30TPtbLRzwWu6aJS6Xh4eaaviA= github.com/mattn/go-colorable v0.1.4/go.mod h1:U0ppj6V5qS13XJ6of8GYAs25YV2eR4EVcfRqFIhoBtE= -github.com/mattn/go-isatty v0.0.4/go.mod h1:M+lRXTBqGeGNdLjl/ufCoiOlB5xdOkqRJdNxMWT7Zi4= github.com/mattn/go-isatty v0.0.8 h1:HLtExJ+uU2HOZ+wI0Tt5DtUDrx8yhUqDcp7fYERX4CE= github.com/mattn/go-isatty v0.0.8/go.mod h1:Iq45c/XA43vh69/j3iqttzPXn0bhXyGjM0Hdxcsrc5s= -github.com/mattn/go-runewidth v0.0.4/go.mod h1:LwmH8dsx7+W8Uxz3IHJYH5QSwggIsqBzpuz5H//U1FU= github.com/mattn/goveralls v0.0.2/go.mod 
h1:8d1ZMHsd7fW6IRPKQh46F2WRpyib5/X4FOpevwGNQEw= github.com/matttproud/golang_protobuf_extensions v1.0.1/go.mod h1:D8He9yQNgCq6Z5Ld7szi9bcBfOoFv/3dc6xSMkL2PC0= -github.com/mitchellh/go-homedir v1.0.0/go.mod h1:SfyaCUpYCn1Vlf4IUYiD9fPX4A5wJrkLzIz1N1q0pr0= github.com/mitchellh/go-homedir v1.1.0 h1:lukF9ziXFxDFPkA1vsr5zpc1XuPDn/wFntq5mG+4E0Y= github.com/mitchellh/go-homedir v1.1.0/go.mod h1:SfyaCUpYCn1Vlf4IUYiD9fPX4A5wJrkLzIz1N1q0pr0= github.com/mitchellh/go-ps v0.0.0-20190716172923-621e5597135b/go.mod h1:r1VsdOzOPt1ZSrGZWFoNhsAedKnEd6r9Np1+5blZCWk= @@ -351,8 +329,8 @@ github.com/timakin/bodyclose v0.0.0-20190930140734-f7f2e9bca95e/go.mod h1:Qimiff github.com/tmc/grpc-websocket-proxy v0.0.0-20190109142713-0ad062ec5ee5/go.mod h1:ncp9v5uamzpCO7NfCPTXjqaC+bZgJeR0sMTm6dMHP7U= github.com/ugorji/go v1.1.4/go.mod h1:uQMGLiO92mf5W77hV/PUCpI3pbzQx3CRekS0kk+RGrc= github.com/ugorji/go/codec v0.0.0-20181204163529-d75b2dcb6bc8/go.mod h1:VFNgLljTbGfSG7qAOspJ7OScBnGdDN/yBr0sguwnwf0= -github.com/ulikunitz/xz v0.5.5 h1:pFrO0lVpTBXLpYw+pnLj6TbvHuyjXMfjGeCwSqCVwok= -github.com/ulikunitz/xz v0.5.5/go.mod h1:2bypXElzHzzJZwzH67Y6wb67pO62Rzfn7BSiF4ABRW8= +github.com/ulikunitz/xz v0.5.7 h1:YvTNdFzX6+W5m9msiYg/zpkSURPPtOlzbqYjrFn7Yt4= +github.com/ulikunitz/xz v0.5.7/go.mod h1:nbz6k7qbPmH4IRqmfOplQw/tblSgqTqBwxkY0oWt/14= github.com/ultraware/funlen v0.0.2 h1:Av96YVBwwNSe4MLR7iI/BIa3VyI7/djnto/pK3Uxbdo= github.com/ultraware/funlen v0.0.2/go.mod h1:Dp4UiAus7Wdb9KUZsYWZEWiRzGuM2kXM1lPbfaF6xhA= github.com/ultraware/whitespace v0.0.4 h1:If7Va4cM03mpgrNH9k49/VOicWpGoG70XPBFFODYDsg= @@ -367,8 +345,6 @@ github.com/xiang90/probing v0.0.0-20190116061207-43a291ad63a2/go.mod h1:UETIi67q github.com/xordataexchange/crypt v0.0.3-0.20170626215501-b2862e3d0a77/go.mod h1:aYKd//L2LvnjZzWKhF00oedf4jCCReLcmhLdhm1A27Q= go.etcd.io/bbolt v1.3.2/go.mod h1:IbVyRI1SCnLcuJnV2u8VeU0CEYM7e686BmAb1XKL+uU= go.opencensus.io v0.21.0/go.mod h1:mSImk1erAIZhrmZN+AvHh14ztQfjbGwt4TtuofqLduU= -go.opencensus.io v0.22.0 h1:C9hSCOW830chIVkdja34wa6Ky+IzWllkUinR+BtRZd4= -go.opencensus.io v0.22.0/go.mod h1:+kGneAE2xo2IficOXnaByMWTGM9T73dGwxeWcUqIpI8= go.uber.org/atomic v1.4.0/go.mod h1:gD2HeocX3+yG+ygLZcrzQJaqmWj9AIm7n08wl/qW/PE= go.uber.org/multierr v1.1.0/go.mod h1:wR5kodmAFQ0UK8QlbwjlSNy0Z68gJhDJUG5sjR94q/0= go.uber.org/zap v1.10.0/go.mod h1:vwi/ZaCAaUcBkycHslxD9B2zi4UTXhF60s6SWpuDF0Q= @@ -377,19 +353,14 @@ golang.org/x/crypto v0.0.0-20181203042331-505ab145d0a9/go.mod h1:6SG95UA2DQfeDnf golang.org/x/crypto v0.0.0-20190211182817-74369b46fc67/go.mod h1:6SG95UA2DQfeDnfUPMdvaQW0Q7yPrPDi9nlGo2tz2b4= golang.org/x/crypto v0.0.0-20190308221718-c2843e01d9a2/go.mod h1:djNgcEr1/C05ACkg1iLfiJU5Ep61QUkGW8qpdssI0+w= golang.org/x/crypto v0.0.0-20190510104115-cbcb75029529/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI= -golang.org/x/crypto v0.0.0-20190605123033-f99c8df09eb5/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI= golang.org/x/crypto v0.0.0-20190611184440-5c40567a22f8/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI= golang.org/x/crypto v0.0.0-20190820162420-60c769a6c586/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI= golang.org/x/crypto v0.0.0-20190923035154-9ee001bba392/go.mod h1:/lpIB1dKB+9EgE3H3cr1v9wB50oz8l4C4h62xy7jSTY= golang.org/x/exp v0.0.0-20190121172915-509febef88a4/go.mod h1:CJ0aWSM057203Lf6IL+f9T1iT9GByDxfZKAQTCR3kQA= -golang.org/x/exp v0.0.0-20190510132918-efd6b22b2522/go.mod h1:ZjyILWgesfNpC6sMxTJOJm9Kp84zZh5NQWvqDGG3Qr8= -golang.org/x/image 
v0.0.0-20190227222117-0694c2d4d067/go.mod h1:kZ7UVZpmo3dzQBMxlp+ypCbDeSB+sBbTgSJuh5dn5js= golang.org/x/lint v0.0.0-20181026193005-c67002cb31c3/go.mod h1:UVdnD1Gm6xHRNCYTkRU2/jEulfH38KcIWyp/GAMgvoE= golang.org/x/lint v0.0.0-20190227174305-5b3e6a55c961/go.mod h1:wehouNa3lNwaWXcvxsM5YxQ5yQlVC4a0KAMCusXpPoU= golang.org/x/lint v0.0.0-20190301231843-5614ed5bae6f/go.mod h1:UVdnD1Gm6xHRNCYTkRU2/jEulfH38KcIWyp/GAMgvoE= golang.org/x/lint v0.0.0-20190313153728-d0100b6bd8b3/go.mod h1:6SW0HCj/g11FgYtHlgUYUwCkIfeOF89ocIRzGO/8vkc= -golang.org/x/lint v0.0.0-20190409202823-959b441ac422/go.mod h1:6SW0HCj/g11FgYtHlgUYUwCkIfeOF89ocIRzGO/8vkc= -golang.org/x/mobile v0.0.0-20190312151609-d3739f865fa6/go.mod h1:z+o9i4GpDbdi3rU15maQ/Ox0txvL9dWGYEHz965HBQE= golang.org/x/mod v0.0.0-20190513183733-4bf6d317e70e/go.mod h1:mXi4GBBbnImb6dmsKGUJ2LatrhH/nqhxcFungHvyanc= golang.org/x/net v0.0.0-20170114055629-f2499483f923/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= golang.org/x/net v0.0.0-20180724234803-3673e40ba225/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= @@ -402,10 +373,7 @@ golang.org/x/net v0.0.0-20190108225652-1e06a53dbb7e/go.mod h1:mL1N/T3taQHkDXs73r golang.org/x/net v0.0.0-20190213061140-3a22650c66bd/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= golang.org/x/net v0.0.0-20190311183353-d8887717615a/go.mod h1:t9HGtf8HONx5eT2rtn7q6eTqICYqUVnKs3thJo3Qplg= golang.org/x/net v0.0.0-20190404232315-eb5bcb51f2a3/go.mod h1:t9HGtf8HONx5eT2rtn7q6eTqICYqUVnKs3thJo3Qplg= -golang.org/x/net v0.0.0-20190501004415-9ce7a6920f09/go.mod h1:t9HGtf8HONx5eT2rtn7q6eTqICYqUVnKs3thJo3Qplg= -golang.org/x/net v0.0.0-20190503192946-f4e77d36d62c/go.mod h1:t9HGtf8HONx5eT2rtn7q6eTqICYqUVnKs3thJo3Qplg= golang.org/x/net v0.0.0-20190522155817-f3200d17e092/go.mod h1:HSz+uSET+XFnRR8LxR5pz3Of3rY3CfYBVs4xY44aLks= -golang.org/x/net v0.0.0-20190603091049-60506f45cf65/go.mod h1:HSz+uSET+XFnRR8LxR5pz3Of3rY3CfYBVs4xY44aLks= golang.org/x/net v0.0.0-20190613194153-d28f0bde5980/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= golang.org/x/net v0.0.0-20190620200207-3b0461eec859/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= golang.org/x/net v0.0.0-20190827160401-ba9fcec4b297/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= @@ -431,14 +399,9 @@ golang.org/x/sys v0.0.0-20181205085412-a5c9d58dba9a/go.mod h1:STP8DvDyc/dI5b8T5h golang.org/x/sys v0.0.0-20190209173611-3b5209105503/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= golang.org/x/sys v0.0.0-20190215142949-d0b11bdaac8a/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= golang.org/x/sys v0.0.0-20190222072716-a9d3bda3a223/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= -golang.org/x/sys v0.0.0-20190312061237-fead79001313/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20190412213103-97732733099d/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20190422165155-953cdadca894/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/sys v0.0.0-20190502145724-3ef323f4f1fd/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/sys v0.0.0-20190507160741-ecd444e8653b/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/sys v0.0.0-20190606165138-5da285871e9c/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20190616124812-15dcb6c0061f/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/sys v0.0.0-20190624142023-c5567b49c5d0/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys 
v0.0.0-20190826190057-c7b8b68b1456/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20190922100055-0a153f010e69 h1:rOhMmluY6kLMhdnrivzec6lLgaVbMHMn2ISQXJeJ5EM= golang.org/x/sys v0.0.0-20190922100055-0a153f010e69/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= @@ -460,17 +423,12 @@ golang.org/x/tools v0.0.0-20190114222345-bf090417da8b/go.mod h1:n7NCudcB/nEzxVGm golang.org/x/tools v0.0.0-20190226205152-f727befe758c/go.mod h1:9Yl7xja0Znq3iFh3HoIrodX9oNMXvdceNzlUR8zjMvY= golang.org/x/tools v0.0.0-20190311212946-11955173bddd/go.mod h1:LCzVGOaR6xXOjkQ3onu1FJEFr0SW1gC7cKk1uF8kGRs= golang.org/x/tools v0.0.0-20190311215038-5c2858a9cfe5/go.mod h1:LCzVGOaR6xXOjkQ3onu1FJEFr0SW1gC7cKk1uF8kGRs= -golang.org/x/tools v0.0.0-20190312151545-0bb0c0a6e846/go.mod h1:LCzVGOaR6xXOjkQ3onu1FJEFr0SW1gC7cKk1uF8kGRs= golang.org/x/tools v0.0.0-20190312170243-e65039ee4138/go.mod h1:LCzVGOaR6xXOjkQ3onu1FJEFr0SW1gC7cKk1uF8kGRs= golang.org/x/tools v0.0.0-20190322203728-c1a832b0ad89/go.mod h1:LCzVGOaR6xXOjkQ3onu1FJEFr0SW1gC7cKk1uF8kGRs= -golang.org/x/tools v0.0.0-20190425150028-36563e24a262/go.mod h1:RgjU9mgBXZiqYHBnxXauZ1Gv1EHHAz9KjViQ78xBX0Q= -golang.org/x/tools v0.0.0-20190506145303-2d16b83fe98c/go.mod h1:RgjU9mgBXZiqYHBnxXauZ1Gv1EHHAz9KjViQ78xBX0Q= golang.org/x/tools v0.0.0-20190521203540-521d6ed310dd/go.mod h1:RgjU9mgBXZiqYHBnxXauZ1Gv1EHHAz9KjViQ78xBX0Q= -golang.org/x/tools v0.0.0-20190606124116-d0a3d012864b/go.mod h1:/rFqwRUd4F7ZHNgwSSTFct+R/Kf4OFW1sUzUTQQTgfc= golang.org/x/tools v0.0.0-20190614205625-5aca471b1d59 h1:QjA/9ArTfVTLfEhClDCG7SGrZkZixxWpwNCDiwJfh88= golang.org/x/tools v0.0.0-20190614205625-5aca471b1d59/go.mod h1:/rFqwRUd4F7ZHNgwSSTFct+R/Kf4OFW1sUzUTQQTgfc= golang.org/x/tools v0.0.0-20190621195816-6e04913cbbac/go.mod h1:/rFqwRUd4F7ZHNgwSSTFct+R/Kf4OFW1sUzUTQQTgfc= -golang.org/x/tools v0.0.0-20190628153133-6cdbf07be9d0/go.mod h1:/rFqwRUd4F7ZHNgwSSTFct+R/Kf4OFW1sUzUTQQTgfc= golang.org/x/tools v0.0.0-20190719005602-e377ae9d6386/go.mod h1:jcCCGcm9btYwXyDqrUWc6MKQKKGJCWEQ3AfLSRIbEuI= golang.org/x/tools v0.0.0-20190910044552-dd2b5c81c578/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= golang.org/x/tools v0.0.0-20190930201159-7c411dea38b0/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= @@ -478,34 +436,19 @@ golang.org/x/tools v0.0.0-20191010075000-0337d82405ff h1:XdBG6es/oFDr1HwaxkxgVve golang.org/x/tools v0.0.0-20191010075000-0337d82405ff/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= golang.org/x/xerrors v0.0.0-20190717185122-a985d3407aa7/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= google.golang.org/api v0.4.0/go.mod h1:8k5glujaEP+g9n7WNsDg8QP6cUVNI86fCNMcbazEtwE= -google.golang.org/api v0.7.0/go.mod h1:WtwebWUNSVBH/HAw79HIFXZNqEvBhG+Ra+ax0hx3E3M= -google.golang.org/api v0.8.0/go.mod h1:o4eAsZoiT+ibD93RtjEohWalFOjRDx6CVaqeizhEnKg= -google.golang.org/api v0.9.0 h1:jbyannxz0XFD3zdjgrSUsaJbgpH4eTrkdhRChkHPfO8= -google.golang.org/api v0.9.0/go.mod h1:o4eAsZoiT+ibD93RtjEohWalFOjRDx6CVaqeizhEnKg= google.golang.org/appengine v1.1.0/go.mod h1:EbEs0AVv82hx2wNQdGPgUI5lhzA/G0D9YwlJXL52JkM= google.golang.org/appengine v1.4.0/go.mod h1:xpcJRLb0r/rnEns0DIKYYv+WjYCduHsrkT7/EB5XEv4= google.golang.org/appengine v1.5.0/go.mod h1:xpcJRLb0r/rnEns0DIKYYv+WjYCduHsrkT7/EB5XEv4= -google.golang.org/appengine v1.6.1 h1:QzqyMA1tlu6CgqCDUtU9V+ZKhLFT2dkJuANu5QaxI3I= -google.golang.org/appengine v1.6.1/go.mod h1:i06prIuMbXzDqacNJfV5OdTW448YApPu5ww/cMBSeb0= google.golang.org/genproto v0.0.0-20180817151627-c66870c02cf8/go.mod h1:JiN7NxoALGmiZfu7CAH4rXhgtRTLTxftemlI0sWmxmc= 
google.golang.org/genproto v0.0.0-20190307195333-5fe7a883aa19/go.mod h1:VzzqZJRnGkLBvHegQrXjBqPurQTc5/KpmUdxsrq26oE= google.golang.org/genproto v0.0.0-20190418145605-e7d98fc518a7/go.mod h1:VzzqZJRnGkLBvHegQrXjBqPurQTc5/KpmUdxsrq26oE= -google.golang.org/genproto v0.0.0-20190425155659-357c62f0e4bb/go.mod h1:VzzqZJRnGkLBvHegQrXjBqPurQTc5/KpmUdxsrq26oE= -google.golang.org/genproto v0.0.0-20190502173448-54afdca5d873/go.mod h1:VzzqZJRnGkLBvHegQrXjBqPurQTc5/KpmUdxsrq26oE= -google.golang.org/genproto v0.0.0-20190801165951-fa694d86fc64/go.mod h1:DMBHOl98Agz4BDEuKkezgsaosCRResVns1a3J2ZsMNc= -google.golang.org/genproto v0.0.0-20190819201941-24fa4b261c55 h1:gSJIx1SDwno+2ElGhA4+qG2zF97qiUzTM+rQ0klBOcE= -google.golang.org/genproto v0.0.0-20190819201941-24fa4b261c55/go.mod h1:DMBHOl98Agz4BDEuKkezgsaosCRResVns1a3J2ZsMNc= google.golang.org/grpc v1.19.0/go.mod h1:mqu4LbDTu4XGKhr4mRzUsmM4RtVoemTSY81AxZiDr8c= -google.golang.org/grpc v1.20.1/go.mod h1:10oTOabMzJvdu6/UiuZezV6QK5dSlG84ov/aaiqXj38= google.golang.org/grpc v1.21.0/go.mod h1:oYelfM1adQP15Ek0mdvEgi9Df8B9CZIaU1084ijfRaM= -google.golang.org/grpc v1.21.1 h1:j6XxA85m/6txkUCHvzlV5f+HBNl/1r5cZ2A/3IEFOO8= -google.golang.org/grpc v1.21.1/go.mod h1:oYelfM1adQP15Ek0mdvEgi9Df8B9CZIaU1084ijfRaM= gopkg.in/alecthomas/kingpin.v2 v2.2.6/go.mod h1:FMv+mEhP44yOT+4EoQTLFTRgOQ1FBLkstjWtayDeSgw= gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0= gopkg.in/check.v1 v1.0.0-20180628173108-788fd7840127/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0= gopkg.in/check.v1 v1.0.0-20190902080502-41f04d3bba15 h1:YR8cESwS4TdDjEe65xsg0ogRM/Nc3DYOhEAlW+xobZo= gopkg.in/check.v1 v1.0.0-20190902080502-41f04d3bba15/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0= -gopkg.in/cheggaaa/pb.v1 v1.0.27/go.mod h1:V/YB90LKu/1FcN3WVnfiiE5oMCibMjukxqG/qStrOgw= gopkg.in/errgo.v2 v2.1.0/go.mod h1:hNsd1EY+bozCKY1Ytp96fpM3vjJbqLJn88ws8XvfDNI= gopkg.in/fsnotify.v1 v1.4.7 h1:xOHLXZwVvI9hhs+cLKq5+I5onOuwQLhQwiu63xxlHs4= gopkg.in/fsnotify.v1 v1.4.7/go.mod h1:Tz8NjZHkW78fSQdbUxIjBTcgA1z1m8ZHf0WmKUhAMys= @@ -521,7 +464,6 @@ gopkg.in/yaml.v2 v2.2.4 h1:/eiJrUcujPVeJ3xlSWaiNi3uSVmDGBK1pDHUHAnao1I= gopkg.in/yaml.v2 v2.2.4/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI= honnef.co/go/tools v0.0.0-20190102054323-c2f93a96b099/go.mod h1:rf3lG4BRIbNafJWhAfAdb/ePZxsR/4RtNHQocxwk9r4= honnef.co/go/tools v0.0.0-20190106161140-3f1c8253044a/go.mod h1:rf3lG4BRIbNafJWhAfAdb/ePZxsR/4RtNHQocxwk9r4= -honnef.co/go/tools v0.0.0-20190418001031-e561f6794a2a/go.mod h1:rf3lG4BRIbNafJWhAfAdb/ePZxsR/4RtNHQocxwk9r4= honnef.co/go/tools v0.0.1-2019.2.3 h1:3JgtbtFHMiCmsznwGVTUWbgGov+pVqnlf1dEJTNAXeM= honnef.co/go/tools v0.0.1-2019.2.3/go.mod h1:a3bituU0lyd329TUQxRnasdCoJDkEUEAqEt0JzvZhAg= k8s.io/api v0.17.0 h1:H9d/lw+VkZKEVIUc8F3wgiQ+FUXTTr21M87jXLU7yqM= @@ -544,7 +486,6 @@ mvdan.cc/lint v0.0.0-20170908181259-adc824a0674b h1:DxJ5nJdkhDlLok9K6qO+5290kphD mvdan.cc/lint v0.0.0-20170908181259-adc824a0674b/go.mod h1:2odslEg/xrtNQqCYg2/jCoyKnw3vv5biOc3JnIcYfL4= mvdan.cc/unparam v0.0.0-20190720180237-d51796306d8f h1:Cq7MalBHYACRd6EesksG1Q8EoIAKOsiZviGKbOLIej4= mvdan.cc/unparam v0.0.0-20190720180237-d51796306d8f/go.mod h1:4G1h5nDURzA3bwVMZIVpwbkw+04kSxk3rAtzlimaUJw= -rsc.io/binaryregexp v0.2.0/go.mod h1:qTv7/COck+e2FymRvadv62gMdZztPaShugOCi3I+8D8= sigs.k8s.io/structured-merge-diff v0.0.0-20190525122527-15d366b2352e/go.mod h1:wWxsB5ozmmv/SG7nM11ayaAW51xMvak/t1r0CSlcokI= sigs.k8s.io/yaml v1.1.0 h1:4A07+ZFc2wgJwo8YNlQpr1rVlgUDlxXHhPJciaPY5gs= sigs.k8s.io/yaml v1.1.0/go.mod 
h1:UJmg0vDUVViEyp3mgSv9WPwZCDxu4rQW1olrI1uml+o= diff --git a/api/internal/getter/LICENSE b/api/internal/getter/LICENSE new file mode 100644 index 00000000000..c33dcc7c928 --- /dev/null +++ b/api/internal/getter/LICENSE @@ -0,0 +1,354 @@ +Mozilla Public License, version 2.0 + +1. Definitions + +1.1. “Contributor” + + means each individual or legal entity that creates, contributes to the + creation of, or owns Covered Software. + +1.2. “Contributor Version” + + means the combination of the Contributions of others (if any) used by a + Contributor and that particular Contributor’s Contribution. + +1.3. “Contribution” + + means Covered Software of a particular Contributor. + +1.4. “Covered Software” + + means Source Code Form to which the initial Contributor has attached the + notice in Exhibit A, the Executable Form of such Source Code Form, and + Modifications of such Source Code Form, in each case including portions + thereof. + +1.5. “Incompatible With Secondary Licenses” + means + + a. that the initial Contributor has attached the notice described in + Exhibit B to the Covered Software; or + + b. that the Covered Software was made available under the terms of version + 1.1 or earlier of the License, but not also under the terms of a + Secondary License. + +1.6. “Executable Form” + + means any form of the work other than Source Code Form. + +1.7. “Larger Work” + + means a work that combines Covered Software with other material, in a separate + file or files, that is not Covered Software. + +1.8. “License” + + means this document. + +1.9. “Licensable” + + means having the right to grant, to the maximum extent possible, whether at the + time of the initial grant or subsequently, any and all of the rights conveyed by + this License. + +1.10. “Modifications” + + means any of the following: + + a. any file in Source Code Form that results from an addition to, deletion + from, or modification of the contents of Covered Software; or + + b. any new file in Source Code Form that contains any Covered Software. + +1.11. “Patent Claims” of a Contributor + + means any patent claim(s), including without limitation, method, process, + and apparatus claims, in any patent Licensable by such Contributor that + would be infringed, but for the grant of the License, by the making, + using, selling, offering for sale, having made, import, or transfer of + either its Contributions or its Contributor Version. + +1.12. “Secondary License” + + means either the GNU General Public License, Version 2.0, the GNU Lesser + General Public License, Version 2.1, the GNU Affero General Public + License, Version 3.0, or any later versions of those licenses. + +1.13. “Source Code Form” + + means the form of the work preferred for making modifications. + +1.14. “You” (or “Your”) + + means an individual or a legal entity exercising rights under this + License. For legal entities, “You” includes any entity that controls, is + controlled by, or is under common control with You. For purposes of this + definition, “control” means (a) the power, direct or indirect, to cause + the direction or management of such entity, whether by contract or + otherwise, or (b) ownership of more than fifty percent (50%) of the + outstanding shares or beneficial ownership of such entity. + + +2. License Grants and Conditions + +2.1. Grants + + Each Contributor hereby grants You a world-wide, royalty-free, + non-exclusive license: + + a. 
under intellectual property rights (other than patent or trademark) + Licensable by such Contributor to use, reproduce, make available, + modify, display, perform, distribute, and otherwise exploit its + Contributions, either on an unmodified basis, with Modifications, or as + part of a Larger Work; and + + b. under Patent Claims of such Contributor to make, use, sell, offer for + sale, have made, import, and otherwise transfer either its Contributions + or its Contributor Version. + +2.2. Effective Date + + The licenses granted in Section 2.1 with respect to any Contribution become + effective for each Contribution on the date the Contributor first distributes + such Contribution. + +2.3. Limitations on Grant Scope + + The licenses granted in this Section 2 are the only rights granted under this + License. No additional rights or licenses will be implied from the distribution + or licensing of Covered Software under this License. Notwithstanding Section + 2.1(b) above, no patent license is granted by a Contributor: + + a. for any code that a Contributor has removed from Covered Software; or + + b. for infringements caused by: (i) Your and any other third party’s + modifications of Covered Software, or (ii) the combination of its + Contributions with other software (except as part of its Contributor + Version); or + + c. under Patent Claims infringed by Covered Software in the absence of its + Contributions. + + This License does not grant any rights in the trademarks, service marks, or + logos of any Contributor (except as may be necessary to comply with the + notice requirements in Section 3.4). + +2.4. Subsequent Licenses + + No Contributor makes additional grants as a result of Your choice to + distribute the Covered Software under a subsequent version of this License + (see Section 10.2) or under the terms of a Secondary License (if permitted + under the terms of Section 3.3). + +2.5. Representation + + Each Contributor represents that the Contributor believes its Contributions + are its original creation(s) or it has sufficient rights to grant the + rights to its Contributions conveyed by this License. + +2.6. Fair Use + + This License is not intended to limit any rights You have under applicable + copyright doctrines of fair use, fair dealing, or other equivalents. + +2.7. Conditions + + Sections 3.1, 3.2, 3.3, and 3.4 are conditions of the licenses granted in + Section 2.1. + + +3. Responsibilities + +3.1. Distribution of Source Form + + All distribution of Covered Software in Source Code Form, including any + Modifications that You create or to which You contribute, must be under the + terms of this License. You must inform recipients that the Source Code Form + of the Covered Software is governed by the terms of this License, and how + they can obtain a copy of this License. You may not attempt to alter or + restrict the recipients’ rights in the Source Code Form. + +3.2. Distribution of Executable Form + + If You distribute Covered Software in Executable Form then: + + a. such Covered Software must also be made available in Source Code Form, + as described in Section 3.1, and You must inform recipients of the + Executable Form how they can obtain a copy of such Source Code Form by + reasonable means in a timely manner, at a charge no more than the cost + of distribution to the recipient; and + + b. 
You may distribute such Executable Form under the terms of this License, + or sublicense it under different terms, provided that the license for + the Executable Form does not attempt to limit or alter the recipients’ + rights in the Source Code Form under this License. + +3.3. Distribution of a Larger Work + + You may create and distribute a Larger Work under terms of Your choice, + provided that You also comply with the requirements of this License for the + Covered Software. If the Larger Work is a combination of Covered Software + with a work governed by one or more Secondary Licenses, and the Covered + Software is not Incompatible With Secondary Licenses, this License permits + You to additionally distribute such Covered Software under the terms of + such Secondary License(s), so that the recipient of the Larger Work may, at + their option, further distribute the Covered Software under the terms of + either this License or such Secondary License(s). + +3.4. Notices + + You may not remove or alter the substance of any license notices (including + copyright notices, patent notices, disclaimers of warranty, or limitations + of liability) contained within the Source Code Form of the Covered + Software, except that You may alter any license notices to the extent + required to remedy known factual inaccuracies. + +3.5. Application of Additional Terms + + You may choose to offer, and to charge a fee for, warranty, support, + indemnity or liability obligations to one or more recipients of Covered + Software. However, You may do so only on Your own behalf, and not on behalf + of any Contributor. You must make it absolutely clear that any such + warranty, support, indemnity, or liability obligation is offered by You + alone, and You hereby agree to indemnify every Contributor for any + liability incurred by such Contributor as a result of warranty, support, + indemnity or liability terms You offer. You may include additional + disclaimers of warranty and limitations of liability specific to any + jurisdiction. + +4. Inability to Comply Due to Statute or Regulation + + If it is impossible for You to comply with any of the terms of this License + with respect to some or all of the Covered Software due to statute, judicial + order, or regulation then You must: (a) comply with the terms of this License + to the maximum extent possible; and (b) describe the limitations and the code + they affect. Such description must be placed in a text file included with all + distributions of the Covered Software under this License. Except to the + extent prohibited by statute or regulation, such description must be + sufficiently detailed for a recipient of ordinary skill to be able to + understand it. + +5. Termination + +5.1. The rights granted under this License will terminate automatically if You + fail to comply with any of its terms. However, if You become compliant, + then the rights granted under this License from a particular Contributor + are reinstated (a) provisionally, unless and until such Contributor + explicitly and finally terminates Your grants, and (b) on an ongoing basis, + if such Contributor fails to notify You of the non-compliance by some + reasonable means prior to 60 days after You have come back into compliance. 
+ Moreover, Your grants from a particular Contributor are reinstated on an + ongoing basis if such Contributor notifies You of the non-compliance by + some reasonable means, this is the first time You have received notice of + non-compliance with this License from such Contributor, and You become + compliant prior to 30 days after Your receipt of the notice. + +5.2. If You initiate litigation against any entity by asserting a patent + infringement claim (excluding declaratory judgment actions, counter-claims, + and cross-claims) alleging that a Contributor Version directly or + indirectly infringes any patent, then the rights granted to You by any and + all Contributors for the Covered Software under Section 2.1 of this License + shall terminate. + +5.3. In the event of termination under Sections 5.1 or 5.2 above, all end user + license agreements (excluding distributors and resellers) which have been + validly granted by You or Your distributors under this License prior to + termination shall survive termination. + +6. Disclaimer of Warranty + + Covered Software is provided under this License on an “as is” basis, without + warranty of any kind, either expressed, implied, or statutory, including, + without limitation, warranties that the Covered Software is free of defects, + merchantable, fit for a particular purpose or non-infringing. The entire + risk as to the quality and performance of the Covered Software is with You. + Should any Covered Software prove defective in any respect, You (not any + Contributor) assume the cost of any necessary servicing, repair, or + correction. This disclaimer of warranty constitutes an essential part of this + License. No use of any Covered Software is authorized under this License + except under this disclaimer. + +7. Limitation of Liability + + Under no circumstances and under no legal theory, whether tort (including + negligence), contract, or otherwise, shall any Contributor, or anyone who + distributes Covered Software as permitted above, be liable to You for any + direct, indirect, special, incidental, or consequential damages of any + character including, without limitation, damages for lost profits, loss of + goodwill, work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses, even if such party shall have been + informed of the possibility of such damages. This limitation of liability + shall not apply to liability for death or personal injury resulting from such + party’s negligence to the extent applicable law prohibits such limitation. + Some jurisdictions do not allow the exclusion or limitation of incidental or + consequential damages, so this exclusion and limitation may not apply to You. + +8. Litigation + + Any litigation relating to this License may be brought only in the courts of + a jurisdiction where the defendant maintains its principal place of business + and such litigation shall be governed by laws of that jurisdiction, without + reference to its conflict-of-law provisions. Nothing in this Section shall + prevent a party’s ability to bring cross-claims or counter-claims. + +9. Miscellaneous + + This License represents the complete agreement concerning the subject matter + hereof. If any provision of this License is held to be unenforceable, such + provision shall be reformed only to the extent necessary to make it + enforceable. 
Any law or regulation which provides that the language of a + contract shall be construed against the drafter shall not be used to construe + this License against a Contributor. + + +10. Versions of the License + +10.1. New Versions + + Mozilla Foundation is the license steward. Except as provided in Section + 10.3, no one other than the license steward has the right to modify or + publish new versions of this License. Each version will be given a + distinguishing version number. + +10.2. Effect of New Versions + + You may distribute the Covered Software under the terms of the version of + the License under which You originally received the Covered Software, or + under the terms of any subsequent version published by the license + steward. + +10.3. Modified Versions + + If you create software not governed by this License, and you want to + create a new license for such software, you may create and use a modified + version of this License if you rename the license and remove any + references to the name of the license steward (except to note that such + modified license differs from this License). + +10.4. Distributing Source Code Form that is Incompatible With Secondary Licenses + If You choose to distribute Source Code Form that is Incompatible With + Secondary Licenses under the terms of this version of the License, the + notice described in Exhibit B of this License must be attached. + +Exhibit A - Source Code Form License Notice + + This Source Code Form is subject to the + terms of the Mozilla Public License, v. + 2.0. If a copy of the MPL was not + distributed with this file, You can + obtain one at + http://mozilla.org/MPL/2.0/. + +If it is not possible or desirable to put the notice in a particular file, then +You may include the notice in a location (such as a LICENSE file in a relevant +directory) where a recipient would be likely to look for such a notice. + +You may add additional accurate notices of copyright ownership. + +Exhibit B - “Incompatible With Secondary Licenses” Notice + + This Source Code Form is “Incompatible + With Secondary Licenses”, as defined by + the Mozilla Public License, v. 2.0. + diff --git a/api/internal/getter/README.md b/api/internal/getter/README.md new file mode 100644 index 00000000000..076d1d77323 --- /dev/null +++ b/api/internal/getter/README.md @@ -0,0 +1,5 @@ +# getter + +A tailored version of [go-getter](https://github.com/hashicorp/go-getter). + +Removed s3 and gcs support to avoid transitive dependency modules. diff --git a/api/internal/getter/checksum.go b/api/internal/getter/checksum.go new file mode 100644 index 00000000000..b652adb3088 --- /dev/null +++ b/api/internal/getter/checksum.go @@ -0,0 +1,314 @@ +package getter + +import ( + "bufio" + "bytes" + "crypto/md5" + "crypto/sha1" + "crypto/sha256" + "crypto/sha512" + "encoding/hex" + "fmt" + "hash" + "io" + "net/url" + "os" + "path/filepath" + "strings" + + urlhelper "./helper/url" +) + +// FileChecksum helps verifying the checksum for a file. 
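+//
+// Illustrative example (URL and digest are made up): a source such as
+//
+//	http://example.com/foo.txt?checksum=md5:b7d96c89d09d9e204f5fedc4d5d55b21
+//
+// yields a FileChecksum with Type "md5", Hash set to an md5 hasher, Value
+// holding the hex-decoded digest, and Filename "foo.txt".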
+type FileChecksum struct { + Type string + Hash hash.Hash + Value []byte + Filename string +} + +// A ChecksumError is returned when a checksum differs +type ChecksumError struct { + Hash hash.Hash + Actual []byte + Expected []byte + File string +} + +func (cerr *ChecksumError) Error() string { + if cerr == nil { + return "" + } + return fmt.Sprintf( + "Checksums did not match for %s.\nExpected: %s\nGot: %s\n%T", + cerr.File, + hex.EncodeToString(cerr.Expected), + hex.EncodeToString(cerr.Actual), + cerr.Hash, // ex: *sha256.digest + ) +} + +// checksum is a simple method to compute the checksum of a source file +// and compare it to the given expected value. +func (c *FileChecksum) checksum(source string) error { + f, err := os.Open(source) + if err != nil { + return fmt.Errorf("Failed to open file for checksum: %s", err) + } + defer f.Close() + + c.Hash.Reset() + if _, err := io.Copy(c.Hash, f); err != nil { + return fmt.Errorf("Failed to hash: %s", err) + } + + if actual := c.Hash.Sum(nil); !bytes.Equal(actual, c.Value) { + return &ChecksumError{ + Hash: c.Hash, + Actual: actual, + Expected: c.Value, + File: source, + } + } + + return nil +} + +// extractChecksum will return a FileChecksum based on the 'checksum' +// parameter of u. +// ex: +// http://hashicorp.com/terraform?checksum= +// http://hashicorp.com/terraform?checksum=: +// http://hashicorp.com/terraform?checksum=file: +// when checksumming from a file, extractChecksum will go get checksum_url +// in a temporary directory, parse the content of the file then delete it. +// Content of files are expected to be BSD style or GNU style. +// +// BSD-style checksum: +// MD5 (file1) = +// MD5 (file2) = +// +// GNU-style: +// file1 +// *file2 +// +// see parseChecksumLine for more detail on checksum file parsing +func (c *Client) extractChecksum(u *url.URL) (*FileChecksum, error) { + q := u.Query() + v := q.Get("checksum") + + if v == "" { + return nil, nil + } + + vs := strings.SplitN(v, ":", 2) + switch len(vs) { + case 2: + break // good + default: + // here, we try to guess the checksum from it's length + // if the type was not passed + return newChecksumFromValue(v, filepath.Base(u.EscapedPath())) + } + + checksumType, checksumValue := vs[0], vs[1] + + switch checksumType { + case "file": + return c.ChecksumFromFile(checksumValue, u) + default: + return newChecksumFromType(checksumType, checksumValue, filepath.Base(u.EscapedPath())) + } +} + +func newChecksum(checksumValue, filename string) (*FileChecksum, error) { + c := &FileChecksum{ + Filename: filename, + } + var err error + c.Value, err = hex.DecodeString(checksumValue) + if err != nil { + return nil, fmt.Errorf("invalid checksum: %s", err) + } + return c, nil +} + +func newChecksumFromType(checksumType, checksumValue, filename string) (*FileChecksum, error) { + c, err := newChecksum(checksumValue, filename) + if err != nil { + return nil, err + } + + c.Type = strings.ToLower(checksumType) + switch c.Type { + case "md5": + c.Hash = md5.New() + case "sha1": + c.Hash = sha1.New() + case "sha256": + c.Hash = sha256.New() + case "sha512": + c.Hash = sha512.New() + default: + return nil, fmt.Errorf( + "unsupported checksum type: %s", checksumType) + } + + return c, nil +} + +func newChecksumFromValue(checksumValue, filename string) (*FileChecksum, error) { + c, err := newChecksum(checksumValue, filename) + if err != nil { + return nil, err + } + + switch len(c.Value) { + case md5.Size: + c.Hash = md5.New() + c.Type = "md5" + case sha1.Size: + c.Hash = sha1.New() + c.Type = "sha1" 
+ case sha256.Size: + c.Hash = sha256.New() + c.Type = "sha256" + case sha512.Size: + c.Hash = sha512.New() + c.Type = "sha512" + default: + return nil, fmt.Errorf("Unknown type for checksum %s", checksumValue) + } + + return c, nil +} + +// ChecksumFromFile will return all the FileChecksums found in file +// +// ChecksumFromFile will try to guess the hashing algorithm based on content +// of checksum file +// +// ChecksumFromFile will only return checksums for files that match file +// behind src +func (c *Client) ChecksumFromFile(checksumFile string, src *url.URL) (*FileChecksum, error) { + checksumFileURL, err := urlhelper.Parse(checksumFile) + if err != nil { + return nil, err + } + + tempfile, err := tmpFile("", filepath.Base(checksumFileURL.Path)) + if err != nil { + return nil, err + } + defer os.Remove(tempfile) + + c2 := &Client{ + Ctx: c.Ctx, + Getters: c.Getters, + Decompressors: c.Decompressors, + Detectors: c.Detectors, + Pwd: c.Pwd, + Dir: false, + Src: checksumFile, + Dst: tempfile, + ProgressListener: c.ProgressListener, + } + if err = c2.Get(); err != nil { + return nil, fmt.Errorf( + "Error downloading checksum file: %s", err) + } + + filename := filepath.Base(src.Path) + absPath, err := filepath.Abs(src.Path) + if err != nil { + return nil, err + } + checksumFileDir := filepath.Dir(checksumFileURL.Path) + relpath, err := filepath.Rel(checksumFileDir, absPath) + switch { + case err == nil || + err.Error() == "Rel: can't make "+absPath+" relative to "+checksumFileDir: + // ex: on windows C:\gopath\...\content.txt cannot be relative to \ + // which is okay, may be another expected path will work. + break + default: + return nil, err + } + + // possible file identifiers: + options := []string{ + filename, // ubuntu-14.04.1-server-amd64.iso + "*" + filename, // *ubuntu-14.04.1-server-amd64.iso Standard checksum + "?" + filename, // ?ubuntu-14.04.1-server-amd64.iso shasum -p + relpath, // dir/ubuntu-14.04.1-server-amd64.iso + "./" + relpath, // ./dir/ubuntu-14.04.1-server-amd64.iso + absPath, // fullpath; set if local + } + + f, err := os.Open(tempfile) + if err != nil { + return nil, fmt.Errorf( + "Error opening downloaded file: %s", err) + } + defer f.Close() + rd := bufio.NewReader(f) + for { + line, err := rd.ReadString('\n') + if err != nil { + if err != io.EOF { + return nil, fmt.Errorf( + "Error reading checksum file: %s", err) + } + break + } + checksum, err := parseChecksumLine(line) + if err != nil || checksum == nil { + continue + } + if checksum.Filename == "" { + // filename not sure, let's try + return checksum, nil + } + // make sure the checksum is for the right file + for _, option := range options { + if option != "" && checksum.Filename == option { + // any checksum will work so we return the first one + return checksum, nil + } + } + } + return nil, fmt.Errorf("no checksum found in: %s", checksumFile) +} + +// parseChecksumLine takes a line from a checksum file and returns +// checksumType, checksumValue and filename parseChecksumLine guesses the style +// of the checksum BSD vs GNU by splitting the line and by counting the parts. +// of a line. +// for BSD type sums parseChecksumLine guesses the hashing algorithm +// by checking the length of the checksum. 
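+//
+// Illustrative examples (file names and digests are made up):
+//
+//	MD5 (archive.tar.gz) = b7d96c89d09d9e204f5fedc4d5d55b21    (BSD style, 4 fields)
+//	b7d96c89d09d9e204f5fedc4d5d55b21  archive.tar.gz           (GNU style, 2 fields)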
+func parseChecksumLine(line string) (*FileChecksum, error) { + parts := strings.Fields(line) + + switch len(parts) { + case 4: + // BSD-style checksum: + // MD5 (file1) = + // MD5 (file2) = + if len(parts[1]) <= 2 || + parts[1][0] != '(' || parts[1][len(parts[1])-1] != ')' { + return nil, fmt.Errorf( + "Unexpected BSD-style-checksum filename format: %s", line) + } + filename := parts[1][1 : len(parts[1])-1] + return newChecksumFromType(parts[0], parts[3], filename) + case 2: + // GNU-style: + // file1 + // *file2 + return newChecksumFromValue(parts[0], parts[1]) + case 0: + return nil, nil // empty line + default: + return newChecksumFromValue(parts[0], "") + } +} diff --git a/api/internal/getter/client.go b/api/internal/getter/client.go new file mode 100644 index 00000000000..df9a004fa79 --- /dev/null +++ b/api/internal/getter/client.go @@ -0,0 +1,298 @@ +package getter + +import ( + "context" + "fmt" + "io/ioutil" + "os" + "path/filepath" + "strconv" + "strings" + + urlhelper "./helper/url" + safetemp "github.com/hashicorp/go-safetemp" +) + +// Client is a client for downloading things. +// +// Top-level functions such as Get are shortcuts for interacting with a client. +// Using a client directly allows more fine-grained control over how downloading +// is done, as well as customizing the protocols supported. +type Client struct { + // Ctx for cancellation + Ctx context.Context + + // Src is the source URL to get. + // + // Dst is the path to save the downloaded thing as. If Dir is set to + // true, then this should be a directory. If the directory doesn't exist, + // it will be created for you. + // + // Pwd is the working directory for detection. If this isn't set, some + // detection may fail. Client will not default pwd to the current + // working directory for security reasons. + Src string + Dst string + Pwd string + + // Mode is the method of download the client will use. See ClientMode + // for documentation. + Mode ClientMode + + // Detectors is the list of detectors that are tried on the source. + // If this is nil, then the default Detectors will be used. + Detectors []Detector + + // Decompressors is the map of decompressors supported by this client. + // If this is nil, then the default value is the Decompressors global. + Decompressors map[string]Decompressor + + // Getters is the map of protocols supported by this client. If this + // is nil, then the default Getters variable will be used. + Getters map[string]Getter + + // Dir, if true, tells the Client it is downloading a directory (versus + // a single file). This distinction is necessary since filenames and + // directory names follow the same format so disambiguating is impossible + // without knowing ahead of time. + // + // WARNING: deprecated. If Mode is set, that will take precedence. + Dir bool + + // ProgressListener allows to track file downloads. + // By default a no op progress listener is used. + ProgressListener ProgressTracker + + Options []ClientOption +} + +// Get downloads the configured source to the destination. +func (c *Client) Get() error { + if err := c.Configure(c.Options...); err != nil { + return err + } + + // Store this locally since there are cases we swap this + mode := c.Mode + if mode == ClientModeInvalid { + if c.Dir { + mode = ClientModeDir + } else { + mode = ClientModeFile + } + } + + src, err := Detect(c.Src, c.Pwd, c.Detectors) + if err != nil { + return err + } + + // Determine if we have a forced protocol, i.e. "git::http://..." 
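+	// For example (URL is made up), "git::https://example.com/repo.git"
+	// yields force == "git" and src == "https://example.com/repo.git";
+	// without such a prefix, force stays empty and falls back to the URL scheme below.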
+ force, src := getForcedGetter(src) + + // If there is a subdir component, then we download the root separately + // and then copy over the proper subdir. + var realDst string + dst := c.Dst + src, subDir := SourceDirSubdir(src) + if subDir != "" { + td, tdcloser, err := safetemp.Dir("", "getter") + if err != nil { + return err + } + defer tdcloser.Close() + + realDst = dst + dst = td + } + + u, err := urlhelper.Parse(src) + if err != nil { + return err + } + if force == "" { + force = u.Scheme + } + + g, ok := c.Getters[force] + if !ok { + return fmt.Errorf( + "download not supported for scheme '%s'", force) + } + + // We have magic query parameters that we use to signal different features + q := u.Query() + + // Determine if we have an archive type + archiveV := q.Get("archive") + if archiveV != "" { + // Delete the paramter since it is a magic parameter we don't + // want to pass on to the Getter + q.Del("archive") + u.RawQuery = q.Encode() + + // If we can parse the value as a bool and it is false, then + // set the archive to "-" which should never map to a decompressor + if b, err := strconv.ParseBool(archiveV); err == nil && !b { + archiveV = "-" + } + } + if archiveV == "" { + // We don't appear to... but is it part of the filename? + matchingLen := 0 + for k := range c.Decompressors { + if strings.HasSuffix(u.Path, "."+k) && len(k) > matchingLen { + archiveV = k + matchingLen = len(k) + } + } + } + + // If we have a decompressor, then we need to change the destination + // to download to a temporary path. We unarchive this into the final, + // real path. + var decompressDst string + var decompressDir bool + decompressor := c.Decompressors[archiveV] + if decompressor != nil { + // Create a temporary directory to store our archive. We delete + // this at the end of everything. + td, err := ioutil.TempDir("", "getter") + if err != nil { + return fmt.Errorf( + "Error creating temporary directory for archive: %s", err) + } + defer os.RemoveAll(td) + + // Swap the download directory to be our temporary path and + // store the old values. + decompressDst = dst + decompressDir = mode != ClientModeFile + dst = filepath.Join(td, "archive") + mode = ClientModeFile + } + + // Determine checksum if we have one + checksum, err := c.extractChecksum(u) + if err != nil { + return fmt.Errorf("invalid checksum: %s", err) + } + + // Delete the query parameter if we have it. + q.Del("checksum") + u.RawQuery = q.Encode() + + if mode == ClientModeAny { + // Ask the getter which client mode to use + mode, err = g.ClientMode(u) + if err != nil { + return err + } + + // Destination is the base name of the URL path in "any" mode when + // a file source is detected. + if mode == ClientModeFile { + filename := filepath.Base(u.Path) + + // Determine if we have a custom file name + if v := q.Get("filename"); v != "" { + // Delete the query parameter if we have it. + q.Del("filename") + u.RawQuery = q.Encode() + + filename = v + } + + dst = filepath.Join(dst, filename) + } + } + + // If we're not downloading a directory, then just download the file + // and return. 
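+	// Note that when a checksum was supplied and dst already matches it,
+	// the download below is skipped entirely (see the getFile flag).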
+ if mode == ClientModeFile { + getFile := true + if checksum != nil { + if err := checksum.checksum(dst); err == nil { + // don't get the file if the checksum of dst is correct + getFile = false + } + } + if getFile { + err := g.GetFile(dst, u) + if err != nil { + return err + } + + if checksum != nil { + if err := checksum.checksum(dst); err != nil { + return err + } + } + } + + if decompressor != nil { + // We have a decompressor, so decompress the current destination + // into the final destination with the proper mode. + err := decompressor.Decompress(decompressDst, dst, decompressDir) + if err != nil { + return err + } + + // Swap the information back + dst = decompressDst + if decompressDir { + mode = ClientModeAny + } else { + mode = ClientModeFile + } + } + + // We check the dir value again because it can be switched back + // if we were unarchiving. If we're still only Get-ing a file, then + // we're done. + if mode == ClientModeFile { + return nil + } + } + + // If we're at this point we're either downloading a directory or we've + // downloaded and unarchived a directory and we're just checking subdir. + // In the case we have a decompressor we don't Get because it was Get + // above. + if decompressor == nil { + // If we're getting a directory, then this is an error. You cannot + // checksum a directory. TODO: test + if checksum != nil { + return fmt.Errorf( + "checksum cannot be specified for directory download") + } + + // We're downloading a directory, which might require a bit more work + // if we're specifying a subdir. + err := g.Get(dst, u) + if err != nil { + err = fmt.Errorf("error downloading '%s': %s", src, err) + return err + } + } + + // If we have a subdir, copy that over + if subDir != "" { + if err := os.RemoveAll(realDst); err != nil { + return err + } + if err := os.MkdirAll(realDst, 0755); err != nil { + return err + } + + // Process any globs + subDir, err := SubdirGlob(dst, subDir) + if err != nil { + return err + } + + return copyDir(c.Ctx, realDst, subDir, false) + } + + return nil +} diff --git a/api/internal/getter/client_mode.go b/api/internal/getter/client_mode.go new file mode 100644 index 00000000000..7f02509a789 --- /dev/null +++ b/api/internal/getter/client_mode.go @@ -0,0 +1,24 @@ +package getter + +// ClientMode is the mode that the client operates in. +type ClientMode uint + +const ( + ClientModeInvalid ClientMode = iota + + // ClientModeAny downloads anything it can. In this mode, dst must + // be a directory. If src is a file, it is saved into the directory + // with the basename of the URL. If src is a directory or archive, + // it is unpacked directly into dst. + ClientModeAny + + // ClientModeFile downloads a single file. In this mode, dst must + // be a file path (doesn't have to exist). src must point to a single + // file. It is saved as dst. + ClientModeFile + + // ClientModeDir downloads a directory. In this mode, dst must be + // a directory path (doesn't have to exist). src must point to an + // archive or directory (such as in s3). + ClientModeDir +) diff --git a/api/internal/getter/client_option.go b/api/internal/getter/client_option.go new file mode 100644 index 00000000000..c1ee413b055 --- /dev/null +++ b/api/internal/getter/client_option.go @@ -0,0 +1,46 @@ +package getter + +import "context" + +// A ClientOption allows to configure a client +type ClientOption func(*Client) error + +// Configure configures a client with options. 
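+//
+// Illustrative example (variable names and URL are made up):
+//
+//	c := &Client{Src: "https://example.com/archive.tar.gz", Dst: "/tmp/out"}
+//	err := c.Configure(WithContext(ctx), WithProgress(tracker))
+//
+// Get calls Configure with c.Options, so the same options may instead be
+// stored on the client or passed to top-level helpers such as GetFile.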
+func (c *Client) Configure(opts ...ClientOption) error { + if c.Ctx == nil { + c.Ctx = context.Background() + } + c.Options = opts + for _, opt := range opts { + err := opt(c) + if err != nil { + return err + } + } + // Default decompressor values + if c.Decompressors == nil { + c.Decompressors = Decompressors + } + // Default detector values + if c.Detectors == nil { + c.Detectors = Detectors + } + // Default getter values + if c.Getters == nil { + c.Getters = Getters + } + + for _, getter := range c.Getters { + getter.SetClient(c) + } + return nil +} + +// WithContext allows to pass a context to operation +// in order to be able to cancel a download in progress. +func WithContext(ctx context.Context) func(*Client) error { + return func(c *Client) error { + c.Ctx = ctx + return nil + } +} diff --git a/api/internal/getter/client_option_progress.go b/api/internal/getter/client_option_progress.go new file mode 100644 index 00000000000..9b185f71de6 --- /dev/null +++ b/api/internal/getter/client_option_progress.go @@ -0,0 +1,38 @@ +package getter + +import ( + "io" +) + +// WithProgress allows for a user to track +// the progress of a download. +// For example by displaying a progress bar with +// current download. +// Not all getters have progress support yet. +func WithProgress(pl ProgressTracker) func(*Client) error { + return func(c *Client) error { + c.ProgressListener = pl + return nil + } +} + +// ProgressTracker allows to track the progress of downloads. +type ProgressTracker interface { + // TrackProgress should be called when + // a new object is being downloaded. + // src is the location the file is + // downloaded from. + // currentSize is the current size of + // the file in case it is a partial + // download. + // totalSize is the total size in bytes, + // size can be zero if the file size + // is not known. + // stream is the file being downloaded, every + // written byte will add up to processed size. + // + // TrackProgress returns a ReadCloser that wraps the + // download in progress ( stream ). + // When the download is finished, body shall be closed. 
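+	//
+	// MockProgressTracking in client_option_progress_test.go is a minimal
+	// implementation: it counts downloads per src and returns stream as-is.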
+ TrackProgress(src string, currentSize, totalSize int64, stream io.ReadCloser) (body io.ReadCloser) +} diff --git a/api/internal/getter/client_option_progress_test.go b/api/internal/getter/client_option_progress_test.go new file mode 100644 index 00000000000..a578fed6741 --- /dev/null +++ b/api/internal/getter/client_option_progress_test.go @@ -0,0 +1,65 @@ +package getter + +import ( + "io" + "net/http" + "net/http/httptest" + "os" + "path/filepath" + "sync" + "testing" +) + +type MockProgressTracking struct { + sync.Mutex + downloaded map[string]int +} + +func (p *MockProgressTracking) TrackProgress(src string, + currentSize, totalSize int64, stream io.ReadCloser) (body io.ReadCloser) { + p.Lock() + defer p.Unlock() + + if p.downloaded == nil { + p.downloaded = map[string]int{} + } + + v, _ := p.downloaded[src] + p.downloaded[src] = v + 1 + return stream +} + +func TestGet_progress(t *testing.T) { + s := httptest.NewServer(http.HandlerFunc(func(rw http.ResponseWriter, req *http.Request) { + // all good + rw.Header().Add("X-Terraform-Get", "something") + })) + defer s.Close() + + { // dl without tracking + dst := tempTestFile(t) + defer os.RemoveAll(filepath.Dir(dst)) + if err := GetFile(dst, s.URL+"/file?thig=this&that"); err != nil { + t.Fatalf("download failed: %v", err) + } + } + + { // tracking + p := &MockProgressTracking{} + dst := tempTestFile(t) + defer os.RemoveAll(filepath.Dir(dst)) + if err := GetFile(dst, s.URL+"/file?thig=this&that", WithProgress(p)); err != nil { + t.Fatalf("download failed: %v", err) + } + if err := GetFile(dst, s.URL+"/otherfile?thig=this&that", WithProgress(p)); err != nil { + t.Fatalf("download failed: %v", err) + } + + if p.downloaded["file"] != 1 { + t.Error("Expected a file download") + } + if p.downloaded["otherfile"] != 1 { + t.Error("Expected a otherfile download") + } + } +} diff --git a/api/internal/getter/common.go b/api/internal/getter/common.go new file mode 100644 index 00000000000..d2afd8ad888 --- /dev/null +++ b/api/internal/getter/common.go @@ -0,0 +1,14 @@ +package getter + +import ( + "io/ioutil" +) + +func tmpFile(dir, pattern string) (string, error) { + f, err := ioutil.TempFile(dir, pattern) + if err != nil { + return "", err + } + f.Close() + return f.Name(), nil +} diff --git a/api/internal/getter/copy_dir.go b/api/internal/getter/copy_dir.go new file mode 100644 index 00000000000..641fe6d0f10 --- /dev/null +++ b/api/internal/getter/copy_dir.go @@ -0,0 +1,78 @@ +package getter + +import ( + "context" + "os" + "path/filepath" + "strings" +) + +// copyDir copies the src directory contents into dst. Both directories +// should already exist. +// +// If ignoreDot is set to true, then dot-prefixed files/folders are ignored. +func copyDir(ctx context.Context, dst string, src string, ignoreDot bool) error { + src, err := filepath.EvalSymlinks(src) + if err != nil { + return err + } + + walkFn := func(path string, info os.FileInfo, err error) error { + if err != nil { + return err + } + if path == src { + return nil + } + + if ignoreDot && strings.HasPrefix(filepath.Base(path), ".") { + // Skip any dot files + if info.IsDir() { + return filepath.SkipDir + } else { + return nil + } + } + + // The "path" has the src prefixed to it. We need to join our + // destination with the path without the src on it. + dstPath := filepath.Join(dst, path[len(src):]) + + // If we have a directory, make that subdirectory, then continue + // the walk. + if info.IsDir() { + if path == filepath.Join(src, dst) { + // dst is in src; don't walk it. 
+ return nil + } + + if err := os.MkdirAll(dstPath, 0755); err != nil { + return err + } + + return nil + } + + // If we have a file, copy the contents. + srcF, err := os.Open(path) + if err != nil { + return err + } + defer srcF.Close() + + dstF, err := os.Create(dstPath) + if err != nil { + return err + } + defer dstF.Close() + + if _, err := Copy(ctx, dstF, srcF); err != nil { + return err + } + + // Chmod it + return os.Chmod(dstPath, info.Mode()) + } + + return filepath.Walk(src, walkFn) +} diff --git a/api/internal/getter/decompress.go b/api/internal/getter/decompress.go new file mode 100644 index 00000000000..198bb0edd01 --- /dev/null +++ b/api/internal/getter/decompress.go @@ -0,0 +1,58 @@ +package getter + +import ( + "strings" +) + +// Decompressor defines the interface that must be implemented to add +// support for decompressing a type. +// +// Important: if you're implementing a decompressor, please use the +// containsDotDot helper in this file to ensure that files can't be +// decompressed outside of the specified directory. +type Decompressor interface { + // Decompress should decompress src to dst. dir specifies whether dst + // is a directory or single file. src is guaranteed to be a single file + // that exists. dst is not guaranteed to exist already. + Decompress(dst, src string, dir bool) error +} + +// Decompressors is the mapping of extension to the Decompressor implementation +// that will decompress that extension/type. +var Decompressors map[string]Decompressor + +func init() { + tbzDecompressor := new(TarBzip2Decompressor) + tgzDecompressor := new(TarGzipDecompressor) + txzDecompressor := new(TarXzDecompressor) + + Decompressors = map[string]Decompressor{ + "bz2": new(Bzip2Decompressor), + "gz": new(GzipDecompressor), + "xz": new(XzDecompressor), + "tar.bz2": tbzDecompressor, + "tar.gz": tgzDecompressor, + "tar.xz": txzDecompressor, + "tbz2": tbzDecompressor, + "tgz": tgzDecompressor, + "txz": txzDecompressor, + "zip": new(ZipDecompressor), + } +} + +// containsDotDot checks if the filepath value v contains a ".." entry. +// This will check filepath components by splitting along / or \. This +// function is copied directly from the Go net/http implementation. +func containsDotDot(v string) bool { + if !strings.Contains(v, "..") { + return false + } + for _, ent := range strings.FieldsFunc(v, isSlashRune) { + if ent == ".." { + return true + } + } + return false +} + +func isSlashRune(r rune) bool { return r == '/' || r == '\\' } diff --git a/api/internal/getter/decompress_bzip2.go b/api/internal/getter/decompress_bzip2.go new file mode 100644 index 00000000000..339f4cf7af2 --- /dev/null +++ b/api/internal/getter/decompress_bzip2.go @@ -0,0 +1,45 @@ +package getter + +import ( + "compress/bzip2" + "fmt" + "io" + "os" + "path/filepath" +) + +// Bzip2Decompressor is an implementation of Decompressor that can +// decompress bz2 files. 
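+//
+// Illustrative example (paths are made up):
+//
+//	err := new(Bzip2Decompressor).Decompress("/tmp/out/file.txt", "/tmp/in/file.txt.bz2", false)
+//
+// dir must be false here: a bare bz2 stream always yields a single file.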
+type Bzip2Decompressor struct{} + +func (d *Bzip2Decompressor) Decompress(dst, src string, dir bool) error { + // Directory isn't supported at all + if dir { + return fmt.Errorf("bzip2-compressed files can only unarchive to a single file") + } + + // If we're going into a directory we should make that first + if err := os.MkdirAll(filepath.Dir(dst), 0755); err != nil { + return err + } + + // File first + f, err := os.Open(src) + if err != nil { + return err + } + defer f.Close() + + // Bzip2 compression is second + bzipR := bzip2.NewReader(f) + + // Copy it out + dstF, err := os.Create(dst) + if err != nil { + return err + } + defer dstF.Close() + + _, err = io.Copy(dstF, bzipR) + return err +} diff --git a/api/internal/getter/decompress_bzip2_test.go b/api/internal/getter/decompress_bzip2_test.go new file mode 100644 index 00000000000..0315c59c875 --- /dev/null +++ b/api/internal/getter/decompress_bzip2_test.go @@ -0,0 +1,34 @@ +package getter + +import ( + "path/filepath" + "testing" +) + +func TestBzip2Decompressor(t *testing.T) { + cases := []TestDecompressCase{ + { + "single.bz2", + false, + false, + nil, + "d3b07384d113edec49eaa6238ad5ff00", + nil, + }, + + { + "single.bz2", + true, + true, + nil, + "", + nil, + }, + } + + for i, tc := range cases { + cases[i].Input = filepath.Join("./testdata", "decompress-bz2", tc.Input) + } + + TestDecompressor(t, new(Bzip2Decompressor), cases) +} diff --git a/api/internal/getter/decompress_gzip.go b/api/internal/getter/decompress_gzip.go new file mode 100644 index 00000000000..5ebf709b4f9 --- /dev/null +++ b/api/internal/getter/decompress_gzip.go @@ -0,0 +1,49 @@ +package getter + +import ( + "compress/gzip" + "fmt" + "io" + "os" + "path/filepath" +) + +// GzipDecompressor is an implementation of Decompressor that can +// decompress gzip files. 
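+//
+// Like Bzip2Decompressor, it only produces a single file (dir must be
+// false); .tar.gz archives are handled by TarGzipDecompressor instead.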
+type GzipDecompressor struct{} + +func (d *GzipDecompressor) Decompress(dst, src string, dir bool) error { + // Directory isn't supported at all + if dir { + return fmt.Errorf("gzip-compressed files can only unarchive to a single file") + } + + // If we're going into a directory we should make that first + if err := os.MkdirAll(filepath.Dir(dst), 0755); err != nil { + return err + } + + // File first + f, err := os.Open(src) + if err != nil { + return err + } + defer f.Close() + + // gzip compression is second + gzipR, err := gzip.NewReader(f) + if err != nil { + return err + } + defer gzipR.Close() + + // Copy it out + dstF, err := os.Create(dst) + if err != nil { + return err + } + defer dstF.Close() + + _, err = io.Copy(dstF, gzipR) + return err +} diff --git a/api/internal/getter/decompress_gzip_test.go b/api/internal/getter/decompress_gzip_test.go new file mode 100644 index 00000000000..b805678a427 --- /dev/null +++ b/api/internal/getter/decompress_gzip_test.go @@ -0,0 +1,34 @@ +package getter + +import ( + "path/filepath" + "testing" +) + +func TestGzipDecompressor(t *testing.T) { + cases := []TestDecompressCase{ + { + "single.gz", + false, + false, + nil, + "d3b07384d113edec49eaa6238ad5ff00", + nil, + }, + + { + "single.gz", + true, + true, + nil, + "", + nil, + }, + } + + for i, tc := range cases { + cases[i].Input = filepath.Join("./testdata", "decompress-gz", tc.Input) + } + + TestDecompressor(t, new(GzipDecompressor), cases) +} diff --git a/api/internal/getter/decompress_tar.go b/api/internal/getter/decompress_tar.go new file mode 100644 index 00000000000..b6986a25aec --- /dev/null +++ b/api/internal/getter/decompress_tar.go @@ -0,0 +1,160 @@ +package getter + +import ( + "archive/tar" + "fmt" + "io" + "os" + "path/filepath" + "time" +) + +// untar is a shared helper for untarring an archive. The reader should provide +// an uncompressed view of the tar archive. +func untar(input io.Reader, dst, src string, dir bool) error { + tarR := tar.NewReader(input) + done := false + dirHdrs := []*tar.Header{} + now := time.Now() + for { + hdr, err := tarR.Next() + if err == io.EOF { + if !done { + // Empty archive + return fmt.Errorf("empty archive: %s", src) + } + + break + } + if err != nil { + return err + } + + if hdr.Typeflag == tar.TypeXGlobalHeader || hdr.Typeflag == tar.TypeXHeader { + // don't unpack extended headers as files + continue + } + + path := dst + if dir { + // Disallow parent traversal + if containsDotDot(hdr.Name) { + return fmt.Errorf("entry contains '..': %s", hdr.Name) + } + + path = filepath.Join(path, hdr.Name) + } + + if hdr.FileInfo().IsDir() { + if !dir { + return fmt.Errorf("expected a single file: %s", src) + } + + // A directory, just make the directory and continue unarchiving... + if err := os.MkdirAll(path, 0755); err != nil { + return err + } + + // Record the directory information so that we may set its attributes + // after all files have been extracted + dirHdrs = append(dirHdrs, hdr) + + continue + } else { + // There is no ordering guarantee that a file in a directory is + // listed before the directory + dstPath := filepath.Dir(path) + + // Check that the directory exists, otherwise create it + if _, err := os.Stat(dstPath); os.IsNotExist(err) { + if err := os.MkdirAll(dstPath, 0755); err != nil { + return err + } + } + } + + // We have a file. 
If we already decoded, then it is an error + if !dir && done { + return fmt.Errorf("expected a single file, got multiple: %s", src) + } + + // Mark that we're done so future in single file mode errors + done = true + + // Open the file for writing + dstF, err := os.Create(path) + if err != nil { + return err + } + _, err = io.Copy(dstF, tarR) + dstF.Close() + if err != nil { + return err + } + + // Chmod the file + if err := os.Chmod(path, hdr.FileInfo().Mode()); err != nil { + return err + } + + // Set the access and modification time if valid, otherwise default to current time + aTime := now + mTime := now + if hdr.AccessTime.Unix() > 0 { + aTime = hdr.AccessTime + } + if hdr.ModTime.Unix() > 0 { + mTime = hdr.ModTime + } + if err := os.Chtimes(path, aTime, mTime); err != nil { + return err + } + } + + // Perform a final pass over extracted directories to update metadata + for _, dirHdr := range dirHdrs { + path := filepath.Join(dst, dirHdr.Name) + // Chmod the directory since they might be created before we know the mode flags + if err := os.Chmod(path, dirHdr.FileInfo().Mode()); err != nil { + return err + } + // Set the mtime/atime attributes since they would have been changed during extraction + aTime := now + mTime := now + if dirHdr.AccessTime.Unix() > 0 { + aTime = dirHdr.AccessTime + } + if dirHdr.ModTime.Unix() > 0 { + mTime = dirHdr.ModTime + } + if err := os.Chtimes(path, aTime, mTime); err != nil { + return err + } + } + + return nil +} + +// tarDecompressor is an implementation of Decompressor that can +// unpack tar files. +type tarDecompressor struct{} + +func (d *tarDecompressor) Decompress(dst, src string, dir bool) error { + // If we're going into a directory we should make that first + mkdir := dst + if !dir { + mkdir = filepath.Dir(dst) + } + if err := os.MkdirAll(mkdir, 0755); err != nil { + return err + } + + // File first + f, err := os.Open(src) + if err != nil { + return err + } + defer f.Close() + + return untar(f, dst, src, dir) +} diff --git a/api/internal/getter/decompress_tar_test.go b/api/internal/getter/decompress_tar_test.go new file mode 100644 index 00000000000..b8f4b8cc4b5 --- /dev/null +++ b/api/internal/getter/decompress_tar_test.go @@ -0,0 +1,43 @@ +package getter + +import ( + "path/filepath" + "testing" + "time" +) + +func TestTar(t *testing.T) { + mtime := time.Unix(0, 0) + cases := []TestDecompressCase{ + { + "extended_header.tar", + true, + false, + []string{"directory/", "directory/a", "directory/b"}, + "", + nil, + }, + { + "implied_dir.tar", + true, + false, + []string{"directory/", "directory/sub/", "directory/sub/a", "directory/sub/b"}, + "", + nil, + }, + { + "unix_time_0.tar", + true, + false, + []string{"directory/", "directory/sub/", "directory/sub/a", "directory/sub/b"}, + "", + &mtime, + }, + } + + for i, tc := range cases { + cases[i].Input = filepath.Join("./testdata", "decompress-tar", tc.Input) + } + + TestDecompressor(t, new(tarDecompressor), cases) +} diff --git a/api/internal/getter/decompress_tbz2.go b/api/internal/getter/decompress_tbz2.go new file mode 100644 index 00000000000..5391b5c8c52 --- /dev/null +++ b/api/internal/getter/decompress_tbz2.go @@ -0,0 +1,33 @@ +package getter + +import ( + "compress/bzip2" + "os" + "path/filepath" +) + +// TarBzip2Decompressor is an implementation of Decompressor that can +// decompress tar.bz2 files. 
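+//
+// Unlike Bzip2Decompressor, this handles whole archives: the bzip2 stream
+// is unwrapped and passed to untar, so dir may be true or false.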
+type TarBzip2Decompressor struct{} + +func (d *TarBzip2Decompressor) Decompress(dst, src string, dir bool) error { + // If we're going into a directory we should make that first + mkdir := dst + if !dir { + mkdir = filepath.Dir(dst) + } + if err := os.MkdirAll(mkdir, 0755); err != nil { + return err + } + + // File first + f, err := os.Open(src) + if err != nil { + return err + } + defer f.Close() + + // Bzip2 compression is second + bzipR := bzip2.NewReader(f) + return untar(bzipR, dst, src, dir) +} diff --git a/api/internal/getter/decompress_tbz2_test.go b/api/internal/getter/decompress_tbz2_test.go new file mode 100644 index 00000000000..14a16c21b56 --- /dev/null +++ b/api/internal/getter/decompress_tbz2_test.go @@ -0,0 +1,73 @@ +package getter + +import ( + "path/filepath" + "testing" +) + +func TestTarBzip2Decompressor(t *testing.T) { + orderingPaths := []string{"workers/", "workers/mq/", "workers/mq/__init__.py"} + + cases := []TestDecompressCase{ + { + "empty.tar.bz2", + false, + true, + nil, + "", + nil, + }, + + { + "single.tar.bz2", + false, + false, + nil, + "d3b07384d113edec49eaa6238ad5ff00", + nil, + }, + + { + "single.tar.bz2", + true, + false, + []string{"file"}, + "", + nil, + }, + + { + "multiple.tar.bz2", + true, + false, + []string{"file1", "file2"}, + "", + nil, + }, + + { + "multiple.tar.bz2", + false, + true, + nil, + "", + nil, + }, + + // Tests when the file is listed before the parent folder + { + "ordering.tar.bz2", + true, + false, + orderingPaths, + "", + nil, + }, + } + + for i, tc := range cases { + cases[i].Input = filepath.Join("./testdata", "decompress-tbz2", tc.Input) + } + + TestDecompressor(t, new(TarBzip2Decompressor), cases) +} diff --git a/api/internal/getter/decompress_testing.go b/api/internal/getter/decompress_testing.go new file mode 100644 index 00000000000..b2f662a89df --- /dev/null +++ b/api/internal/getter/decompress_testing.go @@ -0,0 +1,171 @@ +package getter + +import ( + "crypto/md5" + "encoding/hex" + "io" + "io/ioutil" + "os" + "path/filepath" + "reflect" + "runtime" + "sort" + "strings" + "time" + + "github.com/mitchellh/go-testing-interface" +) + +// TestDecompressCase is a single test case for testing decompressors +type TestDecompressCase struct { + Input string // Input is the complete path to the input file + Dir bool // Dir is whether or not we're testing directory mode + Err bool // Err is whether we expect an error or not + DirList []string // DirList is the list of files for Dir mode + FileMD5 string // FileMD5 is the expected MD5 for a single file + Mtime *time.Time // Mtime is the optionally expected mtime for a single file (or all files if in Dir mode) +} + +// TestDecompressor is a helper function for testing generic decompressors. 
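+//
+// Typical usage, as in the *_test.go files in this package: build a slice
+// of TestDecompressCase values, join each Input with the testdata
+// directory, then call TestDecompressor(t, new(ZipDecompressor), cases).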
+func TestDecompressor(t testing.T, d Decompressor, cases []TestDecompressCase) { + t.Helper() + + for _, tc := range cases { + t.Logf("Testing: %s", tc.Input) + + // Temporary dir to store stuff + td, err := ioutil.TempDir("", "getter") + if err != nil { + t.Fatalf("err: %s", err) + } + + // Destination is always joining result so that we have a new path + dst := filepath.Join(td, "subdir", "result") + + // We use a function so defers work + func() { + defer os.RemoveAll(td) + + // Decompress + err := d.Decompress(dst, tc.Input, tc.Dir) + if (err != nil) != tc.Err { + t.Fatalf("err %s: %s", tc.Input, err) + } + if tc.Err { + return + } + + // If it isn't a directory, then check for a single file + if !tc.Dir { + fi, err := os.Stat(dst) + if err != nil { + t.Fatalf("err %s: %s", tc.Input, err) + } + if fi.IsDir() { + t.Fatalf("err %s: expected file, got directory", tc.Input) + } + if tc.FileMD5 != "" { + actual := testMD5(t, dst) + expected := tc.FileMD5 + if actual != expected { + t.Fatalf("err %s: expected MD5 %s, got %s", tc.Input, expected, actual) + } + } + + if tc.Mtime != nil { + actual := fi.ModTime() + if tc.Mtime.Unix() > 0 { + expected := *tc.Mtime + if actual != expected { + t.Fatalf("err %s: expected mtime '%s' for %s, got '%s'", tc.Input, expected.String(), dst, actual.String()) + } + } else if actual.Unix() <= 0 { + t.Fatalf("err %s: expected mtime to be > 0, got '%s'", actual.String()) + } + } + + return + } + + // Convert expected for windows + expected := tc.DirList + if runtime.GOOS == "windows" { + for i, v := range expected { + expected[i] = strings.Replace(v, "/", "\\", -1) + } + } + + // Directory, check for the correct contents + actual := testListDir(t, dst) + if !reflect.DeepEqual(actual, expected) { + t.Fatalf("bad %s\n\n%#v\n\n%#v", tc.Input, actual, expected) + } + // Check for correct atime/mtime + for _, dir := range actual { + path := filepath.Join(dst, dir) + if tc.Mtime != nil { + fi, err := os.Stat(path) + if err != nil { + t.Fatalf("err: %s", err) + } + actual := fi.ModTime() + if tc.Mtime.Unix() > 0 { + expected := *tc.Mtime + if actual != expected { + t.Fatalf("err %s: expected mtime '%s' for %s, got '%s'", tc.Input, expected.String(), path, actual.String()) + } + } else if actual.Unix() < 0 { + t.Fatalf("err %s: expected mtime to be > 0, got '%s'", actual.String()) + } + + } + } + }() + } +} + +func testListDir(t testing.T, path string) []string { + var result []string + err := filepath.Walk(path, func(sub string, info os.FileInfo, err error) error { + if err != nil { + return err + } + + sub = strings.TrimPrefix(sub, path) + if sub == "" { + return nil + } + sub = sub[1:] // Trim the leading path sep. 
+ + // If it is a dir, add trailing sep + if info.IsDir() { + sub += string(os.PathSeparator) + } + + result = append(result, sub) + return nil + }) + if err != nil { + t.Fatalf("err: %s", err) + } + + sort.Strings(result) + return result +} + +func testMD5(t testing.T, path string) string { + f, err := os.Open(path) + if err != nil { + t.Fatalf("err: %s", err) + } + defer f.Close() + + h := md5.New() + _, err = io.Copy(h, f) + if err != nil { + t.Fatalf("err: %s", err) + } + + result := h.Sum(nil) + return hex.EncodeToString(result) +} diff --git a/api/internal/getter/decompress_tgz.go b/api/internal/getter/decompress_tgz.go new file mode 100644 index 00000000000..65eb70dd2c2 --- /dev/null +++ b/api/internal/getter/decompress_tgz.go @@ -0,0 +1,39 @@ +package getter + +import ( + "compress/gzip" + "fmt" + "os" + "path/filepath" +) + +// TarGzipDecompressor is an implementation of Decompressor that can +// decompress tar.gzip files. +type TarGzipDecompressor struct{} + +func (d *TarGzipDecompressor) Decompress(dst, src string, dir bool) error { + // If we're going into a directory we should make that first + mkdir := dst + if !dir { + mkdir = filepath.Dir(dst) + } + if err := os.MkdirAll(mkdir, 0755); err != nil { + return err + } + + // File first + f, err := os.Open(src) + if err != nil { + return err + } + defer f.Close() + + // Gzip compression is second + gzipR, err := gzip.NewReader(f) + if err != nil { + return fmt.Errorf("Error opening a gzip reader for %s: %s", src, err) + } + defer gzipR.Close() + + return untar(gzipR, dst, src, dir) +} diff --git a/api/internal/getter/decompress_tgz_test.go b/api/internal/getter/decompress_tgz_test.go new file mode 100644 index 00000000000..e0e4d2380c9 --- /dev/null +++ b/api/internal/getter/decompress_tgz_test.go @@ -0,0 +1,95 @@ +package getter + +import ( + "path/filepath" + "testing" +) + +func TestTarGzipDecompressor(t *testing.T) { + + multiplePaths := []string{"dir/", "dir/test2", "test1"} + orderingPaths := []string{"workers/", "workers/mq/", "workers/mq/__init__.py"} + + cases := []TestDecompressCase{ + { + "empty.tar.gz", + false, + true, + nil, + "", + nil, + }, + + { + "single.tar.gz", + false, + false, + nil, + "d3b07384d113edec49eaa6238ad5ff00", + nil, + }, + + { + "single.tar.gz", + true, + false, + []string{"file"}, + "", + nil, + }, + + { + "multiple.tar.gz", + true, + false, + []string{"file1", "file2"}, + "", + nil, + }, + + { + "multiple.tar.gz", + false, + true, + nil, + "", + nil, + }, + + { + "multiple_dir.tar.gz", + true, + false, + multiplePaths, + "", + nil, + }, + + // Tests when the file is listed before the parent folder + { + "ordering.tar.gz", + true, + false, + orderingPaths, + "", + nil, + }, + + // Tests that a tar.gz can't contain references with "..". + // GNU `tar` also disallows this. + { + "outside_parent.tar.gz", + true, + true, + nil, + "", + nil, + }, + } + + for i, tc := range cases { + cases[i].Input = filepath.Join("./testdata", "decompress-tgz", tc.Input) + } + + TestDecompressor(t, new(TarGzipDecompressor), cases) +} diff --git a/api/internal/getter/decompress_txz.go b/api/internal/getter/decompress_txz.go new file mode 100644 index 00000000000..5e151c127df --- /dev/null +++ b/api/internal/getter/decompress_txz.go @@ -0,0 +1,39 @@ +package getter + +import ( + "fmt" + "os" + "path/filepath" + + "github.com/ulikunitz/xz" +) + +// TarXzDecompressor is an implementation of Decompressor that can +// decompress tar.xz files. 
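+//
+// The xz stream is decoded with github.com/ulikunitz/xz and handed to
+// untar, so both single-file and directory extraction are supported.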
+type TarXzDecompressor struct{} + +func (d *TarXzDecompressor) Decompress(dst, src string, dir bool) error { + // If we're going into a directory we should make that first + mkdir := dst + if !dir { + mkdir = filepath.Dir(dst) + } + if err := os.MkdirAll(mkdir, 0755); err != nil { + return err + } + + // File first + f, err := os.Open(src) + if err != nil { + return err + } + defer f.Close() + + // xz compression is second + txzR, err := xz.NewReader(f) + if err != nil { + return fmt.Errorf("Error opening an xz reader for %s: %s", src, err) + } + + return untar(txzR, dst, src, dir) +} diff --git a/api/internal/getter/decompress_txz_test.go b/api/internal/getter/decompress_txz_test.go new file mode 100644 index 00000000000..cb71c42505a --- /dev/null +++ b/api/internal/getter/decompress_txz_test.go @@ -0,0 +1,84 @@ +package getter + +import ( + "path/filepath" + "testing" +) + +func TestTarXzDecompressor(t *testing.T) { + + multiplePaths := []string{"dir/", "dir/test2", "test1"} + orderingPaths := []string{"workers/", "workers/mq/", "workers/mq/__init__.py"} + + cases := []TestDecompressCase{ + { + "empty.tar.xz", + false, + true, + nil, + "", + nil, + }, + + { + "single.tar.xz", + false, + false, + nil, + "d3b07384d113edec49eaa6238ad5ff00", + nil, + }, + + { + "single.tar.xz", + true, + false, + []string{"file"}, + "", + nil, + }, + + { + "multiple.tar.xz", + true, + false, + []string{"file1", "file2"}, + "", + nil, + }, + + { + "multiple.tar.xz", + false, + true, + nil, + "", + nil, + }, + + { + "multiple_dir.tar.xz", + true, + false, + multiplePaths, + "", + nil, + }, + + // Tests when the file is listed before the parent folder + { + "ordering.tar.xz", + true, + false, + orderingPaths, + "", + nil, + }, + } + + for i, tc := range cases { + cases[i].Input = filepath.Join("./testdata", "decompress-txz", tc.Input) + } + + TestDecompressor(t, new(TarXzDecompressor), cases) +} diff --git a/api/internal/getter/decompress_xz.go b/api/internal/getter/decompress_xz.go new file mode 100644 index 00000000000..4e37abab108 --- /dev/null +++ b/api/internal/getter/decompress_xz.go @@ -0,0 +1,49 @@ +package getter + +import ( + "fmt" + "io" + "os" + "path/filepath" + + "github.com/ulikunitz/xz" +) + +// XzDecompressor is an implementation of Decompressor that can +// decompress xz files. 
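+//
+// As with the other plain (non-tar) decompressors, dir must be false; an
+// xz stream always decompresses to a single file.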
+type XzDecompressor struct{} + +func (d *XzDecompressor) Decompress(dst, src string, dir bool) error { + // Directory isn't supported at all + if dir { + return fmt.Errorf("xz-compressed files can only unarchive to a single file") + } + + // If we're going into a directory we should make that first + if err := os.MkdirAll(filepath.Dir(dst), 0755); err != nil { + return err + } + + // File first + f, err := os.Open(src) + if err != nil { + return err + } + defer f.Close() + + // xz compression is second + xzR, err := xz.NewReader(f) + if err != nil { + return err + } + + // Copy it out + dstF, err := os.Create(dst) + if err != nil { + return err + } + defer dstF.Close() + + _, err = io.Copy(dstF, xzR) + return err +} diff --git a/api/internal/getter/decompress_xz_test.go b/api/internal/getter/decompress_xz_test.go new file mode 100644 index 00000000000..9aa55b1f86a --- /dev/null +++ b/api/internal/getter/decompress_xz_test.go @@ -0,0 +1,34 @@ +package getter + +import ( + "path/filepath" + "testing" +) + +func TestXzDecompressor(t *testing.T) { + cases := []TestDecompressCase{ + { + "single.xz", + false, + false, + nil, + "d3b07384d113edec49eaa6238ad5ff00", + nil, + }, + + { + "single.xz", + true, + true, + nil, + "", + nil, + }, + } + + for i, tc := range cases { + cases[i].Input = filepath.Join("./testdata", "decompress-xz", tc.Input) + } + + TestDecompressor(t, new(XzDecompressor), cases) +} diff --git a/api/internal/getter/decompress_zip.go b/api/internal/getter/decompress_zip.go new file mode 100644 index 00000000000..0830f79143e --- /dev/null +++ b/api/internal/getter/decompress_zip.go @@ -0,0 +1,101 @@ +package getter + +import ( + "archive/zip" + "fmt" + "io" + "os" + "path/filepath" +) + +// ZipDecompressor is an implementation of Decompressor that can +// decompress zip files. +type ZipDecompressor struct{} + +func (d *ZipDecompressor) Decompress(dst, src string, dir bool) error { + // If we're going into a directory we should make that first + mkdir := dst + if !dir { + mkdir = filepath.Dir(dst) + } + if err := os.MkdirAll(mkdir, 0755); err != nil { + return err + } + + // Open the zip + zipR, err := zip.OpenReader(src) + if err != nil { + return err + } + defer zipR.Close() + + // Check the zip integrity + if len(zipR.File) == 0 { + // Empty archive + return fmt.Errorf("empty archive: %s", src) + } + if !dir && len(zipR.File) > 1 { + return fmt.Errorf("expected a single file: %s", src) + } + + // Go through and unarchive + for _, f := range zipR.File { + path := dst + if dir { + // Disallow parent traversal + if containsDotDot(f.Name) { + return fmt.Errorf("entry contains '..': %s", f.Name) + } + + path = filepath.Join(path, f.Name) + } + + if f.FileInfo().IsDir() { + if !dir { + return fmt.Errorf("expected a single file: %s", src) + } + + // A directory, just make the directory and continue unarchiving... + if err := os.MkdirAll(path, 0755); err != nil { + return err + } + + continue + } + + // Create the enclosing directories if we must. ZIP files aren't + // required to contain entries for just the directories so this + // can happen. 
+ if dir { + if err := os.MkdirAll(filepath.Dir(path), 0755); err != nil { + return err + } + } + + // Open the file for reading + srcF, err := f.Open() + if err != nil { + return err + } + + // Open the file for writing + dstF, err := os.Create(path) + if err != nil { + srcF.Close() + return err + } + _, err = io.Copy(dstF, srcF) + srcF.Close() + dstF.Close() + if err != nil { + return err + } + + // Chmod the file + if err := os.Chmod(path, f.Mode()); err != nil { + return err + } + } + + return nil +} diff --git a/api/internal/getter/decompress_zip_test.go b/api/internal/getter/decompress_zip_test.go new file mode 100644 index 00000000000..d22ea109c8a --- /dev/null +++ b/api/internal/getter/decompress_zip_test.go @@ -0,0 +1,98 @@ +package getter + +import ( + "path/filepath" + "testing" +) + +func TestZipDecompressor(t *testing.T) { + cases := []TestDecompressCase{ + { + "empty.zip", + false, + true, + nil, + "", + nil, + }, + + { + "single.zip", + false, + false, + nil, + "d3b07384d113edec49eaa6238ad5ff00", + nil, + }, + + { + "single.zip", + true, + false, + []string{"file"}, + "", + nil, + }, + + { + "multiple.zip", + true, + false, + []string{"file1", "file2"}, + "", + nil, + }, + + { + "multiple.zip", + false, + true, + nil, + "", + nil, + }, + + { + "subdir.zip", + true, + false, + []string{"file1", "subdir/", "subdir/child"}, + "", + nil, + }, + + { + "subdir_empty.zip", + true, + false, + []string{"file1", "subdir/"}, + "", + nil, + }, + + { + "subdir_missing_dir.zip", + true, + false, + []string{"file1", "subdir/", "subdir/child"}, + "", + nil, + }, + + // Tests that a zip can't contain references with "..". + { + "outside_parent.zip", + true, + true, + nil, + "", + nil, + }, + } + + for i, tc := range cases { + cases[i].Input = filepath.Join("./testdata", "decompress-zip", tc.Input) + } + + TestDecompressor(t, new(ZipDecompressor), cases) +} diff --git a/api/internal/getter/detect.go b/api/internal/getter/detect.go new file mode 100644 index 00000000000..4b59eb935f7 --- /dev/null +++ b/api/internal/getter/detect.go @@ -0,0 +1,103 @@ +package getter + +import ( + "fmt" + "path/filepath" + + "./helper/url" +) + +// Detector defines the interface that an invalid URL or a URL with a blank +// scheme is passed through in order to determine if its shorthand for +// something else well-known. +type Detector interface { + // Detect will detect whether the string matches a known pattern to + // turn it into a proper URL. + Detect(string, string) (string, bool, error) +} + +// Detectors is the list of detectors that are tried on an invalid URL. +// This is also the order they're tried (index 0 is first). +var Detectors []Detector + +func init() { + Detectors = []Detector{ + new(GitHubDetector), + new(GitDetector), + new(BitBucketDetector), + new(FileDetector), + } +} + +// Detect turns a source string into another source string if it is +// detected to be of a known pattern. +// +// The third parameter should be the list of detectors to use in the +// order to try them. If you don't want to configure this, just use +// the global Detectors variable. +// +// This is safe to be called with an already valid source string: Detect +// will just return it. 
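+//
+// Illustrative example (repository is made up): with the default Detectors,
+// a shorthand such as "bitbucket.org/example/repo" is resolved by
+// BitBucketDetector into a forced-getter URL like
+// "git::https://bitbucket.org/example/repo.git" (the exact result depends on
+// what the BitBucket API reports for the repository).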
+func Detect(src string, pwd string, ds []Detector) (string, error) { + getForce, getSrc := getForcedGetter(src) + + // Separate out the subdir if there is one, we don't pass that to detect + getSrc, subDir := SourceDirSubdir(getSrc) + + u, err := url.Parse(getSrc) + if err == nil && u.Scheme != "" { + // Valid URL + return src, nil + } + + for _, d := range ds { + result, ok, err := d.Detect(getSrc, pwd) + if err != nil { + return "", err + } + if !ok { + continue + } + + var detectForce string + detectForce, result = getForcedGetter(result) + result, detectSubdir := SourceDirSubdir(result) + + // If we have a subdir from the detection, then prepend it to our + // requested subdir. + if detectSubdir != "" { + if subDir != "" { + subDir = filepath.Join(detectSubdir, subDir) + } else { + subDir = detectSubdir + } + } + + if subDir != "" { + u, err := url.Parse(result) + if err != nil { + return "", fmt.Errorf("Error parsing URL: %s", err) + } + u.Path += "//" + subDir + + // a subdir may contain wildcards, but in order to support them we + // have to ensure the path isn't escaped. + u.RawPath = u.Path + + result = u.String() + } + + // Preserve the forced getter if it exists. We try to use the + // original set force first, followed by any force set by the + // detector. + if getForce != "" { + result = fmt.Sprintf("%s::%s", getForce, result) + } else if detectForce != "" { + result = fmt.Sprintf("%s::%s", detectForce, result) + } + + return result, nil + } + + return "", fmt.Errorf("invalid source string: %s", src) +} diff --git a/api/internal/getter/detect_bitbucket.go b/api/internal/getter/detect_bitbucket.go new file mode 100644 index 00000000000..19047eb1979 --- /dev/null +++ b/api/internal/getter/detect_bitbucket.go @@ -0,0 +1,66 @@ +package getter + +import ( + "encoding/json" + "fmt" + "net/http" + "net/url" + "strings" +) + +// BitBucketDetector implements Detector to detect BitBucket URLs and turn +// them into URLs that the Git or Hg Getter can understand. +type BitBucketDetector struct{} + +func (d *BitBucketDetector) Detect(src, _ string) (string, bool, error) { + if len(src) == 0 { + return "", false, nil + } + + if strings.HasPrefix(src, "bitbucket.org/") { + return d.detectHTTP(src) + } + + return "", false, nil +} + +func (d *BitBucketDetector) detectHTTP(src string) (string, bool, error) { + u, err := url.Parse("https://" + src) + if err != nil { + return "", true, fmt.Errorf("error parsing BitBucket URL: %s", err) + } + + // We need to get info on this BitBucket repository to determine whether + // it is Git or Hg. 
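+	// The response's "scm" field is expected to be either "git" or "hg";
+	// any other value is rejected below.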
+ var info struct { + SCM string `json:"scm"` + } + infoUrl := "https://api.bitbucket.org/2.0/repositories" + u.Path + resp, err := http.Get(infoUrl) + if err != nil { + return "", true, fmt.Errorf("error looking up BitBucket URL: %s", err) + } + if resp.StatusCode == 403 { + // A private repo + return "", true, fmt.Errorf( + "shorthand BitBucket URL can't be used for private repos, " + + "please use a full URL") + } + dec := json.NewDecoder(resp.Body) + if err := dec.Decode(&info); err != nil { + return "", true, fmt.Errorf("error looking up BitBucket URL: %s", err) + } + + switch info.SCM { + case "git": + if !strings.HasSuffix(u.Path, ".git") { + u.Path += ".git" + } + + return "git::" + u.String(), true, nil + case "hg": + return "hg::" + u.String(), true, nil + default: + return "", true, fmt.Errorf("unknown BitBucket SCM type: %s", info.SCM) + } +} diff --git a/api/internal/getter/detect_bitbucket_test.go b/api/internal/getter/detect_bitbucket_test.go new file mode 100644 index 00000000000..202c932560a --- /dev/null +++ b/api/internal/getter/detect_bitbucket_test.go @@ -0,0 +1,67 @@ +package getter + +import ( + "net/http" + "strings" + "testing" +) + +const testBBUrl = "https://bitbucket.org/hashicorp/tf-test-git" + +func TestBitBucketDetector(t *testing.T) { + t.Parallel() + + if _, err := http.Get(testBBUrl); err != nil { + t.Log("internet may not be working, skipping BB tests") + t.Skip() + } + + cases := []struct { + Input string + Output string + }{ + // HTTP + { + "bitbucket.org/hashicorp/tf-test-git", + "git::https://bitbucket.org/hashicorp/tf-test-git.git", + }, + { + "bitbucket.org/hashicorp/tf-test-git.git", + "git::https://bitbucket.org/hashicorp/tf-test-git.git", + }, + { + "bitbucket.org/hashicorp/tf-test-hg", + "hg::https://bitbucket.org/hashicorp/tf-test-hg", + }, + } + + pwd := "/pwd" + f := new(BitBucketDetector) + for i, tc := range cases { + var err error + for i := 0; i < 3; i++ { + var output string + var ok bool + output, ok, err = f.Detect(tc.Input, pwd) + if err != nil { + if strings.Contains(err.Error(), "invalid character") { + continue + } + + t.Fatalf("err: %s", err) + } + if !ok { + t.Fatal("not ok") + } + + if output != tc.Output { + t.Fatalf("%d: bad: %#v", i, output) + } + + break + } + if i >= 3 { + t.Fatalf("failure from bitbucket: %s", err) + } + } +} diff --git a/api/internal/getter/detect_file.go b/api/internal/getter/detect_file.go new file mode 100644 index 00000000000..4ef41ea73fa --- /dev/null +++ b/api/internal/getter/detect_file.go @@ -0,0 +1,67 @@ +package getter + +import ( + "fmt" + "os" + "path/filepath" + "runtime" +) + +// FileDetector implements Detector to detect file paths. +type FileDetector struct{} + +func (d *FileDetector) Detect(src, pwd string) (string, bool, error) { + if len(src) == 0 { + return "", false, nil + } + + if !filepath.IsAbs(src) { + if pwd == "" { + return "", true, fmt.Errorf( + "relative paths require a module with a pwd") + } + + // Stat the pwd to determine if its a symbolic link. If it is, + // then the pwd becomes the original directory. Otherwise, + // `filepath.Join` below does some weird stuff. + // + // We just ignore if the pwd doesn't exist. That error will be + // caught later when we try to use the URL. 
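+		//
+		// A common case is a temporary directory that is itself a symlink,
+		// e.g. on macOS where /var points at /private/var; resolving it here
+		// keeps the resulting file:// URL rooted at a real path.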
+ if fi, err := os.Lstat(pwd); !os.IsNotExist(err) { + if err != nil { + return "", true, err + } + if fi.Mode()&os.ModeSymlink != 0 { + pwd, err = filepath.EvalSymlinks(pwd) + if err != nil { + return "", true, err + } + + // The symlink itself might be a relative path, so we have to + // resolve this to have a correctly rooted URL. + pwd, err = filepath.Abs(pwd) + if err != nil { + return "", true, err + } + } + } + + src = filepath.Join(pwd, src) + } + + return fmtFileURL(src), true, nil +} + +func fmtFileURL(path string) string { + if runtime.GOOS == "windows" { + // Make sure we're using "/" on Windows. URLs are "/"-based. + path = filepath.ToSlash(path) + return fmt.Sprintf("file://%s", path) + } + + // Make sure that we don't start with "/" since we add that below. + if path[0] == '/' { + path = path[1:] + } + return fmt.Sprintf("file:///%s", path) +} diff --git a/api/internal/getter/detect_file_test.go b/api/internal/getter/detect_file_test.go new file mode 100644 index 00000000000..0f7a055c937 --- /dev/null +++ b/api/internal/getter/detect_file_test.go @@ -0,0 +1,115 @@ +package getter + +import ( + "fmt" + "os" + "path/filepath" + "runtime" + "strings" + "testing" +) + +type fileTest struct { + in, pwd, out string + err bool +} + +var fileTests = []fileTest{ + {"./foo", "/pwd", "file:///pwd/foo", false}, + {"./foo?foo=bar", "/pwd", "file:///pwd/foo?foo=bar", false}, + {"foo", "/pwd", "file:///pwd/foo", false}, +} + +var unixFileTests = []fileTest{ + {"./foo", "testdata/detect-file-symlink-pwd/syml/pwd", + "testdata/detect-file-symlink-pwd/real/foo", false}, + + {"/foo", "/pwd", "file:///foo", false}, + {"/foo?bar=baz", "/pwd", "file:///foo?bar=baz", false}, +} + +var winFileTests = []fileTest{ + {"/foo", "/pwd", "file:///pwd/foo", false}, + {`C:\`, `/pwd`, `file://C:/`, false}, + {`C:\?bar=baz`, `/pwd`, `file://C:/?bar=baz`, false}, +} + +func TestFileDetector(t *testing.T) { + if runtime.GOOS == "windows" { + fileTests = append(fileTests, winFileTests...) + } else { + fileTests = append(fileTests, unixFileTests...) + } + + // Get the pwd + pwdRoot, err := os.Getwd() + if err != nil { + t.Fatalf("err: %s", err) + } + pwdRoot, err = filepath.Abs(pwdRoot) + if err != nil { + t.Fatalf("err: %s", err) + } + + f := new(FileDetector) + for i, tc := range fileTests { + t.Run(fmt.Sprintf("%d", i), func(t *testing.T) { + pwd := tc.pwd + + out, ok, err := f.Detect(tc.in, pwd) + if err != nil { + t.Fatalf("err: %s", err) + } + if !ok { + t.Fatal("not ok") + } + + expected := tc.out + if !strings.HasPrefix(expected, "file://") { + expected = "file://" + filepath.Join(pwdRoot, expected) + } + + if out != expected { + t.Fatalf("input: %q\npwd: %q\nexpected: %q\nbad output: %#v", + tc.in, pwd, expected, out) + } + }) + } +} + +var noPwdFileTests = []fileTest{ + {in: "./foo", pwd: "", out: "", err: true}, + {in: "foo", pwd: "", out: "", err: true}, +} + +var noPwdUnixFileTests = []fileTest{ + {in: "/foo", pwd: "", out: "file:///foo", err: false}, +} + +var noPwdWinFileTests = []fileTest{ + {in: "/foo", pwd: "", out: "", err: true}, + {in: `C:\`, pwd: ``, out: `file://C:/`, err: false}, +} + +func TestFileDetector_noPwd(t *testing.T) { + if runtime.GOOS == "windows" { + noPwdFileTests = append(noPwdFileTests, noPwdWinFileTests...) + } else { + noPwdFileTests = append(noPwdFileTests, noPwdUnixFileTests...) 
+ } + + f := new(FileDetector) + for i, tc := range noPwdFileTests { + out, ok, err := f.Detect(tc.in, tc.pwd) + if err != nil != tc.err { + t.Fatalf("%d: err: %s", i, err) + } + if !ok { + t.Fatal("not ok") + } + + if out != tc.out { + t.Fatalf("%d: bad: %#v", i, out) + } + } +} diff --git a/api/internal/getter/detect_file_unix_test.go b/api/internal/getter/detect_file_unix_test.go new file mode 100644 index 00000000000..657f1a41c96 --- /dev/null +++ b/api/internal/getter/detect_file_unix_test.go @@ -0,0 +1,70 @@ +// +build test unix + +package getter + +import ( + "io/ioutil" + "os" + "path/filepath" + "testing" +) + +// If a relative symlink is passed in as the pwd to Detect, the resulting URL +// can have an invalid path. +func TestFileDetector_relativeSymlink(t *testing.T) { + tmpDir, err := ioutil.TempDir("", "go-getter") + if err != nil { + t.Fatal(err) + } + + defer os.RemoveAll(tmpDir) + + // We may have a symlinked tmp dir, + // e.g. OSX uses /var -> /private/var + tmpDir, err = filepath.EvalSymlinks(tmpDir) + if err != nil { + t.Fatal(err) + } + + err = os.Mkdir(filepath.Join(tmpDir, "realPWD"), 0755) + if err != nil { + t.Fatal(err) + } + + subdir := filepath.Join(tmpDir, "subdir") + err = os.Mkdir(subdir, 0755) + if err != nil { + t.Fatal(err) + } + + prevDir, err := os.Getwd() + if err != nil { + t.Fatal(err) + } + defer os.Chdir(prevDir) + + err = os.Chdir(subdir) + if err != nil { + t.Fatal(err) + } + + err = os.Symlink("../realPWD", "linkedPWD") + if err != nil { + t.Fatal(err) + } + + // if detech doesn't fully resolve the pwd symlink, the output will be the + // invalid path: "file:///../modules/foo" + f := new(FileDetector) + out, ok, err := f.Detect("../modules/foo", "./linkedPWD") + if err != nil { + t.Fatalf("err: %v", err) + } + if !ok { + t.Fatal("not ok") + } + if out != "file://"+filepath.Join(tmpDir, "modules/foo") { + t.Logf("expected: %v", "file://"+filepath.Join(tmpDir, "modules/foo")) + t.Fatalf("bad: %v", out) + } +} diff --git a/api/internal/getter/detect_git.go b/api/internal/getter/detect_git.go new file mode 100644 index 00000000000..eeb8a04c5e9 --- /dev/null +++ b/api/internal/getter/detect_git.go @@ -0,0 +1,26 @@ +package getter + +// GitDetector implements Detector to detect Git SSH URLs such as +// git@host.com:dir1/dir2 and converts them to proper URLs. 
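+//
+// For example, "git@github.com:hashicorp/foo.git" becomes
+// "git::ssh://git@github.com/hashicorp/foo.git". The username must be
+// "git" for the source to be treated as a Git SSH URL.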
+type GitDetector struct{} + +func (d *GitDetector) Detect(src, _ string) (string, bool, error) { + if len(src) == 0 { + return "", false, nil + } + + u, err := detectSSH(src) + if err != nil { + return "", true, err + } + if u == nil { + return "", false, nil + } + + // We require the username to be "git" to assume that this is a Git URL + if u.User.Username() != "git" { + return "", false, nil + } + + return "git::" + u.String(), true, nil +} diff --git a/api/internal/getter/detect_git_test.go b/api/internal/getter/detect_git_test.go new file mode 100644 index 00000000000..a71dde2c315 --- /dev/null +++ b/api/internal/getter/detect_git_test.go @@ -0,0 +1,69 @@ +package getter + +import ( + "testing" +) + +func TestGitDetector(t *testing.T) { + cases := []struct { + Input string + Output string + }{ + { + "git@github.com:hashicorp/foo.git", + "git::ssh://git@github.com/hashicorp/foo.git", + }, + { + "git@github.com:org/project.git?ref=test-branch", + "git::ssh://git@github.com/org/project.git?ref=test-branch", + }, + { + "git@github.com:hashicorp/foo.git//bar", + "git::ssh://git@github.com/hashicorp/foo.git//bar", + }, + { + "git@github.com:hashicorp/foo.git?foo=bar", + "git::ssh://git@github.com/hashicorp/foo.git?foo=bar", + }, + { + "git@github.xyz.com:org/project.git", + "git::ssh://git@github.xyz.com/org/project.git", + }, + { + "git@github.xyz.com:org/project.git?ref=test-branch", + "git::ssh://git@github.xyz.com/org/project.git?ref=test-branch", + }, + { + "git@github.xyz.com:org/project.git//module/a", + "git::ssh://git@github.xyz.com/org/project.git//module/a", + }, + { + "git@github.xyz.com:org/project.git//module/a?ref=test-branch", + "git::ssh://git@github.xyz.com/org/project.git//module/a?ref=test-branch", + }, + { + // Already in the canonical form, so no rewriting required + // When the ssh: protocol is used explicitly, we recognize it as + // URL form rather than SCP-like form, so the part after the colon + // is a port number, not part of the path. + "git::ssh://git@git.example.com:2222/hashicorp/foo.git", + "git::ssh://git@git.example.com:2222/hashicorp/foo.git", + }, + } + + pwd := "/pwd" + f := new(GitDetector) + ds := []Detector{f} + for _, tc := range cases { + t.Run(tc.Input, func(t *testing.T) { + output, err := Detect(tc.Input, pwd, ds) + if err != nil { + t.Fatalf("unexpected error: %s", err) + } + + if output != tc.Output { + t.Errorf("wrong result\ninput: %s\ngot: %s\nwant: %s", tc.Input, output, tc.Output) + } + }) + } +} diff --git a/api/internal/getter/detect_github.go b/api/internal/getter/detect_github.go new file mode 100644 index 00000000000..4bf4daf238d --- /dev/null +++ b/api/internal/getter/detect_github.go @@ -0,0 +1,47 @@ +package getter + +import ( + "fmt" + "net/url" + "strings" +) + +// GitHubDetector implements Detector to detect GitHub URLs and turn +// them into URLs that the Git Getter can understand. 
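+//
+// For example, "github.com/hashicorp/foo/bar" becomes
+// "git::https://github.com/hashicorp/foo.git//bar": the first three path
+// components identify the repository and anything after them becomes the
+// "//" subdirectory portion.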
+type GitHubDetector struct{} + +func (d *GitHubDetector) Detect(src, _ string) (string, bool, error) { + if len(src) == 0 { + return "", false, nil + } + + if strings.HasPrefix(src, "github.com/") { + return d.detectHTTP(src) + } + + return "", false, nil +} + +func (d *GitHubDetector) detectHTTP(src string) (string, bool, error) { + parts := strings.Split(src, "/") + if len(parts) < 3 { + return "", false, fmt.Errorf( + "GitHub URLs should be github.com/username/repo") + } + + urlStr := fmt.Sprintf("https://%s", strings.Join(parts[:3], "/")) + url, err := url.Parse(urlStr) + if err != nil { + return "", true, fmt.Errorf("error parsing GitHub URL: %s", err) + } + + if !strings.HasSuffix(url.Path, ".git") { + url.Path += ".git" + } + + if len(parts) > 3 { + url.Path += "//" + strings.Join(parts[3:], "/") + } + + return "git::" + url.String(), true, nil +} diff --git a/api/internal/getter/detect_github_test.go b/api/internal/getter/detect_github_test.go new file mode 100644 index 00000000000..70f1c832919 --- /dev/null +++ b/api/internal/getter/detect_github_test.go @@ -0,0 +1,44 @@ +package getter + +import ( + "testing" +) + +func TestGitHubDetector(t *testing.T) { + cases := []struct { + Input string + Output string + }{ + // HTTP + {"github.com/hashicorp/foo", "git::https://github.com/hashicorp/foo.git"}, + {"github.com/hashicorp/foo.git", "git::https://github.com/hashicorp/foo.git"}, + { + "github.com/hashicorp/foo/bar", + "git::https://github.com/hashicorp/foo.git//bar", + }, + { + "github.com/hashicorp/foo?foo=bar", + "git::https://github.com/hashicorp/foo.git?foo=bar", + }, + { + "github.com/hashicorp/foo.git?foo=bar", + "git::https://github.com/hashicorp/foo.git?foo=bar", + }, + } + + pwd := "/pwd" + f := new(GitHubDetector) + for i, tc := range cases { + output, ok, err := f.Detect(tc.Input, pwd) + if err != nil { + t.Fatalf("err: %s", err) + } + if !ok { + t.Fatal("not ok") + } + + if output != tc.Output { + t.Fatalf("%d: bad: %#v", i, output) + } + } +} diff --git a/api/internal/getter/detect_ssh.go b/api/internal/getter/detect_ssh.go new file mode 100644 index 00000000000..c0dbe9d4754 --- /dev/null +++ b/api/internal/getter/detect_ssh.go @@ -0,0 +1,49 @@ +package getter + +import ( + "fmt" + "net/url" + "regexp" + "strings" +) + +// Note that we do not have an SSH-getter currently so this file serves +// only to hold the detectSSH helper that is used by other detectors. + +// sshPattern matches SCP-like SSH patterns (user@host:path) +var sshPattern = regexp.MustCompile("^(?:([^@]+)@)?([^:]+):/?(.+)$") + +// detectSSH determines if the src string matches an SSH-like URL and +// converts it into a net.URL compatible string. This returns nil if the +// string doesn't match the SSH pattern. 
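+// For example, "git@my.custom.git:dir1/dir2" becomes
+// "ssh://git@my.custom.git/dir1/dir2"; anything after a "?" in the path is
+// re-encoded as the URL's query string.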
+// +// This function is tested indirectly via detect_git_test.go +func detectSSH(src string) (*url.URL, error) { + matched := sshPattern.FindStringSubmatch(src) + if matched == nil { + return nil, nil + } + + user := matched[1] + host := matched[2] + path := matched[3] + qidx := strings.Index(path, "?") + if qidx == -1 { + qidx = len(path) + } + + var u url.URL + u.Scheme = "ssh" + u.User = url.User(user) + u.Host = host + u.Path = path[0:qidx] + if qidx < len(path) { + q, err := url.ParseQuery(path[qidx+1:]) + if err != nil { + return nil, fmt.Errorf("error parsing GitHub SSH URL: %s", err) + } + u.RawQuery = q.Encode() + } + + return &u, nil +} diff --git a/api/internal/getter/detect_test.go b/api/internal/getter/detect_test.go new file mode 100644 index 00000000000..9bef662a7bb --- /dev/null +++ b/api/internal/getter/detect_test.go @@ -0,0 +1,92 @@ +package getter + +import ( + "fmt" + "testing" +) + +func TestDetect(t *testing.T) { + cases := []struct { + Input string + Pwd string + Output string + Err bool + }{ + {"./foo", "/foo", "file:///foo/foo", false}, + {"git::./foo", "/foo", "git::file:///foo/foo", false}, + { + "git::github.com/hashicorp/foo", + "", + "git::https://github.com/hashicorp/foo.git", + false, + }, + { + "./foo//bar", + "/foo", + "file:///foo/foo//bar", + false, + }, + { + "git::github.com/hashicorp/foo//bar", + "", + "git::https://github.com/hashicorp/foo.git//bar", + false, + }, + { + "git::https://github.com/hashicorp/consul.git", + "", + "git::https://github.com/hashicorp/consul.git", + false, + }, + { + "git::https://person@someothergit.com/foo/bar", + "", + "git::https://person@someothergit.com/foo/bar", + false, + }, + { + "git::https://person@someothergit.com/foo/bar", + "/bar", + "git::https://person@someothergit.com/foo/bar", + false, + }, + { + "./foo/archive//*", + "/bar", + "file:///bar/foo/archive//*", + false, + }, + + // https://github.com/hashicorp/go-getter/pull/124 + { + "git::ssh://git@my.custom.git/dir1/dir2", + "", + "git::ssh://git@my.custom.git/dir1/dir2", + false, + }, + { + "git::git@my.custom.git:dir1/dir2", + "/foo", + "git::ssh://git@my.custom.git/dir1/dir2", + false, + }, + { + "git::git@my.custom.git:dir1/dir2", + "", + "git::ssh://git@my.custom.git/dir1/dir2", + false, + }, + } + + for i, tc := range cases { + t.Run(fmt.Sprintf("%d %s", i, tc.Input), func(t *testing.T) { + output, err := Detect(tc.Input, tc.Pwd, Detectors) + if err != nil != tc.Err { + t.Fatalf("%d: bad err: %s", i, err) + } + if output != tc.Output { + t.Fatalf("%d: bad output: %s\nexpected: %s", i, output, tc.Output) + } + }) + } +} diff --git a/api/internal/getter/folder_storage.go b/api/internal/getter/folder_storage.go new file mode 100644 index 00000000000..647ccf45928 --- /dev/null +++ b/api/internal/getter/folder_storage.go @@ -0,0 +1,65 @@ +package getter + +import ( + "crypto/md5" + "encoding/hex" + "fmt" + "os" + "path/filepath" +) + +// FolderStorage is an implementation of the Storage interface that manages +// modules on the disk. +type FolderStorage struct { + // StorageDir is the directory where the modules will be stored. 
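+	//
+	// Each key is mapped to a subdirectory of StorageDir named after the
+	// hex-encoded MD5 sum of the key (see the dir method below).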
+ StorageDir string +} + +// Dir implements Storage.Dir +func (s *FolderStorage) Dir(key string) (d string, e bool, err error) { + d = s.dir(key) + _, err = os.Stat(d) + if err == nil { + // Directory exists + e = true + return + } + if os.IsNotExist(err) { + // Directory doesn't exist + d = "" + e = false + err = nil + return + } + + // An error + d = "" + e = false + return +} + +// Get implements Storage.Get +func (s *FolderStorage) Get(key string, source string, update bool) error { + dir := s.dir(key) + if !update { + if _, err := os.Stat(dir); err == nil { + // If the directory already exists, then we're done since + // we're not updating. + return nil + } else if !os.IsNotExist(err) { + // If the error we got wasn't a file-not-exist error, then + // something went wrong and we should report it. + return fmt.Errorf("Error reading module directory: %s", err) + } + } + + // Get the source. This always forces an update. + return Get(dir, source) +} + +// dir returns the directory name internally that we'll use to map to +// internally. +func (s *FolderStorage) dir(key string) string { + sum := md5.Sum([]byte(key)) + return filepath.Join(s.StorageDir, hex.EncodeToString(sum[:])) +} diff --git a/api/internal/getter/folder_storage_test.go b/api/internal/getter/folder_storage_test.go new file mode 100644 index 00000000000..feb8d342529 --- /dev/null +++ b/api/internal/getter/folder_storage_test.go @@ -0,0 +1,48 @@ +package getter + +import ( + "os" + "path/filepath" + "testing" +) + +func TestFolderStorage_impl(t *testing.T) { + var _ Storage = new(FolderStorage) +} + +func TestFolderStorage(t *testing.T) { + s := &FolderStorage{StorageDir: tempDir(t)} + + module := testModule("basic") + + // A module shouldn't exist at first... + _, ok, err := s.Dir(module) + if err != nil { + t.Fatalf("err: %s", err) + } + if ok { + t.Fatal("should not exist") + } + + key := "foo" + + // We can get it + err = s.Get(key, module, false) + if err != nil { + t.Fatalf("err: %s", err) + } + + // Now the module exists + dir, ok, err := s.Dir(key) + if err != nil { + t.Fatalf("err: %s", err) + } + if !ok { + t.Fatal("should exist") + } + + mainPath := filepath.Join(dir, "main.tf") + if _, err := os.Stat(mainPath); err != nil { + t.Fatalf("err: %s", err) + } +} diff --git a/api/internal/getter/get.go b/api/internal/getter/get.go new file mode 100644 index 00000000000..932f786e361 --- /dev/null +++ b/api/internal/getter/get.go @@ -0,0 +1,150 @@ +// getter is a package for downloading files or directories from a variety of +// protocols. +// +// getter is unique in its ability to download both directories and files. +// It also detects certain source strings to be protocol-specific URLs. For +// example, "github.com/hashicorp/go-getter" would turn into a Git URL and +// use the Git protocol. +// +// Protocols and detectors are extensible. +// +// To get started, see Client. +package getter + +import ( + "bytes" + "fmt" + "net/url" + "os/exec" + "regexp" + "syscall" + + cleanhttp "github.com/hashicorp/go-cleanhttp" +) + +// Getter defines the interface that schemes must implement to download +// things. +type Getter interface { + // Get downloads the given URL into the given directory. This always + // assumes that we're updating and gets the latest version that it can. + // + // The directory may already exist (if we're updating). If it is in a + // format that isn't understood, an error should be returned. Get shouldn't + // simply nuke the directory. 
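+	//
+	// The first argument is the destination directory on disk and the
+	// second is the already-parsed source URL.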
+ Get(string, *url.URL) error + + // GetFile downloads the give URL into the given path. The URL must + // reference a single file. If possible, the Getter should check if + // the remote end contains the same file and no-op this operation. + GetFile(string, *url.URL) error + + // ClientMode returns the mode based on the given URL. This is used to + // allow clients to let the getters decide which mode to use. + ClientMode(*url.URL) (ClientMode, error) + + // SetClient allows a getter to know it's client + // in order to access client's Get functions or + // progress tracking. + SetClient(*Client) +} + +// Getters is the mapping of scheme to the Getter implementation that will +// be used to get a dependency. +var Getters map[string]Getter + +// forcedRegexp is the regular expression that finds forced getters. This +// syntax is schema::url, example: git::https://foo.com +var forcedRegexp = regexp.MustCompile(`^([A-Za-z0-9]+)::(.+)$`) + +// httpClient is the default client to be used by HttpGetters. +var httpClient = cleanhttp.DefaultClient() + +func init() { + httpGetter := &HttpGetter{ + Netrc: true, + } + + Getters = map[string]Getter{ + "file": new(FileGetter), + "git": new(GitGetter), + "hg": new(HgGetter), + "http": httpGetter, + "https": httpGetter, + } +} + +// Get downloads the directory specified by src into the folder specified by +// dst. If dst already exists, Get will attempt to update it. +// +// src is a URL, whereas dst is always just a file path to a folder. This +// folder doesn't need to exist. It will be created if it doesn't exist. +func Get(dst, src string, opts ...ClientOption) error { + return (&Client{ + Src: src, + Dst: dst, + Dir: true, + Options: opts, + }).Get() +} + +// GetAny downloads a URL into the given destination. Unlike Get or +// GetFile, both directories and files are supported. +// +// dst must be a directory. If src is a file, it will be downloaded +// into dst with the basename of the URL. If src is a directory or +// archive, it will be unpacked directly into dst. +func GetAny(dst, src string, opts ...ClientOption) error { + return (&Client{ + Src: src, + Dst: dst, + Mode: ClientModeAny, + Options: opts, + }).Get() +} + +// GetFile downloads the file specified by src into the path specified by +// dst. +func GetFile(dst, src string, opts ...ClientOption) error { + return (&Client{ + Src: src, + Dst: dst, + Dir: false, + Options: opts, + }).Get() +} + +// getRunCommand is a helper that will run a command and capture the output +// in the case an error happens. +func getRunCommand(cmd *exec.Cmd) error { + var buf bytes.Buffer + cmd.Stdout = &buf + cmd.Stderr = &buf + err := cmd.Run() + if err == nil { + return nil + } + if exiterr, ok := err.(*exec.ExitError); ok { + // The program has exited with an exit code != 0 + if status, ok := exiterr.Sys().(syscall.WaitStatus); ok { + return fmt.Errorf( + "%s exited with %d: %s", + cmd.Path, + status.ExitStatus(), + buf.String()) + } + } + + return fmt.Errorf("error running %s: %s", cmd.Path, buf.String()) +} + +// getForcedGetter takes a source and returns the tuple of the forced +// getter and the raw URL (without the force syntax). 
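+//
+// For example, "git::https://foo.com" yields ("git", "https://foo.com"),
+// while a source without the force syntax yields ("", src) unchanged.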
+func getForcedGetter(src string) (string, string) { + var forced string + if ms := forcedRegexp.FindStringSubmatch(src); ms != nil { + forced = ms[1] + src = ms[2] + } + + return forced, src +} diff --git a/api/internal/getter/get_base.go b/api/internal/getter/get_base.go new file mode 100644 index 00000000000..09e9b6313b1 --- /dev/null +++ b/api/internal/getter/get_base.go @@ -0,0 +1,20 @@ +package getter + +import "context" + +// getter is our base getter; it regroups +// fields all getters have in common. +type getter struct { + client *Client +} + +func (g *getter) SetClient(c *Client) { g.client = c } + +// Context tries to returns the Contex from the getter's +// client. otherwise context.Background() is returned. +func (g *getter) Context() context.Context { + if g == nil || g.client == nil { + return context.Background() + } + return g.client.Ctx +} diff --git a/api/internal/getter/get_file.go b/api/internal/getter/get_file.go new file mode 100644 index 00000000000..78660839a07 --- /dev/null +++ b/api/internal/getter/get_file.go @@ -0,0 +1,36 @@ +package getter + +import ( + "net/url" + "os" +) + +// FileGetter is a Getter implementation that will download a module from +// a file scheme. +type FileGetter struct { + getter + + // Copy, if set to true, will copy data instead of using a symlink. If + // false, attempts to symlink to speed up the operation and to lower the + // disk space usage. If the symlink fails, may attempt to copy on windows. + Copy bool +} + +func (g *FileGetter) ClientMode(u *url.URL) (ClientMode, error) { + path := u.Path + if u.RawPath != "" { + path = u.RawPath + } + + fi, err := os.Stat(path) + if err != nil { + return 0, err + } + + // Check if the source is a directory. + if fi.IsDir() { + return ClientModeDir, nil + } + + return ClientModeFile, nil +} diff --git a/api/internal/getter/get_file_copy.go b/api/internal/getter/get_file_copy.go new file mode 100644 index 00000000000..d70fb495128 --- /dev/null +++ b/api/internal/getter/get_file_copy.go @@ -0,0 +1,29 @@ +package getter + +import ( + "context" + "io" +) + +// readerFunc is syntactic sugar for read interface. +type readerFunc func(p []byte) (n int, err error) + +func (rf readerFunc) Read(p []byte) (n int, err error) { return rf(p) } + +// Copy is a io.Copy cancellable by context +func Copy(ctx context.Context, dst io.Writer, src io.Reader) (int64, error) { + // Copy will call the Reader and Writer interface multiple time, in order + // to copy by chunk (avoiding loading the whole file in memory). + return io.Copy(dst, readerFunc(func(p []byte) (int, error) { + + select { + case <-ctx.Done(): + // context has been canceled + // stop process and propagate "context canceled" error + return 0, ctx.Err() + default: + // otherwise just run default io.Reader implementation + return src.Read(p) + } + })) +} diff --git a/api/internal/getter/get_file_copy_test.go b/api/internal/getter/get_file_copy_test.go new file mode 100644 index 00000000000..659edd2003a --- /dev/null +++ b/api/internal/getter/get_file_copy_test.go @@ -0,0 +1,82 @@ +package getter + +import ( + "bytes" + "context" + "io" + "testing" + "time" +) + +// OneDoneContext is a context that is +// cancelled after a first done is called. 
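+//
+// The first call to Done reports the context as still active and flips the
+// flag; every later call returns an already-closed channel and Err reports
+// context.Canceled. This lets the "cancel after read" case below observe
+// one successful read before the copy is cancelled.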
+type OneDoneContext bool + +func (*OneDoneContext) Deadline() (deadline time.Time, ok bool) { return } +func (*OneDoneContext) Value(key interface{}) interface{} { return nil } + +func (o *OneDoneContext) Err() error { + if *o == false { + return nil + } + return context.Canceled +} + +func (o *OneDoneContext) Done() <-chan struct{} { + if *o == false { + *o = true + return nil + } + c := make(chan struct{}) + close(c) + return c +} + +func (o *OneDoneContext) String() string { + if *o { + return "done OneDoneContext" + } + return "OneDoneContext" +} + +func TestCopy(t *testing.T) { + const text3lines = `line1 + line2 + line3 + ` + + cancelledContext, cancel := context.WithCancel(context.Background()) + _ = cancelledContext + cancel() + type args struct { + ctx context.Context + src io.Reader + } + tests := []struct { + name string + args args + want int64 + wantDst string + wantErr error + }{ + {"read all", args{context.Background(), bytes.NewBufferString(text3lines)}, int64(len(text3lines)), text3lines, nil}, + {"read none", args{cancelledContext, bytes.NewBufferString(text3lines)}, 0, "", context.Canceled}, + {"cancel after read", args{new(OneDoneContext), bytes.NewBufferString(text3lines)}, int64(len(text3lines)), text3lines, context.Canceled}, + } + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + dst := &bytes.Buffer{} + got, err := Copy(tt.args.ctx, dst, tt.args.src) + if err != tt.wantErr { + t.Errorf("Copy() error = %v, wantErr %v", err, tt.wantErr) + return + } + if got != tt.want { + t.Errorf("Copy() = %v, want %v", got, tt.want) + } + if gotDst := dst.String(); gotDst != tt.wantDst { + t.Errorf("Copy() = %v, want %v", gotDst, tt.wantDst) + } + }) + } +} diff --git a/api/internal/getter/get_file_test.go b/api/internal/getter/get_file_test.go new file mode 100644 index 00000000000..94ab3c1c14b --- /dev/null +++ b/api/internal/getter/get_file_test.go @@ -0,0 +1,204 @@ +package getter + +import ( + "os" + "path/filepath" + "testing" +) + +func TestFileGetter_impl(t *testing.T) { + var _ Getter = new(FileGetter) +} + +func TestFileGetter(t *testing.T) { + g := new(FileGetter) + dst := tempDir(t) + + // With a dir that doesn't exist + if err := g.Get(dst, testModuleURL("basic")); err != nil { + t.Fatalf("err: %s", err) + } + + // Verify the destination folder is a symlink + fi, err := os.Lstat(dst) + if err != nil { + t.Fatalf("err: %s", err) + } + if fi.Mode()&os.ModeSymlink == 0 { + t.Fatal("destination is not a symlink") + } + + // Verify the main file exists + mainPath := filepath.Join(dst, "main.tf") + if _, err := os.Stat(mainPath); err != nil { + t.Fatalf("err: %s", err) + } +} + +func TestFileGetter_sourceFile(t *testing.T) { + g := new(FileGetter) + dst := tempDir(t) + + // With a source URL that is a path to a file + u := testModuleURL("basic") + u.Path += "/main.tf" + if err := g.Get(dst, u); err == nil { + t.Fatal("should error") + } +} + +func TestFileGetter_sourceNoExist(t *testing.T) { + g := new(FileGetter) + dst := tempDir(t) + + // With a source URL that doesn't exist + u := testModuleURL("basic") + u.Path += "/main" + if err := g.Get(dst, u); err == nil { + t.Fatal("should error") + } +} + +func TestFileGetter_dir(t *testing.T) { + g := new(FileGetter) + dst := tempDir(t) + + if err := os.MkdirAll(dst, 0755); err != nil { + t.Fatalf("err: %s", err) + } + + // With a dir that exists that isn't a symlink + if err := g.Get(dst, testModuleURL("basic")); err == nil { + t.Fatal("should error") + } +} + +func TestFileGetter_dirSymlink(t *testing.T) { + 
g := new(FileGetter) + dst := tempDir(t) + dst2 := tempDir(t) + + // Make parents + if err := os.MkdirAll(filepath.Dir(dst), 0755); err != nil { + t.Fatalf("err: %s", err) + } + if err := os.MkdirAll(dst2, 0755); err != nil { + t.Fatalf("err: %s", err) + } + + // Make a symlink + if err := os.Symlink(dst2, dst); err != nil { + t.Fatalf("err: %s", err) + } + + // With a dir that exists that isn't a symlink + if err := g.Get(dst, testModuleURL("basic")); err != nil { + t.Fatalf("err: %s", err) + } + + // Verify the main file exists + mainPath := filepath.Join(dst, "main.tf") + if _, err := os.Stat(mainPath); err != nil { + t.Fatalf("err: %s", err) + } +} + +func TestFileGetter_GetFile(t *testing.T) { + g := new(FileGetter) + dst := tempTestFile(t) + defer os.RemoveAll(filepath.Dir(dst)) + + // With a dir that doesn't exist + if err := g.GetFile(dst, testModuleURL("basic-file/foo.txt")); err != nil { + t.Fatalf("err: %s", err) + } + + // Verify the destination folder is a symlink + fi, err := os.Lstat(dst) + if err != nil { + t.Fatalf("err: %s", err) + } + if fi.Mode()&os.ModeSymlink == 0 { + t.Fatal("destination is not a symlink") + } + + // Verify the main file exists + assertContents(t, dst, "Hello\n") +} + +func TestFileGetter_GetFile_Copy(t *testing.T) { + g := new(FileGetter) + g.Copy = true + + dst := tempTestFile(t) + defer os.RemoveAll(filepath.Dir(dst)) + + // With a dir that doesn't exist + if err := g.GetFile(dst, testModuleURL("basic-file/foo.txt")); err != nil { + t.Fatalf("err: %s", err) + } + + // Verify the destination folder is a symlink + fi, err := os.Lstat(dst) + if err != nil { + t.Fatalf("err: %s", err) + } + if fi.Mode()&os.ModeSymlink != 0 { + t.Fatal("destination is a symlink") + } + + // Verify the main file exists + assertContents(t, dst, "Hello\n") +} + +// https://github.com/hashicorp/terraform/issues/8418 +func TestFileGetter_percent2F(t *testing.T) { + g := new(FileGetter) + dst := tempDir(t) + + // With a dir that doesn't exist + if err := g.Get(dst, testModuleURL("basic%2Ftest")); err != nil { + t.Fatalf("err: %s", err) + } + + // Verify the main file exists + mainPath := filepath.Join(dst, "main.tf") + if _, err := os.Stat(mainPath); err != nil { + t.Fatalf("err: %s", err) + } +} + +func TestFileGetter_ClientMode_notexist(t *testing.T) { + g := new(FileGetter) + + u := testURL("nonexistent") + if _, err := g.ClientMode(u); err == nil { + t.Fatal("expect source file error") + } +} + +func TestFileGetter_ClientMode_file(t *testing.T) { + g := new(FileGetter) + + // Check the client mode when pointed at a file. + mode, err := g.ClientMode(testModuleURL("basic-file/foo.txt")) + if err != nil { + t.Fatalf("err: %s", err) + } + if mode != ClientModeFile { + t.Fatal("expect ClientModeFile") + } +} + +func TestFileGetter_ClientMode_dir(t *testing.T) { + g := new(FileGetter) + + // Check the client mode when pointed at a directory. 
+ mode, err := g.ClientMode(testModuleURL("basic")) + if err != nil { + t.Fatalf("err: %s", err) + } + if mode != ClientModeDir { + t.Fatal("expect ClientModeDir") + } +} diff --git a/api/internal/getter/get_file_unix.go b/api/internal/getter/get_file_unix.go new file mode 100644 index 00000000000..c3b28ae517a --- /dev/null +++ b/api/internal/getter/get_file_unix.go @@ -0,0 +1,103 @@ +// +build !windows + +package getter + +import ( + "fmt" + "net/url" + "os" + "path/filepath" +) + +func (g *FileGetter) Get(dst string, u *url.URL) error { + path := u.Path + if u.RawPath != "" { + path = u.RawPath + } + + // The source path must exist and be a directory to be usable. + if fi, err := os.Stat(path); err != nil { + return fmt.Errorf("source path error: %s", err) + } else if !fi.IsDir() { + return fmt.Errorf("source path must be a directory") + } + + fi, err := os.Lstat(dst) + if err != nil && !os.IsNotExist(err) { + return err + } + + // If the destination already exists, it must be a symlink + if err == nil { + mode := fi.Mode() + if mode&os.ModeSymlink == 0 { + return fmt.Errorf("destination exists and is not a symlink") + } + + // Remove the destination + if err := os.Remove(dst); err != nil { + return err + } + } + + // Create all the parent directories + if err := os.MkdirAll(filepath.Dir(dst), 0755); err != nil { + return err + } + + return os.Symlink(path, dst) +} + +func (g *FileGetter) GetFile(dst string, u *url.URL) error { + ctx := g.Context() + path := u.Path + if u.RawPath != "" { + path = u.RawPath + } + + // The source path must exist and be a file to be usable. + if fi, err := os.Stat(path); err != nil { + return fmt.Errorf("source path error: %s", err) + } else if fi.IsDir() { + return fmt.Errorf("source path must be a file") + } + + _, err := os.Lstat(dst) + if err != nil && !os.IsNotExist(err) { + return err + } + + // If the destination already exists, it must be a symlink + if err == nil { + // Remove the destination + if err := os.Remove(dst); err != nil { + return err + } + } + + // Create all the parent directories + if err := os.MkdirAll(filepath.Dir(dst), 0755); err != nil { + return err + } + + // If we're not copying, just symlink and we're done + if !g.Copy { + return os.Symlink(path, dst) + } + + // Copy + srcF, err := os.Open(path) + if err != nil { + return err + } + defer srcF.Close() + + dstF, err := os.Create(dst) + if err != nil { + return err + } + defer dstF.Close() + + _, err = Copy(ctx, dstF, srcF) + return err +} diff --git a/api/internal/getter/get_file_windows.go b/api/internal/getter/get_file_windows.go new file mode 100644 index 00000000000..24f1acb1762 --- /dev/null +++ b/api/internal/getter/get_file_windows.go @@ -0,0 +1,136 @@ +// +build windows + +package getter + +import ( + "fmt" + "net/url" + "os" + "os/exec" + "path/filepath" + "strings" + "syscall" +) + +func (g *FileGetter) Get(dst string, u *url.URL) error { + ctx := g.Context() + path := u.Path + if u.RawPath != "" { + path = u.RawPath + } + + // The source path must exist and be a directory to be usable. 
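+	//
+	// On Windows the link to the source is created later as an NTFS
+	// junction point via "mklink /J" rather than a symlink.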
+ if fi, err := os.Stat(path); err != nil { + return fmt.Errorf("source path error: %s", err) + } else if !fi.IsDir() { + return fmt.Errorf("source path must be a directory") + } + + fi, err := os.Lstat(dst) + if err != nil && !os.IsNotExist(err) { + return err + } + + // If the destination already exists, it must be a symlink + if err == nil { + mode := fi.Mode() + if mode&os.ModeSymlink == 0 { + return fmt.Errorf("destination exists and is not a symlink") + } + + // Remove the destination + if err := os.Remove(dst); err != nil { + return err + } + } + + // Create all the parent directories + if err := os.MkdirAll(filepath.Dir(dst), 0755); err != nil { + return err + } + + sourcePath := toBackslash(path) + + // Use mklink to create a junction point + output, err := exec.CommandContext(ctx, "cmd", "/c", "mklink", "/J", dst, sourcePath).CombinedOutput() + if err != nil { + return fmt.Errorf("failed to run mklink %v %v: %v %q", dst, sourcePath, err, output) + } + + return nil +} + +func (g *FileGetter) GetFile(dst string, u *url.URL) error { + ctx := g.Context() + path := u.Path + if u.RawPath != "" { + path = u.RawPath + } + + // The source path must exist and be a directory to be usable. + if fi, err := os.Stat(path); err != nil { + return fmt.Errorf("source path error: %s", err) + } else if fi.IsDir() { + return fmt.Errorf("source path must be a file") + } + + _, err := os.Lstat(dst) + if err != nil && !os.IsNotExist(err) { + return err + } + + // If the destination already exists, it must be a symlink + if err == nil { + // Remove the destination + if err := os.Remove(dst); err != nil { + return err + } + } + + // Create all the parent directories + if err := os.MkdirAll(filepath.Dir(dst), 0755); err != nil { + return err + } + + // If we're not copying, just symlink and we're done + if !g.Copy { + if err = os.Symlink(path, dst); err == nil { + return err + } + lerr, ok := err.(*os.LinkError) + if !ok { + return err + } + switch lerr.Err { + case syscall.ERROR_PRIVILEGE_NOT_HELD: + // no symlink privilege, let's + // fallback to a copy to avoid an error. + break + default: + return err + } + } + + // Copy + srcF, err := os.Open(path) + if err != nil { + return err + } + defer srcF.Close() + + dstF, err := os.Create(dst) + if err != nil { + return err + } + defer dstF.Close() + + _, err = Copy(ctx, dstF, srcF) + return err +} + +// toBackslash returns the result of replacing each slash character +// in path with a backslash ('\') character. Multiple separators are +// replaced by multiple backslashes. +func toBackslash(path string) string { + return strings.Replace(path, "/", "\\", -1) +} diff --git a/api/internal/getter/get_git.go b/api/internal/getter/get_git.go new file mode 100644 index 00000000000..3e7d0c02adf --- /dev/null +++ b/api/internal/getter/get_git.go @@ -0,0 +1,313 @@ +package getter + +import ( + "bytes" + "context" + "encoding/base64" + "fmt" + "io/ioutil" + "net/url" + "os" + "os/exec" + "path/filepath" + "regexp" + "runtime" + "strconv" + "strings" + + urlhelper "./helper/url" + safetemp "github.com/hashicorp/go-safetemp" + version "github.com/hashicorp/go-version" +) + +// GitGetter is a Getter implementation that will download a module from +// a git repository. 
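+//
+// The source URL may carry query parameters that the getter interprets
+// itself: "ref" selects a branch or tag to check out, "sshkey" supplies a
+// base64-encoded private SSH key, and "depth" requests a shallow clone.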
+type GitGetter struct { + getter +} + +var defaultBranchRegexp = regexp.MustCompile(`\s->\sorigin/(.*)`) + +func (g *GitGetter) ClientMode(_ *url.URL) (ClientMode, error) { + return ClientModeDir, nil +} + +func (g *GitGetter) Get(dst string, u *url.URL) error { + ctx := g.Context() + if _, err := exec.LookPath("git"); err != nil { + return fmt.Errorf("git must be available and on the PATH") + } + + // The port number must be parseable as an integer. If not, the user + // was probably trying to use a scp-style address, in which case the + // ssh:// prefix must be removed to indicate that. + // + // This is not necessary in versions of Go which have patched + // CVE-2019-14809 (e.g. Go 1.12.8+) + if portStr := u.Port(); portStr != "" { + if _, err := strconv.ParseUint(portStr, 10, 16); err != nil { + return fmt.Errorf("invalid port number %q; if using the \"scp-like\" git address scheme where a colon introduces the path instead, remove the ssh:// portion and use just the git:: prefix", portStr) + } + } + + // Extract some query parameters we use + var ref, sshKey string + var depth int + q := u.Query() + if len(q) > 0 { + ref = q.Get("ref") + q.Del("ref") + + sshKey = q.Get("sshkey") + q.Del("sshkey") + + if n, err := strconv.Atoi(q.Get("depth")); err == nil { + depth = n + } + q.Del("depth") + + // Copy the URL + var newU url.URL = *u + u = &newU + u.RawQuery = q.Encode() + } + + var sshKeyFile string + if sshKey != "" { + // Check that the git version is sufficiently new. + if err := checkGitVersion("2.3"); err != nil { + return fmt.Errorf("Error using ssh key: %v", err) + } + + // We have an SSH key - decode it. + raw, err := base64.StdEncoding.DecodeString(sshKey) + if err != nil { + return err + } + + // Create a temp file for the key and ensure it is removed. + fh, err := ioutil.TempFile("", "go-getter") + if err != nil { + return err + } + sshKeyFile = fh.Name() + defer os.Remove(sshKeyFile) + + // Set the permissions prior to writing the key material. + if err := os.Chmod(sshKeyFile, 0600); err != nil { + return err + } + + // Write the raw key into the temp file. + _, err = fh.Write(raw) + fh.Close() + if err != nil { + return err + } + } + + // Clone or update the repository + _, err := os.Stat(dst) + if err != nil && !os.IsNotExist(err) { + return err + } + if err == nil { + err = g.update(ctx, dst, sshKeyFile, ref, depth) + } else { + err = g.clone(ctx, dst, sshKeyFile, u, depth) + } + if err != nil { + return err + } + + // Next: check out the proper tag/branch if it is specified, and checkout + if ref != "" { + if err := g.checkout(dst, ref); err != nil { + return err + } + } + + // Lastly, download any/all submodules. + return g.fetchSubmodules(ctx, dst, sshKeyFile, depth) +} + +// GetFile for Git doesn't support updating at this time. It will download +// the file every time. +func (g *GitGetter) GetFile(dst string, u *url.URL) error { + td, tdcloser, err := safetemp.Dir("", "getter") + if err != nil { + return err + } + defer tdcloser.Close() + + // Get the filename, and strip the filename from the URL so we can + // just get the repository directly. 
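+	//
+	// The whole repository is cloned into a temporary directory and the
+	// single requested file is then copied out of it with a FileGetter.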
+ filename := filepath.Base(u.Path) + u.Path = filepath.Dir(u.Path) + + // Get the full repository + if err := g.Get(td, u); err != nil { + return err + } + + // Copy the single file + u, err = urlhelper.Parse(fmtFileURL(filepath.Join(td, filename))) + if err != nil { + return err + } + + fg := &FileGetter{Copy: true} + return fg.GetFile(dst, u) +} + +func (g *GitGetter) checkout(dst string, ref string) error { + cmd := exec.Command("git", "checkout", ref) + cmd.Dir = dst + return getRunCommand(cmd) +} + +func (g *GitGetter) clone(ctx context.Context, dst, sshKeyFile string, u *url.URL, depth int) error { + args := []string{"clone"} + + if depth > 0 { + args = append(args, "--depth", strconv.Itoa(depth)) + } + + args = append(args, u.String(), dst) + cmd := exec.CommandContext(ctx, "git", args...) + setupGitEnv(cmd, sshKeyFile) + return getRunCommand(cmd) +} + +func (g *GitGetter) update(ctx context.Context, dst, sshKeyFile, ref string, depth int) error { + // Determine if we're a branch. If we're NOT a branch, then we just + // switch to master prior to checking out + cmd := exec.CommandContext(ctx, "git", "show-ref", "-q", "--verify", "refs/heads/"+ref) + cmd.Dir = dst + + if getRunCommand(cmd) != nil { + // Not a branch, switch to default branch. This will also catch + // non-existent branches, in which case we want to switch to default + // and then checkout the proper branch later. + ref = findDefaultBranch(dst) + } + + // We have to be on a branch to pull + if err := g.checkout(dst, ref); err != nil { + return err + } + + if depth > 0 { + cmd = exec.Command("git", "pull", "--depth", strconv.Itoa(depth), "--ff-only") + } else { + cmd = exec.Command("git", "pull", "--ff-only") + } + + cmd.Dir = dst + setupGitEnv(cmd, sshKeyFile) + return getRunCommand(cmd) +} + +// fetchSubmodules downloads any configured submodules recursively. +func (g *GitGetter) fetchSubmodules(ctx context.Context, dst, sshKeyFile string, depth int) error { + args := []string{"submodule", "update", "--init", "--recursive"} + if depth > 0 { + args = append(args, "--depth", strconv.Itoa(depth)) + } + cmd := exec.CommandContext(ctx, "git", args...) + cmd.Dir = dst + setupGitEnv(cmd, sshKeyFile) + return getRunCommand(cmd) +} + +// findDefaultBranch checks the repo's origin remote for its default branch +// (generally "master"). "master" is returned if an origin default branch +// can't be determined. +func findDefaultBranch(dst string) string { + var stdoutbuf bytes.Buffer + cmd := exec.Command("git", "branch", "-r", "--points-at", "refs/remotes/origin/HEAD") + cmd.Dir = dst + cmd.Stdout = &stdoutbuf + err := cmd.Run() + matches := defaultBranchRegexp.FindStringSubmatch(stdoutbuf.String()) + if err != nil || matches == nil { + return "master" + } + return matches[len(matches)-1] +} + +// setupGitEnv sets up the environment for the given command. This is used to +// pass configuration data to git and ssh and enables advanced cloning methods. +func setupGitEnv(cmd *exec.Cmd, sshKeyFile string) { + const gitSSHCommand = "GIT_SSH_COMMAND=" + var sshCmd []string + + // If we have an existing GIT_SSH_COMMAND, we need to append our options. + // We will also remove our old entry to make sure the behavior is the same + // with versions of Go < 1.9. 
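+	//
+	// The end result is a single GIT_SSH_COMMAND entry such as
+	// "GIT_SSH_COMMAND=ssh -i /tmp/foo.pem", with "-i <keyfile>" appended to
+	// any ssh command that was already configured in the environment.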
+ env := os.Environ() + for i, v := range env { + if strings.HasPrefix(v, gitSSHCommand) && len(v) > len(gitSSHCommand) { + sshCmd = []string{v} + + env[i], env[len(env)-1] = env[len(env)-1], env[i] + env = env[:len(env)-1] + break + } + } + + if len(sshCmd) == 0 { + sshCmd = []string{gitSSHCommand + "ssh"} + } + + if sshKeyFile != "" { + // We have an SSH key temp file configured, tell ssh about this. + if runtime.GOOS == "windows" { + sshKeyFile = strings.Replace(sshKeyFile, `\`, `/`, -1) + } + sshCmd = append(sshCmd, "-i", sshKeyFile) + } + + env = append(env, strings.Join(sshCmd, " ")) + cmd.Env = env +} + +// checkGitVersion is used to check the version of git installed on the system +// against a known minimum version. Returns an error if the installed version +// is older than the given minimum. +func checkGitVersion(min string) error { + want, err := version.NewVersion(min) + if err != nil { + return err + } + + out, err := exec.Command("git", "version").Output() + if err != nil { + return err + } + + fields := strings.Fields(string(out)) + if len(fields) < 3 { + return fmt.Errorf("Unexpected 'git version' output: %q", string(out)) + } + v := fields[2] + if runtime.GOOS == "windows" && strings.Contains(v, ".windows.") { + // on windows, git version will return for example: + // git version 2.20.1.windows.1 + // Which does not follow the semantic versionning specs + // https://semver.org. We remove that part in order for + // go-version to not error. + v = v[:strings.Index(v, ".windows.")] + } + + have, err := version.NewVersion(v) + if err != nil { + return err + } + + if have.LessThan(want) { + return fmt.Errorf("Required git version = %s, have %s", want, have) + } + + return nil +} diff --git a/api/internal/getter/get_git_test.go b/api/internal/getter/get_git_test.go new file mode 100644 index 00000000000..3a78bf2faf2 --- /dev/null +++ b/api/internal/getter/get_git_test.go @@ -0,0 +1,643 @@ +package getter + +import ( + "bytes" + "encoding/base64" + "io/ioutil" + "net/url" + "os" + "os/exec" + "path/filepath" + "runtime" + "strings" + "testing" + + urlhelper "./helper/url" +) + +var testHasGit bool + +func init() { + if _, err := exec.LookPath("git"); err == nil { + testHasGit = true + } +} + +func TestGitGetter_impl(t *testing.T) { + var _ Getter = new(GitGetter) +} + +func TestGitGetter(t *testing.T) { + if !testHasGit { + t.Skip("git not found, skipping") + } + + g := new(GitGetter) + dst := tempDir(t) + + repo := testGitRepo(t, "basic") + repo.commitFile("foo.txt", "hello") + + // With a dir that doesn't exist + if err := g.Get(dst, repo.url); err != nil { + t.Fatalf("err: %s", err) + } + + // Verify the main file exists + mainPath := filepath.Join(dst, "foo.txt") + if _, err := os.Stat(mainPath); err != nil { + t.Fatalf("err: %s", err) + } +} + +func TestGitGetter_branch(t *testing.T) { + if !testHasGit { + t.Skip("git not found, skipping") + } + + g := new(GitGetter) + dst := tempDir(t) + + repo := testGitRepo(t, "branch") + repo.git("checkout", "-b", "test-branch") + repo.commitFile("branch.txt", "branch") + + q := repo.url.Query() + q.Add("ref", "test-branch") + repo.url.RawQuery = q.Encode() + + if err := g.Get(dst, repo.url); err != nil { + t.Fatalf("err: %s", err) + } + + // Verify the main file exists + mainPath := filepath.Join(dst, "branch.txt") + if _, err := os.Stat(mainPath); err != nil { + t.Fatalf("err: %s", err) + } + + // Get again should work + if err := g.Get(dst, repo.url); err != nil { + t.Fatalf("err: %s", err) + } + + // Verify the main file exists + 
mainPath = filepath.Join(dst, "branch.txt") + if _, err := os.Stat(mainPath); err != nil { + t.Fatalf("err: %s", err) + } +} + +func TestGitGetter_remoteWithoutMaster(t *testing.T) { + if !testHasGit { + t.Log("git not found, skipping") + t.Skip() + } + + g := new(GitGetter) + dst := tempDir(t) + + repo := testGitRepo(t, "branch") + repo.git("checkout", "-b", "test-branch") + repo.commitFile("branch.txt", "branch") + + q := repo.url.Query() + repo.url.RawQuery = q.Encode() + + if err := g.Get(dst, repo.url); err != nil { + t.Fatalf("err: %s", err) + } + + // Verify the main file exists + mainPath := filepath.Join(dst, "branch.txt") + if _, err := os.Stat(mainPath); err != nil { + t.Fatalf("err: %s", err) + } + + // Get again should work + if err := g.Get(dst, repo.url); err != nil { + t.Fatalf("err: %s", err) + } + + // Verify the main file exists + mainPath = filepath.Join(dst, "branch.txt") + if _, err := os.Stat(mainPath); err != nil { + t.Fatalf("err: %s", err) + } +} + +func TestGitGetter_shallowClone(t *testing.T) { + if !testHasGit { + t.Log("git not found, skipping") + t.Skip() + } + + g := new(GitGetter) + dst := tempDir(t) + + repo := testGitRepo(t, "upstream") + repo.commitFile("upstream.txt", "0") + repo.commitFile("upstream.txt", "1") + + // Specifiy a clone depth of 1 + q := repo.url.Query() + q.Add("depth", "1") + repo.url.RawQuery = q.Encode() + + if err := g.Get(dst, repo.url); err != nil { + t.Fatalf("err: %s", err) + } + + // Assert rev-list count is '1' + cmd := exec.Command("git", "rev-list", "HEAD", "--count") + cmd.Dir = dst + b, err := cmd.Output() + if err != nil { + t.Fatalf("err: %s", err) + } + + out := strings.TrimSpace(string(b)) + if out != "1" { + t.Fatalf("expected rev-list count to be '1' but got %v", out) + } +} + +func TestGitGetter_branchUpdate(t *testing.T) { + if !testHasGit { + t.Skip("git not found, skipping") + } + + g := new(GitGetter) + dst := tempDir(t) + + // First setup the state with a fresh branch + repo := testGitRepo(t, "branch-update") + repo.git("checkout", "-b", "test-branch") + repo.commitFile("branch.txt", "branch") + + // Get the "test-branch" branch + q := repo.url.Query() + q.Add("ref", "test-branch") + repo.url.RawQuery = q.Encode() + if err := g.Get(dst, repo.url); err != nil { + t.Fatalf("err: %s", err) + } + + // Verify the main file exists + mainPath := filepath.Join(dst, "branch.txt") + if _, err := os.Stat(mainPath); err != nil { + t.Fatalf("err: %s", err) + } + + // Commit an update to the branch + repo.commitFile("branch-update.txt", "branch-update") + + // Get again should work + if err := g.Get(dst, repo.url); err != nil { + t.Fatalf("err: %s", err) + } + + // Verify the main file exists + mainPath = filepath.Join(dst, "branch-update.txt") + if _, err := os.Stat(mainPath); err != nil { + t.Fatalf("err: %s", err) + } +} + +func TestGitGetter_tag(t *testing.T) { + if !testHasGit { + t.Skip("git not found, skipping") + } + + g := new(GitGetter) + dst := tempDir(t) + + repo := testGitRepo(t, "tag") + repo.commitFile("tag.txt", "tag") + repo.git("tag", "v1.0") + + q := repo.url.Query() + q.Add("ref", "v1.0") + repo.url.RawQuery = q.Encode() + + if err := g.Get(dst, repo.url); err != nil { + t.Fatalf("err: %s", err) + } + + // Verify the main file exists + mainPath := filepath.Join(dst, "tag.txt") + if _, err := os.Stat(mainPath); err != nil { + t.Fatalf("err: %s", err) + } + + // Get again should work + if err := g.Get(dst, repo.url); err != nil { + t.Fatalf("err: %s", err) + } + + // Verify the main file exists + mainPath = 
filepath.Join(dst, "tag.txt") + if _, err := os.Stat(mainPath); err != nil { + t.Fatalf("err: %s", err) + } +} + +func TestGitGetter_GetFile(t *testing.T) { + if !testHasGit { + t.Skip("git not found, skipping") + } + + g := new(GitGetter) + dst := tempTestFile(t) + defer os.RemoveAll(filepath.Dir(dst)) + + repo := testGitRepo(t, "file") + repo.commitFile("file.txt", "hello") + + // Download the file + repo.url.Path = filepath.Join(repo.url.Path, "file.txt") + if err := g.GetFile(dst, repo.url); err != nil { + t.Fatalf("err: %s", err) + } + + // Verify the main file exists + if _, err := os.Stat(dst); err != nil { + t.Fatalf("err: %s", err) + } + assertContents(t, dst, "hello") +} + +func TestGitGetter_gitVersion(t *testing.T) { + if !testHasGit { + t.Skip("git not found, skipping") + } + if runtime.GOOS == "windows" { + t.Skip("skipping on windows since the test requires sh") + } + dir, err := ioutil.TempDir("", "go-getter") + if err != nil { + t.Fatal(err) + } + defer os.RemoveAll(dir) + + script := filepath.Join(dir, "git") + err = ioutil.WriteFile( + script, + []byte("#!/bin/sh\necho \"git version 2.0 (Some Metadata Here)\n\""), + 0700) + if err != nil { + t.Fatal(err) + } + + defer func(v string) { + os.Setenv("PATH", v) + }(os.Getenv("PATH")) + + os.Setenv("PATH", dir) + + // Asking for a higher version throws an error + if err := checkGitVersion("2.3"); err == nil { + t.Fatal("expect git version error") + } + + // Passes when version is satisfied + if err := checkGitVersion("1.9"); err != nil { + t.Fatal(err) + } +} + +func TestGitGetter_sshKey(t *testing.T) { + if !testHasGit { + t.Skip("git not found, skipping") + } + + g := new(GitGetter) + dst := tempDir(t) + + encodedKey := base64.StdEncoding.EncodeToString([]byte(testGitToken)) + + // avoid getting locked by a github authenticity validation prompt + os.Setenv("GIT_SSH_COMMAND", "ssh -o StrictHostKeyChecking=no -o IdentitiesOnly=yes") + defer os.Setenv("GIT_SSH_COMMAND", "") + + u, err := urlhelper.Parse("ssh://git@github.com/hashicorp/test-private-repo" + + "?sshkey=" + encodedKey) + if err != nil { + t.Fatal(err) + } + + if err := g.Get(dst, u); err != nil { + t.Fatalf("err: %s", err) + } + + readmePath := filepath.Join(dst, "README.md") + if _, err := os.Stat(readmePath); err != nil { + t.Fatalf("err: %s", err) + } +} + +func TestGitGetter_sshSCPStyle(t *testing.T) { + if !testHasGit { + t.Skip("git not found, skipping") + } + + g := new(GitGetter) + dst := tempDir(t) + + encodedKey := base64.StdEncoding.EncodeToString([]byte(testGitToken)) + + // avoid getting locked by a github authenticity validation prompt + os.Setenv("GIT_SSH_COMMAND", "ssh -o StrictHostKeyChecking=no -o IdentitiesOnly=yes") + defer os.Setenv("GIT_SSH_COMMAND", "") + + // This test exercises the combination of the git detector and the + // git getter, to make sure that together they make scp-style URLs work. 
+ client := &Client{ + Src: "git@github.com:hashicorp/test-private-repo?sshkey=" + encodedKey, + Dst: dst, + Pwd: ".", + + Mode: ClientModeDir, + + Detectors: []Detector{ + new(GitDetector), + }, + Getters: map[string]Getter{ + "git": g, + }, + } + + if err := client.Get(); err != nil { + t.Fatalf("client.Get failed: %s", err) + } + + readmePath := filepath.Join(dst, "README.md") + if _, err := os.Stat(readmePath); err != nil { + t.Fatalf("err: %s", err) + } +} + +func TestGitGetter_sshExplicitPort(t *testing.T) { + if !testHasGit { + t.Skip("git not found, skipping") + } + + g := new(GitGetter) + dst := tempDir(t) + + encodedKey := base64.StdEncoding.EncodeToString([]byte(testGitToken)) + + // avoid getting locked by a github authenticity validation prompt + os.Setenv("GIT_SSH_COMMAND", "ssh -o StrictHostKeyChecking=no -o IdentitiesOnly=yes") + defer os.Setenv("GIT_SSH_COMMAND", "") + + // This test exercises the combination of the git detector and the + // git getter, to make sure that together they make scp-style URLs work. + client := &Client{ + Src: "git::ssh://git@github.com:22/hashicorp/test-private-repo?sshkey=" + encodedKey, + Dst: dst, + Pwd: ".", + + Mode: ClientModeDir, + + Detectors: []Detector{ + new(GitDetector), + }, + Getters: map[string]Getter{ + "git": g, + }, + } + + if err := client.Get(); err != nil { + t.Fatalf("client.Get failed: %s", err) + } + + readmePath := filepath.Join(dst, "README.md") + if _, err := os.Stat(readmePath); err != nil { + t.Fatalf("err: %s", err) + } +} + +func TestGitGetter_sshSCPStyleInvalidScheme(t *testing.T) { + if !testHasGit { + t.Skip("git not found, skipping") + } + + g := new(GitGetter) + dst := tempDir(t) + + encodedKey := base64.StdEncoding.EncodeToString([]byte(testGitToken)) + + // avoid getting locked by a github authenticity validation prompt + os.Setenv("GIT_SSH_COMMAND", "ssh -o StrictHostKeyChecking=no -o IdentitiesOnly=yes") + defer os.Setenv("GIT_SSH_COMMAND", "") + + // This test exercises the combination of the git detector and the + // git getter, to make sure that together they make scp-style URLs work. 
+ client := &Client{ + Src: "git::ssh://git@github.com:hashicorp/test-private-repo?sshkey=" + encodedKey, + Dst: dst, + Pwd: ".", + + Mode: ClientModeDir, + + Detectors: []Detector{ + new(GitDetector), + }, + Getters: map[string]Getter{ + "git": g, + }, + } + + err := client.Get() + if err == nil { + t.Fatalf("get succeeded; want error") + } + + got := err.Error() + want1, want2 := `invalid source string`, `invalid port number "hashicorp"` + if !(strings.Contains(got, want1) || strings.Contains(got, want2)) { + t.Fatalf("wrong error\ngot: %s\nwant: %q or %q", got, want1, want2) + } +} + +func TestGitGetter_submodule(t *testing.T) { + if !testHasGit { + t.Skip("git not found, skipping") + } + + g := new(GitGetter) + dst := tempDir(t) + + relpath := func(basepath, targpath string) string { + relpath, err := filepath.Rel(basepath, targpath) + if err != nil { + t.Fatal(err) + } + return strings.Replace(relpath, `\`, `/`, -1) + // on windows git still prefers relatives paths + // containing `/` for submodules + } + + // Set up the grandchild + gc := testGitRepo(t, "grandchild") + gc.commitFile("grandchild.txt", "grandchild") + + // Set up the child + c := testGitRepo(t, "child") + c.commitFile("child.txt", "child") + c.git("submodule", "add", "-f", relpath(c.dir, gc.dir)) + c.git("commit", "-m", "Add grandchild submodule") + + // Set up the parent + p := testGitRepo(t, "parent") + p.commitFile("parent.txt", "parent") + p.git("submodule", "add", "-f", relpath(p.dir, c.dir)) + p.git("commit", "-m", "Add child submodule") + + // Clone the root repository + if err := g.Get(dst, p.url); err != nil { + t.Fatalf("err: %s", err) + } + + // Check that the files exist + for _, path := range []string{ + filepath.Join(dst, "parent.txt"), + filepath.Join(dst, "child", "child.txt"), + filepath.Join(dst, "child", "grandchild", "grandchild.txt"), + } { + if _, err := os.Stat(path); err != nil { + t.Fatalf("err: %s", err) + } + } +} + +func TestGitGetter_setupGitEnv_sshKey(t *testing.T) { + if runtime.GOOS == "windows" { + t.Skip("skipping on windows since the test requires sh") + } + + cmd := exec.Command("/bin/sh", "-c", "echo $GIT_SSH_COMMAND") + setupGitEnv(cmd, "/tmp/foo.pem") + out, err := cmd.Output() + if err != nil { + t.Fatal(err) + } + + actual := strings.TrimSpace(string(out)) + if actual != "ssh -i /tmp/foo.pem" { + t.Fatalf("unexpected GIT_SSH_COMMAND: %q", actual) + } +} + +func TestGitGetter_setupGitEnvWithExisting_sshKey(t *testing.T) { + if runtime.GOOS == "windows" { + t.Skipf("skipping on windows since the test requires sh") + return + } + + // start with an existing ssh command configuration + os.Setenv("GIT_SSH_COMMAND", "ssh -o StrictHostKeyChecking=no -o IdentitiesOnly=yes") + defer os.Setenv("GIT_SSH_COMMAND", "") + + cmd := exec.Command("/bin/sh", "-c", "echo $GIT_SSH_COMMAND") + setupGitEnv(cmd, "/tmp/foo.pem") + out, err := cmd.Output() + if err != nil { + t.Fatal(err) + } + + actual := strings.TrimSpace(string(out)) + if actual != "ssh -o StrictHostKeyChecking=no -o IdentitiesOnly=yes -i /tmp/foo.pem" { + t.Fatalf("unexpected GIT_SSH_COMMAND: %q", actual) + } +} + +// gitRepo is a helper struct which controls a single temp git repo. +type gitRepo struct { + t *testing.T + url *url.URL + dir string +} + +// testGitRepo creates a new test git repository. 
+func testGitRepo(t *testing.T, name string) *gitRepo { + dir, err := ioutil.TempDir("", "go-getter") + if err != nil { + t.Fatal(err) + } + dir = filepath.Join(dir, name) + if err := os.Mkdir(dir, 0700); err != nil { + t.Fatal(err) + } + + r := &gitRepo{ + t: t, + dir: dir, + } + + url, err := urlhelper.Parse("file://" + r.dir) + if err != nil { + t.Fatal(err) + } + r.url = url + + t.Logf("initializing git repo in %s", dir) + r.git("init") + r.git("config", "user.name", "go-getter") + r.git("config", "user.email", "go-getter@hashicorp.com") + + return r +} + +// git runs a git command against the repo. +func (r *gitRepo) git(args ...string) { + cmd := exec.Command("git", args...) + cmd.Dir = r.dir + bfr := bytes.NewBuffer(nil) + cmd.Stderr = bfr + if err := cmd.Run(); err != nil { + r.t.Fatal(err, bfr.String()) + } +} + +// commitFile writes and commits a text file to the repo. +func (r *gitRepo) commitFile(file, content string) { + path := filepath.Join(r.dir, file) + if err := ioutil.WriteFile(path, []byte(content), 0600); err != nil { + r.t.Fatal(err) + } + r.git("add", file) + r.git("commit", "-m", "Adding "+file) +} + +// This is a read-only deploy key for an empty test repository. +// Note: This is split over multiple lines to avoid being disabled by key +// scanners automatically. +var testGitToken = `-----BEGIN RSA PRIVATE KEY----- +MIIEpAIBAAKCAQEA9cHsxCl3Jjgu9DHpwvmfFOl1XEdY+ShHDR/cMnzJ5ddk5/oV +Wy6EWatvyHZfRSZMwzv4PtKeUPm6iXjqWp4xdWU9khlPzozyj+U9Fq70TRVUW9E5 +T1XdQVwJE421yffr4VMMwu60wBqjI1epapH2i2inYvw9Zl9X2MXq0+jTvFvDerbT +mDtfStDPljenELAIZtWVETSvbI46gALwbxbM2292ZUIL4D6jRz0aZMmyy/twYv8r +9WGJLwmYzU518Ie7zqKW/mCTdTrV0WRiDj0MeRaPgrGY9amuHE4r9iG/cJkwpKAO +Ccz0Hs6i89u9vZnTqZU9V7weJqRAQcMjXXR6yQIDAQABAoIBAQDBzICKnGxiTlHw +rd+6qqChnAy5jWYDbZjCJ8q8YZ3RS08+g/8NXZxvHftTqM0uOaq1FviHig3gq15H +hHvCpBc6jXDFYoKFzq6FfO/0kFkE5HoWweIgxwRow0xBCDJAJ+ryUEyy+Ay/pQHb +IAjwilRS0V+WdnVw4mTjBAhPvb4jPOo97Yfy3PYUyx2F3newkqXOZy+zx3G/ANoa +ncypfMGyy76sfCWKqw4J1gVkVQLwbB6gQkXUFGYwY9sRrxbG93kQw76Flc/E/s52 +62j4v1IM0fq0t/St+Y/+s6Lkw` + `aqt3ft1nsqWcRaVDdqvMfkzgJGXlw0bGzJG5MEQ +AIBq3dHRAoGBAP8OeG/DKG2Z1VmSfzuz1pas1fbZ+F7venOBrjez3sKlb3Pyl2aH +mt2wjaTUi5v10VrHgYtOEdqyhQeUSYydWXIBKNMag0NLLrfFUKZK+57wrHWFdFjn +VgpsdkLSNTOZpC8gA5OaJ+36IcOPfGqyyP9wuuRoaYnVT1KEzqLa9FEFAoGBAPaq +pglwhil2rxjJE4zq0afQLNpAfi7Xqcrepij+xvJIcIj7nawxXuPxqRFxONE/h3yX +zkybO8wLdbHX9Iw/wc1j50Uf1Z5gHdLf7/hQJoWKpz1RnkWRy6CYON8v1tpVp0tb +OAajR/kZnzebq2mfa7pyy5zDCX++2kp/dcFwHf31AoGAE8oupBVTZLWj7TBFuP8q +LkS40U92Sv9v09iDCQVmylmFvUxcXPM2m+7f/qMTNgWrucxzC7kB/6MMWVszHbrz +vrnCTibnemgx9sZTjKOSxHFOIEw7i85fSa3Cu0qOIDPSnmlwfZpfcMKQrhjLAYhf +uhooFiLX1X78iZ2OXup4PHUCgYEAsmBrm83sp1V1gAYBBlnVbXakyNv0pCk/Vz61 +iFXeRt1NzDGxLxGw3kQnED8BaIh5kQcyn8Fud7sdzJMv/LAqlT4Ww60mzNYTGyjo +H3jOsqm3ESfRvduWFreeAQBWbiOczGjV1i8D4EbAFfWT+tjXjchwKBf+6Yt5zn/o +Bw/uEHUCgYAFs+JPOR25oRyBs7ujrMo/OY1z/eXTVVgZxY+tYGe1FJqDeFyR7ytK ++JBB1MuDwQKGm2wSIXdCzTNoIx2B9zTseiPTwT8G7vqNFhXoIaTBp4P2xIQb45mJ +7GkTsMBHwpSMOXgX9Weq3v5xOJ2WxVtjENmd6qzxcYCO5lP15O17hA== +-----END RSA PRIVATE KEY-----` diff --git a/api/internal/getter/get_hg.go b/api/internal/getter/get_hg.go new file mode 100644 index 00000000000..1f10ec1a25b --- /dev/null +++ b/api/internal/getter/get_hg.go @@ -0,0 +1,135 @@ +package getter + +import ( + "context" + "fmt" + "net/url" + "os" + "os/exec" + "path/filepath" + "runtime" + + urlhelper "./helper/url" + safetemp "github.com/hashicorp/go-safetemp" +) + +// HgGetter is a Getter implementation that will download a module from +// a Mercurial repository. 
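+//
+// A typical source handled here (a sketch; the hostname is illustrative):
+//   https://example.com/some-repo?rev=default
+// Get strips the optional "rev" query parameter and passes it to "hg update"
+// after the clone/pull performed below.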
+type HgGetter struct { + getter +} + +func (g *HgGetter) ClientMode(_ *url.URL) (ClientMode, error) { + return ClientModeDir, nil +} + +func (g *HgGetter) Get(dst string, u *url.URL) error { + ctx := g.Context() + if _, err := exec.LookPath("hg"); err != nil { + return fmt.Errorf("hg must be available and on the PATH") + } + + newURL, err := urlhelper.Parse(u.String()) + if err != nil { + return err + } + if fixWindowsDrivePath(newURL) { + // See valid file path form on http://www.selenic.com/hg/help/urls + newURL.Path = fmt.Sprintf("/%s", newURL.Path) + } + + // Extract some query parameters we use + var rev string + q := newURL.Query() + if len(q) > 0 { + rev = q.Get("rev") + q.Del("rev") + + newURL.RawQuery = q.Encode() + } + + _, err = os.Stat(dst) + if err != nil && !os.IsNotExist(err) { + return err + } + if err != nil { + if err := g.clone(dst, newURL); err != nil { + return err + } + } + + if err := g.pull(dst, newURL); err != nil { + return err + } + + return g.update(ctx, dst, newURL, rev) +} + +// GetFile for Hg doesn't support updating at this time. It will download +// the file every time. +func (g *HgGetter) GetFile(dst string, u *url.URL) error { + // Create a temporary directory to store the full source. This has to be + // a non-existent directory. + td, tdcloser, err := safetemp.Dir("", "getter") + if err != nil { + return err + } + defer tdcloser.Close() + + // Get the filename, and strip the filename from the URL so we can + // just get the repository directly. + filename := filepath.Base(u.Path) + u.Path = filepath.ToSlash(filepath.Dir(u.Path)) + + // If we're on Windows, we need to set the host to "localhost" for hg + if runtime.GOOS == "windows" { + u.Host = "localhost" + } + + // Get the full repository + if err := g.Get(td, u); err != nil { + return err + } + + // Copy the single file + u, err = urlhelper.Parse(fmtFileURL(filepath.Join(td, filename))) + if err != nil { + return err + } + + fg := &FileGetter{Copy: true, getter: g.getter} + return fg.GetFile(dst, u) +} + +func (g *HgGetter) clone(dst string, u *url.URL) error { + cmd := exec.Command("hg", "clone", "-U", u.String(), dst) + return getRunCommand(cmd) +} + +func (g *HgGetter) pull(dst string, u *url.URL) error { + cmd := exec.Command("hg", "pull") + cmd.Dir = dst + return getRunCommand(cmd) +} + +func (g *HgGetter) update(ctx context.Context, dst string, u *url.URL, rev string) error { + args := []string{"update"} + if rev != "" { + args = append(args, rev) + } + + cmd := exec.CommandContext(ctx, "hg", args...) + cmd.Dir = dst + return getRunCommand(cmd) +} + +func fixWindowsDrivePath(u *url.URL) bool { + // hg assumes a file:/// prefix for Windows drive letter file paths. + // (e.g. file:///c:/foo/bar) + // If the URL Path does not begin with a '/' character, the resulting URL + // path will have a file:// prefix. (e.g. 
file://c:/foo/bar) + // See http://www.selenic.com/hg/help/urls and the examples listed in + // http://selenic.com/repo/hg-stable/file/1265a3a71d75/mercurial/util.py#l1936 + return runtime.GOOS == "windows" && u.Scheme == "file" && + len(u.Path) > 1 && u.Path[0] != '/' && u.Path[1] == ':' +} diff --git a/api/internal/getter/get_hg_test.go b/api/internal/getter/get_hg_test.go new file mode 100644 index 00000000000..ee16579451c --- /dev/null +++ b/api/internal/getter/get_hg_test.go @@ -0,0 +1,99 @@ +package getter + +import ( + "os" + "os/exec" + "path/filepath" + "testing" +) + +var testHasHg bool + +func init() { + if _, err := exec.LookPath("hg"); err == nil { + testHasHg = true + } +} + +func TestHgGetter_impl(t *testing.T) { + var _ Getter = new(HgGetter) +} + +func TestHgGetter(t *testing.T) { + if !testHasHg { + t.Log("hg not found, skipping") + t.Skip() + } + + g := new(HgGetter) + dst := tempDir(t) + + // With a dir that doesn't exist + if err := g.Get(dst, testModuleURL("basic-hg")); err != nil { + t.Fatalf("err: %s", err) + } + + // Verify the main file exists + mainPath := filepath.Join(dst, "main.tf") + if _, err := os.Stat(mainPath); err != nil { + t.Fatalf("err: %s", err) + } +} + +func TestHgGetter_branch(t *testing.T) { + if !testHasHg { + t.Log("hg not found, skipping") + t.Skip() + } + + g := new(HgGetter) + dst := tempDir(t) + + url := testModuleURL("basic-hg") + q := url.Query() + q.Add("rev", "test-branch") + url.RawQuery = q.Encode() + + if err := g.Get(dst, url); err != nil { + t.Fatalf("err: %s", err) + } + + // Verify the main file exists + mainPath := filepath.Join(dst, "main_branch.tf") + if _, err := os.Stat(mainPath); err != nil { + t.Fatalf("err: %s", err) + } + + // Get again should work + if err := g.Get(dst, url); err != nil { + t.Fatalf("err: %s", err) + } + + // Verify the main file exists + mainPath = filepath.Join(dst, "main_branch.tf") + if _, err := os.Stat(mainPath); err != nil { + t.Fatalf("err: %s", err) + } +} + +func TestHgGetter_GetFile(t *testing.T) { + if !testHasHg { + t.Log("hg not found, skipping") + t.Skip() + } + + g := new(HgGetter) + dst := tempTestFile(t) + defer os.RemoveAll(filepath.Dir(dst)) + + // Download + if err := g.GetFile(dst, testModuleURL("basic-hg/foo.txt")); err != nil { + t.Fatalf("err: %s", err) + } + + // Verify the main file exists + if _, err := os.Stat(dst); err != nil { + t.Fatalf("err: %s", err) + } + assertContents(t, dst, "Hello\n") +} diff --git a/api/internal/getter/get_http.go b/api/internal/getter/get_http.go new file mode 100644 index 00000000000..618a411f961 --- /dev/null +++ b/api/internal/getter/get_http.go @@ -0,0 +1,328 @@ +package getter + +import ( + "context" + "encoding/xml" + "fmt" + "io" + "net/http" + "net/url" + "os" + "path/filepath" + "strings" + + safetemp "github.com/hashicorp/go-safetemp" +) + +// HttpGetter is a Getter implementation that will download from an HTTP +// endpoint. +// +// For file downloads, HTTP is used directly. +// +// The protocol for downloading a directory from an HTTP endpoint is as follows: +// +// An HTTP GET request is made to the URL with the additional GET parameter +// "terraform-get=1". This lets you handle that scenario specially if you +// wish. The response must be a 2xx. +// +// First, a header is looked for "X-Terraform-Get" which should contain +// a source URL to download. +// +// If the header is not present, then a meta tag is searched for named +// "terraform-get" and the content should be a source URL. 
+// +// The source URL, whether from the header or meta tag, must be a fully +// formed URL. The shorthand syntax of "github.com/foo/bar" or relative +// paths are not allowed. +type HttpGetter struct { + getter + + // Netrc, if true, will lookup and use auth information found + // in the user's netrc file if available. + Netrc bool + + // Client is the http.Client to use for Get requests. + // This defaults to a cleanhttp.DefaultClient if left unset. + Client *http.Client + + // Header contains optional request header fields that should be included + // with every HTTP request. Note that the zero value of this field is nil, + // and as such it needs to be initialized before use, via something like + // make(http.Header). + Header http.Header +} + +func (g *HttpGetter) ClientMode(u *url.URL) (ClientMode, error) { + if strings.HasSuffix(u.Path, "/") { + return ClientModeDir, nil + } + return ClientModeFile, nil +} + +func (g *HttpGetter) Get(dst string, u *url.URL) error { + ctx := g.Context() + // Copy the URL so we can modify it + var newU url.URL = *u + u = &newU + + if g.Netrc { + // Add auth from netrc if we can + if err := addAuthFromNetrc(u); err != nil { + return err + } + } + + if g.Client == nil { + g.Client = httpClient + } + + // Add terraform-get to the parameter. + q := u.Query() + q.Add("terraform-get", "1") + u.RawQuery = q.Encode() + + // Get the URL + req, err := http.NewRequest("GET", u.String(), nil) + if err != nil { + return err + } + + if g.Header != nil { + req.Header = g.Header.Clone() + } + + resp, err := g.Client.Do(req) + if err != nil { + return err + } + + defer resp.Body.Close() + if resp.StatusCode < 200 || resp.StatusCode >= 300 { + return fmt.Errorf("bad response code: %d", resp.StatusCode) + } + + // Extract the source URL + var source string + if v := resp.Header.Get("X-Terraform-Get"); v != "" { + source = v + } else { + source, err = g.parseMeta(resp.Body) + if err != nil { + return err + } + } + if source == "" { + return fmt.Errorf("no source URL was returned") + } + + // If there is a subdir component, then we download the root separately + // into a temporary directory, then copy over the proper subdir. + source, subDir := SourceDirSubdir(source) + if subDir == "" { + var opts []ClientOption + if g.client != nil { + opts = g.client.Options + } + return Get(dst, source, opts...) + } + + // We have a subdir, time to jump some hoops + return g.getSubdir(ctx, dst, source, subDir) +} + +// GetFile fetches the file from src and stores it at dst. +// If the server supports Accept-Range, HttpGetter will attempt a range +// request. This means it is the caller's responsibility to ensure that an +// older version of the destination file does not exist, else it will be either +// falsely identified as being replaced, or corrupted with extra bytes +// appended. +func (g *HttpGetter) GetFile(dst string, src *url.URL) error { + ctx := g.Context() + if g.Netrc { + // Add auth from netrc if we can + if err := addAuthFromNetrc(src); err != nil { + return err + } + } + // Create all the parent directories if needed + if err := os.MkdirAll(filepath.Dir(dst), 0755); err != nil { + return err + } + + f, err := os.OpenFile(dst, os.O_RDWR|os.O_CREATE, os.FileMode(0666)) + if err != nil { + return err + } + defer f.Close() + + if g.Client == nil { + g.Client = httpClient + } + + var currentFileSize int64 + + // We first make a HEAD request so we can check + // if the server supports range queries. 
If the server/URL doesn't + // support HEAD requests, we just fall back to GET. + req, err := http.NewRequest("HEAD", src.String(), nil) + if err != nil { + return err + } + if g.Header != nil { + req.Header = g.Header.Clone() + } + headResp, err := g.Client.Do(req) + if err == nil { + headResp.Body.Close() + if headResp.StatusCode == 200 { + // If the HEAD request succeeded, then attempt to set the range + // query if we can. + if headResp.Header.Get("Accept-Ranges") == "bytes" && headResp.ContentLength >= 0 { + if fi, err := f.Stat(); err == nil { + if _, err = f.Seek(0, io.SeekEnd); err == nil { + currentFileSize = fi.Size() + req.Header.Set("Range", fmt.Sprintf("bytes=%d-", currentFileSize)) + if currentFileSize >= headResp.ContentLength { + // file already present + return nil + } + } + } + } + } + } + req.Method = "GET" + + resp, err := g.Client.Do(req) + if err != nil { + return err + } + switch resp.StatusCode { + case http.StatusOK, http.StatusPartialContent: + // all good + default: + resp.Body.Close() + return fmt.Errorf("bad response code: %d", resp.StatusCode) + } + + body := resp.Body + + if g.client != nil && g.client.ProgressListener != nil { + // track download + fn := filepath.Base(src.EscapedPath()) + body = g.client.ProgressListener.TrackProgress(fn, currentFileSize, currentFileSize+resp.ContentLength, resp.Body) + } + defer body.Close() + + n, err := Copy(ctx, f, body) + if err == nil && n < resp.ContentLength { + err = io.ErrShortWrite + } + return err +} + +// getSubdir downloads the source into the destination, but with +// the proper subdir. +func (g *HttpGetter) getSubdir(ctx context.Context, dst, source, subDir string) error { + // Create a temporary directory to store the full source. This has to be + // a non-existent directory. + td, tdcloser, err := safetemp.Dir("", "getter") + if err != nil { + return err + } + defer tdcloser.Close() + + var opts []ClientOption + if g.client != nil { + opts = g.client.Options + } + // Download that into the given directory + if err := Get(td, source, opts...); err != nil { + return err + } + + // Process any globbing + sourcePath, err := SubdirGlob(td, subDir) + if err != nil { + return err + } + + // Make sure the subdir path actually exists + if _, err := os.Stat(sourcePath); err != nil { + return fmt.Errorf( + "Error downloading %s: %s", source, err) + } + + // Copy the subdirectory into our actual destination. + if err := os.RemoveAll(dst); err != nil { + return err + } + + // Make the final destination + if err := os.MkdirAll(dst, 0755); err != nil { + return err + } + + return copyDir(ctx, dst, sourcePath, false) +} + +// parseMeta looks for the first meta tag in the given reader that +// will give us the source URL. 
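+//
+// For example (a sketch; the real fixture is templated by the tests), a body
+// such as
+//   <html><head>
+//     <meta name="terraform-get" content="https://example.com/real/source.tar.gz">
+//   </head></html>
+// makes parseMeta return the value of the content attribute.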
+func (g *HttpGetter) parseMeta(r io.Reader) (string, error) { + d := xml.NewDecoder(r) + d.CharsetReader = charsetReader + d.Strict = false + var err error + var t xml.Token + for { + t, err = d.Token() + if err != nil { + if err == io.EOF { + err = nil + } + return "", err + } + if e, ok := t.(xml.StartElement); ok && strings.EqualFold(e.Name.Local, "body") { + return "", nil + } + if e, ok := t.(xml.EndElement); ok && strings.EqualFold(e.Name.Local, "head") { + return "", nil + } + e, ok := t.(xml.StartElement) + if !ok || !strings.EqualFold(e.Name.Local, "meta") { + continue + } + if attrValue(e.Attr, "name") != "terraform-get" { + continue + } + if f := attrValue(e.Attr, "content"); f != "" { + return f, nil + } + } +} + +// attrValue returns the attribute value for the case-insensitive key +// `name', or the empty string if nothing is found. +func attrValue(attrs []xml.Attr, name string) string { + for _, a := range attrs { + if strings.EqualFold(a.Name.Local, name) { + return a.Value + } + } + return "" +} + +// charsetReader returns a reader for the given charset. Currently +// it only supports UTF-8 and ASCII. Otherwise, it returns a meaningful +// error which is printed by go get, so the user can find why the package +// wasn't downloaded if the encoding is not supported. Note that, in +// order to reduce potential errors, ASCII is treated as UTF-8 (i.e. characters +// greater than 0x7f are not rejected). +func charsetReader(charset string, input io.Reader) (io.Reader, error) { + switch strings.ToLower(charset) { + case "ascii": + return input, nil + default: + return nil, fmt.Errorf("can't decode XML document using charset %q", charset) + } +} diff --git a/api/internal/getter/get_http_test.go b/api/internal/getter/get_http_test.go new file mode 100644 index 00000000000..6d7fb90ace0 --- /dev/null +++ b/api/internal/getter/get_http_test.go @@ -0,0 +1,516 @@ +package getter + +import ( + "crypto/sha256" + "encoding/hex" + "errors" + "fmt" + "io/ioutil" + "net" + "net/http" + "net/url" + "os" + "path/filepath" + "strconv" + "strings" + "testing" +) + +func TestHttpGetter_impl(t *testing.T) { + var _ Getter = new(HttpGetter) +} + +func TestHttpGetter_header(t *testing.T) { + ln := testHttpServer(t) + defer ln.Close() + + g := new(HttpGetter) + dst := tempDir(t) + defer os.RemoveAll(dst) + + var u url.URL + u.Scheme = "http" + u.Host = ln.Addr().String() + u.Path = "/header" + + // Get it! + if err := g.Get(dst, &u); err != nil { + t.Fatalf("err: %s", err) + } + + // Verify the main file exists + mainPath := filepath.Join(dst, "main.tf") + if _, err := os.Stat(mainPath); err != nil { + t.Fatalf("err: %s", err) + } +} + +func TestHttpGetter_requestHeader(t *testing.T) { + ln := testHttpServer(t) + defer ln.Close() + + g := new(HttpGetter) + g.Header = make(http.Header) + g.Header.Add("X-Foobar", "foobar") + dst := tempDir(t) + defer os.RemoveAll(dst) + + var u url.URL + u.Scheme = "http" + u.Host = ln.Addr().String() + u.Path = "/expect-header" + u.RawQuery = "expected=X-Foobar" + + // Get it! + if err := g.GetFile(dst, &u); err != nil { + t.Fatalf("err: %s", err) + } + + // Verify the main file exists + if _, err := os.Stat(dst); err != nil { + t.Fatalf("err: %s", err) + } + assertContents(t, dst, "Hello\n") +} + +func TestHttpGetter_meta(t *testing.T) { + ln := testHttpServer(t) + defer ln.Close() + + g := new(HttpGetter) + dst := tempDir(t) + defer os.RemoveAll(dst) + + var u url.URL + u.Scheme = "http" + u.Host = ln.Addr().String() + u.Path = "/meta" + + // Get it! 
+ if err := g.Get(dst, &u); err != nil { + t.Fatalf("err: %s", err) + } + + // Verify the main file exists + mainPath := filepath.Join(dst, "main.tf") + if _, err := os.Stat(mainPath); err != nil { + t.Fatalf("err: %s", err) + } +} + +func TestHttpGetter_metaSubdir(t *testing.T) { + ln := testHttpServer(t) + defer ln.Close() + + g := new(HttpGetter) + dst := tempDir(t) + defer os.RemoveAll(dst) + + var u url.URL + u.Scheme = "http" + u.Host = ln.Addr().String() + u.Path = "/meta-subdir" + + // Get it! + if err := g.Get(dst, &u); err != nil { + t.Fatalf("err: %s", err) + } + + // Verify the main file exists + mainPath := filepath.Join(dst, "sub.tf") + if _, err := os.Stat(mainPath); err != nil { + t.Fatalf("err: %s", err) + } +} + +func TestHttpGetter_metaSubdirGlob(t *testing.T) { + ln := testHttpServer(t) + defer ln.Close() + + g := new(HttpGetter) + dst := tempDir(t) + defer os.RemoveAll(dst) + + var u url.URL + u.Scheme = "http" + u.Host = ln.Addr().String() + u.Path = "/meta-subdir-glob" + + // Get it! + if err := g.Get(dst, &u); err != nil { + t.Fatalf("err: %s", err) + } + + // Verify the main file exists + mainPath := filepath.Join(dst, "sub.tf") + if _, err := os.Stat(mainPath); err != nil { + t.Fatalf("err: %s", err) + } +} + +func TestHttpGetter_none(t *testing.T) { + ln := testHttpServer(t) + defer ln.Close() + + g := new(HttpGetter) + dst := tempDir(t) + defer os.RemoveAll(dst) + + var u url.URL + u.Scheme = "http" + u.Host = ln.Addr().String() + u.Path = "/none" + + // Get it! + if err := g.Get(dst, &u); err == nil { + t.Fatal("should error") + } +} + +func TestHttpGetter_resume(t *testing.T) { + load := []byte(testHttpMetaStr) + sha := sha256.New() + if n, err := sha.Write(load); n != len(load) || err != nil { + t.Fatalf("sha write failed: %d, %s", n, err) + } + checksum := hex.EncodeToString(sha.Sum(nil)) + downloadFrom := len(load) / 2 + + ln := testHttpServer(t) + defer ln.Close() + + dst := tempDir(t) + defer os.RemoveAll(dst) + + dst = filepath.Join(dst, "..", "range") + f, err := os.Create(dst) + if err != nil { + t.Fatalf("create: %v", err) + } + if n, err := f.Write(load[:downloadFrom]); n != downloadFrom || err != nil { + t.Fatalf("partial file write failed: %d, %s", n, err) + } + if err := f.Close(); err != nil { + t.Fatalf("close failed: %s", err) + } + + u := url.URL{ + Scheme: "http", + Host: ln.Addr().String(), + Path: "/range", + RawQuery: "checksum=" + checksum, + } + t.Logf("url: %s", u.String()) + + // Finish getting it! 
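+ // GetFile is expected to notice the existing partial file, request only the
+ // remaining bytes via a Range header (see HttpGetter.GetFile), and then have
+ // the checksum from the query string verified against the completed file.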
+ if err := GetFile(dst, u.String()); err != nil { + t.Fatalf("finishing download should not error: %v", err) + } + + b, err := ioutil.ReadFile(dst) + if err != nil { + t.Fatalf("readfile failed: %v", err) + } + + if string(b) != string(load) { + t.Fatalf("file differs: got:\n%s\n expected:\n%s\n", string(b), string(load)) + } + + // Get it again + if err := GetFile(dst, u.String()); err != nil { + t.Fatalf("should not error: %v", err) + } +} + +// The server may support Byte-Range, but has no size for the requested object +func TestHttpGetter_resumeNoRange(t *testing.T) { + load := []byte(testHttpMetaStr) + sha := sha256.New() + if n, err := sha.Write(load); n != len(load) || err != nil { + t.Fatalf("sha write failed: %d, %s", n, err) + } + checksum := hex.EncodeToString(sha.Sum(nil)) + downloadFrom := len(load) / 2 + + ln := testHttpServer(t) + defer ln.Close() + + dst := tempDir(t) + defer os.RemoveAll(dst) + + dst = filepath.Join(dst, "..", "range") + f, err := os.Create(dst) + if err != nil { + t.Fatalf("create: %v", err) + } + if n, err := f.Write(load[:downloadFrom]); n != downloadFrom || err != nil { + t.Fatalf("partial file write failed: %d, %s", n, err) + } + if err := f.Close(); err != nil { + t.Fatalf("close failed: %s", err) + } + + u := url.URL{ + Scheme: "http", + Host: ln.Addr().String(), + Path: "/no-range", + RawQuery: "checksum=" + checksum, + } + t.Logf("url: %s", u.String()) + + // Finish getting it! + if err := GetFile(dst, u.String()); err != nil { + t.Fatalf("finishing download should not error: %v", err) + } + + b, err := ioutil.ReadFile(dst) + if err != nil { + t.Fatalf("readfile failed: %v", err) + } + + if string(b) != string(load) { + t.Fatalf("file differs: got:\n%s\n expected:\n%s\n", string(b), string(load)) + } +} + +func TestHttpGetter_file(t *testing.T) { + ln := testHttpServer(t) + defer ln.Close() + + g := new(HttpGetter) + dst := tempTestFile(t) + defer os.RemoveAll(filepath.Dir(dst)) + + var u url.URL + u.Scheme = "http" + u.Host = ln.Addr().String() + u.Path = "/file" + + // Get it! + if err := g.GetFile(dst, &u); err != nil { + t.Fatalf("err: %s", err) + } + + // Verify the main file exists + if _, err := os.Stat(dst); err != nil { + t.Fatalf("err: %s", err) + } + assertContents(t, dst, "Hello\n") +} + +func TestHttpGetter_auth(t *testing.T) { + ln := testHttpServer(t) + defer ln.Close() + + g := new(HttpGetter) + dst := tempDir(t) + defer os.RemoveAll(dst) + + var u url.URL + u.Scheme = "http" + u.Host = ln.Addr().String() + u.Path = "/meta-auth" + u.User = url.UserPassword("foo", "bar") + + // Get it! + if err := g.Get(dst, &u); err != nil { + t.Fatalf("err: %s", err) + } + + // Verify the main file exists + mainPath := filepath.Join(dst, "main.tf") + if _, err := os.Stat(mainPath); err != nil { + t.Fatalf("err: %s", err) + } +} + +func TestHttpGetter_authNetrc(t *testing.T) { + ln := testHttpServer(t) + defer ln.Close() + + g := new(HttpGetter) + dst := tempDir(t) + defer os.RemoveAll(dst) + + var u url.URL + u.Scheme = "http" + u.Host = ln.Addr().String() + u.Path = "/meta" + + // Write the netrc file + path, closer := tempFileContents(t, fmt.Sprintf(testHttpNetrc, ln.Addr().String())) + defer closer() + defer tempEnv(t, "NETRC", path)() + + // Get it! 
+ if err := g.Get(dst, &u); err != nil { + t.Fatalf("err: %s", err) + } + + // Verify the main file exists + mainPath := filepath.Join(dst, "main.tf") + if _, err := os.Stat(mainPath); err != nil { + t.Fatalf("err: %s", err) + } +} + +// test round tripper that only returns an error +type errRoundTripper struct{} + +func (errRoundTripper) RoundTrip(r *http.Request) (*http.Response, error) { + return nil, errors.New("test round tripper") +} + +// verify that the default httpClient no longer comes from http.DefaultClient +func TestHttpGetter_cleanhttp(t *testing.T) { + ln := testHttpServer(t) + defer ln.Close() + + // break the default http client + http.DefaultClient.Transport = errRoundTripper{} + defer func() { + http.DefaultClient.Transport = http.DefaultTransport + }() + + g := new(HttpGetter) + dst := tempDir(t) + defer os.RemoveAll(dst) + + var u url.URL + u.Scheme = "http" + u.Host = ln.Addr().String() + u.Path = "/header" + + // Get it! + if err := g.Get(dst, &u); err != nil { + t.Fatalf("err: %s", err) + } +} + +func testHttpServer(t *testing.T) net.Listener { + ln, err := net.Listen("tcp", "127.0.0.1:0") + if err != nil { + t.Fatalf("err: %s", err) + } + + mux := http.NewServeMux() + mux.HandleFunc("/expect-header", testHttpHandlerExpectHeader) + mux.HandleFunc("/file", testHttpHandlerFile) + mux.HandleFunc("/header", testHttpHandlerHeader) + mux.HandleFunc("/meta", testHttpHandlerMeta) + mux.HandleFunc("/meta-auth", testHttpHandlerMetaAuth) + mux.HandleFunc("/meta-subdir", testHttpHandlerMetaSubdir) + mux.HandleFunc("/meta-subdir-glob", testHttpHandlerMetaSubdirGlob) + mux.HandleFunc("/range", testHttpHandlerRange) + mux.HandleFunc("/no-range", testHttpHandlerNoRange) + + var server http.Server + server.Handler = mux + go server.Serve(ln) + + return ln +} + +func testHttpHandlerExpectHeader(w http.ResponseWriter, r *http.Request) { + if expected, ok := r.URL.Query()["expected"]; ok { + if r.Header.Get(expected[0]) != "" { + w.Write([]byte("Hello\n")) + return + } + } + + w.WriteHeader(400) +} + +func testHttpHandlerFile(w http.ResponseWriter, r *http.Request) { + w.Write([]byte("Hello\n")) +} + +func testHttpHandlerHeader(w http.ResponseWriter, r *http.Request) { + w.Header().Add("X-Terraform-Get", testModuleURL("basic").String()) + w.WriteHeader(200) +} + +func testHttpHandlerMeta(w http.ResponseWriter, r *http.Request) { + w.Write([]byte(fmt.Sprintf(testHttpMetaStr, testModuleURL("basic").String()))) +} + +func testHttpHandlerMetaAuth(w http.ResponseWriter, r *http.Request) { + user, pass, ok := r.BasicAuth() + if !ok { + w.WriteHeader(401) + return + } + + if user != "foo" || pass != "bar" { + w.WriteHeader(401) + return + } + + w.Write([]byte(fmt.Sprintf(testHttpMetaStr, testModuleURL("basic").String()))) +} + +func testHttpHandlerMetaSubdir(w http.ResponseWriter, r *http.Request) { + w.Write([]byte(fmt.Sprintf(testHttpMetaStr, testModuleURL("basic//subdir").String()))) +} + +func testHttpHandlerMetaSubdirGlob(w http.ResponseWriter, r *http.Request) { + w.Write([]byte(fmt.Sprintf(testHttpMetaStr, testModuleURL("basic//sub*").String()))) +} + +func testHttpHandlerNone(w http.ResponseWriter, r *http.Request) { + w.Write([]byte(testHttpNoneStr)) +} + +func testHttpHandlerRange(w http.ResponseWriter, r *http.Request) { + load := []byte(testHttpMetaStr) + switch r.Method { + case "HEAD": + w.Header().Add("accept-ranges", "bytes") + w.Header().Add("content-length", strconv.Itoa(len(load))) + default: + // request should have header "Range: bytes=0-1023" + // or "Range: bytes=123-" 
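+ // A sketch of what happens next: the handler parses the starting offset out
+ // of the Range header and replies with the bytes from that offset to the
+ // end, which is all the resume tests above need.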
+ rangeHeaderValue := strings.Split(r.Header.Get("Range"), "=")[1] + rng, _ := strconv.Atoi(strings.Split(rangeHeaderValue, "-")[0]) + if rng < 1 || rng > len(load) { + http.Error(w, "", http.StatusBadRequest) + } + w.Write(load[rng:]) + } +} + +func testHttpHandlerNoRange(w http.ResponseWriter, r *http.Request) { + load := []byte(testHttpMetaStr) + switch r.Method { + case "HEAD": + // we support range, but the object size isn't known + w.Header().Add("accept-ranges", "bytes") + default: + if r.Header.Get("Range") != "" { + http.Error(w, "range not supported", http.StatusBadRequest) + } + w.Write(load) + } +} + +const testHttpMetaStr = ` + + + + + +` + +const testHttpNoneStr = ` + + + + +` + +const testHttpNetrc = ` +machine %s +login foo +password bar +` diff --git a/api/internal/getter/get_mock.go b/api/internal/getter/get_mock.go new file mode 100644 index 00000000000..e2a98ea2843 --- /dev/null +++ b/api/internal/getter/get_mock.go @@ -0,0 +1,54 @@ +package getter + +import ( + "net/url" +) + +// MockGetter is an implementation of Getter that can be used for tests. +type MockGetter struct { + getter + + // Proxy, if set, will be called after recording the calls below. + // If it isn't set, then the *Err values will be returned. + Proxy Getter + + GetCalled bool + GetDst string + GetURL *url.URL + GetErr error + + GetFileCalled bool + GetFileDst string + GetFileURL *url.URL + GetFileErr error +} + +func (g *MockGetter) Get(dst string, u *url.URL) error { + g.GetCalled = true + g.GetDst = dst + g.GetURL = u + + if g.Proxy != nil { + return g.Proxy.Get(dst, u) + } + + return g.GetErr +} + +func (g *MockGetter) GetFile(dst string, u *url.URL) error { + g.GetFileCalled = true + g.GetFileDst = dst + g.GetFileURL = u + + if g.Proxy != nil { + return g.Proxy.GetFile(dst, u) + } + return g.GetFileErr +} + +func (g *MockGetter) ClientMode(u *url.URL) (ClientMode, error) { + if l := len(u.Path); l > 0 && u.Path[l-1:] == "/" { + return ClientModeDir, nil + } + return ClientModeFile, nil +} diff --git a/api/internal/getter/get_test.go b/api/internal/getter/get_test.go new file mode 100644 index 00000000000..d70aed88193 --- /dev/null +++ b/api/internal/getter/get_test.go @@ -0,0 +1,524 @@ +package getter + +import ( + "os" + "path/filepath" + "strings" + "testing" +) + +func TestGet_badSchema(t *testing.T) { + dst := tempDir(t) + u := testModule("basic") + u = strings.Replace(u, "file", "nope", -1) + + if err := Get(dst, u); err == nil { + t.Fatal("should error") + } +} + +func TestGet_file(t *testing.T) { + dst := tempDir(t) + u := testModule("basic") + + if err := Get(dst, u); err != nil { + t.Fatalf("err: %s", err) + } + + mainPath := filepath.Join(dst, "main.tf") + if _, err := os.Stat(mainPath); err != nil { + t.Fatalf("err: %s", err) + } +} + +// https://github.com/hashicorp/terraform/issues/11438 +func TestGet_fileDecompressorExt(t *testing.T) { + dst := tempDir(t) + u := testModule("basic-tgz") + + if err := Get(dst, u); err != nil { + t.Fatalf("err: %s", err) + } + + mainPath := filepath.Join(dst, "main.tf") + if _, err := os.Stat(mainPath); err != nil { + t.Fatalf("err: %s", err) + } +} + +// https://github.com/hashicorp/terraform/issues/8418 +func TestGet_filePercent2F(t *testing.T) { + dst := tempDir(t) + u := testModule("basic%2Ftest") + + if err := Get(dst, u); err != nil { + t.Fatalf("err: %s", err) + } + + mainPath := filepath.Join(dst, "main.tf") + if _, err := os.Stat(mainPath); err != nil { + t.Fatalf("err: %s", err) + } +} + +func TestGet_fileDetect(t *testing.T) { + dst 
:= tempDir(t) + u := filepath.Join(".", "testdata", "basic") + pwd, err := os.Getwd() + if err != nil { + t.Fatalf("err: %s", err) + } + + client := &Client{ + Src: u, + Dst: dst, + Pwd: pwd, + Dir: true, + } + + if err := client.Configure(); err != nil { + t.Fatalf("configure: %s", err) + } + + if err := client.Get(); err != nil { + t.Fatalf("get: %s", err) + } + + mainPath := filepath.Join(dst, "main.tf") + if _, err := os.Stat(mainPath); err != nil { + t.Fatalf("stat: %s", err) + } +} + +func TestGet_fileForced(t *testing.T) { + dst := tempDir(t) + u := testModule("basic") + u = "file::" + u + + if err := Get(dst, u); err != nil { + t.Fatalf("err: %s", err) + } + + mainPath := filepath.Join(dst, "main.tf") + if _, err := os.Stat(mainPath); err != nil { + t.Fatalf("err: %s", err) + } +} + +func TestGet_fileSubdir(t *testing.T) { + dst := tempDir(t) + u := testModule("basic//subdir") + + if err := Get(dst, u); err != nil { + t.Fatalf("err: %s", err) + } + + mainPath := filepath.Join(dst, "sub.tf") + if _, err := os.Stat(mainPath); err != nil { + t.Fatalf("err: %s", err) + } +} + +func TestGet_archive(t *testing.T) { + dst := tempDir(t) + u := filepath.Join("./testdata", "archive.tar.gz") + u, _ = filepath.Abs(u) + + if err := Get(dst, u); err != nil { + t.Fatalf("err: %s", err) + } + + mainPath := filepath.Join(dst, "main.tf") + if _, err := os.Stat(mainPath); err != nil { + t.Fatalf("err: %s", err) + } +} + +func TestGetAny_archive(t *testing.T) { + dst := tempDir(t) + u := filepath.Join("./testdata", "archive.tar.gz") + u, _ = filepath.Abs(u) + + if err := GetAny(dst, u); err != nil { + t.Fatalf("err: %s", err) + } + + mainPath := filepath.Join(dst, "main.tf") + if _, err := os.Stat(mainPath); err != nil { + t.Fatalf("err: %s", err) + } +} + +func TestGet_archiveRooted(t *testing.T) { + dst := tempDir(t) + u := testModule("archive-rooted/archive.tar.gz") + if err := Get(dst, u); err != nil { + t.Fatalf("err: %s", err) + } + + mainPath := filepath.Join(dst, "root", "hello.txt") + if _, err := os.Stat(mainPath); err != nil { + t.Fatalf("err: %s", err) + } +} + +func TestGet_archiveSubdirWild(t *testing.T) { + dst := tempDir(t) + u := testModule("archive-rooted/archive.tar.gz") + u += "//*" + if err := Get(dst, u); err != nil { + t.Fatalf("err: %s", err) + } + + mainPath := filepath.Join(dst, "hello.txt") + if _, err := os.Stat(mainPath); err != nil { + t.Fatalf("err: %s", err) + } +} + +func TestGet_archiveSubdirWildMultiMatch(t *testing.T) { + dst := tempDir(t) + u := testModule("archive-rooted-multi/archive.tar.gz") + u += "//*" + if err := Get(dst, u); err == nil { + t.Fatal("should error") + } else if !strings.Contains(err.Error(), "multiple") { + t.Fatalf("err: %s", err) + } +} + +func TestGetAny_file(t *testing.T) { + dst := tempDir(t) + u := testModule("basic-file/foo.txt") + + if err := GetAny(dst, u); err != nil { + t.Fatalf("err: %s", err) + } + + mainPath := filepath.Join(dst, "foo.txt") + if _, err := os.Stat(mainPath); err != nil { + t.Fatalf("err: %s", err) + } +} + +func TestGetAny_dir(t *testing.T) { + dst := tempDir(t) + u := filepath.Join("./testdata", "basic") + u, _ = filepath.Abs(u) + + if err := GetAny(dst, u); err != nil { + t.Fatalf("err: %s", err) + } + + check := []string{ + "main.tf", + "foo/main.tf", + } + + for _, name := range check { + mainPath := filepath.Join(dst, name) + if _, err := os.Stat(mainPath); err != nil { + t.Fatalf("err: %s", err) + } + } +} + +func TestGetFile(t *testing.T) { + dst := tempTestFile(t) + defer os.RemoveAll(filepath.Dir(dst)) + u 
:= testModule("basic-file/foo.txt") + + if err := GetFile(dst, u); err != nil { + t.Fatalf("err: %s", err) + } + + // Verify the main file exists + assertContents(t, dst, "Hello\n") +} + +func TestGetFile_archive(t *testing.T) { + dst := tempTestFile(t) + defer os.RemoveAll(filepath.Dir(dst)) + u := testModule("basic-file-archive/archive.tar.gz") + + if err := GetFile(dst, u); err != nil { + t.Fatalf("err: %s", err) + } + + // Verify the main file exists + assertContents(t, dst, "Hello\n") +} + +func TestGetFile_archiveChecksum(t *testing.T) { + dst := tempTestFile(t) + defer os.RemoveAll(filepath.Dir(dst)) + u := testModule( + "basic-file-archive/archive.tar.gz?checksum=md5:fbd90037dacc4b1ab40811d610dde2f0") + + if err := GetFile(dst, u); err != nil { + t.Fatalf("err: %s", err) + } + + // Verify the main file exists + assertContents(t, dst, "Hello\n") +} + +func TestGetFile_archiveNoUnarchive(t *testing.T) { + dst := tempTestFile(t) + defer os.RemoveAll(filepath.Dir(dst)) + u := testModule("basic-file-archive/archive.tar.gz") + u += "?archive=false" + + if err := GetFile(dst, u); err != nil { + t.Fatalf("err: %s", err) + } + + // Verify the main file exists + actual := testMD5(t, dst) + expected := "fbd90037dacc4b1ab40811d610dde2f0" + if actual != expected { + t.Fatalf("bad: %s", actual) + } +} + +func TestGetFile_checksum(t *testing.T) { + cases := []struct { + Append string + Err bool + }{ + { + "", + false, + }, + + // MD5 + { + "?checksum=09f7e02f1290be211da707a266f153b3", + false, + }, + { + "?checksum=md5:09f7e02f1290be211da707a266f153b3", + false, + }, + { + "?checksum=md5:09f7e02f1290be211da707a266f153b4", + true, + }, + + // SHA1 + { + "?checksum=1d229271928d3f9e2bb0375bd6ce5db6c6d348d9", + false, + }, + { + "?checksum=sha1:1d229271928d3f9e2bb0375bd6ce5db6c6d348d9", + false, + }, + { + "?checksum=sha1:1d229271928d3f9e2bb0375bd6ce5db6c6d348d0", + true, + }, + + // SHA256 + { + "?checksum=66a045b452102c59d840ec097d59d9467e13a3f34f6494e539ffd32c1bb35f18", + false, + }, + { + "?checksum=sha256:66a045b452102c59d840ec097d59d9467e13a3f34f6494e539ffd32c1bb35f18", + false, + }, + { + "?checksum=sha256:66a045b452102c59d840ec097d59d9467e13a3f34f6494e539ffd32c1bb35f19", + true, + }, + + // SHA512 + { + "?checksum=c2bad2223811194582af4d1508ac02cd69eeeeedeeb98d54fcae4dcefb13cc882e7640328206603d3fb9cd5f949a9be0db054dd34fbfa190c498a5fe09750cef", + false, + }, + { + "?checksum=sha512:c2bad2223811194582af4d1508ac02cd69eeeeedeeb98d54fcae4dcefb13cc882e7640328206603d3fb9cd5f949a9be0db054dd34fbfa190c498a5fe09750cef", + false, + }, + { + "?checksum=sha512:c2bad2223811194582af4d1508ac02cd69eeeeedeeb98d54fcae4dcefb13cc882e7640328206603d3fb9cd5f949a9be0db054dd34fbfa190c498a5fe09750ced", + true, + }, + } + + for _, tc := range cases { + u := testModule("basic-file/foo.txt") + tc.Append + + func() { + dst := tempTestFile(t) + defer os.RemoveAll(filepath.Dir(dst)) + if err := GetFile(dst, u); (err != nil) != tc.Err { + t.Fatalf("append: %s\n\nerr: %s", tc.Append, err) + } + + // Verify the main file exists + assertContents(t, dst, "Hello\n") + }() + } +} + +func TestGetFile_checksum_from_file(t *testing.T) { + checksums := testModule("checksum-file") + httpChecksums := httpTestModule("checksum-file") + defer httpChecksums.Close() + + cases := []struct { + Append string + WantTransfer bool + WantErr bool + }{ + { + "", + true, + false, + }, + + // md5 + { + "?checksum=file:" + checksums + "/md5-p.sum", + true, + false, + }, + { + "?checksum=file:" + httpChecksums.URL + "/md5-bsd.sum", + true, + false, + }, + 
{ + "?checksum=file:" + checksums + "/md5-bsd-bad.sum", + false, + true, + }, + { + "?checksum=file:" + httpChecksums.URL + "/md5-bsd-wrong.sum", + true, + true, + }, + + // sha1 + { + "?checksum=file:" + checksums + "/sha1-p.sum", + true, + false, + }, + { + "?checksum=file:" + httpChecksums.URL + "/sha1.sum", + true, + false, + }, + + // sha256 + { + "?checksum=file:" + checksums + "/sha256-p.sum", + true, + false, + }, + + // sha512 + { + "?checksum=file:" + httpChecksums.URL + "/sha512-p.sum", + true, + false, + }, + } + + for _, tc := range cases { + u := checksums + "/content.txt" + tc.Append + t.Run(tc.Append, func(t *testing.T) { + dst := tempTestFile(t) + defer os.RemoveAll(filepath.Dir(dst)) + if err := GetFile(dst, u); (err != nil) != tc.WantErr { + t.Fatalf("append: %s\n\nerr: %s", tc.Append, err) + } + + if tc.WantTransfer { + // Verify the main file exists + assertContents(t, dst, "I am a file with some content\n") + } + }) + } +} + +func TestGetFile_checksumURL(t *testing.T) { + dst := tempTestFile(t) + defer os.RemoveAll(filepath.Dir(dst)) + u := testModule("basic-file/foo.txt") + "?checksum=md5:09f7e02f1290be211da707a266f153b3" + + getter := &MockGetter{Proxy: new(FileGetter)} + client := &Client{ + Src: u, + Dst: dst, + Dir: false, + Getters: map[string]Getter{ + "file": getter, + }, + } + + if err := client.Get(); err != nil { + t.Fatalf("err: %s", err) + } + + if v := getter.GetFileURL.Query().Get("checksum"); v != "" { + t.Fatalf("bad: %s", v) + } +} + +func TestGetFile_filename(t *testing.T) { + dst := tempDir(t) + u := testModule("basic-file/foo.txt") + + u += "?filename=bar.txt" + + if err := GetAny(dst, u); err != nil { + t.Fatalf("err: %s", err) + } + + mainPath := filepath.Join(dst, "bar.txt") + if _, err := os.Stat(mainPath); err != nil { + t.Fatalf("err: %s", err) + } +} + +func TestGetFile_checksumSkip(t *testing.T) { + dst := tempTestFile(t) + defer os.RemoveAll(filepath.Dir(dst)) + u := testModule("basic-file/foo.txt") + "?checksum=md5:09f7e02f1290be211da707a266f153b3" + + getter := &MockGetter{Proxy: new(FileGetter)} + client := &Client{ + Src: u, + Dst: dst, + Dir: false, + Getters: map[string]Getter{ + "file": getter, + }, + } + + // get the file + if err := client.Get(); err != nil { + t.Fatalf("err: %s", err) + } + + if v := getter.GetFileURL.Query().Get("checksum"); v != "" { + t.Fatalf("bad: %s", v) + } + + // remove proxy file getter and reset GetFileCalled so that we can re-test. + getter.Proxy = nil + getter.GetFileCalled = false + + if err := client.Get(); err != nil { + t.Fatalf("err: %s", err) + } + + if getter.GetFileCalled { + t.Fatalf("get should not have been called") + } +} diff --git a/api/internal/getter/helper/url/url.go b/api/internal/getter/helper/url/url.go new file mode 100644 index 00000000000..02497c25433 --- /dev/null +++ b/api/internal/getter/helper/url/url.go @@ -0,0 +1,14 @@ +package url + +import ( + "net/url" +) + +// Parse parses rawURL into a URL structure. +// The rawURL may be relative or absolute. +// +// Parse is a wrapper for the Go stdlib net/url Parse function, but returns +// Windows "safe" URLs on Windows platforms. 
+func Parse(rawURL string) (*url.URL, error) { + return parse(rawURL) +} diff --git a/api/internal/getter/helper/url/url_test.go b/api/internal/getter/helper/url/url_test.go new file mode 100644 index 00000000000..1015f4afc55 --- /dev/null +++ b/api/internal/getter/helper/url/url_test.go @@ -0,0 +1,88 @@ +package url + +import ( + "runtime" + "testing" +) + +type parseTest struct { + rawURL string + scheme string + host string + path string + str string + err bool +} + +var parseTests = []parseTest{ + { + rawURL: "/foo/bar", + scheme: "", + host: "", + path: "/foo/bar", + str: "/foo/bar", + err: false, + }, + { + rawURL: "file:///dir/", + scheme: "file", + host: "", + path: "/dir/", + str: "file:///dir/", + err: false, + }, +} + +var winParseTests = []parseTest{ + { + rawURL: `C:\`, + scheme: `file`, + host: ``, + path: `C:/`, + str: `file://C:/`, + err: false, + }, + { + rawURL: `file://C:\`, + scheme: `file`, + host: ``, + path: `C:/`, + str: `file://C:/`, + err: false, + }, + { + rawURL: `file:///C:\`, + scheme: `file`, + host: ``, + path: `C:/`, + str: `file://C:/`, + err: false, + }, +} + +func TestParse(t *testing.T) { + if runtime.GOOS == "windows" { + parseTests = append(parseTests, winParseTests...) + } + for i, pt := range parseTests { + url, err := Parse(pt.rawURL) + if err != nil && !pt.err { + t.Errorf("test %d: unexpected error: %s", i, err) + } + if err == nil && pt.err { + t.Errorf("test %d: expected an error", i) + } + if url.Scheme != pt.scheme { + t.Errorf("test %d: expected Scheme = %q, got %q", i, pt.scheme, url.Scheme) + } + if url.Host != pt.host { + t.Errorf("test %d: expected Host = %q, got %q", i, pt.host, url.Host) + } + if url.Path != pt.path { + t.Errorf("test %d: expected Path = %q, got %q", i, pt.path, url.Path) + } + if url.String() != pt.str { + t.Errorf("test %d: expected url.String() = %q, got %q", i, pt.str, url.String()) + } + } +} diff --git a/api/internal/getter/helper/url/url_unix.go b/api/internal/getter/helper/url/url_unix.go new file mode 100644 index 00000000000..ed1352a9176 --- /dev/null +++ b/api/internal/getter/helper/url/url_unix.go @@ -0,0 +1,11 @@ +// +build !windows + +package url + +import ( + "net/url" +) + +func parse(rawURL string) (*url.URL, error) { + return url.Parse(rawURL) +} diff --git a/api/internal/getter/helper/url/url_windows.go b/api/internal/getter/helper/url/url_windows.go new file mode 100644 index 00000000000..4280ec59a89 --- /dev/null +++ b/api/internal/getter/helper/url/url_windows.go @@ -0,0 +1,39 @@ +package url + +import ( + "fmt" + "net/url" + "path/filepath" + "strings" +) + +func parse(rawURL string) (*url.URL, error) { + // Make sure we're using "/" since URLs are "/"-based. + rawURL = filepath.ToSlash(rawURL) + + if len(rawURL) > 1 && rawURL[1] == ':' { + // Assume we're dealing with a drive letter. In which case we + // force the 'file' scheme to avoid "net/url" URL.String() prepending + // our url with "./". + rawURL = "file://" + rawURL + } + + u, err := url.Parse(rawURL) + if err != nil { + return nil, err + } + + if len(u.Host) > 1 && u.Host[1] == ':' && strings.HasPrefix(rawURL, "file://") { + // Assume we're dealing with a drive letter file path where the drive + // letter has been parsed into the URL Host. + u.Path = fmt.Sprintf("%s%s", u.Host, u.Path) + u.Host = "" + } + + // Remove leading slash for absolute file paths. 
+ if len(u.Path) > 2 && u.Path[0] == '/' && u.Path[2] == ':' { + u.Path = u.Path[1:] + } + + return u, err +} diff --git a/api/internal/getter/module_test.go b/api/internal/getter/module_test.go new file mode 100644 index 00000000000..c9f0ab16cd7 --- /dev/null +++ b/api/internal/getter/module_test.go @@ -0,0 +1,89 @@ +package getter + +import ( + "io/ioutil" + "net/http" + "net/http/httptest" + "net/url" + "os" + "path/filepath" + "reflect" + "testing" + + urlhelper "./helper/url" +) + +const fixtureDir = "./testdata" + +func tempDir(t *testing.T) string { + dir, err := ioutil.TempDir("", "tf") + if err != nil { + t.Fatalf("err: %s", err) + } + if err := os.RemoveAll(dir); err != nil { + t.Fatalf("err: %s", err) + } + + return dir +} + +func tempTestFile(t *testing.T) string { + dir := tempDir(t) + return filepath.Join(dir, "foo") +} + +func testModule(n string) string { + p := filepath.Join(fixtureDir, n) + p, err := filepath.Abs(p) + if err != nil { + panic(err) + } + return fmtFileURL(p) +} +func httpTestModule(n string) *httptest.Server { + p := filepath.Join(fixtureDir, n) + p, err := filepath.Abs(p) + if err != nil { + panic(err) + } + + return httptest.NewServer(http.FileServer(http.Dir(p))) +} + +func testModuleURL(n string) *url.URL { + n, subDir := SourceDirSubdir(n) + u, err := urlhelper.Parse(testModule(n)) + if err != nil { + panic(err) + } + if subDir != "" { + u.Path += "//" + subDir + u.RawPath = u.Path + } + + return u +} + +func testURL(s string) *url.URL { + u, err := urlhelper.Parse(s) + if err != nil { + panic(err) + } + + return u +} + +func testStorage(t *testing.T) Storage { + return &FolderStorage{StorageDir: tempDir(t)} +} + +func assertContents(t *testing.T, path string, contents string) { + data, err := ioutil.ReadFile(path) + if err != nil { + t.Fatalf("err: %s", err) + } + + if !reflect.DeepEqual(data, []byte(contents)) { + t.Fatalf("bad. expected:\n\n%s\n\nGot:\n\n%s", contents, string(data)) + } +} diff --git a/api/internal/getter/netrc.go b/api/internal/getter/netrc.go new file mode 100644 index 00000000000..c7f6a3fb3fb --- /dev/null +++ b/api/internal/getter/netrc.go @@ -0,0 +1,67 @@ +package getter + +import ( + "fmt" + "net/url" + "os" + "runtime" + + "github.com/bgentry/go-netrc/netrc" + "github.com/mitchellh/go-homedir" +) + +// addAuthFromNetrc adds auth information to the URL from the user's +// netrc file if it can be found. This will only add the auth info +// if the URL doesn't already have auth info specified and the +// the username is blank. +func addAuthFromNetrc(u *url.URL) error { + // If the URL already has auth information, do nothing + if u.User != nil && u.User.Username() != "" { + return nil + } + + // Get the netrc file path + path := os.Getenv("NETRC") + if path == "" { + filename := ".netrc" + if runtime.GOOS == "windows" { + filename = "_netrc" + } + + var err error + path, err = homedir.Expand("~/" + filename) + if err != nil { + return err + } + } + + // If the file is not a file, then do nothing + if fi, err := os.Stat(path); err != nil { + // File doesn't exist, do nothing + if os.IsNotExist(err) { + return nil + } + + // Some other error! 
+ return err + } else if fi.IsDir() { + // File is directory, ignore + return nil + } + + // Load up the netrc file + net, err := netrc.ParseFile(path) + if err != nil { + return fmt.Errorf("Error parsing netrc file at %q: %s", path, err) + } + + machine := net.FindMachine(u.Host) + if machine == nil { + // Machine not found, no problem + return nil + } + + // Set the user info + u.User = url.UserPassword(machine.Login, machine.Password) + return nil +} diff --git a/api/internal/getter/netrc_test.go b/api/internal/getter/netrc_test.go new file mode 100644 index 00000000000..618367619ec --- /dev/null +++ b/api/internal/getter/netrc_test.go @@ -0,0 +1,63 @@ +package getter + +import ( + "net/url" + "testing" +) + +func TestAddAuthFromNetrc(t *testing.T) { + defer tempEnv(t, "NETRC", "./testdata/netrc/basic")() + + u, err := url.Parse("http://example.com") + if err != nil { + t.Fatalf("err: %s", err) + } + + if err := addAuthFromNetrc(u); err != nil { + t.Fatalf("err: %s", err) + } + + expected := "http://foo:bar@example.com" + actual := u.String() + if expected != actual { + t.Fatalf("Mismatch: %q != %q", actual, expected) + } +} + +func TestAddAuthFromNetrc_hasAuth(t *testing.T) { + defer tempEnv(t, "NETRC", "./testdata/netrc/basic")() + + u, err := url.Parse("http://username:password@example.com") + if err != nil { + t.Fatalf("err: %s", err) + } + + expected := u.String() + if err := addAuthFromNetrc(u); err != nil { + t.Fatalf("err: %s", err) + } + + actual := u.String() + if expected != actual { + t.Fatalf("Mismatch: %q != %q", actual, expected) + } +} + +func TestAddAuthFromNetrc_hasUsername(t *testing.T) { + defer tempEnv(t, "NETRC", "./testdata/netrc/basic")() + + u, err := url.Parse("http://username@example.com") + if err != nil { + t.Fatalf("err: %s", err) + } + + expected := u.String() + if err := addAuthFromNetrc(u); err != nil { + t.Fatalf("err: %s", err) + } + + actual := u.String() + if expected != actual { + t.Fatalf("Mismatch: %q != %q", actual, expected) + } +} diff --git a/api/internal/getter/source.go b/api/internal/getter/source.go new file mode 100644 index 00000000000..dab6d400cb7 --- /dev/null +++ b/api/internal/getter/source.go @@ -0,0 +1,75 @@ +package getter + +import ( + "fmt" + "path/filepath" + "strings" +) + +// SourceDirSubdir takes a source URL and returns a tuple of the URL without +// the subdir and the subdir. +// +// ex: +// dom.com/path/?q=p => dom.com/path/?q=p, "" +// proto://dom.com/path//*?q=p => proto://dom.com/path?q=p, "*" +// proto://dom.com/path//path2?q=p => proto://dom.com/path?q=p, "path2" +// +func SourceDirSubdir(src string) (string, string) { + + // URL might contains another url in query parameters + stop := len(src) + if idx := strings.Index(src, "?"); idx > -1 { + stop = idx + } + + // Calculate an offset to avoid accidentally marking the scheme + // as the dir. + var offset int + if idx := strings.Index(src[:stop], "://"); idx > -1 { + offset = idx + 3 + } + + // First see if we even have an explicit subdir + idx := strings.Index(src[offset:stop], "//") + if idx == -1 { + return src, "" + } + + idx += offset + subdir := src[idx+2:] + src = src[:idx] + + // Next, check if we have query parameters and push them onto the + // URL. + if idx = strings.Index(subdir, "?"); idx > -1 { + query := subdir[idx:] + subdir = subdir[:idx] + src += query + } + + return src, subdir +} + +// SubdirGlob returns the actual subdir with globbing processed. 
+// +// dst should be a destination directory that is already populated (the +// download is complete) and subDir should be the set subDir. If subDir +// is an empty string, this returns an empty string. +// +// The returned path is the full absolute path. +func SubdirGlob(dst, subDir string) (string, error) { + matches, err := filepath.Glob(filepath.Join(dst, subDir)) + if err != nil { + return "", err + } + + if len(matches) == 0 { + return "", fmt.Errorf("subdir %q not found", subDir) + } + + if len(matches) > 1 { + return "", fmt.Errorf("subdir %q matches multiple paths", subDir) + } + + return matches[0], nil +} diff --git a/api/internal/getter/source_test.go b/api/internal/getter/source_test.go new file mode 100644 index 00000000000..c65f50f1b11 --- /dev/null +++ b/api/internal/getter/source_test.go @@ -0,0 +1,106 @@ +package getter + +import ( + "io/ioutil" + "os" + "path/filepath" + "testing" +) + +func TestSourceDirSubdir(t *testing.T) { + cases := []struct { + Input string + Dir, Sub string + }{ + { + "hashicorp.com", + "hashicorp.com", "", + }, + { + "hashicorp.com//foo", + "hashicorp.com", "foo", + }, + { + "hashicorp.com//foo?bar=baz", + "hashicorp.com?bar=baz", "foo", + }, + { + "https://hashicorp.com/path//*?archive=foo", + "https://hashicorp.com/path?archive=foo", "*", + }, + { + "https://hashicorp.com/path?checksum=file:http://url.com/....iso.sha256", + "https://hashicorp.com/path?checksum=file:http://url.com/....iso.sha256", "", + }, + { + "https://hashicorp.com/path//*?checksum=file:http://url.com/....iso.sha256", + "https://hashicorp.com/path?checksum=file:http://url.com/....iso.sha256", "*", + }, + { + "file://foo//bar", + "file://foo", "bar", + }, + } + + for i, tc := range cases { + adir, asub := SourceDirSubdir(tc.Input) + if adir != tc.Dir { + t.Fatalf("%d: bad dir: %#v", i, adir) + } + if asub != tc.Sub { + t.Fatalf("%d: bad sub: %#v", i, asub) + } + } +} + +func TestSourceSubdirGlob(t *testing.T) { + td, err := ioutil.TempDir("", "subdir-glob") + if err != nil { + t.Fatal(err) + } + defer os.RemoveAll(td) + + if err := os.Mkdir(filepath.Join(td, "subdir"), 0755); err != nil { + t.Fatal(err) + } + + if err := os.Mkdir(filepath.Join(td, "subdir/one"), 0755); err != nil { + t.Fatal(err) + } + + if err := os.Mkdir(filepath.Join(td, "subdir/two"), 0755); err != nil { + t.Fatal(err) + } + + subdir := filepath.Join(td, "subdir") + + // match the exact directory + res, err := SubdirGlob(td, "subdir") + if err != nil { + t.Fatal(err) + } + if res != subdir { + t.Fatalf(`expected %q, got: %q`, subdir, res) + } + + // single match from a wildcard + res, err = SubdirGlob(td, "*") + if err != nil { + t.Fatal(err) + } + if res != subdir { + t.Fatalf(`expected %q, got: %q`, subdir, res) + } + + // multiple matches + res, err = SubdirGlob(td, "subdir/*") + if err == nil { + t.Fatalf("expected an error for multiple matches, got %q", res) + } + + // non-existent + res, err = SubdirGlob(td, "foo") + if err == nil { + t.Fatalf("expected an error for a non-existent subdir, got %q", res) + } +} diff --git a/api/internal/getter/storage.go b/api/internal/getter/storage.go new file mode 100644 index 00000000000..2bc6b9ec331 --- /dev/null +++ b/api/internal/getter/storage.go @@ -0,0 +1,13 @@ +package getter + +// Storage is an interface that knows how to look up downloaded directories +// as well as download and update directories from their sources into the +// proper location. +type Storage interface { + // Dir returns the directory on local disk where the directory source + // can be loaded from.
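+ // The returned bool reports whether that directory already exists on disk.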
+ Dir(string) (string, bool, error) + + // Get will download and optionally update the given directory. + Get(string, string, bool) error +} diff --git a/api/internal/getter/testdata/archive-rooted-multi/archive.tar.gz b/api/internal/getter/testdata/archive-rooted-multi/archive.tar.gz new file mode 100644 index 0000000000000000000000000000000000000000..faa7cea44b54d275fe5524b1753a855c88535813 GIT binary patch literal 210 zcmV;@04@I?iwFQ>D5_Zi1MSt#3W7ishT+y-#aw~TztQjxTnl6-4PFroHHbN?6q&x_hwX4zn7EXO^<$4L{ zrf%Ej%YBcguG@v@d>%?<=roHW<^Mi;SA=KC@s9XCytPaD`&s@@*_gk~(ahYR=RX90 z+`9|xCH?<8>jGFY|E)59nEw^?e-l&wITyf+`9J3G;v!)FZ-My_0000000000006(? M1-c8Wy8tKv0H{@EKmY&$ literal 0 HcmV?d00001 diff --git a/api/internal/getter/testdata/archive-rooted/archive.tar.gz b/api/internal/getter/testdata/archive-rooted/archive.tar.gz new file mode 100644 index 0000000000000000000000000000000000000000..038bdf847a401deaee21693ead822452c37fe2c6 GIT binary patch literal 173 zcmb2|=3r1zTNTN`{Pv-cch_>M@+bc}&gFiU5BppFw&;J5@|j!nzuSxN W|6R((fDG literal 0 HcmV?d00001 diff --git a/api/internal/getter/testdata/archive.tar.gz b/api/internal/getter/testdata/archive.tar.gz new file mode 100644 index 0000000000000000000000000000000000000000..999e5fc9d6974cb108aa94abadac6e2aecfac0dc GIT binary patch literal 141 zcmb2|=3qGQG&PKY`RxTqu4V_G*2L?!u6_YV$#X3GJ=j^-Ug-NCzIsRYLT=60|F>wZ`SUo>_I@-|6W0$D QtU3%5&rQtC(0BXk*g#Z8m literal 0 HcmV?d00001 diff --git a/api/internal/getter/testdata/basic-hg/.hg/last-message.txt b/api/internal/getter/testdata/basic-hg/.hg/last-message.txt new file mode 100644 index 00000000000..86df9aae0f8 --- /dev/null +++ b/api/internal/getter/testdata/basic-hg/.hg/last-message.txt @@ -0,0 +1,2 @@ +add file + diff --git a/api/internal/getter/testdata/basic-hg/.hg/requires b/api/internal/getter/testdata/basic-hg/.hg/requires new file mode 100644 index 00000000000..f634f664bf3 --- /dev/null +++ b/api/internal/getter/testdata/basic-hg/.hg/requires @@ -0,0 +1,4 @@ +dotencode +fncache +revlogv1 +store diff --git a/api/internal/getter/testdata/basic-hg/.hg/store/00changelog.i b/api/internal/getter/testdata/basic-hg/.hg/store/00changelog.i new file mode 100644 index 0000000000000000000000000000000000000000..6da1bc1e97dc6dad8799cbab64e2ea8a9273e438 GIT binary patch literal 528 zcmZQzWME`~fC3;@4P`U@hk`rnu9x~osQK(%Ix$Ifsf%*CLG?wL8W3GE$Mm!#7qcRd z!^OSJKd?;72omwqEV=B=^>}vhYRG!~=>ejxFxg7V^;JV-L`jXd&PhOer=+rTh@j}%%hBW{9?W+v?SN{IR z{@{bE57=&yOBoo7fp{vI#Q^aG$jQgzX7-lU|K9k|T86DEAkBO8@7N_!HDIb@PI3YR zv#)^P79V5JjV5N>P0anhjJ6qgZ}Zq_;$`M-WN75M$-vNWlaJ394$K# z%*;#-O*U-UU|?zE~8+H~68-ISJn6r11$%Jn~3hUUC z%+xn%FqHDjM}qBr3?_4dWDyWEK`}U(XR5ITRG&>V=l&_hGdC#bY_o~bez0mNQ87pK zgf$N@+y2{m{(Q$;`N}?Z3m1{Z?3)#zt~H&jT(g%UNz*NQ Kz5P)IrG)?+$-hnj literal 0 HcmV?d00001 diff --git a/api/internal/getter/testdata/basic-hg/.hg/store/00manifest.i b/api/internal/getter/testdata/basic-hg/.hg/store/00manifest.i new file mode 100644 index 0000000000000000000000000000000000000000..48ce4c674b153195539f398ff1e1eb6aea7bda89 GIT binary patch literal 376 zcmZQzWME`~03#q}2xT+;hXRIf+oy9R`GtjZS|7jmaf#B=uR&8JsyZaZ;PC)>%p#d_9?E$&b?Fbd>K5VinW9-mZ{n3tRZc7ugMN~&p! 
zp|ORzsfDSDVX|qOfvJ(1X<~|@iAiFTWm=+zS&Csw62uKC|%t=mYU_QL%PtD#*T|#Wao1Z*!=}z(P-jZ_J srFo0UX_aE<#UkBHRGLGKPpbrbE0~yU@GyMNl*#nSXF?u>nHJwY03C2?ssI20 literal 0 HcmV?d00001 diff --git a/api/internal/getter/testdata/basic-hg/.hg/store/data/foo.txt.i b/api/internal/getter/testdata/basic-hg/.hg/store/data/foo.txt.i new file mode 100644 index 0000000000000000000000000000000000000000..974eb7aede8a04f08905786459e9c21251b121d3 GIT binary patch literal 71 zcmZQzWME`~0Cph724yq-hXS`VD^B(wUGR5yp>FqvXR{+C139ik)qtr|kJOx;d@caL CG!@7I literal 0 HcmV?d00001 diff --git a/api/internal/getter/testdata/basic-hg/.hg/store/data/main.tf.i b/api/internal/getter/testdata/basic-hg/.hg/store/data/main.tf.i new file mode 100644 index 0000000000000000000000000000000000000000..f45ddc33f19db6ef2a8f723ce4e36e10db12a436 GIT binary patch literal 112 zcmZQzWME`~00SVU4`nm_hk{)i%ZqQMPS2Zr>zb=wCx2OxuW=Ym4TvsPR`5v8$;s#9 n%FRzH%}G^IO3TkzQmE!q0D|KD(xT*41zQCrJ$;ZcS1lI+ym=or literal 0 HcmV?d00001 diff --git a/api/internal/getter/testdata/basic-hg/.hg/store/data/main__branch.tf.i b/api/internal/getter/testdata/basic-hg/.hg/store/data/main__branch.tf.i new file mode 100644 index 0000000000000000000000000000000000000000..a6bdf46f1091be151835fda73ea8ccb9f8c70c21 GIT binary patch literal 64 ocmZQzWME{#1dRWoUQPrLGn6+WD*P;%#K*02?S1MgRZ+ literal 0 HcmV?d00001 diff --git a/api/internal/getter/testdata/basic-hg/.hg/store/fncache b/api/internal/getter/testdata/basic-hg/.hg/store/fncache new file mode 100644 index 00000000000..b634e1f3abe --- /dev/null +++ b/api/internal/getter/testdata/basic-hg/.hg/store/fncache @@ -0,0 +1,3 @@ +data/main.tf.i +data/foo.txt.i +data/main_branch.tf.i diff --git a/api/internal/getter/testdata/basic-hg/.hg/store/phaseroots b/api/internal/getter/testdata/basic-hg/.hg/store/phaseroots new file mode 100644 index 00000000000..a08565294c2 --- /dev/null +++ b/api/internal/getter/testdata/basic-hg/.hg/store/phaseroots @@ -0,0 +1 @@ +1 dcaed7754d58264cb9a5916215a5442377307bd1 diff --git a/api/internal/getter/testdata/basic-hg/.hg/store/undo b/api/internal/getter/testdata/basic-hg/.hg/store/undo new file mode 100644 index 0000000000000000000000000000000000000000..4fae5607035311eebbdaf33e347ec9edcda6e465 GIT binary patch literal 52 zcmYdEEJ@T)%g@&xT(e P9R`W#CT8a8m81ax#aSAk literal 0 HcmV?d00001 diff --git a/api/internal/getter/testdata/basic-hg/foo.txt b/api/internal/getter/testdata/basic-hg/foo.txt new file mode 100644 index 00000000000..e965047ad7c --- /dev/null +++ b/api/internal/getter/testdata/basic-hg/foo.txt @@ -0,0 +1 @@ +Hello diff --git a/api/internal/getter/testdata/basic-hg/main.tf b/api/internal/getter/testdata/basic-hg/main.tf new file mode 100644 index 00000000000..3830637158f --- /dev/null +++ b/api/internal/getter/testdata/basic-hg/main.tf @@ -0,0 +1,5 @@ +# Hello + +module "foo" { + source = "./foo" +} diff --git a/api/internal/getter/testdata/basic-parent/a/a.tf b/api/internal/getter/testdata/basic-parent/a/a.tf new file mode 100644 index 00000000000..b9b44f46403 --- /dev/null +++ b/api/internal/getter/testdata/basic-parent/a/a.tf @@ -0,0 +1,3 @@ +module "b" { + source = "../c" +} diff --git a/api/internal/getter/testdata/basic-parent/c/c.tf b/api/internal/getter/testdata/basic-parent/c/c.tf new file mode 100644 index 00000000000..fec56017dc1 --- /dev/null +++ b/api/internal/getter/testdata/basic-parent/c/c.tf @@ -0,0 +1 @@ +# Hello diff --git a/api/internal/getter/testdata/basic-parent/main.tf b/api/internal/getter/testdata/basic-parent/main.tf new file mode 100644 index 00000000000..2326ee22acc --- /dev/null 
+++ b/api/internal/getter/testdata/basic-parent/main.tf @@ -0,0 +1,3 @@ +module "a" { + source = "./a" +} diff --git a/api/internal/getter/testdata/basic-subdir/foo/sub/baz/main.tf b/api/internal/getter/testdata/basic-subdir/foo/sub/baz/main.tf new file mode 100644 index 00000000000..e69de29bb2d diff --git a/api/internal/getter/testdata/basic-subdir/foo/sub/main.tf b/api/internal/getter/testdata/basic-subdir/foo/sub/main.tf new file mode 100644 index 00000000000..22905dd531c --- /dev/null +++ b/api/internal/getter/testdata/basic-subdir/foo/sub/main.tf @@ -0,0 +1,3 @@ +module "bar" { + source = "./baz" +} diff --git a/api/internal/getter/testdata/basic-subdir/main.tf b/api/internal/getter/testdata/basic-subdir/main.tf new file mode 100644 index 00000000000..19fb5dde7f8 --- /dev/null +++ b/api/internal/getter/testdata/basic-subdir/main.tf @@ -0,0 +1,3 @@ +module "foo" { + source = "./foo//sub" +} diff --git a/api/internal/getter/testdata/basic-tgz/main.tf b/api/internal/getter/testdata/basic-tgz/main.tf new file mode 100644 index 00000000000..fec56017dc1 --- /dev/null +++ b/api/internal/getter/testdata/basic-tgz/main.tf @@ -0,0 +1 @@ +# Hello diff --git a/api/internal/getter/testdata/basic/foo/main.tf b/api/internal/getter/testdata/basic/foo/main.tf new file mode 100644 index 00000000000..fec56017dc1 --- /dev/null +++ b/api/internal/getter/testdata/basic/foo/main.tf @@ -0,0 +1 @@ +# Hello diff --git a/api/internal/getter/testdata/basic/main.tf b/api/internal/getter/testdata/basic/main.tf new file mode 100644 index 00000000000..3830637158f --- /dev/null +++ b/api/internal/getter/testdata/basic/main.tf @@ -0,0 +1,5 @@ +# Hello + +module "foo" { + source = "./foo" +} diff --git a/api/internal/getter/testdata/basic/subdir/sub.tf b/api/internal/getter/testdata/basic/subdir/sub.tf new file mode 100644 index 00000000000..e69de29bb2d diff --git a/api/internal/getter/testdata/checksum-file/content.txt b/api/internal/getter/testdata/checksum-file/content.txt new file mode 100644 index 00000000000..13c6d88677f --- /dev/null +++ b/api/internal/getter/testdata/checksum-file/content.txt @@ -0,0 +1 @@ +I am a file with some content diff --git a/api/internal/getter/testdata/checksum-file/md5-bsd-bad.sum b/api/internal/getter/testdata/checksum-file/md5-bsd-bad.sum new file mode 100644 index 00000000000..807f6aa162d --- /dev/null +++ b/api/internal/getter/testdata/checksum-file/md5-bsd-bad.sum @@ -0,0 +1 @@ +MD5 (content.txt) = bad diff --git a/api/internal/getter/testdata/checksum-file/md5-bsd-wrong.sum b/api/internal/getter/testdata/checksum-file/md5-bsd-wrong.sum new file mode 100644 index 00000000000..8cc6007f147 --- /dev/null +++ b/api/internal/getter/testdata/checksum-file/md5-bsd-wrong.sum @@ -0,0 +1 @@ +MD5 (content.txt) = 074729f0ccb41a391fb646c38f86ea53 diff --git a/api/internal/getter/testdata/checksum-file/md5-bsd.sum b/api/internal/getter/testdata/checksum-file/md5-bsd.sum new file mode 100644 index 00000000000..bc1c6ad3e4d --- /dev/null +++ b/api/internal/getter/testdata/checksum-file/md5-bsd.sum @@ -0,0 +1 @@ +MD5 (content.txt) = 074729f0ccb41a391fb646c38f86ea54 diff --git a/api/internal/getter/testdata/checksum-file/md5-p.sum b/api/internal/getter/testdata/checksum-file/md5-p.sum new file mode 100644 index 00000000000..d6c61d90fac --- /dev/null +++ b/api/internal/getter/testdata/checksum-file/md5-p.sum @@ -0,0 +1 @@ +074729f0ccb41a391fb646c38f86ea54 content.txt diff --git a/api/internal/getter/testdata/checksum-file/sha1-p.sum b/api/internal/getter/testdata/checksum-file/sha1-p.sum new file 
mode 100644 index 00000000000..5212fddbe34 --- /dev/null +++ b/api/internal/getter/testdata/checksum-file/sha1-p.sum @@ -0,0 +1 @@ +e2c7dc83ac8aa7f181314387f6dfb132cd117e3a ?content.txt diff --git a/api/internal/getter/testdata/checksum-file/sha1.sum b/api/internal/getter/testdata/checksum-file/sha1.sum new file mode 100644 index 00000000000..0910848521b --- /dev/null +++ b/api/internal/getter/testdata/checksum-file/sha1.sum @@ -0,0 +1 @@ +e2c7dc83ac8aa7f181314387f6dfb132cd117e3a diff --git a/api/internal/getter/testdata/checksum-file/sha256-p.sum b/api/internal/getter/testdata/checksum-file/sha256-p.sum new file mode 100644 index 00000000000..f7828657160 --- /dev/null +++ b/api/internal/getter/testdata/checksum-file/sha256-p.sum @@ -0,0 +1 @@ +47afcdfff05a6e5d9db5f6c6df2140f04a6e7422d7ad7f6a7006a4f5a78570e4 ?content.txt diff --git a/api/internal/getter/testdata/checksum-file/sha512-p.sum b/api/internal/getter/testdata/checksum-file/sha512-p.sum new file mode 100644 index 00000000000..c99a7dc3b06 --- /dev/null +++ b/api/internal/getter/testdata/checksum-file/sha512-p.sum @@ -0,0 +1 @@ +060a8cc41c501e41b4537029661090597aeb4366702ac3cae8959f24b2c49005d6bd339833ebbeb481b127ac822d70b937c1637c8d0eaf81b6979d4c1d75d0e1 ?content.txt diff --git a/api/internal/getter/testdata/child/foo/bar/main.tf b/api/internal/getter/testdata/child/foo/bar/main.tf new file mode 100644 index 00000000000..df592750182 --- /dev/null +++ b/api/internal/getter/testdata/child/foo/bar/main.tf @@ -0,0 +1,2 @@ +# Hello + diff --git a/api/internal/getter/testdata/child/foo/main.tf b/api/internal/getter/testdata/child/foo/main.tf new file mode 100644 index 00000000000..548d21b99da --- /dev/null +++ b/api/internal/getter/testdata/child/foo/main.tf @@ -0,0 +1,5 @@ +# Hello + +module "bar" { + source = "./bar" +} diff --git a/api/internal/getter/testdata/child/main.tf b/api/internal/getter/testdata/child/main.tf new file mode 100644 index 00000000000..3830637158f --- /dev/null +++ b/api/internal/getter/testdata/child/main.tf @@ -0,0 +1,5 @@ +# Hello + +module "foo" { + source = "./foo" +} diff --git a/api/internal/getter/testdata/decompress-bz2/single.bz2 b/api/internal/getter/testdata/decompress-bz2/single.bz2 new file mode 100644 index 0000000000000000000000000000000000000000..63d21a073723dfe5f5c1a12066087fe670213ab9 GIT binary patch literal 40 wcmZ>Y%CIzaj8qGbEc>?kDFXw;K?Vi^Mur6piZdizBw|mBJalrree3)b00(*wbpQYW literal 0 HcmV?d00001 diff --git a/api/internal/getter/testdata/decompress-gz/single.gz b/api/internal/getter/testdata/decompress-gz/single.gz new file mode 100644 index 0000000000000000000000000000000000000000..00754d02d1b766c6fe83562c04e4a1a3c389596d GIT binary patch literal 29 kcmb2|=HL+Yn;OQzoR*oB%HVza{BtIT6{$vbEDQ_`0DQX$Hvj+t literal 0 HcmV?d00001 diff --git a/api/internal/getter/testdata/decompress-tar/extended_header.tar b/api/internal/getter/testdata/decompress-tar/extended_header.tar new file mode 100644 index 0000000000000000000000000000000000000000..aed74f06a99600f4368bf0f93ab297d081ce74d8 GIT binary patch literal 10240 zcmeIyOK!s;5C&k@Jw5uwHyVHV zuk~h|gWs-SlaHye+c!?l7{h8$t?enOn2^#ct?HHHLg~S>&Fw$sbjQ#Ay0i+cS3{6ap z3>g#*XyF)iGfRt05{rON%SuWtO3BPamxl9!%F@!{JWK}EA0STxfdZz2!6}l068}jw eaU?K2NRNM`(fFrHWQ-a*8UmvsFd72T5C8xrgEKz> literal 0 HcmV?d00001 diff --git a/api/internal/getter/testdata/decompress-tar/unix_time_0.tar b/api/internal/getter/testdata/decompress-tar/unix_time_0.tar new file mode 100644 index 0000000000000000000000000000000000000000..67cb72e1aa3fb85ed20b5bbdeb6b79135d99584f GIT binary patch literal 
10240 zcmeIy(F(#K7=~dl#XHEMZr9_|gCGjr26gxChZeSz7|ft}{}bIDPM%MVr<{)CHQx`a zTA8^z@73Hioi}AGLsIf?DP_UhwxVL;Pis2^YX z4^v-h4@j`v@^{ua^8dy*de1*p`K9FFI#-_iwmC z3TK~333zkY%CIzaj8qGbjI7@Agn>aMfZ+fG3j+g(0)xRBR?TI?N$TA@mF8%8=kY5~ literal 0 HcmV?d00001 diff --git a/api/internal/getter/testdata/decompress-tbz2/multiple.tar.bz2 b/api/internal/getter/testdata/decompress-tbz2/multiple.tar.bz2 new file mode 100644 index 0000000000000000000000000000000000000000..1d59e6ad2ce760bf7ff834cee8570812ae501c24 GIT binary patch literal 166 zcmV;X09pS+T4*^jL0KkKS*=**^Z)>``;gHP2#`Pke}Di8Yi6DR1ONaiFaWqlG{j*V zU;qHdn2aM#000G2H8h%;lru)7C#jS5O`9*CnlxbJKvO7z7UyBRg2+OM-B@PTMx>`H zWZ)vlH64^mMhc_9WTiTM;^eF)#j7-}RthNzu1dCASsqclX3q}yyBF&hr=||A9vHbi UuDR^Hq9EA2k}1N3g=)typwQSw`2YX_ literal 0 HcmV?d00001 diff --git a/api/internal/getter/testdata/decompress-tbz2/ordering.tar.bz2 b/api/internal/getter/testdata/decompress-tbz2/ordering.tar.bz2 new file mode 100644 index 0000000000000000000000000000000000000000..46f0d814a6e18920c439ff7eef39b7bedc5fb548 GIT binary patch literal 248 zcmVOD-F4K(#MXu!&8#Bv_bi;JH=ZKGGC^f)Isdsi{!^-KYv^%{>s%x`!D*mO0S#*Wbg{9VZu;X*<)AdK{;By2NC#0hYyhCR~w{Pwah*C7LewukF%UHc4vzO8hz+9IHtv3LJQ!E(`&M))_)b(-tu-EZ2c*ZQKSU{X&+{@V9FCik+x?$Cc89r@HscY5K8 z$~!Z(lUK#RTPbfi--cuE^UL?F6YQgYy_eemG5Y`STmL=&|8M-q%zywgw}qcRy!Dnr HgMk46B&J9$ literal 0 HcmV?d00001 diff --git a/api/internal/getter/testdata/decompress-tgz/multiple_dir.tar.gz b/api/internal/getter/testdata/decompress-tgz/multiple_dir.tar.gz new file mode 100644 index 0000000000000000000000000000000000000000..452e6bef19f6217c11449ce77a70a432a9190076 GIT binary patch literal 194 zcmV;z06qU7iwFSozTH*;1MSpJ3c@fDhT*O~MNZJ>XPREYYlybAP!ZaAe`8xj1Pd-= z5#MJsn~+TMG0BTlHxyY?TI;GT)l~glwUxOpl(F7d&Q(rCY2%EQOqFTdnq9|m7-B2u zE*#ss@7p));#SKcmDqWJL{SMZ(#lb0000000000008)XH_0s6MgS-P0PK!n(*OVf literal 0 HcmV?d00001 diff --git a/api/internal/getter/testdata/decompress-tgz/ordering.tar.gz b/api/internal/getter/testdata/decompress-tgz/ordering.tar.gz new file mode 100644 index 0000000000000000000000000000000000000000..4f6dcc3b66bfe30061b47b5e07144afdbb7e4573 GIT binary patch literal 187 zcmV;s07U;EiwFSs_25_l1MQQ`3c@fDMZ5MZ0`BePG0Eoy7g-e1)+Ac|`;JJp3m*s# zMVzy_kOy;z%kA5^3o+IEW9@x&XtMX4?o!PP09a!LsdNZa9kD`cuudTwX$45D04o9W z+IY?>yNO`A?4N5E|{h0AN?;jgZkhQ-}N7^{daV%92)As pp%0b7VvZ*KdH=`!OV0Qvf8`YAuMKL>pJ5oLBm>PK6tVye0067xT!{bx literal 0 HcmV?d00001 diff --git a/api/internal/getter/testdata/decompress-tgz/outside_parent.tar.gz b/api/internal/getter/testdata/decompress-tgz/outside_parent.tar.gz new file mode 100644 index 0000000000000000000000000000000000000000..52cb14d0f88e3c1f9c67dfcf73c937936407535f GIT binary patch literal 192 zcmb2|=HQrSx-*LDe?fk-UP)pR!`sVy`I-zw+8&;ZaqYF9BKAtRg8keHe=nvh4EOf4 z#>(&rtIhfzxo=^7QPHe97JjRCegAlNdXm1@2Cuc1n|{vnN8 jr&}fb-%W8(Id3m`{Nu;hd<@8-;dPo|NzQQw4F(1PJX<=r literal 0 HcmV?d00001 diff --git a/api/internal/getter/testdata/decompress-txz/empty.tar.xz b/api/internal/getter/testdata/decompress-txz/empty.tar.xz new file mode 100644 index 0000000000000000000000000000000000000000..94451c0687f5871d650ec1e6dfc9fd00c032f6ac GIT binary patch literal 108 zcmV-y0F(dyH+ooF000E$*0e?f03iVu0001VFXf})C;tE}T>t=Y{r~@?xBo{zNELE9 zVNtk}DCVPv2l?*g9)i;^#X39zOR;{*id-@qWB>rK7!tEDRm?L00Z4#Q0002&vv0N!GYZUQ($2I#_b} z>oC8b$t$R`$L5H0txlV3Gj>X*7jOQ&v-S~xRZ+I?mmU+DN_)jdidSaZK1JO;z@xQ6 zu)zRW3@yQs#S#iJfyqh*8@T0;IH#3@>7JxKW^2e6i_G18X1Mw%c*eEd?^=Yx^VZPs zIgTybN-2N@X)m+k!1Ul#i>B5f0002BTz1gOS!vM#0jB|gPyhh1M9F)x#Ao{g00000 G1X)^tK3-M; literal 0 HcmV?d00001 diff --git 
a/api/internal/getter/testdata/decompress-txz/multiple_dir.tar.xz b/api/internal/getter/testdata/decompress-txz/multiple_dir.tar.xz new file mode 100644 index 0000000000000000000000000000000000000000..8de936c932d62d1c21daddf504ef15e6fcb76ab9 GIT binary patch literal 232 zcmVuvg$p>;GD6-*?fXk_4 zLp)$A(3sti>x9gailfrxDUVh_C+0rWf^Q#*N}bkddwrw?iLm)N7qK#9i{dPy&7kxWEjKG;S~A9T75{2rPr~))vmEO}%p@rTR$Bv|q%Bx?eo6L#%ohPT1 z__aqTq|yGI?x~}Sn?PGj=GnGjd@FD+5yzBm!qk6MKo%bQe^Z>xjevjhp^{@vp@_Vt z(omRw;C61&VoCeO3n43`_gjq5s4>DAQI*};4Y4sJsfMrM#Yx4+h<4-V-3OIy5C$ix zGBjv7pc+QVQO32_#&hFqUD;SnJm>%JA7)QEvv0N!GSFA+G`1uT>U5 za7vQNgao|r!+F=n80t5DLLSeS0L|^9a?Y2%4qgdJt07g7g7G~mxKA%Wq^21Q0O1_p)_{ill=8JN@Z^SK!QUtbetU^wR!1EUN}zRsM3 Q^4mVwG6K~wutY`y07|YBvj6}9 literal 0 HcmV?d00001 diff --git a/api/internal/getter/testdata/decompress-zip/empty.zip b/api/internal/getter/testdata/decompress-zip/empty.zip new file mode 100644 index 0000000000000000000000000000000000000000..15cb0ecb3e219d1701294bfdf0fe3f5cb5d208e7 GIT binary patch literal 22 NcmWIWW@Tf*000g10H*)| literal 0 HcmV?d00001 diff --git a/api/internal/getter/testdata/decompress-zip/multiple.zip b/api/internal/getter/testdata/decompress-zip/multiple.zip new file mode 100644 index 0000000000000000000000000000000000000000..a032181d43b89865241dd9e1d05dc5712706267d GIT binary patch literal 306 zcmWIWW@h1H0D%o5Ivy)hjp|r{Y!GH;kYPy6%tzw+5H!Mwun~+*a?H5gApx|AfdObG!;(f23+gIXh^x?C6yQy)QO;1KfUd)3 S6e}CZJ|-ZX2Bh0S90mY1;x;}2 literal 0 HcmV?d00001 diff --git a/api/internal/getter/testdata/decompress-zip/outside_parent.zip b/api/internal/getter/testdata/decompress-zip/outside_parent.zip new file mode 100644 index 0000000000000000000000000000000000000000..bafd7d6f361b997b579bdda1feb4c1269a78ff47 GIT binary patch literal 237 zcmWIWW@Zs#0D&!$RX%Erf*#U9HVAVAv7VlON@{MtUO|4cvZ8)cW}bd=23KlwM!tea zYEDkRLU?{rP6}55iWxv{aKH{^6N4fcr!AIFypogrl+A1#6q$dT_a2(hz6Mt Z0}YKXKqlNaRyL3X6AY!-u%r>hf|$q(F%iwQ0B=?{kQzoH^a9dOAPxfna#|Ph literal 0 HcmV?d00001 diff --git a/api/internal/getter/testdata/decompress-zip/subdir.zip b/api/internal/getter/testdata/decompress-zip/subdir.zip new file mode 100644 index 0000000000000000000000000000000000000000..9fafb407d5aadff1697c141514b8042eab118b4f GIT binary patch literal 466 zcmWIWW@h1H0D-kdr5*|fR%UEKHVCsa$S|a3=A;^ihHx@4UrYTE_7#XrE4UdLS-vtd zFtCUK6=kI6V-N;xL~{iX!hqzA z%$yXkDffU50bw*#V2)vAl4Hi@Cke3AfX-)F(gGeFia#fkA-b wt)nZ@D7Zg?hGF$1!Z1u%VHniUKoha}H^7?}6j2P!K==wsUjlI$0OR^sn*aa+ literal 0 HcmV?d00001 diff --git a/api/internal/getter/testdata/decompress-zip/subdir_empty.zip b/api/internal/getter/testdata/decompress-zip/subdir_empty.zip new file mode 100644 index 0000000000000000000000000000000000000000..d95cb90d2a3cc524cfafedcf83bb38b60ca0b43a GIT binary patch literal 308 zcmWIWW@h1H0D-kdr5*|fR%UEKHVCsa$S|a3=A;^ihHx@4UrYTEmXr1&th9ogfsy4a zBLf4A2vAW*YEDi*R{-3Q-#|lv!f?P2G^DsRDJ8Q=A8bq}(1>D8V*zabkcsD=26fn1FCDke&$Q GFaQ8720M)a literal 0 HcmV?d00001 diff --git a/api/internal/getter/testdata/detect-file-symlink-pwd/real/hello.txt b/api/internal/getter/testdata/detect-file-symlink-pwd/real/hello.txt new file mode 100644 index 00000000000..e69de29bb2d diff --git a/api/internal/getter/testdata/detect-file-symlink-pwd/syml/pwd b/api/internal/getter/testdata/detect-file-symlink-pwd/syml/pwd new file mode 120000 index 00000000000..05b44e001ef --- /dev/null +++ b/api/internal/getter/testdata/detect-file-symlink-pwd/syml/pwd @@ -0,0 +1 @@ +../real \ No newline at end of file diff --git a/api/internal/getter/testdata/dup/foo/main.tf b/api/internal/getter/testdata/dup/foo/main.tf new file mode 
100644 index 00000000000..e69de29bb2d diff --git a/api/internal/getter/testdata/dup/main.tf b/api/internal/getter/testdata/dup/main.tf new file mode 100644 index 00000000000..98efd6e4ff2 --- /dev/null +++ b/api/internal/getter/testdata/dup/main.tf @@ -0,0 +1,7 @@ +module "foo" { + source = "./foo" +} + +module "foo" { + source = "./foo" +} diff --git a/api/internal/getter/testdata/netrc/basic b/api/internal/getter/testdata/netrc/basic new file mode 100644 index 00000000000..574dd49ac56 --- /dev/null +++ b/api/internal/getter/testdata/netrc/basic @@ -0,0 +1,3 @@ +machine example.com +login foo +password bar diff --git a/api/internal/getter/testdata/validate-bad-output-to-module/child/main.tf b/api/internal/getter/testdata/validate-bad-output-to-module/child/main.tf new file mode 100644 index 00000000000..4d68c80b3e7 --- /dev/null +++ b/api/internal/getter/testdata/validate-bad-output-to-module/child/main.tf @@ -0,0 +1 @@ +variable "memory" { default = "foo" } diff --git a/api/internal/getter/testdata/validate-bad-output-to-module/main.tf b/api/internal/getter/testdata/validate-bad-output-to-module/main.tf new file mode 100644 index 00000000000..4b627bbe57a --- /dev/null +++ b/api/internal/getter/testdata/validate-bad-output-to-module/main.tf @@ -0,0 +1,8 @@ +module "child" { + source = "./child" +} + +module "child2" { + source = "./child" + memory = "${module.child.memory_max}" +} diff --git a/api/internal/getter/testdata/validate-bad-output/child/main.tf b/api/internal/getter/testdata/validate-bad-output/child/main.tf new file mode 100644 index 00000000000..e69de29bb2d diff --git a/api/internal/getter/testdata/validate-bad-output/main.tf b/api/internal/getter/testdata/validate-bad-output/main.tf new file mode 100644 index 00000000000..a19233e12d0 --- /dev/null +++ b/api/internal/getter/testdata/validate-bad-output/main.tf @@ -0,0 +1,7 @@ +module "child" { + source = "./child" +} + +resource "aws_instance" "foo" { + memory = "${module.child.memory}" +} diff --git a/api/internal/getter/testdata/validate-bad-var/child/main.tf b/api/internal/getter/testdata/validate-bad-var/child/main.tf new file mode 100644 index 00000000000..e69de29bb2d diff --git a/api/internal/getter/testdata/validate-bad-var/main.tf b/api/internal/getter/testdata/validate-bad-var/main.tf new file mode 100644 index 00000000000..7cc785d1782 --- /dev/null +++ b/api/internal/getter/testdata/validate-bad-var/main.tf @@ -0,0 +1,5 @@ +module "child" { + source = "./child" + + memory = "foo" +} diff --git a/api/internal/getter/testdata/validate-child-bad/child/main.tf b/api/internal/getter/testdata/validate-child-bad/child/main.tf new file mode 100644 index 00000000000..93b36540331 --- /dev/null +++ b/api/internal/getter/testdata/validate-child-bad/child/main.tf @@ -0,0 +1,3 @@ +# Duplicate resources +resource "aws_instance" "foo" {} +resource "aws_instance" "foo" {} diff --git a/api/internal/getter/testdata/validate-child-bad/main.tf b/api/internal/getter/testdata/validate-child-bad/main.tf new file mode 100644 index 00000000000..813f7ef8ec5 --- /dev/null +++ b/api/internal/getter/testdata/validate-child-bad/main.tf @@ -0,0 +1,3 @@ +module "foo" { + source = "./child" +} diff --git a/api/internal/getter/testdata/validate-child-good/child/main.tf b/api/internal/getter/testdata/validate-child-good/child/main.tf new file mode 100644 index 00000000000..2cfd2a80f54 --- /dev/null +++ b/api/internal/getter/testdata/validate-child-good/child/main.tf @@ -0,0 +1,3 @@ +variable "memory" {} + +output "result" {} diff --git 
a/api/internal/getter/testdata/validate-child-good/main.tf b/api/internal/getter/testdata/validate-child-good/main.tf new file mode 100644 index 00000000000..5f3ad8da5b9 --- /dev/null +++ b/api/internal/getter/testdata/validate-child-good/main.tf @@ -0,0 +1,8 @@ +module "child" { + source = "./child" + memory = "1G" +} + +resource "aws_instance" "foo" { + memory = "${module.child.result}" +} diff --git a/api/internal/getter/testdata/validate-required-var/child/main.tf b/api/internal/getter/testdata/validate-required-var/child/main.tf new file mode 100644 index 00000000000..618ae3c42e2 --- /dev/null +++ b/api/internal/getter/testdata/validate-required-var/child/main.tf @@ -0,0 +1 @@ +variable "memory" {} diff --git a/api/internal/getter/testdata/validate-required-var/main.tf b/api/internal/getter/testdata/validate-required-var/main.tf new file mode 100644 index 00000000000..0f6991c536c --- /dev/null +++ b/api/internal/getter/testdata/validate-required-var/main.tf @@ -0,0 +1,3 @@ +module "child" { + source = "./child" +} diff --git a/api/internal/getter/testdata/validate-root-bad/main.tf b/api/internal/getter/testdata/validate-root-bad/main.tf new file mode 100644 index 00000000000..93b36540331 --- /dev/null +++ b/api/internal/getter/testdata/validate-root-bad/main.tf @@ -0,0 +1,3 @@ +# Duplicate resources +resource "aws_instance" "foo" {} +resource "aws_instance" "foo" {} diff --git a/api/internal/getter/util_test.go b/api/internal/getter/util_test.go new file mode 100644 index 00000000000..a3e7d829cb8 --- /dev/null +++ b/api/internal/getter/util_test.go @@ -0,0 +1,49 @@ +package getter + +import ( + "io" + "io/ioutil" + "os" + "strings" + "testing" +) + +// tempEnv sets the env var temporarily and returns a function that should +// be deferred to clean it up. +func tempEnv(t *testing.T, k, v string) func() { + old := os.Getenv(k) + + // Set env + if err := os.Setenv(k, v); err != nil { + t.Fatalf("err: %s", err) + } + + // Easy cleanup + return func() { + if err := os.Setenv(k, old); err != nil { + t.Fatalf("err: %s", err) + } + } +} + +// tempFileContents writes a temporary file and returns the path and a function +// to clean it up. +func tempFileContents(t *testing.T, contents string) (string, func()) { + tf, err := ioutil.TempFile("", "getter") + if err != nil { + t.Fatalf("err: %s", err) + } + + if _, err := io.Copy(tf, strings.NewReader(contents)); err != nil { + t.Fatalf("err: %s", err) + } + + tf.Close() + + path := tf.Name() + return path, func() { + if err := os.Remove(path); err != nil { + t.Fatalf("err: %s", err) + } + } +} diff --git a/api/loader/loader.go b/api/loader/loader.go index 431924fe709..57149be7864 100644 --- a/api/loader/loader.go +++ b/api/loader/loader.go @@ -12,9 +12,8 @@ import ( "sigs.k8s.io/kustomize/api/filesys" "sigs.k8s.io/kustomize/api/ifc" + "sigs.k8s.io/kustomize/api/internal/getter" "sigs.k8s.io/kustomize/api/internal/git" - - getter "github.com/hashicorp/go-getter" ) // NewLoader returns a Loader pointed at the given target.
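For reviewers, here is a rough sketch of how the copied package's exported pieces fit together once this patch lands. It is not part of the patch: the cache directory, storage key, source URL, and the fetchSource helper are made up for illustration, and the snippet assumes it lives somewhere under sigs.k8s.io/kustomize/api, since the package is internal and only code in that module (such as the loader above) can import it.

package example

import (
	"sigs.k8s.io/kustomize/api/internal/getter"
)

// fetchSource downloads src into a local cache and returns the directory
// holding the requested content, resolving an optional "//" subdirectory.
func fetchSource(src string) (string, error) {
	// Split e.g. "https://example.com/repo.tar.gz//modules/app" into the
	// base source and the "modules/app" subdirectory (empty if none given).
	base, subDir := getter.SourceDirSubdir(src)

	// FolderStorage implements the Storage interface declared in storage.go.
	store := &getter.FolderStorage{StorageDir: "/tmp/getter-cache"} // hypothetical cache dir
	key := "example-key"                                            // hypothetical storage key

	// Download the base source (without forcing an update), then locate it on disk.
	if err := store.Get(key, base, false); err != nil {
		return "", err
	}
	dst, _, err := store.Dir(key)
	if err != nil {
		return "", err
	}

	// Resolve the (possibly globbed) subdirectory inside the download.
	if subDir != "" {
		return getter.SubdirGlob(dst, subDir)
	}
	return dst, nil
}

This mirrors what the tests above do with testStorage and testModuleURL, just outside the test harness.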