From 1bc275acd619f49039140385f920e4314cee0353 Mon Sep 17 00:00:00 2001
From: Richard Maynard <richard.maynard@gmail.com>
Date: Sun, 16 Jun 2024 20:41:23 -0500
Subject: [PATCH] finish updating providers

- added -lockfile=readonly to terraform init
  - this cleans up the terraform init output significantly
  - this required ensuring the handling of the provider lockfile was
    exact in all cases
- added thorough tests
  - added a google-beta provider test
  - fixed test config related to an improper google-beta provider config
- removed commented-out / empty files
- updated pyproject.toml to reflect the post-1.2.0 group config
  - this minimizes the modules installed with a base poetry install
  - to install dev deps you must run `poetry install --with dev`
- removed unused dependencies from the old plugin package
  - requests
  - markupsafe
  - tenacity
- updated tfworker.util.terraform to only include public functions
  - split all private/helper functions into terraform_helpers.py
---
 .isort.cfg                                |   2 +-
 Makefile                                  |   6 +-
 poetry.lock                               |  31 +-
 pyproject.toml                            |  21 +-
 tests/commands/test_terraform.py          |   7 -
 tests/fixtures/test_config.yaml           |   2 +-
 tests/providers/test_aws.py               |  21 --
 tests/providers/test_google_beta.py       |   8 +
 tests/test_plugins.py                     | 154 --------
 tests/util/test_system.py                 |   8 -
 tests/util/test_terraform_util.py         |  98 -----
 tests/util/test_util_terraform.py         | 433 ++++++++++++++++++++++
 tests/util/test_util_terraform_helpers.py | 244 ++++++++++++
 tfworker/authenticators/__init__.py       |   4 +-
 tfworker/authenticators/google.py         |   4 +
 tfworker/commands/base.py                 |   1 -
 tfworker/commands/terraform.py            |   2 +-
 tfworker/definitions.py                   |   4 +-
 tfworker/plugins.py                       | 250 -------------
 tfworker/providers/__init__.py            |   8 -
 tfworker/util/__init__.py                 |  13 -
 tfworker/util/terraform.py                | 220 ++---------
 tfworker/util/terraform_helpers.py        | 179 +++++++++
 23 files changed, 926 insertions(+), 794 deletions(-)
 delete mode 100644 tests/providers/test_aws.py
 create mode 100644 tests/providers/test_google_beta.py
 delete mode 100644 tests/test_plugins.py
 delete mode 100644 tests/util/test_terraform_util.py
 create mode 100644 tests/util/test_util_terraform.py
 create mode 100644 tests/util/test_util_terraform_helpers.py
 delete mode 100644 tfworker/plugins.py
 delete mode 100644 tfworker/providers/__init__.py
 delete mode 100644 tfworker/util/__init__.py
 create mode 100644 tfworker/util/terraform_helpers.py

diff --git a/.isort.cfg b/.isort.cfg
index 0917a18..4541644 100644
--- a/.isort.cfg
+++ b/.isort.cfg
@@ -1,3 +1,3 @@
 [settings]
-known_third_party = atlassian,boto3,botocore,click,deepdiff,google,hcl2,jinja2,mergedeep,moto,pytest,yaml
+known_third_party = atlassian,boto3,botocore,click,deepdiff,google,hcl2,jinja2,mergedeep,moto,pydantic,pytest,yaml
 profile = black
diff --git a/Makefile b/Makefile
index 01691d3..4a47a98 100644
--- a/Makefile
+++ b/Makefile
@@ -20,5 +20,7 @@ dep-test: init
	poetry run coverage report --fail-under=60 -m --skip-empty

 clean:
-	rm -rf build dist .eggs terraform_worker.egg-info
-	find . -name *.pyc -exec rm {} \;
+	@echo "removing python temporary and build files"
+	@rm -rf build dist .eggs terraform_worker.egg-info
+	@find . -name '*.pyc' -exec rm {} \;
+	@find . 
-name __pycache__ -type d -exec rmdir {} \; diff --git a/poetry.lock b/poetry.lock index 5923a7e..1495a44 100644 --- a/poetry.lock +++ b/poetry.lock @@ -633,18 +633,18 @@ tests = ["asttokens (>=2.1.0)", "coverage", "coverage-enable-subprocess", "ipyth [[package]] name = "flake8" -version = "7.0.0" +version = "7.1.0" description = "the modular source code checker: pep8 pyflakes and co" optional = false python-versions = ">=3.8.1" files = [ - {file = "flake8-7.0.0-py2.py3-none-any.whl", hash = "sha256:a6dfbb75e03252917f2473ea9653f7cd799c3064e54d4c8140044c5c065f53c3"}, - {file = "flake8-7.0.0.tar.gz", hash = "sha256:33f96621059e65eec474169085dc92bf26e7b2d47366b70be2f67ab80dc25132"}, + {file = "flake8-7.1.0-py2.py3-none-any.whl", hash = "sha256:2e416edcc62471a64cea09353f4e7bdba32aeb079b6e360554c659a122b1bc6a"}, + {file = "flake8-7.1.0.tar.gz", hash = "sha256:48a07b626b55236e0fb4784ee69a465fbf59d79eec1f5b4785c3d3bc57d17aa5"}, ] [package.dependencies] mccabe = ">=0.7.0,<0.8.0" -pycodestyle = ">=2.11.0,<2.12.0" +pycodestyle = ">=2.12.0,<2.13.0" pyflakes = ">=3.2.0,<3.3.0" [[package]] @@ -1421,13 +1421,13 @@ pyasn1 = ">=0.4.6,<0.7.0" [[package]] name = "pycodestyle" -version = "2.11.1" +version = "2.12.0" description = "Python style guide checker" optional = false python-versions = ">=3.8" files = [ - {file = "pycodestyle-2.11.1-py2.py3-none-any.whl", hash = "sha256:44fe31000b2d866f2e41841b18528a505fbd7fef9017b04eff4e2648a0fadc67"}, - {file = "pycodestyle-2.11.1.tar.gz", hash = "sha256:41ba0e7afc9752dfb53ced5489e89f8186be00e599e712660695b7a75ff2663f"}, + {file = "pycodestyle-2.12.0-py2.py3-none-any.whl", hash = "sha256:949a39f6b86c3e1515ba1787c2022131d165a8ad271b11370a8819aa070269e4"}, + {file = "pycodestyle-2.12.0.tar.gz", hash = "sha256:442f950141b4f43df752dd303511ffded3a04c2b6fb7f65980574f0c31e6e79c"}, ] [[package]] @@ -2071,21 +2071,6 @@ pure-eval = "*" [package.extras] tests = ["cython", "littleutils", "pygments", "pytest", "typeguard"] -[[package]] -name = "tenacity" -version = "8.3.0" -description = "Retry code until it succeeds" -optional = false -python-versions = ">=3.8" -files = [ - {file = "tenacity-8.3.0-py3-none-any.whl", hash = "sha256:3649f6443dbc0d9b01b9d8020a9c4ec7a1ff5f6f3c6c8a036ef371f573fe9185"}, - {file = "tenacity-8.3.0.tar.gz", hash = "sha256:953d4e6ad24357bceffbc9707bc74349aca9d245f68eb65419cf0c249a1949a2"}, -] - -[package.extras] -doc = ["reno", "sphinx"] -test = ["pytest", "tornado (>=4.5)", "typeguard"] - [[package]] name = "tomli" version = "2.0.1" @@ -2275,4 +2260,4 @@ files = [ [metadata] lock-version = "2.0" python-versions = "^3.10" -content-hash = "2a2d7cbcea1bb7da185c4f1922f9a3aab70cf56fe02c9372ab4fa91d629c1465" +content-hash = "9649053b9cee29538584e3e3c5f8ad24012bc3c24632b6082c4c39f6f7db2656" diff --git a/pyproject.toml b/pyproject.toml index df61b2d..378b88a 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -31,19 +31,22 @@ python = "^3.10" boto3 = "^1.34" click = "^8.1" jinja2 = "^3.1" -tenacity = "^8.2.2" -requests = "^2.28" -google-cloud-storage = "^2.7" +google-cloud-storage = "^2.17" python-hcl2 = "^4.3" -lark = "^1.1" pyyaml = "^6.0" -markupsafe = "^2.1" mergedeep = "^1.3" setuptools = "^70.0" atlassian-python-api = "^3.41" pydantic = "^2.7" -[tool.poetry.dev-dependencies] +[tool.poetry.scripts] +worker = 'tfworker.cli:cli' + +[tool.poetry.group.dev] +optional = true + +[tool.poetry.group.dev.dependencies] +pytest-timeout = "2.3.1" ipython = "^8.24" pytest = "^8.2" black = "^24.4" @@ -59,12 +62,6 @@ moto = {extras = ["sts","dynamodb", "s3"], version = 
"^5.0"} deepdiff = "^7.0" Sphinx = "^7.3" -[tool.poetry.scripts] -worker = 'tfworker.cli:cli' - -[tool.poetry.group.dev.dependencies] -pytest-timeout = "2.3.1" - [tool.pytest.ini_options] addopts = "--capture=sys --cov=tfworker --cov-report=" diff --git a/tests/commands/test_terraform.py b/tests/commands/test_terraform.py index 1e8a988..2c24cf0 100644 --- a/tests/commands/test_terraform.py +++ b/tests/commands/test_terraform.py @@ -13,7 +13,6 @@ # limitations under the License. import pathlib -from contextlib import contextmanager from typing import Tuple from unittest import mock from unittest.mock import MagicMock, patch @@ -27,12 +26,6 @@ from tfworker.handlers import HandlerError -# context manager to allow testing exceptions in parameterized tests -@contextmanager -def does_not_raise(): - yield - - def mock_pipe_exec( args: str, stdin: str = None, diff --git a/tests/fixtures/test_config.yaml b/tests/fixtures/test_config.yaml index 0fef253..cfb92b4 100644 --- a/tests/fixtures/test_config.yaml +++ b/tests/fixtures/test_config.yaml @@ -12,7 +12,7 @@ terraform: vars: region: {{ aws_region }} - google_beta: + google-beta: requirements: version: 3.38.0 diff --git a/tests/providers/test_aws.py b/tests/providers/test_aws.py deleted file mode 100644 index de3434c..0000000 --- a/tests/providers/test_aws.py +++ /dev/null @@ -1,21 +0,0 @@ -# Copyright 2020 Richard Maynard (richard.maynard@gmail.com) -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - - -def test_aws_hcl(basec): - render = basec.providers["aws"].hcl() - expected_render = """provider "aws" { - region = "us-west-2" -}""" - assert render == expected_render diff --git a/tests/providers/test_google_beta.py b/tests/providers/test_google_beta.py new file mode 100644 index 0000000..0367371 --- /dev/null +++ b/tests/providers/test_google_beta.py @@ -0,0 +1,8 @@ +def test_google_hcl(basec): + render = basec.providers["google-beta"].hcl() + expected_render = """provider "google-beta" { + region = "us-west-2" + credentials = file("/home/test/test-creds.json") +}""" + + assert render == expected_render diff --git a/tests/test_plugins.py b/tests/test_plugins.py deleted file mode 100644 index e848d33..0000000 --- a/tests/test_plugins.py +++ /dev/null @@ -1,154 +0,0 @@ -# # Copyright 2020-2023 Richard Maynard (richard.maynard@gmail.com) -# # -# # Licensed under the Apache License, Version 2.0 (the "License"); -# # you may not use this file except in compliance with the License. -# # You may obtain a copy of the License at -# # -# # http://www.apache.org/licenses/LICENSE-2.0 -# # -# # Unless required by applicable law or agreed to in writing, software -# # distributed under the License is distributed on an "AS IS" BASIS, -# # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# # See the License for the specific language governing permissions and -# # limitations under the License. 
- -# import glob -# import os -# from contextlib import contextmanager - -# import pytest - -# import tfworker.commands.root -# import tfworker.plugins -# from tfworker.util.system import get_platform - -# # values needed by multiple tests -# opsys, machine = get_platform() -# _platform = f"{opsys}_{machine}" - - -# # context manager to allow testing exceptions in parameterized tests -# @contextmanager -# def does_not_raise(): -# yield - - -# # test data to ensure URL's are formed correctly, and exception is thrown -# # when version is not passed -# test_get_url_data = [ -# ( -# "default_test", -# {"version": "1.0.0"}, -# f"https://releases.hashicorp.com/terraform-provider-default_test/1.0.0/terraform-provider-default_test_1.0.0_{_platform}.zip", -# does_not_raise(), -# ), -# ( -# "uri_test", -# {"version": "1.5.0", "baseURL": "http://localhost/"}, -# f"http://localhost/terraform-provider-uri_test_1.5.0_{_platform}.zip", -# does_not_raise(), -# ), -# ( -# "filename_test", -# {"version": "2.0.0", "filename": "filename_test.zip"}, -# "https://releases.hashicorp.com/terraform-provider-filename_test/2.0.0/filename_test.zip", -# does_not_raise(), -# ), -# ( -# "filename_and_uri_test", -# { -# "version": "2.5.0", -# "filename": "filename_test.zip", -# "baseURL": "http://localhost/", -# }, -# "http://localhost/filename_test.zip", -# does_not_raise(), -# ), -# ("bad_version", {}, None, pytest.raises(KeyError)), -# ] - - -# class TestPlugins: -# @pytest.mark.enable_socket -# @pytest.mark.depends(on="get_url") -# def test_plugin_download(self, rootc): -# plugins = tfworker.plugins.PluginsCollection( -# {"null": {"version": "3.2.1"}}, rootc.temp_dir, None, 1 -# ) -# plugins.download() -# files = glob.glob( -# f"{rootc.temp_dir}/terraform-plugins/registry.terraform.io/hashicorp/null/*null*3.2.1*.zip" -# ) -# assert len(files) == 1 -# for afile in files: -# assert os.path.isfile(afile) -# assert (os.stat(afile).st_mode & 0o777) == 0o755 - -# @pytest.mark.depends(name="get_url") -# @pytest.mark.parametrize( -# "name,details,expected_url, expected_exception", test_get_url_data -# ) -# def test_get_url(self, name, details, expected_url, expected_exception): -# with expected_exception: -# actual_url = tfworker.plugins.get_url(name, details) -# assert expected_url == actual_url - -# @pytest.mark.parametrize( -# "name,details,expected_host,expected_ns,expected_provider,expected_exception", -# [ -# ( -# "bar", -# {"source": "foo/bar"}, -# "registry.terraform.io", -# "foo", -# "bar", -# does_not_raise(), -# ), -# ( -# "bar", -# {"source": "gh.com/foo/bar"}, -# "gh.com", -# "foo", -# "bar", -# does_not_raise(), -# ), -# ( -# "bar", -# {"source": "bar"}, -# "registry.terraform.io", -# "hashicorp", -# "bar", -# does_not_raise(), -# ), -# ( -# "bar", -# {}, -# "registry.terraform.io", -# "hashicorp", -# "bar", -# does_not_raise(), -# ), -# ( -# "bar", -# {"source": "gh.com/extra/foo/bar"}, -# "registry.terraform.io", -# "hashicorp", -# "bar", -# pytest.raises(tfworker.plugins.PluginSourceParseException), -# ), -# ], -# ) -# def test_plugin_source( -# self, -# name, -# details, -# expected_host, -# expected_ns, -# expected_provider, -# expected_exception, -# ): -# with expected_exception: -# source = tfworker.plugins.PluginSource(name, details) -# assert source.host == expected_host -# assert source.namespace == expected_ns -# assert source.provider == expected_provider diff --git a/tests/util/test_system.py b/tests/util/test_system.py index 4715366..ee76445 100644 --- a/tests/util/test_system.py +++ 
b/tests/util/test_system.py @@ -11,8 +11,6 @@ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. - -from contextlib import contextmanager from unittest import mock import pytest @@ -20,12 +18,6 @@ from tfworker.util.system import get_platform, get_version, pipe_exec, strip_ansi, which -# context manager to allow testing exceptions in parameterized tests -@contextmanager -def does_not_raise(): - yield - - def mock_pipe_exec(args, stdin=None, cwd=None, env=None): return (0, "".encode(), "".encode()) diff --git a/tests/util/test_terraform_util.py b/tests/util/test_terraform_util.py deleted file mode 100644 index 566e34b..0000000 --- a/tests/util/test_terraform_util.py +++ /dev/null @@ -1,98 +0,0 @@ -import shutil - -import pytest - -from tfworker.util.terraform import prep_modules - - -def test_prep_modules(tmp_path): - test_file_content = "test" - - module_path = tmp_path / "terraform-modules" - module_path.mkdir() - - target_path = tmp_path / "target" - target_path.mkdir() - - # Create a test module directory with a file - test_module_dir = module_path / "test_module_dir" - test_module_dir.mkdir() - test_module_file = test_module_dir / "test_module_file.tf" - with open(test_module_file, "w") as f: - f.write(test_file_content) - test_module_ignored_file = test_module_dir / "test_module_ignored_file.txt" - test_module_ignored_file.touch() - test_module_default_ignored_file = test_module_dir / "terraform.tfstate" - test_module_default_ignored_file.touch() - - prep_modules(str(module_path), str(target_path)) - - final_target_path = target_path / "terraform-modules" / "test_module_dir" - - # check the target path exists - assert final_target_path.exists() - - # check the file is copied to the target directory - assert (final_target_path / "test_module_file.tf").exists() - - # check the file content is the same - with open(final_target_path / "test_module_file.tf") as f: - assert f.read() == test_file_content - - # check that the ignored file is not copied to the target directory - assert not (final_target_path / "terraform.tfstate").exists() - - # remove the contents of the target directory - shutil.rmtree(target_path) - assert not target_path.exists() - - # Use a custom ignore pattern - prep_modules(str(module_path), str(target_path), ignore_patterns=["*.txt"]) - - # ensure the default ignored file is copied - assert (final_target_path / "terraform.tfstate").exists() - - # ensure the custom ignored file is not copied - assert not (final_target_path / "test_module_ignored_file.txt").exists() - - -def test_prep_modules_not_found(tmp_path): - module_path = tmp_path / "terraform-modules" - target_path = tmp_path / "target" - - prep_modules(str(module_path), str(target_path)) - - # check the target path does not exist - assert not target_path.exists() - - -def test_prep_modules_required(tmp_path): - module_path = tmp_path / "terraform-modules" - target_path = tmp_path / "target" - - with pytest.raises(SystemExit): - prep_modules(str(module_path), str(target_path), required=True) - - # check the target path does not exist - assert not target_path.exists() - - # @pytest.mark.parametrize( - # "stdout, major, minor, expected_exception", - # [ - # ("Terraform v0.12.29", 0, 12, does_not_raise()), - # ("Terraform v1.3.5", 1, 3, does_not_raise()), - # ("TF 14", "", "", pytest.raises(SystemExit)), - # ], - # ) - # def test_get_tf_version( - # self, stdout: str, major: int, minor: 
int, expected_exception: callable - # ): - # with mock.patch( - # "tfworker.commands.base.pipe_exec", - # side_effect=mock_tf_version, - # ) as mocked: - # with expected_exception: - # (actual_major, actual_minor) = BaseCommand.get_terraform_version(stdout) - # assert actual_major == major - # assert actual_minor == minor - # mocked.assert_called_once() diff --git a/tests/util/test_util_terraform.py b/tests/util/test_util_terraform.py new file mode 100644 index 0000000..a413fed --- /dev/null +++ b/tests/util/test_util_terraform.py @@ -0,0 +1,433 @@ +import shutil +from contextlib import contextmanager +from unittest.mock import MagicMock, call, patch + +import pytest + +from tfworker.constants import ( + DEFAULT_REPOSITORY_PATH, + TF_PROVIDER_DEFAULT_HOSTNAME, + TF_PROVIDER_DEFAULT_NAMESPACE, +) +from tfworker.providers.providers_collection import ProvidersCollection +from tfworker.types import ProviderGID +from tfworker.util.terraform import ( + find_required_providers, + generate_terraform_lockfile, + get_provider_gid_from_source, + get_terraform_version, + mirror_providers, + prep_modules, +) + + +@contextmanager +def does_not_raise(): + yield + + +@pytest.fixture +def providers_collection(): + providers_odict = { + "provider1": { + "requirements": {"source": "hashicorp/provider1", "version": "1.0.0"} + }, + "provider2": { + "requirements": {"source": "hashicorp/provider2", "version": "2.0.0"} + }, + } + return ProvidersCollection( + providers_odict=providers_odict, + authenticators=MagicMock(), + ) + + +@pytest.fixture +def empty_providers_collection(): + return ProvidersCollection( + providers_odict={}, + authenticators=MagicMock(), + ) + + +def test_prep_modules(tmp_path): + test_file_content = "test" + + module_path = tmp_path / "terraform-modules" + module_path.mkdir() + + target_path = tmp_path / "target" + target_path.mkdir() + + # Create a test module directory with a file + test_module_dir = module_path / "test_module_dir" + test_module_dir.mkdir() + test_module_file = test_module_dir / "test_module_file.tf" + with open(test_module_file, "w") as f: + f.write(test_file_content) + test_module_ignored_file = test_module_dir / "test_module_ignored_file.txt" + test_module_ignored_file.touch() + test_module_default_ignored_file = test_module_dir / "terraform.tfstate" + test_module_default_ignored_file.touch() + + prep_modules(str(module_path), str(target_path)) + + final_target_path = target_path / "terraform-modules" / "test_module_dir" + + # check the target path exists + assert final_target_path.exists() + + # check the file is copied to the target directory + assert (final_target_path / "test_module_file.tf").exists() + + # check the file content is the same + with open(final_target_path / "test_module_file.tf") as f: + assert f.read() == test_file_content + + # check that the ignored file is not copied to the target directory + assert not (final_target_path / "terraform.tfstate").exists() + + # remove the contents of the target directory + shutil.rmtree(target_path) + assert not target_path.exists() + + # Use a custom ignore pattern + prep_modules(str(module_path), str(target_path), ignore_patterns=["*.txt"]) + + # ensure the default ignored file is copied + assert (final_target_path / "terraform.tfstate").exists() + + # ensure the custom ignored file is not copied + assert not (final_target_path / "test_module_ignored_file.txt").exists() + + +def test_prep_modules_not_found(tmp_path): + module_path = tmp_path / "terraform-modules" + target_path = tmp_path / "target" + + 
prep_modules(str(module_path), str(target_path))
+
+    # check the target path does not exist
+    assert not target_path.exists()
+
+
+def test_prep_modules_required(tmp_path):
+    module_path = tmp_path / "terraform-modules"
+    target_path = tmp_path / "target"
+
+    with pytest.raises(SystemExit):
+        prep_modules(str(module_path), str(target_path), required=True)
+
+    # check the target path does not exist
+    assert not target_path.exists()
+
+
+def test_prep_modules_default_path():
+    class MockPath:
+        def __init__(self, exists_return_value):
+            self.exists_return_value = exists_return_value
+
+        def exists(self):
+            return self.exists_return_value
+
+    with patch(
+        "pathlib.Path", return_value=MockPath(exists_return_value=False)
+    ) as MockPath:
+        result = prep_modules("", "test_target")
+        assert result is None
+        assert MockPath.call_count == 2
+        MockPath.assert_has_calls(
+            [
+                call(f"{DEFAULT_REPOSITORY_PATH}/terraform-modules"),
+                call("test_target/terraform-modules"),
+            ],
+            any_order=True,
+        )
+
+
+@pytest.mark.parametrize(
+    "stdout, stderr, return_code, major, minor, expected_exception",
+    [
+        ("Terraform v0.12.29", "", 0, 0, 12, does_not_raise()),
+        ("Terraform v1.3.5", "", 0, 1, 3, does_not_raise()),
+        ("TF 14", "", 0, "", "", pytest.raises(SystemExit)),
+        ("", "error", 1, "", "", pytest.raises(SystemExit)),
+    ],
+)
+def test_get_tf_version(
+    stdout: str,
+    stderr: str,
+    return_code: int,
+    major: int,
+    minor: int,
+    expected_exception: callable,
+):
+    with patch(
+        "tfworker.util.terraform.pipe_exec",
+        side_effect=[(return_code, stdout.encode(), stderr.encode())],
+    ) as mocked:
+        with expected_exception:
+            (actual_major, actual_minor) = get_terraform_version(stdout)
+            assert actual_major == major
+            assert actual_minor == minor
+        mocked.assert_called_once()
+
+
+@pytest.fixture
+def mock_mirror_setup():
+    mock_mirror_settings = {
+        "providers": MagicMock(),
+        "terraform_bin": "/path/to/terraform",
+        "working_dir": "/working/dir",
+        "cache_dir": "/cache/dir",
+        "temp_dir": "/temp/dir",
+    }
+    with patch("tfworker.util.terraform.pipe_exec") as mock_pipe_exec, patch(
+        "tfworker.util.terraform.tfhelpers._write_mirror_configuration"
+    ) as mock_write_mirror_configuration, patch(
+        "tfworker.util.terraform.tfhelpers._validate_cache_dir"
+    ) as mock_validate_cache_dir, patch(
+        "tfworker.util.terraform.click.secho"
+    ) as mock_secho:
+
+        yield mock_secho, mock_validate_cache_dir, mock_write_mirror_configuration, mock_pipe_exec, mock_mirror_settings
+
+
+def test_mirror_providers(mock_mirror_setup):
+    (
+        mock_secho,
+        mock_validate_cache_dir,
+        mock_write_mirror_configuration,
+        mock_pipe_exec,
+        mock_mirror_settings,
+    ) = mock_mirror_setup
+    mock_write_mirror_configuration.return_value.__enter__.return_value = (
+        mock_mirror_settings["temp_dir"]
+    )
+    mock_pipe_exec.return_value = (0, b"stdout", b"stderr")
+
+    result = mirror_providers(
+        providers=mock_mirror_settings["providers"],
+        terraform_bin=mock_mirror_settings["terraform_bin"],
+        working_dir=mock_mirror_settings["working_dir"],
+        cache_dir=mock_mirror_settings["cache_dir"],
+    )
+
+    mock_validate_cache_dir.assert_called_once_with(mock_mirror_settings["cache_dir"])
+    mock_write_mirror_configuration.assert_called_once_with(
+        mock_mirror_settings["providers"],
+        mock_mirror_settings["working_dir"],
+        mock_mirror_settings["cache_dir"],
+    )
+    mock_pipe_exec.assert_called_once_with(
+        f"{mock_mirror_settings['terraform_bin']} providers mirror {mock_mirror_settings['cache_dir']}",
+        cwd=mock_mirror_settings["temp_dir"],
+        stream_output=True,
+    )
+    assert result is None
+
+
+def test_mirror_providers_tf_error(mock_mirror_setup):
+    (
+        mock_secho,
+        mock_validate_cache_dir,
+        mock_write_mirror_configuration,
+        mock_pipe_exec,
+        mock_mirror_settings,
+    ) = mock_mirror_setup
+    mock_write_mirror_configuration.return_value.__enter__.return_value = (
+        mock_mirror_settings["temp_dir"]
+    )
+    mock_pipe_exec.return_value = (1, b"stdout", b"stderr")
+
+    with pytest.raises(SystemExit):
+        mirror_providers(
+            providers=mock_mirror_settings["providers"],
+            terraform_bin=mock_mirror_settings["terraform_bin"],
+            working_dir=mock_mirror_settings["working_dir"],
+            cache_dir=mock_mirror_settings["cache_dir"],
+        )
+
+    mock_validate_cache_dir.assert_called_once_with(mock_mirror_settings["cache_dir"])
+    mock_write_mirror_configuration.assert_called_once_with(
+        mock_mirror_settings["providers"],
+        mock_mirror_settings["working_dir"],
+        mock_mirror_settings["cache_dir"],
+    )
+    mock_pipe_exec.assert_called_once_with(
+        f"{mock_mirror_settings['terraform_bin']} providers mirror {mock_mirror_settings['cache_dir']}",
+        cwd=mock_mirror_settings["temp_dir"],
+        stream_output=True,
+    )
+
+
+def test_mirror_providers_all_in_cache(mock_mirror_setup):
+    (
+        mock_secho,
+        mock_validate_cache_dir,
+        mock_write_mirror_configuration,
+        mock_pipe_exec,
+        mock_mirror_settings,
+    ) = mock_mirror_setup
+    mock_write_mirror_configuration.return_value.__enter__.side_effect = IndexError
+
+    mirror_providers(
+        providers=mock_mirror_settings["providers"],
+        terraform_bin=mock_mirror_settings["terraform_bin"],
+        working_dir=mock_mirror_settings["working_dir"],
+        cache_dir=mock_mirror_settings["cache_dir"],
+    )
+
+    mock_validate_cache_dir.assert_called_once_with(mock_mirror_settings["cache_dir"])
+    mock_write_mirror_configuration.assert_called_once_with(
+        mock_mirror_settings["providers"],
+        mock_mirror_settings["working_dir"],
+        mock_mirror_settings["cache_dir"],
+    )
+    mock_pipe_exec.assert_not_called()
+    mock_secho.assert_called_with("All providers in cache", fg="yellow")
+
+
+@patch("tfworker.util.terraform.click.secho")
+@patch("tfworker.util.terraform.tfhelpers._get_cached_hash")
+@patch("tfworker.util.terraform.tfhelpers._not_in_cache")
+def test_generate_terraform_lockfile(
+    mock_not_in_cache, mock_get_cached_hash, mock_secho, providers_collection
+):
+    providers = providers_collection
+    included_providers = ["provider1"]
+    cache_dir = "/cache/dir"
+    mock_not_in_cache.return_value = False
+    mock_get_cached_hash.return_value = ["hash1", "hash2"]
+
+    expected_result = """provider "registry.terraform.io/hashicorp/provider1" {
+ version = "1.0.0"
+ constraints = "1.0.0"
+ hashes = [
+ "hash1",
+ "hash2",
+ ]
+}
+"""
+
+    result = generate_terraform_lockfile(providers, included_providers, cache_dir)
+    mock_not_in_cache.assert_called()
+    mock_get_cached_hash.assert_called()
+    assert result == expected_result
+
+
+@patch("tfworker.util.terraform.click.secho")
+@patch("tfworker.util.terraform.tfhelpers._get_cached_hash")
+@patch("tfworker.util.terraform.tfhelpers._not_in_cache")
+def test_generate_terraform_lockfile_no_includes(
+    mock_not_in_cache, mock_get_cached_hash, mock_secho, providers_collection
+):
+    providers = providers_collection
+    included_providers = None
+    cache_dir = "/cache/dir"
+    mock_not_in_cache.return_value = False
+    mock_get_cached_hash.return_value = ["hash1", "hash2"]
+
+    expected_result = """provider "registry.terraform.io/hashicorp/provider1" {
+ version = "1.0.0"
+ constraints = "1.0.0"
+ hashes = [
+ "hash1",
+ "hash2",
+ ]
+}
+
+provider "registry.terraform.io/hashicorp/provider2" { + version = "2.0.0" + constraints = "2.0.0" + hashes = [ + "hash1", + "hash2", + ] +} +""" + + result = generate_terraform_lockfile(providers, included_providers, cache_dir) + mock_not_in_cache.assert_called() + mock_get_cached_hash.assert_called() + assert result == expected_result + + +@patch("tfworker.util.terraform.click.secho") +@patch("tfworker.util.terraform.tfhelpers._get_cached_hash") +@patch("tfworker.util.terraform.tfhelpers._not_in_cache") +def test_generate_terraform_lockfile_not_in_cache( + mock_not_in_cache, mock_get_cached_hash, mock_secho +): + providers = MagicMock() + providers.__iter__.return_value = [MagicMock()] + included_providers = ["provider1", "provider2"] + cache_dir = "/cache/dir" + mock_not_in_cache.return_value = True + + result = generate_terraform_lockfile(providers, included_providers, cache_dir) + + mock_secho.assert_called_once_with( + f"Generating lockfile for providers: {included_providers}", fg="yellow" + ) + mock_not_in_cache.assert_called() + assert result is None + + +def test_get_provider_gid_from_source_full(): + result = get_provider_gid_from_source("example.com/namespace/provider") + assert result == ProviderGID( + hostname="example.com", namespace="namespace", type="provider" + ) + + +def test_get_provider_gid_from_source_long(): + with pytest.raises(ValueError): + get_provider_gid_from_source("example.com/namespace/provider/invalid") + + +def test_get_provider_gid_from_source_short(): + with pytest.raises(ValueError): + get_provider_gid_from_source(None) + + +def test_get_provider_from_source_provider(): + result = get_provider_gid_from_source("provider") + assert result == ProviderGID( + hostname=TF_PROVIDER_DEFAULT_HOSTNAME, + namespace=TF_PROVIDER_DEFAULT_NAMESPACE, + type="provider", + ) + + +def test_get_provider_from_source_namespace(): + result = get_provider_gid_from_source("namespace/provider") + assert result == ProviderGID( + hostname=TF_PROVIDER_DEFAULT_HOSTNAME, namespace="namespace", type="provider" + ) + + +@patch("tfworker.util.terraform.tfhelpers._find_required_providers") +def test_find_required_providers(mock_find_required_providers): + search_dir = "/search/dir" + mock_find_required_providers.return_value = { + "provider1": [{"version": "1.0.0", "source": "hashicorp/provider1"}] + } + + result = find_required_providers(search_dir) + + mock_find_required_providers.assert_called_once_with(search_dir) + assert result == { + "provider1": [{"version": "1.0.0", "source": "hashicorp/provider1"}] + } + + +@patch("tfworker.util.terraform.tfhelpers._find_required_providers") +def test_find_required_providers_empty(mock_find_required_providers): + search_dir = "/search/dir/empty" + mock_find_required_providers.return_value = {} + + result = find_required_providers(search_dir) + + mock_find_required_providers.assert_called_once_with(search_dir) + assert result is None diff --git a/tests/util/test_util_terraform_helpers.py b/tests/util/test_util_terraform_helpers.py new file mode 100644 index 0000000..6e2993c --- /dev/null +++ b/tests/util/test_util_terraform_helpers.py @@ -0,0 +1,244 @@ +import json +import pathlib +from tempfile import TemporaryDirectory +from unittest.mock import MagicMock + +import pytest + +from tfworker.providers.providers_collection import ProvidersCollection +from tfworker.types import ProviderGID +from tfworker.util.system import get_platform +from tfworker.util.terraform_helpers import ( + _create_mirror_configuration, + _find_required_providers, + 
_get_cached_hash, + _get_provider_cache_dir, + _not_in_cache, + _parse_required_providers, + _validate_cache_dir, + _write_mirror_configuration, +) + + +@pytest.fixture +def provider_gid(): + return ProviderGID(hostname="example.com", namespace="namespace", type="provider") + + +@pytest.fixture +def cache_dir(tmp_path): + return tmp_path + + +@pytest.fixture +def version(): + return "1.0.0" + + +@pytest.fixture +def providers_collection(): + providers_odict = { + "provider1": { + "requirements": {"source": "hashicorp/provider1", "version": "1.0.0"} + }, + } + return ProvidersCollection( + providers_odict=providers_odict, + authenticators=MagicMock(), + ) + + +@pytest.fixture +def empty_providers_collection(): + return ProvidersCollection( + providers_odict={}, + authenticators=MagicMock(), + ) + + +@pytest.fixture +def create_cache_files(cache_dir, provider_gid, version): + provider_dir = ( + pathlib.Path(cache_dir) + / provider_gid.hostname + / provider_gid.namespace + / provider_gid.type + ) + provider_dir.mkdir(parents=True, exist_ok=True) + + version_file = provider_dir / f"{version}.json" + platform = get_platform() + provider_file = ( + provider_dir + / f"terraform-provider-{provider_gid.type}_{version}_{platform[0]}_{platform[1]}.zip" + ) + + version_data = { + "archives": {f"{platform[0]}_{platform[1]}": {"hashes": "dummy_hash"}} + } + + with open(version_file, "w") as f: + json.dump(version_data, f) + + with open(provider_file, "w") as f: + f.write("dummy_provider_content") + + return cache_dir, version_file, provider_file + + +def test_not_in_cache_false(provider_gid, version, create_cache_files): + cache_dir, version_file, provider_file = create_cache_files + assert not _not_in_cache(provider_gid, version, str(cache_dir)) + + +def test_not_in_cache_true(provider_gid, version, cache_dir): + assert _not_in_cache(provider_gid, version, str(cache_dir)) + + +def test_not_in_cache_missing_version_file(provider_gid, version, create_cache_files): + cache_dir, version_file, provider_file = create_cache_files + version_file.unlink() # Remove the version file + assert _not_in_cache(provider_gid, version, str(cache_dir)) + + +def test_not_in_cache_missing_provider_file(provider_gid, version, create_cache_files): + cache_dir, version_file, provider_file = create_cache_files + provider_file.unlink() # Remove the provider file + assert _not_in_cache(provider_gid, version, str(cache_dir)) + + +def test_get_cached_hash(provider_gid, version, create_cache_files): + cache_dir, _, _ = create_cache_files + cached_hash = _get_cached_hash(provider_gid, version, str(cache_dir)) + assert cached_hash == "dummy_hash" + + +def test_validate_cache_dir(cache_dir): + _validate_cache_dir(str(cache_dir)) + + +def test_validate_cache_dir_nonexistent(): + with pytest.raises(SystemExit): + _validate_cache_dir("nonexistent_dir") + + +def test_validate_cache_dir_not_a_directory(tmp_path): + file_path = tmp_path / "not_a_directory" + file_path.touch() # Create a file instead of a directory + with pytest.raises(SystemExit): + _validate_cache_dir(str(file_path)) + + +def test_validate_cache_dir_not_writable(tmp_path): + cache_dir = tmp_path / "cache" + cache_dir.mkdir() + cache_dir.chmod(0o555) # Read and execute permissions only + with pytest.raises(SystemExit): + _validate_cache_dir(str(cache_dir)) + cache_dir.chmod(0o755) # Restore permissions for cleanup + + +def test_validate_cache_dir_not_readable(tmp_path): + cache_dir = tmp_path / "cache" + cache_dir.mkdir() + cache_dir.chmod(0o333) # Write and execute 
permissions only + with pytest.raises(SystemExit): + _validate_cache_dir(str(cache_dir)) + cache_dir.chmod(0o755) # Restore permissions for cleanup + + +def test_validate_cache_dir_not_executable(tmp_path): + cache_dir = tmp_path / "cache" + cache_dir.mkdir() + cache_dir.chmod(0o666) # Read and write permissions only + with pytest.raises(SystemExit): + _validate_cache_dir(str(cache_dir)) + cache_dir.chmod(0o755) # Restore permissions for cleanup + + +def test_get_provider_cache_dir(provider_gid, cache_dir): + provider_cache_dir = _get_provider_cache_dir(provider_gid, str(cache_dir)) + expected_dir = ( + pathlib.Path(cache_dir) + / provider_gid.hostname + / provider_gid.namespace + / provider_gid.type + ) + assert provider_cache_dir == expected_dir + + +def test_write_mirror_configuration(providers_collection, cache_dir): + with TemporaryDirectory() as working_dir: + temp_dir = _write_mirror_configuration( + providers_collection, working_dir, str(cache_dir) + ) + assert temp_dir is not None + assert (pathlib.Path(temp_dir.name) / "terraform.tf").exists() + + +def test_write_mirror_configuration_empty_providers( + empty_providers_collection, cache_dir +): + with TemporaryDirectory() as working_dir: + with pytest.raises(IndexError): + _write_mirror_configuration( + empty_providers_collection, working_dir, str(cache_dir) + ) + + +def test_create_mirror_configuration(providers_collection): + includes = ["provider1", "provider2"] + tf_config = _create_mirror_configuration(providers_collection, includes) + assert "terraform {" in tf_config + + +def test_parse_required_providers(): + content = { + "terraform": [ + { + "required_providers": [ + {"provider1": {"source": "hashicorp/provider1", "version": "1.0.0"}} + ] + } + ] + } + expected_providers = { + "provider1": {"source": "hashicorp/provider1", "version": "1.0.0"} + } + assert _parse_required_providers(content) == expected_providers + + +def test_parse_required_providers_no_providers(): + content = {"terraform": [{"required_providers": []}]} + assert _parse_required_providers(content) is None + + +def test_parse_required_providers_no_terraform(): + content = { + "required_providers": [ + {"provider1": {"source": "hashicorp/provider1", "version": "1.0.0"}} + ] + } + assert _parse_required_providers(content) is None + + +def test_find_required_providers(tmp_path): + tf_content = """ + terraform { + required_providers { + provider1 = { + source = "hashicorp/provider1" + version = "1.0.0" + } + } + } + """ + test_file = tmp_path / "main.tf" + with open(test_file, "w") as f: + f.write(tf_content) + + providers = _find_required_providers(str(tmp_path)) + expected_providers = { + "provider1": {"source": "hashicorp/provider1", "version": "1.0.0"} + } + assert providers == expected_providers diff --git a/tfworker/authenticators/__init__.py b/tfworker/authenticators/__init__.py index f341f4f..071188f 100644 --- a/tfworker/authenticators/__init__.py +++ b/tfworker/authenticators/__init__.py @@ -16,9 +16,9 @@ from .aws import AWSAuthenticator # noqa from .base import UnknownAuthenticator # noqa -from .google import GoogleAuthenticator # noqa +from .google import GoogleAuthenticator, GoogleBetaAuthenticator # noqa -ALL = [AWSAuthenticator, GoogleAuthenticator] +ALL = [AWSAuthenticator, GoogleAuthenticator, GoogleBetaAuthenticator] class AuthenticatorsCollection(collections.abc.Mapping): diff --git a/tfworker/authenticators/google.py b/tfworker/authenticators/google.py index a8ead6e..dc0562a 100644 --- a/tfworker/authenticators/google.py +++ 
b/tfworker/authenticators/google.py @@ -43,3 +43,7 @@ def env(self): if self.creds_path: result["GOOGLE_APPLICATION_CREDENTIALS"] = shlex.quote(self.creds_path) return result + + +class GoogleBetaAuthenticator(GoogleAuthenticator): + tag = "google-beta" diff --git a/tfworker/commands/base.py b/tfworker/commands/base.py index 966b4fa..22bddbf 100644 --- a/tfworker/commands/base.py +++ b/tfworker/commands/base.py @@ -69,7 +69,6 @@ def __init__(self, rootc, deployment="undefined", limit=tuple(), **kwargs): self._authenticators = AuthenticatorsCollection( rootc.args, deployment=deployment, **kwargs ) - self._providers = ProvidersCollection( rootc.providers_odict, self._authenticators ) diff --git a/tfworker/commands/terraform.py b/tfworker/commands/terraform.py index 9d108a3..ffd9bf2 100644 --- a/tfworker/commands/terraform.py +++ b/tfworker/commands/terraform.py @@ -438,7 +438,7 @@ def _run( color_str = "-no-color" if self._use_colors is False else "" params = { - "init": f"-input=false {color_str} -plugin-dir={plugin_dir}", + "init": f"-input=false {color_str} -plugin-dir={plugin_dir} -lockfile=readonly", "plan": f"-input=false -detailed-exitcode {color_str}", "apply": f"-input=false {color_str} -auto-approve", "destroy": f"-input=false {color_str} -auto-approve", diff --git a/tfworker/definitions.py b/tfworker/definitions.py index fbf3864..09c4c06 100644 --- a/tfworker/definitions.py +++ b/tfworker/definitions.py @@ -215,7 +215,9 @@ def _prep_terraform_lockfile(self): with open(f"{self._target}/{TF_PROVIDER_DEFAULT_LOCKFILE}", "w") as lockfile: lockfile.write( generate_terraform_lockfile( - providers=self._providers, cache_dir=self._provider_cache + providers=self._providers, + included_providers=self.provider_names, + cache_dir=self._provider_cache, ) ) diff --git a/tfworker/plugins.py b/tfworker/plugins.py deleted file mode 100644 index c11b1be..0000000 --- a/tfworker/plugins.py +++ /dev/null @@ -1,250 +0,0 @@ -# # Copyright 2020-2023 Richard Maynard (richard.maynard@gmail.com) -# # -# # Licensed under the Apache License, Version 2.0 (the "License"); -# # you may not use this file except in compliance with the License. -# # You may obtain a copy of the License at -# # -# # http://www.apache.org/licenses/LICENSE-2.0 -# # -# # Unless required by applicable law or agreed to in writing, software -# # distributed under the License is distributed on an "AS IS" BASIS, -# # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# # See the License for the specific language governing permissions and -# # limitations under the License. 
- -# import collections -# import glob -# import json -# import os -# import shutil -# import urllib - -# import click -# from tenacity import retry, stop_after_attempt, wait_chain, wait_fixed - -# from tfworker.exceptions import PluginSourceParseException -# from tfworker.util.system import get_platform - - -# class PluginsCollection(collections.abc.Mapping): -# def __init__(self, body, temp_dir, cache_dir, tf_version_major): -# self._plugins = body -# self._temp_dir = temp_dir -# self._cache_dir = cache_dir -# self._tf_version_major = tf_version_major -# self._downloaded = False - -# def __len__(self): -# return len(self._providers) - -# def __getitem__(self, value): -# if type(value) is int: -# return self._providers[list(self._providers.keys())[value]] -# return self._providers[value] - -# def __iter__(self): -# return iter(self._providers.values()) - -# def download(self): -# """ -# Download the required plugins; or put them in place from the cache dir - -# This could be further optimized to not download plugins from hashicorp, -# but rather have them in a local repository or host them in s3, and get -# them from an internal s3 endpoint so no transit charges are incurred. -# Ideally these would be stored between runs, and only downloaded if the -# versions have changed. In production try to remove all all external -# repositories/sources from the critical path. -# """ -# if self._downloaded: -# return - -# opsys, machine = get_platform() -# _platform = f"{opsys}_{machine}" - -# plugin_dir = f"{self._temp_dir}/terraform-plugins" - -# if not os.path.isdir(plugin_dir): -# os.mkdir(plugin_dir) - -# # for each plugin, check if it exists in the cache directory if not download it -# # if it does exist, put it into the rendered terraform plugin cache - -# for name, details in self._plugins.items(): -# uri = get_url(name, details) -# file_name = uri.split("/")[-1] -# source = PluginSource(name, details) -# provider_path = os.path.join(source.host, source.namespace, name) - -# cache_hit = False -# if self._cache_dir is not None: -# cache_hit = check_cache( -# os.path.join(self._cache_dir, provider_path), -# os.path.join(plugin_dir, provider_path), -# name, -# details["version"], -# _platform, -# ) - -# if cache_hit is False: -# # the provider cache is not populated, download and put the plugin in place -# try: -# download_from_remote( -# uri, -# plugin_dir, -# self._cache_dir, -# provider_path, -# file_name, -# name, -# details, -# ) -# except PluginSourceParseException as e: -# click.secho(str(e), fg="red") -# click.Abort() - -# self._downloaded = True - - -# class PluginSource: -# """ -# Utility object for divining the local module path details from a provider - -# Customized source fields are expected in the form: / -# The host can also be specified: // - -# Where the host is NOT specified, registry.terraform.io is assumed. 
-# """ - -# def __init__(self, provider, details): -# # Set sensible defaults -# self.provider = provider -# self.namespace = "hashicorp" -# self.host = "registry.terraform.io" -# source = details.get("source") - -# # Parse the parts if source defined -# if source: -# items = ["provider", "namespace", "host"] -# parts = source.split("/") -# if len(parts) > 3: -# raise PluginSourceParseException( -# f"Unable to parse source with more than three segments: {parts}" -# ) -# # pop the items in reverse order until there's nothing left -# for item in items: -# if parts: -# setattr(self, item, parts.pop()) - -# def __repr__(self): -# return json.dumps(self.__dict__) - - -# @retry( -# wait=wait_chain( -# wait_fixed(2), -# wait_fixed(5), -# wait_fixed(10), -# ), -# stop=stop_after_attempt(3), -# reraise=True, -# ) -# def download_from_remote( -# uri, plugin_dir, cache_dir, provider_path, file_name, name, details -# ): -# """ -# download_and_extract_from_remote handles downloading a plugin from the hashicorp -# provider registry, retries according to the decorator, and optionally places -# downloaded plugins into a local provider cache -# """ -# opsys, machine = get_platform() -# _platform = f"{opsys}_{machine}" - -# click.secho( -# f"downloading plugin: {name} version {details['version']} from {uri}", -# fg="yellow", -# ) - -# # download the remote file -# try: -# with urllib.request.urlopen(uri) as response, open( -# f"{plugin_dir}/{file_name}", "wb" -# ) as plug_file: -# shutil.copyfileobj(response, plug_file) -# except urllib.error.HTTPError as e: -# raise PluginSourceParseException( -# f"{e} while downloading plugin: {name}:{details['version']} from {uri}" -# ) - -# # put the file into the working provider directory and cache if necessary -# files = glob.glob( -# f"{plugin_dir}/terraform-provider*-{name}_{details['version']}*_{_platform}*" -# ) -# for afile in files: -# os.chmod(afile, 0o755) -# filename = os.path.basename(afile) -# # handle populating cache -# if cache_dir is not None: -# os.makedirs(os.path.join(cache_dir, provider_path), exist_ok=True) -# shutil.copy(afile, os.path.join(cache_dir, provider_path)) -# click.secho( -# f"saved plugin to cache: {name} version {details['version']}", -# fg="yellow", -# ) -# os.makedirs(os.path.join(plugin_dir, provider_path), exist_ok=True) -# os.rename(afile, os.path.join(plugin_dir, provider_path, filename)) -# click.secho(f"plugin installed to: {plugin_dir}/{provider_path}/", fg="yellow") - - -# def check_cache(cache_path, plugin_path, name, version, platform): -# """ -# Determine if the required plugin version already exists in the cache -# """ -# # the cache dir doesn't exist, so there's no cache -# if not os.path.exists(cache_path): -# return False - -# files = glob.glob(f"{cache_path}/terraform-provider*-{name}_{version}*_{platform}*") -# for afile in files: -# os.makedirs(plugin_path, exist_ok=True) -# shutil.copy(afile, plugin_path) -# click.secho( -# f"using cached provider {name}:{version} from {afile}", -# fg="yellow", -# ) -# return True - -# # if arrived here, the expected provider package didn't exist -# return False - - -# def get_url(name, details): -# """ -# Determine the URL for the plugin - -# get URL returns a fully qualifed URL, including the file name. - -# In order to support third party terraform plugins we can not -# assume the hashicorp repository. It will function as a default, -# but if baseURL is provided in the plugin settings it will be -# used instead. 
The logic to determine the complete remote path -# will also be here to simplify the logic in the download method. -# """ -# opsys, machine = get_platform() -# _platform = f"{opsys}_{machine}" - -# try: -# version = details["version"] -# except KeyError: -# raise KeyError(f"version must be specified for plugin {name}") - -# # set the file name, allow it to be overridden with key "filename" -# default_file_name = f"terraform-provider-{name}_{version}_{_platform}.zip" -# file_name = details.get("filename", default_file_name) - -# # set the base url, allow it to be overridden with key "baseURL" -# default_base_url = ( -# f"https://releases.hashicorp.com/terraform-provider-{name}/{version}" -# ) -# base_uri = details.get("baseURL", default_base_url).rstrip("/") - -# return f"{base_uri}/{file_name}" diff --git a/tfworker/providers/__init__.py b/tfworker/providers/__init__.py deleted file mode 100644 index e7b37f2..0000000 --- a/tfworker/providers/__init__.py +++ /dev/null @@ -1,8 +0,0 @@ -# flake8: noqa: F401 -# from tfworker.providers.base import BaseProvider -# from tfworker.providers.generic import GenericProvider -# from tfworker.providers.google import GoogleProvider -# from tfworker.providers.google_beta import GoogleBetaProvider - -# ProvidersCollection imports NAMED_PROVIDERS, so must be imported after NAMED_PROVIDERS is defined -# from tfworker.providers.providers_collection import ProvidersCollection # noqa: E402 diff --git a/tfworker/util/__init__.py b/tfworker/util/__init__.py deleted file mode 100644 index bf6e46e..0000000 --- a/tfworker/util/__init__.py +++ /dev/null @@ -1,13 +0,0 @@ -# Copyright 2020 Richard Maynard (richard.maynard@gmail.com) -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
diff --git a/tfworker/util/terraform.py b/tfworker/util/terraform.py index 2ca3638..11bb8f1 100644 --- a/tfworker/util/terraform.py +++ b/tfworker/util/terraform.py @@ -1,18 +1,15 @@ # This file contains functions primarily used by the "TerraformCommand" class # the goal of moving these functions here is to reduce the responsibility of # the TerraformCommand class, making it easier to test and maintain -import json -import os import pathlib import re import shutil from functools import lru_cache -from tempfile import TemporaryDirectory from typing import Dict, List, Union import click -import hcl2 +import tfworker.util.terraform_helpers as tfhelpers from tfworker.constants import ( DEFAULT_REPOSITORY_PATH, TF_PROVIDER_DEFAULT_HOSTNAME, @@ -20,7 +17,7 @@ ) from tfworker.providers.providers_collection import ProvidersCollection from tfworker.types import ProviderGID -from tfworker.util.system import get_platform, pipe_exec +from tfworker.util.system import pipe_exec def prep_modules( @@ -39,11 +36,9 @@ def prep_modules( ignore_patterns (list(str)): A list of patterns to ignore required (bool): If the terraform modules directory is required """ - module_path = ( - module_path - if module_path != "" - else f"{DEFAULT_REPOSITORY_PATH}/terraform-modules" - ) + if module_path == "": + module_path = f"{DEFAULT_REPOSITORY_PATH}/terraform-modules" + module_path = pathlib.Path(module_path) target_path = pathlib.Path(f"{target_path}/terraform-modules".replace("//", "/")) @@ -108,9 +103,11 @@ def mirror_providers( cache_dir (str): The cache directory. """ click.secho(f"Mirroring providers to {cache_dir}", fg="yellow") - _validate_cache_dir(cache_dir) + tfhelpers._validate_cache_dir(cache_dir) try: - with _write_mirror_configuration(providers, working_dir, cache_dir) as temp_dir: + with tfhelpers._write_mirror_configuration( + providers, working_dir, cache_dir + ) as temp_dir: (return_code, stdout, stderr) = pipe_exec( f"{terraform_bin} providers mirror {cache_dir}", cwd=temp_dir, @@ -119,18 +116,14 @@ def mirror_providers( if return_code != 0: click.secho(f"Unable to mirror providers\n{stderr.decode()}", fg="red") raise SystemExit(1) - - # after mirroring the providers, copy the lock file to the provider mirror - # so it can be stored, and ensure providers are not downloaded with each run - lock_file = pathlib.Path(temp_dir) / ".terraform.lock" - if lock_file.exists(): - shutil.copy(lock_file, cache_dir) except IndexError: click.secho("All providers in cache", fg="yellow") def generate_terraform_lockfile( - providers: ProvidersCollection, cache_dir: str + providers: ProvidersCollection, + included_providers: Union[None, List[str]], + cache_dir: str, ) -> Union[None, str]: """ Generate the content to put in a .terraform.lock.hcl file to lock providers using the @@ -138,22 +131,30 @@ def generate_terraform_lockfile( Args: providers (ProvidersCollection): The providers to lock. + included_providers (List[str] or None): The providers to include in the lockfile; if none + is provided, all providers will be included. cache_dir (str): The cache directory. 
Returns: Union[None, str]: The content of the .terraform.lock.hcl file or None if any required providers are not in the cache """ lockfile = [] + click.secho( + f"Generating lockfile for providers: {included_providers or [x.tag for x in providers]}", + fg="yellow", + ) for provider in providers: - if _not_in_cache(provider.gid, provider.version, cache_dir): + if tfhelpers._not_in_cache(provider.gid, provider.version, cache_dir): return None - + if included_providers is not None and provider.tag not in included_providers: + continue lockfile.append(f'provider "{str(provider.gid)}" {{') lockfile.append(f' version = "{provider.version}"') lockfile.append(f' constraints = "{provider.version}"') lockfile.append(" hashes = [") - # {str(_get_cached_hash(provider.gid, provider.version, cache_dir))}') - for hash in _get_cached_hash(provider.gid, provider.version, cache_dir): + for hash in tfhelpers._get_cached_hash( + provider.gid, provider.version, cache_dir + ): lockfile.append(f' "{hash}",') lockfile.append(" ]") lockfile.append("}") @@ -161,127 +162,6 @@ def generate_terraform_lockfile( return "\n".join(lockfile) -def _not_in_cache(gid: ProviderGID, version: str, cache_dir: str) -> bool: - """ - Check if the provider is not in the cache directory. - - Args: - provider (str): The provider to check. - cache_dir (str): The cache directory. - - Returns: - bool: True if the provider is not in the cache directory. - """ - provider_dir = pathlib.Path(cache_dir) / gid.hostname / gid.namespace / gid.type - platform = get_platform() - if not provider_dir.exists(): - return True - - # look for version.json and terraform-provider-_version_platform.zip in the provider directory - version_file = provider_dir / f"{version}.json" - provider_file = ( - provider_dir - / f"terraform-provider-{gid.type}_{version}_{platform[0]}_{platform[1]}.zip" - ) - if not version_file.exists() or not provider_file.exists(): - return True - return False - - -def _get_cached_hash(gid: ProviderGID, version: str, cache_dir: str) -> str: - """ - Get the hash of the cached provider. - - Args: - provider (str): The provider to get the hash for. - cache_dir (str): The cache directory. - - Returns: - str: The hash of the cached provider. - - Raises: - ValueError: If the provider hash can not be determined but the file is present - """ - provider_dir = _get_provider_cache_dir(gid, cache_dir) - version_file = provider_dir / f"{version}.json" - with open(version_file, "r") as f: - hash_data = json.load(f) - - platform = get_platform() - - return hash_data["archives"][f"{platform[0]}_{platform[1]}"]["hashes"] - - -def _write_mirror_configuration( - providers: ProvidersCollection, working_dir: str, cache_dir: str -) -> TemporaryDirectory: - """ - Write the mirror configuration to a temporary directory in the working directory. - - Args: - providers (ProvidersCollection): The providers to mirror. - working_dir (str): The working directory. - - Returns: - TemporaryDirectory: A temporary directory containing the mirror configuration. - - Raises: - IndexError: If there are no providers to mirror. 
- """ - includes = [x for x in providers if _not_in_cache(x.gid, x.version, cache_dir)] - if len(includes) == 0: - raise IndexError("No providers to mirror") - click.secho(f"Mirroring providers: {includes}", fg="yellow") - - mirror_configuration = _create_mirror_configuration( - providers=providers, includes=includes - ) - temp_dir = TemporaryDirectory(dir=working_dir) - mirror_file = pathlib.Path(temp_dir.name) / "terraform.tf" - with open(mirror_file, "w") as f: - f.write(mirror_configuration) - return temp_dir - - -def _create_mirror_configuration( - providers: ProvidersCollection, includes: List[str] = [] -) -> str: - """ - Generate a terraform configuration file with all of the providers - to mirror. - """ - tf_string = [] - tf_string.append("terraform {") - tf_string.append(providers.required_hcl(includes=includes)) - tf_string.append("}") - return "\n".join(tf_string) - - -def _validate_cache_dir(cache_dir: str) -> None: - """ - Validate the cache directory, it should exist and be writable. - - Args: - cache_dir (str): The cache directory. - """ - cache_dir = pathlib.Path(cache_dir) - if not cache_dir.exists(): - click.secho(f"Cache directory {cache_dir} does not exist", fg="red") - raise SystemExit(1) - if not cache_dir.is_dir(): - click.secho(f"Cache directory {cache_dir} is not a directory", fg="red") - raise SystemExit(1) - if not os.access(cache_dir, os.W_OK): - click.secho(f"Cache directory {cache_dir} is not writable", fg="red") - raise SystemExit(1) - if not os.access(cache_dir, os.R_OK): - click.secho(f"Cache directory {cache_dir} is not readable", fg="red") - raise SystemExit(1) - if not os.access(cache_dir, os.X_OK): - click.secho(f"Cache directory {cache_dir} is not executable", fg="red") - raise SystemExit(1) - - @lru_cache def get_provider_gid_from_source(source: str) -> ProviderGID: """ @@ -299,6 +179,10 @@ def get_provider_gid_from_source(source: str) -> ProviderGID: Raises: ValueError: If the source string is invalid. """ + if source is None or len(source) == 0: + raise ValueError( + f"Invalid source string, must contain between 1 and 3 parts: {source}" + ) parts = source.split("/") if len(parts) > 3 or len(parts) < 1: raise ValueError( @@ -313,20 +197,6 @@ def get_provider_gid_from_source(source: str) -> ProviderGID: return ProviderGID(hostname=hostname, namespace=namespace, type=ptype) -def _get_provider_cache_dir(gid: ProviderGID, cache_dir: str) -> str: - """ - Get the cache directory for a provider. - - Args: - gid (ProviderGID): The provider GID. - cache_dir (str): The cache directory. - - Returns: - str: The cache directory for the provider. - """ - return pathlib.Path(cache_dir) / gid.hostname / gid.namespace / gid.type - - @lru_cache def find_required_providers( search_dir: str, @@ -341,39 +211,7 @@ def find_required_providers( Dict[str, [Dict[str, str]]]: A dictionary of required providers, with the provider name as the key and the provider details as the value. 
""" - required_providers = _find_required_providers(search_dir) + required_providers = tfhelpers._find_required_providers(search_dir) if len(required_providers) == 0: return None return required_providers - - -def _find_required_providers(search_dir: str) -> Dict[str, [Dict[str, str]]]: - providers = {} - for root, _, files in os.walk(search_dir): - for file in files: - if file.endswith(".tf"): - with open(f"{root}/{file}", "r") as f: - content = hcl2.load(f) - new_providers = _parse_required_providers(content) - if new_providers is not None: - providers.update(new_providers) - return providers - - -def _parse_required_providers(content: dict) -> Union[None, Dict[str, Dict[str, str]]]: - if "terraform" not in content: - return None - - providers = {} - terraform_blocks = content["terraform"] - - for block in terraform_blocks: - if "required_providers" in block: - for required_provider in block["required_providers"]: - for k, v in required_provider.items(): - providers[k] = v - - if len(providers.keys()) == 0: - return None - - return providers diff --git a/tfworker/util/terraform_helpers.py b/tfworker/util/terraform_helpers.py new file mode 100644 index 0000000..f6d535e --- /dev/null +++ b/tfworker/util/terraform_helpers.py @@ -0,0 +1,179 @@ +import json +import os +import pathlib +from tempfile import TemporaryDirectory +from typing import Dict, List, Union + +import click +import hcl2 + +from tfworker.providers.providers_collection import ProvidersCollection +from tfworker.types import ProviderGID +from tfworker.util.system import get_platform + + +def _not_in_cache(gid: ProviderGID, version: str, cache_dir: str) -> bool: + """ + Check if the provider is not in the cache directory. + + Args: + provider (str): The provider to check. + cache_dir (str): The cache directory. + + Returns: + bool: True if the provider is not in the cache directory. + """ + provider_dir = pathlib.Path(cache_dir) / gid.hostname / gid.namespace / gid.type + platform = get_platform() + if not provider_dir.exists(): + return True + + # look for version.json and terraform-provider-_version_platform.zip in the provider directory + version_file = provider_dir / f"{version}.json" + provider_file = ( + provider_dir + / f"terraform-provider-{gid.type}_{version}_{platform[0]}_{platform[1]}.zip" + ) + if not version_file.exists() or not provider_file.exists(): + return True + return False + + +def _get_cached_hash(gid: ProviderGID, version: str, cache_dir: str) -> str: + """ + Get the hash of the cached provider. + + Args: + provider (str): The provider to get the hash for. + cache_dir (str): The cache directory. + + Returns: + str: The hash of the cached provider. + + Raises: + ValueError: If the provider hash can not be determined but the file is present + """ + provider_dir = _get_provider_cache_dir(gid, cache_dir) + version_file = provider_dir / f"{version}.json" + with open(version_file, "r") as f: + hash_data = json.load(f) + + platform = get_platform() + + return hash_data["archives"][f"{platform[0]}_{platform[1]}"]["hashes"] + + +def _write_mirror_configuration( + providers: ProvidersCollection, working_dir: str, cache_dir: str +) -> TemporaryDirectory: + """ + Write the mirror configuration to a temporary directory in the working directory. + + Args: + providers (ProvidersCollection): The providers to mirror. + working_dir (str): The working directory. + + Returns: + TemporaryDirectory: A temporary directory containing the mirror configuration. + + Raises: + IndexError: If there are no providers to mirror. 
diff --git a/tfworker/util/terraform_helpers.py b/tfworker/util/terraform_helpers.py
new file mode 100644
index 0000000..f6d535e
--- /dev/null
+++ b/tfworker/util/terraform_helpers.py
@@ -0,0 +1,179 @@
+import json
+import os
+import pathlib
+from tempfile import TemporaryDirectory
+from typing import Dict, List, Union
+
+import click
+import hcl2
+
+from tfworker.providers.providers_collection import ProvidersCollection
+from tfworker.types import ProviderGID
+from tfworker.util.system import get_platform
+
+
+def _not_in_cache(gid: ProviderGID, version: str, cache_dir: str) -> bool:
+    """
+    Check if the provider is not in the cache directory.
+
+    Args:
+        gid (ProviderGID): The provider GID to check.
+        version (str): The provider version to check.
+        cache_dir (str): The cache directory.
+
+    Returns:
+        bool: True if the provider is not in the cache directory.
+    """
+    provider_dir = pathlib.Path(cache_dir) / gid.hostname / gid.namespace / gid.type
+    platform = get_platform()
+    if not provider_dir.exists():
+        return True
+
+    # look for <version>.json and terraform-provider-<type>_<version>_<os>_<arch>.zip
+    # in the provider directory
+    version_file = provider_dir / f"{version}.json"
+    provider_file = (
+        provider_dir
+        / f"terraform-provider-{gid.type}_{version}_{platform[0]}_{platform[1]}.zip"
+    )
+    if not version_file.exists() or not provider_file.exists():
+        return True
+    return False
+
+
+def _get_cached_hash(gid: ProviderGID, version: str, cache_dir: str) -> List[str]:
+    """
+    Get the hashes of the cached provider.
+
+    Args:
+        gid (ProviderGID): The provider GID to get the hashes for.
+        version (str): The provider version to get the hashes for.
+        cache_dir (str): The cache directory.
+
+    Returns:
+        List[str]: The hashes of the cached provider for the current platform.
+
+    Raises:
+        ValueError: If the provider hash cannot be determined even though the
+            file is present.
+    """
+    provider_dir = _get_provider_cache_dir(gid, cache_dir)
+    version_file = provider_dir / f"{version}.json"
+    with open(version_file, "r") as f:
+        hash_data = json.load(f)
+
+    platform = get_platform()
+
+    return hash_data["archives"][f"{platform[0]}_{platform[1]}"]["hashes"]
+
+
+def _write_mirror_configuration(
+    providers: ProvidersCollection, working_dir: str, cache_dir: str
+) -> TemporaryDirectory:
+    """
+    Write the mirror configuration to a temporary directory in the working directory.
+
+    Args:
+        providers (ProvidersCollection): The providers to mirror.
+        working_dir (str): The working directory.
+        cache_dir (str): The cache directory.
+
+    Returns:
+        TemporaryDirectory: A temporary directory containing the mirror configuration.
+
+    Raises:
+        IndexError: If there are no providers to mirror.
+    """
+    includes = [x for x in providers if _not_in_cache(x.gid, x.version, cache_dir)]
+    if len(includes) == 0:
+        raise IndexError("No providers to mirror")
+    click.secho(f"Mirroring providers: {includes}", fg="yellow")
+
+    mirror_configuration = _create_mirror_configuration(
+        providers=providers, includes=includes
+    )
+    temp_dir = TemporaryDirectory(dir=working_dir)
+    mirror_file = pathlib.Path(temp_dir.name) / "terraform.tf"
+    with open(mirror_file, "w") as f:
+        f.write(mirror_configuration)
+    return temp_dir
+
+
+def _create_mirror_configuration(
+    providers: ProvidersCollection, includes: List[str] = []
+) -> str:
+    """
+    Generate a terraform configuration file with all of the providers
+    to mirror.
+    """
+    tf_string = []
+    tf_string.append("terraform {")
+    tf_string.append(providers.required_hcl(includes=includes))
+    tf_string.append("}")
+    return "\n".join(tf_string)
+
+
+def _validate_cache_dir(cache_dir: str) -> None:
+    """
+    Validate the cache directory; it must exist, be a directory, and be
+    readable, writable, and executable (searchable).
+
+    Args:
+        cache_dir (str): The cache directory.
+    """
+    cache_dir = pathlib.Path(cache_dir)
+    if not cache_dir.exists():
+        click.secho(f"Cache directory {cache_dir} does not exist", fg="red")
+        raise SystemExit(1)
+    if not cache_dir.is_dir():
+        click.secho(f"Cache directory {cache_dir} is not a directory", fg="red")
+        raise SystemExit(1)
+    if not os.access(cache_dir, os.W_OK):
+        click.secho(f"Cache directory {cache_dir} is not writable", fg="red")
+        raise SystemExit(1)
+    if not os.access(cache_dir, os.R_OK):
+        click.secho(f"Cache directory {cache_dir} is not readable", fg="red")
+        raise SystemExit(1)
+    if not os.access(cache_dir, os.X_OK):
+        click.secho(f"Cache directory {cache_dir} is not executable", fg="red")
+        raise SystemExit(1)
+
+
+def _get_provider_cache_dir(gid: ProviderGID, cache_dir: str) -> pathlib.Path:
+    """
+    Get the cache directory for a provider.
+
+    Args:
+        gid (ProviderGID): The provider GID.
+        cache_dir (str): The cache directory.
+
+    Returns:
+        pathlib.Path: The cache directory for the provider.
+    """
+    return pathlib.Path(cache_dir) / gid.hostname / gid.namespace / gid.type
+
+
+def _parse_required_providers(content: dict) -> Union[None, Dict[str, Dict[str, str]]]:
+    """
+    Extract the required_providers mapping from a parsed HCL document,
+    or return None if the document declares none.
+    """
+    if "terraform" not in content:
+        return None
+
+    providers = {}
+    terraform_blocks = content["terraform"]
+
+    for block in terraform_blocks:
+        if "required_providers" in block:
+            for required_provider in block["required_providers"]:
+                for k, v in required_provider.items():
+                    providers[k] = v
+
+    if len(providers.keys()) == 0:
+        return None
+
+    return providers
+
+
+def _find_required_providers(search_dir: str) -> Dict[str, Dict[str, str]]:
+    """
+    Walk search_dir and merge the required_providers found in every .tf file.
+    """
+    providers = {}
+    for root, _, files in os.walk(search_dir):
+        for file in files:
+            if file.endswith(".tf"):
+                with open(f"{root}/{file}", "r") as f:
+                    content = hcl2.load(f)
+                    new_providers = _parse_required_providers(content)
+                    if new_providers is not None:
+                        providers.update(new_providers)
+    return providers
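The helpers above all agree on one cache layout: a directory per provider GID containing a <version>.json hash-metadata file and the mirrored provider archive. A self-contained sketch of the paths _not_in_cache and _get_cached_hash look for, with all concrete values illustrative:

    import pathlib


    def expected_cache_paths(
        cache_dir: str,
        hostname: str,
        namespace: str,
        ptype: str,
        version: str,
        os_name: str,
        arch: str,
    ) -> tuple[pathlib.Path, pathlib.Path]:
        # Mirrors _get_provider_cache_dir: <cache>/<hostname>/<namespace>/<type>/
        provider_dir = pathlib.Path(cache_dir) / hostname / namespace / ptype
        version_file = provider_dir / f"{version}.json"  # per-platform "hashes" live here
        archive = provider_dir / (
            f"terraform-provider-{ptype}_{version}_{os_name}_{arch}.zip"
        )
        return version_file, archive


    # registry.terraform.io/hashicorp/aws 5.50.0 on linux/amd64 (illustrative):
    for path in expected_cache_paths(
        "/tmp/tfworker-cache", "registry.terraform.io", "hashicorp", "aws",
        "5.50.0", "linux", "amd64",
    ):
        print(path)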